From cf82c00c5421ab333d4c00b73ce79af7f4b663ca Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Sun, 1 Dec 2024 11:43:38 -0500 Subject: [PATCH 01/29] start new config, get rid of unified schema, start repository store --- .vscode/extensions.json | 3 +- SQL/database_schema.sql | 15 ++++++ SQL/unified_schema.sql | 17 ------- citadel.dme | 4 ++ code/__HELPERS/text.dm | 3 -- .../configuration/entries/database.dm | 3 -- code/controllers/repository.dm | 32 +++++++++++- code/controllers/repository/designs.dm | 3 ++ code/controllers/toml_config/README.md | 3 ++ .../toml_config/entries/backend.dm | 6 +++ .../toml_config/entries/backend.repository.dm | 16 ++++++ code/controllers/toml_config/toml_config.dm | 51 +++++++++++++++++++ .../toml_config/toml_config_entry.dm | 42 +++++++++++++++ config.default/README.md | 4 ++ config.default/config.toml | 2 + config/config.toml | 1 + tools/ci/config.toml | 0 tools/ci/run_server.sh | 1 + 18 files changed, 181 insertions(+), 25 deletions(-) delete mode 100644 SQL/unified_schema.sql create mode 100644 code/controllers/toml_config/README.md create mode 100644 code/controllers/toml_config/entries/backend.dm create mode 100644 code/controllers/toml_config/entries/backend.repository.dm create mode 100644 code/controllers/toml_config/toml_config.dm create mode 100644 code/controllers/toml_config/toml_config_entry.dm create mode 100644 config.default/README.md create mode 100644 config.default/config.toml create mode 100644 config/config.toml create mode 100644 tools/ci/config.toml diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 87869168fcf6..44df201905e9 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -8,6 +8,7 @@ "donkie.vscode-tgstation-test-adapter", "anturk.dmi-editor", "aaron-bond.better-comments", - "ss13.opendream" + "ss13.opendream", + "tamasfe.even-better-toml" ] } diff --git a/SQL/database_schema.sql b/SQL/database_schema.sql index f1b50df6fd40..5a74011dbe77 100644 --- a/SQL/database_schema.sql +++ b/SQL/database_schema.sql @@ -18,6 +18,21 @@ CREATE TABLE IF NOT EXISTS `%_PREFIX_%schema_revision` ( PRIMARY KEY (`major`, `minor`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; +-- Backend Stores -- +-- -- + +CREATE TABLE IF NOT EXISTS `%_PREFIX_%backend_repository` ( + `repository` VARCHAR(64) NOT NULL, + `id` VARCHAR(128) NOT NULL, + `version` INT(11) NOT NULL, + `data` MEDIUMTEXT NOT NULL, + `storedTime` DATETIME NOT NULL DEFAULT Now(), + `modifiedTime` DATETIME NOT NULL DEFAULT Now(), + PRIMARY KEY(`repository`, `id`), + INDEX(`repository`), + INDEX(`id`) +) + -- persistence -- -- SSpersistence modules/bulk_entity diff --git a/SQL/unified_schema.sql b/SQL/unified_schema.sql deleted file mode 100644 index 32441c5e22e6..000000000000 --- a/SQL/unified_schema.sql +++ /dev/null @@ -1,17 +0,0 @@ -/** - * make sure to bump schema version and mark changes in database_changelog.md! - * - * you MUST use unified_ as a prefix. 
- * - * unified schema for citadel, **sync changes to both servers.** - **/ - --- --- Table structure for table `schema_revision` --- -CREATE TABLE IF NOT EXISTS `unified_schema_revision` ( - `major` TINYINT(3) unsigned NOT NULL, - `minor` TINYINT(3) unsigned NOT NULL, - `date` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, - PRIMARY KEY (`major`, `minor`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; diff --git a/citadel.dme b/citadel.dme index 2c00ac7ddeb7..b1c5bc8633d6 100644 --- a/citadel.dme +++ b/citadel.dme @@ -685,6 +685,10 @@ #include "code\controllers\subsystem\sound\_sound.dm" #include "code\controllers\subsystem\sound\channel_manager.dm" #include "code\controllers\subsystem\sound\soundbyte_manager.dm" +#include "code\controllers\toml_config\toml_config.dm" +#include "code\controllers\toml_config\toml_config_entry.dm" +#include "code\controllers\toml_config\entries\backend.dm" +#include "code\controllers\toml_config\entries\backend.repository.dm" #include "code\datums\ability.dm" #include "code\datums\ability_handler.dm" #include "code\datums\access.dm" diff --git a/code/__HELPERS/text.dm b/code/__HELPERS/text.dm index 7fcdc575e3a9..9abb75fb9732 100644 --- a/code/__HELPERS/text.dm +++ b/code/__HELPERS/text.dm @@ -15,9 +15,6 @@ /proc/format_table_name(table) return CONFIG_GET(string/sql_server_prefix) + table -/proc/format_unified_table_name(table) - return CONFIG_GET(string/sql_unified_prefix) + table - /** *! Text sanitization */ diff --git a/code/controllers/configuration/entries/database.dm b/code/controllers/configuration/entries/database.dm index 3fbc32d38fe7..f930d31acbfb 100644 --- a/code/controllers/configuration/entries/database.dm +++ b/code/controllers/configuration/entries/database.dm @@ -4,9 +4,6 @@ /datum/config_entry/string/sql_server_prefix protection = CONFIG_ENTRY_LOCKED -/datum/config_entry/string/sql_unified_prefix - protection = CONFIG_ENTRY_LOCKED - /datum/config_entry/string/sql_address protection = CONFIG_ENTRY_HIDDEN | CONFIG_ENTRY_LOCKED config_entry_value = "localhost" diff --git a/code/controllers/repository.dm b/code/controllers/repository.dm index 402a48e30dd4..a10f4b1dbb45 100644 --- a/code/controllers/repository.dm +++ b/code/controllers/repository.dm @@ -25,6 +25,13 @@ /// expected type of prototype var/expected_type + /// database key; this is immutable. + /// * persistence is disabled if this is not set + var/database_key + /// store version + /// * persistence is disabled if this is not set + /// * migration is triggered if this doesn't match a loaded entry + var/store_version /// by-id lookup var/list/id_lookup @@ -34,6 +41,9 @@ /// fetched subtype lists var/tmp/list/subtype_lists + /// 'doesn't exist' cache for DB loads + var/tmp/list/doesnt_exist_cache + /// temporary id to path lookup used during init // todo: figure out a way to not do this, this is bad var/tmp/list/init_reverse_lookup_shim @@ -43,6 +53,7 @@ type_lookup = list() subtype_lists = list() init_reverse_lookup_shim = list() + doesnt_exist_cache = list() for(var/datum/prototype/casted as anything in subtypesof(expected_type)) if(initial(casted.abstract_type) == casted) continue @@ -119,7 +130,7 @@ if(init_reverse_lookup_shim) var/potential_path = init_reverse_lookup_shim[type_or_id] return fetch(potential_path) - return id_lookup[type_or_id] + return id_lookup[type_or_id] || handle_db_load(type_or_id) else if(ispath(type_or_id)) . = type_lookup[type_or_id] if(.) 
@@ -185,6 +196,7 @@ */ /datum/controller/repository/proc/register(datum/prototype/instance) return load(instance) + #warn db store //* Private API *// @@ -230,3 +242,21 @@ subtype_lists = list() type_lookup -= instance.type return TRUE + +/** + * Perform migration on a data-list from the database. + * + * * Edit the passed in list directly. + */ +/datum/controller/repository/proc/migrate(list/modifying, from_version) + PROTECTED_PROC(TRUE) + +/datum/controller/repository/proc/handle_db_store(datum/prototype/instance) + doesnt_exist_cache -= instance.id + +/datum/controller/repository/proc/handle_db_load(instance_id) + if(doesnt_exist_cache[instance_id]) + return + + +#warn impl diff --git a/code/controllers/repository/designs.dm b/code/controllers/repository/designs.dm index 23ed3186a240..9d7fca6884f0 100644 --- a/code/controllers/repository/designs.dm +++ b/code/controllers/repository/designs.dm @@ -4,6 +4,7 @@ REPOSITORY_DEF(designs) name = "Repository - Designs" expected_type = /datum/prototype/design + database_key = "design" //* caches *// @@ -32,3 +33,5 @@ REPOSITORY_DEF(designs) autolathe_design_ids -= instance.id if(istype(instance, /datum/prototype/design/medical)) medical_mini_design_ids -= instance.id + +#warn persistence-ify designs diff --git a/code/controllers/toml_config/README.md b/code/controllers/toml_config/README.md new file mode 100644 index 000000000000..025e5b593f15 --- /dev/null +++ b/code/controllers/toml_config/README.md @@ -0,0 +1,3 @@ +# Configuration Module + +Experimental new TOML configuration to replace old configuration with eventually. diff --git a/code/controllers/toml_config/entries/backend.dm b/code/controllers/toml_config/entries/backend.dm new file mode 100644 index 000000000000..25e66949ac30 --- /dev/null +++ b/code/controllers/toml_config/entries/backend.dm @@ -0,0 +1,6 @@ +//* This file is explicitly licensed under the MIT license. *// +//* Copyright (c) 2024 Citadel Station Developers *// + +/datum/toml_config_entry/backend + abstract_type = /datum/toml_config_entry/backend + category = "backend" diff --git a/code/controllers/toml_config/entries/backend.repository.dm b/code/controllers/toml_config/entries/backend.repository.dm new file mode 100644 index 000000000000..8a3ddc733af4 --- /dev/null +++ b/code/controllers/toml_config/entries/backend.repository.dm @@ -0,0 +1,16 @@ +//* This file is explicitly licensed under the MIT license. *// +//* Copyright (c) 2024 Citadel Station Developers *// + +/datum/toml_config_entry/backend/repository + abstract_type = /datum/toml_config_entry/backend/repository + category = "backend.repository" + +/datum/toml_config_entry/backend/repository/persistence + key = "persistence" + desc = {" + Enable repository persistence. This requires the database to be available. Without this, most persistence + features will not function. + "} + default = TRUE + +#warn impl diff --git a/code/controllers/toml_config/toml_config.dm b/code/controllers/toml_config/toml_config.dm new file mode 100644 index 000000000000..9c9fbf026c86 --- /dev/null +++ b/code/controllers/toml_config/toml_config.dm @@ -0,0 +1,51 @@ +//* This file is explicitly licensed under the MIT license. *// +//* Copyright (c) 2024 Citadel Station Developers *// + +GLOBAL_DATUM(toml_config, /datum/toml_config) + +/datum/toml_config + /// Entries by type. + VAR_PRIVATE/list/datum/toml_config_entry/keyed_entries +/** + * HEY! LISTEN! 
By calling this proc you are affirming that: + * + * * The entry type you are passing in is static and not a variable that can be tampered with. + * * The value you get will be immediately consumed in a non-VV-able manner. + */ +/datum/toml_config/proc/get_sensitive_entry(datum/toml_config_entry/entry_type) + +/** + * HEY! LISTEN! By calling this proc you are affirming that: + * + * * The entry type you are passing in is static and not a variable that can be tampered with. + * * The value you are passing in is trusted and validated and not a variable that can be tampered with. + */ +/datum/toml_config/proc/set_sensitive_entry(datum/toml_config_entry/entry_type, value) + +/datum/toml_config/proc/get_entry(datum/toml_config_entry/entry_type) + +/datum/toml_config/proc/set_entry(datum/toml_config_entry/entry_type, value) + +/** + * Automatically loads default config, and the server's config file. + */ +/datum/toml_config/proc/reload() + reset() + load("config.default/config.toml") + load("config/config.toml") + +/** + * Resets the configuration. + */ +/datum/toml_config/proc/reset() + +/** + * Loads from a given layer. + * * This will not reset the configuration. Repeated calls to load will allow for layered configuration. + * + * HEY! LISTEN! By calling this proc you are affirming that: + * * The file you are passing in is trusted and not a variable that can be tampered with via VV. + */ +/datum/toml_config/proc/load(filelike) + +#warn impl diff --git a/code/controllers/toml_config/toml_config_entry.dm b/code/controllers/toml_config/toml_config_entry.dm new file mode 100644 index 000000000000..ccc69481a281 --- /dev/null +++ b/code/controllers/toml_config/toml_config_entry.dm @@ -0,0 +1,42 @@ +//* This file is explicitly licensed under the MIT license. *// +//* Copyright (c) 2024 Citadel Station Developers *// + +/** + * Config entry. + * + * Supports at time of writing: + * * numbers + * * strings + * * (nested) lists + * * (nested) dictionaries + * + * Supports at time of writing auditing VV edits to: + * * numbers + * * strings + */ +/datum/toml_config_entry + abstract_type = /datum/toml_config_entry + /// key / name + var/key + /// category / where this is + var/category + + /// description of this entry + var/desc + + /// default value + var/default + /// current value + var/value + + /// vv edit disallowed + var/vv_locked = FALSE + /// vv read disallowed + /// * does not automatically imply [vv_locked] + var/vv_secret = FALSE + /// sensitive + /// * requires get_sensitive_entry() and set_sensitive_entry() to read/write + /// * does not actually imply [vv_locked] and [vv_secret] + var/sensitive = FALSE + +#warn impl diff --git a/config.default/README.md b/config.default/README.md new file mode 100644 index 000000000000..616c7c225371 --- /dev/null +++ b/config.default/README.md @@ -0,0 +1,4 @@ +# Default Configuration + +Due to the nature of TGS, we can either make an entire directory and everything in it static or make nothing in it static. +This is annoying, and necessitates default config be in another directory. diff --git a/config.default/config.toml b/config.default/config.toml new file mode 100644 index 000000000000..d95a377f9c1e --- /dev/null +++ b/config.default/config.toml @@ -0,0 +1,2 @@ +[[backend.repository]] + persistence = true diff --git a/config/config.toml b/config/config.toml new file mode 100644 index 000000000000..c38562316155 --- /dev/null +++ b/config/config.toml @@ -0,0 +1 @@ +# Config file. Loaded second in the load order, after `config.default.toml`. 
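
For illustration, a server override under this layered load order could simply re-declare a key from the default file. This is a minimal sketch: it assumes the later `config/config.toml` layer overrides values loaded from `config.default/config.toml` (per the layered-load comment on `reload()` in `toml_config.dm`) and reuses the same table layout as the default file.

```toml
# Hypothetical server-side override: disable repository persistence on a database-less dev setup.
[[backend.repository]]
	persistence = false
```
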
diff --git a/tools/ci/config.toml b/tools/ci/config.toml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tools/ci/run_server.sh b/tools/ci/run_server.sh index da75f5597f7f..1ff652438306 100755 --- a/tools/ci/run_server.sh +++ b/tools/ci/run_server.sh @@ -7,6 +7,7 @@ mkdir ci_test/data #test config cp tools/ci/ci_config.txt ci_test/config/config.txt +cp tools/ci/config.toml ci_test/config/config.toml cd ci_test DreamDaemon citadel.dmb -close -trusted -verbose -params "log-directory=ci" From f62b2d9f2bb21905e30eb08095880c766a3db73b Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Thu, 5 Dec 2024 08:16:10 -0500 Subject: [PATCH 02/29] old --- {SQL => sql_old}/database_changelog.md | 0 {SQL => sql_old}/database_schema.sql | 0 {SQL => sql_old}/database_schema_prefixed.sql | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename {SQL => sql_old}/database_changelog.md (100%) rename {SQL => sql_old}/database_schema.sql (100%) rename {SQL => sql_old}/database_schema_prefixed.sql (100%) diff --git a/SQL/database_changelog.md b/sql_old/database_changelog.md similarity index 100% rename from SQL/database_changelog.md rename to sql_old/database_changelog.md diff --git a/SQL/database_schema.sql b/sql_old/database_schema.sql similarity index 100% rename from SQL/database_schema.sql rename to sql_old/database_schema.sql diff --git a/SQL/database_schema_prefixed.sql b/sql_old/database_schema_prefixed.sql similarity index 100% rename from SQL/database_schema_prefixed.sql rename to sql_old/database_schema_prefixed.sql From fcb533b59c7264f05d4f5a3144c3cfb69bec374c Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Thu, 5 Dec 2024 08:27:09 -0500 Subject: [PATCH 03/29] t --- README.md | 8 ++++++-- sql_old/database_schema_prefixed.sql | 22 +++++++++++++++------- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index b0228569fead..09b0e395273f 100644 --- a/README.md +++ b/README.md @@ -42,8 +42,12 @@ On **May 9, 2022** we have changed the way to compile the codebase. ## SQL Setup The SQL backend for the library and stats tracking requires a MariaDB server. -Your server details go in /config/legacy/dbconfig.txt, and the SQL schema is in /SQL/tgstation_schema.sql. -More detailed setup instructions arecoming soon, for now ask in our Discord. +Your server details go in /config/legacy/dbconfig.txt. + +Flyway is used for setup and migration. Run the migrations in `sql/migrations` against your database, and everything should just work. +We do not use table prefixes. + +More detailed setup instructions are coming soon, for now ask in our Discord. 
todo: update this section

diff --git a/sql_old/database_schema_prefixed.sql b/sql_old/database_schema_prefixed.sql
index 58b5b32cc103..0319f8832e91 100644
--- a/sql_old/database_schema_prefixed.sql
+++ b/sql_old/database_schema_prefixed.sql
@@ -18,6 +18,21 @@ CREATE TABLE IF NOT EXISTS `rp_schema_revision` (
 	PRIMARY KEY (`major`, `minor`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
 
+-- Backend Stores --
+-- --
+
+CREATE TABLE IF NOT EXISTS `rp_backend_repository` (
+	`repository` VARCHAR(64) NOT NULL,
+	`id` VARCHAR(128) NOT NULL,
+	`version` INT(11) NOT NULL,
+	`data` MEDIUMTEXT NOT NULL,
+	`storedTime` DATETIME NOT NULL DEFAULT Now(),
+	`modifiedTime` DATETIME NOT NULL DEFAULT Now(),
+	PRIMARY KEY(`repository`, `id`),
+	INDEX(`repository`),
+	INDEX(`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
+
 -- persistence --
 
 -- SSpersistence modules/bulk_entity
@@ -381,13 +396,6 @@ CREATE TABLE IF NOT EXISTS `rp_privacy` (
 	PRIMARY KEY (`id`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
 
-CREATE TABLE IF NOT EXISTS `rp_vr_player_hours` (
-	`ckey` varchar(32) CHARACTER SET latin1 COLLATE latin1_general_ci NOT NULL,
-	`department` varchar(64) CHARACTER SET latin1 COLLATE latin1_general_ci NOT NULL,
-	`hours` double NOT NULL,
-	PRIMARY KEY (`ckey`,`department`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
-
 CREATE TABLE IF NOT EXISTS `rp_death` (
 	`id` INT(11) NOT NULL AUTO_INCREMENT ,
 	`pod` TEXT NOT NULL COMMENT 'Place of death' ,

From dd443a5e39cec6e66a108fff6c5bdf735031d683 Mon Sep 17 00:00:00 2001
From: silicons <2003111+silicons@users.noreply.github.com>
Date: Thu, 5 Dec 2024 08:53:32 -0500
Subject: [PATCH 04/29] start this mess

---
 code/__HELPERS/nameof.dm                      |   6 +
 .../configuration/configuration.dm            |   2 +-
 code/controllers/master.dm                    |   4 +-
 code/controllers/repository.dm                |  16 +-
 code/controllers/subsystem.dm                 |   2 +-
 code/controllers/subsystem/assets.dm          |   2 +-
 code/controllers/subsystem/ipintel.dm         |   2 +-
 .../toml_config/toml_config_entry.dm          |  60 ++-
 sql/README.md                                 |  38 ++
 .../migrations/V0.0.1__PrepLegacyTables.sql   |   2 +
 sql/migrations/V0.0.2__DeprefixTables.sql     |  33 ++
 sql/migrations/V0.0.3__AddRepositoryStore.sql |  11 +
 ...0.0.4__RemakePlaytimeTriggerJustInCase.sql |  15 +
 sql_old/database_schema.sql                   | 454 ------------------
 14 files changed, 184 insertions(+), 463 deletions(-)
 create mode 100644 sql/README.md
 rename sql_old/database_schema_prefixed.sql => sql/migrations/V0.0.1__PrepLegacyTables.sql (99%)
 create mode 100644 sql/migrations/V0.0.2__DeprefixTables.sql
 create mode 100644 sql/migrations/V0.0.3__AddRepositoryStore.sql
 create mode 100644 sql/migrations/V0.0.4__RemakePlaytimeTriggerJustInCase.sql
 delete mode 100644 sql_old/database_schema.sql

diff --git a/code/__HELPERS/nameof.dm b/code/__HELPERS/nameof.dm
index 7cd5777f4652..6e625578ae60 100644
--- a/code/__HELPERS/nameof.dm
+++ b/code/__HELPERS/nameof.dm
@@ -4,6 +4,12 @@
  * datum may be null, but it does need to be a typed var.
  **/
 #define NAMEOF(datum, X) (#X || ##datum.##X)
+/**
+ * NAMEOF_PROC: Compile time checked proc name to string conversion
+ * evaluates to a string equal to "X", but compile errors if X isn't a proc on datum.
+ * datum may be null, but it does need to be a typed var.
+ **/ +#define NAMEOF_PROC(datum, X) (#X || ##datum.##X()) /** * NAMEOF that actually works in static definitions because src::type requires src to be defined diff --git a/code/controllers/configuration/configuration.dm b/code/controllers/configuration/configuration.dm index b933dea3bd4f..e8f712a9c106 100644 --- a/code/controllers/configuration/configuration.dm +++ b/code/controllers/configuration/configuration.dm @@ -59,7 +59,7 @@ loaded = TRUE if (Master) - Master.OnConfigLoad() + Master.on_config_reload() /datum/controller/configuration/proc/full_wipe() if(IsAdminAdvancedProcCall()) diff --git a/code/controllers/master.dm b/code/controllers/master.dm index 7d0c002dad77..35cf731a0f34 100644 --- a/code/controllers/master.dm +++ b/code/controllers/master.dm @@ -883,10 +883,10 @@ GLOBAL_REAL(Master, /datum/controller/master) = new var/datum/controller/subsystem/SS = S SS.StopLoadingMap() -/datum/controller/master/proc/OnConfigLoad() +/datum/controller/master/proc/on_config_reload() for (var/thing in subsystems) var/datum/controller/subsystem/SS = thing - SS.OnConfigLoad() + SS.on_config_reload() /** * CitRP snowflake special: Check if any subsystems are sleeping. diff --git a/code/controllers/repository.dm b/code/controllers/repository.dm index a10f4b1dbb45..06fb2d64c087 100644 --- a/code/controllers/repository.dm +++ b/code/controllers/repository.dm @@ -195,8 +195,10 @@ * After this call, the repository now owns the instance, not whichever system created it. */ /datum/controller/repository/proc/register(datum/prototype/instance) - return load(instance) - #warn db store + . = load(instance) + if(!.) + return + handle_db_store(instance) //* Private API *// @@ -252,11 +254,21 @@ PROTECTED_PROC(TRUE) /datum/controller/repository/proc/handle_db_store(datum/prototype/instance) + if(!global.toml_config.get_entry(/datum/toml_config_entry/backend/repository/persistence)) + return doesnt_exist_cache -= instance.id /datum/controller/repository/proc/handle_db_load(instance_id) if(doesnt_exist_cache[instance_id]) return + var/const/doesnt_exist_cache_trim_at = 1000 + var/const/doesnt_exist_cache_trim_to = 500 + if(!global.toml_config.get_entry(/datum/toml_config_entry/backend/repository/persistence)) + doesnt_exist_cache[instance_id] = TRUE + if(length(doesnt_exist_cache) > doesnt_exist_cache_trim_at) + doesnt_exist_cache.len = doesnt_exist_cache_trim_to + return + #warn impl diff --git a/code/controllers/subsystem.dm b/code/controllers/subsystem.dm index bf1333d28b4e..0204320f6415 100644 --- a/code/controllers/subsystem.dm +++ b/code/controllers/subsystem.dm @@ -421,7 +421,7 @@ state = SS_PAUSING /// Called after the config has been loaded or reloaded. 
-/datum/controller/subsystem/proc/OnConfigLoad() +/datum/controller/subsystem/proc/on_config_reload() return /** diff --git a/code/controllers/subsystem/assets.dm b/code/controllers/subsystem/assets.dm index eff83427a200..9f40129f4e55 100644 --- a/code/controllers/subsystem/assets.dm +++ b/code/controllers/subsystem/assets.dm @@ -191,7 +191,7 @@ SUBSYSTEM_DEF(assets) /datum/controller/subsystem/assets/proc/get_dynamic_item_url_by_name(name) return dynamic_asset_items_by_name[name]?.get_url() -/datum/controller/subsystem/assets/OnConfigLoad() +/datum/controller/subsystem/assets/on_config_reload() var/newtransporttype = /datum/asset_transport/browse_rsc switch (CONFIG_GET(string/asset_transport)) if ("webroot") diff --git a/code/controllers/subsystem/ipintel.dm b/code/controllers/subsystem/ipintel.dm index 170e520e4cfe..7306fc085ee3 100644 --- a/code/controllers/subsystem/ipintel.dm +++ b/code/controllers/subsystem/ipintel.dm @@ -20,7 +20,7 @@ SUBSYSTEM_DEF(ipintel) /// max retries var/max_retries = 1 -/datum/controller/subsystem/ipintel/OnConfigLoad() +/datum/controller/subsystem/ipintel/on_config_reload() . = ..() enabled = !!CONFIG_GET(flag/ipintel_enabled) consequetive_errors = 0 diff --git a/code/controllers/toml_config/toml_config_entry.dm b/code/controllers/toml_config/toml_config_entry.dm index ccc69481a281..40ce2c279005 100644 --- a/code/controllers/toml_config/toml_config_entry.dm +++ b/code/controllers/toml_config/toml_config_entry.dm @@ -39,4 +39,62 @@ /// * does not actually imply [vv_locked] and [vv_secret] var/sensitive = FALSE -#warn impl +/datum/toml_config_entry/vv_edit_var(var_name, var_value, mass_edit, raw_edit) + switch(var_name) + if(NAMEOF(src, default)) + return FALSE + if(NAMEOF(src, value)) + if(vv_locked) + return FALSE + if(NAMEOF(src, key)) + return FALSE + if(NAMEOF(src, category)) + return FALSE + if(NAMEOF(src, desc)) + return FALSE + if(NAMEOF(src, vv_locked)) + return FALSE + if(NAMEOF(src, vv_secret)) + return FALSE + if(NAMEOF(src, sensitive)) + return FALSE + return ..() + +/datum/toml_config_entry/vv_get_var(var_name, resolve) + switch(var_name) + if(NAMEOF(src, value)) + if(vv_locked) + return "-- secret --" + return ..() + +/datum/toml_config_entry/CanProcCall(procname) + switch(procname) + if(NAMEOF_PROC(src, reset)) + return FALSE + if(NAMEOF_PROC(src, apply)) + return FALSE + return ..() + +/** + * Called once when resetting. + */ +/datum/toml_config_entry/proc/reset() + if(isnum(default)) + value = default + else if(istext(default)) + value = default + else if(islist(default)) + value = deep_copy_list(default) + else + CRASH("unexpected value in default.") + +/** + * Called once with the value from each load. + * + * Can be used to overlay values. + * + * @params + * * raw_config_value - Raw parsed data. We own the reference to this once this proc is called. + */ +/datum/toml_config_entry/proc/apply(raw_config_value) + value = raw_config_value diff --git a/sql/README.md b/sql/README.md new file mode 100644 index 000000000000..193118249fc7 --- /dev/null +++ b/sql/README.md @@ -0,0 +1,38 @@ +# SQL + +The SQL database is managed by flyway. + +Migrations are in sql/migrations. + +## What happened to prefixes? + +Prefixes are no longer necessary. + +## How To Deal With Old Tables + +Data may **never** be destroyed by migrations, only deprecated. + +Prefix them with `legacy_`. + +## Table Groupings + +### Backend - `backend_` + +Backend store for server systems. Required for persistence to function and for metrics to be recorded. 
+ +Contains: + +- Metrics +- Repositories +- Filestores + +### Character - `character_` + +Character store. Required for character persistence and fast character setup handling. + +Contains: + +- Characters +- Character records +- Character persistence +- Character things in general diff --git a/sql_old/database_schema_prefixed.sql b/sql/migrations/V0.0.1__PrepLegacyTables.sql similarity index 99% rename from sql_old/database_schema_prefixed.sql rename to sql/migrations/V0.0.1__PrepLegacyTables.sql index 0319f8832e91..11ba1fdfd5ef 100644 --- a/sql_old/database_schema_prefixed.sql +++ b/sql/migrations/V0.0.1__PrepLegacyTables.sql @@ -1,3 +1,5 @@ +-- DIRECT RIP FROM OLD PREFIXED TABLES FILE!! -- + /** * make sure to bump schema version and mark changes in database_changelog.md! * diff --git a/sql/migrations/V0.0.2__DeprefixTables.sql b/sql/migrations/V0.0.2__DeprefixTables.sql new file mode 100644 index 000000000000..5f8f6eaedecb --- /dev/null +++ b/sql/migrations/V0.0.2__DeprefixTables.sql @@ -0,0 +1,33 @@ +RENAME TABLE `rp_schema_revision` TO `schema_revision`; +RENAME TABLE `rp_persistence_bulk_entity` TO `persistence_bulk_entity`; +RENAME TABLE `rp_persistence_static_level_objects` TO `persistence_static_level_objects`; +RENAME TABLE `rp_persistence_static_map_objects` TO `persistence_static_map_objects`; +RENAME TABLE `rp_persistence_static_global_objects` TO `persistence_static_global_objects`; +RENAME TABLE `rp_persistence_dynamic_objects` TO `persistence_dynamic_objects`; +RENAME TABLE `rp_persistence_level_metadata` TO `persistence_level_metadata`; +RENAME TABLE `rp_persistence_string_kv` TO `persistence_string_kv`; +RENAME TABLE `rp_pictures` TO `pictures`; +RENAME TABLE `rp_photographs` TO `photographs`; +RENAME TABLE `rp_player_lookup` TO `player_lookup`; +RENAME TABLE `rp_player` TO `player`; +RENAME TABLE `rp_playtime` TO `playtime`; +RENAME TABLE `rp_playtime_log` TO `playtime_log`; +RENAME TABLE `rp_game_preferences` TO `game_preferences`; +RENAME TABLE `rp_ipintel` TO `ipintel`; +RENAME TABLE `rp_round` TO `round`; +RENAME TABLE `rp_connection_log` TO `connection_log`; +RENAME TABLE `rp_character` TO `character`; +RENAME TABLE `rp_admin` TO `admin`; +RENAME TABLE `rp_admin_log` TO `admin_log`; +RENAME TABLE `rp_ban` TO `ban`; +RENAME TABLE `rp_feedback` TO `feedback`; +RENAME TABLE `rp_poll_option` TO `poll_option`; +RENAME TABLE `rp_poll_question` TO `poll_question`; +RENAME TABLE `rp_poll_textreply` TO `poll_textreply`; +RENAME TABLE `rp_poll_vote` TO `poll_vote`; +RENAME TABLE `rp_privacy` TO `privacy`; +RENAME TABLE `rp_death` TO `death`; +RENAME TABLE `rp_karma` TO `karma`; +RENAME TABLE `rp_karmatotals` TO `karmatotals`; +RENAME TABLE `rp_library` TO `library`; +RENAME TABLE `rp_population` TO `population`; diff --git a/sql/migrations/V0.0.3__AddRepositoryStore.sql b/sql/migrations/V0.0.3__AddRepositoryStore.sql new file mode 100644 index 000000000000..57fcf52bfd96 --- /dev/null +++ b/sql/migrations/V0.0.3__AddRepositoryStore.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS `backend_repository` ( + `repository` VARCHAR(64) NOT NULL, + `id` VARCHAR(128) NOT NULL, + `version` INT(11) NOT NULL, + `data` MEDIUMTEXT NOT NULL, + `storedTime` DATETIME NOT NULL DEFAULT Now(), + `modifiedTime` DATETIME NOT NULL DEFAULT Now(), + PRIMARY KEY(`repository`, `id`), + INDEX(`repository`), + INDEX(`id`) +) diff --git a/sql/migrations/V0.0.4__RemakePlaytimeTriggerJustInCase.sql b/sql/migrations/V0.0.4__RemakePlaytimeTriggerJustInCase.sql new file mode 100644 index 
000000000000..a4ad866da02b
--- /dev/null
+++ b/sql/migrations/V0.0.4__RemakePlaytimeTriggerJustInCase.sql
@@ -0,0 +1,15 @@
+DROP TRIGGER IF EXISTS `playtimeTlogupdate`;
+DROP TRIGGER IF EXISTS `playtimeTloginsert`;
+DROP TRIGGER IF EXISTS `playtimeTlogdelete`;
+
+DELIMITER $$
+CREATE TRIGGER `playtimeTlogupdate` AFTER UPDATE ON `playtime` FOR EACH ROW BEGIN INSERT into `playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes-OLD.minutes);
+END
+$$
+CREATE TRIGGER `playtimeTloginsert` AFTER INSERT ON `playtime` FOR EACH ROW BEGIN INSERT into `playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes);
+END
+$$
+CREATE TRIGGER `playtimeTlogdelete` AFTER DELETE ON `playtime` FOR EACH ROW BEGIN INSERT into `playtime_log` (player, roleid, delta) VALUES (OLD.player, OLD.roleid, 0-OLD.minutes);
+END
+$$
+DELIMITER ;
diff --git a/sql_old/database_schema.sql b/sql_old/database_schema.sql
deleted file mode 100644
index 5a74011dbe77..000000000000
--- a/sql_old/database_schema.sql
+++ /dev/null
@@ -1,454 +0,0 @@
-/**
- * make sure to bump schema version and mark changes in database_changelog.md!
- *
- * default prefix is rp_
- * find replace case sensitive %_PREFIX_%
- * PRESERVE ANY vr_'s! We need to replace those tables and features at some point, that's how we konw.
- **/
-
--- core --
-
---
--- Table structure for table `schema_revision`
---
-CREATE TABLE IF NOT EXISTS `%_PREFIX_%schema_revision` (
-	`major` TINYINT(3) unsigned NOT NULL,
-	`minor` TINYINT(3) unsigned NOT NULL,
-	`date` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
-	PRIMARY KEY (`major`, `minor`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
-
--- Backend Stores --
--- --
-
-CREATE TABLE IF NOT EXISTS `%_PREFIX_%backend_repository` (
-	`repository` VARCHAR(64) NOT NULL,
-	`id` VARCHAR(128) NOT NULL,
-	`version` INT(11) NOT NULL,
-	`data` MEDIUMTEXT NOT NULL,
-	`storedTime` DATETIME NOT NULL DEFAULT Now(),
-	`modifiedTime` DATETIME NOT NULL DEFAULT Now(),
-	PRIMARY KEY(`repository`, `id`),
-	INDEX(`repository`),
-	INDEX(`id`)
-)
-
--- persistence --
-
--- SSpersistence modules/bulk_entity
-CREATE TABLE IF NOT EXISTS `%_PREFIX_%persistence_bulk_entity` (
-	`id` INT(24) NOT NULL AUTO_INCREMENT,
-	`generation` INT(11) NOT NULL,
-	`persistence_key` VARCHAR(64) NOT NULL,
-	`level_id` VARCHAR(64) NOT NULL,
-	`data` MEDIUMTEXT,
-	`round_id` INT(11) NOT NULL,
-	PRIMARY KEY (`id`),
-	INDEX(`level_id`, `generation`, `persistence_key`),
-	INDEX(`level_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
-
--- SSpersistence modules/level_objects
-CREATE TABLE IF NOT EXISTS `%_PREFIX_%persistence_static_level_objects` (
-	`generation` INT(11) NOT NULL,
-	`object_id` VARCHAR(64) NOT NULL,
-	`level_id` VARCHAR(64) NOT NULL,
-	`data` MEDIUMTEXT NOT NULL,
-	PRIMARY KEY(`generation`, `object_id`, `level_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
-
--- SSpersistence modules/level_objects
-CREATE TABLE IF NOT EXISTS `%_PREFIX_%persistence_static_map_objects` (
-	`generation` INT(11) NOT NULL,
-	`object_id` VARCHAR(64) NOT NULL,
-	`map_id` VARCHAR(64) NOT NULL,
-	`data` MEDIUMTEXT NOT NULL,
-	PRIMARY KEY(`generation`, `object_id`, `map_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
-
--- SSpersistence modules/level_objects
-CREATE TABLE IF NOT EXISTS `%_PREFIX_%persistence_static_global_objects` (
-	`generation` INT(11) NOT NULL,
-	`object_id` VARCHAR(64) NOT NULL,
-	`data` MEDIUMTEXT NOT NULL,
-	PRIMARY KEY(`generation`,
`object_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- SSpersistence modules/level_objects -CREATE TABLE IF NOT EXISTS `%_PREFIX_%persistence_dynamic_objects` ( - `generation` INT(11) NOT NULL, - `object_id` INT(24) NOT NULL AUTO_INCREMENT, - `level_id` VARCHAR(64) NOT NULL, - `prototype_id` VARCHAR(256) NOT NULL, - `status` INT(24) NOT NULL DEFAULT 0, - `data` MEDIUMTEXT NOT NULL, - `x` INT(8) NOT NULL, - `y` INT(8) NoT NULL, - PRIMARY KEY(`object_id`, `generation`), - INDEX(`object_id`), - INDEX(`level_id`, `generation`), - INDEX(`prototype_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- SSpersistence modules/spatial_metadata -CREATE TABLE IF NOT EXISTS `%_PREFIX_%persistence_level_metadata` ( - `created` DATETIME NOT NULL DEFAULT Now(), - `saved` DATETIME NOT NULL, - `saved_round_id` INT(11) NOT NULL, - `level_id` VARCHAR(64) NOT NULL, - `data` MEDIUMTEXT NOT NULL, - `generation` INT(11) NOT NULL, - PRIMARY KEY(`level_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- SSpersistence modules/string_kv -CREATE TABLE IF NOT EXISTS `%_PREFIX_%persistence_string_kv` ( - `created` DATETIME NOT NULL DEFAULT Now(), - `modified` DATETIME NOT NULL, - `key` VARCHAR(64) NOT NULL, - `value` MEDIUMTEXT NULL, - `group` VARCHAR(64) NOT NULL, - `revision` INT(11) NOT NULL, - PRIMARY KEY(`key`, `group`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- photography -- - --- picture table -- --- used to store data about pictures -- --- hash is in sha1 format. -- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%pictures` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `hash` char(40) NOT NULL, - `created` datetime NOT NULL DEFAULT Now(), - `width` int NOT NULL, - `height` int NOT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `hash` (`hash`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- photograph table -- --- used to store data about photographs -- --- picture is picture hash in picture table -- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%photographs` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `picture` char(40) NOT NULL, - `created` datetime NOT NULL DEFAULT Now(), - `scene` MEDIUMTEXT null, - `desc` MEDIUMTEXT null, - CONSTRAINT `linked_picture` FOREIGN KEY (`picture`) - REFERENCES `%_PREFIX_%pictures` (`hash`) - ON DELETE CASCADE - ON UPDATE CASCADE, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- Players -- - --- Player lookup table -- --- Used to look up player ID from ckey, as well as -- --- store last computerid/ip for a ckey. -- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%player_lookup` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `ckey` varchar(32) NOT NULL, - `firstseen` datetime NOT NULL, - `lastseen` datetime NOT NULL, - `ip` varchar(18) NOT NULL, - `computerid` varchar(32) NOT NULL, - `lastadminrank` varchar(32) NOT NULL DEFAULT 'Player', - `playerid` int(11), - PRIMARY KEY (`id`), - UNIQUE KEY `ckey` (`ckey`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- Primary player table -- --- Allows for one-to-many player-ckey association. 
-- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%player` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `flags` int(24) NOT NULL DEFAULT 0, - `firstseen` datetime NOT NULL DEFAULT Now(), - `lastseen` datetime NOT NULL, - `misc` MEDIUMTEXT NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- Playtime / JEXP -- - --- Role Time Table - Master -- --- Stores total role time. -- - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%playtime` ( - `player` INT(11) NOT NULL, - `roleid` VARCHAR(64) NOT NULL, - `minutes` INT UNSIGNED NOT NULL, - PRIMARY KEY(`player`, `roleid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- Role Time - Logging -- --- Stores changes in role time -- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%playtime_log` ( - `player` INT(11), - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `roleid` VARCHAR(64) NOT NULL, - `delta` INT(11) NOT NULL, - `datetime` TIMESTAMP NOT NULL DEFAULT NOW() ON UPDATE NOW(), - PRIMARY KEY (`id`), - KEY `player` (`player`), - KEY `roleid` (`roleid`), - KEY `datetime` (`datetime`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -DELIMITER $$ -CREATE TRIGGER `playtimeTlogupdate` AFTER UPDATE ON `%_PREFIX_%playtime` FOR EACH ROW BEGIN INSERT into `%_PREFIX_%playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes-OLD.minutes); -END -$$ -CREATE TRIGGER `playtimeTloginsert` AFTER INSERT ON `%_PREFIX_%playtime` FOR EACH ROW BEGIN INSERT into `%_PREFIX_%playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes); -END -$$ -CREATE TRIGGER `playtimeTlogdelete` AFTER DELETE ON `%_PREFIX_%playtime` FOR EACH ROW BEGIN INSERT into `%_PREFIX_%playtime_log` (player, roleid, delta) VALUES (OLD.player, OLD.roleid, 0-OLD.minutes); -END -$$ -DELIMITER ; - - --- Preferences -- - --- Stores game preferences -- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%game_preferences` ( - `player` INT(11) NOT NULL, - `entries` MEDIUMTEXT NOT NULL, - `misc` MEDIUMTEXT NOT NULL, - `keybinds` MEDIUMTEXT NOT NULL, - `toggles` MEDIUMTEXT NOT NULL, - `modified` DATETIME NOT NULL, - `version` INT(11) NOT NULL, - PRIMARY KEY (`player`), - CONSTRAINT `linked_player` FOREIGN KEY (`player`) - REFERENCES `%_PREFIX_%player` (`id`) - ON DELETE CASCADE - ON UPDATE CASCADE -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- Security - Ipintel -- - --- Ipintel Cache Table -- --- Stores cache entries for IPIntel -- --- IP is in INET_ATON. -- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%ipintel` ( - `ip` INT(10) unsigned NOT NULL, - `date` TIMESTAMP NOT NULL DEFAULT NOW() ON UPDATE NOW(), - `intel` double NOT NULL DEFAULT '0', - PRIMARY KEY (`ip`), - KEY `idx_ipintel` (`ip`, `intel`, `date`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- --- Table structure for table `round` --- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%round` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `initialize_datetime` DATETIME NOT NULL, - `start_datetime` DATETIME NULL, - `shutdown_datetime` DATETIME NULL, - `end_datetime` DATETIME NULL, - `server_ip` INT(10) UNSIGNED NOT NULL, - `server_port` SMALLINT(5) UNSIGNED NOT NULL, - `commit_hash` CHAR(40) NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- Connection log -- --- Logs all connections to the server. 
-- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%connection_log` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `datetime` datetime NOT NULL, - `serverip` varchar(45) NOT NULL, - `ckey` varchar(32) NOT NULL, - `ip` varchar(45) NOT NULL, - `computerid` varchar(32) NOT NULL, - PRIMARY KEY(`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- /datum/character - Character Table -- -CREATE TABLE IF NOT EXISTS `%_PREFIX_%character` ( - `id` INT(11) NOT NULL AUTO_INCREMENT, - `created` DATETIME NOT NULL DEFAULT Now(), - `last_played` DATETIME NULL, - `last_persisted` DATETIME NULL, - `playerid` INT(11) NOT NULL, - `canonical_name` VARCHAR(128) NOT NULL, - `persist_data` MEDIUMTEXT NULL, - `character_type` VARCHAR(64) NOT NULL, - PRIMARY KEY(`id`), - CONSTRAINT `character_has_player` FOREIGN KEY (`playerid`) - REFERENCES `%_PREFIX_%player` (`id`) - ON DELETE CASCADE - ON UPDATE CASCADE, - UNIQUE (`playerid`, `canonical_name`, `character_type`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%admin` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `ckey` varchar(32) NOT NULL, - `rank` varchar(32) NOT NULL DEFAULT 'Administrator', - `level` int(2) NOT NULL DEFAULT '0', - `flags` int(16) NOT NULL DEFAULT '0', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%admin_log` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `datetime` datetime NOT NULL, - `adminckey` varchar(32) NOT NULL, - `adminip` varchar(18) NOT NULL, - `log` text NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%ban` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `bantime` datetime NOT NULL, - `serverip` varchar(32) NOT NULL, - `bantype` varchar(32) NOT NULL, - `reason` text NOT NULL, - `job` varchar(32) DEFAULT NULL, - `duration` int(11) NOT NULL, - `rounds` int(11) DEFAULT NULL, - `expiration_time` datetime NOT NULL, - `ckey` varchar(32) NOT NULL, - `computerid` varchar(32) NOT NULL, - `ip` varchar(32) NOT NULL, - `a_ckey` varchar(32) NOT NULL, - `a_computerid` varchar(32) NOT NULL, - `a_ip` varchar(32) NOT NULL, - `who` text NOT NULL, - `adminwho` text NOT NULL, - `edits` text, - `unbanned` tinyint(1) DEFAULT NULL, - `unbanned_datetime` datetime DEFAULT NULL, - `unbanned_ckey` varchar(32) DEFAULT NULL, - `unbanned_computerid` varchar(32) DEFAULT NULL, - `unbanned_ip` varchar(32) DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%feedback` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `time` datetime NOT NULL, - `round_id` int(8) NOT NULL, - `var_name` varchar(32) NOT NULL, - `var_value` int(16) DEFAULT NULL, - `details` text, - PRIMARY KEY (`id`) -) ENGINE=MyISAM DEFAULT CHARSET=latin1 ; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%poll_option` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `pollid` int(11) NOT NULL, - `text` varchar(255) NOT NULL, - `percentagecalc` tinyint(1) NOT NULL DEFAULT '1', - `minval` int(3) DEFAULT NULL, - `maxval` int(3) DEFAULT NULL, - `descmin` varchar(32) DEFAULT NULL, - `descmid` varchar(32) DEFAULT NULL, - `descmax` varchar(32) DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%poll_question` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `polltype` varchar(16) NOT NULL DEFAULT 'OPTION', - 
`starttime` datetime NOT NULL, - `endtime` datetime NOT NULL, - `question` varchar(255) NOT NULL, - `adminonly` tinyint(1) DEFAULT '0', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%poll_textreply` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `datetime` datetime NOT NULL, - `pollid` int(11) NOT NULL, - `ckey` varchar(32) NOT NULL, - `ip` varchar(18) NOT NULL, - `replytext` text NOT NULL, - `adminrank` varchar(32) NOT NULL DEFAULT 'Player', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%poll_vote` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `datetime` datetime NOT NULL, - `pollid` int(11) NOT NULL, - `optionid` int(11) NOT NULL, - `ckey` varchar(255) NOT NULL, - `ip` varchar(16) NOT NULL, - `adminrank` varchar(32) NOT NULL, - `rating` int(2) DEFAULT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%privacy` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `datetime` datetime NOT NULL, - `ckey` varchar(32) NOT NULL, - `option` varchar(128) NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%death` ( - `id` INT(11) NOT NULL AUTO_INCREMENT , - `pod` TEXT NOT NULL COMMENT 'Place of death' , - `coord` TEXT NOT NULL COMMENT 'X, Y, Z POD' , - `tod` DATETIME NOT NULL COMMENT 'Time of death' , - `job` TEXT NOT NULL , - `special` TEXT NOT NULL , - `name` TEXT NOT NULL , - `byondkey` TEXT NOT NULL , - `laname` TEXT NOT NULL COMMENT 'Last attacker name' , - `lakey` TEXT NOT NULL COMMENT 'Last attacker key' , - `gender` TEXT NOT NULL , - `bruteloss` INT(11) NOT NULL , - `brainloss` INT(11) NOT NULL , - `fireloss` INT(11) NOT NULL , - `oxyloss` INT(11) NOT NULL , - PRIMARY KEY (`id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%karma` ( - `id` INT(11) NOT NULL AUTO_INCREMENT , - `spendername` TEXT NOT NULL , - `spenderkey` TEXT NOT NULL , - `receivername` TEXT NOT NULL , - `receiverkey` TEXT NOT NULL , - `receiverrole` TEXT NOT NULL , - `receiverspecial` TEXT NOT NULL , - `isnegative` TINYINT(1) NOT NULL , - `spenderip` TEXT NOT NULL , - `time` DATETIME NOT NULL , - PRIMARY KEY (`id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%karmatotals` ( - `id` INT(11) NOT NULL AUTO_INCREMENT , - `byondkey` TEXT NOT NULL , - `karma` INT(11) NOT NULL , - PRIMARY KEY (`id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%library` ( - `id` INT(11) NOT NULL AUTO_INCREMENT , - `author` TEXT NOT NULL , - `title` TEXT NOT NULL , - `content` TEXT NOT NULL , - `category` TEXT NOT NULL , - PRIMARY KEY (`id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - -CREATE TABLE IF NOT EXISTS `%_PREFIX_%population` ( - `id` INT(11) NOT NULL AUTO_INCREMENT , - `playercount` INT(11) NULL DEFAULT NULL , - `admincount` INT(11) NULL DEFAULT NULL , - `time` DATETIME NOT NULL , - PRIMARY KEY (`id`) - ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; From b360e089911b1ade5a681f83287b526e889989fc Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Fri, 6 Dec 2024 03:01:13 -0500 Subject: [PATCH 05/29] i'm definiteiyl the bad guy here --- 
code/controllers/repository/designs.dm | 2 - code/controllers/repository/guidebook.dm | 1 + dependencies.sh | 3 + tools/setup_dev_db/README.md | 19 +++++ tools/setup_dev_db/invoke.py | 101 +++++++++++++++++++++++ tools/setup_dev_db/setup.ps1 | 43 ++++++++++ 6 files changed, 167 insertions(+), 2 deletions(-) create mode 100644 tools/setup_dev_db/README.md create mode 100644 tools/setup_dev_db/invoke.py create mode 100644 tools/setup_dev_db/setup.ps1 diff --git a/code/controllers/repository/designs.dm b/code/controllers/repository/designs.dm index 9d7fca6884f0..2667472b765f 100644 --- a/code/controllers/repository/designs.dm +++ b/code/controllers/repository/designs.dm @@ -33,5 +33,3 @@ REPOSITORY_DEF(designs) autolathe_design_ids -= instance.id if(istype(instance, /datum/prototype/design/medical)) medical_mini_design_ids -= instance.id - -#warn persistence-ify designs diff --git a/code/controllers/repository/guidebook.dm b/code/controllers/repository/guidebook.dm index f5ea86eb1870..33c878a9fb77 100644 --- a/code/controllers/repository/guidebook.dm +++ b/code/controllers/repository/guidebook.dm @@ -1,6 +1,7 @@ //* This file is explicitly licensed under the MIT license. *// //* Copyright (c) 2023 Citadel Station developers. *// +// todo: this shouldn't be a repository, it can just be a controller REPOSITORY_DEF(guidebook) name = "Repository - Guidebook" expected_type = /datum/prototype/guidebook_section diff --git a/dependencies.sh b/dependencies.sh index d8af558c6f38..aae7799e72e2 100755 --- a/dependencies.sh +++ b/dependencies.sh @@ -23,3 +23,6 @@ export SPACEMAN_DMM_VERSION=suite-1.8 # Python version for mapmerge and other tools export PYTHON_VERSION=3.9.0 + +# MariaDB version +export MARIADB_VERSION=11.4.4 diff --git a/tools/setup_dev_db/README.md b/tools/setup_dev_db/README.md new file mode 100644 index 000000000000..7d79145ae101 --- /dev/null +++ b/tools/setup_dev_db/README.md @@ -0,0 +1,19 @@ +# Dev Database Setup Tool + +Requires Python 3.12+ + +Sets up a database for use in development, automatically downloading a portable version of MariaDB and Flyway as needed. + +Only works on windows right now, if you're on linux you should know how to set up a database. + +## WARNING + +**Do not, under any circumstances, use or attempt to modify this tool to run in production.** + +If you do, and you get breached / bad things happen, I will not provide any help or condolences. This is purely a dev tool. **It is the responsibility of the server owner to set up a proper production database and to maintain it.** + +## License + +This entire folder is under the MIT license. + +The bootstrap system used to run Python, however, is under AGPL, as it's from /tg/. 
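
For reference, a manual run of the tool added below might look roughly like this; the flag names come from invoke.py's own usage string and the bootstrap call mirrors the last line of setup.ps1, while the concrete paths are hypothetical placeholders:

```powershell
# Hypothetical manual invocation; setup.ps1 normally downloads MariaDB/Flyway and supplies these paths itself.
& .\tools\bootstrap\python._ps1 .\tools\setup_dev_db\invoke.py `
    --daemon .\path\to\mariadb\bin\mysqld.exe `
    --flyway .\path\to\flyway\flyway.cmd `
    --migrations .\sql\migrations --port 3306 --dbname ss13
```
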
diff --git a/tools/setup_dev_db/invoke.py b/tools/setup_dev_db/invoke.py
new file mode 100644
index 000000000000..d265dcf94a35
--- /dev/null
+++ b/tools/setup_dev_db/invoke.py
@@ -0,0 +1,101 @@
+import argparse;
+import subprocess;
+import signal;
+import sys;
+import time;
+
+def log_message(source: str, string: str):
+    print('%s: %s' % (source, string))
+
+if __name__ == "__main__":
+    argparser = argparse.ArgumentParser(
+        prog="invoke.py",
+        usage="invoke.py --daemon [path-to-mysqld] --flyway [path-to-flyway] --migrations [path-to-migrations-folder]",
+    )
+    argparser.add_argument("--daemon", type=str)
+    argparser.add_argument("--flyway", type=str)
+    argparser.add_argument("--migrations", type=str)
+    argparser.add_argument("--port", required=False, default=3306, type=int)
+    argparser.add_argument("--dbname", required=False, default="ss13", type=str)
+
+    parsed_args = argparser.parse_args(sys.argv[1:])
+
+    PATH_TO_MYSQLD: str = parsed_args.daemon
+    PATH_TO_FLYWAY: str = parsed_args.flyway
+    PATH_TO_MIGRATIONS: str = parsed_args.migrations
+    USE_PORT: int = parsed_args.port
+    USE_DATABASE: str = parsed_args.dbname
+
+    log_message("setup_dev_db", "WARNING: This is a very, very lazy Python app! Logs are not necessarily in order of occurrence; the script is just a very, very dumb while(True) loop that is just jank enough to work. Do not use this for production")
+    log_message("setup_dev_db", "Starting processes...")
+
+    mysqld: subprocess.Popen | None = subprocess.Popen(
+        [],
+        executable="",
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+
+    flyway: subprocess.Popen | None = subprocess.Popen(
+        [],
+        executable="",
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+
+    # main loop
+    keep_running: bool = True
+    while(keep_running):
+        # pull outputs
+        polled: list[str]
+        exited: int | None
+
+        if mysqld != None:
+            polled = mysqld.stdout.readlines()
+            for string in polled:
+                log_message("mysqld-out", string)
+
+            polled = mysqld.stderr.readlines()
+            for string in polled:
+                log_message("mysqld-err", string)
+
+            exited = mysqld.poll()
+            if exited != None:
+                mysqld = None
+                log_message("setup_dev_db", 'mysqld exited with code %d' % exited)
+
+        if flyway != None:
+            polled = flyway.stdout.readlines()
+            for string in polled:
+                log_message("flyway-out", string)
+
+            polled = flyway.stderr.readlines()
+            for string in polled:
+                log_message("flyway-err", string)
+
+            exited = flyway.poll()
+            if exited != None:
+                flyway = None
+                log_message("setup_dev_db", 'flyway exited with code %d' % exited)
+
+        # "this is async right"
+        # "yeah"
+        # pulls the cover off
+        # "what the hell, this is just an infinite loop!"
+ time.sleep(0.001) + + # exit mysqld and flyway + + if mysqld != None: + mysqld.send_signal(sig=signal.CTRL_C_EVENT) + + if flyway != None: + flyway.send_signal(sig=signal.CTRL_C_EVENT) + + # block on mysqld/flyway exiting + mysqld_exitcode: int | None = mysqld.wait() + log_message("setup_dev_db", 'mysqld exited with code %d' % exited) + + flyway_exitcode: int | None = flyway.wait() + log_message("setup_dev_db", 'flyway exited with code %d' % exited) + diff --git a/tools/setup_dev_db/setup.ps1 b/tools/setup_dev_db/setup.ps1 new file mode 100644 index 000000000000..59ef839f55db --- /dev/null +++ b/tools/setup_dev_db/setup.ps1 @@ -0,0 +1,43 @@ +function ExtractVersion { + param([string] $Path, [string] $Key) + foreach ($Line in Get-Content $Path) { + if ($Line.StartsWith("export $Key=")) { + return $Line.Substring("export $Key=".Length) + } + } + throw "Couldn't find value for $Key in $Path" +} + +function ResolveMariadbURL { + param([string] $Version) + return "https://mirror.rackspace.com/mariadb//mariadb-$Version/winx64-packages/mariadb-$Version-winx64.zip" +} + +function ResolveFlywayURL { + param([string] $Version) + return "https://download.red-gate.com/maven/release/com/redgate/flyway/flyway-commandline/$Version/flyway-commandline-$Version-windows-x64.zip" +} + +$ToolRoot = Split-Path $script:MyInvocation.MyCommand.Path + +$MARIADB_VERSION = ExtractVersion -Path "$ToolRoot/../../dependencies.sh" -Key "MARIADB_VERSION" +$FLYWAY_VERSION = ExtractVersion -Path "$ToolRoot/../../dependencies.sh" -Key "FLYWAY_VERSION" + +$MYSQLD_PATH = "$ToolRoot/" +$FLYWAY_PATH = "" + +# GET mariadb IF NOT EXISTS + +if(!(Test-Path $MYSQLD_PATH -PathType Leaf)) { + +} + +# GET flyway IF NOT EXISTS + +if(!(Test-Path $FLYWAY_PATH -PathType Leaf)) { + +} + +# run database + +& $ToolRoot/../bootstrap/python._ps1 $ToolRoot/invoke.py $args From c0265d4492ac36a552e448a264cf991316a2bba6 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Sat, 7 Dec 2024 13:58:00 -0500 Subject: [PATCH 06/29] That --- README.md | 1 + citadel.dme | 2 +- code/controllers/README.md | 13 ++ .../configuration/configuration.dm | 13 -- code/controllers/controller.dm | 3 + code/controllers/master.dm | 3 + code/controllers/repository.dm | 4 +- code/controllers/subsystem/statpanel.dm | 4 + .../toml_config/entries/backend.repository.dm | 2 - code/controllers/toml_config/toml_config.dm | 51 ------ .../toml_config/toml_config_entry.dm | 13 +- .../toml_config/toml_configuration.dm | 151 ++++++++++++++++++ code/datums/datumvars.dm | 10 ++ code/game/turfs/turf.dm | 5 + .../admin/view_variables/admin_delete.dm | 4 +- 15 files changed, 205 insertions(+), 74 deletions(-) create mode 100644 code/controllers/README.md delete mode 100644 code/controllers/toml_config/toml_config.dm create mode 100644 code/controllers/toml_config/toml_configuration.dm diff --git a/README.md b/README.md index 09b0e395273f..a957b846c105 100644 --- a/README.md +++ b/README.md @@ -66,6 +66,7 @@ These are also the folders you are likely going to encounter while managing the - /players: player data, like saves and characters get dumped in here - /tmp: server scratch space - /assets - for asset generation + - /config - used as scratch space for config You only need to make the top level folders (e.g. config, data) static folders in TGS4. 
diff --git a/citadel.dme b/citadel.dme index b1c5bc8633d6..08f8aa6128bf 100644 --- a/citadel.dme +++ b/citadel.dme @@ -685,8 +685,8 @@ #include "code\controllers\subsystem\sound\_sound.dm" #include "code\controllers\subsystem\sound\channel_manager.dm" #include "code\controllers\subsystem\sound\soundbyte_manager.dm" -#include "code\controllers\toml_config\toml_config.dm" #include "code\controllers\toml_config\toml_config_entry.dm" +#include "code\controllers\toml_config\toml_configuration.dm" #include "code\controllers\toml_config\entries\backend.dm" #include "code\controllers\toml_config\entries\backend.repository.dm" #include "code\datums\ability.dm" diff --git a/code/controllers/README.md b/code/controllers/README.md new file mode 100644 index 000000000000..1364ca595eea --- /dev/null +++ b/code/controllers/README.md @@ -0,0 +1,13 @@ +# Controllers + +Backend controllers orchestrating the game. + +## Globals + +Many, but not all, controllers are accessible from anywhere in the code with standardized names. + +- Master: As the name implies, the Master Controller performs init, shutdown, and acts as a process scheduler during a round. +- Failsafe: A controller that ensures the Master Controller is running properly. +- Configuration: A global datum that holds server configuration. +- RSname: Repository controllers storing /datum/prototype's that can be queried. +- SSname: Subsystems that handle init behavior, ticking, and other game functions.. diff --git a/code/controllers/configuration/configuration.dm b/code/controllers/configuration/configuration.dm index e8f712a9c106..b30a5bff3855 100644 --- a/code/controllers/configuration/configuration.dm +++ b/code/controllers/configuration/configuration.dm @@ -8,19 +8,6 @@ var/list/entries var/list/entries_by_type - // var/list/maplist - // var/datum/map_config/defaultmap - - /* - var/list/modes // allowed modes - var/list/gamemode_cache - var/list/votable_modes // votable modes - var/list/storyteller_cache - var/list/mode_names - var/list/mode_reports - var/list/mode_false_report_weight - */ - var/motd /// If the configuration is loaded diff --git a/code/controllers/controller.dm b/code/controllers/controller.dm index 93d5897ff453..252845318f52 100644 --- a/code/controllers/controller.dm +++ b/code/controllers/controller.dm @@ -15,6 +15,9 @@ // todo: kil var/verbose_logging = FALSE +/datum/controller/vv_delete() + return FALSE + /** * Called to initialize a controller. * diff --git a/code/controllers/master.dm b/code/controllers/master.dm index 35cf731a0f34..04a2e00fe05d 100644 --- a/code/controllers/master.dm +++ b/code/controllers/master.dm @@ -117,6 +117,9 @@ GLOBAL_REAL(Master, /datum/controller/master) = new load_configuration() if(!config) config = new + if(!Configuration) + Configuration = new + Configuration.Initialize() //# 2. 
set up random seed if(!random_seed) diff --git a/code/controllers/repository.dm b/code/controllers/repository.dm index 06fb2d64c087..822559c6f665 100644 --- a/code/controllers/repository.dm +++ b/code/controllers/repository.dm @@ -254,7 +254,7 @@ PROTECTED_PROC(TRUE) /datum/controller/repository/proc/handle_db_store(datum/prototype/instance) - if(!global.toml_config.get_entry(/datum/toml_config_entry/backend/repository/persistence)) + if(!Configuration.get_entry(/datum/toml_config_entry/backend/repository/persistence)) return doesnt_exist_cache -= instance.id @@ -264,7 +264,7 @@ var/const/doesnt_exist_cache_trim_at = 1000 var/const/doesnt_exist_cache_trim_to = 500 - if(!global.toml_config.get_entry(/datum/toml_config_entry/backend/repository/persistence)) + if(!Configuration.get_entry(/datum/toml_config_entry/backend/repository/persistence)) doesnt_exist_cache[instance_id] = TRUE if(length(doesnt_exist_cache) > doesnt_exist_cache_trim_at) doesnt_exist_cache.len = doesnt_exist_cache_trim_to diff --git a/code/controllers/subsystem/statpanel.dm b/code/controllers/subsystem/statpanel.dm index 44f7944a2b98..5a204c1320d3 100644 --- a/code/controllers/subsystem/statpanel.dm +++ b/code/controllers/subsystem/statpanel.dm @@ -89,6 +89,10 @@ SUBSYSTEM_DEF(statpanels) STATPANEL_DATA_CLICK(config.stat_key(), config.stat_entry(), "\ref[config]") else STATPANEL_DATA_LINE("FATAL - NO CONFIG") + if(config) + STATPANEL_DATA_CLICK(Configuration.stat_key(), Configuration.stat_entry(), "\ref[Configuration]") + else + STATPANEL_DATA_LINE("FATAL - NO CONFIG (NEW)") STATPANEL_DATA_ENTRY("BYOND:", "(FPS:[world.fps]) (TickCount:[world.time/world.tick_lag]) (TickDrift:[round(Master.tickdrift,1)]([round((Master.tickdrift/(world.time/world.tick_lag))*100,0.1)]%)) (Internal Tick Usage: [round(MAPTICK_LAST_INTERNAL_TICK_USAGE,0.1)]%)") if(Master) STATPANEL_DATA_CLICK(Master.stat_key(), Master.stat_entry(), "\ref[Master]") diff --git a/code/controllers/toml_config/entries/backend.repository.dm b/code/controllers/toml_config/entries/backend.repository.dm index 8a3ddc733af4..9c812b38a105 100644 --- a/code/controllers/toml_config/entries/backend.repository.dm +++ b/code/controllers/toml_config/entries/backend.repository.dm @@ -12,5 +12,3 @@ features will not function. "} default = TRUE - -#warn impl diff --git a/code/controllers/toml_config/toml_config.dm b/code/controllers/toml_config/toml_config.dm deleted file mode 100644 index 9c9fbf026c86..000000000000 --- a/code/controllers/toml_config/toml_config.dm +++ /dev/null @@ -1,51 +0,0 @@ -//* This file is explicitly licensed under the MIT license. *// -//* Copyright (c) 2024 Citadel Station Developers *// - -GLOBAL_DATUM(toml_config, /datum/toml_config) - -/datum/toml_config - /// Entries by type. - VAR_PRIVATE/list/datum/toml_config_entry/keyed_entries -/** - * HEY! LISTEN! By calling this proc you are affirming that: - * - * * The entry type you are passing in is static and not a variable that can be tampered with. - * * The value you get will be immediately consumed in a non-VV-able manner. - */ -/datum/toml_config/proc/get_sensitive_entry(datum/toml_config_entry/entry_type) - -/** - * HEY! LISTEN! By calling this proc you are affirming that: - * - * * The entry type you are passing in is static and not a variable that can be tampered with. - * * The value you are passing in is trusted and validated and not a variable that can be tampered with. 
- */ -/datum/toml_config/proc/set_sensitive_entry(datum/toml_config_entry/entry_type, value) - -/datum/toml_config/proc/get_entry(datum/toml_config_entry/entry_type) - -/datum/toml_config/proc/set_entry(datum/toml_config_entry/entry_type, value) - -/** - * Automatically loads default config, and the server's config file. - */ -/datum/toml_config/proc/reload() - reset() - load("config.default/config.toml") - load("config/config.toml") - -/** - * Resets the configuration. - */ -/datum/toml_config/proc/reset() - -/** - * Loads from a given layer. - * * This will not reset the configuration. Repeated calls to load will allow for layered configuration. - * - * HEY! LISTEN! By calling this proc you are affirming that: - * * The file you are passing in is trusted and not a variable that can be tampered with via VV. - */ -/datum/toml_config/proc/load(filelike) - -#warn impl diff --git a/code/controllers/toml_config/toml_config_entry.dm b/code/controllers/toml_config/toml_config_entry.dm index 40ce2c279005..88eb4e6039a5 100644 --- a/code/controllers/toml_config/toml_config_entry.dm +++ b/code/controllers/toml_config/toml_config_entry.dm @@ -30,13 +30,15 @@ var/value /// vv edit disallowed + /// * Does not stop get_entry and set_entry from setting our value. Those are not considered vv-protected. var/vv_locked = FALSE /// vv read disallowed - /// * does not automatically imply [vv_locked] + /// * Does not automatically imply [vv_locked]. + /// * Does not stop get_entry and set_entry from pulling our value. Those are not considered vv-protected. var/vv_secret = FALSE /// sensitive - /// * requires get_sensitive_entry() and set_sensitive_entry() to read/write - /// * does not actually imply [vv_locked] and [vv_secret] + /// * Requires get_sensitive_entry() and set_sensitive_entry() to read/write. + /// * Does not actually imply [vv_locked] and [vv_secret]. var/sensitive = FALSE /datum/toml_config_entry/vv_edit_var(var_name, var_value, mass_edit, raw_edit) @@ -69,12 +71,17 @@ /datum/toml_config_entry/CanProcCall(procname) switch(procname) + if(NAMEOF_PROC(src, New), NAMEOF_PROC(src, Destroy)) + return FALSE if(NAMEOF_PROC(src, reset)) return FALSE if(NAMEOF_PROC(src, apply)) return FALSE return ..() +/datum/toml_config_entry/vv_delete() + return FALSE + /** * Called once when resetting. */ diff --git a/code/controllers/toml_config/toml_configuration.dm b/code/controllers/toml_config/toml_configuration.dm new file mode 100644 index 000000000000..571ff972f56a --- /dev/null +++ b/code/controllers/toml_config/toml_configuration.dm @@ -0,0 +1,151 @@ +//* This file is explicitly licensed under the MIT license. *// +//* Copyright (c) 2024 Citadel Station Developers *// + +GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) + +// todo: /datum/controller/config +/datum/controller/toml_configuration + /// Entries by type. 
+ VAR_PRIVATE/list/datum/toml_config_entry/typed_entries + /// Entries as same structure as the underlying toml/json + VAR_PRIVATE/list/datum/toml_config_entry/keyed_entries + +/datum/controller/toml_configuration/CanProcCall(procname) + switch(procname) + if(NAMEOF_PROC(src, New), NAMEOF_PROC(src, Destroy), NAMEOF_PROC(src, Initialize)) + return FALSE + if(NAMEOF_PROC(src, get_entry), NAMEOF_PROC(src, set_entry)) + return FALSE + if(NAMEOF_PROC(src, get_sensitive_entry), NAMEOF_PROC(src, set_sensitive_entry)) + return FALSE + if(NAMEOF_PROC(src, reload), NAMEOF_PROC(src, reset), NAMEOF_PROC(src, load)) + return FALSE + return ..() + +/datum/controller/toml_configuration/vv_edit_var(var_name, var_value, mass_edit, raw_edit) + switch(var_name) + if(NAMEOF(src, keyed_entries)) + return FALSE + return ..() + +/datum/controller/toml_configuration/vv_get_var(var_name, resolve) + switch(var_name) + if(NAMEOF(src, keyed_entries)) + return debug_variable(NAMEOF(src, keyed_entries), deep_copy_list(keyed_entries), 0, src) + if(NAMEOF(src, typed_entries)) + return debug_variable(NAMEOF(src, typed_entries), typed_entries.Copy(), 0, src) + return ..() + +/datum/controller/toml_configuration/New() + if(Configuration != src) + if(Configuration) + qdel(Configuration) + Configuration = src + +/datum/controller/toml_configuration/Initialize() + keyed_entries = list() + typed_entries = list() + for(var/datum/toml_config_entry/path as anything in typesof(/datum/toml_config_entry)) + if(initial(path.abstract_type) == path) + continue + var/datum/toml_config_entry/entry = new path + typed_entries[entry.type] = entry + var/list/nesting = splittext(entry.key, ".") + var/list/current_list = keyed_entries + for(var/i in 1 to length(nesting) - 1) + LAZYINITLIST(current_list[nesting[i]]) + current_list = nesting[i] + current_list[nesting[length(nesting)]] = entry + reload() + +/datum/controller/toml_configuration/stat_key() + return "Configuration (New):" + +/datum/controller/toml_configuration/stat_entry() + return "Edit" + +/** + * HEY! LISTEN! By calling this proc you are affirming that: + * + * * The entry type you are passing in is static and not a variable that can be tampered with. + * * The value you get will be immediately consumed in a non-VV-able manner. + */ +/datum/controller/toml_configuration/proc/get_sensitive_entry(datum/toml_config_entry/entry_type) + // todo: cache / optimize + var/datum/toml_config_entry/entry = typed_entries[entry_type] + if(!entry) + return + if(!entry.sensitive) + CRASH("attempted to get sensitive entry with sensitive get entry.") + return entry.value + +/** + * HEY! LISTEN! By calling this proc you are affirming that: + * + * * The entry type you are passing in is static and not a variable that can be tampered with. + * * The value you are passing in is trusted and validated and not a variable that can be tampered with. 
+ */ +/datum/controller/toml_configuration/proc/set_sensitive_entry(datum/toml_config_entry/entry_type, value) + // todo: cache / optimize + var/datum/toml_config_entry/entry = typed_entries[entry_type] + if(!entry) + return + if(entry.sensitive) + CRASH("attempted to set non-sensitive entry with sensitive set entry.") + entry.value = value + +/datum/controller/toml_configuration/proc/get_entry(datum/toml_config_entry/entry_type) + // todo: cache / optimize + var/datum/toml_config_entry/entry = typed_entries[entry_type] + if(!entry) + return + if(entry.sensitive) + CRASH("attempted to get sensitive entry with normal get entry.") + return entry.value + +/datum/controller/toml_configuration/proc/set_entry(datum/toml_config_entry/entry_type, value) + // todo: cache / optimize + var/datum/toml_config_entry/entry = typed_entries[entry_type] + if(!entry) + return + if(entry.sensitive) + CRASH("attempted to set sensitive entry with normal set entry.") + entry.value = value + +/** + * Automatically loads default config, and the server's config file. + * + * todo: allow for overriding directories + */ +/datum/controller/toml_configuration/proc/reload() + reset() + load("config.default/config.toml") + load("config/config.toml") + +/** + * Resets the configuration. + */ +/datum/controller/toml_configuration/proc/reset() + for(var/path in typed_entries) + var/datum/toml_config_entry/entry = typed_entries[path] + entry.reset() + +/** + * Loads from a given layer. + * * This will not reset the configuration. Repeated calls to load will allow for layered configuration. + * + * HEY! LISTEN! By calling this proc you are affirming that: + * * The file you are passing in is trusted and not a variable that can be tampered with via VV. + */ +/datum/controller/toml_configuration/proc/load(filelike) + var/list/decoded + if(istext(filelike)) + decoded = rustg_read_toml_file(filelike) + else if(isfile(filelike)) + // noa path, it might be rsc cache; rust_g can't read that directly. + fdel("tmp/config/loading.toml") + fcopy(filelike, "tmp/config/loading.toml") + decoded = rustg_read_toml_file("tmp/config/loading.toml") + fdel("tmp/config/loading.toml") + if(!decoded) + CRASH("failed to decode config [filelike]!") diff --git a/code/datums/datumvars.dm b/code/datums/datumvars.dm index c93c2373e7b9..5f0f3d18ef90 100644 --- a/code/datums/datumvars.dm +++ b/code/datums/datumvars.dm @@ -1,3 +1,13 @@ +/** + * Called when an admin attempts to delete us with introspection tools. + */ +/datum/proc/vv_delete() + . = TRUE + // incase qdel returns QDEL_HINT_HARDDEL_NOW + var/datum/deleting = src + src = null + qdel(deleting) + /datum/proc/CanProcCall(procname) return TRUE diff --git a/code/game/turfs/turf.dm b/code/game/turfs/turf.dm index 456b459d9484..9116c4e498cb 100644 --- a/code/game/turfs/turf.dm +++ b/code/game/turfs/turf.dm @@ -676,3 +676,8 @@ thing.update_hiding_underfloor( (thing.hides_underfloor != OBJ_UNDERFLOOR_NEVER) && we_should_cover, ) + +//* VV *// + +/turf/vv_delete() + ScrapeAway() diff --git a/code/modules/admin/view_variables/admin_delete.dm b/code/modules/admin/view_variables/admin_delete.dm index 7157a1cce2bd..16c84012c99f 100644 --- a/code/modules/admin/view_variables/admin_delete.dm +++ b/code/modules/admin/view_variables/admin_delete.dm @@ -16,9 +16,9 @@ //SSblackbox.record_feedback("tally", "admin_verb", 1, "Delete") //If you are copy-pasting this, ensure the 2nd parameter is unique to the new proc! 
if(isturf(D)) var/turf/T = D - T.ScrapeAway() + T.vv_delete() else vv_update_display(D, "deleted", VV_MSG_DELETED) - qdel(D) + D.vv_delete() if(!QDELETED(D)) vv_update_display(D, "deleted", "") From 71ce6e1506b9a406b89e5113488d82f82157c74b Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Sat, 7 Dec 2024 14:06:28 -0500 Subject: [PATCH 07/29] load behavior --- citadel.dme | 1 + .../toml_config/toml_configuration.dm | 29 +++++++++++++++++-- code/modules/admin/verbs/debug.dm | 10 ------- 3 files changed, 27 insertions(+), 13 deletions(-) diff --git a/citadel.dme b/citadel.dme index 08f8aa6128bf..be185e6d39aa 100644 --- a/citadel.dme +++ b/citadel.dme @@ -2238,6 +2238,7 @@ #include "code\modules\admin\verbs\debug\fucky_wucky.dm" #include "code\modules\admin\verbs\debug\profiling.dm" #include "code\modules\admin\verbs\debug\reestablish_db_connection.dm" +#include "code\modules\admin\verbs\debug\reload_configuration.dm" #include "code\modules\admin\verbs\debug\spawn.dm" #include "code\modules\admin\verbs\SDQL2\SDQL_2.dm" #include "code\modules\admin\verbs\SDQL2\SDQL_2_parser.dm" diff --git a/code/controllers/toml_config/toml_configuration.dm b/code/controllers/toml_config/toml_configuration.dm index 571ff972f56a..8d1b2bf6eade 100644 --- a/code/controllers/toml_config/toml_configuration.dm +++ b/code/controllers/toml_config/toml_configuration.dm @@ -18,7 +18,7 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) return FALSE if(NAMEOF_PROC(src, get_sensitive_entry), NAMEOF_PROC(src, set_sensitive_entry)) return FALSE - if(NAMEOF_PROC(src, reload), NAMEOF_PROC(src, reset), NAMEOF_PROC(src, load)) + if(NAMEOF_PROC(src, reload), NAMEOF_PROC(src, reset), NAMEOF_PROC(src, load), NAMEOF_PROC(src, recursively_load_from_list)) return FALSE return ..() @@ -50,12 +50,12 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) continue var/datum/toml_config_entry/entry = new path typed_entries[entry.type] = entry - var/list/nesting = splittext(entry.key, ".") + var/list/nesting = splittext(entry.category, ".") var/list/current_list = keyed_entries for(var/i in 1 to length(nesting) - 1) LAZYINITLIST(current_list[nesting[i]]) current_list = nesting[i] - current_list[nesting[length(nesting)]] = entry + current_list[entry.key] = entry reload() /datum/controller/toml_configuration/stat_key() @@ -112,6 +112,9 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) CRASH("attempted to set sensitive entry with normal set entry.") entry.value = value +/datum/controller/toml_configuration/proc/admin_reload() + reload() + /** * Automatically loads default config, and the server's config file. 
* @@ -149,3 +152,23 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) fdel("tmp/config/loading.toml") if(!decoded) CRASH("failed to decode config [filelike]!") + + recursively_load_from_list(decoded, keyed_entries) + +/datum/controller/toml_configuration/proc/recursively_load_from_list(list/decoded_list, list/entry_list) + if(!decoded_list || !entry_list) + return + for(var/key in decoded_list) + var/value = decoded_list[key] + if(islist(value)) + var/list/entry_list = entry_list[key] + if(!islist(entry_list)) + // todo: warn + else + recursively_load_from_list(value, entry_list[key]) + else + var/datum/toml_config_entry/entry = entry_list[key] + if(!istype(entry)) + // todo: warn + else + entry.apply(value) diff --git a/code/modules/admin/verbs/debug.dm b/code/modules/admin/verbs/debug.dm index fa96e39a4c46..5b40ba3bc97f 100644 --- a/code/modules/admin/verbs/debug.dm +++ b/code/modules/admin/verbs/debug.dm @@ -687,16 +687,6 @@ message_admins(log) log_admin(log) -/client/proc/reload_configuration() - set category = "Debug" - set name = "Reload Configuration" - set desc = "Force config reload to world default" - if(!check_rights(R_DEBUG)) - return - if(alert(usr, "Are you absolutely sure you want to reload the configuration from the default path on the disk, wiping any in-round modificatoins?", "Really reset?", "No", "Yes") == "Yes") - config.admin_reload() - load_configuration() //for legacy - /datum/admins/proc/quick_nif() set category = "Fun" set name = "Quick NIF" From bda249aad92ec698e4760295a4812ea497c224f6 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Sat, 7 Dec 2024 14:06:33 -0500 Subject: [PATCH 08/29] don't forget that file --- .../admin/verbs/debug/reload_configuration.dm | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 code/modules/admin/verbs/debug/reload_configuration.dm diff --git a/code/modules/admin/verbs/debug/reload_configuration.dm b/code/modules/admin/verbs/debug/reload_configuration.dm new file mode 100644 index 000000000000..fd80f16133f8 --- /dev/null +++ b/code/modules/admin/verbs/debug/reload_configuration.dm @@ -0,0 +1,12 @@ + +/client/proc/reload_configuration() + set category = "Debug" + set name = "Reload Configuration" + set desc = "Force config reload to world default" + if(!check_rights(R_DEBUG)) + return + if(alert(usr, "Are you absolutely sure you want to reload the configuration from the default path on the disk, wiping any in-round modificatoins?", "Really reset?", "No", "Yes") == "Yes") + log_and_message_admins("[key_name(usr)] reloaded server configuration.") + config.admin_reload() + Configuration.admin_reload() + load_configuration() //for legacy From 77fc2e850cd02bfadc6866c9622dbbf95e6ec330 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Sat, 7 Dec 2024 14:12:07 -0500 Subject: [PATCH 09/29] Fix --- code/controllers/toml_config/toml_configuration.dm | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/code/controllers/toml_config/toml_configuration.dm b/code/controllers/toml_config/toml_configuration.dm index 8d1b2bf6eade..e48b1223b4cb 100644 --- a/code/controllers/toml_config/toml_configuration.dm +++ b/code/controllers/toml_config/toml_configuration.dm @@ -161,11 +161,11 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) for(var/key in decoded_list) var/value = decoded_list[key] if(islist(value)) - var/list/entry_list = entry_list[key] - if(!islist(entry_list)) + var/list/next_entry_list = 
entry_list[key] + if(!islist(next_entry_list)) // todo: warn else - recursively_load_from_list(value, entry_list[key]) + recursively_load_from_list(value, next_entry_list[key]) else var/datum/toml_config_entry/entry = entry_list[key] if(!istype(entry)) From f22e11b21ce669159d9dde29a70acedfc01d2689 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Sat, 7 Dec 2024 14:21:07 -0500 Subject: [PATCH 10/29] Fix --- code/controllers/toml_config/toml_configuration.dm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/controllers/toml_config/toml_configuration.dm b/code/controllers/toml_config/toml_configuration.dm index e48b1223b4cb..c93a3d712bfc 100644 --- a/code/controllers/toml_config/toml_configuration.dm +++ b/code/controllers/toml_config/toml_configuration.dm @@ -54,7 +54,7 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) var/list/current_list = keyed_entries for(var/i in 1 to length(nesting) - 1) LAZYINITLIST(current_list[nesting[i]]) - current_list = nesting[i] + current_list = current_list[nesting[i]] current_list[entry.key] = entry reload() From 84a639935fcf701f02d1861bb84b665249a584dd Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Sun, 8 Dec 2024 01:03:23 -0500 Subject: [PATCH 11/29] Fix / change --- code/__DEFINES/controllers/_repository.dm | 11 + code/__DEFINES/controllers/dbcore.dm | 5 + code/__HELPERS/text.dm | 8 - .../configuration/configuration.dm | 2 +- code/controllers/master.dm | 6 +- code/controllers/repository.dm | 206 ++++++++++++++++-- code/controllers/subsystem.dm | 2 +- code/controllers/subsystem/assets.dm | 2 +- .../subsystem/characters/storage.dm | 12 +- code/controllers/subsystem/dbcore/_dbcore.dm | 10 +- code/controllers/subsystem/ipintel.dm | 6 +- code/controllers/subsystem/materials.dm | 43 +++- .../persistence/modules/bulk_entity.dm | 8 +- .../persistence/modules/level_objects.dm | 34 +-- .../persistence/modules/spatial_metadata.dm | 4 +- .../persistence/modules/string_kkv.dm | 4 +- .../subsystem/persistence/world.dm | 6 +- code/controllers/subsystem/photography.dm | 8 +- code/controllers/subsystem/playtime.dm | 2 +- code/controllers/subsystem/repository.dm | 3 + .../toml_config/toml_configuration.dm | 10 +- code/game/machinery/telecomms/blackbox.dm | 4 +- code/game/objects/materials.dm | 14 +- code/game/objects/structures/barricade.dm | 7 +- code/game/objects/structures/low_wall.dm | 11 +- .../objects/structures/props/puzzledoor.dm | 2 +- code/game/objects/structures/simple_doors.dm | 11 +- code/game/statistics.dm | 10 +- code/game/turfs/simulated/wall/materials.dm | 6 +- code/modules/admin/DB ban/functions.dm | 16 +- code/modules/admin/IsBanned.dm | 2 +- code/modules/admin/admin_ranks.dm | 2 +- code/modules/admin/banjob.dm | 4 +- .../admin/permissionverbs/permissionedit.dm | 20 +- .../admin/verbs/check_customitem_activity.dm | 4 +- code/modules/artwork/items/poster.dm | 2 +- code/modules/artwork/structures/poster.dm | 2 +- .../artwork/structures/sculpting_block.dm | 10 +- code/modules/client/client_procs.dm | 4 +- code/modules/client/connection.dm | 8 +- code/modules/client/data/client_data.dm | 2 +- code/modules/client/data/player_data.dm | 14 +- .../game_preferences/game_preferences.dm | 4 +- code/modules/language/language.dm | 6 +- code/modules/library/lib_machines.dm | 8 +- code/modules/loot/table.dm | 2 +- code/modules/materials/material_sheets.dm | 4 +- code/modules/mining/misc/abandonedcrates.dm | 2 +- 
code/modules/mob/living/silicon/silicon.dm | 2 +- code/modules/mob/new_player/new_player.dm | 6 +- code/modules/mob/new_player/poll.dm | 40 ++-- code/modules/species/species_getters.dm | 4 +- ....0.3__RemakePlaytimeTriggerJustInCase.sql} | 0 ...ore.sql => V0.0.4__AddRepositoryStore.sql} | 2 +- 54 files changed, 430 insertions(+), 197 deletions(-) rename sql/migrations/{V0.0.4__RemakePlaytimeTriggerJustInCase.sql => V0.0.3__RemakePlaytimeTriggerJustInCase.sql} (100%) rename sql/migrations/{V0.0.3__AddRepositoryStore.sql => V0.0.4__AddRepositoryStore.sql} (86%) diff --git a/code/__DEFINES/controllers/_repository.dm b/code/__DEFINES/controllers/_repository.dm index b2f5702f7524..9f08048dc8d8 100644 --- a/code/__DEFINES/controllers/_repository.dm +++ b/code/__DEFINES/controllers/_repository.dm @@ -7,6 +7,9 @@ //* This is here in [code/__DEFINES/controllers/_repositories.dm] for compile order reasons. *// /datum/controller/subsystem/repository/proc/__init_repositories() +//* This is here in [code/__DEFINES/controllers/_repositories.dm] for compile order reasons. *// +/datum/controller/subsystem/repository/proc/__get_all_repositories() + // todo: redo recover logic; maybe /datum/controller as a whole should be brushed up #define REPOSITORY_DEF(what) \ GLOBAL_REAL(RS##what, /datum/controller/repository/##what); \ @@ -27,4 +30,12 @@ GLOBAL_REAL(RS##what, /datum/controller/repository/##what); \ ..(); \ RS##what.Initialize(); \ } \ +/datum/controller/subsystem/repository/__get_all_repositories() { \ + . = ..(); \ + . += RS##what; \ +} \ /datum/controller/repository/##what + +/// Returned from /datum/controller/repository's fetch_or_defer() if we don't have something +/// on hand, but also don't know that it doesn't exist. +#define REPOSITORY_FETCH_DEFER "defer" diff --git a/code/__DEFINES/controllers/dbcore.dm b/code/__DEFINES/controllers/dbcore.dm index afc6709805cc..19df14d5588f 100644 --- a/code/__DEFINES/controllers/dbcore.dm +++ b/code/__DEFINES/controllers/dbcore.dm @@ -17,6 +17,11 @@ */ #define DB_MINOR_VERSION 3 +//* Tables *// + +/// Prefixes are currently disabled. +#define DB_PREFIX_TABLE_NAME(TABLE) TABLE + //* Misc *// /// pass this into duplicate_key on mass_insert() to overwrite old values diff --git a/code/__HELPERS/text.dm b/code/__HELPERS/text.dm index 9abb75fb9732..5c4a63dc189a 100644 --- a/code/__HELPERS/text.dm +++ b/code/__HELPERS/text.dm @@ -1,20 +1,12 @@ /** * Holds procs designed to help with filtering text * Contains groups: - * ! SQL sanitization * ! Text sanitization * ! Text searches * ! Text modification * ! Misc */ -/** - *! SQL sanitization - */ - -/proc/format_table_name(table) - return CONFIG_GET(string/sql_server_prefix) + table - /** *! 
Text sanitization */ diff --git a/code/controllers/configuration/configuration.dm b/code/controllers/configuration/configuration.dm index b30a5bff3855..5474503030a5 100644 --- a/code/controllers/configuration/configuration.dm +++ b/code/controllers/configuration/configuration.dm @@ -46,7 +46,7 @@ loaded = TRUE if (Master) - Master.on_config_reload() + Master.on_config_loaded() /datum/controller/configuration/proc/full_wipe() if(IsAdminAdvancedProcCall()) diff --git a/code/controllers/master.dm b/code/controllers/master.dm index 04a2e00fe05d..60a8334a375b 100644 --- a/code/controllers/master.dm +++ b/code/controllers/master.dm @@ -886,10 +886,12 @@ GLOBAL_REAL(Master, /datum/controller/master) = new var/datum/controller/subsystem/SS = S SS.StopLoadingMap() -/datum/controller/master/proc/on_config_reload() +/datum/controller/master/proc/on_config_loaded() for (var/thing in subsystems) var/datum/controller/subsystem/SS = thing - SS.on_config_reload() + SS.on_config_loaded() + for(var/datum/controller/repository/repository in SSrepository.get_all_repositories()) + repository.on_config_loaded() /** * CitRP snowflake special: Check if any subsystems are sleeping. diff --git a/code/controllers/repository.dm b/code/controllers/repository.dm index 822559c6f665..e548f5a9d30f 100644 --- a/code/controllers/repository.dm +++ b/code/controllers/repository.dm @@ -25,13 +25,17 @@ /// expected type of prototype var/expected_type + /// database key; this is immutable. /// * persistence is disabled if this is not set var/database_key /// store version /// * persistence is disabled if this is not set /// * migration is triggered if this doesn't match a loaded entry + /// * this should only ever be incremented. var/store_version + /// store enabled? Updated by config reloads. + var/store_enabled = FALSE /// by-id lookup var/list/id_lookup @@ -43,6 +47,8 @@ /// 'doesn't exist' cache for DB loads var/tmp/list/doesnt_exist_cache + var/const/doesnt_exist_cache_trim_at = 1000 + var/const/doesnt_exist_cache_trim_to = 500 /// temporary id to path lookup used during init // todo: figure out a way to not do this, this is bad @@ -68,6 +74,14 @@ init_reverse_lookup_shim = null return ..() +/datum/controller/repository/vv_edit_var(var_name, var_value, mass_edit, raw_edit) + switch(var_name) + if(NAMEOF(src, store_version), NAMEOF(src, database_key), NAMEOF(src, store_enabled)) + return FALSE + if(NAMEOF(src, expected_type)) + return FALSE + return ..() + /** * Repository Recover() * @@ -93,6 +107,13 @@ . = FALSE src.subtype_lists = list() +/** + * Called when config is reloaded. + */ +/datum/controller/repository/proc/on_config_loaded() + SHOULD_CALL_PARENT(TRUE) + store_enabled = Configuration.get_entry(/datum/toml_config_entry/backend/repository/persistence) && database_key && store_version + /** * regenerates entries, kicking out anything that's in the way */ @@ -116,6 +137,7 @@ * * * Allows passing in a prototype instance which will be returned as itself. * Useful for procs that should accept types, IDs, *and* instances. + * * Unlike fetch local / fetch or defer, this **can** sleep! * * prototypes returned should never, ever be modified * @@ -130,7 +152,101 @@ if(init_reverse_lookup_shim) var/potential_path = init_reverse_lookup_shim[type_or_id] return fetch(potential_path) - return id_lookup[type_or_id] || handle_db_load(type_or_id) + if(.) + return + if(!store_enabled) + return + if(doesnt_exist_cache[type_or_id]) + return + return handle_db_load(type_or_id) + else if(ispath(type_or_id)) + . 
= type_lookup[type_or_id] + if(.) + return + if(initial(type_or_id.abstract_type) == type_or_id) + CRASH("tried to fetch an abstract prototype") + var/datum/prototype/loading = new type_or_id + loading.hardcoded = TRUE + load(loading) + return loading + else if(istype(type_or_id)) + return type_or_id + else + CRASH("what?") + +/** + * Fetches a prototype by type or ID. + * + * * Allows passing in a prototype instance which will be returned as itself. + * Useful for procs that should accept types, IDs, *and* instances. + * * If something doesn't exist and we don't know if it exists in the database, we throw a runtime error. + * * If fetching a hardcoded path, this should generally be used as it never sleeps. + * + * prototypes returned should never, ever be modified + * + * @return prototype instance or null + */ +/datum/controller/repository/proc/fetch_local_or_throw(datum/prototype/type_or_id) as /datum/prototype + RETURN_TYPE(/datum/prototype) + // todo: optimize + if(isnull(type_or_id)) + return + else if(istext(type_or_id)) + if(init_reverse_lookup_shim) + var/potential_path = init_reverse_lookup_shim[type_or_id] + return fetch_local_or_throw(potential_path) + . = id_lookup[type_or_id] + if(.) + return + if(!store_enabled) + return + if(doesnt_exist_cache[type_or_id]) + return + CRASH("fetch_local_or_throw of [type_or_id] couldn't determine if id existed without a fetch.") + else if(ispath(type_or_id)) + . = type_lookup[type_or_id] + if(.) + return + if(initial(type_or_id.abstract_type) == type_or_id) + CRASH("tried to fetch an abstract prototype") + var/datum/prototype/loading = new type_or_id + loading.hardcoded = TRUE + load(loading) + return loading + else if(istype(type_or_id)) + return type_or_id + else + CRASH("what?") + +/** + * Fetches a prototype by type or ID. + * + * * Allows passing in a prototype instance which will be returned as itself. + * Useful for procs that should accept types, IDs, *and* instances. + * * If something doesn't exist and we don't know if it exists in the database, we return + * REPOSITORY_FETCH_DEFER. The caller should invoke normal fetch() at a time when sleeping is allowed. + * + * prototypes returned should never, ever be modified + * + * @return prototype instance or null + */ +/datum/controller/repository/proc/fetch_or_defer(datum/prototype/type_or_id) as /datum/prototype + RETURN_TYPE(/datum/prototype) + // todo: optimize + if(isnull(type_or_id)) + return + else if(istext(type_or_id)) + if(init_reverse_lookup_shim) + var/potential_path = init_reverse_lookup_shim[type_or_id] + return fetch_or_defer(potential_path) + . = id_lookup[type_or_id] + if(.) + return + if(!store_enabled) + return + if(doesnt_exist_cache[type_or_id]) + return + return REPOSITORY_FETCH_DEFER else if(ispath(type_or_id)) . = type_lookup[type_or_id] if(.) @@ -176,7 +292,7 @@ for(var/datum/prototype/casted as anything in subtypesof(path)) if(initial(casted.abstract_type) == casted) continue - var/datum/prototype/instance = fetch(casted) + var/datum/prototype/instance = fetch_local_or_throw(casted) generating += instance return generating @@ -198,7 +314,8 @@ . = load(instance) if(!.) return - handle_db_store(instance) + if(store_enabled) + handle_db_store(instance) //* Private API *// @@ -217,7 +334,7 @@ CRASH("attempted to load an instance that collides with a currently loaded instance on type.") if(!instance.register()) . = FALSE - CRASH("instance refused to unregister. this is undefined behavior.") + CRASH("instance failed to register. 
this is undefined behavior.")
 	id_lookup[instance.id] = instance
 	if(instance.hardcoded)
 		// invalidate cache
@@ -236,7 +353,7 @@
 	PROTECTED_PROC(TRUE)
 	if(!instance.unregister())
 		. = FALSE
-		CRASH("instance refused to unregister. this is undefined behavior.")
+		CRASH("instance failed to unregister. this is undefined behavior.")
 	id_lookup -= instance.id
 	if(instance.hardcoded)
 		// invalidate cache
@@ -249,26 +366,85 @@
  * Perform migration on a data-list from the database.
  *
  * * Edit the passed in list directly.
+ * * This should update to latest.
+ *
+ * todo: proc to auto-migrate everything.
  */
 /datum/controller/repository/proc/migrate(list/modifying, from_version)
 	PROTECTED_PROC(TRUE)
 /datum/controller/repository/proc/handle_db_store(datum/prototype/instance)
-	if(!Configuration.get_entry(/datum/toml_config_entry/backend/repository/persistence))
-		return
 	doesnt_exist_cache -= instance.id
+
+	// intentionally allow admin proccalls to bypass checks in NewQuery()
+	var/old_usr = usr
+	usr = null
+
+	var/datum/db_query/store_query = SSdbcore.NewQuery(
+		"INSERT INTO " + DB_PREFIX_TABLE_NAME("backend_repository") + "(repository, id, version, data) VALUES \
+		(:repo, :id, :version, :data) ON DUPLICATE KEY UPDATE data = :data, modifiedTime = Now(), version = :version",
+		list(
+			"repo" = database_key,
+			"id" = instance.id,
+			"version" = store_version,
+			"data" = json_encode(instance.serialize()),
+		),
+	)
+
+	usr = old_usr
+
+	store_query.Execute(TRUE)
+	qdel(store_query)
+
 /datum/controller/repository/proc/handle_db_load(instance_id)
 	if(doesnt_exist_cache[instance_id])
 		return
-	var/const/doesnt_exist_cache_trim_at = 1000
-	var/const/doesnt_exist_cache_trim_to = 500
-	if(!Configuration.get_entry(/datum/toml_config_entry/backend/repository/persistence))
-		doesnt_exist_cache[instance_id] = TRUE
-		if(length(doesnt_exist_cache) > doesnt_exist_cache_trim_at)
-			doesnt_exist_cache.len = doesnt_exist_cache_trim_to
-		return
+
+	// intentionally allow admin proccalls to bypass checks in NewQuery()
+	var/old_usr = usr
+	usr = null
+
+	var/datum/db_query/load_query = SSdbcore.NewQuery(
+		"SELECT version, data FROM " + DB_PREFIX_TABLE_NAME("backend_repository") + " WHERE repository = :repo AND id = :id",
+		list(
+			"repo" = database_key,
+			"id" = instance_id,
+		),
+	)
+
+	usr = old_usr
+
+	load_query.Execute(TRUE)
+
+	if(!length(load_query.item))
+		mark_doesnt_exist(instance_id)
+	else
+		var/list/fetched = load_query.item[1]
+		var/version = fetched[1]
+		var/encoded_data = fetched[2]
+		var/list/decoded_data = json_decode(encoded_data)
+		var/migrated = FALSE
+
+		if(version < store_version)
+			migrate(decoded_data, version)
+			migrated = TRUE
+		else if(version > store_version)
+			mark_doesnt_exist(instance_id)
+			CRASH("[version] was not less or eq to [store_version]. something's very wrong!")
+
+		var/datum/prototype/loaded_instance = new expected_type
+		loaded_instance.deserialize(decoded_data)
+		if(!load(loaded_instance))
+			mark_doesnt_exist(instance_id)
+			CRASH("[instance_id] failed to load into the repository during database load!")
+		. = loaded_instance
+
+		if(migrated)
+			handle_db_store(loaded_instance)
+	qdel(load_query)
-#warn impl
+/datum/controller/repository/proc/mark_doesnt_exist(instance_id)
+	doesnt_exist_cache[instance_id] = TRUE
+	if(length(doesnt_exist_cache) > doesnt_exist_cache_trim_at)
+		doesnt_exist_cache.len = doesnt_exist_cache_trim_to
diff --git a/code/controllers/subsystem.dm b/code/controllers/subsystem.dm
index 0204320f6415..3287b34004fc 100644
--- a/code/controllers/subsystem.dm
+++ b/code/controllers/subsystem.dm
@@ -421,7 +421,7 @@
 		state = SS_PAUSING
 /// Called after the config has been loaded or reloaded.
-/datum/controller/subsystem/proc/on_config_reload()
+/datum/controller/subsystem/proc/on_config_loaded()
 	return
 /**
diff --git a/code/controllers/subsystem/assets.dm b/code/controllers/subsystem/assets.dm
index 9f40129f4e55..ff29b03f16ef 100644
--- a/code/controllers/subsystem/assets.dm
+++ b/code/controllers/subsystem/assets.dm
@@ -191,7 +191,7 @@ SUBSYSTEM_DEF(assets)
 /datum/controller/subsystem/assets/proc/get_dynamic_item_url_by_name(name)
 	return dynamic_asset_items_by_name[name]?.get_url()
-/datum/controller/subsystem/assets/on_config_reload()
+/datum/controller/subsystem/assets/on_config_loaded()
 	var/newtransporttype = /datum/asset_transport/browse_rsc
 	switch (CONFIG_GET(string/asset_transport))
 		if ("webroot")
diff --git a/code/controllers/subsystem/characters/storage.dm b/code/controllers/subsystem/characters/storage.dm
index 5a1b6045010c..a8afe9f55fa7 100644
--- a/code/controllers/subsystem/characters/storage.dm
+++ b/code/controllers/subsystem/characters/storage.dm
@@ -33,7 +33,7 @@
 	// last played is not updated by this proc
 	// everything else can though!
 	var/datum/db_query/update_query = SSdbcore.NewQuery(
-		"UPDATE [format_table_name("character")] \
+		"UPDATE [DB_PREFIX_TABLE_NAME("character")] \
 		SET[persisting? " last_persisted = NOW()," : ""] canonical_name = :name, persist_data = :data, \
 		playerid = :pid \
 		WHERE id = :id",
@@ -48,7 +48,7 @@
 	qdel(update_query)
 	else
 		var/datum/db_query/insert_query = SSdbcore.NewQuery(
-			"INSERT INTO [format_table_name("character")] \
+			"INSERT INTO [DB_PREFIX_TABLE_NAME("character")] \
 			(`created`, `last_played`, `last_persisted`, `playerid`, `canonical_name`, \
 			`persist_data`, `character_type`) \
 			VALUES (NOW(), NULL, [persisting? "NOW" : "NULL"], :pid, :name, :data, :type)",
@@ -85,7 +85,7 @@
 	var/datum/db_query/load_query = SSdbcore.NewQuery(
 		"SELECT `id` FROM \
-		[format_table_name("character")] WHERE playerid = :id AND canonical_name = :name AND character_type = :type",
+		[DB_PREFIX_TABLE_NAME("character")] WHERE playerid = :id AND canonical_name = :name AND character_type = :type",
 		list(
 			"id" = playerid,
 			"canonical_name" = name,
@@ -134,7 +134,7 @@
 	var/datum/db_query/load_query = SSdbcore.NewQuery(
 		"SELECT `created`, `last_played`, `last_persisted`, `playerid`, `canonical_name`, `persist_data`, `character_type` FROM \
-		[format_table_name("character")] WHERE id = :id",
+		[DB_PREFIX_TABLE_NAME("character")] WHERE id = :id",
 		list(
 			"id" = id,
 		)
 	)
@@ -190,7 +190,7 @@
 	. = list()
 	var/datum/db_query/iteration_query = SSdbcore.ExecuteQuery(
-		"SELECT id FROM [format_table_name("character")] WHERE playerid = :id",
+		"SELECT id FROM [DB_PREFIX_TABLE_NAME("character")] WHERE playerid = :id",
 		list(
 			"id" = playerid
 		)
 	)
@@ -223,7 +223,7 @@
 	// section below can never be allowed to runtime
 	var/datum/db_query/mark_query = SSdbcore.ExecuteQuery(
-		"UPDATE [format_table_name("character")] SET last_played = NOW() WHERE id = :id",
+		"UPDATE [DB_PREFIX_TABLE_NAME("character")] SET last_played = NOW() WHERE id = :id",
 		list(
 			"id" = id
 		)
 	)
diff --git a/code/controllers/subsystem/dbcore/_dbcore.dm b/code/controllers/subsystem/dbcore/_dbcore.dm
index 106a7fe09119..fc91dc5636ae 100644
--- a/code/controllers/subsystem/dbcore/_dbcore.dm
+++ b/code/controllers/subsystem/dbcore/_dbcore.dm
@@ -46,7 +46,7 @@ SUBSYSTEM_DEF(dbcore)
 	//This is as close as we can get to the true round end before Disconnect() without changing where it's called, defeating the reason this is a subsystem
 	if(SSdbcore.Connect())
 		var/datum/db_query/query_round_shutdown = SSdbcore.NewQuery(
-			"UPDATE [format_table_name("round")] SET shutdown_datetime = Now() WHERE id = :round_id",
+			"UPDATE [DB_PREFIX_TABLE_NAME("round")] SET shutdown_datetime = Now() WHERE id = :round_id",
 			list("round_id" = GLOB.round_id)
 		)
 		query_round_shutdown.Execute()
@@ -114,7 +114,7 @@ SUBSYSTEM_DEF(dbcore)
 	if(CONFIG_GET(flag/sql_enabled))
 		if(Connect())
 			log_world("Database connection established.")
-			var/datum/db_query/query_db_version = NewQuery("SELECT major, minor FROM [format_table_name("schema_revision")] ORDER BY date DESC LIMIT 1")
+			var/datum/db_query/query_db_version = NewQuery("SELECT major, minor FROM [DB_PREFIX_TABLE_NAME("schema_revision")] ORDER BY date DESC LIMIT 1")
 			query_db_version.Execute()
 			if(query_db_version.NextRow())
 				db_major = text2num(query_db_version.item[1])
@@ -135,7 +135,7 @@ SUBSYSTEM_DEF(dbcore)
 	if(!Connect())
 		return
 	var/datum/db_query/query_round_initialize = SSdbcore.NewQuery(
-		"INSERT INTO [format_table_name("round")] (initialize_datetime, server_ip, server_port) VALUES (Now(), INET_ATON(:internet_address), :port)",
+		"INSERT INTO [DB_PREFIX_TABLE_NAME("round")] (initialize_datetime, server_ip, server_port) VALUES (Now(), INET_ATON(:internet_address), :port)",
 		list("internet_address" = world.internet_address || "0", "port" = "[world.port]")
 	)
 	query_round_initialize.Execute(async = FALSE)
@@ -147,7 +147,7 @@ SUBSYSTEM_DEF(dbcore)
 	if(!Connect())
 		return
 	var/datum/db_query/query_round_start = SSdbcore.NewQuery(
-		"UPDATE [format_table_name("round")] SET start_datetime = Now() WHERE id = :round_id",
+		"UPDATE [DB_PREFIX_TABLE_NAME("round")] SET start_datetime = Now() WHERE id = :round_id",
 		list("round_id" = GLOB.round_id)
 	)
 	query_round_start.Execute()
@@ -157,7 +157,7 @@ SUBSYSTEM_DEF(dbcore)
 	if(!Connect())
 		return
 	var/datum/db_query/query_round_end = SSdbcore.NewQuery(
-		"UPDATE [format_table_name("round")] SET end_datetime = Now() WHERE id = :round_id",
+		"UPDATE [DB_PREFIX_TABLE_NAME("round")] SET end_datetime = Now() WHERE id = :round_id",
 		list("round_id" = GLOB.round_id)
 	)
 	query_round_end.Execute()
diff --git a/code/controllers/subsystem/ipintel.dm b/code/controllers/subsystem/ipintel.dm
index 7306fc085ee3..57d79422bb9b 100644
--- a/code/controllers/subsystem/ipintel.dm
+++ b/code/controllers/subsystem/ipintel.dm
@@ -20,7 +20,7 @@ SUBSYSTEM_DEF(ipintel)
 	/// max retries
var/max_retries = 1 -/datum/controller/subsystem/ipintel/on_config_reload() +/datum/controller/subsystem/ipintel/on_config_loaded() . = ..() enabled = !!CONFIG_GET(flag/ipintel_enabled) consequetive_errors = 0 @@ -139,7 +139,7 @@ SUBSYSTEM_DEF(ipintel) /datum/controller/subsystem/ipintel/proc/ipintel_cache_fetch_impl(address) PRIVATE_PROC(TRUE) var/datum/db_query/fetch = SSdbcore.NewQuery( - "SELECT date, intel, TIMESTAMPDIFF(MINUTE,date,NOW()) FROM [format_table_name("ipintel")] WHERE ip = INET_ATON(:ip)", + "SELECT date, intel, TIMESTAMPDIFF(MINUTE,date,NOW()) FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ipintel")] WHERE ip = INET_ATON(:ip)", list( "ip" = address, ) @@ -167,7 +167,7 @@ SUBSYSTEM_DEF(ipintel) /datum/controller/subsystem/ipintel/proc/ipintel_cache_store_impl(datum/ipintel/entry) PRIVATE_PROC(TRUE) var/datum/db_query/update = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("ipintel")] (ip, intel) VALUES (INET_ATON(:ip), :intel) \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ipintel")] (ip, intel) VALUES (INET_ATON(:ip), :intel) \ ON DUPLICATE KEY UPDATE intel = VALUES(intel), date = NOW()", list( "ip" = entry.address, diff --git a/code/controllers/subsystem/materials.dm b/code/controllers/subsystem/materials.dm index 877dcbe0bd1e..c679ee1f9de7 100644 --- a/code/controllers/subsystem/materials.dm +++ b/code/controllers/subsystem/materials.dm @@ -112,12 +112,16 @@ SUBSYSTEM_DEF(materials) // todo: optimize . = list() for(var/i in 1 to length(L)) - var/key = L[i] - var/datum/prototype/material/resolved = RSmaterials.fetch(key) - if(isnull(resolved)) - continue + var/datum/prototype/material/key = L[i] var/value = L[key] - .[resolved.id] = value + if(istype(key)) + key = key.id + else if(ispath(key)) + key = initial(key.id) + else if(istext(key)) + else + CRASH("what? '[key]'") + .[key] = value /** * ensures a list is full of material references for keys @@ -130,10 +134,14 @@ SUBSYSTEM_DEF(materials) . = list() for(var/i in 1 to length(L)) var/key = L[i] - var/datum/prototype/material/resolved = RSmaterials.fetch(key) - if(isnull(resolved)) - continue var/value = L[key] + var/datum/prototype/material/resolved = RSmaterials.fetch_or_defer(key) + switch(resolved) + if(null) + continue + if(REPOSITORY_FETCH_DEFER) + // todo: handle this + continue .[resolved] = value /** @@ -146,9 +154,15 @@ SUBSYSTEM_DEF(materials) . = list() for(var/i in 1 to length(L)) var/key = L[i] - var/value = L[key] - var/datum/prototype/material/resolved = RSmaterials.fetch(value) - .[key] = resolved?.id + var/datum/prototype/material/value = L[key] + if(istype(value)) + value = value.id + else if(ispath(value)) + value = initial(value.id) + else if(istext(value)) + else + CRASH("what? 
'[value]'") + .[key] = value /** * ensures a list is full of material references for values @@ -161,7 +175,12 @@ SUBSYSTEM_DEF(materials) for(var/i in 1 to length(L)) var/key = L[i] var/value = L[key] - var/datum/prototype/material/resolved = RSmaterials.fetch(value) + var/datum/prototype/material/resolved = RSmaterials.fetch_or_defer(key) + switch(resolved) + if(REPOSITORY_FETCH_DEFER) + // todo: handle this + else + value = resolved .[key] = resolved /** diff --git a/code/controllers/subsystem/persistence/modules/bulk_entity.dm b/code/controllers/subsystem/persistence/modules/bulk_entity.dm index 7d04d37568ee..6f48a4ba5f86 100644 --- a/code/controllers/subsystem/persistence/modules/bulk_entity.dm +++ b/code/controllers/subsystem/persistence/modules/bulk_entity.dm @@ -19,7 +19,7 @@ for(var/datum/bulk_entity_chunk/chunk as anything in chunks) var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("persistence_bulk_entity")] \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] \ (generation, persistence_key, level_id, data, round_id) \ VALUES (:generation, :persistence, :level, :data, :round)", list( @@ -47,7 +47,7 @@ usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT data FROM [format_table_name("persistence_bulk_entity")] \ + "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] \ WHERE generation = :generation AND persistence_key = :persistence AND level_id = :level", list( "generation" = generation, @@ -85,7 +85,7 @@ SSdbcore.dangerously_block_on_multiple_unsanitized_queries( list( - "TRUNCATE TABLE [format_table_name("persistence_bulk_entity")]", + "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")]", ), ) @@ -99,7 +99,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_bulk_entity")] WHERE level_id = :level", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] WHERE level_id = :level", list( "level" = level_id, ), diff --git a/code/controllers/subsystem/persistence/modules/level_objects.dm b/code/controllers/subsystem/persistence/modules/level_objects.dm index 43072527d789..379d291d7616 100644 --- a/code/controllers/subsystem/persistence/modules/level_objects.dm +++ b/code/controllers/subsystem/persistence/modules/level_objects.dm @@ -20,7 +20,7 @@ switch(entity.obj_persist_static_mode) if(OBJ_PERSIST_STATIC_MODE_LEVEL) query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("persistence_static_level_objects")] (generation, object_id, level_id, data) \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] (generation, object_id, level_id, data) \ VALUES (:generation, :object_id, :level_id, :data) ON DUPLICATE KEY UPDATE \ data = VALUES(data)", list( @@ -32,7 +32,7 @@ ) if(OBJ_PERSIST_STATIC_MODE_MAP) query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("persistence_static_map_objects")] (generation, object_id, map_id, data) \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")] (generation, object_id, map_id, data) \ VALUES (:generation, :object_id, :map_id, :data) ON DUPLICATE KEY UPDATE \ data = VALUES(data)", list( @@ -44,7 +44,7 @@ ) if(OBJ_PERSIST_STATIC_MODE_GLOBAL) query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("persistence_static_global_objects")] (generation, object_id, data) \ + "INSERT INTO 
[fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")] (generation, object_id, data) \ VALUES (:generation, :object_id, :data) ON DUPLICATE KEY UPDATE \ data = VALUES(data)", list( @@ -79,7 +79,7 @@ var/datum/db_query/query if(entity.obj_persist_dynamic_id != PERSISTENCE_DYNAMIC_ID_AUTOSET) query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("persistence_dynamic_objects")] (id, generation, status, data, prototype_id, level_id, x, y) \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] (id, generation, status, data, prototype_id, level_id, x, y) \ VALUES (:status, :data, :prototype, :level, :x, :y) ON DUPLICATE KEY UPDATE \ x = VALUES(x), y = VALUES(y), data = VALUES(data), prototype = VALUES(prototype), level = VALUES(level), \ status = VALUES(status)", @@ -97,7 +97,7 @@ query.warn_execute() else query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("persistence_dynamic_objects")] (status, data, prototype_id, level_id, x, y) \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] (status, data, prototype_id, level_id, x, y) \ VALUES (:status, :data, :prototype, :level, :x, :y)", list( "status" = entity.obj_persist_dynamic_status, @@ -139,7 +139,7 @@ var/datum/db_query/query = SSdbcore.NewQuery( "SELECT object_id, prototype_id, status, data, x, y \ - FROM [format_table_name("persistence_dynamic_objects")] \ + FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] \ WHERE level_id = :level AND generation = :generation", list( "generation" = generation, @@ -200,7 +200,7 @@ switch(entity.obj_persist_static_mode) if(OBJ_PERSIST_STATIC_MODE_GLOBAL) query = SSdbcore.NewQuery( - "SELECT data FROM [format_table_name("persistence_static_global_objects")] \ + "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")] \ WHERE object_id = :object AND generation = :generation", list( "object" = entity.obj_persist_static_id, @@ -209,7 +209,7 @@ ) if(OBJ_PERSIST_STATIC_MODE_LEVEL) query = SSdbcore.NewQuery( - "SELECT data FROM [format_table_name("persistence_static_level_objects")] \ + "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] \ WHERE object_id = :object AND level_id = :level AND generation = :generation", list( "object" = entity.obj_persist_static_id, @@ -220,7 +220,7 @@ bind_id = level_id if(OBJ_PERSIST_STATIC_MODE_MAP) query = SSdbcore.NewQuery( - "SELECT data FROM [format_table_name("persistence_static_map_objects")] \ + "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")] \ WHERE object_id = :object AND map_id = :map AND generation = :generation", list( "object" = entity.obj_persist_static_id, @@ -258,9 +258,9 @@ SSdbcore.dangerously_block_on_multiple_unsanitized_queries( list( - "TRUNCATE TABLE [format_table_name("persistence_static_map_objects")]", - "TRUNCATE TABLE [format_table_name("persistence_static_level_objects")]", - "TRUNCATE TABLE [format_table_name("persistence_static_global_objects")]", + "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")]", + "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")]", + "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")]", ), ) @@ -276,7 +276,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_static_level_objects")] WHERE level_id = 
:level", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] WHERE level_id = :level", list( "level" = level_id, ), @@ -294,7 +294,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_static_map_objects")] WHERE map_id = :map", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")] WHERE map_id = :map", list( "map" = map_id, ), @@ -312,7 +312,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_static_global_objects")]", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")]", ) usr = intentionally_allow_admin_proccall @@ -327,7 +327,7 @@ usr = null SSdbcore.RunQuery( - "TRUNCATE TABLE [format_table_name("persistence_dynamic_objects")]", + "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")]", ) usr = intentionally_allow_admin_proccall @@ -342,7 +342,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_dynamic_objects")] WHERE level_id = :level", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] WHERE level_id = :level", list( "level" = level_id, ), diff --git a/code/controllers/subsystem/persistence/modules/spatial_metadata.dm b/code/controllers/subsystem/persistence/modules/spatial_metadata.dm index 8318e210040a..4cec788d7f3e 100644 --- a/code/controllers/subsystem/persistence/modules/spatial_metadata.dm +++ b/code/controllers/subsystem/persistence/modules/spatial_metadata.dm @@ -66,7 +66,7 @@ var/datum/db_query/query = SSdbcore.NewQuery( "SELECT TIMESTAMPDIFF(HOUR, saved, NOW()), saved_round_id, data, generation \ - FROM [format_table_name("persistence_level_metadata")] \ + FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_level_metadata")] \ WHERE level_id = :level", list( "level" = level_id, @@ -103,7 +103,7 @@ src.round_id_saved = GLOB.round_number SSdbcore.RunQuery( - "INSERT INTO [format_table_name("persistence_level_metadata")] (saved, saved_round_id, level_id, data, generation) \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_level_metadata")] (saved, saved_round_id, level_id, data, generation) \ VALUES (Now(), :round, :level, :data, :generation) ON DUPLICATE KEY UPDATE \ data = VALUES(data), generation = VALUES(generation), saved_round_id = VALUES(saved_round_id), saved = VALUES(saved)", list( diff --git a/code/controllers/subsystem/persistence/modules/string_kkv.dm b/code/controllers/subsystem/persistence/modules/string_kkv.dm index 6ae51d36a225..0038e510e7b8 100644 --- a/code/controllers/subsystem/persistence/modules/string_kkv.dm +++ b/code/controllers/subsystem/persistence/modules/string_kkv.dm @@ -60,7 +60,7 @@ var/oldusr = usr usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT `value` FROM [format_table_name("persistence_string_kkv")] WHERE `group` = :group AND `key` = :key", + "SELECT `value` FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_string_kkv")] WHERE `group` = :group AND `key` = :key", list( "group" = group, "key" = key @@ -79,7 +79,7 @@ var/oldusr = usr usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("persistence_string_kkv")] (`group`, `key`, `value`) VALUES (:group, :key, :value) ON DUPLICATE KEY UPDATE `value` = VALUES(`value`), `modified` = Now(), `revision` = `revision` + 1", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_string_kkv")] (`group`, `key`, 
`value`) VALUES (:group, :key, :value) ON DUPLICATE KEY UPDATE `value` = VALUES(`value`), `modified` = Now(), `revision` = `revision` + 1", list( "group" = group, "key" = key, diff --git a/code/controllers/subsystem/persistence/world.dm b/code/controllers/subsystem/persistence/world.dm index 30ec4b2cfd2a..76a69b5ee881 100644 --- a/code/controllers/subsystem/persistence/world.dm +++ b/code/controllers/subsystem/persistence/world.dm @@ -234,21 +234,21 @@ for(var/datum/map_level_persistence/level_metadata as anything in ordered_level_metadata) SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_bulk_entity")] WHERE level_id = :level, generation != :generation", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] WHERE level_id = :level, generation != :generation", list( "level" = level_metadata.level_id, "generation" = level_metadata.generation, ), ) SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_static_level_objects")] WHERE level_id = :level, generation != :generation", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] WHERE level_id = :level, generation != :generation", list( "level" = level_metadata.level_id, "generation" = level_metadata.generation, ), ) SSdbcore.RunQuery( - "DELETE FROM [format_table_name("persistence_dynamic_objects")] WHERE level_id = :level, generation != :generation", + "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] WHERE level_id = :level, generation != :generation", list( "level" = level_metadata.level_id, "generation" = level_metadata.generation, diff --git a/code/controllers/subsystem/photography.dm b/code/controllers/subsystem/photography.dm index 7e4fbbcbccd4..c8d9cfe89da4 100644 --- a/code/controllers/subsystem/photography.dm +++ b/code/controllers/subsystem/photography.dm @@ -115,7 +115,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" - INSERT INTO [format_table_name("pictures")] + INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("pictures")] (`hash`, `width`, `height`) VALUES (:hash, :width, :height) "}, @@ -140,7 +140,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" SELECT `width`, `height` - FROM [format_table_name("pictures")] + FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("pictures")] WHERE `hash` = :hash "}, list( @@ -195,7 +195,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" - INSERT INTO [format_table_name("photographs")] + INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("photographs")] (`picture`, `scene`, `desc`) VALUES (:hash, :scene, :desc) "}, @@ -220,7 +220,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" SELECT `picture`, `scene`, `desc` - FROM [format_table_name("photographs")] + FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("photographs")] WHERE `id` = :id "}, list( diff --git a/code/controllers/subsystem/playtime.dm b/code/controllers/subsystem/playtime.dm index 617cdbf03350..b7dca61f319c 100644 --- a/code/controllers/subsystem/playtime.dm +++ b/code/controllers/subsystem/playtime.dm @@ -46,7 +46,7 @@ SUBSYSTEM_DEF(playtime) "player" = playerid ) C.persistent.playtime_queued = list() - SSdbcore.MassInsertLegacy(format_table_name("playtime"), built, duplicate_key = "ON DUPLICATE KEY UPDATE minutes = minutes + VALUES(minutes)") + SSdbcore.MassInsertLegacy(fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("playtime"), built, duplicate_key = "ON DUPLICATE KEY UPDATE 
minutes = minutes + VALUES(minutes)") /** * returns a list of playtime roles diff --git a/code/controllers/subsystem/repository.dm b/code/controllers/subsystem/repository.dm index 04db037a4973..4464288f229d 100644 --- a/code/controllers/subsystem/repository.dm +++ b/code/controllers/subsystem/repository.dm @@ -11,3 +11,6 @@ SUBSYSTEM_DEF(repository) __create_repositories() __init_repositories() return SS_INIT_SUCCESS + +/datum/controller/subsystem/repository/proc/get_all_repositories() + return __get_all_repositories() diff --git a/code/controllers/toml_config/toml_configuration.dm b/code/controllers/toml_config/toml_configuration.dm index c93a3d712bfc..3b4faeecffdc 100644 --- a/code/controllers/toml_config/toml_configuration.dm +++ b/code/controllers/toml_config/toml_configuration.dm @@ -28,14 +28,6 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) return FALSE return ..() -/datum/controller/toml_configuration/vv_get_var(var_name, resolve) - switch(var_name) - if(NAMEOF(src, keyed_entries)) - return debug_variable(NAMEOF(src, keyed_entries), deep_copy_list(keyed_entries), 0, src) - if(NAMEOF(src, typed_entries)) - return debug_variable(NAMEOF(src, typed_entries), typed_entries.Copy(), 0, src) - return ..() - /datum/controller/toml_configuration/New() if(Configuration != src) if(Configuration) @@ -52,7 +44,7 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) typed_entries[entry.type] = entry var/list/nesting = splittext(entry.category, ".") var/list/current_list = keyed_entries - for(var/i in 1 to length(nesting) - 1) + for(var/i in 1 to length(nesting)) LAZYINITLIST(current_list[nesting[i]]) current_list = current_list[nesting[i]] current_list[entry.key] = entry diff --git a/code/game/machinery/telecomms/blackbox.dm b/code/game/machinery/telecomms/blackbox.dm index 61e83517e4c9..24108ea87719 100644 --- a/code/game/machinery/telecomms/blackbox.dm +++ b/code/game/machinery/telecomms/blackbox.dm @@ -182,7 +182,7 @@ var/obj/machinery/blackbox_recorder/blackbox var/round_id var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT MAX(round_id) AS round_id FROM [format_table_name("feedback")]", + "SELECT MAX(round_id) AS round_id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")]", list() ) @@ -195,7 +195,7 @@ var/obj/machinery/blackbox_recorder/blackbox for(var/datum/feedback_variable/FV in feedback) SSdbcore.RunQuery( - "INSERT INTO [format_table_name("feedback")] VALUES (null, Now(), :round_id, :variable, :value, :details)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")] VALUES (null, Now(), :round_id, :variable, :value, :details)", list( "round_id" = "[round_id]", "variable" = "[FV.get_variable()]", diff --git a/code/game/objects/materials.dm b/code/game/objects/materials.dm index 8f6e73ce51cd..7a8c5573657d 100644 --- a/code/game/objects/materials.dm +++ b/code/game/objects/materials.dm @@ -91,7 +91,12 @@ if(islist(material_parts)) var/list/parts = list() for(var/key in material_parts) - parts[key] = RSmaterials.fetch(key) + var/datum/prototype/material/result = RSmaterials.fetch_or_defer(key) + switch(result) + if(REPOSITORY_FETCH_DEFER) + // todo: handle this + result = null + parts[key] = result update_material_multi(parts) else if(material_parts == MATERIAL_DEFAULT_DISABLED) else if(material_parts == MATERIAL_DEFAULT_ABSTRACTED) @@ -99,7 +104,12 @@ // skip specifying parts because abstracted update_material_multi() else - update_material_single((material_parts = RSmaterials.fetch(material_parts))) + 
var/datum/prototype/material/result = RSmaterials.fetch_or_defer(material_parts) + switch(result) + if(REPOSITORY_FETCH_DEFER) + // todo: handle this + result = null + update_material_single((material_parts = result)) /** * forces a material update diff --git a/code/game/objects/structures/barricade.dm b/code/game/objects/structures/barricade.dm index e490a2965fa4..e34557966bd6 100644 --- a/code/game/objects/structures/barricade.dm +++ b/code/game/objects/structures/barricade.dm @@ -13,7 +13,12 @@ /obj/structure/barricade/Initialize(mapload, datum/prototype/material/material_like) if(!isnull(material_like)) - set_primary_material(RSmaterials.fetch(material_like)) + var/resolved_material = RSmaterials.fetch_or_defer(material_like) + switch(resolved_material) + if(REPOSITORY_FETCH_DEFER) + // todo: handle + else + set_primary_material(resolved_material) return ..() /obj/structure/barricade/update_material_single(datum/prototype/material/material) diff --git a/code/game/objects/structures/low_wall.dm b/code/game/objects/structures/low_wall.dm index e3419164af54..e3d4b324711c 100644 --- a/code/game/objects/structures/low_wall.dm +++ b/code/game/objects/structures/low_wall.dm @@ -48,9 +48,14 @@ GLOBAL_LIST_INIT(wallframe_typecache, typecacheof(list( paint_color = COLOR_WALL_GUNMETAL stripe_color = COLOR_WALL_GUNMETAL -/obj/structure/wall_frame/Initialize(mapload, material) - if(!isnull(material)) - set_primary_material(RSmaterials.fetch(material)) +/obj/structure/wall_frame/Initialize(mapload, datum/prototype/material/material_like) + if(!isnull(material_like)) + var/resolved_material = RSmaterials.fetch_or_defer(material_like) + switch(resolved_material) + if(REPOSITORY_FETCH_DEFER) + // todo: handle + else + set_primary_material(resolved_material) . = ..() update_overlays() diff --git a/code/game/objects/structures/props/puzzledoor.dm b/code/game/objects/structures/props/puzzledoor.dm index 66c6b1736827..a73e4cd16311 100644 --- a/code/game/objects/structures/props/puzzledoor.dm +++ b/code/game/objects/structures/props/puzzledoor.dm @@ -36,7 +36,7 @@ /obj/machinery/door/blast/puzzle/Initialize(mapload) . = ..() - implicit_material = RSmaterials.fetch(/datum/prototype/material/alienalloy/dungeonium) + implicit_material = RSmaterials.fetch_local_or_throw(/datum/prototype/material/alienalloy/dungeonium) if(locks.len) return var/check_range = world.view * checkrange_mult diff --git a/code/game/objects/structures/simple_doors.dm b/code/game/objects/structures/simple_doors.dm index 273b68e575f6..09824f2a6936 100644 --- a/code/game/objects/structures/simple_doors.dm +++ b/code/game/objects/structures/simple_doors.dm @@ -15,9 +15,14 @@ var/isSwitchingStates = 0 var/oreAmount = 7 -/obj/structure/simple_door/Initialize(mapload, material) - if(!isnull(material)) - set_primary_material(RSmaterials.fetch(material)) +/obj/structure/simple_door/Initialize(mapload, datum/prototype/material/material_like) + if(!isnull(material_like)) + var/resolved_material = RSmaterials.fetch_or_defer(material_like) + switch(resolved_material) + if(REPOSITORY_FETCH_DEFER) + // todo: handle + else + set_primary_material(resolved_material) return ..() /obj/structure/simple_door/update_material_single(datum/prototype/material/material) diff --git a/code/game/statistics.dm b/code/game/statistics.dm index e8f73892bd62..647965dc8964 100644 --- a/code/game/statistics.dm +++ b/code/game/statistics.dm @@ -9,7 +9,7 @@ log_game("SQL ERROR during population polling. 
Failed to connect.") else var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("population")] (playercount, admincount, time) VALUES (:pc, :ac, NOW())", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("population")] (playercount, admincount, time) VALUES (:pc, :ac, NOW())", list( "pc" = sanitizeSQL(playercount), "ac" = sanitizeSQL(admincount), @@ -48,7 +48,7 @@ log_game("SQL ERROR during death reporting. Failed to connect.") else var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ (:name, :key, :job, :special, :pod, :time, :laname, :lakey, :gender, :bruteloss, :fireloss, :brainloss, :oxyloss, :coord)", list( "name" = sqlname, @@ -98,7 +98,7 @@ log_game("SQL ERROR during death reporting. Failed to connect.") else var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ (:name, :key, :job, :special, :pod, :time, :laname, :lakey, :geender, :bruteloss, :fireloss, :brainloss, :oxyloss, :coord)", list( "name" = sqlname, @@ -146,7 +146,7 @@ else var/datum/db_query/max_query = SSdbcore.RunQuery( - "SELECT MAX(roundid) AS max_round_id FROM [format_table_name("feedback")]", + "SELECT MAX(roundid) AS max_round_id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")]", list(), ) @@ -168,7 +168,7 @@ var/value = item.get_value() var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("feedback")] (id, roundid, time, variable, value) VALUES (null, :rid, Now(), :var, :val)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")] (id, roundid, time, variable, value) VALUES (null, :rid, Now(), :var, :val)", list( "rid" = newroundid, "var" = sanitizeSQL(variable), diff --git a/code/game/turfs/simulated/wall/materials.dm b/code/game/turfs/simulated/wall/materials.dm index 507a17b1032d..1867c7a0725b 100644 --- a/code/game/turfs/simulated/wall/materials.dm +++ b/code/game/turfs/simulated/wall/materials.dm @@ -1,7 +1,7 @@ /turf/simulated/wall/proc/init_materials(datum/prototype/material/outer = material_outer, datum/prototype/material/reinforcing = material_reinf, datum/prototype/material/girder = material_girder) - outer = RSmaterials.fetch(outer) - reinforcing = RSmaterials.fetch(reinforcing) - girder = RSmaterials.fetch(girder) + outer = RSmaterials.fetch_local_or_throw(outer) + reinforcing = RSmaterials.fetch_local_or_throw(reinforcing) + girder = RSmaterials.fetch_local_or_throw(girder) if(!isnull(outer)) material_outer = outer diff --git a/code/modules/admin/DB ban/functions.dm b/code/modules/admin/DB ban/functions.dm index 0f195dd1d715..d871a341cd8c 100644 --- a/code/modules/admin/DB ban/functions.dm +++ b/code/modules/admin/DB ban/functions.dm @@ -72,7 +72,7 @@ computerid = "" if(isnull(ip)) ip = "" - var/sql = "INSERT INTO [format_table_name("ban")] \ + var/sql = "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] \ 
(`id`,`bantime`,`serverip`,`bantype`,`reason`,`job`,`duration`,`rounds`,`expiration_time`,`ckey`,`computerid`,`ip`,`a_ckey`,`a_computerid`,`a_ip`,`who`,`adminwho`,`edits`,`unbanned`,`unbanned_datetime`,`unbanned_ckey`,`unbanned_computerid`,`unbanned_ip`) \ VALUES (null, Now(), :serverip, :type, :reason, :job, :duration, :rounds, Now() + INTERVAL :duration MINUTE, :ckey, :cid, :ip, :a_ckey, :a_cid, :a_ip, :who, :adminwho, '', null, null, null, null, null)" SSdbcore.RunQuery( @@ -133,7 +133,7 @@ else bantype_sql = "bantype = '[bantype_str]'" - var/sql = "SELECT id FROM [format_table_name("ban")] WHERE ckey = :ckey AND [bantype_sql] AND (unbanned is null OR unbanned = false)" + var/sql = "SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE ckey = :ckey AND [bantype_sql] AND (unbanned is null OR unbanned = false)" if(job) sql += " AND job = :job" @@ -183,7 +183,7 @@ return var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, duration, reason FROM [format_table_name("ban")] WHERE id = :id", + "SELECT ckey, duration, reason FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE id = :id", list( "id" = banid ) @@ -215,7 +215,7 @@ return SSdbcore.RunQuery( - "UPDATE [format_table_name("ban")] SET reason = :reason, \ + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] SET reason = :reason, \ edits = CONCAT(edits, '- :ckey changed ban reason from \\\":oldreason\\\" to \\\":reason\\\"
') \ WHERE id = :id", list( @@ -233,7 +233,7 @@ to_chat(usr, "Cancelled") return SSdbcore.RunQuery( - "UPDATE [format_table_name("ban")] SET duration = :duration, \ + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] SET duration = :duration, \ edits = CONCAT(edits, '- :ckey changed ban duration from :oldduration to :duration
'), expiration_time = DATE_ADD(bantime, INTERVAL :duration MINUTE) \ WHERE id = :id", list( @@ -264,7 +264,7 @@ var/pckey var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey FROM [format_table_name("ban")] WHERE id = :id", + "SELECT ckey FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE id = :id", list( "id" = id ) @@ -292,7 +292,7 @@ message_admins("[key_name_admin(usr)] has lifted [pckey]'s ban.",1) SSdbcore.RunQuery( - "UPDATE [format_table_name("ban")] SET unbanned = 1, unbanned_datetime = Now(), unbanned_ckey = :ckey, unbanned_computerid = :cid, unbanned_ip = :ip WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] SET unbanned = 1, unbanned_datetime = Now(), unbanned_ckey = :ckey, unbanned_computerid = :cid, unbanned_ip = :ip WHERE id = :id", list( "ckey" = unban_ckey, "cid" = unban_computerid, @@ -458,7 +458,7 @@ var/datum/db_query/select_query = SSdbcore.RunQuery( "SELECT id, bantime, bantype, reason, job, duration, expiration_time, ckey, a_ckey, unbanned, unbanned_ckey, unbanned_datetime, edits, ip, computerid \ - FROM [format_table_name("ban")] \ + FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] \ WHERE 1 [playersearch] [adminsearch] [ipsearch] [cidsearch] [bantypesearch] ORDER BY bantime DESC LIMIT 100", search_params ) diff --git a/code/modules/admin/IsBanned.dm b/code/modules/admin/IsBanned.dm index 2eb8325ac572..58c936a1f098 100644 --- a/code/modules/admin/IsBanned.dm +++ b/code/modules/admin/IsBanned.dm @@ -88,7 +88,7 @@ cidquery = " OR computerid = ':cid' " var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, ip, computerid, a_ckey, reason, expiration_time, duration, bantime, bantype FROM [format_table_name("ban")] WHERE (ckey = :ckey [ipquery] [cidquery]) AND (bantype = 'PERMABAN' OR (bantype = 'TEMPBAN' AND expiration_time > Now())) AND isnull(unbanned)", + "SELECT ckey, ip, computerid, a_ckey, reason, expiration_time, duration, bantime, bantype FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE (ckey = :ckey [ipquery] [cidquery]) AND (bantype = 'PERMABAN' OR (bantype = 'TEMPBAN' AND expiration_time > Now())) AND isnull(unbanned)", list( "ckey" = ckeytext, "ip" = address, diff --git a/code/modules/admin/admin_ranks.dm b/code/modules/admin/admin_ranks.dm index 29500fb71be7..25664de498ec 100644 --- a/code/modules/admin/admin_ranks.dm +++ b/code/modules/admin/admin_ranks.dm @@ -113,7 +113,7 @@ var/list/admin_ranks = list() //list of all ranks with associated rights return var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, rank, level, flags FROM [format_table_name("admin")]", + "SELECT ckey, rank, level, flags FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")]", list() ) diff --git a/code/modules/admin/banjob.dm b/code/modules/admin/banjob.dm index 147e5c3cd7cc..717b9156a06c 100644 --- a/code/modules/admin/banjob.dm +++ b/code/modules/admin/banjob.dm @@ -74,7 +74,7 @@ DEBUG //Job permabans var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, job FROM [format_table_name("ban")] WHERE bantype = 'JOB_PERMABAN' AND isnull(unbanned)", + "SELECT ckey, job FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE bantype = 'JOB_PERMABAN' AND isnull(unbanned)", list() ) @@ -86,7 +86,7 @@ DEBUG //Job tempbans var/datum/db_query/query1 = SSdbcore.RunQuery( - "SELECT ckey, job FROM [format_table_name("ban")] WHERE bantype = 'JOB_TEMPBAN' AND isnull(unbanned) AND expiration_time > Now()", + "SELECT ckey, job FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE bantype = 
'JOB_TEMPBAN' AND isnull(unbanned) AND expiration_time > Now()", list() ) diff --git a/code/modules/admin/permissionverbs/permissionedit.dm b/code/modules/admin/permissionverbs/permissionedit.dm index fad215244819..eae21a1f2650 100644 --- a/code/modules/admin/permissionverbs/permissionedit.dm +++ b/code/modules/admin/permissionverbs/permissionedit.dm @@ -78,7 +78,7 @@ return var/datum/db_query/select_query = SSdbcore.RunQuery( - "SELECT id FROM [format_table_name("admin")] WHERE ckey = :ckey", + "SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] WHERE ckey = :ckey", list( "ckey" = adm_ckey ) @@ -92,14 +92,14 @@ if(new_admin) SSdbcore.RunQuery( - "INSERT INTO [format_table_name("admin")] (id, ckey, rank, level, flags) VALUES (null, :ckey, :rank, -1, 0)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] (id, ckey, rank, level, flags) VALUES (null, :ckey, :rank, -1, 0)", list( "ckey" = adm_ckey, "rank" = new_rank ) ) SSdbcore.RunQuery( - "INSERT INTO [format_table_name("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, NOW(), :ckey, :ip, :logstr)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, NOW(), :ckey, :ip, :logstr)", list( "ckey" = sanitizeSQL(usr.ckey), "ip" = sanitizeSQL(usr.client.address), @@ -110,14 +110,14 @@ else if(!isnull(admin_id) && isnum(admin_id)) SSdbcore.RunQuery( - "UPDATE [format_table_name("admin")] SET rank = :rank WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] SET rank = :rank WHERE id = :id", list( "rank" = new_rank, "id" = admin_id ) ) SSdbcore.RunQuery( - "INSERT INTO [format_table_name("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", list( "ckey" = usr.ckey, "addr" = usr.client.address, @@ -155,7 +155,7 @@ return var/datum/db_query/select_query = SSdbcore.RunQuery( - "SELECT id, flags FROM [format_table_name("admin")] WHERE ckey = :ckey", + "SELECT id, flags FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] WHERE ckey = :ckey", list( "ckey" = adm_ckey ) @@ -172,14 +172,14 @@ if(admin_rights & new_permission) //This admin already has this permission, so we are removing it. SSdbcore.RunQuery( - "UPDATE [format_table_name("admin")] SET flags = :flags WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] SET flags = :flags WHERE id = :id", list( "flags" = admin_rights & ~new_permission, "id" = admin_id ) ) SSdbcore.RunQuery( - "INSERT INTO [format_table_name("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", list( "ckey" = usr.ckey, "addr" = usr.client.address, @@ -189,14 +189,14 @@ to_chat(usr, "Permission removed.") else //This admin doesn't have this permission, so we are adding it. 
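// Illustrative sketch, not part of this patch: the two branches of this hunk differ only in the
// bitwise value they bind as :flags in the UPDATE below -- removal clears the permission bit,
// addition sets it. Using the names from the surrounding hunk:
//     var/new_flags = (admin_rights & new_permission) ? (admin_rights & ~new_permission) : (admin_rights | new_permission)
// new_flags is then written to the admin row, and a matching admin_log row records who changed it.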
SSdbcore.RunQuery( - "UPDATE [format_table_name("admin")] SET flags = :flags WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] SET flags = :flags WHERE id = :id", list( "flags" = admin_rights | new_permission, "id" = admin_id ) ) SSdbcore.RunQuery( - "INSERT INTO [format_table_name("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", list( "ckey" = usr.ckey, "addr" = usr.client.address, diff --git a/code/modules/admin/verbs/check_customitem_activity.dm b/code/modules/admin/verbs/check_customitem_activity.dm index 02e577d13e4d..efc9a005446c 100644 --- a/code/modules/admin/verbs/check_customitem_activity.dm +++ b/code/modules/admin/verbs/check_customitem_activity.dm @@ -55,7 +55,7 @@ var/inactive_keys = "None
" var/list/inactive_ckeys = list() if(ckeys_with_customitems.len) var/datum/db_query/query_inactive = SSdbcore.RunQuery( - "SELECT ckey, lastseen FROM [format_table_name("player_lookup")] WHERE datediff(Now(), lastseen) > 60", + "SELECT ckey, lastseen FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE datediff(Now(), lastseen) > 60", list() ) while(query_inactive.NextRow()) @@ -69,7 +69,7 @@ var/inactive_keys = "None
" if(ckeys_with_customitems.len) for(var/cur_ckey in ckeys_with_customitems) var/datum/db_query/query_inactive = SSdbcore.RunQuery( - "SELECT ckey FROM [format_table_name("player_lookup")] WHERE ckey = :ckey", + "SELECT ckey FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE ckey = :ckey", list( "ckey" = cur_ckey ) diff --git a/code/modules/artwork/items/poster.dm b/code/modules/artwork/items/poster.dm index be689b6a4f99..d1be0da56d4d 100644 --- a/code/modules/artwork/items/poster.dm +++ b/code/modules/artwork/items/poster.dm @@ -26,7 +26,7 @@ poster_design_id = pick(RSposter_designs.fetch_by_tag_mutable(poster_random_tag)) if(poster_design_id != src.poster_design_id) src.poster_design_id = poster_design_id - set_poster_design(RSposter_designs.fetch(poster_design_id)) + set_poster_design(RSposter_designs.fetch_local_or_throw(poster_design_id)) /obj/item/poster/proc/set_poster_design(datum/prototype/poster_design/design) src.name = "rolled-up-poster - [design.name]" diff --git a/code/modules/artwork/structures/poster.dm b/code/modules/artwork/structures/poster.dm index 4b707581a185..0c2f3147a1e8 100644 --- a/code/modules/artwork/structures/poster.dm +++ b/code/modules/artwork/structures/poster.dm @@ -41,7 +41,7 @@ poster_design_id = pick(RSposter_designs.fetch_by_tag_mutable(poster_random_tag)) if(poster_design_id != src.poster_design_id) src.poster_design_id = poster_design_id - set_poster_design(RSposter_designs.fetch(poster_design_id)) + set_poster_design(RSposter_designs.fetch_local_or_throw(poster_design_id)) /obj/structure/poster/proc/set_poster_design(datum/prototype/poster_design/design) src.name = "rolled-up-poster - [design.name]" diff --git a/code/modules/artwork/structures/sculpting_block.dm b/code/modules/artwork/structures/sculpting_block.dm index d9e53ef0ced6..b8598d4cbbde 100644 --- a/code/modules/artwork/structures/sculpting_block.dm +++ b/code/modules/artwork/structures/sculpting_block.dm @@ -78,9 +78,15 @@ /// sculpting mask for our block var/icon/sculpting_rolldown_mask -/obj/structure/sculpting_block/Initialize(mapload, material) +/obj/structure/sculpting_block/Initialize(mapload, datum/prototype/material/material_like) // todo: materials system - src.material = RSmaterials.fetch(material || src.material) + if(!isnull(material_like)) + var/resolved_material = RSmaterials.fetch_or_defer(material_like) + switch(resolved_material) + if(REPOSITORY_FETCH_DEFER) + // todo: handle + else + src.material = resolved_material || RSmaterials.fetch_local_or_throw(/datum/prototype/material/steel) // todo: if it autoinit'd, don't do this reset_sculpting() return ..() diff --git a/code/modules/client/client_procs.dm b/code/modules/client/client_procs.dm index bb1edf8d647a..dfbca7ea652a 100644 --- a/code/modules/client/client_procs.dm +++ b/code/modules/client/client_procs.dm @@ -467,7 +467,7 @@ var/sql_system_ckey = sanitizeSQL(system_ckey) var/sql_ckey = sanitizeSQL(ckey) //check to see if we noted them in the last day. 
- var/datum/DBQuery/query_get_notes = SSdbcore.NewQuery("SELECT id FROM [format_table_name("messages")] WHERE type = 'note' AND targetckey = '[sql_ckey]' AND adminckey = '[sql_system_ckey]' AND timestamp + INTERVAL 1 DAY < NOW() AND deleted = 0 AND expire_timestamp > NOW()") + var/datum/DBQuery/query_get_notes = SSdbcore.NewQuery("SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("messages")] WHERE type = 'note' AND targetckey = '[sql_ckey]' AND adminckey = '[sql_system_ckey]' AND timestamp + INTERVAL 1 DAY < NOW() AND deleted = 0 AND expire_timestamp > NOW()") if(!query_get_notes.Execute()) qdel(query_get_notes) return @@ -476,7 +476,7 @@ return qdel(query_get_notes) //regardless of above, make sure their last note is not from us, as no point in repeating the same note over and over. - query_get_notes = SSdbcore.NewQuery("SELECT adminckey FROM [format_table_name("messages")] WHERE targetckey = '[sql_ckey]' AND deleted = 0 AND expire_timestamp > NOW() ORDER BY timestamp DESC LIMIT 1") + query_get_notes = SSdbcore.NewQuery("SELECT adminckey FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("messages")] WHERE targetckey = '[sql_ckey]' AND deleted = 0 AND expire_timestamp > NOW() ORDER BY timestamp DESC LIMIT 1") if(!query_get_notes.Execute()) qdel(query_get_notes) return diff --git a/code/modules/client/connection.dm b/code/modules/client/connection.dm index 5e45f624d366..0d90f1bcfbf7 100644 --- a/code/modules/client/connection.dm +++ b/code/modules/client/connection.dm @@ -7,7 +7,7 @@ return var/datum/db_query/lookup = SSdbcore.NewQuery( - "SELECT id FROM [format_table_name("player_lookup")] WHERE ckey = :ckey", + "SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE ckey = :ckey", list( "ckey" = ckey, ) @@ -20,7 +20,7 @@ if(sql_id) var/datum/db_query/update = SSdbcore.NewQuery( - "UPDATE [format_table_name("player_lookup")] SET lastseen = Now(), ip = :ip, computerid = :computerid, lastadminrank = :lastadminrank WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] SET lastseen = Now(), ip = :ip, computerid = :computerid, lastadminrank = :lastadminrank WHERE id = :id", list( "ip" = address, "computerid" = computer_id, @@ -33,7 +33,7 @@ else //New player!! 
Need to insert all the stuff var/datum/db_query/insert = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("player_lookup")] (id, ckey, firstseen, lastseen, ip, computerid, lastadminrank) VALUES (null, :ckey, Now(), Now(), :ip, :cid, :rank)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] (id, ckey, firstseen, lastseen, ip, computerid, lastadminrank) VALUES (null, :ckey, Now(), Now(), :ip, :cid, :rank)", list( "ckey" = ckey, "ip" = address, @@ -55,7 +55,7 @@ return SSdbcore.RunQuery( - "INSERT INTO [format_table_name("connection_log")] (id, datetime, serverip, ckey, ip, computerid) VALUES (null, Now(), :server, :ckey, :ip, :cid)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("connection_log")] (id, datetime, serverip, ckey, ip, computerid) VALUES (null, Now(), :server, :ckey, :ip, :cid)", list( "server" = "[world.internet_address]:[world.port]", "ckey" = ckey, diff --git a/code/modules/client/data/client_data.dm b/code/modules/client/data/client_data.dm index 8b74a28b368e..1e4c8499a7fa 100644 --- a/code/modules/client/data/client_data.dm +++ b/code/modules/client/data/client_data.dm @@ -136,7 +136,7 @@ GLOBAL_LIST_EMPTY(client_data) playtime_mutex = FALSE return var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT `roleid`, `minutes` FROM [format_table_name("playtime")] WHERE player = :player", + "SELECT `roleid`, `minutes` FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("playtime")] WHERE player = :player", list( "player" = player_id, ) diff --git a/code/modules/client/data/player_data.dm b/code/modules/client/data/player_data.dm index dfbc4c6faa97..9bb57add9af3 100644 --- a/code/modules/client/data/player_data.dm +++ b/code/modules/client/data/player_data.dm @@ -94,7 +94,7 @@ GLOBAL_LIST_EMPTY(player_data) return var/datum/db_query/lookup lookup = SSdbcore.ExecuteQuery( - "SELECT id, playerid, firstseen FROM [format_table_name("player_lookup")] WHERE ckey = :ckey", + "SELECT id, playerid, firstseen FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE ckey = :ckey", list( "ckey" = ckey ) @@ -111,7 +111,7 @@ GLOBAL_LIST_EMPTY(player_data) lookup_pid = text2num(lookup_pid) qdel(lookup) lookup = SSdbcore.ExecuteQuery( - "SELECT id, flags, datediff(Now(), firstseen), firstseen, misc FROM [format_table_name("player")] WHERE id = :id", + "SELECT id, flags, datediff(Now(), firstseen), firstseen, misc FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] WHERE id = :id", list( "id" = lookup_pid ) @@ -147,7 +147,7 @@ GLOBAL_LIST_EMPTY(player_data) var/datum/db_query/insert if(migrate_firstseen) insert = SSdbcore.ExecuteQuery( - "INSERT INTO [format_table_name("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, :fs, Now(), :misc)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, :fs, Now(), :misc)", list( "flags" = player_flags, "fs" = migrate_firstseen, @@ -157,7 +157,7 @@ GLOBAL_LIST_EMPTY(player_data) player_first_seen = migrate_firstseen else insert = SSdbcore.ExecuteQuery( - "INSERT INTO [format_table_name("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, Now(), Now(), :misc)", + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, Now(), Now(), :misc)", list( "flags" = player_flags, "misc" = safe_json_encode(player_misc), @@ -173,7 +173,7 @@ GLOBAL_LIST_EMPTY(player_data) qdel(insert) // now update lookup insert = SSdbcore.ExecuteQuery( - "UPDATE 
[format_table_name("player_lookup")] SET playerid = :pid WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] SET playerid = :pid WHERE id = :id", list( "id" = lookup_id, "pid" = insert_id @@ -211,7 +211,7 @@ GLOBAL_LIST_EMPTY(player_data) /datum/player_data/proc/_save() qdel(SSdbcore.ExecuteQuery( - "UPDATE [format_table_name("player")] SET flags = :flags, misc = :misc WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] SET flags = :flags, misc = :misc WHERE id = :id", list( "flags" = player_flags, "id" = player_id, @@ -232,7 +232,7 @@ GLOBAL_LIST_EMPTY(player_data) if(!block_on_available()) return FALSE qdel(SSdbcore.ExecuteQuery( - "UPDATE [format_table_name("player")] SET lastseen = Now() WHERE id = :id", + "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] SET lastseen = Now() WHERE id = :id", list( "id" = player_id, ) diff --git a/code/modules/client/game_preferences/game_preferences.dm b/code/modules/client/game_preferences/game_preferences.dm index c54d71592dc4..9265df691072 100644 --- a/code/modules/client/game_preferences/game_preferences.dm +++ b/code/modules/client/game_preferences/game_preferences.dm @@ -369,7 +369,7 @@ usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT `toggles`, `entries`, `misc`, `keybinds`, `version` FROM [format_table_name("game_preferences")] \ + "SELECT `toggles`, `entries`, `misc`, `keybinds`, `version` FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("game_preferences")] \ WHERE `player` = :player", list( "player" = authoritative_player_id, @@ -413,7 +413,7 @@ usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [format_table_name("game_preferences")] \ + "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("game_preferences")] \ (`player`, `toggles`, `entries`, `misc`, `keybinds`, `version`, `modified`) VALUES \ (:player, :toggles, :entries, :misc, :keybinds, :version, Now()) ON DUPLICATE KEY UPDATE \ `player` = VALUES(player), `toggles` = VALUES(toggles), `entries` = VALUES(entries), `misc` = VALUES(misc), \ diff --git a/code/modules/language/language.dm b/code/modules/language/language.dm index be809af403a0..ecb4d2eada44 100644 --- a/code/modules/language/language.dm +++ b/code/modules/language/language.dm @@ -232,7 +232,7 @@ // Language handling. /mob/proc/add_language(var/language) - var/datum/prototype/language/new_language = RSlanguages.fetch(language) || RSlanguages.legacy_resolve_language_name(language) + var/datum/prototype/language/new_language = RSlanguages.fetch_or_defer(language) || RSlanguages.legacy_resolve_language_name(language) if(!istype(new_language) || (new_language in languages)) return 0 @@ -241,12 +241,12 @@ return 1 /mob/proc/remove_language(var/rem_language) - var/datum/prototype/language/L = RSlanguages.fetch(rem_language) + var/datum/prototype/language/L = RSlanguages.fetch_or_defer(rem_language) . 
= (L in languages) languages.Remove(L) /mob/living/remove_language(rem_language) - var/datum/prototype/language/L = RSlanguages.fetch(rem_language) + var/datum/prototype/language/L = RSlanguages.fetch_or_defer(rem_language) if(default_language == L) default_language = null return ..() diff --git a/code/modules/library/lib_machines.dm b/code/modules/library/lib_machines.dm index 3980cecb08d0..639411c75779 100644 --- a/code/modules/library/lib_machines.dm +++ b/code/modules/library/lib_machines.dm @@ -49,7 +49,7 @@ AUTHORTITLECATEGORYSS13BN"} var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT author, title, category, id FROM [format_table_name("library")] WHERE author LIKE '%:author%' AND title LIKE '%:title%'[category == "Any"? "" : " AND category = :category"]", + "SELECT author, title, category, id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("library")] WHERE author LIKE '%:author%' AND title LIKE '%:title%'[category == "Any"? "" : " AND category = :category"]", category == "Any"? list("author" = author, "title" = title) : list("author" = author, "title" = title, "category" = category) ) @@ -229,7 +229,7 @@ "} var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT author, title, category, id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("library")] WHERE author LIKE '%:author%' AND title LIKE '%:title%'[category == "Any"? "" : " AND category = :category"]", + "SELECT author, title, category, id FROM [DB_PREFIX_TABLE_NAME("library")] WHERE author LIKE '%:author%' AND title LIKE '%:title%'[category == "Any"? "" : " AND category = :category"]", category == "Any"? list("author" = author, "title" = title) : list("author" = author, "title" = title, "category" = category) ) @@ -229,7 +229,7 @@
TITLE Date: Sun, 8 Dec 2024 15:38:59 -0500 Subject: [PATCH 12/29] that --- .../subsystem/characters/storage.dm | 12 +++--- code/controllers/subsystem/dbcore/_dbcore.dm | 10 ++--- code/controllers/subsystem/ipintel.dm | 4 +- .../persistence/modules/bulk_entity.dm | 8 ++-- .../persistence/modules/level_objects.dm | 34 ++++++++-------- .../persistence/modules/spatial_metadata.dm | 4 +- .../persistence/modules/string_kkv.dm | 4 +- .../subsystem/persistence/world.dm | 6 +-- code/controllers/subsystem/photography.dm | 8 ++-- code/controllers/subsystem/playtime.dm | 2 +- code/game/machinery/telecomms/blackbox.dm | 4 +- code/game/statistics.dm | 10 ++--- code/modules/admin/DB ban/functions.dm | 16 ++++---- code/modules/admin/IsBanned.dm | 2 +- code/modules/admin/admin_ranks.dm | 2 +- code/modules/admin/banjob.dm | 4 +- .../admin/permissionverbs/permissionedit.dm | 20 +++++----- .../admin/verbs/check_customitem_activity.dm | 4 +- code/modules/client/client_procs.dm | 4 +- code/modules/client/connection.dm | 8 ++-- code/modules/client/data/client_data.dm | 2 +- code/modules/client/data/player_data.dm | 14 +++---- .../game_preferences/game_preferences.dm | 4 +- code/modules/library/lib_machines.dm | 8 ++-- code/modules/mob/new_player/new_player.dm | 6 +-- code/modules/mob/new_player/poll.dm | 40 +++++++++---------- tools/setup_dev_db/invoke.py | 7 +--- tools/setup_dev_db/setup.ps1 | 2 + 28 files changed, 124 insertions(+), 125 deletions(-) diff --git a/code/controllers/subsystem/characters/storage.dm b/code/controllers/subsystem/characters/storage.dm index a8afe9f55fa7..bb84401ee120 100644 --- a/code/controllers/subsystem/characters/storage.dm +++ b/code/controllers/subsystem/characters/storage.dm @@ -33,7 +33,7 @@ // last played is not updated by this proc // everything else can though! var/datum/db_query/update_query = SSdbcore.NewQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("character")] \ + "UPDATE [DB_PREFIX_TABLE_NAME("character")] \ SET[persisting? " last_persisted = NOW()," : ""] canonical_name = :name, persist_data = :data, \ playerid = :pid \ WHERE id = :id", @@ -48,7 +48,7 @@ qdel(update_query) else var/datum/db_query/insert_query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("character")] \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("character")] \ (`created`, `last_played`, `last_persisted`, `playerid`, `canonical_name`, \ `persist_data`, `character_type`) \ VALUES (NOW(), NULL, [persisting? "NOW" : "NULL"], :pid, :name, :data, :type)", @@ -85,7 +85,7 @@ var/datum/db_query/load_query = SSdbcore.NewQuery( "SELECT `id` FROM \ - [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("character")] WHERE playerid = :id AND canonical_name = :name AND character_type = :type", + [DB_PREFIX_TABLE_NAME("character")] WHERE playerid = :id AND canonical_name = :name AND character_type = :type", list( "id" = playerid, "canonical_name" = name, @@ -134,7 +134,7 @@ var/datum/db_query/load_query = SSdbcore.NewQuery( "SELECT `created`, `last_played`, `last_persisted`, `playerid`, `canonical_name`, `persist_data`, `character_type` FROM \ - [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("character")] WHERE id = :id", + [DB_PREFIX_TABLE_NAME("character")] WHERE id = :id", list( "id" = id, ) @@ -190,7 +190,7 @@ . 
= list() var/datum/db_query/iteration_query = SSdbcore.ExecuteQuery( - "SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("character")] WHERE playerid = :id", + "SELECT id FROM [DB_PREFIX_TABLE_NAME("character")] WHERE playerid = :id", list( "id" = playerid ) @@ -223,7 +223,7 @@ // section below can never be allowed to runtime var/datum/db_query/mark_query = SSdbcore.ExecuteQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("character")] SET last_played = NOW() WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("character")] SET last_played = NOW() WHERE id = :id", list( "id" = id ) diff --git a/code/controllers/subsystem/dbcore/_dbcore.dm b/code/controllers/subsystem/dbcore/_dbcore.dm index fc91dc5636ae..7e2b57d43fef 100644 --- a/code/controllers/subsystem/dbcore/_dbcore.dm +++ b/code/controllers/subsystem/dbcore/_dbcore.dm @@ -46,7 +46,7 @@ SUBSYSTEM_DEF(dbcore) //This is as close as we can get to the true round end before Disconnect() without changing where it's called, defeating the reason this is a subsystem if(SSdbcore.Connect()) var/datum/db_query/query_round_shutdown = SSdbcore.NewQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("round")] SET shutdown_datetime = Now() WHERE id = :round_id", + "UPDATE [DB_PREFIX_TABLE_NAME("round")] SET shutdown_datetime = Now() WHERE id = :round_id", list("round_id" = GLOB.round_id) ) query_round_shutdown.Execute() @@ -114,7 +114,7 @@ SUBSYSTEM_DEF(dbcore) if(CONFIG_GET(flag/sql_enabled)) if(Connect()) log_world("Database connection established.") - var/datum/db_query/query_db_version = NewQuery("SELECT major, minor FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("schema_revision")] ORDER BY date DESC LIMIT 1") + var/datum/db_query/query_db_version = NewQuery("SELECT major, minor FROM [DB_PREFIX_TABLE_NAME("schema_revision")] ORDER BY date DESC LIMIT 1") query_db_version.Execute() if(query_db_version.NextRow()) db_major = text2num(query_db_version.item[1]) @@ -135,7 +135,7 @@ SUBSYSTEM_DEF(dbcore) if(!Connect()) return var/datum/db_query/query_round_initialize = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("round")] (initialize_datetime, server_ip, server_port) VALUES (Now(), INET_ATON(:internet_address), :port)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("round")] (initialize_datetime, server_ip, server_port) VALUES (Now(), INET_ATON(:internet_address), :port)", list("internet_address" = world.internet_address || "0", "port" = "[world.port]") ) query_round_initialize.Execute(async = FALSE) @@ -147,7 +147,7 @@ SUBSYSTEM_DEF(dbcore) if(!Connect()) return var/datum/db_query/query_round_start = SSdbcore.NewQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("round")] SET start_datetime = Now() WHERE id = :round_id", + "UPDATE [DB_PREFIX_TABLE_NAME("round")] SET start_datetime = Now() WHERE id = :round_id", list("round_id" = GLOB.round_id) ) query_round_start.Execute() @@ -157,7 +157,7 @@ SUBSYSTEM_DEF(dbcore) if(!Connect()) return var/datum/db_query/query_round_end = SSdbcore.NewQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("round")] SET end_datetime = Now() WHERE id = :round_id", + "UPDATE [DB_PREFIX_TABLE_NAME("round")] SET end_datetime = Now() WHERE id = :round_id", list("round_id" = GLOB.round_id) ) query_round_end.Execute() diff --git a/code/controllers/subsystem/ipintel.dm b/code/controllers/subsystem/ipintel.dm index 57d79422bb9b..e0f1dfd5cbb7 100644 --- a/code/controllers/subsystem/ipintel.dm +++ b/code/controllers/subsystem/ipintel.dm @@ -139,7 +139,7 @@ 
SUBSYSTEM_DEF(ipintel) /datum/controller/subsystem/ipintel/proc/ipintel_cache_fetch_impl(address) PRIVATE_PROC(TRUE) var/datum/db_query/fetch = SSdbcore.NewQuery( - "SELECT date, intel, TIMESTAMPDIFF(MINUTE,date,NOW()) FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ipintel")] WHERE ip = INET_ATON(:ip)", + "SELECT date, intel, TIMESTAMPDIFF(MINUTE,date,NOW()) FROM [DB_PREFIX_TABLE_NAME("ipintel")] WHERE ip = INET_ATON(:ip)", list( "ip" = address, ) @@ -167,7 +167,7 @@ SUBSYSTEM_DEF(ipintel) /datum/controller/subsystem/ipintel/proc/ipintel_cache_store_impl(datum/ipintel/entry) PRIVATE_PROC(TRUE) var/datum/db_query/update = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ipintel")] (ip, intel) VALUES (INET_ATON(:ip), :intel) \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("ipintel")] (ip, intel) VALUES (INET_ATON(:ip), :intel) \ ON DUPLICATE KEY UPDATE intel = VALUES(intel), date = NOW()", list( "ip" = entry.address, diff --git a/code/controllers/subsystem/persistence/modules/bulk_entity.dm b/code/controllers/subsystem/persistence/modules/bulk_entity.dm index 6f48a4ba5f86..a17d389e00be 100644 --- a/code/controllers/subsystem/persistence/modules/bulk_entity.dm +++ b/code/controllers/subsystem/persistence/modules/bulk_entity.dm @@ -19,7 +19,7 @@ for(var/datum/bulk_entity_chunk/chunk as anything in chunks) var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_bulk_entity")] \ (generation, persistence_key, level_id, data, round_id) \ VALUES (:generation, :persistence, :level, :data, :round)", list( @@ -47,7 +47,7 @@ usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] \ + "SELECT data FROM [DB_PREFIX_TABLE_NAME("persistence_bulk_entity")] \ WHERE generation = :generation AND persistence_key = :persistence AND level_id = :level", list( "generation" = generation, @@ -85,7 +85,7 @@ SSdbcore.dangerously_block_on_multiple_unsanitized_queries( list( - "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")]", + "TRUNCATE TABLE [DB_PREFIX_TABLE_NAME("persistence_bulk_entity")]", ), ) @@ -99,7 +99,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] WHERE level_id = :level", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_bulk_entity")] WHERE level_id = :level", list( "level" = level_id, ), diff --git a/code/controllers/subsystem/persistence/modules/level_objects.dm b/code/controllers/subsystem/persistence/modules/level_objects.dm index 379d291d7616..8016594e6314 100644 --- a/code/controllers/subsystem/persistence/modules/level_objects.dm +++ b/code/controllers/subsystem/persistence/modules/level_objects.dm @@ -20,7 +20,7 @@ switch(entity.obj_persist_static_mode) if(OBJ_PERSIST_STATIC_MODE_LEVEL) query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] (generation, object_id, level_id, data) \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_static_level_objects")] (generation, object_id, level_id, data) \ VALUES (:generation, :object_id, :level_id, :data) ON DUPLICATE KEY UPDATE \ data = VALUES(data)", list( @@ -32,7 +32,7 @@ ) if(OBJ_PERSIST_STATIC_MODE_MAP) query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")] (generation, object_id, 
map_id, data) \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_static_map_objects")] (generation, object_id, map_id, data) \ VALUES (:generation, :object_id, :map_id, :data) ON DUPLICATE KEY UPDATE \ data = VALUES(data)", list( @@ -44,7 +44,7 @@ ) if(OBJ_PERSIST_STATIC_MODE_GLOBAL) query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")] (generation, object_id, data) \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_static_global_objects")] (generation, object_id, data) \ VALUES (:generation, :object_id, :data) ON DUPLICATE KEY UPDATE \ data = VALUES(data)", list( @@ -79,7 +79,7 @@ var/datum/db_query/query if(entity.obj_persist_dynamic_id != PERSISTENCE_DYNAMIC_ID_AUTOSET) query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] (id, generation, status, data, prototype_id, level_id, x, y) \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_dynamic_objects")] (id, generation, status, data, prototype_id, level_id, x, y) \ VALUES (:status, :data, :prototype, :level, :x, :y) ON DUPLICATE KEY UPDATE \ x = VALUES(x), y = VALUES(y), data = VALUES(data), prototype = VALUES(prototype), level = VALUES(level), \ status = VALUES(status)", @@ -97,7 +97,7 @@ query.warn_execute() else query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] (status, data, prototype_id, level_id, x, y) \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_dynamic_objects")] (status, data, prototype_id, level_id, x, y) \ VALUES (:status, :data, :prototype, :level, :x, :y)", list( "status" = entity.obj_persist_dynamic_status, @@ -139,7 +139,7 @@ var/datum/db_query/query = SSdbcore.NewQuery( "SELECT object_id, prototype_id, status, data, x, y \ - FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] \ + FROM [DB_PREFIX_TABLE_NAME("persistence_dynamic_objects")] \ WHERE level_id = :level AND generation = :generation", list( "generation" = generation, @@ -200,7 +200,7 @@ switch(entity.obj_persist_static_mode) if(OBJ_PERSIST_STATIC_MODE_GLOBAL) query = SSdbcore.NewQuery( - "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")] \ + "SELECT data FROM [DB_PREFIX_TABLE_NAME("persistence_static_global_objects")] \ WHERE object_id = :object AND generation = :generation", list( "object" = entity.obj_persist_static_id, @@ -209,7 +209,7 @@ ) if(OBJ_PERSIST_STATIC_MODE_LEVEL) query = SSdbcore.NewQuery( - "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] \ + "SELECT data FROM [DB_PREFIX_TABLE_NAME("persistence_static_level_objects")] \ WHERE object_id = :object AND level_id = :level AND generation = :generation", list( "object" = entity.obj_persist_static_id, @@ -220,7 +220,7 @@ bind_id = level_id if(OBJ_PERSIST_STATIC_MODE_MAP) query = SSdbcore.NewQuery( - "SELECT data FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")] \ + "SELECT data FROM [DB_PREFIX_TABLE_NAME("persistence_static_map_objects")] \ WHERE object_id = :object AND map_id = :map AND generation = :generation", list( "object" = entity.obj_persist_static_id, @@ -258,9 +258,9 @@ SSdbcore.dangerously_block_on_multiple_unsanitized_queries( list( - "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")]", - "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")]", - "TRUNCATE TABLE 
[fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")]", + "TRUNCATE TABLE [DB_PREFIX_TABLE_NAME("persistence_static_map_objects")]", + "TRUNCATE TABLE [DB_PREFIX_TABLE_NAME("persistence_static_level_objects")]", + "TRUNCATE TABLE [DB_PREFIX_TABLE_NAME("persistence_static_global_objects")]", ), ) @@ -276,7 +276,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] WHERE level_id = :level", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_static_level_objects")] WHERE level_id = :level", list( "level" = level_id, ), @@ -294,7 +294,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_map_objects")] WHERE map_id = :map", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_static_map_objects")] WHERE map_id = :map", list( "map" = map_id, ), @@ -312,7 +312,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_global_objects")]", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_static_global_objects")]", ) usr = intentionally_allow_admin_proccall @@ -327,7 +327,7 @@ usr = null SSdbcore.RunQuery( - "TRUNCATE TABLE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")]", + "TRUNCATE TABLE [DB_PREFIX_TABLE_NAME("persistence_dynamic_objects")]", ) usr = intentionally_allow_admin_proccall @@ -342,7 +342,7 @@ usr = null SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] WHERE level_id = :level", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_dynamic_objects")] WHERE level_id = :level", list( "level" = level_id, ), diff --git a/code/controllers/subsystem/persistence/modules/spatial_metadata.dm b/code/controllers/subsystem/persistence/modules/spatial_metadata.dm index 4cec788d7f3e..22d8c1a1b4b6 100644 --- a/code/controllers/subsystem/persistence/modules/spatial_metadata.dm +++ b/code/controllers/subsystem/persistence/modules/spatial_metadata.dm @@ -66,7 +66,7 @@ var/datum/db_query/query = SSdbcore.NewQuery( "SELECT TIMESTAMPDIFF(HOUR, saved, NOW()), saved_round_id, data, generation \ - FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_level_metadata")] \ + FROM [DB_PREFIX_TABLE_NAME("persistence_level_metadata")] \ WHERE level_id = :level", list( "level" = level_id, @@ -103,7 +103,7 @@ src.round_id_saved = GLOB.round_number SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_level_metadata")] (saved, saved_round_id, level_id, data, generation) \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_level_metadata")] (saved, saved_round_id, level_id, data, generation) \ VALUES (Now(), :round, :level, :data, :generation) ON DUPLICATE KEY UPDATE \ data = VALUES(data), generation = VALUES(generation), saved_round_id = VALUES(saved_round_id), saved = VALUES(saved)", list( diff --git a/code/controllers/subsystem/persistence/modules/string_kkv.dm b/code/controllers/subsystem/persistence/modules/string_kkv.dm index 0038e510e7b8..66b1ff79468d 100644 --- a/code/controllers/subsystem/persistence/modules/string_kkv.dm +++ b/code/controllers/subsystem/persistence/modules/string_kkv.dm @@ -60,7 +60,7 @@ var/oldusr = usr usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT `value` FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_string_kkv")] WHERE `group` = :group AND `key` = :key", + "SELECT `value` FROM [DB_PREFIX_TABLE_NAME("persistence_string_kkv")] WHERE 
`group` = :group AND `key` = :key", list( "group" = group, "key" = key @@ -79,7 +79,7 @@ var/oldusr = usr usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_string_kkv")] (`group`, `key`, `value`) VALUES (:group, :key, :value) ON DUPLICATE KEY UPDATE `value` = VALUES(`value`), `modified` = Now(), `revision` = `revision` + 1", + "INSERT INTO [DB_PREFIX_TABLE_NAME("persistence_string_kkv")] (`group`, `key`, `value`) VALUES (:group, :key, :value) ON DUPLICATE KEY UPDATE `value` = VALUES(`value`), `modified` = Now(), `revision` = `revision` + 1", list( "group" = group, "key" = key, diff --git a/code/controllers/subsystem/persistence/world.dm b/code/controllers/subsystem/persistence/world.dm index 76a69b5ee881..e1f82acdc8dd 100644 --- a/code/controllers/subsystem/persistence/world.dm +++ b/code/controllers/subsystem/persistence/world.dm @@ -234,21 +234,21 @@ for(var/datum/map_level_persistence/level_metadata as anything in ordered_level_metadata) SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_bulk_entity")] WHERE level_id = :level, generation != :generation", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_bulk_entity")] WHERE level_id = :level, generation != :generation", list( "level" = level_metadata.level_id, "generation" = level_metadata.generation, ), ) SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_static_level_objects")] WHERE level_id = :level, generation != :generation", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_static_level_objects")] WHERE level_id = :level, generation != :generation", list( "level" = level_metadata.level_id, "generation" = level_metadata.generation, ), ) SSdbcore.RunQuery( - "DELETE FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("persistence_dynamic_objects")] WHERE level_id = :level, generation != :generation", + "DELETE FROM [DB_PREFIX_TABLE_NAME("persistence_dynamic_objects")] WHERE level_id = :level, generation != :generation", list( "level" = level_metadata.level_id, "generation" = level_metadata.generation, diff --git a/code/controllers/subsystem/photography.dm b/code/controllers/subsystem/photography.dm index c8d9cfe89da4..37f23abe4c9a 100644 --- a/code/controllers/subsystem/photography.dm +++ b/code/controllers/subsystem/photography.dm @@ -115,7 +115,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" - INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("pictures")] + INSERT INTO [DB_PREFIX_TABLE_NAME("pictures")] (`hash`, `width`, `height`) VALUES (:hash, :width, :height) "}, @@ -140,7 +140,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" SELECT `width`, `height` - FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("pictures")] + FROM [DB_PREFIX_TABLE_NAME("pictures")] WHERE `hash` = :hash "}, list( @@ -195,7 +195,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" - INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("photographs")] + INSERT INTO [DB_PREFIX_TABLE_NAME("photographs")] (`picture`, `scene`, `desc`) VALUES (:hash, :scene, :desc) "}, @@ -220,7 +220,7 @@ SUBSYSTEM_DEF(photography) var/datum/db_query/query = SSdbcore.NewQuery( {" SELECT `picture`, `scene`, `desc` - FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("photographs")] + FROM [DB_PREFIX_TABLE_NAME("photographs")] WHERE `id` = :id "}, list( diff --git a/code/controllers/subsystem/playtime.dm b/code/controllers/subsystem/playtime.dm 
index b7dca61f319c..5f1d1c2ec220 100644 --- a/code/controllers/subsystem/playtime.dm +++ b/code/controllers/subsystem/playtime.dm @@ -46,7 +46,7 @@ SUBSYSTEM_DEF(playtime) "player" = playerid ) C.persistent.playtime_queued = list() - SSdbcore.MassInsertLegacy(fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("playtime"), built, duplicate_key = "ON DUPLICATE KEY UPDATE minutes = minutes + VALUES(minutes)") + SSdbcore.MassInsertLegacy(DB_PREFIX_TABLE_NAME("playtime"), built, duplicate_key = "ON DUPLICATE KEY UPDATE minutes = minutes + VALUES(minutes)") /** * returns a list of playtime roles diff --git a/code/game/machinery/telecomms/blackbox.dm b/code/game/machinery/telecomms/blackbox.dm index 24108ea87719..fb9cef3c7c4f 100644 --- a/code/game/machinery/telecomms/blackbox.dm +++ b/code/game/machinery/telecomms/blackbox.dm @@ -182,7 +182,7 @@ var/obj/machinery/blackbox_recorder/blackbox var/round_id var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT MAX(round_id) AS round_id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")]", + "SELECT MAX(round_id) AS round_id FROM [DB_PREFIX_TABLE_NAME("feedback")]", list() ) @@ -195,7 +195,7 @@ var/obj/machinery/blackbox_recorder/blackbox for(var/datum/feedback_variable/FV in feedback) SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")] VALUES (null, Now(), :round_id, :variable, :value, :details)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("feedback")] VALUES (null, Now(), :round_id, :variable, :value, :details)", list( "round_id" = "[round_id]", "variable" = "[FV.get_variable()]", diff --git a/code/game/statistics.dm b/code/game/statistics.dm index 647965dc8964..2a67a26b9058 100644 --- a/code/game/statistics.dm +++ b/code/game/statistics.dm @@ -9,7 +9,7 @@ log_game("SQL ERROR during population polling. Failed to connect.") else var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("population")] (playercount, admincount, time) VALUES (:pc, :ac, NOW())", + "INSERT INTO [DB_PREFIX_TABLE_NAME("population")] (playercount, admincount, time) VALUES (:pc, :ac, NOW())", list( "pc" = sanitizeSQL(playercount), "ac" = sanitizeSQL(admincount), @@ -48,7 +48,7 @@ log_game("SQL ERROR during death reporting. Failed to connect.") else var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ (:name, :key, :job, :special, :pod, :time, :laname, :lakey, :gender, :bruteloss, :fireloss, :brainloss, :oxyloss, :coord)", list( "name" = sqlname, @@ -98,7 +98,7 @@ log_game("SQL ERROR during death reporting. 
Failed to connect.") else var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("death")] (name, byondkey, job, special, pod, tod, laname, lakey, gender, bruteloss, fireloss, brainloss, oxyloss, coord) VALUES \ (:name, :key, :job, :special, :pod, :time, :laname, :lakey, :geender, :bruteloss, :fireloss, :brainloss, :oxyloss, :coord)", list( "name" = sqlname, @@ -146,7 +146,7 @@ else var/datum/db_query/max_query = SSdbcore.RunQuery( - "SELECT MAX(roundid) AS max_round_id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")]", + "SELECT MAX(roundid) AS max_round_id FROM [DB_PREFIX_TABLE_NAME("feedback")]", list(), ) @@ -168,7 +168,7 @@ var/value = item.get_value() var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("feedback")] (id, roundid, time, variable, value) VALUES (null, :rid, Now(), :var, :val)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("feedback")] (id, roundid, time, variable, value) VALUES (null, :rid, Now(), :var, :val)", list( "rid" = newroundid, "var" = sanitizeSQL(variable), diff --git a/code/modules/admin/DB ban/functions.dm b/code/modules/admin/DB ban/functions.dm index d871a341cd8c..e332f48c88f2 100644 --- a/code/modules/admin/DB ban/functions.dm +++ b/code/modules/admin/DB ban/functions.dm @@ -72,7 +72,7 @@ computerid = "" if(isnull(ip)) ip = "" - var/sql = "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] \ + var/sql = "INSERT INTO [DB_PREFIX_TABLE_NAME("ban")] \ (`id`,`bantime`,`serverip`,`bantype`,`reason`,`job`,`duration`,`rounds`,`expiration_time`,`ckey`,`computerid`,`ip`,`a_ckey`,`a_computerid`,`a_ip`,`who`,`adminwho`,`edits`,`unbanned`,`unbanned_datetime`,`unbanned_ckey`,`unbanned_computerid`,`unbanned_ip`) \ VALUES (null, Now(), :serverip, :type, :reason, :job, :duration, :rounds, Now() + INTERVAL :duration MINUTE, :ckey, :cid, :ip, :a_ckey, :a_cid, :a_ip, :who, :adminwho, '', null, null, null, null, null)" SSdbcore.RunQuery( @@ -133,7 +133,7 @@ else bantype_sql = "bantype = '[bantype_str]'" - var/sql = "SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE ckey = :ckey AND [bantype_sql] AND (unbanned is null OR unbanned = false)" + var/sql = "SELECT id FROM [DB_PREFIX_TABLE_NAME("ban")] WHERE ckey = :ckey AND [bantype_sql] AND (unbanned is null OR unbanned = false)" if(job) sql += " AND job = :job" @@ -183,7 +183,7 @@ return var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, duration, reason FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE id = :id", + "SELECT ckey, duration, reason FROM [DB_PREFIX_TABLE_NAME("ban")] WHERE id = :id", list( "id" = banid ) @@ -215,7 +215,7 @@ return SSdbcore.RunQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] SET reason = :reason, \ + "UPDATE [DB_PREFIX_TABLE_NAME("ban")] SET reason = :reason, \ edits = CONCAT(edits, '- :ckey changed ban reason from \\\":oldreason\\\" to \\\":reason\\\"
') \ WHERE id = :id", list( @@ -233,7 +233,7 @@ to_chat(usr, "Cancelled") return SSdbcore.RunQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] SET duration = :duration, \ + "UPDATE [DB_PREFIX_TABLE_NAME("ban")] SET duration = :duration, \ edits = CONCAT(edits, '- :ckey changed ban duration from :oldduration to :duration
'), expiration_time = DATE_ADD(bantime, INTERVAL :duration MINUTE) \ WHERE id = :id", list( @@ -264,7 +264,7 @@ var/pckey var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE id = :id", + "SELECT ckey FROM [DB_PREFIX_TABLE_NAME("ban")] WHERE id = :id", list( "id" = id ) @@ -292,7 +292,7 @@ message_admins("[key_name_admin(usr)] has lifted [pckey]'s ban.",1) SSdbcore.RunQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] SET unbanned = 1, unbanned_datetime = Now(), unbanned_ckey = :ckey, unbanned_computerid = :cid, unbanned_ip = :ip WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("ban")] SET unbanned = 1, unbanned_datetime = Now(), unbanned_ckey = :ckey, unbanned_computerid = :cid, unbanned_ip = :ip WHERE id = :id", list( "ckey" = unban_ckey, "cid" = unban_computerid, @@ -458,7 +458,7 @@ var/datum/db_query/select_query = SSdbcore.RunQuery( "SELECT id, bantime, bantype, reason, job, duration, expiration_time, ckey, a_ckey, unbanned, unbanned_ckey, unbanned_datetime, edits, ip, computerid \ - FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] \ + FROM [DB_PREFIX_TABLE_NAME("ban")] \ WHERE 1 [playersearch] [adminsearch] [ipsearch] [cidsearch] [bantypesearch] ORDER BY bantime DESC LIMIT 100", search_params ) diff --git a/code/modules/admin/IsBanned.dm b/code/modules/admin/IsBanned.dm index 58c936a1f098..42bfc1dc3650 100644 --- a/code/modules/admin/IsBanned.dm +++ b/code/modules/admin/IsBanned.dm @@ -88,7 +88,7 @@ cidquery = " OR computerid = ':cid' " var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, ip, computerid, a_ckey, reason, expiration_time, duration, bantime, bantype FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE (ckey = :ckey [ipquery] [cidquery]) AND (bantype = 'PERMABAN' OR (bantype = 'TEMPBAN' AND expiration_time > Now())) AND isnull(unbanned)", + "SELECT ckey, ip, computerid, a_ckey, reason, expiration_time, duration, bantime, bantype FROM [DB_PREFIX_TABLE_NAME("ban")] WHERE (ckey = :ckey [ipquery] [cidquery]) AND (bantype = 'PERMABAN' OR (bantype = 'TEMPBAN' AND expiration_time > Now())) AND isnull(unbanned)", list( "ckey" = ckeytext, "ip" = address, diff --git a/code/modules/admin/admin_ranks.dm b/code/modules/admin/admin_ranks.dm index 25664de498ec..58583b9bad28 100644 --- a/code/modules/admin/admin_ranks.dm +++ b/code/modules/admin/admin_ranks.dm @@ -113,7 +113,7 @@ var/list/admin_ranks = list() //list of all ranks with associated rights return var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, rank, level, flags FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")]", + "SELECT ckey, rank, level, flags FROM [DB_PREFIX_TABLE_NAME("admin")]", list() ) diff --git a/code/modules/admin/banjob.dm b/code/modules/admin/banjob.dm index 717b9156a06c..65148f569a98 100644 --- a/code/modules/admin/banjob.dm +++ b/code/modules/admin/banjob.dm @@ -74,7 +74,7 @@ DEBUG //Job permabans var/datum/db_query/query = SSdbcore.RunQuery( - "SELECT ckey, job FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE bantype = 'JOB_PERMABAN' AND isnull(unbanned)", + "SELECT ckey, job FROM [DB_PREFIX_TABLE_NAME("ban")] WHERE bantype = 'JOB_PERMABAN' AND isnull(unbanned)", list() ) @@ -86,7 +86,7 @@ DEBUG //Job tempbans var/datum/db_query/query1 = SSdbcore.RunQuery( - "SELECT ckey, job FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("ban")] WHERE bantype = 'JOB_TEMPBAN' AND isnull(unbanned) AND expiration_time > Now()", + "SELECT ckey, job FROM [DB_PREFIX_TABLE_NAME("ban")] WHERE 
bantype = 'JOB_TEMPBAN' AND isnull(unbanned) AND expiration_time > Now()", list() ) diff --git a/code/modules/admin/permissionverbs/permissionedit.dm b/code/modules/admin/permissionverbs/permissionedit.dm index eae21a1f2650..d3a701a40555 100644 --- a/code/modules/admin/permissionverbs/permissionedit.dm +++ b/code/modules/admin/permissionverbs/permissionedit.dm @@ -78,7 +78,7 @@ return var/datum/db_query/select_query = SSdbcore.RunQuery( - "SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] WHERE ckey = :ckey", + "SELECT id FROM [DB_PREFIX_TABLE_NAME("admin")] WHERE ckey = :ckey", list( "ckey" = adm_ckey ) @@ -92,14 +92,14 @@ if(new_admin) SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] (id, ckey, rank, level, flags) VALUES (null, :ckey, :rank, -1, 0)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("admin")] (id, ckey, rank, level, flags) VALUES (null, :ckey, :rank, -1, 0)", list( "ckey" = adm_ckey, "rank" = new_rank ) ) SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, NOW(), :ckey, :ip, :logstr)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, NOW(), :ckey, :ip, :logstr)", list( "ckey" = sanitizeSQL(usr.ckey), "ip" = sanitizeSQL(usr.client.address), @@ -110,14 +110,14 @@ else if(!isnull(admin_id) && isnum(admin_id)) SSdbcore.RunQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] SET rank = :rank WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("admin")] SET rank = :rank WHERE id = :id", list( "rank" = new_rank, "id" = admin_id ) ) SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", list( "ckey" = usr.ckey, "addr" = usr.client.address, @@ -155,7 +155,7 @@ return var/datum/db_query/select_query = SSdbcore.RunQuery( - "SELECT id, flags FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] WHERE ckey = :ckey", + "SELECT id, flags FROM [DB_PREFIX_TABLE_NAME("admin")] WHERE ckey = :ckey", list( "ckey" = adm_ckey ) @@ -172,14 +172,14 @@ if(admin_rights & new_permission) //This admin already has this permission, so we are removing it. SSdbcore.RunQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] SET flags = :flags WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("admin")] SET flags = :flags WHERE id = :id", list( "flags" = admin_rights & ~new_permission, "id" = admin_id ) ) SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", list( "ckey" = usr.ckey, "addr" = usr.client.address, @@ -189,14 +189,14 @@ to_chat(usr, "Permission removed.") else //This admin doesn't have this permission, so we are adding it. 
SSdbcore.RunQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin")] SET flags = :flags WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("admin")] SET flags = :flags WHERE id = :id", list( "flags" = admin_rights | new_permission, "id" = admin_id ) ) SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("admin_log")] (id, datetime, adminckey, adminip, log) VALUES (NULL, Now(), :ckey, :addr, :log)", list( "ckey" = usr.ckey, "addr" = usr.client.address, diff --git a/code/modules/admin/verbs/check_customitem_activity.dm b/code/modules/admin/verbs/check_customitem_activity.dm index efc9a005446c..6b329a5fadb0 100644 --- a/code/modules/admin/verbs/check_customitem_activity.dm +++ b/code/modules/admin/verbs/check_customitem_activity.dm @@ -55,7 +55,7 @@ var/inactive_keys = "None
" var/list/inactive_ckeys = list() if(ckeys_with_customitems.len) var/datum/db_query/query_inactive = SSdbcore.RunQuery( - "SELECT ckey, lastseen FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE datediff(Now(), lastseen) > 60", + "SELECT ckey, lastseen FROM [DB_PREFIX_TABLE_NAME("player_lookup")] WHERE datediff(Now(), lastseen) > 60", list() ) while(query_inactive.NextRow()) @@ -69,7 +69,7 @@ var/inactive_keys = "None
" if(ckeys_with_customitems.len) for(var/cur_ckey in ckeys_with_customitems) var/datum/db_query/query_inactive = SSdbcore.RunQuery( - "SELECT ckey FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE ckey = :ckey", + "SELECT ckey FROM [DB_PREFIX_TABLE_NAME("player_lookup")] WHERE ckey = :ckey", list( "ckey" = cur_ckey ) diff --git a/code/modules/client/client_procs.dm b/code/modules/client/client_procs.dm index dfbca7ea652a..8d20b0cbcad9 100644 --- a/code/modules/client/client_procs.dm +++ b/code/modules/client/client_procs.dm @@ -467,7 +467,7 @@ var/sql_system_ckey = sanitizeSQL(system_ckey) var/sql_ckey = sanitizeSQL(ckey) //check to see if we noted them in the last day. - var/datum/DBQuery/query_get_notes = SSdbcore.NewQuery("SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("messages")] WHERE type = 'note' AND targetckey = '[sql_ckey]' AND adminckey = '[sql_system_ckey]' AND timestamp + INTERVAL 1 DAY < NOW() AND deleted = 0 AND expire_timestamp > NOW()") + var/datum/DBQuery/query_get_notes = SSdbcore.NewQuery("SELECT id FROM [DB_PREFIX_TABLE_NAME("messages")] WHERE type = 'note' AND targetckey = '[sql_ckey]' AND adminckey = '[sql_system_ckey]' AND timestamp + INTERVAL 1 DAY < NOW() AND deleted = 0 AND expire_timestamp > NOW()") if(!query_get_notes.Execute()) qdel(query_get_notes) return @@ -476,7 +476,7 @@ return qdel(query_get_notes) //regardless of above, make sure their last note is not from us, as no point in repeating the same note over and over. - query_get_notes = SSdbcore.NewQuery("SELECT adminckey FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("messages")] WHERE targetckey = '[sql_ckey]' AND deleted = 0 AND expire_timestamp > NOW() ORDER BY timestamp DESC LIMIT 1") + query_get_notes = SSdbcore.NewQuery("SELECT adminckey FROM [DB_PREFIX_TABLE_NAME("messages")] WHERE targetckey = '[sql_ckey]' AND deleted = 0 AND expire_timestamp > NOW() ORDER BY timestamp DESC LIMIT 1") if(!query_get_notes.Execute()) qdel(query_get_notes) return diff --git a/code/modules/client/connection.dm b/code/modules/client/connection.dm index 0d90f1bcfbf7..8f52aebf6ecb 100644 --- a/code/modules/client/connection.dm +++ b/code/modules/client/connection.dm @@ -7,7 +7,7 @@ return var/datum/db_query/lookup = SSdbcore.NewQuery( - "SELECT id FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE ckey = :ckey", + "SELECT id FROM [DB_PREFIX_TABLE_NAME("player_lookup")] WHERE ckey = :ckey", list( "ckey" = ckey, ) @@ -20,7 +20,7 @@ if(sql_id) var/datum/db_query/update = SSdbcore.NewQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] SET lastseen = Now(), ip = :ip, computerid = :computerid, lastadminrank = :lastadminrank WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("player_lookup")] SET lastseen = Now(), ip = :ip, computerid = :computerid, lastadminrank = :lastadminrank WHERE id = :id", list( "ip" = address, "computerid" = computer_id, @@ -33,7 +33,7 @@ else //New player!! 
Need to insert all the stuff var/datum/db_query/insert = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] (id, ckey, firstseen, lastseen, ip, computerid, lastadminrank) VALUES (null, :ckey, Now(), Now(), :ip, :cid, :rank)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("player_lookup")] (id, ckey, firstseen, lastseen, ip, computerid, lastadminrank) VALUES (null, :ckey, Now(), Now(), :ip, :cid, :rank)", list( "ckey" = ckey, "ip" = address, @@ -55,7 +55,7 @@ return SSdbcore.RunQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("connection_log")] (id, datetime, serverip, ckey, ip, computerid) VALUES (null, Now(), :server, :ckey, :ip, :cid)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("connection_log")] (id, datetime, serverip, ckey, ip, computerid) VALUES (null, Now(), :server, :ckey, :ip, :cid)", list( "server" = "[world.internet_address]:[world.port]", "ckey" = ckey, diff --git a/code/modules/client/data/client_data.dm b/code/modules/client/data/client_data.dm index 1e4c8499a7fa..b1f70aec30eb 100644 --- a/code/modules/client/data/client_data.dm +++ b/code/modules/client/data/client_data.dm @@ -136,7 +136,7 @@ GLOBAL_LIST_EMPTY(client_data) playtime_mutex = FALSE return var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT `roleid`, `minutes` FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("playtime")] WHERE player = :player", + "SELECT `roleid`, `minutes` FROM [DB_PREFIX_TABLE_NAME("playtime")] WHERE player = :player", list( "player" = player_id, ) diff --git a/code/modules/client/data/player_data.dm b/code/modules/client/data/player_data.dm index 9bb57add9af3..591ed3bb33b5 100644 --- a/code/modules/client/data/player_data.dm +++ b/code/modules/client/data/player_data.dm @@ -94,7 +94,7 @@ GLOBAL_LIST_EMPTY(player_data) return var/datum/db_query/lookup lookup = SSdbcore.ExecuteQuery( - "SELECT id, playerid, firstseen FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] WHERE ckey = :ckey", + "SELECT id, playerid, firstseen FROM [DB_PREFIX_TABLE_NAME("player_lookup")] WHERE ckey = :ckey", list( "ckey" = ckey ) @@ -111,7 +111,7 @@ GLOBAL_LIST_EMPTY(player_data) lookup_pid = text2num(lookup_pid) qdel(lookup) lookup = SSdbcore.ExecuteQuery( - "SELECT id, flags, datediff(Now(), firstseen), firstseen, misc FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] WHERE id = :id", + "SELECT id, flags, datediff(Now(), firstseen), firstseen, misc FROM [DB_PREFIX_TABLE_NAME("player")] WHERE id = :id", list( "id" = lookup_pid ) @@ -147,7 +147,7 @@ GLOBAL_LIST_EMPTY(player_data) var/datum/db_query/insert if(migrate_firstseen) insert = SSdbcore.ExecuteQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, :fs, Now(), :misc)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, :fs, Now(), :misc)", list( "flags" = player_flags, "fs" = migrate_firstseen, @@ -157,7 +157,7 @@ GLOBAL_LIST_EMPTY(player_data) player_first_seen = migrate_firstseen else insert = SSdbcore.ExecuteQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, Now(), Now(), :misc)", + "INSERT INTO [DB_PREFIX_TABLE_NAME("player")] (flags, firstseen, lastseen, misc) VALUES (:flags, Now(), Now(), :misc)", list( "flags" = player_flags, "misc" = safe_json_encode(player_misc), @@ -173,7 +173,7 @@ GLOBAL_LIST_EMPTY(player_data) qdel(insert) // now update lookup insert = SSdbcore.ExecuteQuery( - "UPDATE 
[fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player_lookup")] SET playerid = :pid WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("player_lookup")] SET playerid = :pid WHERE id = :id", list( "id" = lookup_id, "pid" = insert_id @@ -211,7 +211,7 @@ GLOBAL_LIST_EMPTY(player_data) /datum/player_data/proc/_save() qdel(SSdbcore.ExecuteQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] SET flags = :flags, misc = :misc WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("player")] SET flags = :flags, misc = :misc WHERE id = :id", list( "flags" = player_flags, "id" = player_id, @@ -232,7 +232,7 @@ GLOBAL_LIST_EMPTY(player_data) if(!block_on_available()) return FALSE qdel(SSdbcore.ExecuteQuery( - "UPDATE [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("player")] SET lastseen = Now() WHERE id = :id", + "UPDATE [DB_PREFIX_TABLE_NAME("player")] SET lastseen = Now() WHERE id = :id", list( "id" = player_id, ) diff --git a/code/modules/client/game_preferences/game_preferences.dm b/code/modules/client/game_preferences/game_preferences.dm index 9265df691072..667a98c8d4ed 100644 --- a/code/modules/client/game_preferences/game_preferences.dm +++ b/code/modules/client/game_preferences/game_preferences.dm @@ -369,7 +369,7 @@ usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "SELECT `toggles`, `entries`, `misc`, `keybinds`, `version` FROM [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("game_preferences")] \ + "SELECT `toggles`, `entries`, `misc`, `keybinds`, `version` FROM [DB_PREFIX_TABLE_NAME("game_preferences")] \ WHERE `player` = :player", list( "player" = authoritative_player_id, @@ -413,7 +413,7 @@ usr = null var/datum/db_query/query = SSdbcore.NewQuery( - "INSERT INTO [fetch_local_or_tDB_PREFIX_TABLE_NAMEhrow("game_preferences")] \ + "INSERT INTO [DB_PREFIX_TABLE_NAME("game_preferences")] \ (`player`, `toggles`, `entries`, `misc`, `keybinds`, `version`, `modified`) VALUES \ (:player, :toggles, :entries, :misc, :keybinds, :version, Now()) ON DUPLICATE KEY UPDATE \ `player` = VALUES(player), `toggles` = VALUES(toggles), `entries` = VALUES(entries), `misc` = VALUES(misc), \ diff --git a/code/modules/library/lib_machines.dm b/code/modules/library/lib_machines.dm index 639411c75779..c9b60e2e4b91 100644 --- a/code/modules/library/lib_machines.dm +++ b/code/modules/library/lib_machines.dm @@ -49,7 +49,7 @@
TITLE Date: Mon, 9 Dec 2024 02:39:19 -0500 Subject: [PATCH 13/29] Sigh --- tools/setup_dev_db/setup.ps1 | 35 +++++++++++++++++++++++++++++------ 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/tools/setup_dev_db/setup.ps1 b/tools/setup_dev_db/setup.ps1 index c2cfc17e3c21..d667844254eb 100644 --- a/tools/setup_dev_db/setup.ps1 +++ b/tools/setup_dev_db/setup.ps1 @@ -8,7 +8,7 @@ function ExtractVersion { throw "Couldn't find value for $Key in $Path" } -function ResolveMariadbURL { +function ResolveMariaDBURL { param([string] $Version) return "https://mirror.rackspace.com/mariadb//mariadb-$Version/winx64-packages/mariadb-$Version-winx64.zip" } @@ -18,26 +18,49 @@ function ResolveFlywayURL { return "https://download.red-gate.com/maven/release/com/redgate/flyway/flyway-commandline/$Version/flyway-commandline-$Version-windows-x64.zip" } -function +# Path is a /path/to/folder +# Archive will be temporarily downloaded as /path/to/folder.zip +function PullURLAndUnpack { + param([string] $URL, [string] $Path) + Remove-Item -Path "$Path.zip" + Invoke-WebRequest ` + "$URL" ` + -OutFile "$Path.zip" ` + -ErrorAction Stop + Expand-Archive "$Path.zip" -DestinationPath "$Path" +} $ToolRoot = Split-Path $script:MyInvocation.MyCommand.Path $MARIADB_VERSION = ExtractVersion -Path "$ToolRoot/../../dependencies.sh" -Key "MARIADB_VERSION" $FLYWAY_VERSION = ExtractVersion -Path "$ToolRoot/../../dependencies.sh" -Key "FLYWAY_VERSION" -$MYSQLD_PATH = "$ToolRoot/" -$FLYWAY_PATH = "" +$MARIADB_FOLDER = "$ToolRoot/.cache/mariadb/$MARIADB_VERSION" +$FLYWAY_FOLDER = "$ToolRoot/.cache/flyway/$FLYWAY_VERSION" + +$MYSQLD_PATH = "$MARIADB_FOLDER/" +$FLYWAY_PATH = "$FLYWAY_FOLDER/" # GET mariadb IF NOT EXISTS if(!(Test-Path $MYSQLD_PATH -PathType Leaf)) { - + $MARIADB_URL = ResolveMariaDBURL $MARIADB_VERSION + PullURLAndUnpack $MARIADB_URL $MARIADB_FOLDER + if(!(Test-Path $MYSQLD_PATH -PathType Leaf)) { + Write-Error "Failed to find '$MYSQLD_PATH' after unpacking." + exit 1 + } } # GET flyway IF NOT EXISTS if(!(Test-Path $FLYWAY_PATH -PathType Leaf)) { - + $FLYWAY_URL = ResolveMariaDBURL $FLYWAY_VERSION + PullURLAndUnpack $FLYWAY_URL $FLYWAY_FOLDER + if(!(Test-Path $FLYWAY_PATH -PathType Leaf)) { + Write-Error "Failed to find '$FLYWAY_PATH' after unpacking." + exit 1 + } } # run database From b306baefc3396825397c196b52ee34e9f759b5d0 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 03:02:26 -0500 Subject: [PATCH 14/29] that --- dependencies.sh | 3 +++ tools/README.md | 2 ++ tools/setup_dev_db/invoke.py | 14 +++++++++++--- tools/setup_dev_db/setup.ps1 | 16 +++++++++++----- 4 files changed, 27 insertions(+), 8 deletions(-) diff --git a/dependencies.sh b/dependencies.sh index aae7799e72e2..3979f19b924a 100755 --- a/dependencies.sh +++ b/dependencies.sh @@ -26,3 +26,6 @@ export PYTHON_VERSION=3.9.0 # MariaDB version export MARIADB_VERSION=11.4.4 + +# Flyway version +export FLYWAY_VERSION=11.0.1 diff --git a/tools/README.md b/tools/README.md index e6d5e49456b3..801d400e9765 100644 --- a/tools/README.md +++ b/tools/README.md @@ -8,6 +8,8 @@ All versions are grabbed from ../dependencies.sh - /build: The build system - /ci: Scripts used for our CI suite - /common: Common functions/things to include. +- /setup_dev_db: Automatic way to setup and provision a development database, stored in the local data directory. +- /tgs4_scripts: Scripts for tgstation-server to orchestrate server compiles/deploys/etc. 
# Todo diff --git a/tools/setup_dev_db/invoke.py b/tools/setup_dev_db/invoke.py index 7897408b942a..3221d3215347 100644 --- a/tools/setup_dev_db/invoke.py +++ b/tools/setup_dev_db/invoke.py @@ -7,10 +7,10 @@ def log_message(source: str, string: str): print('%s: %s' % (source, string)) -if __name__ == "main": +if __name__ == "__main__": argparser = argparse.ArgumentParser( prog="setup.ps1", - usage="setup.ps1 --daemon [path-to-mysqld] --flyway [path-to-flyway] --migrations [path-to-migrations-folder]", + usage="setup.ps1 --port [port] --dbname [dbname]", ) argparser.add_argument("--daemon", type=str) argparser.add_argument("--flyway", type=str) @@ -18,7 +18,14 @@ def log_message(source: str, string: str): argparser.add_argument("--port", required=False, default=3306, type=int) argparser.add_argument("--dbname", required=False, default="ss13", type=str) - parsed_args = argparser.parse_args(sys.argv) + # we slice it, as being invoked from bootstrap consumes this script's file path as the first arg + effective_args: list[str] = sys.argv[1:] + + if len(effective_args) == 0: + argparser.print_help() + exit(1) + + parsed_args = argparser.parse_args(effective_args) PATH_TO_MYSQLD: str = parsed_args.daemon PATH_TO_FLYWAY: str = parsed_args.flyway @@ -27,6 +34,7 @@ def log_message(source: str, string: str): USE_DATABASE: str = parsed_args.dbname log_message("setup_dev_db", "WARNING: This is a very, very lazy Python app! Logs are not necessarily in order of occurence; the script is just a very, very dumb while(True) loop that is just jank enough to work. Do not use this for production") + log_message("setup_dev_db", 'Using port %d and setting up on database %s. Use --port and --dbname to override!' % (USE_PORT, USE_DATABASE)) log_message("setup_dev_db", "Starting processes...") mysqld: subprocess.Popen | None = subprocess.Popen( diff --git a/tools/setup_dev_db/setup.ps1 b/tools/setup_dev_db/setup.ps1 index d667844254eb..bbb9484c36a1 100644 --- a/tools/setup_dev_db/setup.ps1 +++ b/tools/setup_dev_db/setup.ps1 @@ -22,11 +22,17 @@ function ResolveFlywayURL { # Archive will be temporarily downloaded as /path/to/folder.zip function PullURLAndUnpack { param([string] $URL, [string] $Path) - Remove-Item -Path "$Path.zip" + if(Test-Path "$Path.zip" -PathType Leaf) { + Remove-Item -Path "$Path.zip" + } + Write-Output "Pulling $URL to $Path.zip" + $ProgressPreference = 'SilentlyContinue' Invoke-WebRequest ` "$URL" ` -OutFile "$Path.zip" ` -ErrorAction Stop + $ProgressPreference = 'Continue' + Write-Output "Expanding $Path.zip to $Path" Expand-Archive "$Path.zip" -DestinationPath "$Path" } @@ -38,8 +44,8 @@ $FLYWAY_VERSION = ExtractVersion -Path "$ToolRoot/../../dependencies.sh" -Key "F $MARIADB_FOLDER = "$ToolRoot/.cache/mariadb/$MARIADB_VERSION" $FLYWAY_FOLDER = "$ToolRoot/.cache/flyway/$FLYWAY_VERSION" -$MYSQLD_PATH = "$MARIADB_FOLDER/" -$FLYWAY_PATH = "$FLYWAY_FOLDER/" +$MYSQLD_PATH = "$MARIADB_FOLDER/mariadb-$MARIADB_VERSION-winx64/bin/mariadbd.exe" +$FLYWAY_PATH = "$FLYWAY_FOLDER/flyway-$FLYWAY_VERSION/flyway.cmd" # GET mariadb IF NOT EXISTS @@ -55,7 +61,7 @@ if(!(Test-Path $MYSQLD_PATH -PathType Leaf)) { # GET flyway IF NOT EXISTS if(!(Test-Path $FLYWAY_PATH -PathType Leaf)) { - $FLYWAY_URL = ResolveMariaDBURL $FLYWAY_VERSION + $FLYWAY_URL = ResolveFlywayURL $FLYWAY_VERSION PullURLAndUnpack $FLYWAY_URL $FLYWAY_FOLDER if(!(Test-Path $FLYWAY_PATH -PathType Leaf)) { Write-Error "Failed to find '$FLYWAY_PATH' after unpacking." 
@@ -65,4 +71,4 @@ if(!(Test-Path $FLYWAY_PATH -PathType Leaf)) { # run database -& $ToolRoot/../bootstrap/python._ps1 $ToolRoot/invoke.py $args +& $ToolRoot/../bootstrap/python_.ps1 $ToolRoot/invoke.py --daemon $MYSQLD_PATH --flyway $FLYWAY_PATH --migrations "../../sql/migrations" $args From e67d99327d0c38ae284d1aaf374e006c6104a9c8 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 03:10:19 -0500 Subject: [PATCH 15/29] fix --- dependencies.sh | 2 +- tools/requirements.txt | 6 +++--- tools/setup_dev_db/invoke.py | 17 +++++++++++------ 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/dependencies.sh b/dependencies.sh index 3979f19b924a..eb5b3f04e766 100755 --- a/dependencies.sh +++ b/dependencies.sh @@ -22,7 +22,7 @@ export NODE_VERSION_COMPAT=20.2.0 export SPACEMAN_DMM_VERSION=suite-1.8 # Python version for mapmerge and other tools -export PYTHON_VERSION=3.9.0 +export PYTHON_VERSION=3.12.0 # MariaDB version export MARIADB_VERSION=11.4.4 diff --git a/tools/requirements.txt b/tools/requirements.txt index 032ffe66e727..adaf3cf43d90 100644 --- a/tools/requirements.txt +++ b/tools/requirements.txt @@ -1,6 +1,6 @@ -pygit2==1.7.2 -bidict==0.22.0 -Pillow==10.0.1 +pygit2==1.15 +bidict==0.23.1 +Pillow==11.0.0 # changelogs PyYaml==6.0.1 diff --git a/tools/setup_dev_db/invoke.py b/tools/setup_dev_db/invoke.py index 3221d3215347..20b8fd586b52 100644 --- a/tools/setup_dev_db/invoke.py +++ b/tools/setup_dev_db/invoke.py @@ -39,14 +39,14 @@ def log_message(source: str, string: str): mysqld: subprocess.Popen | None = subprocess.Popen( [], - executable="", + executable=PATH_TO_MYSQLD, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) flyway: subprocess.Popen | None = subprocess.Popen( [], - executable="", + executable=PATH_TO_FLYWAY, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -86,6 +86,9 @@ def log_message(source: str, string: str): flyway = None log_message("setup_dev_db", 'flyway exited with code %d' % exited) + if flyway == None and mysqld == None: + keep_running = False + # "this is async right" # "yeah" # pulls the cover off @@ -99,8 +102,10 @@ def log_message(source: str, string: str): flyway.send_signal(sig=signal.CTRL_C_EVENT) # block on mysqld/flyway exiting - mysqld_exitcode: int | None = mysqld.wait() - log_message("setup_dev_db", 'mysqld exited with code %d' % exited) - flyway_exitcode: int | None = flyway.wait() - log_message("setup_dev_db", 'flyway exited with code %d' % exited) + if mysqld != None: + mysqld_exitcode: int | None = mysqld.wait() + log_message("setup_dev_db", 'mysqld exited with code %d' % exited) + if flyway != None: + flyway_exitcode: int | None = flyway.wait() + log_message("setup_dev_db", 'flyway exited with code %d' % exited) From 3a1f957c415a510e052a986b300a70d546bdfbd8 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 05:05:11 -0500 Subject: [PATCH 16/29] this is just evil --- tools/setup_dev_db/README.md | 5 +++ tools/setup_dev_db/invoke.py | 79 ++++++++++++++++++++++++------------ tools/setup_dev_db/setup.ps1 | 14 ++++++- 3 files changed, 70 insertions(+), 28 deletions(-) diff --git a/tools/setup_dev_db/README.md b/tools/setup_dev_db/README.md index 7d79145ae101..d388b42bc689 100644 --- a/tools/setup_dev_db/README.md +++ b/tools/setup_dev_db/README.md @@ -12,6 +12,11 @@ Only works on windows right now, if you're on linux you should know how to set u If you do, and you get breached / bad things happen, I will not provide any help or 
condolences. This is purely a dev tool. **It is the responsibility of the server owner to set up a proper production database and to maintain it.** +## Defaults + +* The default root password is set to `password`. +* The default database is not secured at all. + ## License This entire folder is under the MIT license. diff --git a/tools/setup_dev_db/invoke.py b/tools/setup_dev_db/invoke.py index 20b8fd586b52..2d57f7018182 100644 --- a/tools/setup_dev_db/invoke.py +++ b/tools/setup_dev_db/invoke.py @@ -1,11 +1,32 @@ import argparse; +import codecs; +import encodings; +import io; +import os; import subprocess; import signal; import sys; import time; +import threading; -def log_message(source: str, string: str): - print('%s: %s' % (source, string)) +keep_running: bool = True + +def on_interrupt(): + global keep_running + print("ctrl+C caught!") + keep_running = False + +signal.signal(signal.SIGINT, on_interrupt) + +def log_message(source: str, string: str, end: str = "\n"): + print('%s: %s' % (source, string), end=end) + sys.stdout.flush() + +def thread_pipe_dump(source: str, pipe: io.TextIOWrapper): + while True: + for line in pipe.readlines(): + log_message(source, line, end="") + time.sleep(0.001) if __name__ == "__main__": argparser = argparse.ArgumentParser( @@ -15,6 +36,7 @@ def log_message(source: str, string: str): argparser.add_argument("--daemon", type=str) argparser.add_argument("--flyway", type=str) argparser.add_argument("--migrations", type=str) + argparser.add_argument("--dataDir", type=str) argparser.add_argument("--port", required=False, default=3306, type=int) argparser.add_argument("--dbname", required=False, default="ss13", type=str) @@ -30,57 +52,60 @@ def log_message(source: str, string: str): PATH_TO_MYSQLD: str = parsed_args.daemon PATH_TO_FLYWAY: str = parsed_args.flyway PATH_TO_MIGRATIONS: str = parsed_args.migrations + USE_DATADIR: str = parsed_args.dataDir USE_PORT: int = parsed_args.port USE_DATABASE: str = parsed_args.dbname log_message("setup_dev_db", "WARNING: This is a very, very lazy Python app! Logs are not necessarily in order of occurence; the script is just a very, very dumb while(True) loop that is just jank enough to work. Do not use this for production") log_message("setup_dev_db", 'Using port %d and setting up on database %s. Use --port and --dbname to override!' % (USE_PORT, USE_DATABASE)) + log_message("setup_dev_db", 'Using data directory %s.' 
% (USE_DATADIR)) log_message("setup_dev_db", "Starting processes...") + log_message("setup_dev_db", "Starting mysqld...") mysqld: subprocess.Popen | None = subprocess.Popen( - [], - executable=PATH_TO_MYSQLD, + [ + PATH_TO_MYSQLD, + '--datadir', + USE_DATADIR, + "--console", + ], stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, ) + log_message("setup_dev_db", "Starting flyway...") flyway: subprocess.Popen | None = subprocess.Popen( - [], - executable=PATH_TO_FLYWAY, + [ + PATH_TO_FLYWAY, + ], stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, ) + os.set_blocking(mysqld.stdout.fileno(), False) + os.set_blocking(flyway.stdout.fileno(), False) + + mysqld_out_dump = threading.Thread(target=thread_pipe_dump, args=("mysqld-out", mysqld.stdout), daemon=True) + flyway_out_dump = threading.Thread(target=thread_pipe_dump, args=("flyway-out", flyway.stdout), daemon=True) + + mysqld_out_dump.start() + flyway_out_dump.start() + # main loop - keep_running: bool = True - while(keep_running): + while keep_running == True: # pull outputs polled: list[str] exited: int | None if mysqld != None: - polled = mysqld.stdout.readlines() - for string in polled: - log_message("mysqld-out", string) - - polled = mysqld.stderr.readlines() - for string in polled: - log_message("mysqld-err", string) - exited = mysqld.poll() if exited != None: mysqld = None log_message("setup_dev_db", 'mysqld exited with code %d' % exited) if flyway != None: - polled = flyway.stdout.readlines() - for string in polled: - log_message("flyway-out", string) - - polled = flyway.stderr.readlines() - for string in polled: - log_message("flyway-err", string) - exited = flyway.poll() if exited != None: flyway = None @@ -95,6 +120,8 @@ def log_message(source: str, string: str): # "what the hell, this is just an infinite loop!" time.sleep(0.001) + log_message("setup_dev_db", 'exiting...') + # exit mysqld and flyway if mysqld != None: mysqld.send_signal(sig=signal.CTRL_C_EVENT) diff --git a/tools/setup_dev_db/setup.ps1 b/tools/setup_dev_db/setup.ps1 index bbb9484c36a1..416c82839e6f 100644 --- a/tools/setup_dev_db/setup.ps1 +++ b/tools/setup_dev_db/setup.ps1 @@ -42,9 +42,10 @@ $MARIADB_VERSION = ExtractVersion -Path "$ToolRoot/../../dependencies.sh" -Key " $FLYWAY_VERSION = ExtractVersion -Path "$ToolRoot/../../dependencies.sh" -Key "FLYWAY_VERSION" $MARIADB_FOLDER = "$ToolRoot/.cache/mariadb/$MARIADB_VERSION" +$MARIADB_BIN_FOLDER = "$MARIADB_FOLDER/mariadb-$MARIADB_VERSION-winx64/bin" $FLYWAY_FOLDER = "$ToolRoot/.cache/flyway/$FLYWAY_VERSION" -$MYSQLD_PATH = "$MARIADB_FOLDER/mariadb-$MARIADB_VERSION-winx64/bin/mariadbd.exe" +$MYSQLD_PATH = "$MARIADB_BIN_FOLDER/mariadbd.exe" $FLYWAY_PATH = "$FLYWAY_FOLDER/flyway-$FLYWAY_VERSION/flyway.cmd" # GET mariadb IF NOT EXISTS @@ -71,4 +72,13 @@ if(!(Test-Path $FLYWAY_PATH -PathType Leaf)) { # run database -& $ToolRoot/../bootstrap/python_.ps1 $ToolRoot/invoke.py --daemon $MYSQLD_PATH --flyway $FLYWAY_PATH --migrations "../../sql/migrations" $args +$DATABASE_DATA_DIR_RAW = "$ToolRoot/../../data/setup_dev_db" +if(!(Test-Path $DATABASE_DATA_DIR_RAW -PathType Container)) { + New-Item $DATABASE_DATA_DIR_RAW -ItemType Directory + $DATABASE_DATA_DIR = Resolve-Path "$ToolRoot/../../data/setup_dev_db" + Write-Output "Bootstrapping database with data directory '$DATABASE_DATA_DIR'." 
+ & $MARIADB_BIN_FOLDER/mariadb-install-db.exe -d $DATABASE_DATA_DIR -p password +} +$DATABASE_DATA_DIR = Resolve-Path "$ToolRoot/../../data/setup_dev_db" + +& $ToolRoot/../bootstrap/python_.ps1 $ToolRoot/invoke.py --dataDir $DATABASE_DATA_DIR --daemon $MYSQLD_PATH --flyway $FLYWAY_PATH --migrations "../../sql/migrations" $args From 4c7ca9a6ebdd9a8faf2d4109ec1e85d7a844c172 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 16:26:22 -0500 Subject: [PATCH 17/29] Update datumvars.dm --- code/datums/datumvars.dm | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/code/datums/datumvars.dm b/code/datums/datumvars.dm index 5f0f3d18ef90..745ad7051e49 100644 --- a/code/datums/datumvars.dm +++ b/code/datums/datumvars.dm @@ -3,10 +3,7 @@ */ /datum/proc/vv_delete() . = TRUE - // incase qdel returns QDEL_HINT_HARDDEL_NOW - var/datum/deleting = src - src = null - qdel(deleting) + qdel(src) /datum/proc/CanProcCall(procname) return TRUE From 109698ab7c2abffc54191ee1dfabfbac6e1480c8 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:08:36 -0500 Subject: [PATCH 18/29] done. --- sql/migrations/V0.0.1__PrepLegacyTables.sql | 15 ----- ...0.0.3__RemakePlaytimeTriggerJustInCase.sql | 6 +- tools/bootstrap/python_.ps1 | 14 ++-- tools/setup_dev_db/README.md | 6 ++ tools/setup_dev_db/invoke.py | 66 ++++++++++++------- tools/setup_dev_db/setup.ps1 | 5 +- 6 files changed, 61 insertions(+), 51 deletions(-) diff --git a/sql/migrations/V0.0.1__PrepLegacyTables.sql b/sql/migrations/V0.0.1__PrepLegacyTables.sql index 11ba1fdfd5ef..751737b2d190 100644 --- a/sql/migrations/V0.0.1__PrepLegacyTables.sql +++ b/sql/migrations/V0.0.1__PrepLegacyTables.sql @@ -20,21 +20,6 @@ CREATE TABLE IF NOT EXISTS `rp_schema_revision` ( PRIMARY KEY (`major`, `minor`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; --- Backend Stores -- --- -- - -CREATE TABLE IF NOT EXISTS `rp_backend_repository` ( - `repository` VARCHAR(64) NOT NULL, - `id` VARCHAR(128) NOT NULL, - `version` INT(11) NOT NULL, - `data` MEDIUMTEXT NOT NULL, - `storedTime` DATETIME NOT NULL DEFAULT Now(), - `modifiedTime` DATETIME NOT NULL DEFAULT Now(), - PRIMARY KEY(`repository`, `id`), - INDEX(`repository`), - INDEX(`id`) -) - -- persistence -- -- SSpersistence modules/bulk_entity diff --git a/sql/migrations/V0.0.3__RemakePlaytimeTriggerJustInCase.sql b/sql/migrations/V0.0.3__RemakePlaytimeTriggerJustInCase.sql index a4ad866da02b..5b97699a4119 100644 --- a/sql/migrations/V0.0.3__RemakePlaytimeTriggerJustInCase.sql +++ b/sql/migrations/V0.0.3__RemakePlaytimeTriggerJustInCase.sql @@ -1,6 +1,6 @@ -DELETE TRIGGER `playtimeTlogupdate`; -DELETE TRIGGER `playtimeTloginsert`; -DELETE TRIGGER `playtimeTlogdelete`; +DROP TRIGGER `playtimeTlogupdate`; +DROP TRIGGER `playtimeTloginsert`; +DROP TRIGGER `playtimeTlogdelete`; DELIMITER $$ CREATE TRIGGER `playtimeTlogupdate` AFTER UPDATE ON `playtime` FOR EACH ROW BEGIN INSERT into `playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes-OLD.minutes); diff --git a/tools/bootstrap/python_.ps1 b/tools/bootstrap/python_.ps1 index f37589f10644..fe89450f14e1 100644 --- a/tools/bootstrap/python_.ps1 +++ b/tools/bootstrap/python_.ps1 @@ -9,6 +9,7 @@ # The underscore in the name is so that typing `bootstrap/python` into # PowerShell finds the `.bat` file first, which ensures this script executes # regardless of ExecutionPolicy. 
+ $host.ui.RawUI.WindowTitle = "starting :: python $args" $ErrorActionPreference = "Stop" [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 @@ -95,14 +96,9 @@ if (!(Test-Path "$PythonDir/requirements.txt") -or ((Get-FileHash "$Tools/requir Write-Output $PythonExe | Out-File -Encoding utf8 $Log [System.String]::Join([System.Environment]::NewLine, $args) | Out-File -Encoding utf8 -Append $Log Write-Output "---" | Out-File -Encoding utf8 -Append $Log + $host.ui.RawUI.WindowTitle = "python $args" -$ErrorActionPreference = "Continue" -& $PythonExe -u $args 2>&1 | ForEach-Object { - $str = "$_" - if ($_.GetType() -eq [System.Management.Automation.ErrorRecord]) { - $str = $str.TrimEnd("`r`n") - } - $str | Out-File -Encoding utf8 -Append $Log - $str | Out-Host -} + +& $PythonExe -u $args + exit $LastExitCode diff --git a/tools/setup_dev_db/README.md b/tools/setup_dev_db/README.md index d388b42bc689..486b901966ca 100644 --- a/tools/setup_dev_db/README.md +++ b/tools/setup_dev_db/README.md @@ -6,6 +6,12 @@ Sets up a database for use in development, automatically downloading a portable Only works on windows right now, if you're on linux you should know how to set up a database. +## What this is not for. + +This is **not** for developing the database. + +This sounds counterintuitive, but this script pretty much just blindly runs migrations. If a migration fails, there's nothing it can do about it unless there's an undo migration (which is usually not the case). + ## WARNING **Do not, under any circumstances, use or attempt to modify this tool to run in production.** diff --git a/tools/setup_dev_db/invoke.py b/tools/setup_dev_db/invoke.py index 2d57f7018182..6737f5ac2e8b 100644 --- a/tools/setup_dev_db/invoke.py +++ b/tools/setup_dev_db/invoke.py @@ -11,10 +11,10 @@ keep_running: bool = True -def on_interrupt(): +def on_interrupt(signo, frame): global keep_running - print("ctrl+C caught!") keep_running = False + print("ctrl+C caught!") signal.signal(signal.SIGINT, on_interrupt) @@ -33,12 +33,14 @@ def thread_pipe_dump(source: str, pipe: io.TextIOWrapper): prog="setup.ps1", usage="setup.ps1 --port [port] --dbname [dbname]", ) - argparser.add_argument("--daemon", type=str) + argparser.add_argument("--mysqld", type=str) + argparser.add_argument("--mysql_admin", type=str) argparser.add_argument("--flyway", type=str) argparser.add_argument("--migrations", type=str) argparser.add_argument("--dataDir", type=str) argparser.add_argument("--port", required=False, default=3306, type=int) argparser.add_argument("--dbname", required=False, default="ss13", type=str) + argparser.add_argument("--no_migrations", required=False, action="store_true", default=False) # we slice it, as being invoked from bootstrap consumes this script's file path as the first arg effective_args: list[str] = sys.argv[1:] @@ -49,7 +51,8 @@ def thread_pipe_dump(source: str, pipe: io.TextIOWrapper): parsed_args = argparser.parse_args(effective_args) - PATH_TO_MYSQLD: str = parsed_args.daemon + PATH_TO_MYSQLD: str = parsed_args.mysqld + PATH_TO_MYSQL_ADMIN: str = parsed_args.mysql_admin PATH_TO_FLYWAY: str = parsed_args.flyway PATH_TO_MIGRATIONS: str = parsed_args.migrations USE_DATADIR: str = parsed_args.dataDir @@ -73,25 +76,45 @@ def thread_pipe_dump(source: str, pipe: io.TextIOWrapper): stderr=subprocess.STDOUT, text=True, ) + os.set_blocking(mysqld.stdout.fileno(), False) + mysqld_out_dump = threading.Thread(target=thread_pipe_dump, args=("mysqld-out", mysqld.stdout), daemon=True) + mysqld_out_dump.start() - 
log_message("setup_dev_db", "Starting flyway...") - flyway: subprocess.Popen | None = subprocess.Popen( + log_message("setup_dev_db", "Creating database...") + create_db_run: subprocess.CompletedProcess = subprocess.run( [ - PATH_TO_FLYWAY, + PATH_TO_MYSQL_ADMIN, + "--user=root", + '--password=password', + "create", + USE_DATABASE, ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, ) + log_message("mariadb_admin", create_db_run.stdout or "\n", end="") + + flyway: subprocess.Popen | None = None + if not parsed_args.no_migrations: + log_message("setup_dev_db", "Starting flyway and migrating...") + flyway = subprocess.Popen( + [ + PATH_TO_FLYWAY, + "-user=root", + "-password=password", + '-url=jdbc:mariadb://localhost:%d/%s' % (USE_PORT, USE_DATABASE), + '-locations=filesystem:%s' % (PATH_TO_MIGRATIONS), + "migrate", + ], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + ) + os.set_blocking(flyway.stdout.fileno(), False) + flyway_out_dump = threading.Thread(target=thread_pipe_dump, args=("flyway-out", flyway.stdout), daemon=True) + flyway_out_dump.start() - os.set_blocking(mysqld.stdout.fileno(), False) - os.set_blocking(flyway.stdout.fileno(), False) - - mysqld_out_dump = threading.Thread(target=thread_pipe_dump, args=("mysqld-out", mysqld.stdout), daemon=True) - flyway_out_dump = threading.Thread(target=thread_pipe_dump, args=("flyway-out", flyway.stdout), daemon=True) - - mysqld_out_dump.start() - flyway_out_dump.start() # main loop while keep_running == True: @@ -103,13 +126,13 @@ def thread_pipe_dump(source: str, pipe: io.TextIOWrapper): exited = mysqld.poll() if exited != None: mysqld = None - log_message("setup_dev_db", 'mysqld exited with code %d' % exited) + log_message("setup_dev_db", 'mysqld exited with code %d' % (exited)) if flyway != None: exited = flyway.poll() if exited != None: flyway = None - log_message("setup_dev_db", 'flyway exited with code %d' % exited) + log_message("setup_dev_db", 'flyway exited with code %d' % (exited)) if flyway == None and mysqld == None: keep_running = False @@ -124,15 +147,14 @@ def thread_pipe_dump(source: str, pipe: io.TextIOWrapper): # exit mysqld and flyway if mysqld != None: - mysqld.send_signal(sig=signal.CTRL_C_EVENT) + mysqld.terminate() if flyway != None: - flyway.send_signal(sig=signal.CTRL_C_EVENT) + flyway.terminate() # block on mysqld/flyway exiting if mysqld != None: mysqld_exitcode: int | None = mysqld.wait() - log_message("setup_dev_db", 'mysqld exited with code %d' % exited) + log_message("setup_dev_db", 'mysqld exited with code %d' % (mysqld_exitcode)) if flyway != None: flyway_exitcode: int | None = flyway.wait() - log_message("setup_dev_db", 'flyway exited with code %d' % exited) - + log_message("setup_dev_db", 'flyway exited with code %d' % (flyway_exitcode)) diff --git a/tools/setup_dev_db/setup.ps1 b/tools/setup_dev_db/setup.ps1 index 416c82839e6f..b6712444cc44 100644 --- a/tools/setup_dev_db/setup.ps1 +++ b/tools/setup_dev_db/setup.ps1 @@ -46,6 +46,7 @@ $MARIADB_BIN_FOLDER = "$MARIADB_FOLDER/mariadb-$MARIADB_VERSION-winx64/bin" $FLYWAY_FOLDER = "$ToolRoot/.cache/flyway/$FLYWAY_VERSION" $MYSQLD_PATH = "$MARIADB_BIN_FOLDER/mariadbd.exe" +$MYSQLD_ADMIN_PATH = "$MARIADB_BIN_FOLDER/mariadb-admin.exe" $FLYWAY_PATH = "$FLYWAY_FOLDER/flyway-$FLYWAY_VERSION/flyway.cmd" # GET mariadb IF NOT EXISTS @@ -77,8 +78,8 @@ if(!(Test-Path $DATABASE_DATA_DIR_RAW -PathType Container)) { New-Item $DATABASE_DATA_DIR_RAW -ItemType Directory $DATABASE_DATA_DIR = Resolve-Path "$ToolRoot/../../data/setup_dev_db" 
Write-Output "Bootstrapping database with data directory '$DATABASE_DATA_DIR'." - & $MARIADB_BIN_FOLDER/mariadb-install-db.exe -d $DATABASE_DATA_DIR -p password + & $MARIADB_BIN_FOLDER/mariadb-install-db.exe -d $DATABASE_DATA_DIR -p password -D } $DATABASE_DATA_DIR = Resolve-Path "$ToolRoot/../../data/setup_dev_db" -& $ToolRoot/../bootstrap/python_.ps1 $ToolRoot/invoke.py --dataDir $DATABASE_DATA_DIR --daemon $MYSQLD_PATH --flyway $FLYWAY_PATH --migrations "../../sql/migrations" $args +& $ToolRoot/../bootstrap/python_.ps1 $ToolRoot/invoke.py --dataDir $DATABASE_DATA_DIR --mysqld $MYSQLD_PATH --flyway $FLYWAY_PATH --migrations "../../sql/migrations" --mysql_admin $MYSQLD_ADMIN_PATH $args From 708651efcf4aff0a5ce3cc8bcfd4fce068d8397f Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:24:28 -0500 Subject: [PATCH 19/29] lol this is gonna be a dumpster fire --- .github/workflows/ci_suite.yml | 16 +++++++++++----- tools/ci/ci_config.txt | 2 +- tools/ci/install/install_flyway.sh | 17 +++++++++++++++++ 3 files changed, 29 insertions(+), 6 deletions(-) create mode 100644 tools/ci/install/install_flyway.sh diff --git a/.github/workflows/ci_suite.yml b/.github/workflows/ci_suite.yml index d29a4c2d96ea..250d11f51993 100644 --- a/.github/workflows/ci_suite.yml +++ b/.github/workflows/ci_suite.yml @@ -140,19 +140,25 @@ jobs: options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 steps: - uses: actions/checkout@v4 + - name: Restore Flyway + uses: actions/cache@v4 + with: + path: ~/flyway + key: ${{ runner.os }}-flyway-${{ hashFiles('dependencies.sh') }} - name: Restore BYOND cache uses: actions/cache@v4 with: path: ~/BYOND key: ${{ runner.os }}-byond-${{ hashFiles('dependencies.sh') }} + - name: Install flyway + run: | + bash tools/ci/install/install_flyway.sh - name: Setup database run: | sudo systemctl start mysql - mysql -u root -proot -e 'CREATE DATABASE ss13_ci;' - mysql -u root -proot ss13_ci < SQL/database_schema_prefixed.sql - mysql -u root -proot ss13_ci < SQL/unified_schema.sql - # mysql -u root -proot -e 'CREATE DATABASE tg_ci_prefixed;' - # mysql -u root -proot tg_ci_prefixed < SQL/tgstation_schema_prefixed.sql + mysql -u root -proot -e 'CREATE DATABASE ss13;' + source dependencies.sh + ~/flyway/flyway-$FLYWAY_VERSION/flyway -user=root -password=root -url=jdbc:mariadb://localhost:3306/ss13 -locations="filesystem:sql/migrations" migrate - name: Install rust-g run: | bash tools/ci/install/install_rust_g.sh diff --git a/tools/ci/ci_config.txt b/tools/ci/ci_config.txt index e7e05a8fe21f..b7a9a95df652 100644 --- a/tools/ci/ci_config.txt +++ b/tools/ci/ci_config.txt @@ -2,7 +2,7 @@ SQL_ENABLED SQL_ADDRESS 127.0.0.1 SQL_USER root SQL_PORT 3306 -SQL_PASSWORD +SQL_PASSWORD root SQL_DATABASE ss13_ci SQL_SERVER_PREFIX rp_ diff --git a/tools/ci/install/install_flyway.sh b/tools/ci/install/install_flyway.sh new file mode 100644 index 000000000000..7f8c5f8f8927 --- /dev/null +++ b/tools/ci/install/install_flyway.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +set -euo pipefail + +source dependencies.sh + +if [ -d "~/flyway/flyway-$FLYWAY_VERSION" ]; then + echo "Using cached flyway $FLYWAY_VERSION." +else + echo "Pulling flyway $FLYWAY_VERSION." 
+ rm -rf ~/flyway/flyway-$FLYWAY_VERSION + mkdir ~/flyway + cd ~/flyway + curl "https://download.red-gate.com/maven/release/com/redgate/flyway/flyway-commandline/$FLYWAY_VERSION/flyway-commandline-$FLYWAY_VERSION-linux-x64.tar.gz" -o flyway.zip + unzip flyway.zip + rm flyway.zip + cd ~ +fi From 393d26358e7f177913652d3a7d1bb50bb5bbe0bb Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:27:26 -0500 Subject: [PATCH 20/29] out with the old, in with the new --- .github/workflows/ci_suite.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_suite.yml b/.github/workflows/ci_suite.yml index 250d11f51993..f76c9720820a 100644 --- a/.github/workflows/ci_suite.yml +++ b/.github/workflows/ci_suite.yml @@ -30,7 +30,7 @@ jobs: run_linters: name: Run Linters needs: start_gate - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 timeout-minutes: 10 steps: From 1b8671fed0eb744cf5a0ba0283008d9060f2ab0b Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:28:19 -0500 Subject: [PATCH 21/29] update path --- tools/ci/install/install_flyway.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tools/ci/install/install_flyway.sh b/tools/ci/install/install_flyway.sh index 7f8c5f8f8927..1cdd7f50dd83 100644 --- a/tools/ci/install/install_flyway.sh +++ b/tools/ci/install/install_flyway.sh @@ -10,8 +10,6 @@ else rm -rf ~/flyway/flyway-$FLYWAY_VERSION mkdir ~/flyway cd ~/flyway - curl "https://download.red-gate.com/maven/release/com/redgate/flyway/flyway-commandline/$FLYWAY_VERSION/flyway-commandline-$FLYWAY_VERSION-linux-x64.tar.gz" -o flyway.zip - unzip flyway.zip - rm flyway.zip + curl "https://download.red-gate.com/maven/release/com/redgate/flyway/flyway-commandline/$FLYWAY_VERSION/flyway-commandline-$FLYWAY_VERSION-linux-x64.tar.gz" | tar -xvz cd ~ fi From 4b6ee744568b10fa3bfe72a0f71c9c9403809702 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:30:28 -0500 Subject: [PATCH 22/29] please work --- .github/workflows/ci_suite.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci_suite.yml b/.github/workflows/ci_suite.yml index f76c9720820a..eaff8feb25af 100644 --- a/.github/workflows/ci_suite.yml +++ b/.github/workflows/ci_suite.yml @@ -75,6 +75,7 @@ jobs: bash tools/ci/install_node.sh bash tools/ci/install/install_spaceman_dmm.sh dreamchecker bash tools/ci/install_ripgrep.sh + sudo apt install pip tools/bootstrap/python -c '' - name: Give Linters A Go id: linter-setup From 366b70694b1cdf4d2ad04023bdd3b825480469fb Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:30:56 -0500 Subject: [PATCH 23/29] please work --- .github/workflows/ci_suite.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_suite.yml b/.github/workflows/ci_suite.yml index eaff8feb25af..46492e19abe6 100644 --- a/.github/workflows/ci_suite.yml +++ b/.github/workflows/ci_suite.yml @@ -75,7 +75,7 @@ jobs: bash tools/ci/install_node.sh bash tools/ci/install/install_spaceman_dmm.sh dreamchecker bash tools/ci/install_ripgrep.sh - sudo apt install pip + sudo apt install -y python3-pip tools/bootstrap/python -c '' - name: Give Linters A Go id: linter-setup From fee1d54b40b9640327bf20eb47aa9b97a4e967c1 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:40:58 -0500 
Subject: [PATCH 24/29] version bump --- dependencies.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies.sh b/dependencies.sh index eb5b3f04e766..358da47695bc 100755 --- a/dependencies.sh +++ b/dependencies.sh @@ -19,7 +19,7 @@ export NODE_VERSION_LTS=20.13.0 export NODE_VERSION_COMPAT=20.2.0 # SpacemanDMM git tag -export SPACEMAN_DMM_VERSION=suite-1.8 +export SPACEMAN_DMM_VERSION=suite-1.9 # Python version for mapmerge and other tools export PYTHON_VERSION=3.12.0 From 15b74908b85747992c0487369bb51c47d1411d39 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 22:55:00 -0500 Subject: [PATCH 25/29] maybe i really should listen to lohikar this is not going to go well --- code/controllers/repository.dm | 1 + code/game/turfs/simulated/floor/floor.dm | 2 +- code/game/turfs/simulated/floor_types/water.dm | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/code/controllers/repository.dm b/code/controllers/repository.dm index e548f5a9d30f..a53aa8e7ffc2 100644 --- a/code/controllers/repository.dm +++ b/code/controllers/repository.dm @@ -152,6 +152,7 @@ if(init_reverse_lookup_shim) var/potential_path = init_reverse_lookup_shim[type_or_id] return fetch(potential_path) + . = id_lookup[type_or_id] if(.) return if(!store_enabled) diff --git a/code/game/turfs/simulated/floor/floor.dm b/code/game/turfs/simulated/floor/floor.dm index 793169f852d6..98b2f44c5577 100644 --- a/code/game/turfs/simulated/floor/floor.dm +++ b/code/game/turfs/simulated/floor/floor.dm @@ -73,7 +73,7 @@ CRASH("additional arg detected in /floor Initialize. turfs do not have init arguments as ChangeTurf does not accept them.") var/datum/prototype/flooring/set_flooring_to - if(initial_flooring && (set_flooring_to = RSflooring.fetch(initial_flooring))) + if(initial_flooring && (set_flooring_to = RSflooring.fetch_local_or_throw(initial_flooring))) set_flooring(set_flooring_to, TRUE) else // todo: these are only here under else because set flooring will trigger it diff --git a/code/game/turfs/simulated/floor_types/water.dm b/code/game/turfs/simulated/floor_types/water.dm index 3e1fd52501f9..beaee03cc0e9 100644 --- a/code/game/turfs/simulated/floor_types/water.dm +++ b/code/game/turfs/simulated/floor_types/water.dm @@ -24,7 +24,7 @@ /turf/simulated/floor/water/Initialize(mapload) . 
= ..() - var/datum/prototype/flooring/F = RSflooring.fetch(/datum/prototype/flooring/water) + var/datum/prototype/flooring/F = RSflooring.fetch_local_or_throw(/datum/prototype/flooring/water) footstep_sounds = F?.footstep_sounds update_icon() From bd5a91fea8f5f7d9f99b3cb60b6be3d2e16b296e Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:17:58 -0500 Subject: [PATCH 26/29] Fix --- code/controllers/subsystem/materials.dm | 2 +- tools/deploy.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/code/controllers/subsystem/materials.dm b/code/controllers/subsystem/materials.dm index 048792848567..edd53d804671 100644 --- a/code/controllers/subsystem/materials.dm +++ b/code/controllers/subsystem/materials.dm @@ -171,7 +171,7 @@ SUBSYSTEM_DEF(materials) for(var/i in 1 to length(L)) var/key = L[i] var/value = L[key] - var/datum/prototype/material/resolved = RSmaterials.fetch_or_defer(key) + var/datum/prototype/material/resolved = RSmaterials.fetch_or_defer(value) switch(resolved) if(REPOSITORY_FETCH_DEFER) // todo: handle this diff --git a/tools/deploy.sh b/tools/deploy.sh index ac0a4892adc4..160a561913b0 100755 --- a/tools/deploy.sh +++ b/tools/deploy.sh @@ -11,6 +11,7 @@ fi mkdir -p \ $1/_mapload \ + $1/config.default \ $1/maps \ $1/icons \ $1/sound \ From 9dc4cf6b00e90cbdebc2b7d26690ca13258a6f92 Mon Sep 17 00:00:00 2001 From: silicons <2003111+silicons@users.noreply.github.com> Date: Mon, 9 Dec 2024 23:31:42 -0500 Subject: [PATCH 27/29] fix --- code/controllers/toml_config/toml_configuration.dm | 5 ++++- tools/deploy.sh | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/code/controllers/toml_config/toml_configuration.dm b/code/controllers/toml_config/toml_configuration.dm index 3b4faeecffdc..ec2984bcdde6 100644 --- a/code/controllers/toml_config/toml_configuration.dm +++ b/code/controllers/toml_config/toml_configuration.dm @@ -1,9 +1,10 @@ //* This file is explicitly licensed under the MIT license. *// //* Copyright (c) 2024 Citadel Station Developers *// +// todo: maybe rename to config? or keep it as Configuration to keep with naming scheme of other 'system / backend' modules like the MC? GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) -// todo: /datum/controller/config +// todo: /datum/controller/configuration /datum/controller/toml_configuration /// Entries by type. VAR_PRIVATE/list/datum/toml_config_entry/typed_entries @@ -135,6 +136,8 @@ GLOBAL_REAL(Configuration, /datum/controller/toml_configuration) /datum/controller/toml_configuration/proc/load(filelike) var/list/decoded if(istext(filelike)) + if(!fexists(filelike)) + CRASH("failed to load [filelike]: does not exist") decoded = rustg_read_toml_file(filelike) else if(isfile(filelike)) // noa path, it might be rsc cache; rust_g can't read that directly. diff --git a/tools/deploy.sh b/tools/deploy.sh index 160a561913b0..baa4de5b5291 100755 --- a/tools/deploy.sh +++ b/tools/deploy.sh @@ -27,6 +27,8 @@ fi cp citadel.dmb citadel.rsc $1/ # mapload: has basemap.dmm, runtime loaded cp -r _mapload/* $1/_mapload/ +# default configs. self-explanatory. 
From fb1443236492ea437a1560c611fc5057d30dcbad Mon Sep 17 00:00:00 2001
From: silicons <2003111+silicons@users.noreply.github.com>
Date: Tue, 10 Dec 2024 19:16:30 -0500
Subject: [PATCH 28/29] how silly of me

---
 ...{V0.0.1__PrepLegacyTables.sql => V1.0.1__PrepLegacyTables.sql} | 0
 .../{V0.0.2__DeprefixTables.sql => V1.0.2__DeprefixTables.sql}    | 0
 ...JustInCase.sql => V1.0.3__RemakePlaytimeTriggerJustInCase.sql} | 0
 ...0.4__AddRepositoryStore.sql => V1.0.4__AddRepositoryStore.sql} | 0
 4 files changed, 0 insertions(+), 0 deletions(-)
 rename sql/migrations/{V0.0.1__PrepLegacyTables.sql => V1.0.1__PrepLegacyTables.sql} (100%)
 rename sql/migrations/{V0.0.2__DeprefixTables.sql => V1.0.2__DeprefixTables.sql} (100%)
 rename sql/migrations/{V0.0.3__RemakePlaytimeTriggerJustInCase.sql => V1.0.3__RemakePlaytimeTriggerJustInCase.sql} (100%)
 rename sql/migrations/{V0.0.4__AddRepositoryStore.sql => V1.0.4__AddRepositoryStore.sql} (100%)

diff --git a/sql/migrations/V0.0.1__PrepLegacyTables.sql b/sql/migrations/V1.0.1__PrepLegacyTables.sql
similarity index 100%
rename from sql/migrations/V0.0.1__PrepLegacyTables.sql
rename to sql/migrations/V1.0.1__PrepLegacyTables.sql
diff --git a/sql/migrations/V0.0.2__DeprefixTables.sql b/sql/migrations/V1.0.2__DeprefixTables.sql
similarity index 100%
rename from sql/migrations/V0.0.2__DeprefixTables.sql
rename to sql/migrations/V1.0.2__DeprefixTables.sql
diff --git a/sql/migrations/V0.0.3__RemakePlaytimeTriggerJustInCase.sql b/sql/migrations/V1.0.3__RemakePlaytimeTriggerJustInCase.sql
similarity index 100%
rename from sql/migrations/V0.0.3__RemakePlaytimeTriggerJustInCase.sql
rename to sql/migrations/V1.0.3__RemakePlaytimeTriggerJustInCase.sql
diff --git a/sql/migrations/V0.0.4__AddRepositoryStore.sql b/sql/migrations/V1.0.4__AddRepositoryStore.sql
similarity index 100%
rename from sql/migrations/V0.0.4__AddRepositoryStore.sql
rename to sql/migrations/V1.0.4__AddRepositoryStore.sql

From 5e6e6af8fa881e5943663dadfb4041bd07dd9692 Mon Sep 17 00:00:00 2001
From: silicons <2003111+silicons@users.noreply.github.com>
Date: Tue, 10 Dec 2024 19:18:14 -0500
Subject: [PATCH 29/29] will that work?

---
 sql/migrations/V1.0.1__PrepLegacyTables.sql | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/migrations/V1.0.1__PrepLegacyTables.sql b/sql/migrations/V1.0.1__PrepLegacyTables.sql
index 751737b2d190..f0b9dfa48ec9 100644
--- a/sql/migrations/V1.0.1__PrepLegacyTables.sql
+++ b/sql/migrations/V1.0.1__PrepLegacyTables.sql
@@ -186,13 +186,13 @@ CREATE TABLE IF NOT EXISTS `rp_playtime_log` (
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
 
 DELIMITER $$
-CREATE TRIGGER `playtimeTlogupdate` AFTER UPDATE ON `rp_playtime` FOR EACH ROW BEGIN INSERT into `rp_playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes-OLD.minutes);
+CREATE TRIGGER IF NOT EXISTS `playtimeTlogupdate` AFTER UPDATE ON `rp_playtime` FOR EACH ROW BEGIN INSERT into `rp_playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes-OLD.minutes);
 END
 $$
-CREATE TRIGGER `playtimeTloginsert` AFTER INSERT ON `rp_playtime` FOR EACH ROW BEGIN INSERT into `rp_playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes);
+CREATE TRIGGER IF NOT EXISTS `playtimeTloginsert` AFTER INSERT ON `rp_playtime` FOR EACH ROW BEGIN INSERT into `rp_playtime_log` (player, roleid, delta) VALUES (NEW.player, NEW.roleid, NEW.minutes);
 END
 $$
-CREATE TRIGGER `playtimeTlogdelete` AFTER DELETE ON `rp_playtime` FOR EACH ROW BEGIN INSERT into `rp_playtime_log` (player, roleid, delta) VALUES (OLD.player, OLD.roleid, 0-OLD.minutes);
+CREATE TRIGGER IF NOT EXISTS `playtimeTlogdelete` AFTER DELETE ON `rp_playtime` FOR EACH ROW BEGIN INSERT into `rp_playtime_log` (player, roleid, delta) VALUES (OLD.player, OLD.roleid, 0-OLD.minutes);
 END
 $$
 DELIMITER ;