From 3020318025690ee0eb90275140f0f7361cb15ba0 Mon Sep 17 00:00:00 2001 From: Anton Tarasenko Date: Fri, 3 Mar 2023 01:49:21 +0700 Subject: [PATCH] Complete user groups/adata implementation --- config/AcediaDB.ini | 5 + config/AcediaUsers.ini | 34 +- .../AcediaEnvironment/AcediaEnvironment.uc | 1 + sources/Data/Collections/ArrayList.uc | 84 +- sources/Data/Collections/Collection.uc | 99 +- sources/Data/Collections/HashTable.uc | 46 +- .../Data/Collections/Tests/TEST_ArrayList.uc | 52 +- .../Data/Collections/Tests/TEST_HashTable.uc | 82 ++ sources/Data/Database/Connection/DBCache.uc | 1099 +++++++++++++++++ .../Data/Database/Connection/DBConnection.uc | 791 ++++++++++++ .../Events/DBConnection_EditResult_Signal.uc | 40 + .../Events/DBConnection_EditResult_Slot.uc | 41 + .../DBConnection_StateChanged_Signal.uc | 43 + .../Events/DBConnection_StateChanged_Slot.uc | 44 + .../Connection/Tests/TEST_DBConnection.uc | 389 ++++++ sources/Data/Database/DBAPI.uc | 60 +- sources/Data/Database/DBTask.uc | 24 +- sources/Data/Database/Database.uc | 129 +- sources/Data/Database/Local/DBRecord.uc | 3 + .../Database/Local/LocalDatabaseInstance.uc | 97 +- sources/Data/Database/Tasks/DBCheckTask.uc | 7 +- .../Data/Database/Tasks/DBIncrementTask.uc | 9 +- sources/Data/Database/Tasks/DBKeysTask.uc | 9 +- sources/Data/Database/Tasks/DBReadTask.uc | 9 +- sources/Data/Database/Tasks/DBRemoveTask.uc | 9 +- sources/Data/Database/Tasks/DBSizeTask.uc | 10 +- sources/Data/Database/Tasks/DBWriteTask.uc | 9 +- .../Data/Database/Tests/TEST_LocalDatabase.uc | 210 +++- sources/Manifest.uc | 7 +- sources/Players/EPlayer.uc | 5 +- sources/Text/BaseText.uc | 48 + sources/Text/JSON/JSONAPI.uc | 311 ++++- sources/Text/JSON/JSONPointer.uc | 143 ++- sources/Text/Tests/TEST_JSON.uc | 514 +++++++- sources/Users/ACommandUserGroups.uc | 29 +- ...ataManager_OnPersistentDataReady_Signal.uc | 40 + ...tDataManager_OnPersistentDataReady_Slot.uc | 41 + .../PersistentData/PersistentDataManager.uc | 407 ++++++ sources/Users/User.uc | 468 +------ sources/Users/UserAPI.uc | 162 +-- sources/Users/UserDatabase.uc | 15 +- sources/Users/Users.uc | 36 +- sources/Users/Users_Feature.uc | 336 ++++- 43 files changed, 5239 insertions(+), 758 deletions(-) create mode 100644 config/AcediaDB.ini create mode 100644 sources/Data/Database/Connection/DBCache.uc create mode 100644 sources/Data/Database/Connection/DBConnection.uc create mode 100644 sources/Data/Database/Connection/Events/DBConnection_EditResult_Signal.uc create mode 100644 sources/Data/Database/Connection/Events/DBConnection_EditResult_Slot.uc create mode 100644 sources/Data/Database/Connection/Events/DBConnection_StateChanged_Signal.uc create mode 100644 sources/Data/Database/Connection/Events/DBConnection_StateChanged_Slot.uc create mode 100644 sources/Data/Database/Connection/Tests/TEST_DBConnection.uc create mode 100644 sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Signal.uc create mode 100644 sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Slot.uc create mode 100644 sources/Users/PersistentData/PersistentDataManager.uc diff --git a/config/AcediaDB.ini b/config/AcediaDB.ini new file mode 100644 index 0000000..d57cfbc --- /dev/null +++ b/config/AcediaDB.ini @@ -0,0 +1,5 @@ +; Define all databases you want Acedia to use here. +; For simply making default Acedia configs work, set `createIfMissing` below +; to `true`. 
+[Database LocalDatabase] +createIfMissing=false \ No newline at end of file diff --git a/config/AcediaUsers.ini b/config/AcediaUsers.ini index 6b910d7..17353ff 100644 --- a/config/AcediaUsers.ini +++ b/config/AcediaUsers.ini @@ -1,10 +1,30 @@ +; Acedia requires adding its own `GameRules` to listen to many different +; game events. + +; In this config you can setup Acedia's user groups and persistent data +; storage. Enabling this feature automatically enables user group support, +; while persistent data is optional. +; Databases can be configured in `AcediaDB.ini`. [default Users] -useDatabase=false -databaseLink="[local]database:/users" -userGroup=admin -userGroup=moderator -userGroup=trusted +; Configures whether to use database (and which) for storing user groups. +; Set `useDatabaseForGroupsData` to `false` if you want to define which users +; belong to what groups inside this config. +useDatabaseForGroupsData=true +groupsDatabaseLink=[local]Database:/group_data +; Configures whether persistent data should be additionally used. +; It can only be stored inside a database. +usePersistentData=true +persistentDataDatabaseLink=[local]Database:/user_data +; Available groups. Only used if `useDatabaseForGroupsData` is set to `false`. +localUserGroup=admin +localUserGroup=moderator +localUserGroup=trusted +; These groups definitions only work in case you're using a config with +; `useDatabaseForGroupsData` set to `false`. Simply add new `user=` record, +; specifying SteamIDs of the players, e.g. `user=76561197960287930`. +; You can also optionally specify a human-readable lable for the SteamID after +; slash "/", e.g. `user=76561197960287930/gabe`. [admin UserGroup] ;user= @@ -12,6 +32,4 @@ userGroup=trusted ;user= [trusted UserGroup] -;user= - -; ?wanted, banned? \ No newline at end of file +;user= \ No newline at end of file diff --git a/sources/BaseRealm/AcediaEnvironment/AcediaEnvironment.uc b/sources/BaseRealm/AcediaEnvironment/AcediaEnvironment.uc index dbbec75..040a096 100644 --- a/sources/BaseRealm/AcediaEnvironment/AcediaEnvironment.uc +++ b/sources/BaseRealm/AcediaEnvironment/AcediaEnvironment.uc @@ -414,6 +414,7 @@ public final function Feature GetEnabledFeature(class featureClass) /** * Enables given `Feature` instance `newEnabledFeature` with a given config. + * Does not change a config for already enabled feature, failing instead. * * @see `Feature::EnableMe()`. * diff --git a/sources/Data/Collections/ArrayList.uc b/sources/Data/Collections/ArrayList.uc index 5ea3635..abc0a56 100644 --- a/sources/Data/Collections/ArrayList.uc +++ b/sources/Data/Collections/ArrayList.uc @@ -5,7 +5,7 @@ * `AcediaObject`s. * Appropriate classes and APIs for their construction are provided for * main primitive types and can be extended to any custom `struct`. - * Copyright 2022 Anton Tarasenko + * Copyright 2022-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -24,6 +24,7 @@ */ class ArrayList extends Collection; +var bool FLAG; // Actual storage of all our data. var private array storedObjects; @@ -54,6 +55,34 @@ public final function int GetLength() return storedObjects.length; } +/** + * Appends objects from another `ArrayList` to the caller one. + * + * @param other Array to append objects from. `none` means nothing will be + * added. + * @return Reference to the caller `ArrayList` to allow for method chaining. 
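+ *
+ * A minimal usage sketch (values are illustrative; helpers such as
+ * `_.box.int()` are the usual Acedia accessors):
+ *
+ * ```unrealscript
+ * local ArrayList base, extra;
+ * base = _.collections.EmptyArrayList();
+ * extra = _.collections.EmptyArrayList();
+ * base.AddItem(_.box.int(1));
+ * extra.AddItem(_.box.int(2)).AddItem(none);
+ * base.Append(extra); // `base` now stores [1, 2, none]
+ * ```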
+ */ +public final function ArrayList Append(ArrayList other) +{ + local int i, shift; + local array otherObjects; + + if (other == none) return self; + if (other.GetLength() <= 0) return self; + + shift = storedObjects.length; + otherObjects = other.storedObjects; + SetLength(storedObjects.length + otherObjects.length); + for (i = 0; i < otherObjects.length; i += 1) + { + if (otherObjects[i] != none) { + otherObjects[i].NewRef(); + } + storedObjects[i + shift] = otherObjects[i]; + } + return self; +} + /** * Changes length of the caller `ArrayList`. * If `ArrayList` size is increased as a result - added items will be @@ -67,12 +96,15 @@ public final function ArrayList SetLength(int newLength) { local int i; - if (newLength < 0) { - return self; - } + if (newLength < 0) return self; + if (storedObjects.length == newLength) return self; + for (i = newLength; i < storedObjects.length; i += 1) { FreeItem(i); } + if (storedObjects.length <= 0) { + storedObjects[0] = none; + } storedObjects.length = newLength; return self; } @@ -218,20 +250,15 @@ public final function ArrayList RemoveIndex(int index) } /** - * Validates item at `index`: in case it was erroneously deallocated while - * being stored in caller `ArrayList` - forgets stored `AcediaObject` - * reference. + * Validates item at `index`: whether it fits in current array bounds. * - * @param index Index of an item to validate/ + * @param index Index of an item to validate. * @return `true` if `index` is valid for `storedObjects` array. */ private final function bool ValidateIndex(int index) { - local AcediaObject item; - if (index < 0) return false; if (index >= storedObjects.length) return false; - item = storedObjects[index]; return true; } @@ -429,7 +456,40 @@ public final function int Find(AcediaObject item) return -1; } -protected function AcediaObject GetByText(BaseText key) +/** + * `ArrayList` only support `IntBox` and `IntRef` indices for this method. + */ +public function bool HasKey(AcediaObject key) +{ + if (key == none) { + return false; + } + else if (key.class == class'IntBox') { + return ValidateIndex(IntBox(key).Get()); + } + else if (key.class == class'IntRef') { + return ValidateIndex(IntRef(key).Get()); + } + return false; +} + +public function bool HasKeyByText(Text key) +{ + local int index, consumed; + local Parser parser; + + parser = _.text.Parse(key); + parser.MUnsignedInteger(index,,, consumed); + if (!parser.Ok()) + { + parser.FreeSelf(); + return false; + } + parser.FreeSelf(); + return ValidateIndex(index); +} + +public function AcediaObject GetByText(Text key) { local int index, consumed; local Parser parser; diff --git a/sources/Data/Collections/Collection.uc b/sources/Data/Collections/Collection.uc index 322283c..bc76b95 100644 --- a/sources/Data/Collections/Collection.uc +++ b/sources/Data/Collections/Collection.uc @@ -2,7 +2,7 @@ * Acedia provides a small set of collections for easier data storage. * This is their base class that provides a simple interface for * common methods. - * Copyright 2020 - 2022 Anton Tarasenko + * Copyright 2020-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -25,18 +25,46 @@ class Collection extends AcediaObject var protected class iteratorClass; /** - * Method that must be overloaded for `GetItemByPointer()` to properly work. + * Checks if caller `Collection` has value recorded with a given `key`. 
* - * This method must return an item that `key` refers to with it's - * textual content (not as an object itself). + * Not all collections must support all possible types of keys. + * Key equality is determined by `AcediaObject::IsEqual()` method. + * + * @return `true` if caller `Collection` has value recorded with + * a given `key` and `false` otherwise. + */ +public function bool HasKey(AcediaObject key); + +/** + * Checks if caller `Collection` has value that a given `key` refers to with + * its textual content (not as an object itself). * For example, `ArrayList` parses it into unsigned number, while * `HashTable` uses it as a key directly. + * There is no requirement that all stored values must be reachable by + * this method (i.e. `HashTable` only lets you access values with + * `Text` keys). * + * @return `true` if caller `Collection` has value recorded with + * a given `key` (understood within its textual content) and `false` + * otherwise. + */ +public function bool HasKeyByText(Text key); + +/** + * This method must return an item that `key` refers to with its + * textual content (not as an object itself). + * For example, `ArrayList` parses it into unsigned number, while + * `HashTable` uses it as a key directly. * There is no requirement that all stored values must be reachable by * this method (i.e. `HashTable` only lets you access values with * `Text` keys). + * + * To check whether such value even exists in the collection @see HasKeyByText. + * + * @param key Key that refers to the value to return. + * @return Value that `key` refers to with its textual content. */ -protected function AcediaObject GetByText(BaseText key); +public function AcediaObject GetByText(Text key); /** * Creates an `Iterator` instance to iterate over stored items. @@ -88,7 +116,7 @@ public function Empty() {} * this method (i.e. `HashTable` only lets you access values with `Text` keys). * * @param jsonPointer Path, given by a JSON pointer. - * @return An item `jsonPointerAsText` is referring to (according to the above + * @return An item `jsonPointer` is referring to (according to the above * stated rules). `none` if such item does not exist. */ public final function AcediaObject GetItemByJSON(JSONPointer jsonPointer) @@ -522,7 +550,35 @@ public final function Text GetTextBy(BaseText jsonPointerAsText) } /** - * Returns an `HashTable` value (stored in the caller `Collection` or + * Returns a generic `Collection` value (stored in the caller `Collection` or + * one of it's sub-collections) pointed by + * [JSON pointer](https://tools.ietf.org/html/rfc6901). + * See `GetItemBy()` for more information. + * + * Referred value must be stored as `Collection` + * (or one of it's sub-classes) for this method to work. + * + * @param jsonPointerAsText Description of a path to the `Collection` value. + * @return `Collection` value, stored at `jsonPointerAsText` or + * `none` if it is missing or has a different type. + */ +public final function Collection GetCollectionBy( + BaseText jsonPointerAsText) +{ + local Collection asCollection; + local AcediaObject result; + + result = GetItemBy(jsonPointerAsText); + asCollection = Collection(result); + if (asCollection != none) { + return asCollection; + } + _.memory.Free(result); + return none; +} + +/** + * Returns a `HashTable` value (stored in the caller `Collection` or * one of it's sub-collections) pointed by * [JSON pointer](https://tools.ietf.org/html/rfc6901). * See `GetItemBy()` for more information. 
@@ -904,7 +960,34 @@ public final function Text GetTextByJSON(JSONPointer jsonPointer) } /** - * Returns an `HashTable` value (stored in the caller `Collection` or + * Returns a generic `Collection` value (stored in the caller `Collection` or + * one of it's sub-collections) pointed by JSON pointer. + * See `GetItemByJSON()` for more information. + * + * Referred value must be stored as `Collection` + * (or one of it's sub-classes) for this method to work. + * + * @param jsonPointer JSON path to the `Collection` value. + * @return `Collection` value, stored at `jsonPointerAsText` or + * `none` if it is missing or has a different type. + */ +public final function Collection GetCollectionByJSON( + JSONPointer jsonPointer) +{ + local AcediaObject result; + local Collection asCollection; + + result = GetItemByJSON(jsonPointer); + asCollection = Collection(result); + if (asCollection != none) { + return asCollection; + } + _.memory.Free(result); + return none; +} + +/** + * Returns a `HashTable` value (stored in the caller `Collection` or * one of it's sub-collections) pointed by JSON pointer. * See `GetItemByJSON()` for more information. * diff --git a/sources/Data/Collections/HashTable.uc b/sources/Data/Collections/HashTable.uc index 4df9ee1..844f343 100644 --- a/sources/Data/Collections/HashTable.uc +++ b/sources/Data/Collections/HashTable.uc @@ -197,6 +197,37 @@ private final function ResizeHashTable(int newSize) } } +/** + * Appends objects from another `HashTable` to the caller one. + * + * @param other Array to append objects from. `none` means nothing will be + * added. + * @return Reference to the caller `HashTable` to allow for method chaining. + */ +public final function HashTable Append(HashTable other) +{ + local AcediaObject nextKey, nextValue; + local HashTableIterator iter; + + if (other == none) return self; + if (other.GetLength() <= 0) return self; + + iter = HashTableIterator(other.Iterate()); + while (!iter.HasFinished()) + { + nextKey = iter.GetKey(); + nextValue = iter.Get(); + if (!HasKey(nextKey)) { + SetItem(nextKey, nextValue); + } + _.memory.Free(nextKey); + _.memory.Free(nextValue); + iter.Next(); + } + _.memory.Free(iter); + return self; +} + /** * Returns minimal capacity of the caller associative array. * @@ -234,19 +265,18 @@ public final function SetMinimalCapacity(int newMinimalCapacity) UpdateHashTableSize(); } -/** - * Checks if caller `HashTable` has value recorded with a given `key`. - * - * @return `true` if caller `HashTable` has value recorded with - * a given `key` and `false` otherwise. - */ -public final function bool HasKey(AcediaObject key) +public function bool HasKey(AcediaObject key) { local int bucketIndex, entryIndex; return FindEntryIndices(key, bucketIndex, entryIndex); } +public function bool HasKeyByText(Text key) +{ + return HasKey(key); +} + /** * Returns borrowed value recorded by a given key `key` in the caller * `HashTable`. 
@@ -629,7 +659,7 @@ public final function AcediaObject GetKeyByIndex(Index index) return key.NewRef(); } -protected function AcediaObject GetByText(BaseText key) +public function AcediaObject GetByText(Text key) { return GetItem(key); } diff --git a/sources/Data/Collections/Tests/TEST_ArrayList.uc b/sources/Data/Collections/Tests/TEST_ArrayList.uc index 9834b5e..06423ae 100644 --- a/sources/Data/Collections/Tests/TEST_ArrayList.uc +++ b/sources/Data/Collections/Tests/TEST_ArrayList.uc @@ -31,6 +31,7 @@ protected static function TESTS() Test_Find(); Test_ReferenceManagementGet(); Test_Take(); + Test_Append(); } protected static function Test_GetSet() @@ -289,7 +290,7 @@ protected static function Test_Take() local array allocatedItems; array = NewMockArray(20, allocatedItems); - Context("Testing how well `ArrayList`'s `TakeItem()` command"); + Context("Testing how well `ArrayList`'s `TakeItem()` command."); Issue("`TakeItem()` return wrongs item."); for (i = 0; i < allocatedItems.length; i += 1) { @@ -312,6 +313,55 @@ protected static function Test_Take() } } +protected static function Test_Append() +{ + local int i; + local ArrayList main, additive, empty; + + main = __().collections.EmptyArrayList(); + additive = __().collections.EmptyArrayList(); + empty = __().collections.EmptyArrayList(); + // Ref counter = 2, from creation and adding to collection + main.AddItem(__().box.int(76)).AddItem(__().text.FromString("yoyoyo")); + main.AddItem(none).AddItem(__().ref.float(34.3)); + additive.AddItem(none).AddItem(__().ref.bool(true)); + + Context("Testing appending `ArrayList`'s."); + Issue("`Append(none)` changes caller `ArrayList`."); + main.Append(none); + TEST_ExpectTrue(__().json.Print(main).ToString() + == "[76,\"yoyoyo\",null,34.3]"); + + Issue("`Append()` doesn't properly work on empty `ArrayList`"); + // main ref = 3, +1 from copying + // additive ref = 2 (still) + empty.Append(main); + TEST_ExpectTrue(__().json.Print(main).ToString() + == "[76,\"yoyoyo\",null,34.3]"); + + Issue("`Append()` doesn't properly append `ArrayList`s."); + // main ref = 3 + // additive ref = 3, +1 from copying + main.Append(additive); + TEST_ExpectTrue(__().json.Print(main).ToString() + == "[76,\"yoyoyo\",null,34.3,null,true]"); + + Issue("`Append()` changes appended `ArrayList`"); + TEST_ExpectTrue(__().json.Print(additive).ToString() == "[null,true]"); + + Issue("`Append()` incorrectly changes reference count of stored objects."); + // Ref counter = 3, but will be visible as 4 from getters + for (i = 0; i < main.GetLength(); i += 1) + { + if (i == 2 || i == 4) { + TEST_ExpectNone(main.GetItem(i)); + } + else { + TEST_ExpectTrue(main.GetItem(i)._getRefCount() == 4); + } + } +} + defaultproperties { caseGroup = "Collections" diff --git a/sources/Data/Collections/Tests/TEST_HashTable.uc b/sources/Data/Collections/Tests/TEST_HashTable.uc index 66b461e..cfd02df 100644 --- a/sources/Data/Collections/Tests/TEST_HashTable.uc +++ b/sources/Data/Collections/Tests/TEST_HashTable.uc @@ -37,6 +37,7 @@ protected static function TESTS() Test_ReferenceManagement(); Test_Take(); Test_LargeArray(); + Test_Append(); } protected static function AcediaObject NewKey(int value) @@ -570,6 +571,87 @@ protected static function Test_LargeArray() } } +protected static function Test_Append() +{ + local HashTable main, additive, empty; + + main = __().collections.EmptyHashTable(); + additive = __().collections.EmptyHashTable(); + empty = __().collections.EmptyHashTable(); + // Ref count in main = 2, creation and copy into collection + 
main.SetItem(P("A"), __().text.FromString("value of A")); + main.SetItem(P("B"), __().text.FromString("value of B")); + main.SetItem(P("C"), __().text.FromString("value of C")); + main.SetItem(P("D"), none); + additive.SetItem(P("C"), __().text.FromString("other value of C!")); + additive.SetItem(P("D"), __().text.FromString("value of D")); + additive.SetItem(P("E"), __().text.FromString("value of E")); + + Context("Testing appending `HashTable`'s."); + SubTest_EmptyCopies(main, additive, empty); + SubTest_ProperCopies(main, additive, empty); +} + +protected static function SubTest_EmptyCopies( + HashTable main, + HashTable additive, + HashTable empty) +{ + Issue("`Append(none)` changes caller `HashTable`."); + main.Append(none); + TEST_ExpectTrue(main.GetLength() == 4); + TEST_ExpectTrue(main.GetString(P("A")) == "value of A"); + TEST_ExpectTrue(main.GetString(P("B")) == "value of B"); + TEST_ExpectTrue(main.GetString(P("C")) == "value of C"); + TEST_ExpectNone(main.GetItem(P("D"))); + + Issue("`Append()` for empty argument changes caller `HashTable`."); + main.Append(empty); + TEST_ExpectTrue(main.GetLength() == 4); + TEST_ExpectTrue(main.GetString(P("A")) == "value of A"); + TEST_ExpectTrue(main.GetString(P("B")) == "value of B"); + TEST_ExpectTrue(main.GetString(P("C")) == "value of C"); + TEST_ExpectNone(main.GetItem(P("D"))); + + Issue("`Append()` doesn't properly work on empty `HashTable`"); + // Ref count in main = 3, +1 for appending + empty.Append(main); + TEST_ExpectTrue(empty.GetLength() == 4); + TEST_ExpectTrue(empty.GetString(P("A")) == "value of A"); + TEST_ExpectTrue(empty.GetString(P("B")) == "value of B"); + TEST_ExpectTrue(empty.GetString(P("C")) == "value of C"); + TEST_ExpectNone(empty.GetItem(P("D"))); +} + +protected static function SubTest_ProperCopies( + HashTable main, + HashTable additive, + HashTable empty) +{ + Issue("`Append()` doesn't properly append `HashTable`s."); + main.Append(additive); + TEST_ExpectTrue(main.GetLength() == 5); + TEST_ExpectTrue(main.GetString(P("A")) == "value of A"); + TEST_ExpectTrue(main.GetString(P("B")) == "value of B"); + TEST_ExpectTrue(main.GetString(P("C")) == "value of C"); + TEST_ExpectNone(main.GetItem(P("D"))); + TEST_ExpectTrue(main.GetString(P("E")) == "value of E"); + + Issue("`Append()` changes appended `HashTable`"); + TEST_ExpectTrue(additive.GetLength() == 3); + TEST_ExpectTrue(additive.GetString(P("C")) == "other value of C!"); + TEST_ExpectTrue(additive.GetString(P("D")) == "value of D"); + TEST_ExpectTrue(additive.GetString(P("E")) == "value of E"); + + Issue("`Append()` incorrectly changes reference counts of items inside" + @ "`HashTable`"); + // Ref count in main = 3, so 4 after getter + TEST_ExpectTrue(main.GetItem(P("A"))._getRefCount() == 4); + TEST_ExpectTrue(main.GetItem(P("B"))._getRefCount() == 4); + TEST_ExpectTrue(main.GetItem(P("C"))._getRefCount() == 4); + TEST_ExpectTrue(main.GetItem(P("E"))._getRefCount() == 4); +} + defaultproperties { caseGroup = "Collections" diff --git a/sources/Data/Database/Connection/DBCache.uc b/sources/Data/Database/Connection/DBCache.uc new file mode 100644 index 0000000..22ee9b8 --- /dev/null +++ b/sources/Data/Database/Connection/DBCache.uc @@ -0,0 +1,1099 @@ +/** + * Object designed to allow for locally caching database's data and tracking + * all applied changes, even if database is yet to respond/rejected them. + * This includes tracking changes even *before* database's data is available, + * storing them inside as a series to edits to apply. 
+ * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . + */ +class DBCache extends AcediaObject; + +/** + * # `DBCache` + * + * Object designed to allow for locally caching database's data and tracking + * all applied changes, even if database is yet to respond/rejected them. + * This includes tracking changes even *before* database's data is available, + * storing them inside as a series to edits to apply. + * + * ## Usage + * + * You can simply read and write JSON data with `Read(JSONPointer)` and + * `Write(JSONPointer, AcediaObject)` right after `DBCache`'s creation. + * Once real database's data has arrived, you can set it with `SetRealData()`. + * Data recorded before the `SetRealData()` call is an *approximation* and + * might not function as a real JSON value/database. Because `DBCache` doesn't + * yet know the real data in the database and even if you expect there to be + * a certain hierarchy of objects/arrays - `DBCache` cannot perform checks that + * they are there. This is why it simply lets you write any data at any path + * like "/A/B/C" in hopes that it data will be there after `SetRealData()` + * call. + * You can also "pre-create" such data by calling `Increment()` method with + * empty `Collection`s: + * + * ```unrealscript + * local DBCache cache; + * local JSONPointer dataLocation,; + * local HashTable emptyObject; + * + * cache = DBCache(_.memory.Allocate(class'DBCache')); + * emptyObject = _.collections.EmptyHashTable(); + * dataLocation = _.json.Pointer(); + * cache.Increment(dataLocation, emptyObject); + * cache.Push(P("A")); + * cache.Increment(dataLocation, emptyObject); + * cache.Push(P("B")); + * cache.Increment(dataLocation, emptyObject); + * cache.Push(P("C")); + * _.memory.Free(emptyObject); + * _.memory.Free(dataLocation); + * ``` + * + * After `SetRealData()` edits you've made prior will be reapplied to + * provided data and you'll get report on what edits were attempted and what + * have failed: + * + * ```unrealscript + * local array completedEdits; + * // ... + * completedEdits = localCache.SetRealData(data); + * for (i = 0; i < completedEdits.length; i += 1) + * { + * if (completedEdits[i].successful) { + * // Do something joyful! + * } + * else { + * // Wail in your misery! + * } + * _.memory.Free(completedEdits[i].location); + * _.memory.Free(completedEdits[i].data); + * } + * ``` + * + * One more example of appending arrays using "-" JSON pointer component. 
+ * + * ```unrealscript + * local DBCache cache; + * local JSONPointer newDataLocation, arrayLocation; + * local Text data; + * + * cache = DBCache(_.memory.Allocate(class'DBCache')); + * // "-" in JSON pointer allows us to append to array from the end + * newDataLocation = _.json.Pointer_S("/array/-"); + * data = _.text.FromString("Just new data!"); + * cache.Write(newDataLocation, data); + * + * // Now add database's data: supposing `dbData` was a following JSON object: + * // {"array": [1, 3, true], "tag": "db data!"} + * cache.SetRealData(dbData); + * + * // Now read changes back:t + * arrayLocation = _.json.Pointer_S("/array"); + * cache.ReadData(arrayLocation); + * // ^ returns `ArrayList` with following contents: + * [1, 3, true, "Just new data!"] + * ``` + * + * ## Implementation + * + * Cache can be in two distinct states: before (`cachedData` is `false`) + * and after (`cachedData` is `true`) obtaining database's actual data. + * In the second state it simply stores `AcediaObject` that represents stored + * JSON value and applies all changes to it directly / reads from it directly. + * In case database's data wasn't yet obtained - stores all valid `Write()` + * requests as an array of edits `pendingEdits`. Any `Read()` causes us to go + * through that array until we: + * + * 1. Find an edit that could've written a data user; + * 2. Obtain necessary data from that edit (we might want some folded + * sub-object); + * 3. Reapply all later edits to that data and return it. + * + * We also use similar process when adding new edits during `Write()`: + * if we know for a fact that at "/a/b/c" is a non-container + * (like a JSON string or number), then we simply reject any writes to its + * sub-data like "/a/b/c/d", since it is impossible to write anything inside. + * This also applies to the JSON arrays if we want to write into them using + * non-numeric keys. + * Additionally, for the sake of efficiency, `DBCache` erases old edits in + * case their data gets completely overwritten by new ones: if we first write + * something inside "/array/1" and then rewrite the whole "/array" - we no + * longer need to store the first edit for anything. + * + * ## Remarks + * + * Before `SetRealData()` is called, the collection inside `DBCache` is mostly + * faked. In most practical cases it shouldn't noticeable and the most notable + * issue one can stumble on is that `DBCache` allows to write data at paths, it + * is not sure even exist, leading to weird behavior: + * + * ```unrealscript + * local DBCache cache; + * local JSONPointer newDataLocation, objectLocation; + * local Text data; + * + * cache = DBCache(_.memory.Allocate(class'DBCache')); + * // "-" in JSON pointer allows us to append to array from the end + * newDataLocation = _.json.Pointer_S("/subObject/field"); + * data = _.text.FromString("Just new data!"); + * cache.Write(newDataLocation, data); + * + * // Without adding database's data with `SetRealData()` we get: + * objectLocation = _.json.Pointer_S("/subObject"); + * // returns `none`, since we have no info about data at that path + * cache.ReadData(objectLocation); // none + * // But still remembers that this is "Just new data!" + * cache.ReadData(newDataLocation); // "Just new data!" 
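+ *
+ * // A possible follow-up (sketch): assuming `dbData` holds the database's
+ * // real data, here the JSON object {"subObject": {}}, setting it replays
+ * // the pending write onto it, after which "/subObject" also reads back
+ * // as {"field": "Just new data!"}:
+ * cache.SetRealData(dbData);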
+ * ``` + */ + +enum DBCacheEditType +{ + DBCET_Write, + DBCET_Increment, + DBCET_Remove +}; + +// Represents a single edit made as a result of `Write()` call +struct PendingEdit +{ + var public DBCacheEditType type; + var public JSONPointer location; + var public AcediaObject data; + var public bool successful; +}; +// All valid edits made so far (minus impossible and overwritten ones) in +// order they were made: the lower index, the older the edit. +var private array pendingEdits; + +// Was data already cached? +// We cannot simply use `cachedData == none`, since `none` is a valid value. +var private bool isDataCached; +// Data, obtained from the database +var private AcediaObject cachedData; + +protected function Finalizer() +{ + local int i; + + for (i = 0; i < pendingEdits.length; i += 1) { + FreePendingWrite(pendingEdits[i]); + } + pendingEdits.length = 0; + _.memory.Free(cachedData); + cachedData = none; + isDataCached = false; +} + +/** + * Reads data from `DBCache` stored at the pointer `location`. + * If no data is recorded at `location`, returns `none`. + * + * NOTE: If the real database's data wasn't yet set with `SetRealData()`, + * then this method can return `none` for path like "/a/b", even if value + * "/a/b/c" was already set. This is because `DBCache` doesn't try to guess + * types of containers on the way to the recorded data: if 'b' were to + * be numeric - then we'd have no idea whether it is an array of an object. + * + * @param location Location inside `DBCache`'s stored data, from which to + * read data of interest. + * @return Data stored at location given by `location`, `none` if nothing is + * stored there. + */ +public final function AcediaObject Read(JSONPointer location) +{ + local Collection cachedCollection; + + if (location == none) { + return none; + } + if (!isDataCached) { + return ReadPending(location); + } + if (location.IsEmpty()) { + return _.json.Copy(cachedData); + } + // For non-empty pointers, `cachedCollection` must be a `Collection` + cachedCollection = Collection(cachedData); + if (cachedCollection != none) { + return cachedCollection.GetItemByJSON(location); + } + return none; +} + +/** + * Writes data into `DBCache` at the given location. + * + * This method can work differently depending on whether `SetRealData()` call + * was already made: + * + * 1. Before the `SetRealData()` call it basically knows nothing about + * object inside database (the real data) and will freely write at any + * location as long as that operation won't contradict previous edits + * (it will attempt to recognize and prevent removing sub-values inside + * null, booleans, strings and numbers or non-numeric keys into + * arrays). + * 2. After the `SetRealData()` call it already knows what data was stored + * inside before edits were made and would only allow to write data at + * "/path/to/value" if "/path/to" (i.e. path without its last + * component) corresponds to an appropriate collection (JSON object or + * JSON array if last component is numeric or "-"). + * + * @param location Location into which to write new data. + * @param data Data to write, expected to consist only of + * the JSON-compatible types. It will be copied with `_.json.Copy()`, + * so the reference won't be kept. + * @return `true` if write was successful and `false` otherwise. 
Note that + * operations made before `SetRealData()` can be reported as successful, + * but then rejected after the real data is set if they're incompatible + * with its structure (@see `SetRealData()` for more information). + */ +public final function bool Write(JSONPointer location, AcediaObject data) +{ + local Collection cachedCollection; + + if (location == none) { + return false; + } + if (!isDataCached) { + return AddPendingEdit(location, data, DBCET_Write); + } + if (location.IsEmpty()) + { + _.memory.Free(cachedData); + cachedData = _.json.Copy(data); + return true; + } + cachedCollection = Collection(cachedData); + // At this point `EditJSONCollection()`'s contract of + // `cachedCollection != none` and `location` isn't `none` or empty is + // satisfied. + if (cachedCollection != none) + { + return EditJSONCollection( + cachedCollection, + location, + data, + DBCET_Write); + } + return false; +} + +/** + * Removes data from `DBCache` at the given location. + * + * This method can work differently depending on whether `SetRealData()` call + * was already made: + * + * 1. Before the `SetRealData()` call it basically knows nothing about + * object inside database (the real data) and will freely perform + * removal at any location as long as that operation won't obviously + * contradict previous edits (it will attempt to recognize and prevent + * removing sub-values inside null, booleans, strings and numbers or + * non-numeric keys into arrays). + * 2. After the `SetRealData()` call it already knows what data was stored + * inside before edits were made and would only allow to write data at + * "/path/to/value" if "/path/to" (i.e. path without its last + * component) corresponds to an appropriate collection (JSON object or + * JSON array if last component is numeric or "-"). + * + * @param location Location from which to remove all data. + * @return `true` if removal was successful and `false` otherwise. Note that + * operations made before `SetRealData()` can be reported as successful, + * but then rejected after the real data is set if they're incompatible + * with its structure (@see `SetRealData()` for more information). + */ +public final function bool Remove(JSONPointer location) +{ + local Collection cachedCollection; + + if (location == none) { + return false; + } + if (!isDataCached) { + return AddPendingEdit(location, none, DBCET_Remove); + } + if (location.IsEmpty()) + { + _.memory.Free(cachedData); + cachedData = none; + return true; + } + cachedCollection = Collection(cachedData); + // At this point `EditJSONCollection()`'s contract of + // `cachedCollection != none` and `location` isn't `none` or empty is + // satisfied. + if (cachedCollection != none) + { + return EditJSONCollection( + cachedCollection, + location, + none, + DBCET_Remove); + } + return false; +} + +/** + * Increments data inside `DBCache` at the given location. + * + * @see `_.json.Increment()`. + * + * This method can work differently depending on whether `SetRealData()` call + * was already made: + * + * 1. Before the `SetRealData()` call it basically knows nothing about + * object inside database (the real data) and will freely increment at + * any location as long as that operation won't contradict previous + * edits (it will attempt to recognize and prevent removing sub-values + * inside null, booleans, strings and numbers or non-numeric keys into + * arrays). + * 2. 
After the `SetRealData()` call it already knows what data was stored + * inside before edits were made and would only allow to incrementing + * data at "/path/to/value" if "/path/to" (i.e. path without its last + * component) corresponds to an appropriate collection (JSON object or + * JSON array if last component is numeric or "-"). + * + * @param location Location of the value to increment with new, given data. + * @param data Data to increment with, expected to consist only of + * the JSON-compatible types. It will be copied with `_.json.Copy()`, + * so the reference won't be kept. + * @return `true` if increment was successful and `false` otherwise. Note that + * operations made before `SetRealData()` can be reported as successful, + * but then rejected after the real data is set if they're incompatible + * with its structure (@see `SetRealData()` for more information). + */ +public final function bool Increment(JSONPointer location, AcediaObject data) +{ + local AcediaObject incrementedRoot; + local Collection cachedCollection; + + if (location == none) { + return false; + } + if (!isDataCached) { + return AddPendingEdit(location, data, DBCET_Increment); + } + cachedCollection = Collection(cachedData); + if (cachedCollection != none) { + return EditJSONCollection( + cachedCollection, + location, + data, + DBCET_Increment); + } + else if (location.IsEmpty()) + { + incrementedRoot = _.json.increment(cachedData, data); + _.memory.Free(cachedData); + cachedData = incrementedRoot; + return true; + } + return false; +} +/*INC 2 +Cache inc 1 {} /test True +Cache inc 2 +Cache inc 3 +Cache inc 4 +INC 5 +WriteDataByJSON #1 +WriteDataByJSON #2 */ +/** + * Checks whether `SetRealData()` was called. + * + * @return `true` if `SetRealData()` was called and `DBCache` is in + * second mode, working on the given, cached data instead of the edits. + * `false` otherwise. + */ +public final function bool IsRealDataSet() +{ + return isDataCached; +} + +/** + * Sets real data that `DBCache` must use as basis for all of its changes. + * + * Any valid changes (for which `Write()` has previously returned `true` and + * which weren't overwritten by later changes) will be reapplied to given + * object and whether applying each edit ended in success of failure will be + * reported in the returned value. + * + * Can only be called once, all subsequent call will do nothing and will return + * empty array. + * + * @param realData Data to use as basis, expected to consist only of + * the JSON-compatible types. It will be copied with `_.json.Copy()`, + * so the reference won't be kept. + * @return All valid edits made so far (minus impossible and overwritten ones) + * in order they were made: the lower index, the older the edit. Includes + * a boolean flag that indicates whether each particular edit was + * successfully applied to given data. + */ +public final function array SetRealData(AcediaObject realData) +{ + local int i; + local Collection cachedCollection; + local array pendingEditsCopy; + + if (isDataCached) { + return pendingEdits; + } + cachedData = _.json.Copy(realData); + for (i = 0; i < pendingEdits.length; i += 1) + { + cachedCollection = Collection(cachedData); + if ( pendingEdits[i].location.IsEmpty() + && pendingEdits[i].type != DBCET_Increment) + { + // Generally `pendingEdits[i].location.IsEmpty()` should be `true` + // for non-incrementing operations only for one index, since all + // edits would get overwritten be overwritten by the newest one, + // but let's do these changes just in case. 
+ _.memory.Free(cachedData); + if (pendingEdits[i].type == DBCET_Write) + { + pendingEdits[i].successful = true; + cachedData = _.json.Copy(pendingEdits[i].data); + } + else // (pendingEdits[i].type == DBCET_Remove) + { + pendingEdits[i].successful = (cachedData != none); + cachedData = none; + } + } + else if (cachedCollection != none) + { + // Any other edits affect sub-objects and can, therefore, only be + // applied to `Collection`s (JSON objects and arrays). + pendingEdits[i].successful = EditJSONCollection( + cachedCollection, + pendingEdits[i].location, + pendingEdits[i].data, + pendingEdits[i].type); + } + } + pendingEditsCopy = pendingEdits; + pendingEdits.length = 0; + isDataCached = true; + return pendingEditsCopy; +} + +// For reading data when before the `SetRealData()` call. +private final function AcediaObject ReadPending(JSONPointer location) +{ + local int nextEditIndex; + local int newestOverridingEdit; + + if (location == none) { + return none; + } + // Go from the newest to the latest edit and find newest edit that + // *completely overwrites* data at `location`. + // This can be any newest pointer that serves as prefix to `location` + // that *writes* or *removes* data. + // If there are only *append* edits, we need to take the oldest one, + // since it is possible for several appending edits to stuck on top of each + // other. + newestOverridingEdit = -1; + nextEditIndex = pendingEdits.length - 1; + while (nextEditIndex >= 0) + { + + if (location.StartsWith(pendingEdits[nextEditIndex].location)) + { + newestOverridingEdit = nextEditIndex; + if (pendingEdits[nextEditIndex].type != DBCET_Increment) { + break; + } + } + nextEditIndex -= 1; + } + if (newestOverridingEdit >= 0) { + return ReconstructFromEdit(location, newestOverridingEdit); + } + return none; +} + +// Takes data from the given edit from `pendingEdits` as a basis and +// reapplies newer applicable edits to it. +// Assumes `location` is not `none`. +// Assumes `pendingEdits[editIndex].location` is prefix of `location`. +private final function AcediaObject ReconstructFromEdit( + JSONPointer location, + int editIndex) +{ + local int startIndex; + local AcediaObject result; + local Collection outerCollection; + + outerCollection = Collection(pendingEdits[editIndex].data); + if (location.GetLength() == pendingEdits[editIndex].location.GetLength()) { + // In case pending edit was made at the exactly `location`, simply copy + // its data, since `location` is pointing right at it. + result = _.json.Copy(pendingEdits[editIndex].data); + } + else if (outerCollection != none) + { + // Otherwise `location` is pointing deeper than + // `pendingEdits[editIndex].location` and we need to return + // a sub-object. This means that `data` has to be a `Collection`. + // First find that `Collection` (stored inside `outerCollection`), + // pointed by `location` inside `pendingEdits[editIndex].data` + // (with removed `pendingEdits[editIndex].location` prefix). + startIndex = pendingEdits[editIndex].location.GetLength(); + result = ApplyPointer( + outerCollection, + location, + startIndex, + location.GetLength()); + // We can safely release our rights to keep `result` reference and + // still use it, since it is still stored `outerCollection` and won't + // get deallocated. + _.memory.Free(result); + // `startIndex` is an `out` variable that records how far + // `ApplyPointer()` was able to travel along `location`. + // We are only successful if we reached the end. 
+ if (startIndex == location.GetLength()) { + result = _.json.Copy(result); + } + else { + result = none; + } + } + ApplyCorrectingWrites(result, location, editIndex + 1); + return result; +} + +// Attempts to apply all sufficiently new (at least with index +// `startIndex`) edits to the `target`. +// Assumes `locationToCorrect` in not `none`. +private final function ApplyCorrectingWrites( + out AcediaObject target, + JSONPointer locationToCorrect, + int startIndex) +{ + local int i; + local Collection targetAsCollection; + local JSONPointer subLocation, nextLocation;; + + if (target == none) { + return; + } + for (i = startIndex; i < pendingEdits.length; i += 1) + { + nextLocation = pendingEdits[i].location; + if (!nextLocation.StartsWith(locationToCorrect)) { + continue; + } + targetAsCollection = Collection(target); + // `Collection`s (JSON arrays or objects) have to be handled + // differently, since we might need to change values stored deep within + // the `Collection`, while for other variables we can change `target` + // directly. + if (targetAsCollection != none) + { + subLocation = nextLocation.Copy(locationToCorrect.GetLength()); + EditJSONCollection( + Collection(target), + subLocation, + pendingEdits[i].data, + pendingEdits[i].type); + subLocation.FreeSelf(); + } + else if (nextLocation.GetLength() == locationToCorrect.GetLength()) + { + EditJSONSimpleValue( + target, + pendingEdits[i].data, + pendingEdits[i].type); + } + } +} + +// Applies operation of type `editType` to the given object. +// Assumes that `target` isn't `none`. +// Makes a copy of the `value`. +private final function bool EditJSONSimpleValue( + out AcediaObject target, + AcediaObject value, + DBCacheEditType editType) +{ + local AcediaObject newTarget; + + if (editType == DBCET_Write) { + newTarget = _.json.Copy(value); + } + else if (editType == DBCET_Remove) { + newTarget = none; + } + else + { + newTarget = _.json.Increment(target, value); + if (newTarget == none) { + return false; + } + } + _.memory.Free(target); + target = newTarget; +} + +// Applies operation of type `editType` to the object stored inside +// given `Collection`, given by `location`. +// Makes a copy of the `value`. +// Assumes that `location` can only be an empty pointer if `editType` is +// `DBCET_Increment`. +// Assumes that `target` isn't `none`. +private final function bool EditJSONCollection( + Collection target, + JSONPointer location, + AcediaObject value, + DBCacheEditType editType) +{ + local bool success; + local Text key; + local ArrayList arrayCollection; + local HashTable objectCollection; + local Collection innerCollection; + local JSONPointer poppedLocation; + local AcediaObject valueCopy; + + // Empty pointer is only allowed if we're incrementing; + if (location.IsEmpty()) + { + return (editType == DBCET_Increment + && IncrementCollection(target, value)); + } + // First get `Collection` that stores data, pointed by `location` + // (which is data pointed by `location` without the last segment). + // Last segment will serve as a key in that `Collection`, so also + // keep it. 
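+    // (e.g. for `location` "/users/0/name" the containing collection is
+    // looked up at "/users/0" and "name" is used as the key inside it)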
+ poppedLocation = location.Copy(); + key = poppedLocation.Pop(); + innerCollection = target.GetCollectionByJSON(poppedLocation); + // Then, depending on the collection, get the actual data + arrayCollection = ArrayList(innerCollection); + objectCollection = HashTable(innerCollection); + valueCopy = _.json.Copy(value); + if (arrayCollection != none) + { + success = EditArrayList( + arrayCollection, + key, + value, + editType, + location.PopNumeric(true)); + } + if (objectCollection != none) { + success = EditHashTable(objectCollection, key, value, editType); + } + _.memory.Free(innerCollection); + _.memory.Free(poppedLocation); + _.memory.Free(key); + _.memory.Free(valueCopy); + return success; +} + +// Assumes `collection != none` and `key != none` +// Assumes value is already copied and won't be stored anywhere else +private final function bool EditArrayList( + ArrayList collection, + Text key, + AcediaObject value, + DBCacheEditType editType, + int numericKey) +{ + local AcediaObject incrementedValue; + + // Only valid case of `numericKey < 0` is when `key` is "-", which can only + // be used with `DBCET_Write` to append to `collection` + if (numericKey < 0 && editType != DBCET_Write) { + return false; + } + if (editType == DBCET_Write) + { + if (numericKey >= 0) { + collection.SetItem(numericKey, value); + } + else if (key.IsEqual(P("-"))) { + collection.AddItem(value); + } + else { + return false; + } + } + else if (editType == DBCET_Remove) + { + if (numericKey >= collection.GetLength()) { + return false; + } + collection.RemoveIndex(numericKey); + return true; + } + else // if (editType == DBCET_Increment) + { + if (value == none) + { + if (numericKey >= collection.GetLength()) { + collection.SetItem(numericKey, none); + } + // Incrementing by `none` is a success for any reachable value + // (including a missing one, if the immediate parent is present) + return true; + } + incrementedValue = + EfficientIncrement(collection.GetItem(numericKey), value); + if (incrementedValue != none) { + collection.SetItem(numericKey, incrementedValue); + } + _.memory.Free(incrementedValue); // `none` or moved into `collection` + return (incrementedValue != none); + } + return true; +} + +// Assumes `collection != none` and `key != none` +// Assumes value is already copied and won't be stored anywhere else +private final function bool EditHashTable( + HashTable collection, + Text key, + AcediaObject value, + DBCacheEditType editType) +{ + local AcediaObject incrementedValue; + + if (editType == DBCET_Write) { + collection.SetItem(key, value); + } + else if (editType == DBCET_Remove) + { + if (!collection.HasKey(key)) { + return false; + } + collection.RemoveItem(key); + return true; + } + else // if (editType == DBCET_Increment) + { + if (value == none) + { + if (!collection.HasKey(key)) { + collection.SetItem(key, none); + } + // Incrementing by `none` is a success for any reachable value + // (including a missing one, if the immediate parent is present) + return true; + } + incrementedValue = EfficientIncrement(collection.GetItem(key), value); + if (incrementedValue != none) { + collection.SetItem(key, incrementedValue); + } + _.memory.Free(incrementedValue); // `none` or moved into `collection` + return (incrementedValue != none); + } + return true; +} + +// This method is supposed to be more efficient than +// `_.json.Increment()` because it can skip copying `valueToIncrement` +// in case it's a collection and append to it directly. +// Assumes `increment` is not `none`. 
+// Returning `none` means increment has failed (could only happen possible +// if `increment` is `none`, which is impossible) +private final function AcediaObject EfficientIncrement( + /*take*/ AcediaObject valueToIncrement, + AcediaObject increment) +{ + local AcediaObject incrementedValue; + + if (valueToIncrement == none) { + return _.json.Copy(increment); + } + // This is the "efficient part": we first try to directly append + // `increment` to `valueToIncrement`, since it can avoid unnecessary + // copying of huge collections + if ( Collection(valueToIncrement) != none + && valueToIncrement.class == increment.class) + { + // If we're inside, then we are sure that both arguments are either + // `ArrayList`s or `HashTable`s (and not `none`!) + IncrementCollection(valueToIncrement, increment); + // We reuse `valueToIncrementAsHashTable`, so simply return reference + // we took ownership of + return valueToIncrement; + } + // Since all correct `Collection` cases were handled above, we only need to + // do the normal, "inefficient" incrementing when both arguments aren't + // `Collection`s + if (Collection(valueToIncrement) == none && Collection(increment) == none) { + incrementedValue = _.json.Increment(valueToIncrement, increment); + } + // We do not reuse either `valueToIncrement`, so we should release it + _.memory.Free(valueToIncrement); + // This will be `none` in case `_.json.Increment()` wasn't called + return incrementedValue; + +} + +// Increments `valueToIncrement` (changing its value) by `increment`. +// Only does work if both arguments are the same type of `Collection`. +// Returns `true` if it actually incremented `valueToIncrement` and `false` +// otherwise. +// If arguments have different types - does nothing. +private final function bool IncrementCollection( + AcediaObject valueToIncrement, + AcediaObject increment) +{ + local ArrayList valueToIncrementAsArrayList, incrementAsArrayList; + local HashTable valueToIncrementAsHashTable, incrementAsHashTable; + + valueToIncrementAsArrayList = ArrayList(valueToIncrement); + if (valueToIncrementAsArrayList != none) + { + incrementAsArrayList = ArrayList(increment); + if (incrementAsArrayList != none) + { + valueToIncrementAsArrayList.Append(incrementAsArrayList); + return true; + } + } + valueToIncrementAsHashTable = HashTable(valueToIncrement); + if (valueToIncrementAsHashTable != none) + { + incrementAsHashTable = HashTable(increment); + if (incrementAsHashTable != none) + { + valueToIncrementAsHashTable.Append(incrementAsHashTable); + return true; + } + } + return false; +} + +// For writing data when before the `SetRealData()` call. +// Assumes `location` isn't `none`. +private final function bool AddPendingEdit( + JSONPointer location, + AcediaObject data, + DBCacheEditType type) +{ + local int i, index; + local bool isIncrementing; + local AcediaObject leafItem; + local PendingEdit newWrite; + + // We basically just want to add new edit struct into `pendingEdits` + // array, but there's three additional consideration: + // + // 1. Some edits can be decided to be *impossible*: if, at the earlier + // stage, we wrote a simple type (not JSON array or object) at + // some location "/a/b" and then try to write a sub-object at + // the longer path "/a/b/c". This action is impossible and should + // be rejected outright, to prevent reading methods from reading + // data that will obviously be rejected on real object. 
+ // NOTE: We only catch some of such cases, checking against + // the very first edit in the chain that build up data at + // `location`, since checking checking against data written by + // the other edits on top would require us to do too much work. + // 2. Some new edits can overwrite older ones: if we wrote something + // at location "/a/b/c" and then write something at "/a/b" - we can + // completely disregard and remove older edit at "/a/b/c". + // 3. Whenever we're doing incrementing edit, we want to be able to + // keep several of such edits at once (possibly on top of some + // writing edit), without them overwriting each other (as per + // previous point). There is also a special case for when we're + // writing into JSON array with pointer ending in "-" - it + // indicates adding a new element, which also makes it incrementing + // operation. + + // This variable will store whether `location` ends with "-" (writing + // operation corresponds to what was discussed in point 3) + isIncrementing = (type == DBCET_Increment) + || IsPointerAppendingToArray(location); + while (i < pendingEdits.length) + { + if (pendingEdits[i].location.StartsWith(location)) + { + // Here we're in situation described in point 2, where new edit + // will overwrite `pendingEdits[i].location`. + /*isSameLength = + (location.GetLength() == pendingEdits[i].location.GetLength());*/ + // Since `location` is prefix for `pendingEdits[i].location`, + // then it is either shorter or the same length. Same length here + // also means that these pointers are *identical*. + // We can prevent removal of the old rule only in situation of + // point 3, where new edit wants to increment an item (guaranteed + // by `isIncrementing`). + // + // NOTE: Here we make no check for whether we're writing into + // an object, which can result in us keeping several redundant + // edits. But we expect this to be a rare case and a fine trade off + // for skipping additional costly checks. + if (!isIncrementing) + { + FreePendingWrite(pendingEdits[i]); + pendingEdits.Remove(i, 1); + continue; + } + } + else if (pendingEdits[i].type == DBCET_Write + && location.StartsWith(pendingEdits[i].location)) + { + // Here we perform checks described in point 1: follow along + // the `location` pointer as far as possible and check if last + // known structure can at least in theory be explored by the rest + // of `location` after database's real data is loaded and set. + index = pendingEdits[i].location.GetLength(); + leafItem = ApplyPointer( + pendingEdits[i].data, + location, + index, + location.GetLength() - 1); + if ( Collection(leafItem) == none + || !IsKeyAcceptable(Collection(leafItem), location, index)) + { + return false; + } + } + i += 1; + } + // After all checks have passed and all older irrelevant edits were + // filtered out - add the new one. + newWrite.location = location.Copy(); + newWrite.data = _.json.Copy(data); + newWrite.type = type; + pendingEdits[pendingEdits.length] = newWrite; + return true; +} + +// Checks if `pointer`'s last component is "-", which denotes appending +// new item to the JSON array. +// Assumes `pointer != none`. +private final function bool IsPointerAppendingToArray(JSONPointer pointer) +{ + local int lastComponentIndex; + + lastComponentIndex = pointer.GetLength() - 1; + if (!pointer.IsComponentArrayApplicable(lastComponentIndex)) { + return false; + } + return (pointer.GetNumericComponent(lastComponentIndex) < 0); +} + +// Checks whether given key is acceptable for given collection. 
+// To avoid unnecessary copying, the key is specified as a component of
+// JSON pointer `path` with index `keyIndex`.
+private final function bool IsKeyAcceptable(
+    Collection  target,
+    JSONPointer path,
+    int         keyIndex)
+{
+    local ArrayList arrayCollection;
+
+    if (HashTable(target) != none)                  return true;
+    arrayCollection = ArrayList(target);
+    if (arrayCollection == none)                    return false;
+    if (keyIndex >= path.GetLength())               return true;
+    if (path.IsComponentArrayApplicable(keyIndex))  return true;
+
+    return false;
+}
+
+// Finds item inside `data` by using part of given JSON pointer as its own
+// pointer. Part is defined as pointer given by components with indices inside
+// `[from; to - 1]`.
+// `from` is an `out` argument that will return index of pointer's
+// component after the one used to obtain the return value.
+// E.g. if pointer is "/a/b/c/d" and we returned value at "/a/b", then
+// `from` will contain index `2` of the component "c".
+private final function AcediaObject ApplyPointer(
+    AcediaObject    data,
+    JSONPointer     pointer,
+    out int         from,
+    int             to)
+{
+    local int       nextNumericKey;
+    local Text      nextKey;
+    local ArrayList nextArray;
+    local HashTable nextObject;
+
+    if (from < 0 || from > to)      return none;
+    if (data == none)               return none;
+    if (pointer == none)            return none;
+    if (to > pointer.GetLength())   return none;
+
+    // At each iteration in the `while` cycle below, `data` stores some
+    // reference to the next collection to "dig in" with our JSON pointer.
+    // This collection is normally obtained by `GetItem()` method on one of
+    // the iterations and, therefore, we own a reference to it that we must
+    // release.
+    // However, on the first iteration it is the same as passed argument
+    // and so we do not own it and cannot release it yet. We take ownership of
+    // it here to hack around that issue.
+    data.NewRef();
+    while (from < to)
+    {
+        nextObject = HashTable(data);
+        nextArray = ArrayList(data);
+        // It is safe to keep using `data` (and, therefore, both `nextObject`
+        // and `nextArray`) after it is released inside the branches below,
+        // since `data` is either:
+        //  1. an argument and was added a reference before the loop;
+        //  2. or is stored in another collection and, therefore, has
+        //      another reference that way.
+        if (nextObject != none)
+        {
+            nextKey = pointer.GetComponent(from);
+            if (!nextObject.HasKey(nextKey))
+            {
+                _.memory.Free(nextKey);
+                return nextObject;
+            }
+            data.FreeSelf();
+            data = nextObject.GetItem(nextKey);
+            nextKey.FreeSelf();
+        }
+        else if (nextArray != none)
+        {
+            nextNumericKey = pointer.GetNumericComponent(from);
+            if (nextNumericKey < 0 || nextNumericKey >= nextArray.GetLength()) {
+                return nextArray;
+            }
+            data.FreeSelf();
+            data = nextArray.GetItem(nextNumericKey);
+        }
+        else {
+            // Not a collection => we cannot "go in"
+            return data;
+        }
+        from += 1;
+    }
+    return data;
+}
+
+// Proper clean up of `PendingEdit`
+private final function FreePendingWrite(PendingEdit edit)
+{
+    _.memory.Free(edit.location);
+    _.memory.Free(edit.data);
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Connection/DBConnection.uc b/sources/Data/Database/Connection/DBConnection.uc
new file mode 100644
index 0000000..3e35a18
--- /dev/null
+++ b/sources/Data/Database/Connection/DBConnection.uc
@@ -0,0 +1,791 @@
+/**
+ * Auxiliary object for simplifying working with databases.
+ * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . + */ +class DBConnection extends AcediaObject + dependson(Database) + dependson(DBCache); + +/** + * # `DBConnection` + * + * Auxiliary object for simplifying working with databases. + * `Database` class has a rather simple interface and there are several issues + * that constantly arise when trying to use it: + * + * 1. If one tries to read/write data from/to specific location in + * the database, then `JSONPointer` has to be kept in addition to + * the `Database` reference at all times; + * 2. One has to perform initial checks about whether database can even be + * connected to, if at desired location there is a proper data + * structure, etc.. If one also wants to start using data before + * database's response or after its failure, then the same work of + * duplication that data locally must be performed. + * 3. Instead of immediate operations, database operations are delayed and + * user has to handle their results asynchronously in separate methods. + * + * `DBConnection` takes care of these issues by providing you synchronous + * methods for accessing cached version of the data at the given location that + * is duplicated to the database as soon as possible (and even if its no longer + * possible in the case of a failure). + * `DBConnection` makes immediate changes on the local cache and reports + * about possible failures with database later through signals `OnEditResult()` + * (reports about success of writing operations) and `OnStateChanged()` + * (reports about state changes of connected database, including complete + * failures). + * The only reading of database's values occurs at the moment of connecting + * to it, after that all the data is read from the local cache. + * Possible `DBConnection` states include: + * + * * `DBCS_Idle` - database was created, but not yet connected; + * * `DBCS_Connecting` - `Connect()` method was successfully called, but + * its result is still unknown; + * * `DBCS_Connected` - database is connected and properly working; + * * `DBCS_Disconnected` - database was manually disconnected and now + * operates solely on the local cache. Once disconnected `DBConnection` + * cannot be reconnected - create a new one instead. + * * `DBCS_FaultyDatabase` - database is somehow faulty. Precise reason + * can be found out with `GetError()` method: + * + * * `FDE_None` - no error has yet occurred; + * * `FDE_CannotReadRootData` - root data couldn't be read from + * the database, most likely because of the invalid `JSONPointer` + * for the root value; + * * `FDE_UnexpectedRootData` - root data was read from the database, + * but has an unexpected format. 
`DBConnection` expects either + * JSON object or array (can be specified which one) and this error + * occurs if its not found at specified database's location; + * * `FDE_Unknown` - database returned `DBR_InvalidDatabase` result + * for one of the queries. This is likely to happen when database + * is damaged. More precise details depend on the implementation, + * but result boils down to database being unusable. + * + * ## Usage + * + * Usage is straightforward: + * + * 1. Initialize with appropriate database by calling `Initialize()`; + * 2. Start connecting to it by calling `Connect()`; + * 3. Use `ReadDataByJSON()`/`WriteDataByJSON()` to read/write into + * the connected database; + * + * You can use it transparently even if database connection fails, but if you + * need to handle such failure - connect to the `OnStateChanged()` signal for + * tracking `DBCS_FaultyDatabase` state and to `OnEditResult()` for tracking + * success of writing operations. + * + * ## Implementation + * + * The brunt of work is done by `DBCache` and most of the logic in this + * class is for tracking state of the connection to the database and then + * reporting these changes through its own signals. + * The most notable hidden functionality is tracking requests by ID - + * `DBConnection` makes each request with unique ID and then stores them inside + * `requestIDs` (and in case of write requests - along with corresponding + * `JSONPointer` inside `queuedPointers`). This is necessary because: + * + * 1. Even if `DBConnection` gets reallocated - as far as UnrealScript is + * concerned it is still the same object, so the responses we no longer + * care about will still arrive. Keeping track of the IDs that interest + * us inside `requestIDs` allows us to filter out responses we no + * longer care about; + * 2. Tracking corresponding (to the IDs) `queuedPointers` also allow us + * to know responses to which writing requests we've received. + * + * ## Remarks + * + * Currently `DBConnection` doesn't support important feature of *incrementing* + * data that allows several sources to safely change the same value + * asynchronously. We're skipping on it right now to save time as its not + * really currently needed, however it will be added in the future. + */ + +enum DBConnectionState +{ + // `DBConnection` was created, but didn't yet attempt to connect + // to database + DBCS_Idle, + // `DBConnection` is currently connecting + DBCS_Connecting, + // `DBConnection` has already connected without errors + DBCS_Connected, + // `DBConnection` was manually disconnected + DBCS_Disconnected, + // `DBConnection` was disconnected because of the database error, + // @see `FaultyDatabaseError` for more. + DBCS_FaultyDatabase +}; +// Current connection state +var private DBConnectionState currentState; + +enum FaultyDatabaseError +{ + // No error has occurred yet + FDE_None, + // Root data isn't available + FDE_CannotReadRootData, + // Root data has incorrect format + FDE_UnexpectedRootData, + // Some internal error in database has occurred + FDE_Unknown +}; +// Reason for why current state is in `DBCS_FaultyDatabase` state; +// `FDE_None` if `DBConnection` is in any other state. +var private FaultyDatabaseError dbFailureReason; +// Keeps track whether root value read from the database was of the correct +// type. Only relevant after database tried connecting (i.e. it is in states +// `DBCS_Connected`, `DBCS_Disconnected` or `DBCS_FaultyDatabase`). 
+// This variable helps us determine whether error should be
+// `FDE_CannotReadRootData` or `FDE_UnexpectedRootData`.
+var private bool rootIsOfExpectedType;
+
+// `Database` + `JSONPointer` combo that points at the data we want to
+// connect to
+var private Database dbInstance;
+var private JSONPointer rootPointer;
+// Local, cached version of that data
+var private DBCache localCache;
+
+// This is basically an array of (`int`, `JSONPointer`) pairs for tracking
+// database requests of interest
+var private array<int> requestIDs;
+var private array<JSONPointer> queuedPointers;
+// Next usable ID. `DBConnection` is expected to always use unique IDs.
+var private int nextRequestID;
+
+
+var private DBConnection_StateChanged_Signal onStateChangedSignal;
+var private DBConnection_EditResult_Signal onEditResultSignal;
+
+var private LoggerAPI.Definition errDoubleInitialization;
+
+/**
+ *  Signal that will be emitted whenever `DBConnection` changes state.
+ *  List of the available states:
+ *
+ *      * `DBCS_Idle` - database was created, but not yet connected;
+ *      * `DBCS_Connecting` - `Connect()` method was successfully called, but
+ *          its result is still unknown;
+ *      * `DBCS_Connected` - database is connected and properly working;
+ *      * `DBCS_Disconnected` - database was manually disconnected and now
+ *          operates solely on the local cache. Once disconnected `DBConnection`
+ *          cannot be reconnected - create a new one instead.
+ *      * `DBCS_FaultyDatabase` - database is somehow faulty. Precise reason
+ *          can be found out with `GetError()` method.
+ *
+ *  This signal *is not* emitted when `DBConnection` is deallocated.
+ *
+ *  [Signature]
+ *  void (
+ *      DBConnection instance,
+ *      DBConnectionState oldState,
+ *      DBConnectionState newState)
+ *
+ *  @param  instance    Instance of the `DBConnection` that has changed state.
+ *  @param  oldState    State it was previously in.
+ *  @param  newState    New state.
+ */
+/* SIGNAL */
+public final function DBConnection_StateChanged_Slot OnStateChanged(
+    AcediaObject receiver)
+{
+    return DBConnection_StateChanged_Slot(onStateChangedSignal
+        .NewSlot(receiver));
+}
+
+/**
+ *  Signal that will be emitted whenever `DBConnection` receives a response
+ *  from the connected database about the success of a writing operation.
+ *
+ *  Responses to old requests can still be received even if database got
+ *  disconnected.
+ *
+ *  Any emissions of this signal when the database's state is `DBCS_Connecting`
+ *  correspond to reapplying edits made before the connection was established.
+ *
+ *  [Signature]
+ *  void (JSONPointer editLocation, bool isSuccessful)
+ *
+ *  @param  editLocation    Location of the writing operation this is
+ *      a response to.
+ *  @param  isSuccessful    Whether writing operation ended in success.
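+ *
+ *  [Usage]
+ *  A minimal, illustrative sketch of listening to this signal; the receiver
+ *  (`self` below) and the handler's name are placeholders, not part of
+ *  the API:
+ *
+ *  ```
+ *  connection.OnEditResult(self).connect = HandleEditResult;
+ *
+ *  private function HandleEditResult(
+ *      JSONPointer editLocation,
+ *      bool isSuccessful)
+ *  {
+ *      if (!isSuccessful) {
+ *          // React to the database rejecting this particular edit
+ *      }
+ *  }
+ *  ```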
+ */ +/* SIGNAL */ +public final function DBConnection_EditResult_Slot OnEditResult( + AcediaObject receiver) +{ + return DBConnection_EditResult_Slot(onEditResultSignal.NewSlot(receiver)); +} + +protected function Constructor() +{ + localCache = DBCache(_.memory.Allocate(class'DBCache')); + onStateChangedSignal = DBConnection_StateChanged_Signal( + _.memory.Allocate(class'DBConnection_StateChanged_Signal')); + onEditResultSignal = DBConnection_EditResult_Signal( + _.memory.Allocate(class'DBConnection_EditResult_Signal')); +} + +protected function Finalizer() +{ + rootIsOfExpectedType = false; + currentState = DBCS_Idle; + _.memory.Free(dbInstance); + _.memory.Free(rootPointer); + _.memory.Free(localCache); + dbInstance = none; + rootPointer = none; + localCache = none; + _.memory.FreeMany(queuedPointers); + queuedPointers.length = 0; + requestIDs.length = 0; + // Free signals + _.memory.Free(onStateChangedSignal); + _.memory.Free(onEditResultSignal); + onStateChangedSignal = none; + onEditResultSignal = none; +} + +/** + * Initializes `DBConnection` with database and location to which it must be + * connected. + * + * For the initialization to be successful `DBConnection` must not yet be + * initialized and `initDatabase` be not `none`. + * + * To check whether caller `DBConnection` is initialized + * @see `IsInitialized()`. + * + * @param initDatabase Database with data we want to connect to. + * @param initRootPointer Location of said data in the given database. + * If `none` is specified, uses root object of the database. + * @return `true` if initialization was successful and `false` otherwise. + */ +public final function bool Initialize( + Database initDatabase, + optional JSONPointer initRootPointer) +{ + if (IsInitialized()) return false; + if (initDatabase == none) return false; + if (!initDatabase.IsAllocated()) return false; + + dbInstance = initDatabase; + dbInstance.NewRef(); + if (initRootPointer != none) { + rootPointer = initRootPointer.Copy(); + } + else { + rootPointer = _.json.Pointer(); + } + return true; +} + +/** + * Reads data from the `DBConnection` at the location defined by the given + * `JSONPointer`. + * + * If data was initialized with non-empty location for the root data, then + * actual returned data's location in the database is defined by appending + * given `pointer` to that root pointer. + * + * Data is actually always read from the local cache and, therefore, we can + * read data we've written via `DBConnection` even without actually connecting + * to the database. + * + * @param pointer Location from which to read the data. + * @return Data recorded for the given `JSONPointer`. `none` if it is missing. + */ +public final function AcediaObject ReadDataByJSON(JSONPointer pointer) +{ + return localCache.Read(pointer); +} + +/** + * Writes given data into the `DBConnection` at the location defined by + * the given `JSONPointer`. + * + * If data was initialized with non-empty location for the root data, then + * actual location for writing data in the database is defined by appending + * given `pointer` to that root pointer. + * + * Data is actually always also written into the local cache, even when + * there is no connection to the database. Once connection is made - all valid + * changes will be duplicated into it. + * Success of failure of actually making changes into the database can be + * tracked with `OnEditResult()` signal. + * + * This operation also returns immediate indication of whether it has + * failed *locally*. 
This can happen when trying to perform operation + * impossible for the local cache. For example, we cannot write any data at + * location "/a/b/c" for the JSON object "{"a":45.6}". + * If operation ended in failure locally, then change to database won't + * even be attempted. + * + * @param pointer Location into which to write the data. + * @param data Data to write into the connection. + * @return `true` on success and `false` on failure. `true` is required for + * the writing database request to be made. + */ +public final function bool WriteDataByJSON( + JSONPointer pointer, + AcediaObject data) +{ + if (pointer == none) { + return false; + } + if (localCache.Write(pointer, data)) + { + ModifyDataInDatabase(pointer, data, false); + return true; + } + return false; +} + +/** + * Increments given data into the `DBConnection` at the location defined by + * the given `JSONPointer`. + * + * If data was initialized with non-empty location for the root data, then + * actual location for incrementing data in the database is defined by + * appending given `pointer` to that root pointer. + * + * Data is actually always also incremented into the local cache, even when + * there is no connection to the database. Once connection is made - all valid + * changes will be duplicated into it. + * Success of failure of actually making changes into the database can be + * tracked with `OnEditResult()` signal. + * + * This operation also returns immediate indication of whether it has + * failed *locally*. This can happen when trying to perform operation + * impossible for the local cache. For example, we cannot increment any data at + * location "/a/b/c" for the JSON object "{"a":45.6}". + * If operation ended in failure locally, then change to database won't + * even be attempted. + * + * @param pointer Location at which to increment the data. + * @param data Data with which to increment value inside the connection. + * @return `true` on success and `false` on failure. `true` is required for + * the incrementing database request to be made. + */ +public final function bool IncrementDataByJSON( + JSONPointer pointer, + AcediaObject data) +{ + if (pointer == none) { + return false; + } + if (localCache.Increment(pointer, data)) + { + ModifyDataInDatabase(pointer, data, true); + return true; + } + return false; +} + +/** + * Removes data from the `DBConnection` at the location defined by the given + * `JSONPointer`. + * + * If data was initialized with non-empty location for the root data, then + * actual location at which to remove data in the database is defined by + * appending given `pointer` to that root pointer. + * + * Data is actually always also removed from the local cache, even when + * there is no connection to the database. Once connection is made - all valid + * changes will be duplicated into it. + * Success of failure of actually making changes into the database can be + * tracked with `OnEditResult()` signal. + * + * This operation also returns immediate indication of whether it has + * failed *locally*. + * If operation ended in failure locally, then change to database won't + * even be attempted. + * + * @param pointer Location at which to remove data. + * @return `true` on success and `false` on failure. `true` is required for + * the removal database request to be made. 
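+ *
+ *  [Usage]
+ *  An illustrative sketch of editing through the local cache; `connection`,
+ *  the pointer path and the stored value below are only examples, not part
+ *  of the API:
+ *
+ *  ```
+ *  local JSONPointer location;
+ *  local AcediaObject newValue, storedValue;
+ *
+ *  location = _.json.Pointer(P("/admins/count"));
+ *  newValue = _.box.int(2);
+ *  connection.WriteDataByJSON(location, newValue);
+ *  storedValue = connection.ReadDataByJSON(location); // read from the cache
+ *  connection.RemoveDataByJSON(location);
+ *  _.memory.Free(location);
+ *  _.memory.Free(newValue);
+ *  _.memory.Free(storedValue);
+ *  ```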
+ */ +public final function bool RemoveDataByJSON(JSONPointer pointer) +{ + if (pointer == none) { + return false; + } + if (localCache.Remove(pointer)) + { + RemoveDataInDatabase(pointer); + return true; + } + return false; +} + +private final function ModifyDataInDatabase( + JSONPointer pointer, + AcediaObject data, + bool increment) +{ + local JSONPointer dataPointer; + + if (currentState != DBCS_Connected) { + return; + } + dataPointer = rootPointer.Copy(); + dataPointer.Append(pointer); + // `dataPointer` is consumed by `RegisterNextRequestID()` method + if (increment) + { + dbInstance + .IncrementData( + dataPointer, + data, + RegisterNextRequestID(dataPointer)) + .connect = EditDataHandler; + } + else + { + dbInstance + .WriteData(dataPointer, data, RegisterNextRequestID(dataPointer)) + .connect = EditDataHandler; + } +} + +private final function RemoveDataInDatabase(JSONPointer pointer) +{ + local JSONPointer dataPointer; + + if (currentState != DBCS_Connected) { + return; + } + dataPointer = rootPointer.Copy(); + dataPointer.Append(pointer); + // `dataPointer` is consumed by `RegisterNextRequestID()` method + dbInstance + .RemoveData(dataPointer, RegisterNextRequestID(dataPointer)) + .connect = EditDataHandler; +} + +/** + * Checks caller `DBConnection` was successfully initialized. + * + * @return `true` if caller `DBConnection` was initialized and `false` + * otherwise. + */ +public final function bool IsInitialized() +{ + return (dbInstance != none); +} + +/** + * Returns current state of the connection of `DBConnection` to the database + * it was initialized with. + * + * @see `OnStateChanged()` for more information about connection states. + * @return Current connection state. + */ +public final function DBConnectionState GetConnectionState() +{ + return currentState; +} + +/** + * Checks whether caller `DBConnection` is currently connected without errors + * to the database it was initialized with. + * + * @return `true` if caller `DBConnection` is connected to the database and + * `false` otherwise. + */ +public final function bool IsConnected() +{ + return (currentState == DBCS_Connected); +} + +/** + * Checks whether an error has occurred with connection to the database. + * + * `DBConnection` can get disconnected from database manually and without + * any errors, so, if you simply want to check whether connection exists, + * @see `IsConnected()` or @see `GetConnectionState()`. + * To obtain more detailed information @see `GetError()`. + * + * @return `true` if there were no error thus far and `false` otherwise. + */ +public final function bool IsOk() +{ + return (dbFailureReason == FDE_None); +} + +/** + * Returns error that has occurred during connection. + * + * @return Error that has occurred during connection to the database, + * `FDE_None` if there was no errors. + */ +public final function FaultyDatabaseError GetError() +{ + return dbFailureReason; +} + +private final function ChangeState(DBConnectionState newState) +{ + local DBConnectionState oldState; + + oldState = currentState; + currentState = newState; + onStateChangedSignal.Emit(self, oldState, newState); +} + +/** + * Attempts connection to the database caller `DBConnection` was initialized + * with. Result isn't immediate and can be tracked with `OnStateChanged()` + * signal. + * + * Connection checks whether data by the initialization address can be read and + * has proper type (by default JSON object, but JSON array can be used + * instead). 
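+ *
+ *  [Usage]
+ *  A rough sketch of the full connection flow; it assumes that `_.db` refers
+ *  to the `DBAPI` instance and that the link below is defined in your
+ *  database config (both the link and the handler name are only examples):
+ *
+ *  ```
+ *  local DBConnection connection;
+ *
+ *  connection = _.db.OpenConnection(P("[local]MyDB:/stats"));
+ *  if (connection != none)
+ *  {
+ *      connection.OnStateChanged(self).connect = HandleStateChange;
+ *      connection.Connect();
+ *  }
+ *  ```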
+ * + * Whether connection is successfully established isn't known at the moment + * this function returns. User `OnStateChanged()` to track that. + * + * @param expectArray Set this to `true` if the expected root value is + * JSON array. + */ +public final function Connect(optional bool expectArray) +{ + local Collection incrementObject; + + if (!IsInitialized()) return; + if (currentState != DBCS_Idle) return; + + if (expectArray) { + incrementObject = _.collections.EmptyArrayList(); + } + else { + incrementObject = _.collections.EmptyHashTable(); + } + dbInstance.IncrementData( + rootPointer, + incrementObject, + RegisterNextRequestID()).connect = IncrementCheckHandler; + incrementObject.FreeSelf(); + // Copy of the `rootPointer` is consumed by `RegisterNextRequestID()` + // method + dbInstance.ReadData(rootPointer,, RegisterNextRequestID(rootPointer.Copy())) + .connect = InitialLoadingHandler; + ChangeState(DBCS_Connecting); +} + +/** + * Disconnects `DBConnection` from its database, preventing its further + * updates. + * + * Database can only be disconnected if connection was at least initialized + * (state isn't `DBCS_Idle`) and no error has yet occurred (state isn't + * `DBCS_FaultyDatabase`). + * + * @return `true` if `DBConnection` was disconnected from the database and + * `false` otherwise (including if it already was disconnected). + */ +public final function bool Disconnect() +{ + if ( currentState != DBCS_FaultyDatabase + && currentState != DBCS_Idle + && currentState != DBCS_Disconnected) + { + ChangeState(DBCS_Disconnected); + return true; + } + return false; +} + +private final function int RegisterNextRequestID( + optional /*take*/ JSONPointer relativePointer) +{ + if (relativePointer != none) { + queuedPointers[queuedPointers.length] = relativePointer; + } + else { + queuedPointers[queuedPointers.length] = _.json.Pointer(); + } + requestIDs[requestIDs.length] = nextRequestID; + nextRequestID += 1; + return (nextRequestID - 1); +} + +private final function JSONPointer FetchRequestPointer(int requestID) +{ + local int i; + local JSONPointer result; + + while (i < requestIDs.length) + { + if (requestIDs[i] < requestID) + { + // We receive all requests in order, so if `requestID` is higher + // than IDs of some other requests - it means that they are older, + // lost requests + _.memory.Free(queuedPointers[i]); + queuedPointers.Remove(i, 1); + requestIDs.Remove(i, 1); + } + if (requestIDs[i] == requestID) + { + result = queuedPointers[i]; + queuedPointers.Remove(i, 1); + requestIDs.Remove(i, 1); + return result; + } + i += 1; + } + return none; +} + +private final function bool FetchIfRequestStillValid(int requestID) +{ + local JSONPointer result; + + result = FetchRequestPointer(requestID); + if (result != none) + { + _.memory.Free(result); + return true; + } + return false; +} + +private final function IncrementCheckHandler( + Database.DBQueryResult result, + Database source, + int requestID) +{ + if (!FetchIfRequestStillValid(requestID)) { + return; + } + // If we could successfully increment value with appropriate JSON value, + // then its type is correct + rootIsOfExpectedType = (result == DBR_Success); +} + +private final function InitialLoadingHandler( + Database.DBQueryResult result, + /*take*/ AcediaObject data, + Database source, + int requestID) +{ + local int i; + local array completedEdits; + + if (!FetchIfRequestStillValid(requestID)) + { + _.memory.Free(data); + return; + } + if (HandleInitializationError(result)) + { + _.memory.Free(data); + return; + } + 
completedEdits = localCache.SetRealData(data); + for (i = 0; i < completedEdits.length; i += 1) + { + if (completedEdits[i].successful) + { + if (completedEdits[i].type == DBCET_Remove) { + RemoveDataInDatabase(completedEdits[i].location); + } + else + { + ModifyDataInDatabase( + completedEdits[i].location, + completedEdits[i].data, + completedEdits[i].type == DBCET_Increment); + } + } + else { + onEditResultSignal.Emit(completedEdits[i].location, false); + } + _.memory.Free(completedEdits[i].location); + _.memory.Free(completedEdits[i].data); + } + _.memory.Free(data); + ChangeState(DBCS_Connected); +} + +// Return `true` if further initialization must be stopped. +private final function bool HandleInitializationError( + Database.DBQueryResult result) +{ + // Get disconnected before even response has even arrived + if (currentState == DBCS_Disconnected) { + return true; + } + if (currentState == DBCS_Connected) + { + _.logger.Auto(errDoubleInitialization).Arg(rootPointer.ToText()); + return true; + } + if (result == DBR_InvalidDatabase) + { + dbFailureReason = FDE_Unknown; + ChangeState(DBCS_FaultyDatabase); + return true; + } + if (result != DBR_Success) + { + dbFailureReason = FDE_CannotReadRootData; + ChangeState(DBCS_FaultyDatabase); + return true; + } + if (!rootIsOfExpectedType) + { + dbFailureReason = FDE_UnexpectedRootData; + ChangeState(DBCS_FaultyDatabase); + return true; + } + return false; +} + +private final function EditDataHandler( + Database.DBQueryResult result, + Database source, + int requestID) +{ + local JSONPointer relatedPointer; + + relatedPointer = FetchRequestPointer(requestID); + if (relatedPointer == none) { + return; + } + if (result == DBR_InvalidDatabase) + { + dbFailureReason = FDE_Unknown; + ChangeState(DBCS_FaultyDatabase); + relatedPointer.FreeSelf(); + return; + } + if (result == DBR_Success) { + onEditResultSignal.Emit(relatedPointer, true); + } + else { + onEditResultSignal.Emit(relatedPointer, false); + } + relatedPointer.FreeSelf(); +} + +defaultproperties +{ + errDoubleInitialization = (l=LOG_Error,m="`DBConnection` connected to \"%1\" was double-initialized. This SHOULD NOT happen. Please report this bug.") +} \ No newline at end of file diff --git a/sources/Data/Database/Connection/Events/DBConnection_EditResult_Signal.uc b/sources/Data/Database/Connection/Events/DBConnection_EditResult_Signal.uc new file mode 100644 index 0000000..8ec43fc --- /dev/null +++ b/sources/Data/Database/Connection/Events/DBConnection_EditResult_Signal.uc @@ -0,0 +1,40 @@ +/** + * Signal class for `DBConnections`'s `OnEditResult()` signal. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . 
+ */ +class DBConnection_EditResult_Signal extends Signal + dependson(DBConnection); + +public final function Emit(JSONPointer editLocation, bool isSuccessful) +{ + local Slot nextSlot; + StartIterating(); + nextSlot = GetNextSlot(); + while (nextSlot != none) + { + DBConnection_EditResult_Slot(nextSlot) + .connect(editLocation, isSuccessful); + nextSlot = GetNextSlot(); + } + CleanEmptySlots(); +} + +defaultproperties +{ + relatedSlotClass = class'DBConnection_EditResult_Slot' +} \ No newline at end of file diff --git a/sources/Data/Database/Connection/Events/DBConnection_EditResult_Slot.uc b/sources/Data/Database/Connection/Events/DBConnection_EditResult_Slot.uc new file mode 100644 index 0000000..6ffcbc9 --- /dev/null +++ b/sources/Data/Database/Connection/Events/DBConnection_EditResult_Slot.uc @@ -0,0 +1,41 @@ +/** + * Slot class for `DBConnections`'s `OnEditResult()` signal. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . + */ +class DBConnection_EditResult_Slot extends Slot + dependson(DBConnection); + +delegate connect(JSONPointer editLocation, bool isSuccessful) +{ + DummyCall(); +} + +protected function Constructor() +{ + connect = none; +} + +protected function Finalizer() +{ + super.Finalizer(); + connect = none; +} + +defaultproperties +{ +} \ No newline at end of file diff --git a/sources/Data/Database/Connection/Events/DBConnection_StateChanged_Signal.uc b/sources/Data/Database/Connection/Events/DBConnection_StateChanged_Signal.uc new file mode 100644 index 0000000..a7147ae --- /dev/null +++ b/sources/Data/Database/Connection/Events/DBConnection_StateChanged_Signal.uc @@ -0,0 +1,43 @@ +/** + * Signal class for `DBConnections`'s `OnStatusChanged()` signal. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . 
+ */ +class DBConnection_StateChanged_Signal extends Signal + dependson(DBConnection); + +public final function Emit( + DBConnection instance, + DBConnection.DBConnectionState oldState, + DBConnection.DBConnectionState newState) +{ + local Slot nextSlot; + StartIterating(); + nextSlot = GetNextSlot(); + while (nextSlot != none) + { + DBConnection_StateChanged_Slot(nextSlot) + .connect(instance, oldState, newState); + nextSlot = GetNextSlot(); + } + CleanEmptySlots(); +} + +defaultproperties +{ + relatedSlotClass = class'DBConnection_StateChanged_Slot' +} \ No newline at end of file diff --git a/sources/Data/Database/Connection/Events/DBConnection_StateChanged_Slot.uc b/sources/Data/Database/Connection/Events/DBConnection_StateChanged_Slot.uc new file mode 100644 index 0000000..f0772e0 --- /dev/null +++ b/sources/Data/Database/Connection/Events/DBConnection_StateChanged_Slot.uc @@ -0,0 +1,44 @@ +/** + * Slot class for `DBConnections`'s `OnStatusChanged()` signal. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . + */ +class DBConnection_StateChanged_Slot extends Slot + dependson(DBConnection); + +delegate connect( + DBConnection instance, + DBConnection.DBConnectionState oldState, + DBConnection.DBConnectionState newState) +{ + DummyCall(); +} + +protected function Constructor() +{ + connect = none; +} + +protected function Finalizer() +{ + super.Finalizer(); + connect = none; +} + +defaultproperties +{ +} \ No newline at end of file diff --git a/sources/Data/Database/Connection/Tests/TEST_DBConnection.uc b/sources/Data/Database/Connection/Tests/TEST_DBConnection.uc new file mode 100644 index 0000000..2918dd4 --- /dev/null +++ b/sources/Data/Database/Connection/Tests/TEST_DBConnection.uc @@ -0,0 +1,389 @@ +/** + * Set of tests for `DBConnection` and `DBCache` classes. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . 
+ */ +class TEST_DBConnection extends TestCase + abstract; + +var DBCache cache; + +protected static function TESTS() +{ + Context("Testing how `DBCache` handles data writes and reads when" + @ "the actual database's data is yet to be downloaded."); + Test_DBCache_BasicWriteRead_DuringLoading(); + Test_DBCache_AllOperations_DuringLoading(); + Context("Testing how `DBCache` handles applying pending writes after real" + @ "data was set."); + Test_DBCache_ApplyingPendingEdits(); + Test_DBCache_ApplyingPendingEdit_AllOperations(); + Context("Testing how `DBCache` handles data writes and reads when" + @ "the actual database's data is already setup."); + Test_DBCache_BasicWriteRead_AfterLoading(); + Test_DBCache_AllOperations_AfterLoading(); +} + +/* Creates following object: +{ + "A": "simpleValue", + "B": { + "A": [true, { + "A": "simpleValue", + "B": 11.12, + "": [true, null, "huh"] + }, "huh"], + "B": -13.95 + }, + "C": -5, + "D": [] +} */ +protected static function HashTable MakeTestJSONObject() +{ + local ArrayList outerArray, innerArray; + local HashTable result, outerObject, innerObject; + + innerArray = __().collections.EmptyArrayList(); + innerArray.AddBool(true).AddItem(none).AddString("huh"); + innerObject = __().collections.EmptyHashTable(); + innerObject.SetString(P("A"), "simpleValue"); + innerObject.SetFloat(P("B"), 11.12).SetItem(P(""), innerArray); + outerArray = __().collections.EmptyArrayList(); + outerArray.AddBool(true).AddItem(innerObject).AddString("huh"); + outerObject = __().collections.EmptyHashTable(); + outerObject.SetItem(P("A"), outerArray).SetFloat(P("B"), -13.95); + result = __().collections.EmptyHashTable(); + result.SetString(P("A"), "simpleValue"); + result.SetItem(P("B"), outerObject); + result.SetInt(P("C"), -5); + result.SetItem(P("D"), __().collections.EmptyArrayList()); + return result; +} + +protected static function CheckLocation(string pointer, string expectedResult) +{ + local AcediaObject value; + + value = default.cache + .Read(__().json.Pointer(__().text.FromString(pointer))); + if (BaseText(value) != none) + { + TEST_ExpectTrue( + __().json.Print(value).ToString() + == ("\"" $ expectedResult $ "\"")); + } + else { + TEST_ExpectTrue(__().json.Print(value).ToString() == expectedResult); + } +} + +// This is a rather extensive and long test. +// Despite its length, I've chosen to not break it up into smaller parts, +// since the same value `default.cache` is changed and built up all the way +// through this method. +// Trying to break it up into smaller and simpler tests would only mean +// putting less stress of `DBCache` than we currently do. 
+protected static function Test_DBCache_BasicWriteRead_DuringLoading() +{ + local AcediaObject complexData; + + Issue("Simple write/read sequence not working correctly."); + default.cache = DBCache(__().memory.Allocate(class'DBCache')); + complexData = MakeTestJSONObject(); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away")), + complexData); + default.cache.Write( + __().json.Pointer(P("/just/another/place/not/near")), + complexData); + CheckLocation("/just/another/place/not/near/A", "simpleValue"); + CheckLocation("/just/some/place/far/away/B/A/2", "huh"); + CheckLocation("/just/another/place/not/near/B/A/1//2", "huh"); + CheckLocation("/just/some/place/far/away/B/A/1/", "[true,null,\"huh\"]"); + + Issue("Data is read incorrectly after being overridden."); + default.cache.Write( + __().json.Pointer(P("/just/another/place/not/near/B/A/1//1")), + __().box.bool(false)); + CheckLocation( + "/just/another/place/not/near/B/A/1/", + "[true,false,\"huh\"]"); + default.cache.Write( + __().json.Pointer(P("/just/another/place/not/near/B/A")), + __().box.int(121)); + CheckLocation("/just/another/place/not/near/B/A", "121"); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away/B/A/1/B")), + complexData); + CheckLocation("/just/some/place/far/away/B/A/1/B/B/A/0", "true"); + default.cache.Write( + __().json.Pointer( + P("/just/some/place/far/away/C/inside_the_number!/hey")), + __().box.float(1.1)); + CheckLocation("/just/some/place/far/away/C/inside_the_number!/hey", "null"); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away/D/7/hey")), + __().box.int(-345)); + CheckLocation("/just/some/place/far/away/D", "[]"); + CheckLocation("/just/some/place/far/away/D/7/hey", "-345"); + default.cache.Write( + __().json.Pointer( + P("/just/some/place/far/away/D/inside_the_array!/hey")), + __().box.float(1.1)); + CheckLocation("/just/some/place/far/away/D/inside_the_array!/hey", "null"); + CheckLocation("/just/some/place/far/away/D", "[]"); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away/D/-")), + __().box.bool(true)); + CheckLocation("/just/some/place/far/away/D", "[true]"); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away/D/7")), + __().box.bool(true)); + CheckLocation( + "/just/some/place/far/away/D", + "[true,null,null,null,null,null,null,true]"); + Issue("Writing at the end of a JSON array several times doesn't correctly" + @ "keep all values."); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away/D/-")), + __().box.int(13524)); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away/D/-")), none); + default.cache.Write( + __().json.Pointer(P("/just/some/place/far/away/D/-")), + __().box.int(121)); + CheckLocation( + "/just/some/place/far/away/D", + "[true,null,null,null,null,null,null,true,13524,null,121]"); +} + +protected static function Test_DBCache_AllOperations_DuringLoading() +{ + local AcediaObject complexData; + local ArrayList newArrayData; + + newArrayData = __().collections.EmptyArrayList(); + newArrayData = newArrayData.AddInt(45); + newArrayData = newArrayData.AddItem(none); + newArrayData = newArrayData.AddString("lol"); + + Issue("Increment/remove/read sequence not working correctly."); + default.cache = DBCache(__().memory.Allocate(class'DBCache')); + complexData = MakeTestJSONObject(); + default.cache.Increment(__().json.Pointer(P("")), complexData); + default.cache.Increment(__().json.Pointer(P("/B/A/1/A")), + __().text.FromString("oi")); + 
default.cache.Remove(__().json.Pointer(P("/B/A/1/B"))); + default.cache.Remove(__().json.Pointer(P("/B/A/1/"))); + default.cache.Increment(__().json.Pointer(P("/B/A")), newArrayData); + default.cache.Increment(__().json.Pointer(P("/C")), __().box.float(34.5)); + default.cache.Increment(__().json.Pointer(P("/C")), __().box.bool(true)); + default.cache.Increment(__().json.Pointer(P("/D")), newArrayData); + default.cache.Increment(__().json.Pointer(P("/D")), newArrayData); + default.cache.Increment(__().json.Pointer(P("/D")), newArrayData); + default.cache.Increment(__().json.Pointer(P("/B/A/1/A")), + __().text.FromString("! Yeah!")); + // Override all increments! + default.cache.Write(__().json.Pointer(P("/D")), newArrayData); + CheckLocation("/B/A/1/A", "simpleValueoi! Yeah!"); + CheckLocation("/B/A", + "[true,{\"A\":\"simpleValueoi! Yeah!\"},\"huh\",45,null,\"lol\"]"); + CheckLocation("/C", "29.5"); + CheckLocation("/D", "[45,null,\"lol\"]"); +} + +protected static function Test_DBCache_ApplyingPendingEdits() +{ + SubTest_DBCache_ApplyingPendingEdits_Simple(); + SubTest_DBCache_ApplyingPendingEdits_Complex(); +} + +protected static function SubTest_DBCache_ApplyingPendingEdits_Simple() +{ + local int i; + local array result; + + Issue("Pending writes successfully apply in simple JSON object case."); + default.cache = DBCache(__().memory.Allocate(class'DBCache')); + default.cache.Write( + __().json.Pointer(P("")), + __().collections.EmptyHashTable()); + default.cache.Write(__().json.Pointer(P("/hey")), __().box.int(476)); + default.cache.Write(__().json.Pointer(P("/hope")), none); + default.cache.Write(__().json.Pointer(P("/-")), __().ref.float(324.3)); + result = default.cache.SetRealData(__().text.FromString("woah")); + for (i = 0; i < result.length; i += 1) { + TEST_ExpectTrue(result[i].successful); + } + CheckLocation("/hey", "476"); + CheckLocation("/hope", "null"); + CheckLocation("/-", "324.3"); + + Issue("Pending don't fail in a simple case of writing into JSON string."); + default.cache = DBCache(__().memory.Allocate(class'DBCache')); + default.cache.Write(__().json.Pointer(P("/hey")), __().box.int(476)); + default.cache.Write(__().json.Pointer(P("/hope")), none); + default.cache.Write(__().json.Pointer(P("/-")), __().ref.float(324.3)); + result = default.cache.SetRealData(__().text.FromString("woah")); + for (i = 0; i < result.length; i += 1) { + TEST_ExpectFalse(result[i].successful); + } + CheckLocation("/hey", "null"); + CheckLocation("/hope", "null"); + CheckLocation("/-", "null"); +} + +protected static function SubTest_DBCache_ApplyingPendingEdits_Complex() +{ + local array result; + + Issue("Pending writes incorrectly apply in complex case."); + default.cache = DBCache(__().memory.Allocate(class'DBCache')); + default.cache.Write(__().json.Pointer(P("/B/A/1/B")), __().box.int(777)); + default.cache.Write(__().json.Pointer(P("/B/A/-")), __().box.bool(true)); + default.cache.Write(__().json.Pointer(P("/D/5")), __().box.float(1.1)); + default.cache.Write( + __().json.Pointer(P("/new")), + __().collections.EmptyHashTable()); + default.cache.Write( + __().json.Pointer(P("/new/sub")), + __().text.FromString("!SubString!")); + default.cache.Write( + __().json.Pointer(P("/D/impossiburu")), + __().text.FromString("!SubString!")); + result = default.cache.SetRealData(MakeTestJSONObject()); + CheckLocation("/B/A/1/B", "777"); + CheckLocation("/B/A/3", "true"); + CheckLocation("/D/5", "1.1"); + CheckLocation("/D/2", "null"); + CheckLocation("/new", "{\"sub\":\"!SubString!\"}"); + 
TEST_ExpectTrue(result[0].successful); + TEST_ExpectTrue(result[1].successful); + TEST_ExpectTrue(result[2].successful); + TEST_ExpectTrue(result[3].successful); + TEST_ExpectTrue(result[4].successful); + TEST_ExpectFalse(result[5].successful); +} + +protected static function Test_DBCache_ApplyingPendingEdit_AllOperations() +{ + local ArrayList newArrayData; + local array result; + + newArrayData = __().collections.EmptyArrayList(); + newArrayData = newArrayData.AddInt(45).AddItem(none).AddString("lol"); + + Issue("Pending increments and removals incorrectly apply in complex case."); + default.cache = DBCache(__().memory.Allocate(class'DBCache')); + default.cache.Increment(__().json.Pointer(P("/B/A/1/A")), + __().text.FromString("oi")); + default.cache.Remove(__().json.Pointer(P("/B/A/1/B"))); + default.cache.Remove(__().json.Pointer(P("/B/A/1/"))); + default.cache.Increment(__().json.Pointer(P("/B/A")), newArrayData); + default.cache.Increment(__().json.Pointer(P("/C")), __().box.float(34.5)); + default.cache.Increment(__().json.Pointer(P("/C")), __().box.bool(true)); + default.cache.Increment(__().json.Pointer(P("/D")), newArrayData); + default.cache.Increment(__().json.Pointer(P("/D")), newArrayData); + default.cache.Remove(__().json.Pointer(P("/B/A/Y"))); + default.cache.Increment(__().json.Pointer(P("/B/A/1/A")), + __().text.FromString("! Yeah!")); + default.cache.Write(__().json.Pointer(P("/D")), newArrayData); + result = default.cache.SetRealData(MakeTestJSONObject()); + TEST_ExpectTrue(result.length == 9); + TEST_ExpectTrue(result[0].successful); + TEST_ExpectTrue(result[1].successful); + TEST_ExpectTrue(result[2].successful); + TEST_ExpectTrue(result[3].successful); + TEST_ExpectTrue(result[4].successful); + TEST_ExpectFalse(result[5].successful); + TEST_ExpectFalse(result[6].successful); + TEST_ExpectTrue(result[7].successful); + TEST_ExpectTrue(result[8].successful); +} + +protected static function Test_DBCache_BasicWriteRead_AfterLoading() +{ + local AcediaObject complexData; + + Issue("Simple write/read sequence not working."); + default.cache = DBCache(__().memory.Allocate(class'DBCache')); + complexData = MakeTestJSONObject(); + TEST_ExpectTrue(default.cache.SetRealData(complexData).length == 0); + default.cache.Write(__().json.Pointer(P("/B/A/1/B")), __().box.int(777)); + default.cache.Write(__().json.Pointer(P("/B/A/-")), __().box.bool(true)); + default.cache.Write(__().json.Pointer(P("/D/5")), __().box.float(1.1)); + default.cache.Write( + __().json.Pointer(P("/new")), + __().collections.EmptyHashTable()); + default.cache.Write( + __().json.Pointer(P("/new/sub")), + __().text.FromString("!SubString!")); + default.cache.Write( + __().json.Pointer(P("/D/impossiburu")), + __().text.FromString("!SubString!")); + CheckLocation("/B/A/1/B", "777"); + CheckLocation("/B/A/3", "true"); + CheckLocation("/D/5", "1.1"); + CheckLocation("/new", "{\"sub\":\"!SubString!\"}"); + + Issue("Simple write/read are affecting data when they shouldn't."); + CheckLocation("/D/2", "null"); + default.cache.Write(__().json.Pointer(P("")), __().box.float(1.1)); + default.cache.Write(__().json.Pointer(P("/hm?")), __().box.float(2.2)); + CheckLocation("", "1.1"); +} + +protected static function Test_DBCache_AllOperations_AfterLoading() +{ + local ArrayList newArrayData; + + newArrayData = __().collections.EmptyArrayList(); + newArrayData = newArrayData.AddInt(45).AddItem(none).AddString("lol"); + + Issue("Increment/remove/read sequence not working correctly."); + default.cache = 
DBCache(__().memory.Allocate(class'DBCache')); + default.cache.SetRealData(MakeTestJSONObject()); + TEST_ExpectTrue(default.cache.Increment(__().json.Pointer(P("/B/A/1/A")), + __().text.FromString("oi"))); + TEST_ExpectTrue(default.cache.Remove(__().json.Pointer(P("/B/A/1/B")))); + TEST_ExpectTrue(default.cache.Remove(__().json.Pointer(P("/B/A/1/")))); + TEST_ExpectTrue(default.cache.Increment(__().json.Pointer(P("/B/A")), + newArrayData)); + TEST_ExpectTrue(default.cache.Increment(__().json.Pointer(P("/C")), + __().box.float(34.5))); + TEST_ExpectFalse(default.cache.Increment(__().json.Pointer(P("/C")), + __().box.bool(true))); + TEST_ExpectTrue(default.cache.Increment(__().json.Pointer(P("/D")), + newArrayData)); + TEST_ExpectTrue(default.cache.Increment(__().json.Pointer(P("/D")), + newArrayData)); + TEST_ExpectFalse(default.cache.Remove(__().json.Pointer(P("/B/A/Y")))); + TEST_ExpectTrue(default.cache.Increment(__().json.Pointer(P("/B/A/1/A")), + __().text.FromString("! Yeah!"))); + default.cache.Write(__().json.Pointer(P("/D")), newArrayData); + CheckLocation("/B/A/1/A", "simpleValueoi! Yeah!"); + CheckLocation("/B/A", + "[true,{\"A\":\"simpleValueoi! Yeah!\"},\"huh\",45,null,\"lol\"]"); + CheckLocation("/C", "29.5"); + CheckLocation("/D", "[45,null,\"lol\"]"); +} + +defaultproperties +{ + caseGroup = "Database" + caseName = "DBConnection related tests" +} \ No newline at end of file diff --git a/sources/Data/Database/DBAPI.uc b/sources/Data/Database/DBAPI.uc index 9f750e8..54ab288 100644 --- a/sources/Data/Database/DBAPI.uc +++ b/sources/Data/Database/DBAPI.uc @@ -55,7 +55,6 @@ public final function Database Load(BaseText databaseLink) { local Parser parser; local Database result; - local Text immutableDatabaseName; local MutableText databaseName; if (databaseLink == none) { @@ -71,11 +70,9 @@ public final function Database Load(BaseText databaseLink) parser.FreeSelf(); return none; } - immutableDatabaseName = databaseName.Copy(); - result = LoadLocal(immutableDatabaseName); + result = LoadLocal(databaseName); parser.FreeSelf(); databaseName.FreeSelf(); - immutableDatabaseName.FreeSelf(); return result; } @@ -114,6 +111,56 @@ public final function JSONPointer GetPointer(BaseText databaseLink) return result; } +/** + * Opens a new `DBConnection` to the data referred to by the database link. + * + * Opened `DBConnection` doesn't automatically start a connection, so you + * need to call its `Connect()` method. + * + * @param databaseLink Database link to the data we want to connect to. + * @return Initialized `DBConnection` in case given link is valid and `none` + * otherwise. + */ +public final function DBConnection OpenConnection(BaseText databaseLink) +{ + local DBConnection result; + local Parser parser; + local Database databaseToConnect; + local JSONPointer locationToConnect; + local MutableText databaseName, textPointer; + + if (databaseLink == none) { + return none; + } + parser = _.text.Parse(databaseLink); + // Only local DBs are supported for now! + // So just consume this prefix, if it's present. 
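+    // After the optional "[local]" prefix the code below expects
+    // a "name:pointer" format, e.g. "[local]MyDB:/stats" refers to
+    // the "/stats" location inside the database named "MyDB"
+    // (that link is purely illustrative)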
+ parser.Match(P("[local]")).Confirm(); + textPointer = parser + .R() + .MUntil(databaseName, _.text.GetCharacter(":")) + .MatchS(":") + .GetRemainderM(); + if (parser.Ok()) + { + databaseToConnect = LoadLocal(databaseName); + locationToConnect = _.json.Pointer(textPointer); + result = DBConnection(_.memory.Allocate(class'DBConnection')); + result.Initialize(databaseToConnect, locationToConnect); + _.memory.Free(databaseToConnect); + _.memory.Free(locationToConnect); + } + parser.FreeSelf(); + _.memory.Free(databaseName); + _.memory.Free(textPointer); + if (result != none && !result.IsInitialized()) + { + result.FreeSelf(); + result = none; + } + return result; +} + /** * Creates new local database with name `databaseName`. * @@ -169,6 +216,7 @@ public final function LocalDatabaseInstance LoadLocal(BaseText databaseName) local Text rootRecordName; local LocalDatabase newConfig; local LocalDatabaseInstance newLocalDBInstance; + local Text dbKey; if (databaseName == none) { return none; @@ -189,7 +237,9 @@ public final function LocalDatabaseInstance LoadLocal(BaseText databaseName) return none; } newLocalDBInstance = LocalDatabaseInstance(_.memory.Allocate(localDBClass)); - loadedLocalDatabases.SetItem(databaseName.Copy(), newLocalDBInstance); + dbKey = databaseName.Copy(); + loadedLocalDatabases.SetItem(dbKey, newLocalDBInstance); + dbKey.FreeSelf(); if (newConfig.HasDefinedRoot()) { rootRecordName = newConfig.GetRootName(); diff --git a/sources/Data/Database/DBTask.uc b/sources/Data/Database/DBTask.uc index 40e947c..2324725 100644 --- a/sources/Data/Database/DBTask.uc +++ b/sources/Data/Database/DBTask.uc @@ -6,7 +6,7 @@ * completed and will self-destruct afterwards. Concrete delegates are * declared in child classes of this `DBTask`, since they can have different * signatures, depending on the query. - * Copyright 2021 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -54,6 +54,7 @@ var private int previousTaskLifeVersion; var private Database.DBQueryResult taskResult; var private bool isReadyToComplete; +var private int requestID; var private LoggerAPI.Definition errLoopInTaskChain; @@ -65,6 +66,18 @@ protected function Finalizer() previousTask = none; previousTaskLifeVersion = -1; isReadyToComplete = false; + requestID = 0; +} + +/** + * Returns ID of the request set inside `SetResult()` for the caller `DBTask`. + * + * @return ID of the request set inside `SetResult()` for the caller `DBTask`. + * If `SetResult()` wasn't yet called returns `0`. + */ +protected function int GetRequestID() +{ + return requestID; } /** @@ -105,12 +118,17 @@ public final function Database.DBQueryResult GetResult() * This value can be assigned several times and the last assigned value will * be used. * - * @param result Result of the query, relevant to the caller task. + * @param result Result of the query, relevant to the caller task. + * @param requestID ID of the request this task is responding to, specified + * at the time request was made. 
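+ *      (In the method's signature this parameter is named
+ *      `completedRequestID`; the value passed here is what `GetRequestID()`
+ *      will later return.)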
*/ -public final function SetResult(Database.DBQueryResult result) +public final function SetResult( + Database.DBQueryResult result, + optional int completedRequestID) { taskResult = result; isReadyToComplete = true; + requestID = completedRequestID; } /** diff --git a/sources/Data/Database/Database.uc b/sources/Data/Database/Database.uc index e1f6f83..a296020 100644 --- a/sources/Data/Database/Database.uc +++ b/sources/Data/Database/Database.uc @@ -7,7 +7,7 @@ * All of the methods are asynchronous - they do not return requested * values immediately and instead require user to provide a handler function * that will be called once operation is completed. - * Copyright 2021-2022 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -73,15 +73,27 @@ enum DBQueryResult * to load data as immutable Acedia's types and `true` will make it load * data as mutable types. This setting does not affect `Collection`s into * which JSON arrays and objects are converted - they are always mutable. + * @param requestID ID of this request. It will be reported when + * database's task is completed. Can be used to correspond database's + * responses with particular requests. * @return Task object that corresponds to this `ReadData()` call. * * Guaranteed to be not `none`; * * Use it to connect a handler for when reading task is complete: - * `ReadData(...).connect = handler`, - * where `handler` must have the following signature: - * `connect(DBQueryResult result, AcediaObject data)`; + * `ReadData(...).connect = handler`, + * where `handler` must have the following signature: + * ``` + * connect( + * DBQueryResult result, + * take AcediaObject data, + * Database source, + * int requestID)`; + * ``` * * Ownership of `data` object returned in the `connect()` is considered * to be transferred to whoever handled result of this query. * It must be deallocated once no longer needed. + * * `source` provides reference to the database, whose data was + * requested, `requestID` provides the same number as `requestID` + * parameter of this method. * * Possible `DBQueryResult` types are `DBR_Success`, * `DBR_InvalidPointer` and `DBR_InvalidDatabase`; * * `data` is guaranteed to be `none` if `result != DBR_Success`; @@ -90,7 +102,8 @@ enum DBQueryResult */ public function DBReadTask ReadData( JSONPointer pointer, - optional bool makeMutable) + optional bool makeMutable, + optional int requestID) { return none; } @@ -110,12 +123,18 @@ public function DBReadTask ReadData( * @param data Data that needs to be written at the specified location * inside the database. For method to succeed this object needs to have * JSON-compatible type (see `_.json.IsCompatible()` for more details). + * @param requestID ID of this request. It will be reported when + * database's task is completed. Can be used to correspond database's + * responses with particular requests. * @return Task object that corresponds to this `WriteData()` call. * * Guaranteed to be not `none`; * * Use it to connect a handler for when writing task is complete: * `WriteData(...).connect = handler`, * where `handler` must have the following signature: - * `connect(DBQueryResult result)`; + * `connect(DBQueryResult result, Database source, int requestID)`; + * * `source` provides reference to the database, whose data was + * requested, `requestID` provides the same number as `requestID` + * parameter of this method. 
* * Possible `DBQueryResult` types are `DBR_Success`, * `DBR_InvalidPointer`, `DBR_InvalidDatabase` and `DBR_InvalidData`; * * Data is actually written inside the database iff @@ -128,7 +147,10 @@ public function DBReadTask ReadData( * Example: writing data at "/sub-object/valueA" will always fail if * "sub-object" does not exist. */ -public function DBWriteTask WriteData(JSONPointer pointer, AcediaObject data) +public function DBWriteTask WriteData( + JSONPointer pointer, + AcediaObject data, + optional int requestID) { return none; } @@ -141,12 +163,18 @@ public function DBWriteTask WriteData(JSONPointer pointer, AcediaObject data) * * @param pointer JSON pointer to the location of the data to remove from * database. `none` is always treated as an invalid JSON pointer. + * @param requestID ID of this request. It will be reported when + * database's task is completed. Can be used to correspond database's + * responses with particular requests. * @return Task object that corresponds to this `RemoveData()` call. * * Guaranteed to be not `none`; * * Use it to connect a handler for when writing task is complete: * `RemoveData(...).connect = handler`, * where `handler` must have the following signature: - * `connect(DBQueryResult result)`. + * `connect(DBQueryResult result, Database source, int requestID)`. + * * `source` provides reference to the database, whose data was + * requested, `requestID` provides the same number as `requestID` + * parameter of this method. * * Possible `DBQueryResult` types are `DBR_Success`, * `DBR_InvalidPointer` and `DBR_InvalidDatabase`; * * Data is actually removed from the database iff @@ -154,7 +182,9 @@ public function DBWriteTask WriteData(JSONPointer pointer, AcediaObject data) * * `DBR_InvalidPointer` can be produced if either `pointer == none` or * it does not point at any existing value inside the caller database. */ -public function DBRemoveTask RemoveData(JSONPointer pointer) +public function DBRemoveTask RemoveData( + JSONPointer pointer, + optional int requestID) { return none; } @@ -166,12 +196,24 @@ public function DBRemoveTask RemoveData(JSONPointer pointer) * @param pointer JSON pointer to the location of the data for which type * needs to be checked. * `none` is always treated as an invalid JSON pointer. + * @param requestID ID of this request. It will be reported when + * database's task is completed. Can be used to correspond database's + * responses with particular requests. * @return Task object that corresponds to this `CheckDataType()` call. * * Guaranteed to be not `none`; * * Use it to connect a handler for when reading task is complete: * `CheckDataType(...).connect = handler`, * where `handler` must have the following signature: - * `connect(DBQueryResult result, Database.DataType type)`; + * ``` + * connect( + * DBQueryResult result, + * Database.DataType type, + * Database source, + * int requestID) + * ``` + * * `source` provides reference to the database, whose data was + * requested, `requestID` provides the same number as `requestID` + * parameter of this method. * * Possible `DBQueryResult` types are `DBR_Success`, * `DBR_InvalidPointer` and `DBR_InvalidDatabase`; * * This task can only fail if either caller database is broken @@ -181,7 +223,9 @@ public function DBRemoveTask RemoveData(JSONPointer pointer) * * Data is actually removed from the database iff * `result == DBR_Success`. 
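+ *
+ * A minimal usage sketch (the handler and variable names here are
+ * hypothetical, picked purely for illustration):
+ * ```
+ * function MyTypeHandler(
+ *     Database.DBQueryResult  result,
+ *     Database.DataType       type,
+ *     Database                source,
+ *     int                     requestID)
+ * {
+ *     // `type` describes the value at the queried location
+ * }
+ * ...
+ * db.CheckDataType(_.json.Pointer(P("/some/value")), 7).connect = MyTypeHandler;
+ * ```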
*/ -public function DBCheckTask CheckDataType(JSONPointer pointer) +public function DBCheckTask CheckDataType( + JSONPointer pointer, + optional int requestID) { return none; } @@ -197,12 +241,24 @@ public function DBCheckTask CheckDataType(JSONPointer pointer) * @param pointer JSON pointer to the location of the JSON object or array * for which size needs to be obtained. * `none` is always treated as an invalid JSON pointer. + * @param requestID ID of this request. It will be reported when + * database's task is completed. Can be used to correspond database's + * responses with particular requests. * @return Task object that corresponds to this `GetDataSize()` call. * * Guaranteed to be not `none`; * * Use it to connect a handler for when reading task is complete: * `GetDataSize(...).connect = handler`, * where `handler` must have the following signature: - * `connect(DBQueryResult result, int size)`. + * ``` + * connect( + * DBQueryResult result, + * int size, + * Database source, + * int requestID) + * ``` + * * `source` provides reference to the database, whose data was + * requested, `requestID` provides the same number as `requestID` + * parameter of this method. * * Possible `DBQueryResult` types are `DBR_Success`, * `DBR_InvalidPointer` and `DBR_InvalidDatabase`; * * Returned `size` value is actually a size of referred @@ -211,7 +267,9 @@ public function DBCheckTask CheckDataType(JSONPointer pointer) * it does not point at a JSON object or array inside the * caller database. */ -public function DBSizeTask GetDataSize(JSONPointer pointer) +public function DBSizeTask GetDataSize( + JSONPointer pointer, + optional int requestID) { return none; } @@ -225,15 +283,27 @@ public function DBSizeTask GetDataSize(JSONPointer pointer) * @param pointer JSON pointer to the location of the JSON object for which * keys need to be obtained. * `none` is always treated as an invalid JSON pointer. + * @param requestID ID of this request. It will be reported when + * database's task is completed. Can be used to correspond database's + * responses with particular requests. * @return Task object that corresponds to this `GetDataKeys()` call. * * Guaranteed to be not `none`; * * Use it to connect a handler for when reading task is complete: * `GetDataKeys(...).connect = handler`, * where `handler` must have the following signature: - * `connect(DBQueryResult result, ArrayList keys)`. + * ``` + * connect( + * DBQueryResult result, + * take ArrayList keys, + * Database source, + * int requestID) + * ``` * * Ownership of `keys` array returned in the `connect()` is considered * to be transferred to whoever handled result of this query. * It must be deallocated once no longer needed. + * * `source` provides reference to the database, whose data was + * requested, `requestID` provides the same number as `requestID` + * parameter of this method. * * Possible `DBQueryResult` types are `DBR_Success`, * `DBR_InvalidPointer`, `DBR_InvalidData` and `DBR_InvalidDatabase`; * * Returned `keys` will be non-`none` and contain keys of the referred @@ -243,7 +313,9 @@ public function DBSizeTask GetDataSize(JSONPointer pointer) * point at a JSON object inside caller database * (value can either not exist at all or have some other type). 
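+ *
+ * For illustration only (handler and variable names are hypothetical);
+ * note that ownership of `keys` passes to the handler, which must free them:
+ * ```
+ * function MyKeysHandler(
+ *     Database.DBQueryResult  result,
+ *     ArrayList               keys,
+ *     Database                source,
+ *     int                     requestID)
+ * {
+ *     // ...iterate over `keys`...
+ *     _.memory.Free(keys);    // we were given ownership of `keys`
+ * }
+ * ...
+ * db.GetDataKeys(_.json.Pointer(P("/some/object"))).connect = MyKeysHandler;
+ * ```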
*/ -public function DBKeysTask GetDataKeys(JSONPointer pointer) +public function DBKeysTask GetDataKeys( + JSONPointer pointer, + optional int requestID) { return none; } @@ -255,19 +327,21 @@ public function DBKeysTask GetDataKeys(JSONPointer pointer) * "Incrementing" is an operation that is safe from the point of view of * simultaneous access. What "incrementing" actually does depends on * the passed JSON value (`increment` parameter): - * (0. Unless `pointer` points at the JSON null value - then "increment" - * acts as a `WriteData()` method regardless of `increment`'s value); + * + * (0. ...unless `pointer` points at the JSON null or missing value (within + * existing container - then "increment" acts as a `WriteData()` method + * regardless of `increment`'s value;) * 1. JSON null: it never modifies existing value and reports an error if * existing value was not itself JSON null; - * 2. JSON bool: if combines with stored JSON bool value - + * 2. JSON bool: if combined with stored JSON bool value - * performs logical "or" operation. Otherwise fails; - * 3. JSON number: if combines with stored JSON numeric value - + * 3. JSON number: if combined with stored JSON numeric value - * adds values together. Otherwise fails. - * 4. JSON string: if combines with stored JSON string value - + * 4. JSON string: if combined with stored JSON string value - * concatenates itself at the end. Otherwise fails. - * 5. JSON array: if combines with stored JSON array value - + * 5. JSON array: if combined with stored JSON array value - * concatenates itself at the end. Otherwise fails. - * 6. JSON object: if combines with stored JSON object value - + * 6. JSON object: if combined with stored JSON object value - * `increment` adds it's own values with new keys into the stored * JSON object. Does not override old values. * Fails when combined with any other type. @@ -280,12 +354,18 @@ public function DBKeysTask GetDataKeys(JSONPointer pointer) * with `increment` parameter. * @param increment JSON-compatible value to be used as an increment for * the data at the specified location inside the database. + * @param requestID ID of this request. It will be reported when + * database's task is completed. Can be used to correspond database's + * responses with particular requests. * @return Task object that corresponds to this `IncrementData()` call. * * Guaranteed to be not `none`; * * Use it to connect a handler for when reading task is complete: * `IncrementData(...).connect = handler`, * where `handler` must have the following signature: - * `connect(DBQueryResult result)`. + * `connect(DBQueryResult result, Database source, int requestID)`. + * * `source` provides reference to the database, whose data was + * requested, `requestID` provides the same number as `requestID` + * parameter of this method. 
* * Possible `DBQueryResult` types are `DBR_Success`, * `DBR_InvalidPointer`, `DBR_InvalidData` and `DBR_InvalidDatabase`; * * Data is actually incremented iff `result == DBR_Success`; @@ -299,7 +379,8 @@ public function DBKeysTask GetDataKeys(JSONPointer pointer) */ public function DBIncrementTask IncrementData( JSONPointer pointer, - AcediaObject increment) + AcediaObject increment, + optional int requestID) { return none; } diff --git a/sources/Data/Database/Local/DBRecord.uc b/sources/Data/Database/Local/DBRecord.uc index a2730fd..0510ff1 100644 --- a/sources/Data/Database/Local/DBRecord.uc +++ b/sources/Data/Database/Local/DBRecord.uc @@ -184,6 +184,7 @@ private final function DBRecordPointer MakeRecordPointer( return pointer; } +// Converts `JSONPointer` into our internal representation. private final function DBRecordPointer ConvertPointer(JSONPointer jsonPointer) { if (jsonPointer == none) { @@ -192,6 +193,8 @@ private final function DBRecordPointer ConvertPointer(JSONPointer jsonPointer) return ConvertPointerPath(jsonPointer, 0, jsonPointer.GetLength()); } +// Produced out internal pointer representation `DBRecordPointer` to +// the container that stores object, referred to by a given `JSONPointer`. private final function DBRecordPointer ConvertContainerPointer( JSONPointer jsonPointer) { diff --git a/sources/Data/Database/Local/LocalDatabaseInstance.uc b/sources/Data/Database/Local/LocalDatabaseInstance.uc index 1ff544a..0ccdea6 100644 --- a/sources/Data/Database/Local/LocalDatabaseInstance.uc +++ b/sources/Data/Database/Local/LocalDatabaseInstance.uc @@ -149,54 +149,61 @@ private final function DBTask MakeNewTask(class newTaskClass) return newTask; } -private function bool ValidatePointer(JSONPointer pointer, DBTask relevantTask) +private function bool ValidatePointer( + JSONPointer pointer, + DBTask relevantTask, + int requestID) { if (pointer != none) { return true; } - relevantTask.SetResult(DBR_InvalidPointer); + relevantTask.SetResult(DBR_InvalidPointer, requestID); return false; } -private function bool ValidateRootRecord(DBTask relevantTask) +private function bool ValidateRootRecord(DBTask relevantTask, int requestID) { if (rootRecord != none) { return true; } - relevantTask.SetResult(DBR_InvalidDatabase); + relevantTask.SetResult(DBR_InvalidDatabase, requestID); return false; } public function DBReadTask ReadData( JSONPointer pointer, - optional bool makeMutable) + optional bool makeMutable, + optional int requestID) { local AcediaObject queryResult; local DBReadTask readTask; readTask = DBReadTask(MakeNewTask(class'DBReadTask')); - if (!ValidatePointer(pointer, readTask)) return readTask; - if (!ValidateRootRecord(readTask)) return readTask; + if (!ValidatePointer(pointer, readTask, requestID)) return readTask; + if (!ValidateRootRecord(readTask, requestID)) return readTask; if (rootRecord.LoadObject(pointer, queryResult, makeMutable)) { readTask.SetReadData(queryResult); - readTask.SetResult(DBR_Success); + readTask.SetResult(DBR_Success, requestID); } else { - readTask.SetResult(DBR_InvalidPointer); + readTask.SetResult(DBR_InvalidPointer, requestID); _.memory.Free(queryResult); // just in case } return readTask; } -public function DBWriteTask WriteData(JSONPointer pointer, AcediaObject data) +public function DBWriteTask WriteData( + JSONPointer pointer, + AcediaObject data, + optional int requestID) { local bool isDataStorable; local DBWriteTask writeTask; writeTask = DBWriteTask(MakeNewTask(class'DBWriteTask')); - if (!ValidatePointer(pointer, writeTask)) return 
writeTask; - if (!ValidateRootRecord(writeTask)) return writeTask; + if (!ValidatePointer(pointer, writeTask, requestID)) return writeTask; + if (!ValidateRootRecord(writeTask, requestID)) return writeTask; // We can only write JSON array as the root value if (data != none && pointer.GetLength() <= 0) { @@ -207,99 +214,111 @@ public function DBWriteTask WriteData(JSONPointer pointer, AcediaObject data) } if (!isDataStorable) { - writeTask.SetResult(DBR_InvalidData); + writeTask.SetResult(DBR_InvalidData, requestID); return writeTask; } if (rootRecord.SaveObject(pointer, data)) { - writeTask.SetResult(DBR_Success); + writeTask.SetResult(DBR_Success, requestID); ScheduleDiskUpdate(); } else { - writeTask.SetResult(DBR_InvalidPointer); + writeTask.SetResult(DBR_InvalidPointer, requestID); } return writeTask; } -public function DBRemoveTask RemoveData(JSONPointer pointer) +public function DBRemoveTask RemoveData( + JSONPointer pointer, + optional int requestID) { local DBRemoveTask removeTask; removeTask = DBRemoveTask(MakeNewTask(class'DBRemoveTask')); - if (!ValidatePointer(pointer, removeTask)) return removeTask; - if (!ValidateRootRecord(removeTask)) return removeTask; + if (!ValidatePointer(pointer, removeTask, requestID)) return removeTask; + if (!ValidateRootRecord(removeTask, requestID)) return removeTask; if (pointer.GetLength() == 0) { rootRecord.EmptySelf(); - removeTask.SetResult(DBR_Success); + removeTask.SetResult(DBR_Success, requestID); return removeTask; } if (rootRecord.RemoveObject(pointer)) { - removeTask.SetResult(DBR_Success); + removeTask.SetResult(DBR_Success, requestID); ScheduleDiskUpdate(); } else { - removeTask.SetResult(DBR_InvalidPointer); + removeTask.SetResult(DBR_InvalidPointer, requestID); } return removeTask; } -public function DBCheckTask CheckDataType(JSONPointer pointer) +public function DBCheckTask CheckDataType( + JSONPointer pointer, + optional int requestID) { local DBCheckTask checkTask; checkTask = DBCheckTask(MakeNewTask(class'DBCheckTask')); - if (!ValidatePointer(pointer, checkTask)) return checkTask; - if (!ValidateRootRecord(checkTask)) return checkTask; + if (!ValidatePointer(pointer, checkTask, requestID)) return checkTask; + if (!ValidateRootRecord(checkTask, requestID)) return checkTask; checkTask.SetDataType(rootRecord.GetObjectType(pointer)); - checkTask.SetResult(DBR_Success); + checkTask.SetResult(DBR_Success, requestID); return checkTask; } -public function DBSizeTask GetDataSize(JSONPointer pointer) +public function DBSizeTask GetDataSize( + JSONPointer pointer, + optional int requestID) { local DBSizeTask sizeTask; sizeTask = DBSizeTask(MakeNewTask(class'DBSizeTask')); - if (!ValidatePointer(pointer, sizeTask)) return sizeTask; - if (!ValidateRootRecord(sizeTask)) return sizeTask; + if (!ValidatePointer(pointer, sizeTask, requestID)) return sizeTask; + if (!ValidateRootRecord(sizeTask, requestID)) return sizeTask; sizeTask.SetDataSize(rootRecord.GetObjectSize(pointer)); - sizeTask.SetResult(DBR_Success); + sizeTask.SetResult(DBR_Success, requestID); return sizeTask; } -public function DBKeysTask GetDataKeys(JSONPointer pointer) +public function DBKeysTask GetDataKeys( + JSONPointer pointer, + optional int requestID) { local ArrayList keys; local DBKeysTask keysTask; keysTask = DBKeysTask(MakeNewTask(class'DBKeysTask')); - if (!ValidatePointer(pointer, keysTask)) return keysTask; - if (!ValidateRootRecord(keysTask)) return keysTask; + if (!ValidatePointer(pointer, keysTask, requestID)) return keysTask; + if 
(!ValidateRootRecord(keysTask, requestID)) return keysTask; keys = rootRecord.GetObjectKeys(pointer); keysTask.SetDataKeys(keys); if (keys == none) { - keysTask.SetResult(DBR_InvalidData); + keysTask.SetResult(DBR_InvalidData, requestID); } else { - keysTask.SetResult(DBR_Success); + keysTask.SetResult(DBR_Success, requestID); } return keysTask; } public function DBIncrementTask IncrementData( JSONPointer pointer, - AcediaObject increment) + AcediaObject increment, + optional int requestID) { local DBQueryResult queryResult; local DBIncrementTask incrementTask; incrementTask = DBIncrementTask(MakeNewTask(class'DBIncrementTask')); - if (!ValidatePointer(pointer, incrementTask)) return incrementTask; - if (!ValidateRootRecord(incrementTask)) return incrementTask; - + if (!ValidatePointer(pointer, incrementTask, requestID)) { + return incrementTask; + } + if (!ValidateRootRecord(incrementTask, requestID)) { + return incrementTask; + } queryResult = rootRecord.IncrementObject(pointer, increment); - incrementTask.SetResult(queryResult); + incrementTask.SetResult(queryResult, requestID); if (queryResult == DBR_Success) { ScheduleDiskUpdate(); } diff --git a/sources/Data/Database/Tasks/DBCheckTask.uc b/sources/Data/Database/Tasks/DBCheckTask.uc index 694343a..e355262 100644 --- a/sources/Data/Database/Tasks/DBCheckTask.uc +++ b/sources/Data/Database/Tasks/DBCheckTask.uc @@ -1,6 +1,6 @@ /** * Variant of `DBTask` for `CheckDataType()` query. - * Copyright 2021 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -24,7 +24,8 @@ var private Database.DataType queryTypeResponse; delegate connect( Database.DBQueryResult result, Database.DataType type, - Database source) {} + Database source, + int requestID) {} protected function Finalizer() { @@ -40,7 +41,7 @@ public function SetDataType(Database.DataType type) protected function CompleteSelf(Database source) { - connect(GetResult(), queryTypeResponse, source); + connect(GetResult(), queryTypeResponse, source, GetRequestID()); } defaultproperties diff --git a/sources/Data/Database/Tasks/DBIncrementTask.uc b/sources/Data/Database/Tasks/DBIncrementTask.uc index 886c739..c18d53b 100644 --- a/sources/Data/Database/Tasks/DBIncrementTask.uc +++ b/sources/Data/Database/Tasks/DBIncrementTask.uc @@ -1,6 +1,6 @@ /** * Variant of `DBTask` for `IncrementData()` query. - * Copyright 2021 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -19,7 +19,10 @@ */ class DBIncrementTask extends DBTask; -delegate connect(Database.DBQueryResult result, Database source) {} +delegate connect( + Database.DBQueryResult result, + Database source, + int requestID) {} protected function Finalizer() { @@ -29,7 +32,7 @@ protected function Finalizer() protected function CompleteSelf(Database source) { - connect(GetResult(), source); + connect(GetResult(), source, GetRequestID()); } defaultproperties diff --git a/sources/Data/Database/Tasks/DBKeysTask.uc b/sources/Data/Database/Tasks/DBKeysTask.uc index f074609..2f3097b 100644 --- a/sources/Data/Database/Tasks/DBKeysTask.uc +++ b/sources/Data/Database/Tasks/DBKeysTask.uc @@ -1,6 +1,6 @@ /** * Variant of `DBTask` for `GetDataKeys()` query. 
- * Copyright 2021-2022 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -23,8 +23,9 @@ var private ArrayList queryKeysResponse; delegate connect( Database.DBQueryResult result, - ArrayList keys, - Database source) {} + /*take*/ ArrayList keys, + Database source, + int requestID) {} protected function Finalizer() { @@ -40,7 +41,7 @@ public function SetDataKeys(/* take */ ArrayList keys) protected function CompleteSelf(Database source) { - connect(GetResult(), queryKeysResponse, source); + connect(GetResult(), queryKeysResponse, source, GetRequestID()); } defaultproperties diff --git a/sources/Data/Database/Tasks/DBReadTask.uc b/sources/Data/Database/Tasks/DBReadTask.uc index 54b4925..7a62467 100644 --- a/sources/Data/Database/Tasks/DBReadTask.uc +++ b/sources/Data/Database/Tasks/DBReadTask.uc @@ -1,6 +1,6 @@ /** * Variant of `DBTask` for `ReadData()` query. - * Copyright 2021 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -23,8 +23,9 @@ var private AcediaObject queryDataResponse; delegate connect( Database.DBQueryResult result, - AcediaObject data, - Database source) {} + /*take*/ AcediaObject data, + Database source, + int requestID) {} protected function Finalizer() { @@ -40,7 +41,7 @@ public function SetReadData(AcediaObject data) protected function CompleteSelf(Database source) { - connect(GetResult(), queryDataResponse, source); + connect(GetResult(), queryDataResponse, source, GetRequestID()); } defaultproperties diff --git a/sources/Data/Database/Tasks/DBRemoveTask.uc b/sources/Data/Database/Tasks/DBRemoveTask.uc index 50278f7..4b74a2f 100644 --- a/sources/Data/Database/Tasks/DBRemoveTask.uc +++ b/sources/Data/Database/Tasks/DBRemoveTask.uc @@ -1,6 +1,6 @@ /** * Variant of `DBTask` for `RemoveData()` query. - * Copyright 2021 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -19,7 +19,10 @@ */ class DBRemoveTask extends DBTask; -delegate connect(Database.DBQueryResult result, Database source) {} +delegate connect( + Database.DBQueryResult result, + Database source, + int requestID) {} protected function Finalizer() { @@ -29,7 +32,7 @@ protected function Finalizer() protected function CompleteSelf(Database source) { - connect(GetResult(), source); + connect(GetResult(), source, GetRequestID()); } defaultproperties diff --git a/sources/Data/Database/Tasks/DBSizeTask.uc b/sources/Data/Database/Tasks/DBSizeTask.uc index 564ac9e..9178700 100644 --- a/sources/Data/Database/Tasks/DBSizeTask.uc +++ b/sources/Data/Database/Tasks/DBSizeTask.uc @@ -1,6 +1,6 @@ /** * Variant of `DBTask` for `GetDataSize()` query. - * Copyright 2021 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. 
* @@ -21,7 +21,11 @@ class DBSizeTask extends DBTask; var private int querySizeResponse; -delegate connect(Database.DBQueryResult result, int size, Database source) {} +delegate connect( + Database.DBQueryResult result, + int size, + Database source, + int requestID) {} protected function Finalizer() { @@ -37,7 +41,7 @@ public function SetDataSize(int size) protected function CompleteSelf(Database source) { - connect(GetResult(), querySizeResponse, source); + connect(GetResult(), querySizeResponse, source, GetRequestID()); } defaultproperties diff --git a/sources/Data/Database/Tasks/DBWriteTask.uc b/sources/Data/Database/Tasks/DBWriteTask.uc index 5c3961d..3036920 100644 --- a/sources/Data/Database/Tasks/DBWriteTask.uc +++ b/sources/Data/Database/Tasks/DBWriteTask.uc @@ -1,6 +1,6 @@ /** * Variant of `DBTask` for `WriteData()` query. - * Copyright 2021 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -19,7 +19,10 @@ */ class DBWriteTask extends DBTask; -delegate connect(Database.DBQueryResult result, Database source) {} +delegate connect( + Database.DBQueryResult result, + Database source, + int requestID) {} protected function Finalizer() { @@ -29,7 +32,7 @@ protected function Finalizer() protected function CompleteSelf(Database source) { - connect(GetResult(), source); + connect(GetResult(), source, GetRequestID()); } defaultproperties diff --git a/sources/Data/Database/Tests/TEST_LocalDatabase.uc b/sources/Data/Database/Tests/TEST_LocalDatabase.uc index bceb211..d983167 100644 --- a/sources/Data/Database/Tests/TEST_LocalDatabase.uc +++ b/sources/Data/Database/Tests/TEST_LocalDatabase.uc @@ -27,63 +27,78 @@ var protected Database.DBQueryResult resultType; var protected Database.DataType resultDataType; var protected HashTable resultData; var protected AcediaObject resultObject; +var protected int resultRequestID; protected function DBReadingHandler( Database.DBQueryResult result, AcediaObject data, - Database source) + Database source, + int requestID) { default.resultType = result; default.resultObject = data; default.resultData = HashTable(data); + default.resultRequestID = requestID; } protected function DBKeysHandler( Database.DBQueryResult result, ArrayList keys, - Database source) + Database source, + int requestID) { - default.resultType = result; - default.resultKeys = keys; + default.resultType = result; + default.resultKeys = keys; + default.resultRequestID = requestID; } protected function DBCheckHandler( Database.DBQueryResult result, Database.DataType type, - Database source) + Database source, + int requestID) { - default.resultType = result; - default.resultDataType = type; + default.resultType = result; + default.resultDataType = type; + default.resultRequestID = requestID; } protected function DBSizeHandler( Database.DBQueryResult result, int size, - Database source) + Database source, + int requestID) { - default.resultType = result; - default.resultSize = size; + default.resultType = result; + default.resultSize = size; + default.resultRequestID = requestID; } protected function DBWritingHandler( Database.DBQueryResult result, - Database source) + Database source, + int requestID) { - default.resultType = result; + default.resultType = result; + default.resultRequestID = requestID; } protected function DBIncrementHandler( Database.DBQueryResult result, - Database source) + Database source, + int requestID) { - default.resultType = result; + 
default.resultType = result; + default.resultRequestID = requestID; } protected function DBRemoveHandler( Database.DBQueryResult result, - Database source) + Database source, + int requestID) { - default.resultType = result; + default.resultType = result; + default.resultRequestID = requestID; } protected static function ReadFromDB(LocalDatabaseInstance db, string pointer) @@ -220,6 +235,7 @@ protected static function TESTS() Test_TaskChaining(); Test_Removal(); Test_Increment(); + Test_RequestID(); } protected static function Test_LoadingPrepared() @@ -1232,8 +1248,9 @@ protected static function SubTest_IncrementRewriteArray( protected static function SubTest_IncrementMissing(LocalDatabaseInstance db) { - local DBIncrementTask task; - Issue("New values are created in database after incrementing with path" + local DBIncrementTask task; + local DBCheckTask checkTask; + Issue("New values are not created in database after incrementing with path" @ "pointing to non-existing value."); task = db.IncrementData(__().json.Pointer(P("/L")), __().box.int(345)); task.connect = DBIncrementHandler; @@ -1245,13 +1262,168 @@ protected static function SubTest_IncrementMissing(LocalDatabaseInstance db) task.connect = DBIncrementHandler; task.TryCompleting(); TEST_ExpectTrue(default.resultType == DBR_Success); - db.CheckDataType(__().json.Pointer(P("/L"))).connect = DBCheckHandler; + checkTask = db.CheckDataType(__().json.Pointer(P("/L"))); + checkTask.connect = DBCheckHandler; + checkTask.TryCompleting(); + TEST_ExpectTrue(default.resultDataType == JSON_Number); + TEST_ExpectTrue(default.resultType == DBR_Success); ReadFromDB(db, "/B/A/1/"); TEST_ExpectTrue(default.resultDataType == JSON_Number); TEST_ExpectTrue(ArrayList(default.resultObject).GetLength() == 12); TEST_ExpectTrue(ArrayList(default.resultObject).GetInt(11) == 85); } +protected static function Test_RequestID() +{ + local LocalDatabaseInstance db; + local ArrayList templateArray; + local HashTable templateObject; + templateObject = GetJSONSubTemplateObject(); + templateArray = GetJSONSubTemplateArray(); + db = __core().db.NewLocal(P("TEST_DB")); + db.WriteData(__().json.Pointer(P("")), templateObject); + db.WriteData(__().json.Pointer(P("/B")), templateObject); + db.WriteData(__().json.Pointer(P("/C")), __().box.int(-5)); + db.WriteData(__().json.Pointer(P("/D")), __().box.bool(false)); + db.WriteData(__().json.Pointer(P("/B/A")), templateArray); + db.WriteData(__().json.Pointer(P("/B/A/1")), templateObject); + db.WriteData(__().json.Pointer(P("/B/A/1/")), templateArray); + /* `db` now contains: + { + "A": "simpleValue", + "B": { + "A": [true, { + "A": "simpleValue", + "B": 11.12, + "": [true, null, "huh"] + }, "huh"], + "B": 11.12 + }, + "C": -5, + "D": false + } + */ + // Constantly recreating `db` takes time, so we make test dependent + // on each other. + // Generally speaking this is not great, but we cannot run them in + // parallel anyway. 
+ Context("Testing whether database operations report correct request ID."); + SubTest_RequestIDForCheck(db); + SubTest_RequestIDForIncrement(db); + SubTest_RequestIDForKeys(db); + SubTest_RequestIDForRead(db); + SubTest_RequestIDForRemove(db); + SubTest_RequestIDForSize(db); + SubTest_RequestIDForWrite(db); + __core().db.DeleteLocal(P("TEST_DB")); +} + +protected static function SubTest_RequestIDForCheck(LocalDatabaseInstance db) +{ + local DBCheckTask task; + + Issue("Type checking operation isn't returning correct request ID."); + task = db.CheckDataType(__().json.Pointer(P("/L"))); + task.connect = DBCheckHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 0); + task = db.CheckDataType(__().json.Pointer(P("/L")), 29); + task.connect = DBCheckHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 29); +} + +protected static function SubTest_RequestIDForIncrement( + LocalDatabaseInstance db) +{ + local DBIncrementTask task; + + Issue("Increment operation isn't returning correct request ID."); + task = db.IncrementData(__().json.Pointer(P("/L")), __().box.int(29)); + task.connect = DBIncrementHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 0); + task = db.IncrementData(__().json.Pointer(P("/L")), __().box.int(29), -7); + task.connect = DBIncrementHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == -7); +} + +protected static function SubTest_RequestIDForKeys(LocalDatabaseInstance db) +{ + local DBKeysTask task; + + Issue("Keys list operation isn't returning correct request ID."); + task = db.GetDataKeys(__().json.Pointer(P(""))); + task.connect = DBKeysHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 0); + task = db.GetDataKeys(__().json.Pointer(P("")), 11); + task.connect = DBKeysHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 11); +} + +protected static function SubTest_RequestIDForRead(LocalDatabaseInstance db) +{ + local DBReadTask task; + + Issue("Reading operation isn't returning correct request ID."); + task = db.ReadData(__().json.Pointer(P("/L")),); + task.connect = DBReadingHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 0); + task = db.ReadData(__().json.Pointer(P("/L")),, 666); + task.connect = DBReadingHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 666); +} + +protected static function SubTest_RequestIDForRemove(LocalDatabaseInstance db) +{ + local DBRemoveTask task; + + Issue("Removing operation isn't returning correct request ID."); + task = db.RemoveData(__().json.Pointer(P("/L"))); + task.connect = DBRemoveHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 0); + task = db.RemoveData(__().json.Pointer(P("/L")), 80); + task.connect = DBRemoveHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 80); +} + +protected static function SubTest_RequestIDForSize(LocalDatabaseInstance db) +{ + local DBSizeTask task; + + Issue("Size getting operation isn't returning correct request ID."); + task = db.GetDataSize(__().json.Pointer(P("/L"))); + task.connect = DBSizeHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 0); + task = db.GetDataSize(__().json.Pointer(P("/L")), 7); + task.connect = DBSizeHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 7); +} + +protected static function SubTest_RequestIDForWrite(LocalDatabaseInstance db) +{ + local DBWriteTask task; 
+ + Issue("Writing operation isn't returning correct request ID."); + task = db.WriteData(__().json.Pointer(P("/L")), none); + task.connect = DBWritingHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 0); + task = db.WriteData(__().json.Pointer(P("/L")), none, 42); + task.connect = DBWritingHandler; + task.TryCompleting(); + TEST_ExpectTrue(default.resultRequestID == 42); +} + defaultproperties { caseGroup = "Database" diff --git a/sources/Manifest.uc b/sources/Manifest.uc index 8b140e9..4e35b39 100644 --- a/sources/Manifest.uc +++ b/sources/Manifest.uc @@ -54,7 +54,8 @@ defaultproperties testCases(25) = class'TEST_BigInt' testCases(26) = class'TEST_DatabaseCommon' testCases(27) = class'TEST_LocalDatabase' - testCases(28) = class'TEST_AcediaConfig' - testCases(29) = class'TEST_UTF8EncoderDecoder' - testCases(30) = class'TEST_AvariceStreamReader' + testCases(28) = class'TEST_DBConnection' + testCases(29) = class'TEST_AcediaConfig' + testCases(30) = class'TEST_UTF8EncoderDecoder' + testCases(31) = class'TEST_AvariceStreamReader' } \ No newline at end of file diff --git a/sources/Players/EPlayer.uc b/sources/Players/EPlayer.uc index 210e709..40dabce 100644 --- a/sources/Players/EPlayer.uc +++ b/sources/Players/EPlayer.uc @@ -57,6 +57,7 @@ protected function Finalizer() { _.memory.Free(controller); _.memory.Free(consoleInstance); + _.memory.Free(identity); controller = none; consoleInstance = none; // No need to deallocate `User` objects, since they are all have unique @@ -96,7 +97,6 @@ public final /* unreal */ function bool Initialize( idHash = _.text.FromString(initController.GetPlayerIDHash()); identity = _.users.FetchByIDHash(idHash); idHash.FreeSelf(); - idHash = none; } signalsReferences = playerSignals; controller = _server.unreal.ActorRef(initController); @@ -121,6 +121,9 @@ public function EInterface Copy() // not initialized return playerCopy; } + if (identity != none) { + identity.NewRef(); + } playerCopy.identity = identity; playerCopy.Initialize( PlayerController(controller.Get()), signalsReferences); diff --git a/sources/Text/BaseText.uc b/sources/Text/BaseText.uc index cc5bec7..218788e 100644 --- a/sources/Text/BaseText.uc +++ b/sources/Text/BaseText.uc @@ -404,6 +404,54 @@ public final function MutableText UpperMutableCopy( return textCopy; } +/** + * Checks whether all letters in the caller text is in the lower case. + * + * @return `true` if there are no characters that qualify as upper case and + * `false` otherwise. + */ +public final function bool IsLowerCase() +{ + local int i; + local Character nextCharacter; + + if (IsEmpty()) { + return true; + } + for (i = 0; i < GetLength(); i += 1) + { + nextCharacter = GetCharacter(i); + if (_.text.IsUpper(nextCharacter)) { + return false; + } + } + return true; +} + +/** + * Checks whether all letters in the caller text is in the lower case. + * + * @return `true` if there are no characters that qualify as lower case and + * `false` otherwise. + */ +public final function bool IsUpperCase() +{ + local int i; + local Character nextCharacter; + + if (IsEmpty()) { + return true; + } + for (i = 0; i < GetLength(); i += 1) + { + nextCharacter = GetCharacter(i); + if (_.text.IsLower(nextCharacter)) { + return false; + } + } + return true; +} + /** * Checks if caller `BaseText` contains a valid name object or not. 
 *
diff --git a/sources/Text/JSON/JSONAPI.uc b/sources/Text/JSON/JSONAPI.uc
index 9caf7f8..04a2674 100644
--- a/sources/Text/JSON/JSONAPI.uc
+++ b/sources/Text/JSON/JSONAPI.uc
@@ -5,7 +5,7 @@
  * both valid and invalid JSON. However only correctly parsing valid JSON
  * is guaranteed. This means that you should not rely on these methods to parse
  * any JSON extensions or validate JSON for you.
- * Copyright 2021-2022 Anton Tarasenko
+ * Copyright 2021-2023 Anton Tarasenko
 *------------------------------------------------------------------------------
 * This file is part of Acedia.
 *
@@ -1381,6 +1381,315 @@ private final function int GetEscapedVersion(int codePoint)
     return codePoint;
 }
 
+/**
+ * Increments given value by another value, producing the result as a new
+ * JSON-compatible value.
+ *
+ * What "incrementing" actually does depends on the passed JSON values
+ * (`valueToIncrement` and `increment` parameters). Unless either of them is
+ * `none` (then "increment" simply acts as a `_.json.Copy()` method for
+ * the non-`none` one), they must represent the same JSON type and:
+ *
+ *  1. JSON bool: performs logical "or" operation on given values;
+ *  2. JSON number: adds values together;
+ *  3. JSON string: appends `increment` at the end of `valueToIncrement`;
+ *  4. JSON array: appends copy of elements of `increment` at the end of
+ *      array of copies of elements of `valueToIncrement`
+ *      (calls `ArrayList::append()` method);
+ *  5. JSON object: creates new collection, based on `valueToIncrement`,
+ *      with added key-value pairs from `increment` (all elements are
+ *      copied). Does not override old values
+ *      (calls `HashTable::append()` method).
+ *
+ * In case they represent different JSON types (that aren't "null"),
+ * incrementing produces `none`.
+ *
+ * @param valueToIncrement Value to increment. Can be any JSON-compatible
+ *      value.
+ * @param increment Value to increment it by. Can be any
+ *      JSON-compatible value.
+ * @return Incremented data (guaranteed to contain copies and not actual
+ *      objects from either `valueToIncrement` or `increment`). `none` if
+ *      argument types were incompatible. Whether type of the result will be
+ *      immutable (boxes and `Text`) or mutable (refs and `MutableText`)
+ *      depends on immutability of `valueToIncrement`. When adding two numbers,
+ *      whether result will be boxed (this includes both boxes and refs) `int`
+ *      or `float` depends on both parameters - if either of them is `float`,
+ *      then result will be `float`.
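+ *
+ * A small illustration of the rules above (the concrete values are
+ * arbitrary, chosen only as an example):
+ * ```
+ * local AcediaObject sum;
+ * sum = _.json.Increment(_.box.int(2), _.ref.float(0.5));
+ * // `sum` is a `FloatBox` with value `2.5`: one argument was a float,
+ * // so the result is a float, and `valueToIncrement` was immutable (boxed)
+ * _.memory.Free(sum);
+ * ```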
+ */ +public final function AcediaObject Increment( + AcediaObject valueToIncrement, + AcediaObject increment) +{ + local AcediaObject result; + + if (valueToIncrement == none) { + result = _.json.Copy(increment); + } + else if (increment == none) { + result = _.json.Copy(valueToIncrement); + } + else if ( valueToIncrement.class == class'IntBox' + || valueToIncrement.class == class'IntRef' + || valueToIncrement.class == class'FloatBox' + || valueToIncrement.class == class'FloatRef') + { + result = Increment_Number(valueToIncrement, increment); + } + else if ( valueToIncrement.class == class'BoolBox' + || valueToIncrement.class == class'BoolRef') + { + result = Increment_Bool(valueToIncrement, increment); + } + else if ( valueToIncrement.class == class'Text' + || valueToIncrement.class == class'MutableText') + { + result = Increment_Text(valueToIncrement, increment); + } + else { + result = Increment_Collections(valueToIncrement, increment); + } + return result; +} + +// Assumes `valueToIncrement` and `increment` aren't `none` +private final function AcediaObject Increment_Collections( + AcediaObject valueToIncrement, + AcediaObject increment) +{ + local AcediaObject result; + local ArrayList arrayListCopy; + local HashTable hashTableCopy; + + if ( valueToIncrement.class == class'ArrayList' + && increment.class == class'ArrayList') + { + arrayListCopy = ArrayList(Copy(ArrayList(increment))); + result = CopyArrayList(ArrayList(valueToIncrement)) + .Append(arrayListCopy); + _.memory.Free(arrayListCopy); + } + else if ( valueToIncrement.class == class'HashTable' + && increment.class == class'HashTable') + { + hashTableCopy = HashTable(Copy(HashTable(increment))); + result = CopyHashTable(HashTable(valueToIncrement)) + .Append(hashTableCopy); + _.memory.Free(hashTableCopy); + } + return result; +} + +// Assumes `valueToIncrement` and `increment` aren't `none` +private final function AcediaObject Increment_Text( + AcediaObject valueToIncrement, + AcediaObject increment) +{ + local BaseText textIncrement; + local MutableText builder; + + textIncrement = BaseText(increment); + if (BaseText(increment) == none) { + return none; + } + builder = BaseText(valueToIncrement).MutableCopy(); + builder.Append(textIncrement); + if (Text(valueToIncrement) != none) { + return builder.IntoText(); + } + return builder; +} + +// Assumes `valueToIncrement` and `increment` aren't `none` +private final function AcediaObject Increment_Bool( + AcediaObject valueToIncrement, + AcediaObject increment) +{ + local bool value1, value2; + + if (valueToIncrement.class == class'BoolBox') { + value1 = BoolBox(valueToIncrement).Get(); + } + if (valueToIncrement.class == class'BoolRef') { + value1 = BoolRef(valueToIncrement).Get(); + } + if (increment.class == class'BoolBox') { + value2 = BoolBox(increment).Get(); + } + else if (increment.class == class'BoolRef') { + value2 = BoolRef(increment).Get(); + } + else { + return none; + } + if (ValueBox(valueToIncrement) != none) { + return _.box.bool(value1 || value2); + } + return _.ref.bool(value1 || value2); +} + +// Assumes `valueToIncrement` and `increment` aren't `none` +// Assumes `valueToIncrement` is one of four classes: `IntBox`, `IntRef`, +// `FloatBox` or `FloatRef`. 
+private final function AcediaObject Increment_Number( + AcediaObject valueToIncrement, + AcediaObject increment) +{ + local bool hasFloats; + local int intSummand1, intSummand2, intSum; + local float floatSummand1, floatSummand2, floatSum; + + hasFloats = valueToIncrement.class == class'FloatBox' + || valueToIncrement.class == class'FloatRef' + || increment.class == class'FloatBox' + || increment.class == class'FloatRef'; + // `valueToIncrement` is guaranteed to have an appropriate type, + // but `increment` might not, so only do check on second call + ExtractBoxedNumericValue(valueToIncrement, intSummand1, floatSummand1); + if (!ExtractBoxedNumericValue(increment, intSummand2, floatSummand2)) { + return none; + } + if (hasFloats) + { + floatSum = floatSummand1 + floatSummand2 + + float(intSummand1 + intSummand2); + if (ValueBox(valueToIncrement) != none) { + return _.box.float(floatSum); + } + return _.ref.float(floatSum); + } + intSum = intSummand1 + intSummand2 + int(floatSummand1 + floatSummand2); + if (ValueBox(valueToIncrement) != none) { + return _.box.int(intSum); + } + return _.ref.int(intSum); +} + +// Extracts numeric value and records it into one of two out arguments: +// +// * `asInteger` iff `value` is either `IntBox` or `IntRef`; +// * `asFloat` iff `value` is either `FloatBox` or `FloatRef`; +// +// Does not change the value in remaining parameter. +// Returns `true` in case of success (method managed to read the value) and +// `false` otherwise (`value` had non-numeric or unknown type). +private final function bool ExtractBoxedNumericValue( + AcediaObject value, + out int asInteger, + out float asFloat) +{ + local bool success; + + if (value.class == class'IntBox') + { + asInteger = IntBox(value).Get(); + success = true; + } + else if (value.class == class'IntRef') + { + asInteger = IntRef(value).Get(); + success = true; + } + else if (value.class == class'FloatBox') + { + asFloat = FloatBox(value).Get(); + success = true; + } + else if (value.class == class'FloatRef') + { + asFloat = FloatRef(value).Get(); + success = true; + } + return success; +} + +/** + * Performs a deep copy of the `inputData`. This means it copies not only + * `inputData` itself, but (in case it is a container) all of the values + * within it, instead of simply storing the same references (as opposed to + * shallow copy). Stored values also recursively deep copied. + * + * @param inputData Data to copy. Can be any JSON-compatible value. + * @return Copy of the `inputData`. Any non-JSON values are copied as `none`. 
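+ *
+ * A short, hypothetical example (key name and value are arbitrary):
+ * ```
+ * local HashTable original, duplicate;
+ * original = _.collections.EmptyHashTable();
+ * original.SetInt(P("answer"), 42);
+ * duplicate = HashTable(_.json.Copy(original));
+ * // `duplicate` holds its own copies of the stored values, so modifying
+ * // one collection does not affect the other
+ * ```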
+ */
+public final function AcediaObject Copy(AcediaObject inputData)
+{
+    if (inputData == none) {
+        return none;
+    }
+    if (inputData.class == class'IntBox') {
+        return _.box.int(IntBox(inputData).Get());
+    }
+    if (inputData.class == class'IntRef') {
+        return _.ref.int(IntRef(inputData).Get());
+    }
+    if (inputData.class == class'BoolBox') {
+        return _.box.bool(BoolBox(inputData).Get());
+    }
+    if (inputData.class == class'BoolRef') {
+        return _.ref.bool(BoolRef(inputData).Get());
+    }
+    if (inputData.class == class'FloatBox') {
+        return _.box.float(FloatBox(inputData).Get());
+    }
+    if (inputData.class == class'FloatRef') {
+        return _.ref.float(FloatRef(inputData).Get());
+    }
+    if (inputData.class == class'Text') {
+        return Text(inputData).Copy();
+    }
+    if (inputData.class == class'MutableText') {
+        return MutableText(inputData).MutableCopy();
+    }
+    if (inputData.class == class'ArrayList') {
+        return CopyArrayList(ArrayList(inputData));
+    }
+    if (inputData.class == class'HashTable') {
+        return CopyHashTable(HashTable(inputData));
+    }
+    return none;
+}
+
+private final function ArrayList CopyArrayList(ArrayList inputList)
+{
+    local int i, inputListLength;
+    local ArrayList result;
+    local AcediaObject nextObject, nextCopy;
+
+    result = _.collections.EmptyArrayList();
+    inputListLength = inputList.GetLength();
+    for (i = 0; i < inputListLength; i += 1)
+    {
+        nextObject = inputList.GetItem(i);
+        nextCopy = Copy(nextObject);
+        result.AddItem(nextCopy);
+        _.memory.Free(nextCopy);
+        _.memory.Free(nextObject);
+    }
+    return result;
+}
+
+private final function HashTable CopyHashTable(HashTable inputTable)
+{
+    local int i;
+    local HashTable result;
+    local array<Text> textKeys;
+    local AcediaObject nextObject, nextCopy;
+
+    result = _.collections.EmptyHashTable();
+    textKeys = inputTable.GetTextKeys();
+    for (i = 0; i < textKeys.length; i += 1)
+    {
+        nextObject = inputTable.GetItem(textKeys[i]);
+        nextCopy = Copy(nextObject);
+        result.SetItem(textKeys[i], nextCopy);
+        _.memory.Free(nextCopy);
+        _.memory.Free(nextObject);
+    }
+    _.memory.FreeMany(textKeys);
+    return result;
+}
+
 defaultproperties
 {
     MAX_FLOAT_PRECISION = 4
diff --git a/sources/Text/JSON/JSONPointer.uc b/sources/Text/JSON/JSONPointer.uc
index 3358714..63ed173 100644
--- a/sources/Text/JSON/JSONPointer.uc
+++ b/sources/Text/JSON/JSONPointer.uc
@@ -5,7 +5,7 @@
  * Path "/a/b/c" will be stored as a sequence of components "a", "b" and "c",
  * path "/" will be stored as a singular empty component ""
  * and empty path "" would mean that there is not components at all.
- * Copyright 2021 Anton Tarasenko
+ * Copyright 2021-2023 Anton Tarasenko
 *------------------------------------------------------------------------------
 * This file is part of Acedia.
 *
@@ -293,6 +293,30 @@ public final function int GetNumericComponent(int index)
     return components[index].asNumber;
 }
 
+/**
+ * Checks whether the component at the given index can be used to index
+ * a JSON array.
+ *
+ * This method accepts numeric components, plus the component equal to "-",
+ * which can be used to point at the element after the last one in
+ * the JSON array.
+ *
+ * @param index Index of the component to check.
+ * @return `true` if the component with the given index exists and is either
+ *      a non-negative number or "-", `false` otherwise.
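+ *
+ * For example, for a pointer "/items/0/-" (an arbitrary illustrative path):
+ * ```
+ * local JSONPointer pointer;
+ * pointer = _.json.Pointer(P("/items/0/-"));
+ * pointer.IsComponentArrayApplicable(0);  // `false`: "items" is not numeric
+ * pointer.IsComponentArrayApplicable(1);  // `true`: "0" is a valid index
+ * pointer.IsComponentArrayApplicable(2);  // `true`: "-" is the special alias
+ * ```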
+ */ +public final function bool IsComponentArrayApplicable(int index) +{ + local bool isAddElementAlias; + local Text component; + + if (GetNumericComponent(index) >= 0) { + return true; + } + component = GetComponent(index); + isAddElementAlias = P("-").IsEqual(component); + _.memory.Free(component); + return isAddElementAlias; +} + /** * Converts caller `JSONPointer` into it's `Text` representation. * @@ -375,18 +399,42 @@ public final function int GetFoldsAmount() /** * Makes an exact copy of the caller `JSONPointer`. * - * @return Copy of the caller `JSONPointer`. + * Copies components in the range `[startIndex; startIndex + maxLength - 1]` + * If provided parameters `startIndex` and `maxLength` define a range that + * goes beyond `[0; self.GetLength() - 1]`, then intersection with a valid + * range will be used. + * + * @param startIndex Position of the first component to copy. + * By default `0`, corresponding to the very first component. + * @param maxLength Max length of the extracted JSON pointer (in amount of + * components). By default `0` - that and all negative values mean that + * method should extract all components to the right of `startIndex`. + * @return Copy of the specified range of the caller `JSONPointer`. */ -public final function JSONPointer Copy() +public final function JSONPointer Copy( + optional int startIndex, + optional int maxLength) { - local int i; + local int i, endIndex; local JSONPointer newPointer; local array newComponents; - newComponents = components; - for (i = 0; i < newComponents.length; i += 1) + + if (maxLength <= 0) { + maxLength = components.length - startIndex; + } + endIndex = startIndex + maxLength; + if (endIndex <= 0) { + return JSONPointer(_.memory.Allocate(class'JSONPointer')); + } + startIndex = Max(startIndex, 0); + endIndex = Min(endIndex, components.length); + for (i = startIndex; i < endIndex; i += 1) { - if (newComponents[i].asText != none) { - newComponents[i].asText = newComponents[i].asText.MutableCopy(); + newComponents[newComponents.length] = components[i]; + if (components[i].asText != none) + { + newComponents[newComponents.length - 1].asText = + components[i].asText.MutableCopy(); } } newPointer = JSONPointer(_.memory.Allocate(class'JSONPointer')); @@ -394,6 +442,85 @@ public final function JSONPointer Copy() return newPointer; } +/** + * Appends path, contained in JSON pointer `other` to the caller JSON pointer. + * Appending "/A/B/7/C" to "/object/hey/1/there/" produces + * "/object/hey/1/there//A/B/7/C". + * + * @param other Pointer to append. If `none` - caller `JSONPointer` will + * not change. + * @return Reference to the caller `JSONPointer` to allow for method chaining. + */ +public final function JSONPointer Append(JSONPointer other) +{ + local int i; + local array otherComponents; + + if (other == none) { + return self; + } + otherComponents = other.components; + for (i = 0; i < otherComponents.length; i += 1) + { + if (otherComponents[i].asText != none) { + otherComponents[i].asText = otherComponents[i].asText.MutableCopy(); + } + components[components.length] = otherComponents[i]; + } + return self; +} + +/** + * Checks if given pointer corresponds with the beginning of the caller one. + * + * Pointer starts with another one if it includes all of its fields from + * the beginning and in order + * E.g. "/A/B/C" starts with "/A/B", but not with "/A/B/C/D", "/D/A/B/C" or + * "/A/B/CD". + * + * @param other Candidate into being caller pointer's prefix. + * @return `true` if `other` is prefix and `false` otherwise. 
`none` is + * considered to be an empty pointer and, therefore, prefix to any other + * pointer. + */ +public final function bool StartsWith(JSONPointer other) +{ + local int i; + local array otherComponents; + + // `none` is same as empty + if (other == none) return true; + otherComponents = other.components; + // Not enough length + if (components.length < otherComponents.length) return false; + + for (i = 0; i < otherComponents.length; i += 1) + { + // Compare numeric components if at least one is such + if ( components[i].testedForBeingNumeric + || otherComponents[i].testedForBeingNumeric) + { + if (GetNumericComponent(i) != other.GetNumericComponent(i)) { + return false; + } + // End this iteration for numeric component, but continue for + // text ones + if (GetNumericComponent(i) >= 0) { + continue; + } + } + // We can reach here if: + // 1. Neither components have `testedForBeingNumeric` set to + // `true`, neither `asText` fields are `none` by the invariant; + // 2. At least one had `testedForBeingNumeric`, but they tested + // negative for being numeric. + if (!components[i].asText.Compare(otherComponents[i].asText)) { + return false; + } + } + return true; +} + defaultproperties { TSLASH = 0 diff --git a/sources/Text/Tests/TEST_JSON.uc b/sources/Text/Tests/TEST_JSON.uc index ac5de65..79534a8 100644 --- a/sources/Text/Tests/TEST_JSON.uc +++ b/sources/Text/Tests/TEST_JSON.uc @@ -1,6 +1,6 @@ /** * Set of tests for functionality of JSON printing/parsing. - * Copyright 2021-2022 Anton Tarasenko + * Copyright 2021-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -22,11 +22,18 @@ class TEST_JSON extends TestCase var string simpleJSONObject, complexJSONObject; +protected static function JSONPointer MakePtr(string str) +{ + return __().json.Pointer(__().text.FromString(str)); +} + protected static function TESTS() { Test_Pointer(); Test_Print(); Test_Parse(); + Test_Copy(); + Test_Incrementing(); } protected static function Test_Pointer() @@ -37,6 +44,10 @@ protected static function Test_Pointer() SubTest_PointerPushPop(); SubTest_PointerNumeric(); SubTest_PopWithoutRemoving(); + SubTest_Append(); + SubText_Copy(); + SubTest_StartsWith(); + SubTest_IsComponentArrayApplicable(); } protected static function SubTest_PointerCreate() @@ -187,6 +198,114 @@ protected static function SubTest_PopWithoutRemoving() TEST_ExpectTrue(pointer.Pop(true).ToString() == "simple"); } +protected static function SubTest_Append() +{ + local JSONPointer pointer, append; + Issue("Appending another JSON pointer is not working correctly."); + pointer = __().json.Pointer(P("/object/hey/1/there/")); + append = __().json.Pointer(P("/A/B/7/C")); + pointer.Append(append); + TEST_ExpectTrue( + pointer.ToText().ToString() + == "/object/hey/1/there//A/B/7/C"); + pointer = __().json.Pointer(P("")); + append = __().json.Pointer(P("/A/B/7/C")); + pointer.Append(append); + TEST_ExpectTrue(pointer.ToText().ToString() == "/A/B/7/C"); + pointer = __().json.Pointer(P("/object/hey/1/there/")); + append = __().json.Pointer(P("")); + pointer.Append(append); + TEST_ExpectTrue(pointer.ToText().ToString() == "/object/hey/1/there/"); + pointer = __().json.Pointer(P("/object/hey/1/there/")); + pointer.Append(none); + TEST_ExpectTrue(pointer.ToText().ToString() == "/object/hey/1/there/"); +} + +protected static function SubText_Copy() +{ + Issue("JSON pointer's `Copy()` method does not correctly copy the whole" + @ "pointer."); + 
TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy().ToText().ToString() + == "/A/B/3/D"); + TEST_ExpectTrue(MakePtr("/").Copy().ToText().ToString() == "/"); + TEST_ExpectTrue(MakePtr("").Copy().ToText().ToString() == ""); + TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy(0, 4).ToText().ToString() + == "/A/B/3/D"); + TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy(-2).ToText().ToString() + == "/A/B/3/D"); + TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy(0, 7).ToText().ToString() + == "/A/B/3/D"); + TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy(-3, 7).ToText().ToString() + == "/A/B/3/D"); + TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy(-5, 20).ToText().ToString() + == "/A/B/3/D"); + TEST_ExpectTrue(MakePtr("").Copy(-1).ToText().ToString() == ""); + + Issue("JSON pointer's `Copy()` method does not correctly copy empty range" + @ "of the pointer."); + TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy(-5, 4).ToText().ToString() == ""); + TEST_ExpectTrue(MakePtr("/A/B/3/D").Copy(4, 11).ToText().ToString() == ""); + + Issue("JSON pointer's `Copy()` method does not correctly copy partial" + @ "intersection range of the pointer."); + TEST_ExpectTrue(MakePtr("/A//3/D").Copy(-5, 8).ToText().ToString() + == "/A//3"); // left + TEST_ExpectTrue(MakePtr("/A//3/D").Copy(1, 11).ToText().ToString() + == "//3/D"); // right +} + +protected static function SubTest_StartsWith() +{ + local JSONPointer pointer; + + Issue("Any pointers start with `none` JSON pointer."); + TEST_ExpectTrue(__().json.Pointer(P("/A/B/C")).StartsWith(none)); + TEST_ExpectTrue(__().json.Pointer(P("/")).StartsWith(none)); + TEST_ExpectTrue(__().json.Pointer(P("")).StartsWith(none)); + + Issue("`StartsWith()` correctly detects JSON pointers that are actually" + @ "their prefixes."); + TEST_ExpectTrue(__().json.Pointer(P("/A/7/C")) + .StartsWith(__().json.Pointer(P("/A/7/C")))); + // Same, but constructed manually to handle components added as numeric + pointer = __().json.Pointer().Push(P("A")).PushNumeric(7).Push(P("C")); + TEST_ExpectTrue(pointer.StartsWith(__().json.Pointer(P("/A/7/C")))); + TEST_ExpectTrue(__().json.Pointer(P("/A/7/C")) + .StartsWith(__().json.Pointer(P("/A/7")))); + TEST_ExpectTrue(__().json.Pointer(P("/A/7/C")) + .StartsWith(__().json.Pointer(P("")))); + TEST_ExpectTrue(__().json.Pointer(P("")) + .StartsWith(__().json.Pointer(P("")))); + + Issue("`StartsWith()` correctly detects JSON pointers that aren't actually" + @ "their prefixes."); + TEST_ExpectFalse(__().json.Pointer(P("/A/7/C")) + .StartsWith(__().json.Pointer(P("/A/3/C")))); + // Constructed manually to handle components added as numeric + pointer = __().json.Pointer().Push(P("A")).PushNumeric(8).Push(P("C")); + TEST_ExpectFalse(pointer.StartsWith(__().json.Pointer(P("/A/3/C")))); + TEST_ExpectFalse(__().json.Pointer(P("/A/7/C")) + .StartsWith(__().json.Pointer(P("/A/7/")))); + TEST_ExpectFalse(__().json.Pointer(P("/A/7/C")) + .StartsWith(__().json.Pointer(P("/")))); +} + +protected static function SubTest_IsComponentArrayApplicable() +{ + Issue("`IsComponentArrayApplicable()` method wrongly detects numeric" + @ "components."); + TEST_ExpectFalse( + __().json.Pointer(P("/A/B/C")).IsComponentArrayApplicable(0)); + TEST_ExpectTrue( + __().json.Pointer(P("/A/2/C")).IsComponentArrayApplicable(1)); + TEST_ExpectTrue( + __().json.Pointer(P("/A/B/-")).IsComponentArrayApplicable(2)); + TEST_ExpectFalse( + __().json.Pointer(P("/A/7/C")).IsComponentArrayApplicable(-2)); + TEST_ExpectFalse( + __().json.Pointer(P("/A/7/C")).IsComponentArrayApplicable(10)); +} + protected static function Test_Print() { 
Context("Testing printing simple JSON values."); @@ -651,6 +770,399 @@ protected static function SubTest_ParseComplex() TEST_ExpectTrue(FloatBox(inner.GetItem(P("maybe"))).Get() == 0.003); } +protected static function Test_Copy() +{ + Context("Testing method for copying JSON values."); + SubTest_CopySimple(); + SubTest_CopyComplex(); +} + +protected static function SubTest_CopySimple() +{ + Issue("JSON's `Copy()` method incorrectly copies copy boxed built-in" + @ "types."); + TEST_ExpectNone(__().json.Copy(none)); + TEST_ExpectTrue( + BoolBox(__().json.Copy(__().box.bool(true))).Get() + == true); + TEST_ExpectTrue( + BoolRef(__().json.Copy(__().ref.bool(false))).Get() + == false); + TEST_ExpectTrue(IntBox(__().json.Copy(__().box.int(-7))).Get() == -7); + TEST_ExpectTrue( + IntRef(__().json.Copy(__().ref.int(234234))).Get() + == 234234); + TEST_ExpectTrue( + FloatBox(__().json.Copy(__().box.float(3.76))).Get() + == 3.76); + TEST_ExpectTrue( + FloatRef(__().json.Copy(__().ref.float(-213.1))).Get() + == -213.1); + TEST_ExpectTrue(Text(__().json.Copy(P("Hey!"))).ToString() == "Hey!"); + TEST_ExpectTrue( + MutableText(__().json.Copy(__().text.FromStringM("Hey!"))) + .ToString() + == "Hey!"); +} + +protected static function HashTable ConstructComplexJSONObject() +{ + local HashTable result, innerObject, deepObject, oneMoreObject; + local ArrayList innerArray; + + deepObject = __().collections.EmptyHashTable(); + deepObject.SetItem(P("something \"here\""), P("yes")); + deepObject.SetFloat(P("maybe"), 0.003); + innerArray = __().collections.EmptyArrayList(); + innerArray.AddString("Engine.Actor"); + innerArray.AddBool(false); + innerArray.AddItem(none); + innerArray.AddItem(deepObject); + innerArray.AddFloat(56.6); + oneMoreObject = __().collections.EmptyHashTable(); + oneMoreObject.SetInt(P("nope"), 324532); + oneMoreObject.SetBool(P("whatever"), false); + oneMoreObject.SetString(P("o rly?"), "ya rly"); + innerObject = __().collections.EmptyHashTable(); + innerObject.SetBool(P("my_bool"), true); + innerObject.SetItem(P("array"), innerArray); + innerObject.SetItem(P("one more"), oneMoreObject); + innerObject.SetInt(P("my_int"), -9823452); + result = __().collections.EmptyHashTable(); + result.SetItem(P("innerObject"), innerObject); + result.SetFloat(P("some_var"), -7.32); + result.SetString(P("another_var"), "aye!"); + return result; +} + +protected static function SubTest_CopyComplex() +{ + local HashTable complexCopy; + + complexCopy = HashTable(__().json.Copy(ConstructComplexJSONObject())); + TEST_ExpectTrue(complexCopy.GetBoolBy(P("/innerObject/my_bool")) == true); + TEST_ExpectTrue( + complexCopy.GetStringBy(P("/innerObject/array/0")) == "Engine.Actor"); + TEST_ExpectTrue( + complexCopy.GetBoolBy(P("/innerObject/array/1")) == false); + TEST_ExpectTrue( + complexCopy.GetItemBy(P("/innerObject/array/2")) == none); + TEST_ExpectTrue( + complexCopy.GetStringBy(P("/innerObject/array/3/something \"here\"")) + == "yes"); + TEST_ExpectTrue( + complexCopy.GetFloatBy(P("/innerObject/array/3/maybe")) == 0.003); + TEST_ExpectTrue( + complexCopy.GetFloatBy(P("/innerObject/array/4")) == 56.6); + TEST_ExpectTrue( + complexCopy.GetIntBy(P("/innerObject/one more/nope")) == 324532); + TEST_ExpectTrue( + complexCopy.GetBoolBy(P("/innerObject/one more/whatever")) == false); + TEST_ExpectTrue( + complexCopy.GetStringBy(P("/innerObject/one more/o rly?")) == "ya rly"); + TEST_ExpectTrue( + complexCopy.GetIntBy(P("/innerObject/my_int")) == -9823452); + TEST_ExpectTrue( + complexCopy.GetFloatBy(P("/some_var")) == 
-7.32); + TEST_ExpectTrue( + complexCopy.GetStringBy(P("/another_var")) == "aye!"); +} + +protected static function Test_Incrementing() +{ + Context("Testing incrementing JSON values with `_.json.Increment()`."); + SubTest_Incrementing_Null(); + SubTest_Incrementing_Bool(); + SubTest_Incrementing_Number(); + SubTest_Incrementing_String(); + SubTest_Incrementing_Array(); + SubTest_Incrementing_Object(); + SubTest_Incrementing_Incompatible(); +} + +protected static function SubTest_Incrementing_Null() +{ + local FloatRef ref; + + Issue("Null values aren't incremented correctly."); + TEST_ExpectNone(__().json.Increment(none, none)); + TEST_ExpectTrue(BoolBox(__().json.Increment( + none, __().box.bool(true))).Get() == true); + TEST_ExpectTrue(FloatRef(__().json.Increment( + __().ref.float(11.5), none)).Get() == 11.5); + + Issue("Incrementing null values simply copies reference."); + ref = __().ref.float(1032423.91); + TEST_ExpectFalse(__().json.Increment(ref, none) == none); + TEST_ExpectFalse(__().json.Increment(none, ref) == none); +} + +protected static function SubTest_Incrementing_Bool() +{ + Issue("Boolean values aren't incremented correctly."); + TEST_ExpectTrue(BoolBox(__().json.Increment( + __().box.bool(false), __().box.bool(false))).Get() == false); + TEST_ExpectTrue(BoolBox(__().json.Increment( + __().box.bool(true), __().box.bool(false))).Get() == true); + TEST_ExpectTrue(BoolBox(__().json.Increment( + __().box.bool(false), __().box.bool(true))).Get() == true); + TEST_ExpectTrue(BoolBox(__().json.Increment( + __().box.bool(true), __().box.bool(true))).Get() == true); + + Issue("Incrementing boolean values produces incorrect type."); + TEST_ExpectTrue(BoolBox(__().json.Increment( + __().box.bool(false), __().box.bool(true))).Get() == true); + TEST_ExpectTrue(BoolBox(__().json.Increment( + __().box.bool(false), __().ref.bool(true))).Get() == true); + TEST_ExpectTrue(BoolRef(__().json.Increment( + __().ref.bool(false), __().box.bool(true))).Get() == true); + TEST_ExpectTrue(BoolRef(__().json.Increment( + __().ref.bool(false), __().ref.bool(true))).Get() == true); +} + +protected static function SubTest_Incrementing_Number() +{ + SubSubTest_Incrementing_Number_Pure(); + SubSubTest_Incrementing_Number_Mixed(); +} + +protected static function SubSubTest_Incrementing_Number_Pure() +{ + Issue("Numeric values aren't incremented correctly (for boxed `int`s)."); + TEST_ExpectTrue(IntBox(__().json.Increment( + __().box.int(3), __().box.int(-3))).Get() == 0); + TEST_ExpectTrue(IntBox(__().json.Increment( + __().box.int(-4), __().ref.int(11))).Get() == 7); + TEST_ExpectTrue(IntRef(__().json.Increment( + __().ref.int(124), __().box.int(624))).Get() == 748); + TEST_ExpectTrue(IntRef(__().json.Increment( + __().ref.int(345), __().ref.int(-23423))).Get() == -23078); + + Issue("Numeric values aren't incremented correctly (for boxed `float`s)."); + TEST_ExpectTrue(FloatBox(__().json.Increment( + __().box.float(11.2), __().box.float(-0.2))).Get() == 11); + TEST_ExpectTrue(FloatBox(__().json.Increment( + __().box.float(1012.78), __().ref.float(0.12))).Get() == 1012.9); + TEST_ExpectTrue(FloatRef(__().json.Increment( + __().ref.float(12), __().box.float(13))).Get() == 25); + TEST_ExpectTrue(FloatRef(__().json.Increment( + __().ref.float(-0.32), __().ref.float(0.32))).Get() == 0); +} + +protected static function SubSubTest_Incrementing_Number_Mixed() +{ + Issue("Numeric values aren't incremented correctly (for mixed `int`s and" + @ "`float`s)."); + TEST_ExpectTrue(FloatBox(__().json.Increment( + 
__().box.float(11.2), __().box.int(0))).Get() == 11.2); + TEST_ExpectTrue(FloatBox(__().json.Increment( + __().box.float(1012.78), __().ref.int(2))).Get() == 1014.78); + TEST_ExpectTrue(FloatRef(__().json.Increment( + __().ref.float(12), __().box.int(13))).Get() == 25); + TEST_ExpectTrue(FloatRef(__().json.Increment( + __().ref.float(-0.32), __().ref.int(14))).Get() == 13.68); + + TEST_ExpectTrue(FloatBox(__().json.Increment( + __().box.int(11), __().box.float(-0.2))).Get() == 10.8); + TEST_ExpectTrue(FloatBox(__().json.Increment( + __().box.int(1012), __().ref.float(7.12))).Get() == 1019.12); + TEST_ExpectTrue(FloatRef(__().json.Increment( + __().ref.int(12), __().box.float(13.1))).Get() == 25.1); + TEST_ExpectTrue(FloatRef(__().json.Increment( + __().ref.int(-10), __().ref.float(0.32))).Get() == -9.68); +} + +protected static function SubTest_Incrementing_String() +{ + Issue("String values aren't incremented correctly."); + TEST_ExpectTrue(Text(__().json.Increment( + __().text.FromString("Whatever"), __().text.FromString("revetahW"))) + .ToString() == "WhateverrevetahW"); + TEST_ExpectTrue(MutableText(__().json.Increment( + __().text.FromStringM("Whatever"), __().text.FromString("revetahW"))) + .ToString() == "WhateverrevetahW"); + TEST_ExpectTrue(Text(__().json.Increment( + __().text.FromString("Whatever"), __().text.FromStringM("revetahW"))) + .ToString() == "WhateverrevetahW"); + TEST_ExpectTrue(MutableText(__().json.Increment( + __().text.FromStringM("Whatever"), __().text.FromStringM("revetahW"))) + .ToString() == "WhateverrevetahW"); +} + +protected static function SubTest_Incrementing_Array() +{ + local ArrayList array1, array2, result; + + Issue("Array values aren't incremented correctly."); + array1 = __().collections.EmptyArrayList(); + array2 = __().collections.EmptyArrayList(); + array1.AddItem(__().box.int(5)); + array2.AddItem(__().box.int(3)); + array2.AddItem(__().box.int(-7)); + result = ArrayList(__().json.Increment(array1, array2)); + TEST_ExpectTrue(__().json.Print(result).ToString() == "[5,3,-7]"); + TEST_ExpectTrue(result.GetItem(0) != array1.GetItem(0)); + TEST_ExpectTrue(result.GetItem(1) != array2.GetItem(0)); + TEST_ExpectTrue(result.GetItem(2) != array2.GetItem(1)); + + Issue("Incrementing array values incorrectly handles reference counts."); + // +1 after copy, +2 after getters (before and here) + TEST_ExpectTrue(result.GetItem(0)._getRefCount() == 3); + TEST_ExpectTrue(result.GetItem(1)._getRefCount() == 3); + TEST_ExpectTrue(result.GetItem(2)._getRefCount() == 3); + TEST_ExpectTrue(array1._getRefCount() == 1); + TEST_ExpectTrue(array2._getRefCount() == 1); + TEST_ExpectTrue(result._getRefCount() == 1); +} + +protected static function SubTest_Incrementing_Object() +{ + local HashTable table1, table2, result; + + Issue("Object values aren't incremented correctly."); + table1 = __().collections.EmptyHashTable(); + table2 = __().collections.EmptyHashTable(); + table1.Setitem(P("A"), __().box.int(5)); + table2.Setitem(P("B"), __().box.int(3)); + table2.Setitem(P("C"), __().box.int(-7)); + result = HashTable(__().json.Increment(table1, table2)); + TEST_ExpectTrue(result.GetLength() == 3); + TEST_ExpectTrue(result.GetInt(P("A")) == 5); + TEST_ExpectTrue(result.GetInt(P("B")) == 3); + TEST_ExpectTrue(result.GetInt(P("C")) == -7); + TEST_ExpectTrue(result.GetItem(P("A")) != table1.GetItem(P("A"))); + TEST_ExpectTrue(result.GetItem(P("B")) != table2.GetItem(P("B"))); + TEST_ExpectTrue(result.GetItem(P("C")) != table2.GetItem(P("C"))); + + // +1 after copy, +2 after 
getters (before and here) + Issue("Incrementing object values incorrectly handles reference counts."); + TEST_ExpectTrue(result.GetItem(P("A"))._getRefCount() == 3); + TEST_ExpectTrue(result.GetItem(P("B"))._getRefCount() == 3); + TEST_ExpectTrue(result.GetItem(P("C"))._getRefCount() == 3); + TEST_ExpectTrue(table1._getRefCount() == 1); + TEST_ExpectTrue(table2._getRefCount() == 1); + TEST_ExpectTrue(result._getRefCount() == 1); +} + +protected static function SubTest_Incrementing_Incompatible() +{ + Issue("Incrementing with incompatible values doesn't produce `none`."); + SubSubTest_Incrementing_Incompatible_bool(); + SubSubTest_Incrementing_Incompatible_int(); + SubSubTest_Incrementing_Incompatible_float(); + SubSubTest_Incrementing_Incompatible_text(); + SubSubTest_Incrementing_Incompatible_arraylist(); + SubSubTest_Incrementing_Incompatible_hashtable(); +} + +protected static function SubSubTest_Incrementing_Incompatible_bool() +{ + TEST_ExpectNone(__().json.Increment( + __().box.bool(true), + __().ref.int(32))); + TEST_ExpectNone(__().json.Increment( + __().box.bool(true), + __().ref.float(32.5))); + TEST_ExpectNone(__().json.Increment( + __().box.bool(true), + __().text.FromString("Hello there!"))); + TEST_ExpectNone(__().json.Increment( + __().box.bool(true), + __().collections.EmptyArrayList())); + TEST_ExpectNone(__().json.Increment( + __().box.bool(true), + __().collections.EmptyHashTable())); +} + +protected static function SubSubTest_Incrementing_Incompatible_int() +{ + TEST_ExpectNone(__().json.Increment( + __().box.int(3), + __().ref.bool(false))); + TEST_ExpectNone(__().json.Increment( + __().box.int(234), + __().text.FromString("Hello there!"))); + TEST_ExpectNone(__().json.Increment( + __().box.int(2), + __().collections.EmptyArrayList())); + TEST_ExpectNone(__().json.Increment( + __().box.int(782), + __().collections.EmptyHashTable())); +} + +protected static function SubSubTest_Incrementing_Incompatible_float() +{ + TEST_ExpectNone(__().json.Increment( + __().box.float(3), + __().ref.bool(false))); + TEST_ExpectNone(__().json.Increment( + __().box.float(234), + __().text.FromString("Hello there!"))); + TEST_ExpectNone(__().json.Increment( + __().box.float(2), + __().collections.EmptyArrayList())); + TEST_ExpectNone(__().json.Increment( + __().box.float(782), + __().collections.EmptyHashTable())); +} + +protected static function SubSubTest_Incrementing_Incompatible_text() +{ + TEST_ExpectNone(__().json.Increment( + __().text.FromString("yo"), + __().ref.bool(true))); + TEST_ExpectNone(__().json.Increment( + __().text.FromString("yo"), + __().ref.int(32))); + TEST_ExpectNone(__().json.Increment( + __().text.FromString("yo"), + __().ref.float(32.5))); + TEST_ExpectNone(__().json.Increment( + __().text.FromString("yo"), + __().collections.EmptyArrayList())); + TEST_ExpectNone(__().json.Increment( + __().text.FromString("yo"), + __().collections.EmptyHashTable())); +} + +protected static function SubSubTest_Incrementing_Incompatible_arraylist() +{ + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyArrayList(), + __().ref.bool(true))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyArrayList(), + __().ref.int(32))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyArrayList(), + __().ref.float(32.5))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyArrayList(), + __().text.FromString("Not a collection!"))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyArrayList(), + __().collections.EmptyHashTable())); 
+} + +protected static function SubSubTest_Incrementing_Incompatible_hashtable() +{ + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyHashTable(), + __().ref.bool(true))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyHashTable(), + __().ref.int(32))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyHashTable(), + __().ref.float(32.5))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyHashTable(), + __().text.FromString("Not a collection!"))); + TEST_ExpectNone(__().json.Increment( + __().collections.EmptyHashTable(), + __().collections.EmptyArrayList())); +} + defaultproperties { caseName = "JSON" diff --git a/sources/Users/ACommandUserGroups.uc b/sources/Users/ACommandUserGroups.uc index f1d5478..b58f706 100644 --- a/sources/Users/ACommandUserGroups.uc +++ b/sources/Users/ACommandUserGroups.uc @@ -41,21 +41,8 @@ protected function BuildData(CommandDataBuilder builder) builder.SubCommand(P("remove")) .Describe(P("Removes a group")) .ParamText(P("group_name")); - builder.SubCommand(P("adduser")) - .Describe(P("Adds new user to the group. Allows to also optionally" - @ "specify annotation (human-readable name) that can be thought of" - @ "as a {$TextEmphasis comment}.")) - .ParamText(P("group_name")) - .ParamText(P("user_id")) - .OptionalParams() - .ParamText(P("annotation")); - builder.SubCommand(P("removeuser")) - .Describe(P("Removes user from the group. User can be specified by both" - @ "user's id or annotation, with id taking priority.")) - .ParamText(P("group_name")) - .ParamText(P("user_name")); builder.SubCommand(P("addplayer")) - .Describe(P("Adds new user to the group, specified by the player" + .Describe(F("Adds new user to the group, specified by the player" @ "selector. Can add several players at once." @ "Allows to also optionally specify annotation" @ "(human-readable name) that can be thought of as" @@ -70,6 +57,19 @@ protected function BuildData(CommandDataBuilder builder) @ "Can remove several players at once.")) .ParamText(P("group_name")) .ParamPlayers(P("player_selector")); + builder.SubCommand(P("adduser")) + .Describe(F("Adds new user to the group. Allows to also optionally" + @ "specify annotation (human-readable name) that can be thought of" + @ "as a {$TextEmphasis comment}.")) + .ParamText(P("group_name")) + .ParamText(P("user_id")) + .OptionalParams() + .ParamText(P("annotation")); + builder.SubCommand(P("removeuser")) + .Describe(P("Removes user from the group. 
User can be specified by both" + @ "user's id or annotation, with id taking priority.")) + .ParamText(P("group_name")) + .ParamText(P("user_name")); builder.Option(P("force")) .Describe(P("Allows to force usage of invalid user IDs.")); } @@ -89,6 +89,7 @@ protected function Executed(CallData arguments, EPlayer instigator) // An array of players that can be specified for some commands players = arguments.parameters.GetArrayList(P("player_selector")); groups = arguments.parameters.GetArrayList(P("groups")); + forceOption = arguments.options.HasKey(P("force")); if (arguments.subCommandName.IsEmpty()) { DisplayUserGroups(); } diff --git a/sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Signal.uc b/sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Signal.uc new file mode 100644 index 0000000..8b98050 --- /dev/null +++ b/sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Signal.uc @@ -0,0 +1,40 @@ +/** + * Signal class for `PersistentDataManager`'s `OnPersistentDataReady()` signal. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . + */ +class PersistentDataManager_OnPersistentDataReady_Signal extends Signal + dependson(DBConnection); + +public final function Emit(UserID id, bool online) +{ + local Slot nextSlot; + StartIterating(); + nextSlot = GetNextSlot(); + while (nextSlot != none) + { + PersistentDataManager_OnPersistentDataReady_Slot(nextSlot) + .connect(id, online); + nextSlot = GetNextSlot(); + } + CleanEmptySlots(); +} + +defaultproperties +{ + relatedSlotClass = class'PersistentDataManager_OnPersistentDataReady_Slot' +} \ No newline at end of file diff --git a/sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Slot.uc b/sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Slot.uc new file mode 100644 index 0000000..2e9ddc1 --- /dev/null +++ b/sources/Users/PersistentData/Events/PersistentDataManager_OnPersistentDataReady_Slot.uc @@ -0,0 +1,41 @@ +/** + * Slot class for `PersistentDataManager`'s `OnPersistentDataReady()` signal. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . + */ +class PersistentDataManager_OnPersistentDataReady_Slot extends Slot + dependson(DBConnection); + +delegate connect(UserID id, bool online) +{ + DummyCall(); +} + +protected function Constructor() +{ + connect = none; +} + +protected function Finalizer() +{ + super.Finalizer(); + connect = none; +} + +defaultproperties +{ +} \ No newline at end of file diff --git a/sources/Users/PersistentData/PersistentDataManager.uc b/sources/Users/PersistentData/PersistentDataManager.uc new file mode 100644 index 0000000..c6ec518 --- /dev/null +++ b/sources/Users/PersistentData/PersistentDataManager.uc @@ -0,0 +1,407 @@ +/** + * This tool is for simplifying writing and reading persistent user data. + * All it requires is a setup of database + json pointer to data and it will + * take care of data caching and database connection. + * Copyright 2023 Anton Tarasenko + *------------------------------------------------------------------------------ + * This file is part of Acedia. + * + * Acedia is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, version 3 of the License, or + * (at your option) any later version. + * + * Acedia is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with Acedia. If not, see . + */ +class PersistentDataManager extends AcediaObject; + +/** + * # `PersistentDataManager` + * + * This tool is for simplifying writing and reading persistent user data. + * All it requires is a setup of database + json pointer to data and it will + * take care of data caching and database connection. + * + * ## Usage + * + * Create an instance and use `Setup()` to connect to the database with + * persistent data. You can use `Setup()` again on the same object to setup + * a different database as a source. All data will be automatically reloaded. + * After that you can use `GetPersistentData()`/`SetPersistentData()` to + * read/write persistent data for the particular user. + * Since loading data from the database takes time, you don't have an + * immediate access to it. + * But you can use `_.users.OnPersistentDataAvailable()` signal to track + * whenever new user data from database becomes available. However, you can + * start writing persistent data (and reading what you've wrote) at any time it + * - these changes will be reapplied whenever data is actually loaded from + * database. + * + * ## Implementation + * + * Implementation consists of simply creating `DBConnection` for every user + * and storing them in the `HashTable` that maps user IDs into those + * `DBConnection`s. + * We also maintain a reverse map to figure out what `DBConnection` belongs + * to what user when connection signals an update. We borrow the signal that + * `UsersAPI` provides to inform everyone interested about which users + * have updated. 
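+ *
+ * ## Example
+ *
+ * A minimal usage sketch; `db`, `someID` and the "MyMod"/"score" names are
+ * hypothetical, standing for an already loaded `Database`, a valid `UserID`
+ * and a mod's own group/variable names:
+ *
+ *     local PersistentDataManager manager;
+ *     local JSONPointer root;
+ *
+ *     manager = PersistentDataManager(
+ *         _.memory.Allocate(class'PersistentDataManager'));
+ *     root = _.json.Pointer(P("/user_data"));
+ *     if (manager.Setup(db, root))
+ *     {
+ *         manager.ConnectPersistentDataByID(someID);
+ *         manager.WritePersistentData(
+ *             someID, P("MyMod"), P("score"), _.box.int(42));
+ *     }
+ *     _.memory.Free(root);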
+ */ + +var private bool initialized; +var private Database database; +var private JSONPointer rootPointer; +var private HashTable userToConnection, connectionToUser; + +var private PersistentDataManager_OnPersistentDataReady_Signal onPersistentDataReadySignal; + +protected function Constructor() +{ + _.players.OnNewPlayer(self).connect = ConnectPersistentData; + onPersistentDataReadySignal = _.users._getOnReadySignal(); +} + +protected function Finalizer() +{ + Reset(); + _.players.OnNewPlayer(self).Disconnect(); +} + +private final function Reset() +{ + _.memory.Free(database); + _.memory.Free(rootPointer); + _.memory.Free(userToConnection); + _.memory.Free(connectionToUser); + _.memory.Free(onPersistentDataReadySignal); + database = none; + rootPointer = none; + userToConnection = none; + connectionToUser = none; + onPersistentDataReadySignal = none; + initialized = false; +} + +/** + * Sets up database and location inside it as a source of users' persistent + * data. + * + * Must be successfully called at least once for the caller + * `PersistentDataManager` to be usable. + * + * @param db Database inside which persistent data is stored. + * @param location Location inside specified database to the root of + * persistent data. + * @return `true` if setup was successful (requires both arguments to be not + * `none`) and `false` otherwise. + */ +public final function bool Setup(Database db, JSONPointer location) +{ + if (db == none) return false; + if (location == none) return false; + + Reset(); + database = db; + database.NewRef(); + rootPointer = location.Copy(); + userToConnection = _.collections.EmptyHashTable(); + connectionToUser = _.collections.EmptyHashTable(); + // Using `userToConnection` as an empty hash table, not related to its + // actual meaning + database.IncrementData(location, userToConnection); + initialized = true; + return true; +} + +/** + * Reads specified named persistent data for the specified group. + * + * @param id ID of the user to read persistent data from. + * @param groupName Group to which this persistent data belongs to. + * Groups are used as namespaces to avoid duplicate persistent variables + * between mods. If your mod needs several subgroups, its recommended to + * use the same prefix for them, e.g. "MyAwesomeMod.economy" and + * "MyAwesomeMod.enemies". + * @param dataName Name of persistent data variable to read inside + * `groupName` persistent data group. Not `none` value must be provided. + * @param data Data to set as persistent value. Must be + * JSON-compatible. If `none` is passed, returns the all data for + * the given group. + * @return Data read from the persistent variable. `none` in case of any kind + * of failure. 
+ */ +public final function AcediaObject GetPersistentData( + UserID id, + BaseText groupName, + optional BaseText dataName) +{ + local AcediaObject result; + local Text textID; + local JSONPointer location; + local DBConnection relevantConnection; + + if (!initialized) return none; + if (id == none) return none; + if (groupName == none) return none; + + textID = id.GetUniqueID(); + relevantConnection = DBConnection(userToConnection.GetItem(textID)); + textID.FreeSelf(); + if (relevantConnection != none) + { + location = _.json.Pointer(); + location.Push(groupName); + if (dataName != none) { + location.Push(dataName); + } + result = relevantConnection.ReadDataByJSON(location); + relevantConnection.FreeSelf(); + location.FreeSelf(); + } + return result; +} + +/** + * Writes specified named persistent data for the specified group. + * + * @param id ID of the user to change persistent data of. + * @param groupName Group to which this persistent data belongs to. + * Groups are used as namespaces to avoid duplicate persistent variables + * between mods. If your mod needs several subgroups, its recommended to + * use the same prefix for them, e.g. "MyAwesomeMod.economy" and + * "MyAwesomeMod.enemies". + * @param dataName Name of persistent data variable to change inside + * `groupName` persistent data group. + * @param data Data to set as persistent value. Must be + * JSON-compatible. + * @return `true` if change succeeded in local cached version of database with + * persistent values and `false` otherwise. Such local changes can + * potentially be not applied to the actual database. But successful local + * changes should persist for the game session. + */ +public final function bool WritePersistentData( + UserID id, + BaseText groupName, + BaseText dataName, + AcediaObject data) +{ + local bool result; + local Text textID; + local JSONPointer location; + local DBConnection relevantConnection; + local HashTable emptyObject; + + if (!initialized) return false; + if (id == none) return false; + if (groupName == none) return false; + if (dataName == none) return false; + + textID = id.GetUniqueID(); + relevantConnection = DBConnection(userToConnection.GetItem(textID)); + textID.FreeSelf(); + if (relevantConnection != none) + { + emptyObject = _.collections.EmptyHashTable(); + location = _.json.Pointer(); + location.Push(groupName); + relevantConnection.IncrementDataByJSON(location, emptyObject); + location.Push(dataName); + result = relevantConnection.WriteDataByJSON(location, data); + relevantConnection.FreeSelf(); + location.FreeSelf(); + emptyObject.FreeSelf(); + } + return result; +} + +/** + * Increments specified named persistent data for the specified group. + * + * @param id ID of the user to change persistent data of. + * @param groupName Group to which this persistent data belongs to. + * Groups are used as namespaces to avoid duplicate persistent variables + * between mods. If your mod needs several subgroups, its recommended to + * use the same prefix for them, e.g. "MyAwesomeMod.economy" and + * "MyAwesomeMod.enemies". + * @param dataName Name of persistent data variable to change inside + * `groupName` persistent data group. + * @param data Data by which to increment existing persistent value. + * Must be JSON-compatible. + * @return `true` if change succeeded in local cached version of database with + * persistent values and `false` otherwise. Such local changes can + * potentially be not applied to the actual database. 
But successful local + * changes should persist for the game session. + */ +public final function bool IncrementPersistentData( + UserID id, + BaseText groupName, + BaseText dataName, + AcediaObject data) +{ + local bool result; + local Text textID; + local JSONPointer location; + local DBConnection relevantConnection; + + if (!initialized) return false; + if (id == none) return false; + if (groupName == none) return false; + if (dataName == none) return false; + + textID = id.GetUniqueID(); + relevantConnection = DBConnection(userToConnection.GetItem(textID)); + textID.FreeSelf(); + if (relevantConnection != none) + { + location = _.json.Pointer(); + location.Push(groupName).Push(dataName); + result = relevantConnection.IncrementDataByJSON(location, data); + relevantConnection.FreeSelf(); + location.FreeSelf(); + } + return result; +} + +/** + * Removes specified named persistent data for the specified group. + * + * @param id ID of the user to remove persistent data of. + * @param groupName Group to which this persistent data belongs to. + * Groups are used as namespaces to avoid duplicate persistent variables + * between mods. If your mod needs several subgroups, its recommended to + * use the same prefix for them, e.g. "MyAwesomeMod.economy" and + * "MyAwesomeMod.enemies". + * @param dataName Name of persistent data variable to remove inside + * `groupName` persistent data group. + * @return `true` if removal succeeded in local cached version of database with + * persistent values and `false` otherwise. Such local changes can + * potentially be not applied to the actual database. But successful local + * changes should persist for the game session. + */ +public final function bool RemovePersistentData( + UserID id, + BaseText groupName, + BaseText dataName) +{ + local bool result; + local Text textID; + local JSONPointer location; + local DBConnection relevantConnection; + + if (!initialized) return false; + if (id == none) return false; + if (groupName == none) return false; + if (dataName == none) return false; + + textID = id.GetUniqueID(); + relevantConnection = DBConnection(userToConnection.GetItem(textID)); + textID.FreeSelf(); + if (relevantConnection != none) + { + location = _.json.Pointer(); + location.Push(groupName).Push(dataName); + result = relevantConnection.RemoveDataByJSON(location); + relevantConnection.FreeSelf(); + location.FreeSelf(); + } + return result; +} + +/** + * Connects and starts synchronizing persistent data for the given player. + * + * @param player Player to synchronize persistent data for. + */ +public final function ConnectPersistentData(EPlayer player) +{ + local UserID playerID; + + if (initialized && player != none) + { + playerID = player.GetUserID(); + ConnectPersistentDataByID(playerID); + _.memory.Free(playerID); + } +} + +/** + * Connects and starts synchronizing persistent data for the player given by + * their ID. + * + * @param id User ID for which to synchronize persistent data from + * the database. 
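+ *
+ * New players are picked up automatically through the `OnNewPlayer()`
+ * signal hooked up in `Constructor()`; `LoadCurrentPlayers()` below reaches
+ * this method (via `ConnectPersistentData()`) for players that are already
+ * on the server when the manager is set up.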
+ */ +public final function ConnectPersistentDataByID(UserID id) +{ + local Text textID; + local DBConnection newConnection; + + if (!initialized) return; + if (id == none) return; + + textID = id.GetUniqueID(); + if (userToConnection.HasKey(textID)) + { + _.memory.Free(textID); + return; + } + rootPointer.Push(textID); + newConnection = DBConnection(_.memory.Allocate(class'DBConnection')); + newConnection.Initialize(database, rootPointer); + _.memory.Free(rootPointer.Pop()); + newConnection.Connect(); + userToConnection.SetItem(textID, newConnection); + connectionToUser.SetItem(newConnection, textID); + newConnection.OnStateChanged(self).connect = UserUpdated; + textID.FreeSelf(); + newConnection.FreeSelf(); +} + +private final function UserUpdated( + DBConnection instance, + DBConnection.DBConnectionState oldState, + DBConnection.DBConnectionState newState) +{ + local UserID id; + + if (!initialized) return; + if (newState == DBCS_Connecting) return; + if (onPersistentDataReadySignal == none) return; + if (!onPersistentDataReadySignal.IsAllocated()) return; + + id = UserID(connectionToUser.GetItem(instance)); + if (id != none) + { + onPersistentDataReadySignal.Emit(id, newState == DBCS_Connected); + id.FreeSelf(); + } +} + +/** + * Attempts to start persistent data synchronization for all players currently + * on the server. + */ +public final function LoadCurrentPlayers() +{ + local int i; + local array currentPlayers; + + if (initialized) + { + currentPlayers = _.players.GetAll(); + for (i = 0; i < currentPlayers.length; i += 1) { + ConnectPersistentData(currentPlayers[i]); + } + _.memory.FreeMany(currentPlayers); + } +} + +defaultproperties +{ +} \ No newline at end of file diff --git a/sources/Users/User.uc b/sources/Users/User.uc index 6a9b1ed..246a825 100644 --- a/sources/Users/User.uc +++ b/sources/Users/User.uc @@ -2,7 +2,7 @@ * Object that is supposed to store a persistent data about the * certain player. That is data that will be remembered even after player * reconnects or server changes map/restarts. - * Copyright 2020-2022 Anton Tarasenko + * Copyright 2020-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -27,38 +27,40 @@ var private UserID id; // an easy reference in console commands var private int key; -// If we failed to create user database skeleton - set this to `true`, -// this will prevent us from making changes that might mess up database due to -// misconfiguration -var private bool failedToCreateDatabaseSkeleton; -// Database where user's persistent data is stored -var private Database persistentDatabase; -// Pointer to this user's "settings" data in particular -var private JSONPointer persistentSettingsPointer; -// Groups to which caller `User` belongs to. -// Every user always belongs to group "everyone", so it is never listed -// here. -// Local user groups are not available for modification and are only loaded -// from configs, so `userGroups` might duplicate groups from `localUserGroup`, -// allowing to add them to editable sources (database). -// Group names are stored in the lower register. 
-var private array userGroups; // user groups loaded from database -var private array localUserGroups; // user groups loaded from local files -var private LoggerAPI.Definition warnNoPersistentDatabase; -var private LoggerAPI.Definition infoPersistentDatabaseLoaded; -var private LoggerAPI.Definition errCannotCreateSkeletonFor; -var private LoggerAPI.Definition errCannotReadDB, errInvalidUserGroups; +var private HashTable sessionData; + +var private int persistentDataLifeVersion; +var private PersistentDataManager persistentData; protected function Finalizer() { if (id != none) { id.FreeSelf(); } - if (persistentSettingsPointer != none) { - persistentSettingsPointer.FreeSelf(); + id = none; +} + +private final function UpdatePersistentDataManager() +{ + local Users_Feature feature; + + if ( persistentData != none + && persistentData.GetLifeVersion() != persistentDataLifeVersion) + { + persistentData = none; + } + if (persistentData == none) + { + feature = + Users_Feature(class'Users_Feature'.static.GetEnabledInstance()); + if (feature != none) { + persistentData = feature.BorrowPersistentDataManager(); + } + if (persistentData != none) { + persistentDataLifeVersion = persistentData.GetLifeVersion(); + } + _.memory.Free(feature); } - id = none; - persistentSettingsPointer = none; } /** @@ -71,17 +73,11 @@ protected function Finalizer() */ public final function Initialize(UserID initID, int initKey) { - local DBReadTask groupsReadingTask; id = initID; key = initKey; if (initID != none) { initID.NewRef(); } - //LoadLocalGroups(); - /*groupsReadingTask = ReadPersistentData(P("Acedia"), P("UserGroups")); - if (groupsReadingTask != none) { - groupsReadingTask.connect = LoadDBGroups; - }*/ } /** @@ -107,402 +103,60 @@ public final function int GetKey() return key; } -// Loads locally defined groups from the "AcediaUserGroups.ini" config -private final function LoadLocalGroups() -{ - local int i, j; - local string mySteamID; - local UserGroup nextGroupConfig; - local array nextUserArray; - local array availableGroups; - - if (id == none) { - return; - } - class'UserGroup'.static.Initialize(); - mySteamID = _.text.IntoString(id.GetSteamID64String()); - availableGroups = class'UserGroup'.static.AvailableConfigs(); - // Go over every group - for (i = 0; i < availableGroups.length; i += 1) - { - nextGroupConfig = UserGroup( - class'UserGroup'.static.GetConfigInstance(availableGroups[i])); - // Add group as local if it has our ID recorded - nextUserArray = nextGroupConfig.user; - for (j = 0; j < nextUserArray.length; j += 1) - { - if (nextUserArray[j] == mySteamID) - { - localUserGroups[localUserGroups.length] = - availableGroups[i].LowerCopy(); - } - } - _.memory.Free(nextGroupConfig); - } - _.memory.FreeMany(availableGroups); -} - -// Loads groups defined in database with user data -private final function LoadDBGroups( - Database.DBQueryResult result, - AcediaObject data, - Database source) -{ - local int i; - local MutableText nextGroup; - local ArrayList dbGroups; - - if (result != DBR_Success) - { - _.logger.Auto(errCannotReadDB); - return; - } - _.memory.FreeMany(userGroups); - userGroups.length = 0; - dbGroups = ArrayList(data); - if (dbGroups == none) - { - if (data != none) - { - _.logger.Auto(errInvalidUserGroups); - _.memory.Free(data); - } - return; - } - for (i = 0; i < dbGroups.GetLength(); i += 1) - { - nextGroup = dbGroups.GetMutableText(i); - if (nextGroup == none) { - continue; - } - if (!class'UserGroup'.static.Exists(nextGroup)) - { - nextGroup.FreeSelf(); - continue; - } - 
userGroups[userGroups.length] = nextGroup.IntoText(); - } - dbGroups.FreeSelf(); -} - -// Save current user groups into the user data database -private final function UpdateDBGroups() -{ - local ArrayList newDBData; - - newDBData = _.collections.NewArrayList(userGroups); - WritePersistentData(P("Acedia"), P("UserGroups"), newDBData); - newDBData.FreeSelf(); -} - -/** - * Adds caller user into new group, specified by `newGroup`. - * This group must exist for the method to succeed. - * - * @param newGroup Name of the group to add caller `User` into. - */ -public final function AddGroup(Text newGroup) -{ - local int i; - - if (newGroup == none) return; - if (class'UserGroup'.static.Exists(newGroup)) return; - - for (i = 0; i < userGroups.length; i += 1) - { - if (newGroup.Compare(userGroups[i], SCASE_INSENSITIVE)) { - return; - } - } - userGroups[userGroups.length] = newGroup.LowerCopy(); - UpdateDBGroups(); -} - -/** - * Removes caller user from the given group `groupToRemove`. - * - * @param groupToRemove Name of the group to remove caller `User` from. - * @return `true` if user was actually removed from the group and `false` - * otherwise (group doesn't exist or user didn't belong to it). - */ -public final function bool RemoveGroup(Text groupToRemove) -{ - local int i; - - if (groupToRemove == none) { - return false; - } - for (i = 0; i < userGroups.length; i += 1) - { - if (groupToRemove.Compare(userGroups[i], SCASE_INSENSITIVE)) - { - userGroups[i].FreeSelf(); - userGroups.Remove(i, 1); - UpdateDBGroups(); - return true; - } - } - return false; -} - /** - * Checks whether caller `User` belongs to the group specified by - * `groupToCheck`. + * Returns persistent data for the caller user. Data is specified by the its + * name along with the name of the data group it is stored in. * - * @param groupToCheck Name of the group to check for whether caller `User` - * belongs to it. - * @return `true` if caller `User` belongs to the group `groupToCheck` and - * `false` otherwise. + * @param groupName Name of the group to get data from. Cannot be `none`. + * @param dataName Name of the data to return. If `none` value is provided, + * all the data in specified group will be returned. + * @return Requested data, `none` in case of failure (i.e. data is missing). */ -public final function bool IsInGroup(Text groupToCheck) -{ - local int i; - - if (groupToCheck == none) { - return false; - } - for (i = 0; i < userGroups.length; i += 1) - { - if (groupToCheck.Compare(userGroups[i], SCASE_INSENSITIVE)) { - return true; - } - } - return false; -} - -/** - * Returns array with names of all groups to which caller user belongs to. - * - * @return Array of names of the groups that caller user belongs to. - * Guaranteed to not contain duplicates or `none` values. 
- */ -public final function array GetGroups() -{ - local int i, j; - local bool duplicate; - local array result; - - for (i = 0; i < localUserGroups.length; i += 1) { - result[result.length] = localUserGroups[i].Copy(); - } - for (i = 0; i < userGroups.length; i += 1) - { - duplicate = false; - // Check `userGroups[i]` for being a duplicate from `localUserGroups` - for (j = 0; j < localUserGroups.length; j += 1) - { - // No need for `SCASE_INSENSITIVE`, since user group names - // are stored in lower case - if (userGroups[i].Compare(localUserGroups[j])) - { - duplicate = true; - break; - } - } - if (!duplicate) { - result[result.length] = userGroups[i].Copy(); - } - } - return result; -} - -/** - * Reads user's persistent data saved inside group `groupName`, saving it into - * a collection using mutable data types. - * Only should be used if `_.users.PersistentStorageExists()` returns `true`. - * - * @param groupName Name of the group these settings belong to. - * This exists to help reduce name collisions between different mods. - * Acedia stores all its settings under "Acedia" group. We suggest that you - * pick at least one name to use for your own mods. - * It should be unique enough to not get picked by others - "weapons" is - * a bad name, while "CoolModMastah79" is actually a good pick. - * @return Task object for reading specified persistent data from the database. - * For more info see `Database.ReadData()` method. - * Guaranteed to not be `none` iff - * `_.users.PersistentStorageExists() == true`. - */ -public final function DBReadTask ReadGroupOfPersistentData(BaseText groupName) -{ - local DBReadTask task; - - if (groupName == none) return none; - if (!SetupDatabaseVariables()) return none; - - persistentSettingsPointer.Push(groupName); - task = persistentDatabase.ReadData(persistentSettingsPointer, true); - _.memory.Free(persistentSettingsPointer.Pop()); - return task; -} - -/** - * Reads user's persistent data saved under name `dataName`, saving it into - * a collection using mutable data types. - * Only should be used if `_.users.PersistentStorageExists()` returns `true`. - * - * @param groupName Name of the group these settings belong to. - * This exists to help reduce name collisions between different mods. - * Acedia stores all its settings under "Acedia" group. We suggest that you - * pick at least one name to use for your own mods. - * It should be unique enough to not get picked by others - "weapons" is - * a bad name, while "CoolModMastah79" is actually a good pick. - * @param dataName Any name, from under which settings you are interested - * (inside `groupName` group) should be read. - * @return Task object for reading specified persistent data from the database. - * For more info see `Database.ReadData()` method. - * Guaranteed to not be `none` iff - * `_.users.PersistentStorageExists() == true`. 
- */ -public final function DBReadTask ReadPersistentData( +public final function AcediaObject GetPersistentData( BaseText groupName, BaseText dataName) { - local DBReadTask task; - - if (groupName == none) return none; - if (dataName == none) return none; - if (!SetupDatabaseVariables()) return none; + local AcediaObject result; + local UserID myID; - persistentSettingsPointer.Push(groupName).Push(dataName); - task = persistentDatabase.ReadData(persistentSettingsPointer, true); - _.memory.Free(persistentSettingsPointer.Pop()); - _.memory.Free(persistentSettingsPointer.Pop()); - return task; + UpdatePersistentDataManager(); + if (persistentData == none) { + return none; + } + myID = GetID(); + result = persistentData.GetPersistentData(myID, groupname, dataName); + _.memory.Free(myID); + return result; } /** - * Writes user's persistent data under name `dataName`. - * Only should be used if `_.users.PersistentStorageExists()` returns `true`. + * Changes persistent data for the caller user. Data to change is specified by + * the its name along with the name of the data group it is stored in. * - * @param groupName Name of the group these settings belong to. - * This exists to help reduce name collisions between different mods. - * Acedia stores all its settings under "Acedia" group. We suggest that you - * pick at least one name to use for your own mods. - * It should be unique enough to not get picked by others - "weapons" is - * a bad name, while "CoolModMastah79" is actually a good pick. - * @param dataName Any name, under which settings you are interested - * (inside `groupName` group) should be written. - * @param data JSON-compatible (see `_.json.IsCompatible()`) data that - * should be written into database. - * @return Task object for writing specified persistent data into the database. - * For more info see `Database.WriteData()` method. - * Guarantee to not be `none` iff - * `_.users.PersistentStorageExists() == true`. + * @param groupName Name of the group to get data from. Cannot be `none`. + * @param dataName Name of the data to return. Cannot be `none`. + * @param data New data to record. + * @return `true` in case operation was successful and `false` otherwise. */ -public final function DBWriteTask WritePersistentData( +public final function bool SetPersistentData( BaseText groupName, BaseText dataName, AcediaObject data) { - local DBWriteTask task; - local HashTable emptyObject; - - if (groupName == none) return none; - if (dataName == none) return none; - if (!SetupDatabaseVariables()) return none; - - emptyObject = _.collections.EmptyHashTable(); - persistentSettingsPointer.Push(groupName); - persistentDatabase.IncrementData(persistentSettingsPointer, emptyObject); - persistentSettingsPointer.Push(dataName); - task = persistentDatabase.WriteData(persistentSettingsPointer, data); - _.memory.Free(persistentSettingsPointer.Pop()); - _.memory.Free(persistentSettingsPointer.Pop()); - _.memory.Free(emptyObject); - return task; -} - -// Setup database `persistentDatabase` and pointer to this user's data -// `persistentSettingsPointer`. -// Return `true` if these variables were setup (during this call or before) -// and `false` otherwise. 
-private function bool SetupDatabaseVariables() -{ - local Text userDataLink; - local Text userTextID; - - if (failedToCreateDatabaseSkeleton) return false; - if (persistentDatabase != none) return true; - if (id == none || !id.IsInitialized()) return false; + local bool result; + local UserID myID; - // Check if database was even specified - persistentDatabase = _.users.GetPersistentDatabase(); - if (persistentDatabase == none) - { - _.logger.Auto(warnNoPersistentDatabase); + UpdatePersistentDataManager(); + if (persistentData == none) { return false; } - // Try making skeleton database - userTextID = id.GetSteamID64String(); - userDataLink = _.users.GetPersistentDataLink(); - persistentSettingsPointer = __core().db.GetPointer(userDataLink); - persistentSettingsPointer.Push(P("PerUserData")); - persistentSettingsPointer.Push(userTextID); - MakeSkeletonUserDatabase(userTextID, persistentSettingsPointer); - persistentSettingsPointer.Push(P("settings")); - userTextID.FreeSelf(); - _.memory.Free(userDataLink); - return true; -} - -private function MakeSkeletonUserDatabase( - Text userTextID, - JSONPointer userDataPointer) -{ - local HashTable skeleton, emptyObject; - - // Construct skeleton object - skeleton = _.collections.EmptyHashTable(); - emptyObject = _.collections.EmptyHashTable(); - skeleton.SetItem(P("Settings"), emptyObject); - skeleton.SetItem(P("Statistics"), emptyObject); - // Try adding the skeleton object - persistentDatabase - .IncrementData(userDataPointer, skeleton) - .connect = ReportSkeletonCreationResult; - // Release skeleton objects - skeleton.FreeSelf(); - emptyObject.FreeSelf(); -} - -private function ReportSkeletonCreationResult( - Database.DBQueryResult result, - Database source) -{ - local Text userTextID; - local Text userDataLink; - - userTextID = id.GetSteamID64String(); - userDataLink = _.users.GetPersistentDataLink(); - if (result == DBR_Success) - { - _.logger.Auto(infoPersistentDatabaseLoaded) - .Arg(userTextID) - .Arg(userDataLink); - } - else - { - _.logger.Auto(errCannotCreateSkeletonFor) - .Arg(userTextID) - .Arg(userDataLink); - failedToCreateDatabaseSkeleton = true; - _.memory.Free(persistentDatabase); - _.memory.Free(persistentSettingsPointer); - persistentDatabase = none; - persistentSettingsPointer = none; - } - _.memory.Free(userTextID); - _.memory.Free(userDataLink); + myID = GetID(); + result = persistentData + .WritePersistentData(myID, groupname, dataName, data); + _.memory.Free(myID); + return result; } -// Load groups from db data only, inside the `UserAPI` -// Get rid of the "AcediaUserGroups.ini" -// Make command for editing user groups defaultproperties { - warnNoPersistentDatabase = (l=LOG_Error,m="No persistent user database available.") - infoPersistentDatabaseLoaded = (l=LOG_Info,m="Persistent user database was setup for user \"%1\" (using database link \"%2\").") - errCannotCreateSkeletonFor = (l=LOG_Error,m="Failed to create persistent user database skeleton for user \"%1\" (using database link \"%2\"). 
User data functionality won't function properly.") - errCannotReadDB = (l=LOG_Error,m="Failed to read user groups from persistent user database.") - errInvalidUserGroups = (l=LOG_Error,m="Invalid data is written as user groups array inside persistent user database.") } \ No newline at end of file diff --git a/sources/Users/UserAPI.uc b/sources/Users/UserAPI.uc index 978896a..d879f4c 100644 --- a/sources/Users/UserAPI.uc +++ b/sources/Users/UserAPI.uc @@ -1,6 +1,6 @@ /** * API that allows easy access to `User` persistent data and `UserID`s. - * Copyright 2020-2022 Anton Tarasenko + * Copyright 2020-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -21,24 +21,42 @@ class UserAPI extends AcediaObject dependson(Users_Feature) config(AcediaSystem); -var private config string userdataDBLink; - // Active `Users_Feature`, remember it along with life version to avoid // taking up a reference var private int usersFeatureLifeVersion; var private Users_Feature usersFeature; -// Database where user's data (persistent data and user groups) is stored -var private Database persistentDatabase; +var private int persistentDataLifeVersion; +var private PersistentDataManager persistentData; -var private LoggerAPI.Definition warnNoPersistentDatabaseLink; -var private LoggerAPI.Definition errNoPersistentDatabase; -var private LoggerAPI.Definition errCannotCreateSkeletonFor; -var private LoggerAPI.Definition infoPersistentDatabaseLoaded; +var private PersistentDataManager_OnPersistentDataReady_Signal onPersistentDataReadySignal; -protected function Constructor() +/** + * Signal that will be emitted whenever we get an update on connection status + * to the database, where persistent data for UserID is stored. This can + * be updated several times in cases like `Users_Feature` being rebooted or + * losing connection to the database. + * + * [Signature] + * void (UserID id, bool online) + * + * @param id ID of the user, for whom status of persistent data got + * updated. + * @param online Is connection to the database online? If this flag is set to + * `false` - an local, session-only storage will be used instead. 
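+ *
+ * A minimal usage sketch (the handler name is hypothetical):
+ *
+ *     _.users.OnEditResult(self).connect = HandlePersistentDataReady;
+ *
+ * where `HandlePersistentDataReady()` follows the signature above, i.e.
+ * takes a `UserID` and a `bool`.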
+ */ +/* SIGNAL */ +public final function PersistentDataManager_OnPersistentDataReady_Slot OnEditResult( + AcediaObject receiver) +{ + return PersistentDataManager_OnPersistentDataReady_Slot( + onPersistentDataReadySignal.NewSlot(receiver)); +} + +// DO NOT CALL MANUALLY +public function PersistentDataManager_OnPersistentDataReady_Signal _getOnReadySignal() { - //SetupUserDataDatabase(); + return onPersistentDataReadySignal; } // DO NOT CALL MANUALLY @@ -58,111 +76,13 @@ public function _reloadFeature() _.memory.Free(usersFeature); } -// Loads persistent user database, specified by the AcediaCore's config and -// creates a basic skeleton for storing its data -private function SetupUserDataDatabase() -{ - local Text persistentDataLink; - local JSONPointer persistentDataPointer; - local HashTable skeleton, emptyObject; - - if (persistentDatabase != none) { - return; - } - // Check if database was even specified - persistentDataLink = GetPersistentDataLink(); - if (persistentDataLink.IsEmpty()) - { - _.logger.Auto(warnNoPersistentDatabaseLink); - persistentDataLink.FreeSelf(); - return; - } - // If link was specified - try loading database from it - persistentDatabase = __core().db.Load(persistentDataLink); - if (persistentDatabase == none) - { - _.logger.Auto(errNoPersistentDatabase).Arg(persistentDataLink); - return; - } - // Write skeleton database's skeleton - skeleton = _.collections.EmptyHashTable(); - emptyObject = _.collections.EmptyHashTable(); - skeleton.SetItem(P("Groups"), emptyObject); - skeleton.SetItem(P("PerUserData"), emptyObject); - persistentDataPointer = __core().db.GetPointer(persistentDataLink); - persistentDatabase - .IncrementData(persistentDataPointer, skeleton) - .connect = ReportSkeletonCreationResult; - skeleton.FreeSelf(); - emptyObject.FreeSelf(); - persistentDataLink.FreeSelf(); - _.memory.Free(persistentDataPointer); -} - -private function ReportSkeletonCreationResult( - Database.DBQueryResult result, - Database source) -{ - local Text persistentDataLink; - - persistentDataLink = GetPersistentDataLink(); - if (result == DBR_Success) { - _.logger.Auto(infoPersistentDatabaseLoaded).Arg(persistentDataLink); - } - else - { - _.logger.Auto(errCannotCreateSkeletonFor).Arg(persistentDataLink); - _.memory.Free(persistentDatabase); - persistentDatabase = none; - } - _.memory.Free(persistentDataLink); -} - -/** - * Returns reference to the database of user records that Acedia was - * set up to use. - * - * `UserDatabase` is for storing a set of users that joined the game during - * the session, for database that stores persistent user data - * @see `GetPersistentDatabase()`. - * - * @return Main `UserDatabase` that Acedia currently uses to load and - * store user information. Guaranteed to be a valid non-`none` reference. - */ -public final function UserDatabase GetDatabase() -{ - return class'UserDatabase'.static.GetInstance(); -} - -/** - * Returns reference to the database of user records that Acedia was - * set up to use. - * - * `Database` returned by this method stores persistent user data, for - * the database of users that joined during the current game session - * @see `GetDatabase()`. - * - * @return Main `UserDatabase` that Acedia currently uses to load and - * store user information. Guaranteed to be a valid non-`none` reference. 
- */ -public final function Database GetPersistentDatabase() -{ - if (persistentDatabase != none) { - persistentDatabase.NewRef(); - } - return persistentDatabase; -} - -/** - * Returns configured database link to the JSON object in which users' data - * is stored. - * - * @return Database link to the JSON object in which users' data is stored. - * Guaranteed to not be `none`. - */ -public final function Text GetPersistentDataLink() +protected function Constructor() { - return _.text.FromString(userdataDBLink); + onPersistentDataReadySignal = + PersistentDataManager_OnPersistentDataReady_Signal( + _.memory.Allocate( + class'PersistentDataManager_OnPersistentDataReady_Signal') + ); } /** @@ -176,9 +96,12 @@ public final function Text GetPersistentDataLink() * @return `true` if database for users' persistent data storage exists and * `false` otherwise. */ -public final function bool PersistentStorageExists() +public final function bool IsPersistentStorageActive() { - return (persistentDatabase != none); + if (usersFeature != none) { + return (usersFeature.BorrowPersistentDataManager() != none); + } + return false; } /** @@ -1527,9 +1450,4 @@ public final function bool IsUserGroupDataLoaded() defaultproperties { - userdataDBLink = "[local]database:/users" - warnNoPersistentDatabaseLink = (l=LOG_Warning,m="No persistent user database link is setup. No persistent user data or user groups will be available. Setup `userDataDBLink` inside \"AcediaSystem.ini\".") - errCannotCreateSkeletonFor = (l=LOG_Error,m="Failed to create persistent database skeleton for connected database with link \"%1\". User data functionality won't function properly.") - errNoPersistentDatabase = (l=LOG_Error,m="Failed to connect to persistent user database with link \"%1\".") - infoPersistentDatabaseLoaded = (l=LOG_Info,m="Connected to persistent user database with link \"%1\".") } \ No newline at end of file diff --git a/sources/Users/UserDatabase.uc b/sources/Users/UserDatabase.uc index 348a21e..4fc6735 100644 --- a/sources/Users/UserDatabase.uc +++ b/sources/Users/UserDatabase.uc @@ -1,7 +1,7 @@ /** * Simple user database for Acedia. * Only stores data for a session, map or server restarts will clear it. - * Copyright 2020 Anton Tarasenko + * Copyright 2020-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -23,7 +23,7 @@ class UserDatabase extends AcediaObject // This is used as a global variable only (`default.activeDatabase`) to store // a reference to main database for persistent data, used by Acedia. -var public UserDatabase activeDatabase; +var public UserDatabase activeDatabase; // `User` records that were stored this session var private array sessionUsers; // `UserID`s generated during this session. 
@@ -90,7 +90,7 @@ public final function UserID FetchUserID(BaseText idHash) if (newUserID.IsInitialized()) { storedUserIDs[storedUserIDs.length] = newUserID; - newUserID.newRef(); + newUserID.NewRef(); return newUserID; } _.memory.Free(steamID.steamID64); @@ -112,13 +112,16 @@ public final function User FetchUser(UserID userID) local User newUser; for (i = 0; i < sessionUsers.length; i += 1) { - if (sessionUsers[i].GetID().IsEqual(userID)) { + if (sessionUsers[i].GetID().IsEqual(userID)) + { + sessionUsers[i].NewRef(); return sessionUsers[i]; } } newUser = User(__().memory.Allocate(class'User')); newUser.Initialize(userID, sessionUsers.length + 1); sessionUsers[sessionUsers.length] = newUser; + newUser.NewRef(); return newUser; } @@ -135,7 +138,9 @@ public final function User FetchUserByKey(int userKey) local int i; for (i = 0; i < sessionUsers.length; i += 1) { - if (sessionUsers[i].GetKey() == userKey) { + if (sessionUsers[i].GetKey() == userKey) + { + sessionUsers[i].NewRef(); return sessionUsers[i]; } } diff --git a/sources/Users/Users.uc b/sources/Users/Users.uc index f1bfa10..28282f9 100644 --- a/sources/Users/Users.uc +++ b/sources/Users/Users.uc @@ -21,8 +21,10 @@ class Users extends FeatureConfig perobjectconfig config(AcediaUsers); -var public config bool useDatabase; -var public config string databaseLink; +var public config bool usePersistentData; +var public config string persistentDataDatabaseLink; +var public config bool useDatabaseForGroupsData; +var public config string groupsDatabaseLink; var public config array localUserGroup; protected function HashTable ToData() @@ -32,8 +34,10 @@ protected function HashTable ToData() local ArrayList userGroupList; data = __().collections.EmptyHashTable(); - data.SetBool(P("useDatabase"), useDatabase, false); - data.SetString(P("databaseLink"), databaseLink); + data.SetBool(P("usePersistentData"), usePersistentData); + data.SetString(P("persistentDataDatabaseLink"), persistentDataDatabaseLink); + data.SetBool(P("useDatabaseForGroupsData"), useDatabaseForGroupsData); + data.SetString(P("groupsDatabaseLink"), groupsDatabaseLink); userGroupList = _.collections.EmptyArrayList(); for (i = 0; i < localUserGroup.length; i += 1) { userGroupList.AddString(localUserGroup[i]); @@ -51,10 +55,14 @@ protected function FromData(HashTable source) if (source == none) { return; } - useDatabase = source.GetBool(P("useDatabase")); - databaseLink = source.GetString( - P("databaseLink"), - "[local]database:/users"); + usePersistentData = source.GetBool(P("usePersistentData")); + persistentDataDatabaseLink = source.GetString( + P("persistentDataDatabaseLink"), + "[local]database:/persistent_data"); + useDatabaseForGroupsData = source.GetBool(P("useDatabaseForGroupsData")); + groupsDatabaseLink = source.GetString( + P("groupsDatabaseLink"), + "[local]database:/groups_data"); userGroupList = source.GetArrayList(P("userGroups")); localUserGroup.length = 0; if (userGroupList == none) { @@ -68,8 +76,10 @@ protected function FromData(HashTable source) protected function DefaultIt() { - useDatabase = false; - databaseLink = "[local]database:/users"; + usePersistentData = false; + persistentDataDatabaseLink = "[local]database:/persistent_data"; + useDatabaseForGroupsData = false; + groupsDatabaseLink = "[local]database:/groups_data"; localUserGroup.length = 0; localUserGroup[0] = "admin"; localUserGroup[1] = "moderator"; @@ -79,8 +89,10 @@ protected function DefaultIt() defaultproperties { configName = "AcediaUsers" - useDatabase = false - databaseLink = 
"[local]database:/users" + usePersistentData = false + persistentDataDatabaseLink = "[local]database:/persistent_data" + useDatabaseForGroupsData = false + groupsDatabaseLink = "[local]database:/groups_data" localUserGroup(0) = "admin" localUserGroup(1) = "moderator" localUserGroup(2) = "trusted" diff --git a/sources/Users/Users_Feature.uc b/sources/Users/Users_Feature.uc index 7f398c6..e785eb5 100644 --- a/sources/Users/Users_Feature.uc +++ b/sources/Users/Users_Feature.uc @@ -3,7 +3,7 @@ * database-defined information about group sources. An instance of this * feature is necessary for functioning of Acedia's `UserAPI` methods related * to user groups. - * Copyright 2022 Anton Tarasenko + * Copyright 2022-2023 Anton Tarasenko *------------------------------------------------------------------------------ * This file is part of Acedia. * @@ -20,10 +20,19 @@ * You should have received a copy of the GNU General Public License * along with Acedia. If not, see . */ -class Users_Feature extends Feature; +class Users_Feature extends Feature + dependson(Database); -var private /*config*/ bool useDatabase; -var private /*config*/ string databaseLink; +/** + * This feature is responsible for managing users: their groups and persistent + * data. Group information can be stored in both configs and databases, while + * persistent data can only be stored in databases. + */ + +var private /*config*/ bool usePersistentData; +var private /*config*/ string persistentDataDatabaseLink; +var private /*config*/ bool useDatabaseForGroupsData; +var private /*config*/ string groupsDatabaseLink; var private /*config*/ array availableUserGroups; var private bool diskSaveScheduled; @@ -39,6 +48,13 @@ struct IDAnnotationPair var Text id, annotation; }; +var private bool userGroupsDataLoaded; +var private Database usersGroupsDatabase; +var private JSONPointer userGroupsRootPointer; +var private int stackedDBReadingRequests; + +var private PersistentDataManager currentPersistentDataManager; + // List of all available user groups for current config var private array loadedUserGroups; // `HashTable` (with group name keys) that stores `HashTable`s used as @@ -47,6 +63,10 @@ var private HashTable loadedGroupToUsersMap; var private LoggerAPI.Definition warnNoLocalGroup, warnDuplicateIDs; var private LoggerAPI.Definition errCannotCreateLocalGroup; +var private LoggerAPI.Definition errCannotOpenDatabase, infoUserGroupDataLoaded; +var private LoggerAPI.Definition errDBBadRootUserGroupData, errDBBadLinkPointer; +var private LoggerAPI.Definition errDBDamaged, errNoServerCore; +var private LoggerAPI.Definition errDBContainsNonLowerRegister; protected function OnEnabled() { @@ -60,6 +80,7 @@ protected function OnEnabled() feature.RegisterCommand(class'ACommandUserGroups'); feature.FreeSelf(); } + LoadUserData(); } protected function OnDisabled() @@ -74,6 +95,9 @@ protected function OnDisabled() feature.RemoveCommand(class'ACommandUserGroups'); feature.FreeSelf(); } + ResetUploadedUserGroups(); + _.memory.Free(currentPersistentDataManager); + currentPersistentDataManager = none; } protected function SwapConfig(FeatureConfig config) @@ -84,11 +108,195 @@ protected function SwapConfig(FeatureConfig config) if (newConfig == none) { return; } - useDatabase = newConfig.useDatabase; - databaseLink = newConfig.databaseLink; - availableUserGroups = newConfig.localUserGroup; - class'UserGroup'.static.Initialize(); - LoadLocalData(); + usePersistentData = newConfig.usePersistentData; + persistentDataDatabaseLink = 
newConfig.persistentDataDatabaseLink; + useDatabaseForGroupsData = newConfig.useDatabaseForGroupsData; + groupsDatabaseLink = newConfig.groupsDatabaseLink; + availableUserGroups = newConfig.localUserGroup; + ResetUploadedUserGroups(); + if (IsEnabled()) + { + if (!_server.IsAvailable()) + { + _.logger.Auto(errNoServerCore); + return; + } + LoadUserData(); + SetupPersistentData(usePersistentData); + } +} + +/** + * Borrows active `PersistentDataManager` (if one is setup for this config). + * + * @return Borrowed reference to active `PersistentDataManager`. Can be `none` + * if persistent data isn't setup for the current config. + */ +public final function PersistentDataManager BorrowPersistentDataManager() +{ + return currentPersistentDataManager; +} + +private final function SetupPersistentData(bool doUsePersistentData) +{ + local Text databaseLinkAsText; + local Database persistentDatabase; + local JSONPointer persistentRootPointer; + + if (!doUsePersistentData) + { + _.memory.Free(currentPersistentDataManager); + currentPersistentDataManager = none; + return; + } + databaseLinkAsText = _.text.FromString(persistentDataDatabaseLink); + persistentDatabase = _server.db.Load(databaseLinkAsText); + if (persistentDatabase != none) { + persistentRootPointer = _server.db.GetPointer(databaseLinkAsText); + } + else { + _.logger.Auto(errCannotOpenDatabase).Arg(databaseLinkAsText); + } + if (persistentRootPointer != none) + { + if (currentPersistentDataManager == none) + { + currentPersistentDataManager = PersistentDataManager( + _.memory.Allocate(class'PersistentDataManager')); + } + currentPersistentDataManager.Setup( + persistentDatabase, + persistentRootPointer); + currentPersistentDataManager.LoadCurrentPlayers(); + } + _.memory.Free(persistentRootPointer); + _.memory.Free(persistentDatabase); + _.memory.Free(databaseLinkAsText); +} + +private final function LoadUserData() +{ + local Text databaseLinkAsText; + local HashTable emptyHashTable; + + if (userGroupsDataLoaded) { + return; + } + if (useDatabaseForGroupsData) + { + databaseLinkAsText = _.text.FromString(groupsDatabaseLink); + usersGroupsDatabase = _server.db.Load(databaseLinkAsText); + if (usersGroupsDatabase == none) { + _.logger.Auto(errCannotOpenDatabase).Arg(databaseLinkAsText); + } + else + { + userGroupsRootPointer = _server.db.GetPointer(databaseLinkAsText); + emptyHashTable = _.collections.EmptyHashTable(); + usersGroupsDatabase.IncrementData( + userGroupsRootPointer, + emptyHashTable); + emptyHashTable.FreeSelf(); + usersGroupsDatabase.ReadData(userGroupsRootPointer).connect = + HandleInitialUserGroupsDataLoading; + stackedDBReadingRequests += 1; + databaseLinkAsText.FreeSelf(); + } + } + else + { + class'UserGroup'.static.Initialize(); + LoadLocalData(); + userGroupsDataLoaded = true; + } +} + +private final function HandleInitialUserGroupsDataLoading( + Database.DBQueryResult result, + AcediaObject data, + Database source, + int requestID) +{ + local Text databaseLinkAsText; + local HashTable newGroupData; + + stackedDBReadingRequests -= 1; + // If this counter remains above zero, that means several requests were + // made and this response is to the outdated one + if (stackedDBReadingRequests > 0) return; + if (!IsEnabled()) return; + + newGroupData = HashTable(data); + databaseLinkAsText = _.text.FromString(groupsDatabaseLink); + if (result == DBR_Success) + { + if (newGroupData == none) + { + _.logger.Auto(errDBBadRootUserGroupData).Arg(databaseLinkAsText); + return; + } + userGroupsDataLoaded = true; + 
_.memory.Free(loadedGroupToUsersMap); + _.memory.FreeMany(loadedUserGroups); + loadedGroupToUsersMap = FilterDBData(newGroupData); + loadedUserGroups = loadedGroupToUsersMap.GetTextKeys(); + newGroupData.FreeSelf(); + _.logger.Auto(infoUserGroupDataLoaded).Arg(databaseLinkAsText); + } + else if (result == DBR_InvalidPointer) { + _.logger.Auto(errDBBadLinkPointer).Arg(databaseLinkAsText); + } + else + { + // Any other error indicates that database is somehow damaged and + // unusable for our purpose + _.logger.Auto(errDBDamaged).Arg(databaseLinkAsText); + } +} + +private final function HashTable FilterDBData(HashTable received) +{ + local int i; + local array allKeys; + local AcediaObject nextItem; + local HashTable result; + + if (received == none) { + return none; + } + result = _.collections.EmptyHashTable(); + allKeys = received.GetTextKeys(); + for (i = 0; i < allKeys.length; i += 1) + { + if (allKeys[i].IsLowerCase()) + { + nextItem = received.GetItem(allKeys[i]); + result.SetItem(allKeys[i], nextItem); + _.memory.Free(nextItem); + } + else + { + allKeys[i].NewRef(); + _.logger.Auto(errDBContainsNonLowerRegister) + .Arg(_.text.FromString(groupsDatabaseLink)) + .Arg(allKeys[i]); + } + } + _.memory.FreeMany(allKeys); + return result; +} + +private final function ResetUploadedUserGroups() +{ + _.memory.Free(userGroupsRootPointer); + _.memory.Free(usersGroupsDatabase); + _.memory.Free(loadedGroupToUsersMap); + _.memory.FreeMany(loadedUserGroups); + userGroupsRootPointer = none; + usersGroupsDatabase = none; + loadedGroupToUsersMap = none; + loadedUserGroups.length = 0; + userGroupsDataLoaded = false; } private final function LoadLocalData() @@ -228,7 +436,7 @@ private final function SaveLocalData() local Users currentConfig; local HashTableIterator iter; - if (useDatabase) return; + if (useDatabaseForGroupsData) return; if (loadedGroupToUsersMap == none) return; availableUserGroups.length = 0; @@ -384,7 +592,8 @@ public final /*unreal*/ function array GetAvailableGroups_S() */ public final function bool AddGroup(BaseText groupName) { - local Text lowerCaseGroupName; + local Text lowerCaseGroupName; + local HashTable emptyHashTable; if (groupName == none) { return false; @@ -395,8 +604,20 @@ public final function bool AddGroup(BaseText groupName) lowerCaseGroupName.FreeSelf(); return false; } - // Try loading local `UserGroup`? - if (LoadLocalGroup(lowerCaseGroupName)) + if (useDatabaseForGroupsData) + { + emptyHashTable = _.collections.EmptyHashTable(); + userGroupsRootPointer.Push(lowerCaseGroupName); + usersGroupsDatabase.IncrementData( + userGroupsRootPointer, + emptyHashTable); + _.memory.Free(userGroupsRootPointer.Pop()); + loadedUserGroups[loadedUserGroups.length] = lowerCaseGroupName; + loadedGroupToUsersMap.SetItem(lowerCaseGroupName, emptyHashTable); + emptyHashTable.FreeSelf(); + return true; + } + else if (LoadLocalGroup(lowerCaseGroupName)) { // Move `lowerCaseGroupName` here, do NOT release the reference loadedUserGroups[loadedUserGroups.length] = lowerCaseGroupName; @@ -478,10 +699,17 @@ public final function bool RemoveGroup(BaseText groupName) break; } } - // Try loading local `UserGroup`? 
loadedGroupToUsersMap.RemoveItem(lowerCaseGroupName); + if (useDatabaseForGroupsData) + { + userGroupsRootPointer.Push(lowerCaseGroupName); + usersGroupsDatabase.RemoveData(userGroupsRootPointer); + _.memory.Free(userGroupsRootPointer.Pop()); + } + else { + ScheduleConfigSave(); + } lowerCaseGroupName.FreeSelf(); - ScheduleConfigSave(); return true; } @@ -585,20 +813,30 @@ public final function bool AddSteamIDToGroup( local Text lowercaseGroupName; local HashTable groupUsers; + if (steamID == none) return false; if (loadedGroupToUsersMap == none) return false; if (groupName == none) return false; lowercaseGroupName = groupName.LowerCopy(); groupUsers = loadedGroupToUsersMap.GetHashTable(lowercaseGroupName); - lowercaseGroupName.FreeSelf(); - // No specified group? Nothing to add! - if (groupUsers == none) { - return false; + if (groupUsers != none && !groupUsers.HasKey(steamID)) + { + groupUsers.SetItem(steamID, none); + if (useDatabaseForGroupsData) + { + userGroupsRootPointer.Push(lowerCaseGroupName); + userGroupsRootPointer.Push(steamID); + usersGroupsDatabase.IncrementData(userGroupsRootPointer, none); + _.memory.Free(userGroupsRootPointer.Pop()); + _.memory.Free(userGroupsRootPointer.Pop()); + } + else { + ScheduleConfigSave(); + } } - groupUsers.SetItem(steamID, none); - groupUsers.FreeSelf(); - ScheduleConfigSave(); - return true; + lowercaseGroupName.FreeSelf(); + _.memory.Free(groupUsers); + return (groupUsers != none); } /** @@ -765,19 +1003,30 @@ public final function bool RemoveSteamIDFromGroup( local Text lowercaseGroupName; local HashTable groupUsers; + if (steamID == none) return false; if (groupName == none) return false; if (loadedGroupToUsersMap == none) return false; lowercaseGroupName = groupName.LowerCopy(); groupUsers = loadedGroupToUsersMap.GetHashTable(lowercaseGroupName); - lowercaseGroupName.FreeSelf(); - if (groupUsers == none) { - return false; + if (groupUsers != none) + { + hadUser = groupUsers.HasKey(steamID); + groupUsers.RemoveItem(steamID); + if (useDatabaseForGroupsData) + { + userGroupsRootPointer.Push(lowerCaseGroupName); + userGroupsRootPointer.Push(steamID); + usersGroupsDatabase.RemoveData(userGroupsRootPointer); + _.memory.Free(userGroupsRootPointer.Pop()); + _.memory.Free(userGroupsRootPointer.Pop()); + } + else { + ScheduleConfigSave(); + } } - hadUser = groupUsers.HasKey(steamID); - groupUsers.RemoveItem(steamID); - groupUsers.FreeSelf(); - ScheduleConfigSave(); + _.memory.Free(groupUsers); + lowercaseGroupName.FreeSelf(); return hadUser; } @@ -1495,11 +1744,23 @@ public final function SetAnnotationForSteamID( lowerCaseGroupName = groupName.LowerCopy(); groupUsers = loadedGroupToUsersMap.GetHashTable(lowerCaseGroupName); - lowerCaseGroupName.FreeSelf(); - if (groupUsers != none && groupUsers.HasKey(steamID)) { + if (groupUsers != none && groupUsers.HasKey(steamID)) + { groupUsers.SetItem(steamID, annotation); + if (useDatabaseForGroupsData) + { + userGroupsRootPointer.Push(lowerCaseGroupName); + userGroupsRootPointer.Push(steamID); + usersGroupsDatabase.WriteData(userGroupsRootPointer, annotation); + _.memory.Free(userGroupsRootPointer.Pop()); + _.memory.Free(userGroupsRootPointer.Pop()); + } + else { + ScheduleConfigSave(); + } } _.memory.Free(groupUsers); + lowerCaseGroupName.FreeSelf(); } /** @@ -1872,13 +2133,13 @@ public final /*unreal*/ function bool IsUserInGroup_S( * * Data loaded once is cached and this method returning `true` does not * guarantee that is isn't outdated. 
Additional, asynchronous queries must be
- * made to check for that. 
+ * made to check for that.
  *
  * @return `true` if user groups' data was loaded and `false` otherwise.
  */
 public final function bool IsUserGroupDataLoaded()
 {
-    return true;
+    return userGroupsDataLoaded;
 }
 
 defaultproperties
 {
@@ -1887,4 +2148,11 @@ defaultproperties
     warnNoLocalGroup                = (l=LOG_Warning,m="Expected config to contain `UserGroup` named \"%1\", but it is missing. \"AcediaUsers.ini\" might be misconfigured.")
     warnDuplicateIDs                = (l=LOG_Warning,m="Duplicate record for user id \"%1\" is found in `UserGroup` named \"%2\". \"AcediaUsers.ini\" is misconfigured and needs to be fixed.")
     errCannotCreateLocalGroup       = (l=LOG_Error,m="Failed to create config section for `UserGroup` named \"%1\".")
+    errCannotOpenDatabase           = (l=LOG_Error,m="\"Users_Feature\" failed to open the database given by the link \"%1\".")
+    infoUserGroupDataLoaded         = (l=LOG_Info,m="Successfully loaded user data from the database link \"%1\".")
+    errDBBadRootUserGroupData       = (l=LOG_Error,m="Database link \"%1\" (configured to load user group data in \"AcediaUsers.ini\") contains incompatible data.")
+    errDBBadLinkPointer             = (l=LOG_Error,m="Path inside database link \"%1\" (configured inside \"AcediaUsers.ini\") is invalid.")
+    errDBDamaged                    = (l=LOG_Error,m="Database given by the link \"%1\" (configured inside \"AcediaUsers.ini\") seems to be damaged.")
+    errNoServerCore                 = (l=LOG_Error,m="Cannot start \"Users_Feature\" because no `ServerCore` was created.")
+    errDBContainsNonLowerRegister   = (l=LOG_Error,m="Database given by the link \"%1\" contains a non-lowercase key \"%2\". This shouldn't happen unless someone manually edited the database.")
 }
\ No newline at end of file
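
The `Users_Feature` methods added above (`AddGroup()`, `AddSteamIDToGroup()`, `IsUserGroupDataLoaded()`, `BorrowPersistentDataManager()`) are the group-management surface other mods are expected to call. Below is a minimal, illustrative UnrealScript sketch of such a call site, not taken from the patch itself: the helper name `AddTrustedUser()`, the assumption that it lives inside an `AcediaObject` subclass (so `_` is available), the way the `Users_Feature` reference is obtained and the `(steamID, groupName)` argument order of `AddSteamIDToGroup()` are all assumptions made for the example.

    // Hypothetical helper, assumed to be declared inside an `AcediaObject`
    // subclass; `usersFeature` is an already enabled `Users_Feature`
    // reference held by the caller.
    private final function AddTrustedUser(Users_Feature usersFeature)
    {
        local Text groupName, steamID;

        if (usersFeature == none)                   return;
        // Group data is loaded asynchronously when the database backend is
        // used, so a real caller would wait for it instead of bailing out.
        if (!usersFeature.IsUserGroupDataLoaded())  return;

        groupName = _.text.FromString("trusted");
        // Placeholder SteamID64, used purely for illustration.
        steamID = _.text.FromString("76561197960000000");
        // With `useDatabaseForGroupsData=true` both edits are also pushed to
        // the database behind `groupsDatabaseLink`; otherwise a config save
        // is scheduled instead.
        usersFeature.AddGroup(groupName);
        usersFeature.AddSteamIDToGroup(steamID, groupName);
        _.memory.Free(groupName);
        _.memory.Free(steamID);
    }

Unlike the `Text` values above, `BorrowPersistentDataManager()` only borrows its reference, so the caller is not expected to release it; it returns `none` whenever persistent data is not set up for the active config.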