diff --git a/docs/API/Databases/index.md b/docs/API/Databases/index.md
new file mode 100644
index 0000000..c0d8f16
--- /dev/null
+++ b/docs/API/Databases/index.md
@@ -0,0 +1,114 @@
+# Databases
+!!! Do not tell about recommended db type
+!!! Tell about no intention to bother with access rights
+While most mods' needs for storing data can be easily covered by config files, there are still use cases that require more powerful tools like databases:
+
+* With config files alone it is impossible to share data between several different servers, especially if they are located on different machines;
+* Representing hierarchical data in config files, while not impossible, can be quite tricky (and any generic implementation can itself be called a database).
+
+Acedia provides its own information storage functionality in the form of databases that store information in JSON format. That is, every Acedia database is represented by a JSON object that can be interacted with through the provided database API. Two implementations are provided:
+
+1. **Remote database** *(not yet implemented)* that provides Acedia an ability to connect to Avarice database over TCP connection, allowing such database to be used by several servers at once;
+2. **Local database** that stores information in the server's own directory, making it only accessible from that server. While using remote databases is recommended, local ones make sure that Acedia can function even if a server admin does not want to use external software.
+
+## Using databases
+
+To demonstrate the basics of working with Acedia's databases, let's consider a simple, practical problem: creating a feature that can remember shared text notes in the database and later display all the accumulated ones.
+
+```unrealscript
+class NoteTaker extends Feature;
+
+//  Database that stores our notes and a JSON pointer to the location
+//  (inside that database) where the notes array is kept.
+var private Database myDatabase;
+var private JSONPointer realmPointer;
+
+var private LoggerAPI.Definition errFailedToRead, errBadData, errHadBadNotes;
+
+protected function Constructor()
+{
+    local DynamicArray emptyArray;
+    //  NOTE(review): prose below uses `_.db.Realm(P(...))` - confirm whether
+    //  `Realms()` / `RealmsPointer()` is the current API.
+    myDatabase = _.db.Realms();
+    realmPointer = _.db.RealmsPointer();
+    realmPointer.Push(P("NoteTaker"));
+    //  "Increment" with an empty array creates our notes array if it does not
+    //  exist yet, without overwriting any notes already recorded
+    emptyArray = _.collections.EmptyDynamicArray();
+    myDatabase.IncrementData(realmPointer, emptyArray);
+    emptyArray.FreeSelf();
+}
+
+public function TakeNote(Text newNote)
+{
+    local DynamicArray wrapper;
+    if (newNote == none) {
+        return;
+    }
+    //  Appending one note == incrementing stored array with
+    //  a single-element array
+    wrapper = _.collections
+        .EmptyDynamicArray()
+        .AddItem(newNote);
+    myDatabase.IncrementData(realmPointer, wrapper);
+    wrapper.FreeSelf();
+}
+
+public function PrintAllNotes()
+{
+    myDatabase.ReadData(realmPointer).connect = DoPrint;
+}
+
+//  Handler for the asynchronous read scheduled in `PrintAllNotes()`;
+//  takes ownership of `data` and must free it on every path.
+private function DoPrint(DBQueryResult result, AcediaObject data)
+{
+    local int i;
+    local bool hadBadNotes;
+    local Text nextNote;
+    local DynamicArray loadedArray;
+    if (result != DBR_Success)
+    {
+        _.logger.Auto(errFailedToRead);
+        _.memory.Free(data);
+        return;
+    }
+    loadedArray = DynamicArray(data);
+    if (loadedArray == none)
+    {
+        _.logger.Auto(errBadData);
+        _.memory.Free(data);
+        return;
+    }
+    for (i = 0; i < loadedArray.GetLength(); i += 1)
+    {
+        nextNote = loadedArray.GetText(i);
+        if (nextNote != none) {
+            //  Reuse already fetched `nextNote` instead of calling
+            //  `GetText()` a second time
+            Log("Note" @ (i+1) $ "." @ nextNote.ToPlainString());
+        }
+        else {
+            hadBadNotes = true;
+        }
+    }
+    if (hadBadNotes) {
+        _.logger.Auto(errHadBadNotes);
+    }
+    _.memory.Free(data);
+}
+
+defaultproperties
+{
+    errFailedToRead = (l=LOG_Error,m="Could not read notes data from the database!")
+    errBadData = (l=LOG_Error,m="Notes database contained invalid data!")
+    errHadBadNotes = (l=LOG_Error,m="Some of the notes had wrong data format!")
+}
+```
+
+....
+Acedia assumes that *creating* and *deleting* databases is server admins' responsibility, since they have to make a choice of what type of database to use. So unless you are making a feature that is supposed to manage databases, **you should not attempt to create or delete databases**. Instead, you need to load an already existing one via one of several ways. The easiest way is using *realms*:
+
+```unrealscript
+local Database db;
+local JSONPointer ptr;
+db = _.db.Realm(P("MyMod")).database;
+ptr = _.db.Realm(P("MyMod")).pointer;
+```
+
+### Issues: database might already contain badly formatted data - check it
+
+### Improvements: delete notes
+
+### Improvements: loading only one note
+
+## Further topics
diff --git a/sources/Data/Database/DBAPI.uc b/sources/Data/Database/DBAPI.uc
new file mode 100644
index 0000000..ced06f9
--- /dev/null
+++ b/sources/Data/Database/DBAPI.uc
@@ -0,0 +1,200 @@
+/**
+ * API that provides methods for creating/destroying and managing available
+ * databases.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBAPI extends AcediaObject;
+
+//  Class used to instantiate local database objects; restored the stripped
+//  `class<...>` type parameter (defaultproperties assigns
+//  `class'LocalDatabaseInstance'`).
+var private const class<LocalDatabaseInstance> localDBClass;
+
+//      Store all already loaded databases to make sure we do not create two
+//  different `LocalDatabaseInstance` that are trying to make changes
+//  separately.
+var private AssociativeArray loadedLocalDatabases;
+
+//  Lazily initializes the map of loaded local databases on first use.
+private final function CreateLocalDBMapIfMissing()
+{
+    if (loadedLocalDatabases != none) {
+        return;
+    }
+    loadedLocalDatabases = __().collections.EmptyAssociativeArray();
+}
+
+/**
+ * Creates new local database with name `databaseName`.
+ *
+ * This method will fail if:
+ * 1. `databaseName` is `none` or empty;
+ * 2. Local database with name `databaseName` already exists.
+ *
+ * @param databaseName Name for the new database.
+ * @return Reference to created database. Returns `none` iff method failed.
+ */
+public final function LocalDatabaseInstance NewLocal(Text databaseName)
+{
+    local DBRecord rootRecord;
+    local Text rootRecordName;
+    local LocalDatabase newConfig;
+    local LocalDatabaseInstance newLocalDBInstance;
+    CreateLocalDBMapIfMissing();
+    //  No need to check `databaseName` for being valid,
+    //  since `Load()` will just return `none` if it is not.
+    newConfig = class'LocalDatabase'.static.Load(databaseName);
+    if (newConfig == none) return none;
+    //  A defined root means this database already exists - cannot create anew
+    if (newConfig.HasDefinedRoot()) return none;
+    //  Already loaded means it exists too
+    if (loadedLocalDatabases.HasKey(databaseName)) return none;
+
+    newLocalDBInstance = LocalDatabaseInstance(_.memory.Allocate(localDBClass));
+    //  Store a copy of the key, since `databaseName` is owned by the caller
+    loadedLocalDatabases.SetItem(databaseName.Copy(), newLocalDBInstance);
+    //  Create and persist the root record before handing the database out
+    rootRecord = class'DBRecord'.static.NewRecord(databaseName);
+    rootRecordName = _.text.FromString(string(rootRecord.name));
+    newConfig.SetRootName(rootRecordName);
+    newConfig.Save();
+    newLocalDBInstance.Initialize(newConfig, rootRecord);
+    _.memory.Free(rootRecordName);
+    return newLocalDBInstance;
+}
+
+/**
+ * Loads and returns local database with the name `databaseName`.
+ *
+ * If specified database is already loaded - simply returns its reference
+ * (consequent calls to `LoadLocal()` will keep returning the same reference,
+ * unless database is deleted).
+ *
+ * @param databaseName Name of the database to load.
+ * @return Loaded local database. `none` if it does not exist.
+ */
+public final function LocalDatabaseInstance LoadLocal(Text databaseName)
+{
+    local DBRecord rootRecord;
+    local Text rootRecordName;
+    local LocalDatabase newConfig;
+    local LocalDatabaseInstance newLocalDBInstance;
+    CreateLocalDBMapIfMissing();
+    //  Reuse an already loaded instance, so two `LocalDatabaseInstance`s
+    //  never manage the same database separately
+    if (loadedLocalDatabases.HasKey(databaseName))
+    {
+        return LocalDatabaseInstance(loadedLocalDatabases
+            .GetItem(databaseName));
+    }
+    //  No need to check `databaseName` for being valid,
+    //  since `Load()` will just return `none` if it is not.
+    newConfig = class'LocalDatabase'.static.Load(databaseName);
+    if (newConfig == none) return none;
+    //  No defined root - database was never actually created
+    if (!newConfig.HasDefinedRoot()) return none;
+
+    newLocalDBInstance = LocalDatabaseInstance(_.memory.Allocate(localDBClass));
+    //  Store a copy of the key, since `databaseName` is owned by the caller
+    loadedLocalDatabases.SetItem(databaseName.Copy(), newLocalDBInstance);
+    rootRecordName = newConfig.GetRootName();
+    rootRecord = class'DBRecord'.static
+        .LoadRecord(rootRecordName, databaseName);
+    newLocalDBInstance.Initialize(newConfig, rootRecord);
+    _.memory.Free(rootRecordName);
+    return newLocalDBInstance;
+}
+
+/**
+ * Checks if local database with the name `databaseName` already exists.
+ *
+ * @param databaseName Name of the database to check.
+ * @return `true` if database with specified name exists and `false` otherwise.
+ */
+public final function bool ExistsLocal(Text databaseName)
+{
+    local LocalDatabaseInstance loadedInstance;
+    //  A local database exists iff it can be loaded
+    loadedInstance = LoadLocal(databaseName);
+    return (loadedInstance != none);
+}
+
+/**
+ * Deletes local database with name `databaseName`.
+ *
+ * @param databaseName Name of the database to delete.
+ * @return `true` if database with specified name existed and was deleted and
+ * `false` otherwise.
+ */
+public final function bool DeleteLocal(Text databaseName)
+{
+    local LocalDatabase localDatabaseConfig;
+    local LocalDatabaseInstance localDatabase;
+    local AssociativeArray.Entry dbEntry;
+    CreateLocalDBMapIfMissing();
+    //  To delete database we first need to load it
+    localDatabase = LoadLocal(databaseName);
+    if (localDatabase != none) {
+        localDatabaseConfig = localDatabase.GetConfig();
+    }
+    //  `TakeEntry()` removes the pair from the map and hands us ownership
+    //  of both key and value
+    dbEntry = loadedLocalDatabases.TakeEntry(databaseName);
+    //  Delete `LocalDatabaseInstance` before erasing the package,
+    //  to allow it to clean up safely
+    _.memory.Free(dbEntry.key);
+    _.memory.Free(dbEntry.value);
+    if (localDatabaseConfig != none) {
+        //  NOTE(review): `GetPackageName()` result is not freed here -
+        //  confirm whether ownership stays with the config object
+        EraseAllPackageData(localDatabaseConfig.GetPackageName());
+        localDatabaseConfig.DeleteSelf();
+        return true;
+    }
+    return false;
+}
+
+//  Erases the save package `packageToErase` together with any `DBRecord`
+//  data objects left inside it. Silently does nothing for empty names.
+private function EraseAllPackageData(Text packageToErase)
+{
+    local int i;
+    local string packageName;
+    local GameInfo game;
+    local DBRecord nextRecord;
+    //  Restored stripped `<DBRecord>` type parameter
+    local array<DBRecord> allRecords;
+    packageName = _.text.ToString(packageToErase);
+    if (packageName == "") {
+        return;
+    }
+    game = _.unreal.GetGameType();
+    game.DeletePackage(packageName);
+    //  Delete any leftover objects. This has to be done *after*
+    //  `DeletePackage()` call, otherwise removed garbage can reappear.
+    //  No clear idea why it works this way.
+    foreach game.AllDataObjects(class'DBRecord', nextRecord, packageName) {
+        allRecords[allRecords.length] = nextRecord;
+    }
+    for (i = 0; i < allRecords.length; i += 1)
+    {
+        game.DeleteDataObject(class'DBRecord', string(allRecords[i].name),
+                                packageName);
+    }
+}
+
+/**
+ * Returns array of names of all available local databases.
+ *
+ * @return List of names of all local databases.
+ */
+//  Restored stripped `array<Text>` / `array<string>` type parameters.
+public final function array<Text> ListLocal()
+{
+    local int i;
+    local array<Text> dbNames;
+    local array<string> dbNamesAsStrings;
+    //  Local databases exist as per-object-config `LocalDatabase` entries
+    //  inside the "AcediaDB" config file
+    dbNamesAsStrings = GetPerObjectNames("AcediaDB",
+                                            string(class'LocalDatabase'.name),
+                                            MaxInt);
+    for (i = 0; i < dbNamesAsStrings.length; i += 1) {
+        dbNames[dbNames.length] = _.text.FromString(dbNamesAsStrings[i]);
+    }
+    return dbNames;
+}
+
+defaultproperties
+{
+ localDBClass = class'LocalDatabaseInstance'
+}
\ No newline at end of file
diff --git a/sources/Data/Database/DBTask.uc b/sources/Data/Database/DBTask.uc
new file mode 100644
index 0000000..ce7abec
--- /dev/null
+++ b/sources/Data/Database/DBTask.uc
@@ -0,0 +1,189 @@
+/**
+ * This should be considered an internal class and a detail of
+ * implementation.
+ * An object that is created when user tries to query database.
+ * It contains a delegate `connect()` that will be called when query is
+ * completed and will self-destruct afterwards. Concrete delegates are
+ * declared in child classes of this `DBTask`, since they can have different
+ * signatures, depending on the query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBTask extends AcediaObject
+ dependson(Database)
+ abstract;
+
+/**
+ * Life of instances of this class is supposed to go like so:
+ * 1. Get created and returned to the user that made database query so
+ * that he can setup a delegate that will receive the result;
+ * 2. Wait until database query result is ready AND all previous tasks
+ * have completed;
+ * 3. Call it's `connect()` delegate with query results;
+ * 4. Deallocate itself.
+ *
+ * Task is considered ready when its `DBQueryResult` variable was set.
+ *
+ * This class IS NOT supposed to be accessed by user at all - this is simply
+ * an auxiliary construction that allows us to make calls to the database
+ * like so: `db.ReadData(...).connect = handler;`.
+ *
+ * Since every query can have its own set of returned parameters -
+ * signature of `connect()` method can vary from task to task.
+ * For this reason we define it in child classes of `DBTask` that specialize in
+ * particular query.
+ */
+
+//  Task (if any) that must be completed before this one
+var private DBTask previousTask;
+//  This allows us to detect when the previous task got completed
+//  (deallocated): its current life version will no longer match this value
+var private int previousTaskLifeVersion;
+
+//  Query result for this task; only meaningful once `isReadyToComplete`
+var private Database.DBQueryResult taskResult;
+var private bool isReadyToComplete;
+
+var private LoggerAPI.Definition errLoopInTaskChain;
+
+protected function Finalizer()
+{
+    //  Release our (possibly already stale) reference to the previous task
+    if (previousTask != none) {
+        previousTask.FreeSelf(previousTaskLifeVersion);
+    }
+    isReadyToComplete = false;
+    previousTaskLifeVersion = -1;
+    previousTask = none;
+}
+
+/**
+ * Sets the task that has to be completed before the caller task can be.
+ *
+ * Having previous task assigned is not required for the caller task to
+ * be completed, since it can be the first task.
+ *
+ * @param task Task that has to be completed before this one can.
+ */
+public final function SetPreviousTask(DBTask task)
+{
+    previousTask = task;
+    if (task != none) {
+        //  Remember life version so we can later tell whether `task`
+        //  was already completed (and therefore deallocated)
+        previousTaskLifeVersion = task.GetLifeVersion();
+    }
+}
+
+/**
+ * Returns `DBQueryResult` assigned to the caller `DBTask`.
+ *
+ * This method should only be called after `SetResult()`, otherwise it's
+ * behavior and return result should be considered undefined.
+ *
+ * @return `DBQueryResult` assigned to the caller `DBTask`.
+ */
+public final function Database.DBQueryResult GetResult()
+{
+    //  Only meaningful after `SetResult()` was called at least once
+    return taskResult;
+}
+
+/**
+ * Assigns `DBQueryResult` for the caller task.
+ *
+ * Every single task has to be assigned one and cannot be completed before
+ * it does.
+ *
+ * This value can be assigned several times and the last assigned value will
+ * be used.
+ *
+ * @param result Result of the query, relevant to the caller task.
+ */
+public final function SetResult(Database.DBQueryResult result)
+{
+    //  Recording a result is precisely what marks the task as ready
+    isReadyToComplete = true;
+    taskResult = result;
+}
+
+/**
+ * Override this to call `connect()` delegate declared in child classes.
+ * Since this base class does not itself have `connect()` delegate declared -
+ * this method cannot be implemented here.
+ */
+//  Intentionally empty here: child classes override this to invoke their
+//  own `connect()` delegate
+protected function CompleteSelf() {}
+
+/**
+ * Attempts to complete this task.
+ * Can only succeed iff caller task both has necessary data to complete it's
+ * query and all previous tasks have completed.
+ */
+public final function TryCompleting()
+{
+    local int i;
+    //  Restored stripped `<DBTask>` type parameter
+    local array<DBTask> tasksQueue;
+    tasksQueue = BuildRequiredTasksQueue();
+    //  Queue is built backwards: tasks that have to be completed first are
+    //  at the end of the array
+    for (i = tasksQueue.length - 1; i >= 0; i -= 1)
+    {
+        if (tasksQueue[i].isReadyToComplete)
+        {
+            tasksQueue[i].CompleteSelf();
+            _.memory.Free(tasksQueue[i]);
+        }
+        else {
+            //  This task (and so every task after it) cannot complete yet
+            break;
+        }
+    }
+}
+
+// We do not know how deep `previousTask`-based chain will go, so we
+// will store tasks that have to complete last earlier in the array.
+//  Restored stripped `array<DBTask>` type parameters (return and local).
+private final function array<DBTask> BuildRequiredTasksQueue()
+{
+    local int i;
+    local int expectedLifeVersion;
+    local bool loopDetected;
+    local DBTask nextRequiredTask;
+    local array<DBTask> tasksQueue;
+    nextRequiredTask = self;
+    tasksQueue[0] = nextRequiredTask;
+    while (nextRequiredTask.previousTask != none)
+    {
+        expectedLifeVersion = nextRequiredTask.previousTaskLifeVersion;
+        nextRequiredTask = nextRequiredTask.previousTask;
+        if (nextRequiredTask.GetLifeVersion() != expectedLifeVersion) {
+            //  Stale reference: previous task has already completed and
+            //  got deallocated - the chain ends here
+            break;
+        }
+        //  Guard against a task chain looping back on itself
+        for (i = 0; i < tasksQueue.length; i += 1)
+        {
+            if (nextRequiredTask == tasksQueue[i])
+            {
+                loopDetected = true;
+                break;
+            }
+        }
+        if (!loopDetected) {
+            tasksQueue[tasksQueue.length] = nextRequiredTask;
+        }
+        else
+        {
+            _.logger.Auto(errLoopInTaskChain).ArgClass(nextRequiredTask.class);
+            break;
+        }
+    }
+    return tasksQueue;
+}
+
+defaultproperties
+{
+ errLoopInTaskChain = (l=LOG_Error,m="`DBTask` of class `%1` required itself to complete. This might cause database to get damaged unexpectedly. Please report this to the developer.")
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Database.uc b/sources/Data/Database/Database.uc
new file mode 100644
index 0000000..aa7079d
--- /dev/null
+++ b/sources/Data/Database/Database.uc
@@ -0,0 +1,304 @@
+/**
+ * Interface database class that provides all Acedia's functionality for
+ * querying databases. For most of the cases, this is a class you are expected
+ * to work with and providing appropriate implementation is Acedia's `DBAPI`
+ * responsibility. Choice of the implementation is done based on user's
+ * config files.
+ * All of the methods are asynchronous - they do not return requested
+ * values immediately and instead require user to provide a handler function
+ * that will be called once operation is completed.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class Database extends AcediaObject
+ abstract;
+
+/**
+ * Describes possible data types that can be stored in Acedia's databases.
+ * Lists consists of all possible JSON values types (with self-explanatory
+ * names) plus technical `JSON_Undefined` type that is used to indicate that
+ * a particular value does not exist.
+ */
+enum DataType
+{
+    //  Technical value: requested data does not exist in the database
+    JSON_Undefined,
+    JSON_Null,
+    JSON_Boolean,
+    JSON_Number,
+    JSON_String,
+    JSON_Array,
+    JSON_Object
+};
+
+/**
+ * Possible outcomes of any query: success (only `DBR_Success`) or
+ * some kind of failure (any other value).
+ * This type is common for all queries, however reasons as to why
+ * a particular result value was obtained can differ from one to another.
+ */
+enum DBQueryResult
+{
+ // Means query has succeeded;
+ DBR_Success,
+ // Query was provided with an invalid JSON pointer
+ // (`none` or somehow otherwise unfit to be used with a particular query);
+ DBR_InvalidPointer,
+ // Operation could not finish because database is damaged and unusable;
+ DBR_InvalidDatabase,
+ // Means that data (provided for the query) is somehow invalid.
+ DBR_InvalidData
+};
+
+/**
+ * Schedules reading data, located at the given `pointer` in
+ * the caller database.
+ *
+ * @param pointerToData JSON pointer to the value in database to read.
+ * `none` is always treated as an invalid JSON pointer.
+ * @param makeMutable Setting this to `false` (default) will force method
+ * to load data as immutable Acedia's types and `true` will make it load
+ * data as mutable types. This setting does not affect `Collection`s into
+ * which JSON arrays and objects are converted - they are always mutable.
+ * @return Task object that corresponds to this `ReadData()` call.
+ * * Guaranteed to be not `none`;
+ * * Use it to connect a handler for when reading task is complete:
+ * `ReadData(...).connect = handler`,
+ * where `handler` must have the following signature:
+ * `connect(DBQueryResult result, AcediaObject data)`;
+ * * Ownership of `data` object returned in the `connect()` is considered
+ * to be transferred to whoever handled result of this query.
+ * It must be deallocated once no longer needed.
+ * * Possible `DBQueryResult` types are `DBR_Success`,
+ * `DBR_InvalidPointer` and `DBR_InvalidDatabase`;
+ * * `data != none` iff `result == DBR_Success`;//TODO: JSON null???
+ * * `DBR_InvalidPointer` can be produced if either `pointer == none` or
+ * it does not point at any existing value inside the caller database.
+ */
+public function DBReadTask ReadData(
+    JSONPointer pointer,
+    optional bool makeMutable)
+{
+    //  Abstract stub: concrete database classes (chosen via `DBAPI`)
+    //  override this; the base class has no storage to read from
+    return none;
+}
+
+/**
+ * Schedules writing `data` at the location inside the caller database,
+ * given by the `pointer`.
+ *
+ * Only `AssociativeArray` (that represents JSON object) can be recorded as
+ * a database's root value (referred to by an empty JSON pointer "").
+ *
+ * @param pointer JSON pointer to the location in the database, where `data`
+ * should be written (as a JSON value).
+ * `none` is always treated as an invalid JSON pointer.
+ * @param data Data that needs to be written at the specified location
+ * inside the database. For method to succeed this object needs to have
+ * JSON-compatible type (see `_.json.IsCompatible()` for more details).
+ * @return Task object that corresponds to this `WriteData()` call.
+ * * Guaranteed to be not `none`;
+ * * Use it to connect a handler for when writing task is complete:
+ * `WriteData(...).connect = handler`,
+ * where `handler` must have the following signature:
+ * `connect(DBQueryResult result)`;
+ * * Possible `DBQueryResult` types are `DBR_Success`,
+ * `DBR_InvalidPointer`, `DBR_InvalidDatabase` and `DBR_InvalidData`;
+ * * Data is actually written inside the database iff
+ * `result == DBR_Success`;
+ * * `result == DBR_InvalidData` iff either given `data`'s type is not
+ * JSON-compatible or a non-`AssociativeArray` was attempted to be
+ * recorded as caller database's root value;
+ * * `DBR_InvalidPointer` can be produced if either `pointer == none` or
+ * container of the value `pointer` points at does not exist.
+ * Example: writing data at "/sub-object/valueA" will always fail if
+ * "sub-object" does not exist.
+ */
+public function DBWriteTask WriteData(JSONPointer pointer, AcediaObject data)
+{
+    //  Abstract stub: overridden by concrete database implementations
+    return none;
+}
+
+/**
+ * Schedules removing data at the location inside the caller database,
+ * given by the `pointer`.
+ *
+ * "Removing" root object results in simply erasing all of its stored data.
+ *
+ * @param pointer JSON pointer to the location of the data to remove from
+ * database. `none` is always treated as an invalid JSON pointer.
+ * @return Task object that corresponds to this `RemoveData()` call.
+ * * Guaranteed to be not `none`;
+ * * Use it to connect a handler for when writing task is complete:
+ * `RemoveData(...).connect = handler`,
+ * where `handler` must have the following signature:
+ * `connect(DBQueryResult result)`.
+ * * Possible `DBQueryResult` types are `DBR_Success`,
+ * `DBR_InvalidPointer` and `DBR_InvalidDatabase`;
+ * * Data is actually removed from the database iff
+ * `result == DBR_Success`.
+ * * `DBR_InvalidPointer` can be produced if either `pointer == none` or
+ * it does not point at any existing value inside the caller database.
+ */
+public function DBRemoveTask RemoveData(JSONPointer pointer)
+{
+    //  Abstract stub: overridden by concrete database implementations
+    return none;
+}
+
+/**
+ * Schedules checking type of data at the location inside the caller database,
+ * given by the `pointer`.
+ *
+ * @param pointer JSON pointer to the location of the data for which type
+ * needs to be checked.
+ * `none` is always treated as an invalid JSON pointer.
+ * @return Task object that corresponds to this `CheckDataType()` call.
+ * * Guaranteed to be not `none`;
+ * * Use it to connect a handler for when reading task is complete:
+ * `CheckDataType(...).connect = handler`,
+ * where `handler` must have the following signature:
+ * `connect(DBQueryResult result, Database.DataType type)`;
+ * * Possible `DBQueryResult` types are `DBR_Success`,
+ * `DBR_InvalidPointer` and `DBR_InvalidDatabase`;
+ * * This task can only fail if either caller database is broken
+ * (task will produce `DBR_InvalidDatabase` result) or given `pointer`
+ * is `none` (task will produce `DBR_InvalidPointer` result).
+ * Otherwise the result will be `DBR_Success`.
+ * * Returned `type` actually describes the data at the given location
+ * iff `result == DBR_Success`.
+ */
+public function DBCheckTask CheckDataType(JSONPointer pointer)
+{
+    //  Abstract stub: overridden by concrete database implementations
+    return none;
+}
+
+/**
+ * Schedules obtaining "size": amount of elements stored inside
+ * either JSON object or JSON array, which location inside the caller database
+ * is given by provided `pointer`.
+ *
+ * For every JSON value that is neither object or array size is
+ * defined as `-1`.
+ *
+ * @param pointer JSON pointer to the location of the JSON object or array
+ * for which size needs to be obtained.
+ * `none` is always treated as an invalid JSON pointer.
+ * @return Task object that corresponds to this `GetDataSize()` call.
+ * * Guaranteed to be not `none`;
+ * * Use it to connect a handler for when reading task is complete:
+ * `GetDataSize(...).connect = handler`,
+ * where `handler` must have the following signature:
+ * `connect(DBQueryResult result, int size)`.
+ * * Possible `DBQueryResult` types are `DBR_Success`,
+ * `DBR_InvalidPointer` and `DBR_InvalidDatabase`;
+ * * Returned `size` value is actually a size of referred
+ * JSON object/array inside the database iff `result == DBR_Success`;
+ * * `DBR_InvalidPointer` can be produced if either `pointer == none` or
+ * it does not point at a JSON object or array inside the
+ * caller database.
+ */
+public function DBSizeTask GetDataSize(JSONPointer pointer)
+{
+    //  Abstract stub: overridden by concrete database implementations
+    return none;
+}
+
+/**
+ * Schedules obtaining set of keys inside the JSON object, which location in
+ * the caller database is given by provided `pointer`.
+ *
+ * Only JSON objects have (and will return) keys (names of their sub-values).
+ *
+ * @param pointer JSON pointer to the location of the JSON object for which
+ * keys need to be obtained.
+ * `none` is always treated as an invalid JSON pointer.
+ * @return Task object that corresponds to this `GetDataKeys()` call.
+ * * Guaranteed to be not `none`;
+ * * Use it to connect a handler for when reading task is complete:
+ * `GetDataKeys(...).connect = handler`,
+ * where `handler` must have the following signature:
+ * `connect(DBQueryResult result, DynamicArray keys)`.
+ * * Ownership of `keys` array returned in the `connect()` is considered
+ * to be transferred to whoever handled result of this query.
+ * It must be deallocated once no longer needed.
+ * * Possible `DBQueryResult` types are `DBR_Success`,
+ * `DBR_InvalidPointer`, `DBR_InvalidData` and `DBR_InvalidDatabase`;
+ * * Returned `keys` will be non-`none` and contain keys of the referred
+ * JSON object inside the database iff `result == DBR_Success`;
+ * * `DBR_InvalidPointer` can be produced iff `pointer == none`;
+ * * `result == DBR_InvalidData` iff `pointer != none`, but does not
+ * point at a JSON object inside caller database
+ * (value can either not exist at all or have some other type).
+ */
+public function DBKeysTask GetDataKeys(JSONPointer pointer)
+{
+    //  Abstract stub: overridden by concrete database implementations
+    return none;
+}
+
+/**
+ * Schedules "incrementing" data, located at the given `pointer` in
+ * the caller database.
+ *
+ * "Incrementing" is an operation that is safe from the point of view of
+ * simultaneous access. What "incrementing" actually does depends on
+ * the passed JSON value (`increment` parameter):
+ * (0. Unless `pointer` points at the JSON null value - then "increment"
+ * acts as a `WriteData()` method regardless of `increment`'s value);
+ * 1. JSON null: it never modifies existing value and reports an error if
+ * existing value was not itself JSON null;
+ * 2. JSON bool: if combines with stored JSON bool value -
+ * performs logical "or" operation. Otherwise fails;
+ * 3. JSON number: if combines with stored JSON numeric value -
+ * adds values together. Otherwise fails.
+ * 4. JSON string: if combines with stored JSON string value -
+ * concatenates itself at the end. Otherwise fails.
+ * 5. JSON array: if combines with stored JSON array value -
+ * concatenates itself at the end. Otherwise fails.
+ * 6. JSON object: if combines with stored JSON object value -
+ * `increment` adds it's own values with new keys into the stored
+ * JSON object. Does not override old values.
+ * Fails when combined with any other type.
+ *
+ * @param pointer JSON pointer to the location in the database, where
+ * data should be incremented (by `increment`).
+ * `none` is always treated as an invalid JSON pointer.
+ * @param increment JSON-compatible value to be used as an increment for
+ * the data at the specified location inside the database.
+ * @return Task object that corresponds to this `IncrementData()` call.
+ * * Guaranteed to be not `none`;
+ * * Use it to connect a handler for when reading task is complete:
+ * `IncrementData(...).connect = handler`,
+ * where `handler` must have the following signature:
+ * `connect(DBQueryResult result)`.
+ * * Possible `DBQueryResult` types are `DBR_Success`,
+ * `DBR_InvalidPointer`, `DBR_InvalidData` and `DBR_InvalidDatabase`;
+ * * Data is actually incremented iff `result == DBR_Success`;
+ * * `DBR_InvalidPointer` can be produced if either `pointer == none` or
+ * container of the value `pointer` points at does not exist.
+ * Example: incrementing data at "/sub-object/valueA" will always fail
+ * if "sub-object" does not exist.
+ * * `result == DBR_InvalidData` iff `pointer != none`, but does not
+ * point at a JSON value compatible (in the sense of "increment"
+ * operation) with `increment` parameter.
+ */
+public function DBIncrementTask IncrementData(
+    JSONPointer pointer,
+    AcediaObject increment)
+{
+    //  Abstract stub: overridden by concrete database implementations
+    return none;
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Local/DBRecord.uc b/sources/Data/Database/Local/DBRecord.uc
new file mode 100644
index 0000000..00e8bae
--- /dev/null
+++ b/sources/Data/Database/Local/DBRecord.uc
@@ -0,0 +1,1160 @@
+/**
+ * This should be considered an internal class and a detail of
+ * implementation.
+ * This is a data object that is used to store JSON data inside
+ * Unreal Engine's save packages (see `GameInfo` class, starting from
+ * `CreateDataObject()` method).
+ * Auxiliary data object that can store either a JSON array or an object in
+ * the local Acedia database. It is supposed to be saved and loaded
+ * to / from packages.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBRecord extends Object
+ perobjectconfig
+ config(AcediaDB);
+
+/**
+ * # How can JSON information be stored in per-config-objects?
+ * Standard way to store information locally would be by simply recording
+ * it inside a config file. This is enough for almost anything.
+ * Even something like ServerPerks' player database is implemented with just
+ * a per-config-objects: since it just stores a particular data per player -
+ * it can do with simply creating one storage object per player.
+ * We, however, want to store an arbitrary JSON object inside our database
+ * that can contain any other kind of JSON data and not just player's
+ * numeric stats. With some additional work this again can also be done with
+ * per-config-objects. For example, if we want to store a JSON object inside
+ * another JSON object - we can create both of them separately, give them some
+ * different arbitrary names and then make the first one refer to the second
+ * one by it's given name.
+ * This way we can create a config object for each JSON array/object and
+ * then store it's data as an array of plain types (same as ServerPerks' one):
+ * null, boolean, number and string can be stored as is and other
+ * JSON arrays/objects can be stored by their references.
+ *
+ * # Why are we using data objects instead of per-object-configs?
+ * Despite everything described above, Acedia's local databases DO NOT use
+ * per-object-configs to store their data, opting for data objects and
+ * Unreal Engine's save packages instead.
+ * Data objects can be created, loaded and saved inside Unreal Engine's
+ * binary packages with methods available from `GameInfo` class (look them up
+ * starting from `CreateDataObject()` or browsing through
+ * [wiki](https://wiki.beyondunreal.com/Legacy:DataObject)).
+ * They can essentially act the same as per-object-configs, but have
+ * an advantage of allowing us to cheaply (execution time-wise) create/delete
+ * as many objects as we need and then update their package on the disk instead
+ * of calling `SaveConfig()` or `ClearConfig()` on them one-by-one. This both
+ * simplifies and speed up a bunch of necessary operations.
+ * They also seem to behave more predictably.
+ *
+ * # Some terminology
+ * Acedia's objects (representing JSON values) that are getting loaded
+ * into the `DBRecord`s are called "objects". We then refer to their
+ * representation within `DBRecord`s as "items". For example, this class has
+ * two methods for conversion between the two: `ConvertObjectToItem()` and
+ * `ConvertItemToObject()`.
+ * Most other methods are:
+ * 1. either methods that actually perform Acedia's database queries;
+ * 2. or methods that provide safe and easy access to the `DBRecord`'s
+ * items array (like making sure to remove unneeded data objects).
+ * All of the methods that perform database query rely on the
+ * `ConvertPointer()` method that take `JSONPointer` and convert it into
+ * internal pointer representation that immediately points at `DBRecord` that
+ * represents referred data (or contains it).
+ */
+
+// Name of the database package this object belongs to
+var private string package;
+// Does this record store a JSON array (`true`) or object (`false`)?
+var private bool isJSONArray;
+
+// `ToCollection()` and `EraseSelf()` methods make recursive calls on their
+// "sub-objects" (referred via name). If database was somehow damaged - a loop
+// of references can occur, leading to infinite recursive calls (which results
+// in a crash). These variable help to avoid that by preventing re-entry into
+// these methods for the same object.
+var private bool lockToCollection;
+var private bool lockEraseSelf;
+
+/**
+ * We pack as much information into the type of the record:
+ * whether it's 'null', 'boolean', 'number', 'string' or reference to another
+ * `DBRecord`.
+ * If it's 'boolean', then record value in the type
+ * (`DBAT_False` / `DBAT_True`), if `number` record whether it's `int` or
+ * `float`.
+ * While JSON does not distinguish between `int` and `float`, we still
+ * have to pick one of these type when transferring JSON numeric value into
+ * UnrealScript, plus it's easier for us to store it in one of these types.
+*/
+enum DBDataType
+{
+ DBAT_Null,
+ DBAT_False,
+ DBAT_True,
+ DBAT_Int,
+ DBAT_Float,
+ DBAT_String,
+ // We actually store the name of another `DBRecord` that represents either
+ // sub-array or sub-object.
+ DBAT_Reference
+};
+
+/**
+ * Store JSON array / object as a bunch of values.
+ * Which variable is used to store value depends on the type `t`.
+ */
+struct StorageItem
+{
+    // Determines whether variable's value is stored in `i`, `f` or `s`.
+    var DBDataType t;
+    // For JSON objects only (`isJSONArray == false`), stores the key of
+    // corresponding value.
+    var string k;
+    var int i;
+    var float f;
+    // For both `DBRecord` references and JSON strings
+    var string s;
+};
+// Items of the JSON array / object this record represents.
+// (`<StorageItem>` type argument restored here: it was lost to markup
+// stripping - a bare `array` does not compile in UnrealScript.)
+var private config array<StorageItem> storage;
+
+var private const int LATIN_LETTERS_AMOUNT;
+var private const int LOWER_A_CODEPOINT, UPPER_A_CODEPOINT;
+
+/**
+ * Since `DBRecord` represents JSON array or object, we can use
+ * JSON pointers to refer to any sub-value inside it.
+ * However, JSON pointers are not convenient or efficient enough for that,
+ * so internally we use this struct that provides quick and easy access to
+ * any sub-value.
+ */
+struct DBRecordPointer
+{
+ // `DBRecord` inside which referred value is directly stored.
+ // `record == none` automatically makes `DBRecordPointer` invalid.
+ var DBRecord record;
+ // Index in `record`'s `storage` variable that corresponds to
+ // referred (simple) value.
+ // Negative `index` values mean `record` itself is pointed at.
+ // To point at JSON array / object represented by a `DBRecord`, always set
+ // `record` to that record and `index` to negative value (e.g. `-1`).
+ var int index;
+};
+
+// A pointer is usable iff it refers to some record at all.
+private final function bool IsValidPointer(DBRecordPointer pointer)
+{
+    if (pointer.record == none) {
+        return false;
+    }
+    return true;
+}
+
+// Does `pointer` refer to a whole `DBRecord` (JSON array / object),
+// rather than to a simple item stored inside one?
+// Negative `index` is the convention for "the record itself".
+private final function bool IsPointerToRecord(DBRecordPointer pointer)
+{
+    return (IsValidPointer(pointer) && pointer.index < 0);
+}
+
+// Auxiliary method serving as a simple constructor for `DBRecordPointer`.
+private final function DBRecordPointer MakeRecordPointer(
+    DBRecord record,
+    optional int index)
+{
+    local DBRecordPointer result;
+    result.record = record;
+    result.index = index;
+    return result;
+}
+
+// Converts a full `JSONPointer` into internal `DBRecordPointer`
+// representation. `none` yields an invalid pointer.
+private final function DBRecordPointer ConvertPointer(JSONPointer jsonPointer)
+{
+    local DBRecordPointer invalidPointer;
+    if (jsonPointer != none) {
+        return ConvertPointerPath(jsonPointer, 0, jsonPointer.GetLength());
+    }
+    return invalidPointer; // blank struct: `record == none` means invalid
+}
+
+// Like `ConvertPointer()`, but resolves the *container* of the referred
+// value: the last pointer component is deliberately not consumed.
+// Result is invalidated unless it points at a whole record.
+private final function DBRecordPointer ConvertContainerPointer(
+    JSONPointer jsonPointer)
+{
+    local DBRecordPointer result;
+    if (jsonPointer != none)
+    {
+        result =
+            ConvertPointerPath(jsonPointer, 0, jsonPointer.GetLength() - 1);
+        if (!IsPointerToRecord(result)) {
+            result.record = none; // invalidate pointer
+        }
+    }
+    return result;
+}
+
+// Converts `JSONPointer` into internal `DBRecordPointer`.
+// Only uses sub-pointer: components from `startIndex` to `endIndex`.
+// Recursively descends through referred sub-records, consuming one
+// component per call; any failure produces an invalid pointer
+// (`record == none`).
+private final function DBRecordPointer ConvertPointerPath(
+    JSONPointer pointer,
+    int startIndex,
+    int endIndex)
+{
+    local int index;
+    local StorageItem nextElement;
+    local DBRecord nextRecord;
+    local string nextComponent;
+    if (pointer == none) {
+        return MakeRecordPointer(none);
+    }
+    // We are done! Pointer is exhausted, so caller record itself
+    // is the referred value (negative index = "record itself").
+    if (startIndex >= endIndex) {
+        return MakeRecordPointer(self, -1);
+    }
+    // Use first available to us component to find next sub-object
+    if (isJSONArray)
+    {
+        index = pointer.GetNumericComponent(startIndex);
+        if (index < 0 || index >= storage.length) {
+            return MakeRecordPointer(none); // fail: out-of-bounds index
+        }
+    }
+    else
+    {
+        nextComponent = __().text.ToString(pointer.GetComponent(startIndex));
+        index = FindItem(nextComponent);
+    }
+    if (index < 0) {
+        return MakeRecordPointer(none); // fail: missing key for component
+    }
+    nextElement = storage[index];
+    if (nextElement.t != DBAT_Reference)
+    {
+        // A simple (non-container) value may only be referred to by
+        // the very last pointer component
+        if (startIndex + 1 >= endIndex) {
+            return MakeRecordPointer(self, index);
+        }
+        // fail: found value cannot contain sub-values,
+        // but pointer is not exhausted
+        return MakeRecordPointer(none);
+    }
+    // Item stores the *name* of the sub-record - load it from our package
+    nextRecord = LoadRecordFor(nextElement.s, package);
+    if (nextRecord == none) {
+        return MakeRecordPointer(none); // fail: bad database
+    }
+    // Success for the component, do recursive call
+    startIndex += 1;
+    return nextRecord.ConvertPointerPath(pointer, startIndex, endIndex);
+}
+
+// Convenience accessor to Acedia's `Global` API hub (text, collections,
+// unreal APIs, etc.) - mirrors the `__()` shorthand used across Acedia.
+public static final function Global __()
+{
+    return class'Global'.static.GetInstance();
+}
+
+/**
+ * Creates a brand new `DBRecord` inside package `dbPackageName`,
+ * picking a unique randomized name for it.
+ *
+ * @param dbPackageName Name of the package new `DBRecord` must belong to.
+ * @return Newly created `DBRecord` from the specified package;
+ * `none` iff `dbPackageName == none`.
+ */
+public final static function DBRecord NewRecord(Text dbPackageName)
+{
+    if (dbPackageName != none) {
+        return NewRecordFor(dbPackageName.ToPlainString());
+    }
+    return none;
+}
+
+// Auxiliary method that does what `NewRecord()` does, but for `string`
+// parameter. This makes it cheaper to call for internal use.
+private final static function DBRecord NewRecordFor(string dbPackageName)
+{
+    local string nextName;
+    local DBRecord recordCandidate;
+    // Try to generate new random name.
+    // This cycle can in theory be infinite. However in practice it will
+    // only run for one iteration (unless user messed with settings and
+    // set length of randomized names too low), since by default there is
+    // 26^20 == 19,928,148,895,209,409,152,340,197,376 different
+    // random names and the chance of duplicate in infinitesimal.
+    while (true)
+    {
+        nextName = GetRandomName();
+        // Collision check: non-`none` result means a data object with
+        // this name already exists in the package - try another name
+        recordCandidate = LoadRecordFor(nextName, dbPackageName);
+        if (recordCandidate != none) {
+            continue;
+        }
+        recordCandidate = __().unreal.GetGameType()
+            .CreateDataObject(class'DBRecord', nextName, dbPackageName);
+        // Remember own package name: instance methods use it later to
+        // load / delete sub-records
+        recordCandidate.package = dbPackageName;
+        return recordCandidate;
+    }
+    // We cannot actually reach here; return only satisfies the compiler
+    return none;
+}
+
+// Loads existing `DBRecord` named `recordName` from the package named
+// `dbPackageName`. Returns `none` if either argument is `none` or
+// no such data object exists.
+public final static function DBRecord LoadRecord(
+    Text recordName,
+    Text dbPackageName)
+{
+    if (recordName == none || dbPackageName == none) {
+        return none;
+    }
+    return LoadRecordFor(
+        recordName.ToPlainString(),
+        dbPackageName.ToPlainString());
+}
+
+// Auxiliary method that does what `LoadRecord()` does, but for `string`
+// parameter. This makes it cheaper to call for internal use.
+// Returns `none` when no data object with that name exists in `package`.
+private final static function DBRecord LoadRecordFor(
+    string name,
+    string package)
+{
+    return __().unreal.GetGameType()
+        .LoadDataObject(class'DBRecord', name, package);
+}
+
+// Generates a random name (sequence of lower-case latin letters) for
+// a new `DBRecord`; length is taken from `LocalDBSettings`.
+private final static function string GetRandomName()
+{
+    local int lettersLeft;
+    local string result;
+    // Guard against nonsensical (non-positive) configured length
+    lettersLeft = Max(1, class'LocalDBSettings'.default.randomNameLength);
+    while (lettersLeft > 0)
+    {
+        result = result $ GetRandomLetter();
+        lettersLeft -= 1;
+    }
+    return result;
+}
+
+// Returns one random lower-case latin letter ('a'-'z').
+// NOTE(review): `UPPER_A_CODEPOINT` is declared in defaultproperties but
+// never used - names are generated in lower case only.
+private final static function string GetRandomLetter()
+{
+    return Chr(Rand(default.LATIN_LETTERS_AMOUNT) + default.LOWER_A_CODEPOINT);
+}
+
+/**
+ * Loads Acedia's representation of JSON value stored at `pointer` inside
+ * the JSON object/array represented by the caller `DBRecord`.
+ *
+ * @param jsonPointer JSON pointer to the value to load
+ * (either simple, array or object one).
+ * @param result Loaded value will be recorded inside this variable.
+ * Set to `none` on failure.
+ * @param makeMutable `false` to produce immutable "boxes" (and `Text`
+ * for JSON strings), `true` to produce mutable "references"
+ * (`MutableText` for JSON strings).
+ * @return `true` iff the JSON value was successfully loaded. Fails when
+ * `pointer` is `none` or does not point at any existing value.
+ */
+public final function bool LoadObject(
+    JSONPointer jsonPointer,
+    out AcediaObject result,
+    bool makeMutable)
+{
+    local DBRecordPointer pointer;
+    if (jsonPointer == none) {
+        return false;
+    }
+    pointer = ConvertPointer(jsonPointer);
+    if (!IsValidPointer(pointer)) {
+        return false;
+    }
+    if (IsPointerToRecord(pointer))
+    {
+        // Whole sub-array / sub-object is referred - unpack the record
+        result = pointer.record.ToCollection(makeMutable);
+    }
+    else
+    {
+        // Simple value stored inside `pointer.record`'s items
+        result = ConvertItemToObject(
+            pointer.record.GetItem(pointer.index), makeMutable);
+    }
+    return true;
+}
+
+/**
+ * Saves Acedia's representation of JSON value at a `pointer` inside
+ * the JSON object/array represented by the caller `DBRecord`.
+ *
+ * @param jsonPointer JSON pointer to location at which to save the value.
+ * Only the last segment of the path will be created (if missing), the rest
+ * must already exist and will not be automatically created.
+ * If another value is already recorded at `pointer` - it will be erased.
+ * @param newItem New value to save at `pointer` inside
+ * the caller `DBRecord`.
+ * @return `true` if method successfully saved new JSON value and
+ * `false` otherwise. Failure can happen if passed `pointer` is invalid
+ * (either missing some necessary segments or is equal to `none`).
+ */
+public final function bool SaveObject(
+    JSONPointer jsonPointer,
+    AcediaObject newItem)
+{
+    local int index;
+    local string itemKey;
+    local DBRecord directContainer;
+    local Collection newItemAsCollection;
+    local DBRecordPointer pointer;
+    if (jsonPointer == none) {
+        return false;
+    }
+    if (jsonPointer.IsEmpty())
+    {
+        // Special case - rewriting caller `DBRecord` itself;
+        // only a whole collection may replace a record's contents
+        newItemAsCollection = Collection(newItem);
+        if (newItemAsCollection == none) {
+            return false;
+        }
+        // Erase old contents first (recursively deletes sub-records)
+        EmptySelf();
+        isJSONArray = (newItemAsCollection.class == class'DynamicArray');
+        FromCollection(newItemAsCollection);
+        return true;
+    }
+    pointer = ConvertContainerPointer(jsonPointer);
+    if (!IsValidPointer(pointer)) {
+        return false;
+    }
+    directContainer = pointer.record;
+    if (directContainer.isJSONArray)
+    {
+        // `PopNumeric(true)` consumes the last component of `jsonPointer`;
+        // safe here since `ConvertContainerPointer()` already resolved
+        // everything before it
+        index = jsonPointer.PopNumeric(true);
+        if (index < 0) {
+            return false;
+        }
+    }
+    else
+    {
+        itemKey = __().text.ToString(jsonPointer.Pop(true));
+        // `-1` (key not found) makes `SetItem()` append a new item
+        index = directContainer.FindItem(itemKey);
+    }
+    directContainer.SetItem(index, ConvertObjectToItem(newItem), itemKey);
+    return true;
+}
+
+/**
+ * Removes Acedia's values stored in the database at `pointer` inside
+ * the JSON object/array represented by the caller `DBRecord`.
+ *
+ * @param jsonPointer JSON pointer to the value to remove
+ * (either simple, array or object one).
+ * @return `true` iff the JSON value was successfully removed. Fails when
+ * `pointer` is `none` or does not point at any existing value.
+ */
+public final function bool RemoveObject(JSONPointer jsonPointer)
+{
+    local int targetIndex;
+    local string targetKey;
+    local DBRecord container;
+    local DBRecordPointer containerPointer;
+    if (jsonPointer == none) {
+        return false;
+    }
+    containerPointer = ConvertContainerPointer(jsonPointer);
+    if (!IsValidPointer(containerPointer)) {
+        return false;
+    }
+    container = containerPointer.record;
+    // The last pointer component names the value inside `container`;
+    // `Pop()` / `PopNumeric()` consume it from `jsonPointer`
+    if (container.isJSONArray) {
+        targetIndex = jsonPointer.PopNumeric(true);
+    }
+    else
+    {
+        targetKey = __().text.ToString(jsonPointer.Pop(true));
+        targetIndex = container.FindItem(targetKey);
+    }
+    if (targetIndex < 0) {
+        return false;
+    }
+    container.RemoveItem(targetIndex);
+    return true;
+}
+
+/**
+ * Checks type of the JSON value stored at `pointer` inside
+ * the JSON object/array represented by the caller `DBRecord`.
+ *
+ * @param jsonPointer JSON pointer to the value for which type
+ * should be checked.
+ * @return `LocalDatabaseInstance.DataType` corresponding to the type of
+ * the referred value.
+ * `JSON_Undefined` if value is missing or passed pointer is invalid.
+ */
+public final function LocalDatabaseInstance.DataType GetObjectType(
+    JSONPointer jsonPointer)
+{
+    local DBDataType itemType;
+    local DBRecordPointer pointer;
+    if (jsonPointer == none) {
+        return JSON_Undefined;
+    }
+    pointer = ConvertPointer(jsonPointer);
+    if (!IsValidPointer(pointer)) {
+        return JSON_Undefined;
+    }
+    if (IsPointerToRecord(pointer))
+    {
+        // Referred value is a whole record: either array or object
+        if (pointer.record.isJSONArray) {
+            return JSON_Array;
+        }
+        return JSON_Object;
+    }
+    itemType = pointer.record.GetItem(pointer.index).t;
+    if (itemType == DBAT_Null) {
+        return JSON_Null;
+    }
+    if (itemType == DBAT_False || itemType == DBAT_True) {
+        return JSON_Boolean;
+    }
+    if (itemType == DBAT_Int || itemType == DBAT_Float) {
+        return JSON_Number;
+    }
+    if (itemType == DBAT_String) {
+        return JSON_String;
+    }
+    // `DBAT_Reference` items are resolved into record pointers by
+    // `ConvertPointerPath()`, so this is just a safe fallback
+    return JSON_Undefined;
+}
+
+/**
+ * Returns "size" of the JSON value stored at `pointer` inside
+ * the JSON object/array represented by the caller `DBRecord`.
+ *
+ * Size is only defined for JSON arrays and objects (amount of their
+ * stored elements); for every other value `-1` is returned.
+ *
+ * @param jsonPointer JSON pointer to the value for which method should
+ * return size.
+ * @return Amount of elements if `pointer` refers to a JSON array or
+ * object; `-1` otherwise (including invalid / `none` pointers).
+ */
+public final function int GetObjectSize(JSONPointer jsonPointer)
+{
+    local DBRecordPointer pointer;
+    if (jsonPointer != none)
+    {
+        pointer = ConvertPointer(jsonPointer);
+        if (IsPointerToRecord(pointer)) {
+            return pointer.record.GetStorageLength();
+        }
+    }
+    return -1;
+}
+
+/**
+ * Returns keys of the JSON object stored at `pointer` inside
+ * the JSON object/array represented by the caller `DBRecord`.
+ *
+ * @param jsonPointer JSON pointer to the JSON object for which method
+ * should return available keys.
+ * @return If `pointer` refers to the JSON object - all available keys.
+ * `none` otherwise (including case of JSON arrays).
+ */
+public final function DynamicArray GetObjectKeys(JSONPointer jsonPointer)
+{
+    local int i;
+    local TextAPI api;
+    local DynamicArray resultKeys;
+    // `<StorageItem>` type argument restored: it was lost to markup
+    // stripping and a bare `array` does not compile
+    local array<StorageItem> items;
+    local DBRecord referredObject;
+    local DBRecordPointer pointer;
+    if (jsonPointer == none) {
+        return none;
+    }
+    pointer = ConvertPointer(jsonPointer);
+    if (!IsValidPointer(pointer) || !IsPointerToRecord(pointer)) {
+        return none;
+    }
+    referredObject = pointer.record;
+    // JSON arrays have no keys
+    if (referredObject.isJSONArray) {
+        return none;
+    }
+    api = __().text;
+    resultKeys = __().collections.EmptyDynamicArray();
+    // Copy `storage` once instead of repeatedly accessing another
+    // object's array
+    items = referredObject.storage;
+    for (i = 0; i < items.length; i += 1) {
+        resultKeys.AddItem(api.FromString(items[i].k));
+    }
+    return resultKeys;
+}
+
+/**
+ * Increments JSON value at a `pointer` inside the JSON object/array
+ * represented by the caller `DBRecord` by a given Acedia's value.
+ *
+ * For "increment" operation description refer to `Database.IncrementData()`.
+ *
+ * @param jsonPointer JSON pointer to location at which to save the value.
+ * Only the last segment of the path might be created (if missing),
+ * the rest must already exist and will not be automatically created.
+ * If another value is already recorded at `pointer` - it will be erased.
+ * @param object Value by which to increment another value, stored at
+ * `pointer` inside the caller `DBRecord`.
+ * @return Returns query result that is appropriate for "increment" operation,
+ * according to `Database.IncrementData()` specification.
+ */
+public final function Database.DBQueryResult IncrementObject(
+    JSONPointer jsonPointer,
+    AcediaObject object)
+{
+    local int index;
+    local string itemKey;
+    local DBRecord directContainer;
+    local AssociativeArray objectAsAssociativeArray;
+    local DBRecordPointer pointer;
+    if (jsonPointer == none) {
+        return DBR_InvalidPointer;
+    }
+    if (jsonPointer.IsEmpty())
+    {
+        // Special case - incrementing caller `DBRecord` itself.
+        // NOTE(review): only `AssociativeArray` is accepted here, which
+        // assumes the record being incremented at root is always a JSON
+        // object - confirm array-valued roots are impossible.
+        objectAsAssociativeArray = AssociativeArray(object);
+        if (objectAsAssociativeArray == none) {
+            return DBR_InvalidData;
+        }
+        FromCollection(objectAsAssociativeArray);
+        return DBR_Success;
+    }
+    // All the work will be done by the separate `IncrementItem()` method;
+    // But it is applied to the `DBRecord` that contains referred item,
+    // so we have to find it.
+    pointer = ConvertContainerPointer(jsonPointer);
+    if (!IsValidPointer(pointer)) {
+        return DBR_InvalidPointer;
+    }
+    directContainer = pointer.record;
+    if (directContainer.isJSONArray)
+    {
+        index = jsonPointer.PopNumeric(true);
+        if (index < 0) {
+            return DBR_InvalidPointer;
+        }
+    }
+    else
+    {
+        // `-1` (missing key) makes `IncrementItem()` append a new item
+        itemKey = __().text.ToString(jsonPointer.Pop(true));
+        index = directContainer.FindItem(itemKey);
+    }
+    if (directContainer.IncrementItem(index, object, itemKey)) {
+        return DBR_Success;
+    }
+    return DBR_InvalidData;
+}
+
+// Safe accessor for `storage` items: out-of-bounds indices yield a blank
+// (`DBAT_Null`) item instead of crashing.
+private final function StorageItem GetItem(int index)
+{
+    local StorageItem emptyResult;
+    if (index >= 0 && index < storage.length) {
+        return storage[index];
+    }
+    return emptyResult;
+}
+
+// Negative `index` means that value will need to be appended to the end
+// of the `storage`.
+// Optionally lets you specify item's key (via `itemName`) for
+// JSON objects.
+private final function SetItem(
+    int index,
+    StorageItem newItem,
+    optional string itemName)
+{
+    local DBRecord oldRecord;
+    local StorageItem oldItem;
+    if (index < 0) {
+        index = storage.length;
+    }
+    if (index < storage.length)
+    {
+        // Clean up old value: if it referred to a sub-record, that whole
+        // sub-record must be recursively erased and its data object
+        // deleted from the package, or it would leak into the saved file
+        oldItem = storage[index];
+        if (oldItem.t == DBAT_Reference)
+        {
+            oldRecord = LoadRecordFor(oldItem.s, package);
+            if (oldRecord != none) {
+                oldRecord.EmptySelf();
+            }
+            __().unreal.GetGameType()
+                .DeleteDataObject(class'DBRecord', oldItem.s, package);
+        }
+    }
+    // NOTE(review): `index` past `storage.length` grows the array, leaving
+    // blank (`DBAT_Null`) items in the gap - confirm callers rely on this
+    storage[index] = newItem;
+    storage[index].k = itemName;
+}
+
+// Auxiliary getter that helps us avoid referring to `storage` array
+// directly from `DBRecord` reference, which would cause unnecessary copying
+// of it's data (UnrealScript copies whole dynamic arrays on such access).
+private final function int GetStorageLength()
+{
+    return storage.length;
+}
+
+// Auxiliary method for removing items from `storage` array that helps us
+// avoid referring to it directly from `DBRecord` reference, which would
+// cause unnecessary copying of it's data.
+// Referenced sub-records are recursively erased and deleted from
+// the package before the item itself is removed.
+private final function RemoveItem(int index)
+{
+    local DBRecord referredRecord;
+    local StorageItem removedItem;
+    if (index < 0 || index >= storage.length) {
+        return;
+    }
+    removedItem = storage[index];
+    if (removedItem.t == DBAT_Reference)
+    {
+        referredRecord = LoadRecordFor(removedItem.s, package);
+        if (referredRecord != none) {
+            referredRecord.EmptySelf();
+        }
+        __().unreal.GetGameType()
+            .DeleteDataObject(class'DBRecord', removedItem.s, package);
+    }
+    storage.Remove(index, 1);
+}
+
+// Finds index of the item with key `itemName`.
+// Returns `-1` when the key is missing or when the caller record is
+// a JSON array (arrays have no keys).
+private final function int FindItem(string itemName)
+{
+    local int i;
+    if (isJSONArray) {
+        return -1;
+    }
+    i = 0;
+    while (i < storage.length)
+    {
+        if (storage[i].k == itemName) {
+            return i;
+        }
+        i += 1;
+    }
+    return -1;
+}
+
+// Negative `index` means that `object` value needs to be appended to the
+// end of the `storage`, instead of incrementing an existing value.
+// Returns `true` if changes were successfully made and `false` otherwise.
+private final function bool IncrementItem(
+    int index,
+    AcediaObject object,
+    optional string itemName)
+{
+    local StorageItem itemToIncrement;
+    if (index < 0)
+    {
+        index = storage.length;
+        // `itemToIncrement` is blank at this point and has type `DBAT_Null`,
+        // which will simply be rewritten by `IncrementItemByObject()`
+        // call later
+        storage[index] = itemToIncrement;
+    }
+    else if (index < storage.length) {
+        itemToIncrement = storage[index];
+    }
+    // NOTE(review): `index >= storage.length` also reaches here with
+    // a blank `DBAT_Null` item and grows the array on success - confirm
+    // this sparse-growth behavior is intended for JSON arrays
+    if (IncrementItemByObject(itemToIncrement, object))
+    {
+        storage[index] = itemToIncrement;
+        storage[index].k = itemName;
+        return true;
+    }
+    return false;
+}
+
+/**
+ * Extracts JSON object or array data from caller `DBRecord` as either
+ * `AssociativeArray` (for JSON objects) or `DynamicArray` (for JSON arrays).
+ *
+ * Type conversion rules in immutable case:
+ * 1. 'null' -> `none`;
+ * 2. 'boolean' -> `BoolBox`;
+ * 3. 'number' -> either `IntBox` or `FloatBox`, depending on
+ * what seems to fit better;
+ * 4. 'string' -> `Text`;
+ * 5. 'array' -> `DynamicArray`;
+ * 6. 'object' -> `AssociativeArray`.
+ *
+ * Type conversion rules in mutable case:
+ * 1. 'null' -> `none`;
+ * 2. 'boolean' -> `BoolRef`;
+ * 3. 'number' -> either `IntRef` or `FloatRef`, depending on
+ * what seems to fit better;
+ * 4. 'string' -> `MutableText`;
+ * 5. 'array' -> `DynamicArray`;
+ * 6. 'object' -> `AssociativeArray`.
+ *
+ * @param makeMutable `false` if you want this method to produce
+ * immutable types and `true` otherwise.
+ * @return `AssociativeArray` if caller `DBRecord` represents a JSON object
+ * and `DynamicArray` if it represents JSON array.
+ * Returned collection must have all of it's keys deallocated before being
+ * discarded.
+ * `none` iff caller `DBRecord` was not initialized as either.
+ */
+public final function Collection ToCollection(bool makeMutable)
+{
+    local Collection result;
+    // Re-entrancy guard: reference loops in a damaged database would
+    // otherwise cause infinite recursion (see class variable docs)
+    if (lockToCollection) {
+        return none;
+    }
+    lockToCollection = true;
+    if (isJSONArray) {
+        result = ToDynamicArray(makeMutable);
+    }
+    else {
+        result = ToAssociativeArray(makeMutable);
+    }
+    lockToCollection = false;
+    return result;
+}
+
+// Does not do any validation check, assumes caller `DBRecord`
+// represents an array.
+private final function Collection ToDynamicArray(bool makeMutable)
+{
+    local int itemIndex;
+    local DynamicArray result;
+    result = __().collections.EmptyDynamicArray();
+    for (itemIndex = 0; itemIndex < storage.length; itemIndex += 1)
+    {
+        result.AddItem(ConvertItemToObject(storage[itemIndex], makeMutable));
+    }
+    return result;
+}
+
+// Does not do any validation check, assumes caller `DBRecord`
+// represents an object.
+private final function Collection ToAssociativeArray(bool makeMutable)
+{
+    local int itemIndex;
+    local AssociativeArray result;
+    result = __().collections.EmptyAssociativeArray();
+    for (itemIndex = 0; itemIndex < storage.length; itemIndex += 1)
+    {
+        result.SetItem(
+            __().text.FromString(storage[itemIndex].k),
+            ConvertItemToObject(storage[itemIndex], makeMutable));
+    }
+    return result;
+}
+
+/**
+ * Completely erases all data inside a caller `DBRecord`, recursively deleting
+ * all referred `DBRecord`.
+ */
+public final function EmptySelf()
+{
+    local int i;
+    local GameInfo game;
+    local DBRecord subRecord;
+    // Re-entrancy guard: reference loops in a damaged database would
+    // otherwise cause infinite recursion (see class variable docs)
+    if (lockEraseSelf) {
+        return;
+    }
+    lockEraseSelf = true;
+    game = __().unreal.GetGameType();
+    for (i = 0; i < storage.length; i += 1)
+    {
+        if (storage[i].t != DBAT_Reference) continue;
+        subRecord = LoadRecordFor(storage[i].s, package);
+        if (subRecord == none) continue;
+
+        // First recursively empty the sub-record,
+        // then delete its data object from the package
+        subRecord.EmptySelf();
+        game.DeleteDataObject(class'DBRecord', string(subRecord.name), package);
+    }
+    storage.length = 0;
+    lockEraseSelf = false;
+}
+
+/**
+ * Takes all available values from `source` and records them into caller
+ * `DBRecord`. Does not erase untouched old values. For JSON objects,
+ * keys that already exist in the caller `DBRecord` keep their old values:
+ * `FromAssociativeArray()` only adds values under new keys, which matches
+ * the "increment" operation's no-override rule.
+ *
+ * Can only convert items in passed collection that return `true` for
+ * `_.json.IsCompatible()` check. Any other values will be treated as `none`.
+ *
+ * Only works as long as caller `DBRecord` has the same container type as
+ * `source`. `isJSONArray` iff `source.class == class'DynamicArray` and
+ * `!isJSONArray` iff `source.class == class'AssociativeArray`.
+ * On container type mismatch this method silently does nothing.
+ *
+ * Values that cannot be converted into JSON will be replaced with `none`.
+ *
+ * @param source `Collection` to write into the caller `DBRecord`.
+ */
+public final function FromCollection(Collection source)
+{
+    local DynamicArray asDynamicArray;
+    local AssociativeArray asAssociativeArray;
+    asDynamicArray = DynamicArray(source);
+    asAssociativeArray = AssociativeArray(source);
+    if (asDynamicArray != none && isJSONArray) {
+        FromDynamicArray(asDynamicArray);
+    }
+    if (asAssociativeArray != none && !isJSONArray) {
+        FromAssociativeArray(asAssociativeArray);
+    }
+}
+
+// Does not do any validation check; appends every element of `source`
+// to the end of `storage`.
+private final function FromDynamicArray(DynamicArray source)
+{
+    local int i, sourceLength;
+    sourceLength = source.GetLength();
+    for (i = 0; i < sourceLength; i += 1) {
+        storage[storage.length] = ConvertObjectToItem(source.GetItem(i));
+    }
+}
+
+// Does not do any validation check.
+// Only adds values under keys that were not already present in `storage`;
+// existing keys keep their old values.
+private final function FromAssociativeArray(AssociativeArray source)
+{
+    local int i, originalStorageLength;
+    local Iter iter;
+    local string nextKey;
+    local bool isNewKey;
+    originalStorageLength = storage.length;
+    for (iter = source.Iterate(); !iter.HasFinished(); iter.Next())
+    {
+        if (iter.GetKey() == none) {
+            continue;
+        }
+        // NOTE(review): assumes every key is a `Text` - a non-`Text` key
+        // would make `Text(...)` evaluate to `none` and crash on
+        // `ToPlainString()`; confirm `AssociativeArray` guarantees
+        // `Text` keys
+        nextKey = Text(iter.GetKey()).ToPlainString();
+        isNewKey = true;
+        // Only scan items that existed before this call, so values added
+        // within this very loop are not compared against
+        for (i = 0; i < originalStorageLength; i += 1)
+        {
+            if (storage[i].k == nextKey)
+            {
+                isNewKey = false;
+                break;
+            }
+        }
+        if (isNewKey) {
+            SetItem(storage.length, ConvertObjectToItem(iter.Get()), nextKey);
+        }
+    }
+}
+
+// Converts `AcediaObject` into it's internal representation.
+// Casts are checked in order and are mutually exclusive; any object that
+// matches none of them produces a blank (`DBAT_Null`) item.
+private final function StorageItem ConvertObjectToItem(AcediaObject data)
+{
+    local StorageItem result;
+    local DBRecord newDBRecord;
+    if (Text(data) != none)
+    {
+        result.t = DBAT_String;
+        result.s = Text(data).ToPlainString();
+    }
+    else if(Collection(data) != none)
+    {
+        // Collections become separate `DBRecord`s in the same package,
+        // referred to by their generated object name
+        result.t = DBAT_Reference;
+        newDBRecord = NewRecordFor(package);
+        newDBRecord.isJSONArray = (data.class == class'DynamicArray');
+        newDBRecord.FromCollection(Collection(data));
+        result.s = string(newDBRecord.name);
+    }
+    else if (FloatBox(data) != none || FloatRef(data) != none)
+    {
+        result.t = DBAT_Float;
+        if (FloatBox(data) != none) {
+            result.f = FloatBox(data).Get();
+        }
+        else {
+            result.f = FloatRef(data).Get();
+        }
+    }
+    else if (IntBox(data) != none || IntRef(data) != none)
+    {
+        result.t = DBAT_Int;
+        if (IntBox(data) != none) {
+            result.i = IntBox(data).Get();
+        }
+        else {
+            result.i = IntRef(data).Get();
+        }
+    }
+    else if (BoolBox(data) != none || BoolRef(data) != none)
+    {
+        // Default to `DBAT_False`, upgrade to `DBAT_True` if value is set
+        result.t = DBAT_False;
+        if (BoolBox(data) != none && BoolBox(data).Get()) {
+            result.t = DBAT_True;
+        }
+        if (BoolRef(data) != none && BoolRef(data).Get()) {
+            result.t = DBAT_True;
+        }
+    }
+    return result;
+}
+
+// Converts internal data representation into `AcediaObject`.
+// `makeMutable` selects between box (immutable) and ref (mutable)
+// wrappers; `DBAT_Null` and broken references map to `none`.
+private final function AcediaObject ConvertItemToObject(
+    StorageItem item,
+    bool makeMutable)
+{
+    local DBRecord subRecord;
+    if (item.t == DBAT_False || item.t == DBAT_True)
+    {
+        if (makeMutable) {
+            return __().ref.bool(item.t == DBAT_True);
+        }
+        return __().box.bool(item.t == DBAT_True);
+    }
+    if (item.t == DBAT_Int)
+    {
+        if (makeMutable) {
+            return __().ref.int(item.i);
+        }
+        return __().box.int(item.i);
+    }
+    if (item.t == DBAT_Float)
+    {
+        if (makeMutable) {
+            return __().ref.float(item.f);
+        }
+        return __().box.float(item.f);
+    }
+    if (item.t == DBAT_String)
+    {
+        if (makeMutable) {
+            return __().text.FromStringM(item.s);
+        }
+        return __().text.FromString(item.s);
+    }
+    if (item.t == DBAT_Reference)
+    {
+        // `s` stores the referred sub-record's name; unpack it whole
+        subRecord = LoadRecordFor(item.s, package);
+        if (subRecord != none) {
+            return subRecord.ToCollection(makeMutable);
+        }
+    }
+    return none;
+}
+
+// "Increments" internal data representation by value inside given
+// `AcediaObject`.
+// See `IncrementObject()` method for details.
+private final function bool IncrementItemByObject(
+ out StorageItem item,
+ AcediaObject object)
+{
+ local DBRecord itemRecord;
+ if (object == none) {
+ return (item.t == DBAT_Null);
+ }
+ if (item.t == DBAT_Null)
+ {
+ item = ConvertObjectToItem(object);
+ return true;
+ }
+ else if (item.t == DBAT_String && Text(object) != none)
+ {
+ item.s $= Text(object).ToPlainString();
+ return true;
+ }
+ else if(item.t == DBAT_Reference && Collection(object) != none)
+ {
+ itemRecord = LoadRecordFor(item.s, package);
+ if (itemRecord == none)
+ {
+ itemRecord = NewRecordFor(package); // DB was broken somehow
+ item.s = string(itemRecord.name);
+ itemRecord.isJSONArray = (object.class == class'DynamicArray');
+ }
+ if ( (itemRecord.isJSONArray && object.class != class'DynamicArray')
+ || ( !itemRecord.isJSONArray
+ && object.class != class'AssociativeArray'))
+ {
+ return false;
+ }
+ itemRecord.FromCollection(Collection(object));
+ return true;
+ }
+ else if ( (item.t == DBAT_False || item.t == DBAT_True)
+ && (BoolBox(object) != none || BoolRef(object) != none))
+ {
+ if (BoolBox(object) != none && BoolBox(object).Get()) {
+ item.t = DBAT_True;
+ }
+ if (BoolRef(object) != none && BoolRef(object).Get()) {
+ item.t = DBAT_True;
+ }
+ return true;
+ }
+ return IncrementNumericItemByObject(item, object);
+}
+
+// "Increments" a stored numeric item (`DBAT_Int` / `DBAT_Float`) by
+// a numeric Acedia object. Returns `false` if either side is not numeric.
+private final function bool IncrementNumericItemByObject(
+    out StorageItem item,
+    AcediaObject object)
+{
+    local int storedValueAsInteger, incrementAsInteger;
+    local float storedValueAsFloat, incrementAsFloat;
+    if (item.t != DBAT_Float && item.t != DBAT_Int) {
+        return false;
+    }
+    if (!ReadNumericObjectInto(object, incrementAsInteger, incrementAsFloat)) {
+        return false;
+    }
+    // Read stored value in both representations;
+    // NOTE: `int(item.f)` truncates any fractional part
+    if (item.t == DBAT_Float)
+    {
+        storedValueAsInteger = int(item.f);
+        storedValueAsFloat = item.f;
+    }
+    else
+    {
+        storedValueAsInteger = item.i;
+        storedValueAsFloat = float(item.i);
+    }
+    // Later we want to implement arbitrary precision arithmetic for storage,
+    // but for now let's just assume that if either value is a float -
+    // then user wants a float precision.
+    if ( item.t == DBAT_Float || FloatBox(object) != none
+        || FloatRef(object) != none)
+    {
+        item.t = DBAT_Float;
+        item.f = storedValueAsFloat + incrementAsFloat;
+        item.i = 0; // zero the unused representation
+    }
+    else
+    {
+        item.t = DBAT_Int;
+        item.i = storedValueAsInteger + incrementAsInteger;
+        item.f = 0; // zero the unused representation
+    }
+    return true;
+}
+
+// Reads a numeric Acedia object (`IntBox` / `IntRef` / `FloatBox` /
+// `FloatRef`) into both `int` and `float` output parameters.
+// Returns `false` (leaving outputs untouched) for any other object.
+private final function bool ReadNumericObjectInto(
+    AcediaObject object,
+    out int valueAsInt,
+    out float valueAsFloat)
+{
+    if (IntBox(object) != none)
+    {
+        valueAsInt = IntBox(object).Get();
+        valueAsFloat = float(valueAsInt);
+        return true;
+    }
+    if (IntRef(object) != none)
+    {
+        valueAsInt = IntRef(object).Get();
+        valueAsFloat = float(valueAsInt);
+        return true;
+    }
+    if (FloatBox(object) != none)
+    {
+        valueAsFloat = FloatBox(object).Get();
+        valueAsInt = int(valueAsFloat);
+        return true;
+    }
+    if (FloatRef(object) != none)
+    {
+        valueAsFloat = FloatRef(object).Get();
+        valueAsInt = int(valueAsFloat);
+        return true;
+    }
+    return false;
+}
+
+// TODO(review): add support for storing byte values
+defaultproperties
+{
+    LATIN_LETTERS_AMOUNT = 26
+    LOWER_A_CODEPOINT = 97
+    UPPER_A_CODEPOINT = 65
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Local/LocalDBSettings.uc b/sources/Data/Database/Local/LocalDBSettings.uc
new file mode 100644
index 0000000..b71ecbb
--- /dev/null
+++ b/sources/Data/Database/Local/LocalDBSettings.uc
@@ -0,0 +1,53 @@
+/**
+ * Object for storing settings for the local databases. There is no need
+ * to allocate its instances.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class LocalDBSettings extends AcediaObject
+    config(AcediaSystem);
+
+// Acedia's local database stores its JSON objects and arrays as
+// named data objects inside its package file.
+// Every object in a package must have a unique name, but neither
+// a JSON object/array's own name nor its path can be used, since they can
+// contain characters unusable for a data object's name.
+// That's why Acedia generates a random name for every object that consists
+// of a sequence of latin letters. This value defines how many letters this
+// sequence must contain. With default value of 20 letters it provides database
+// with an ability to store up to
+// 26^20 ~= 19,928,148,895,209,409,152,340,197,376
+// different names, while also reducing probability of name collision for
+// newly created objects to zero.
+// There is really no need to modify this value and reducing it might
+// lead to issues with the database, so do not do it unless there is a really
+// good reason for it.
+var config public const int randomNameLength;
+// Delay (in seconds) between consecutive writes of the database's
+// content to the disk.
+// Setting this value too low can cause loss of performance, while setting
+// it too high might cause some of the data to not be recorded and to get lost
+// on a crash.
+// This delay is ignored in special circumstances: when a database object is
+// forcefully destroyed (and upon level end).
+var config public const float writeToDiskDelay;
+
+defaultproperties
+{
+    randomNameLength = 20
+    writeToDiskDelay = 10.0
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Local/LocalDatabase.uc b/sources/Data/Database/Local/LocalDatabase.uc
new file mode 100644
index 0000000..b3e391e
--- /dev/null
+++ b/sources/Data/Database/Local/LocalDatabase.uc
@@ -0,0 +1,100 @@
+/**
+ * This class IS NOT an implementation for `Database` interface and
+ * simply exists to store config information about some local database.
+ * Name is chosen to make user configs more readable.
+ * This class is considered an internal object and should only be referred
+ * to inside AcediaCore package.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class LocalDatabase extends AcediaObject
+    perobjectconfig
+    config(AcediaDB);
+
+var config private string root; // name of the `DBRecord` that serves as this database's root object
+
+public final function Text GetPackageName() // name of the package file this database is stored in (same as this object's name)
+{
+    return __().text.FromString(string(name));
+}
+
+public final function bool HasDefinedRoot() // `true` iff this config entry already refers to a root record
+{
+    return root != "";
+}
+
+public final function Text GetRootName() // name of the root `DBRecord`; empty `Text` if root is not yet defined
+{
+    return __().text.FromString(root);
+}
+
+/**
+ * Changes caller's root name.
+ *
+ * Only makes changes if root is not already defined.
+ */
+public final function SetRootName(Text rootName)
+{
+    if (HasDefinedRoot()) {
+        return;
+    }
+    if (rootName != none) {
+        root = rootName.ToPlainString();
+    }
+    else {
+        root = "";
+    }
+}
+
+public final static function LocalDatabase Load(Text databaseName) // loads per-object-config entry named `databaseName`; `none` for empty name
+{
+    if (!__().text.IsEmpty(databaseName)) {
+        return new(none, databaseName.ToPlainString()) class'LocalDatabase';
+    }
+    return none;
+}
+
+/**
+ * Updates `LocalDatabase` record inside its config file. If caller
+ * `LocalDatabase` does not have defined root (`HasDefinedRoot() == false`),
+ * then this method will erase its record from the config.
+ */
+public final function Save()
+{
+    if (HasDefinedRoot()) {
+        SaveConfig();
+    }
+    else {
+        ClearConfig();
+    }
+}
+
+/**
+ * Forgets all information stored in the caller `LocalDatabase` and erases it
+ * from the config files. After this call, creating `LocalDatabase` object
+ * with the same name will produce an object that can be treated as "blank":
+ * one will be able to use it to store information about a new database.
+ */
+public final function DeleteSelf()
+{
+    root = ""; // reset in-memory state as well, so the object itself becomes "blank"
+    ClearConfig();
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Local/LocalDatabaseInstance.uc b/sources/Data/Database/Local/LocalDatabaseInstance.uc
new file mode 100644
index 0000000..52b02ad
--- /dev/null
+++ b/sources/Data/Database/Local/LocalDatabaseInstance.uc
@@ -0,0 +1,356 @@
+/**
+ * Implementation of Acedia's `Database` interface for locally stored
+ * databases.
+ * This class SHOULD NOT be deallocated manually.
+ * This name was chosen so that more readable `LocalDatabase` could be
+ * used in config for defining local databases through per-object-config.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class LocalDatabaseInstance extends Database;
+
+/**
+ * `LocalDatabaseInstance` implements `Database` interface for
+ * local databases, however most of the work (everything related to actually
+ * performing operations) is handled by `DBRecord` class.
+ * This class' purpose is to:
+ * 1. Manage updating information stored on the disk: it has to make
+ * sure that saving is (eventually) done after every update, but not
+ * too often, since it is an expensive operation;
+ * 2. Make sure handlers for database queries are called (eventually).
+ * First point is done via starting a "cooldown" timer after every disk
+ * update that will count time until the next one. Second is done by storing
+ * `DBTask`, generated by last database query and making it call its handler
+ * at the start of next tick.
+ *
+ * Why do we wait until the next tick?
+ * Factually, every `LocalDatabaseInstance`'s query is completed immediately.
+ * However, `Database`'s interface is designed to be used like so:
+ * `db.ReadData(...).connect = handler;` where `handler` for query is assigned
+ * AFTER it was filed to the database. Therefore, we cannot call `handler`
+ * inside `ReadData()` and wait until next tick instead.
+ * We could have allowed for immediate query response if we either
+ * requested that handler was somehow set before the query or by providing
+ * a method to immediately call handlers for queries users have made so far.
+ * We avoided these solutions because we intend Acedia's `Database` interface
+ * to be used in the same way regardless of whether server admins have chosen
+ * to use local or remote databases. And neither of these solutions would have
+ * worked with inherently asynchronous remote databases. That is why we instead
+ * opted to use a more convenient interface
+ * `db.ReadData(...).connect = handler;` and have both databases behave
+ * the same way - with somewhat delayed response from the database.
+ * If you absolutely must force your local database to have an immediate
+ * response, then you can do it like so:
+ * ```unrealscript
+ * local DBTask task;
+ * ...
+ * task = db.ReadData(...);
+ * task.connect = handler;
+ * task.TryCompleting();
+ * ```
+ * However this method is not recommended and will never be a part of
+ * a stable interface.
+ */
+
+// Reference to the `LocalDatabase` config object, corresponding to
+// this database
+var private LocalDatabase configEntry;
+// Reference to the `DBRecord` that stores root object of this database
+var private DBRecord rootRecord;
+
+// As long as this `Timer` runs - we are in the "cooldown" period where no disk
+// updates can be done (except special cases like this object getting
+// deallocated).
+var private Timer diskUpdateTimer;
+// Only relevant when `diskUpdateTimer` is running. `false` would mean there is
+// nothing new to write and the timer will be discarded, but `true` means
+// that we have to write database on disk and restart the update timer again.
+var private bool needsDiskUpdate;
+
+// Last to-be-completed task added to this database
+var private DBTask lastTask;
+// Remember task's life version to make sure we still have the correct copy
+var private int lastTaskLifeVersion;
+
+protected function Constructor()
+{
+    _.unreal.OnTick(self).connect = CompleteAllTasks; // complete pending tasks at the start of every tick
+}
+
+protected function Finalizer()
+{
+    // Defaulting variables is not necessary, since this class does not
+    // use object pool.
+    CompleteAllTasks();
+    _.unreal.OnTick(self).Disconnect();
+    _.memory.Free(diskUpdateTimer);
+}
+
+// It only has parameters so that it can be used as a `Tick()` event handler.
+private final function CompleteAllTasks(
+    optional float delta,
+    optional float dilationCoefficient)
+{
+    if (lastTask != none && lastTask.GetLifeVersion() == lastTaskLifeVersion) {
+        lastTask.TryCompleting(); // life version check guards against a deallocated-and-reused task object
+    }
+    lastTask = none;
+    lastTaskLifeVersion = -1;
+}
+
+private final function LocalDatabaseInstance ScheduleDiskUpdate() // writes now or, during "cooldown", defers write until it ends
+{
+    if (diskUpdateTimer != none)
+    {
+        needsDiskUpdate = true; // on "cooldown": remember to write once the timer elapses
+        return self;
+    }
+    WriteToDisk();
+    needsDiskUpdate = false;
+    diskUpdateTimer = _.time.StartTimer(
+        class'LocalDBSettings'.default.writeToDiskDelay);
+    diskUpdateTimer.OnElapsed(self).connect = DoDiskUpdate;
+    return self;
+}
+
+private final function DoDiskUpdate(Timer source) // `OnElapsed()` handler for `diskUpdateTimer`
+{
+    if (needsDiskUpdate)
+    {
+        WriteToDisk();
+        needsDiskUpdate = false;
+        diskUpdateTimer.Start(); // begin another "cooldown" period
+    }
+    else
+    {
+        _.memory.Free(diskUpdateTimer); // nothing changed during "cooldown" - drop the timer
+        diskUpdateTimer = none;
+    }
+}
+
+private final function WriteToDisk() // saves database's package file; no-op without a usable config entry
+{
+    local string packageName;
+    if (configEntry != none) {
+        packageName = _.text.ToString(configEntry.GetPackageName());
+    }
+    if (packageName != "") {
+        _.unreal.GetGameType().SavePackage(packageName);
+    }
+}
+
+private final function DBTask MakeNewTask(class<DBTask> newTaskClass) // creates task of given class, chained after `lastTask`
+{
+    local DBTask newTask;
+    if (lastTask != none && lastTask.GetLifeVersion() != lastTaskLifeVersion)
+    {
+        lastTask = none; // tracked task was deallocated/reused - forget the stale reference
+        lastTaskLifeVersion = -1;
+    }
+    newTask = DBTask(_.memory.Allocate(newTaskClass));
+    newTask.SetPreviousTask(lastTask); // chain tasks, so completing the newest can also complete preceding ones
+    lastTask = newTask;
+    lastTaskLifeVersion = lastTask.GetLifeVersion();
+    return newTask;
+}
+
+private function bool ValidatePointer(JSONPointer pointer, DBTask relevantTask) // on `none` pointer: fails task with `DBR_InvalidPointer`, returns `false`
+{
+    if (pointer != none) {
+        return true;
+    }
+    relevantTask.SetResult(DBR_InvalidPointer);
+    return false;
+}
+
+private function bool ValidateRootRecord(DBTask relevantTask) // on missing root: fails task with `DBR_InvalidDatabase`, returns `false`
+{
+    if (rootRecord != none) {
+        return true;
+    }
+    relevantTask.SetResult(DBR_InvalidDatabase);
+    return false;
+}
+
+public function DBReadTask ReadData(
+    JSONPointer pointer,
+    optional bool makeMutable)
+{
+    local AcediaObject queryResult;
+    local DBReadTask readTask;
+    readTask = DBReadTask(MakeNewTask(class'DBReadTask'));
+    if (!ValidatePointer(pointer, readTask)) return readTask;
+    if (!ValidateRootRecord(readTask)) return readTask;
+
+    if (rootRecord.LoadObject(pointer, queryResult, makeMutable))
+    {
+        readTask.SetReadData(queryResult);
+        readTask.SetResult(DBR_Success);
+    }
+    else
+    {
+        readTask.SetResult(DBR_InvalidPointer); // `LoadObject()` failure here is attributed to a bad `pointer`
+        _.memory.Free(queryResult); // just in case
+    }
+    return readTask;
+}
+
+public function DBWriteTask WriteData(JSONPointer pointer, AcediaObject data)
+{
+    local bool isDataStorable;
+    local DBWriteTask writeTask;
+    writeTask = DBWriteTask(MakeNewTask(class'DBWriteTask'));
+    if (!ValidatePointer(pointer, writeTask)) return writeTask;
+    if (!ValidateRootRecord(writeTask)) return writeTask;
+
+    if (pointer.GetLength() <= 0) {
+        isDataStorable = (data.class == class'AssociativeArray'); // only a JSON object may replace the database's root
+    }
+    else {
+        isDataStorable = _.json.IsCompatible(data);
+    }
+    if (!isDataStorable)
+    {
+        writeTask.SetResult(DBR_InvalidData);
+        return writeTask;
+    }
+    if (rootRecord.SaveObject(pointer, data))
+    {
+        writeTask.SetResult(DBR_Success);
+        ScheduleDiskUpdate(); // stored data changed - make sure it reaches the disk
+    }
+    else {
+        writeTask.SetResult(DBR_InvalidPointer);
+    }
+    return writeTask;
+}
+
+public function DBRemoveTask RemoveData(JSONPointer pointer)
+{
+    local DBRemoveTask removeTask;
+    removeTask = DBRemoveTask(MakeNewTask(class'DBRemoveTask'));
+    if (!ValidatePointer(pointer, removeTask)) return removeTask;
+    if (!ValidateRootRecord(removeTask)) return removeTask;
+
+    if (pointer.GetLength() == 0)
+    {
+        rootRecord.EmptySelf(); // NOTE(review): unlike other mutating paths, no `ScheduleDiskUpdate()` here - verify emptied root reaches the disk
+        removeTask.SetResult(DBR_Success);
+        return removeTask;
+    }
+    if (rootRecord.RemoveObject(pointer))
+    {
+        removeTask.SetResult(DBR_Success);
+        ScheduleDiskUpdate();
+    }
+    else {
+        removeTask.SetResult(DBR_InvalidPointer);
+    }
+    return removeTask;
+}
+
+public function DBCheckTask CheckDataType(JSONPointer pointer)
+{
+    local DBCheckTask checkTask;
+    checkTask = DBCheckTask(MakeNewTask(class'DBCheckTask'));
+    if (!ValidatePointer(pointer, checkTask)) return checkTask;
+    if (!ValidateRootRecord(checkTask)) return checkTask;
+
+    checkTask.SetDataType(rootRecord.GetObjectType(pointer)); // read-only query - no disk update needed
+    checkTask.SetResult(DBR_Success);
+    return checkTask;
+}
+
+public function DBSizeTask GetDataSize(JSONPointer pointer)
+{
+    local DBSizeTask sizeTask;
+    sizeTask = DBSizeTask(MakeNewTask(class'DBSizeTask'));
+    if (!ValidatePointer(pointer, sizeTask)) return sizeTask;
+    if (!ValidateRootRecord(sizeTask)) return sizeTask;
+
+    sizeTask.SetDataSize(rootRecord.GetObjectSize(pointer)); // read-only query - no disk update needed
+    sizeTask.SetResult(DBR_Success);
+    return sizeTask;
+}
+
+public function DBKeysTask GetDataKeys(JSONPointer pointer)
+{
+    local DynamicArray keys;
+    local DBKeysTask keysTask;
+    keysTask = DBKeysTask(MakeNewTask(class'DBKeysTask'));
+    if (!ValidatePointer(pointer, keysTask)) return keysTask;
+    if (!ValidateRootRecord(keysTask)) return keysTask;
+
+    keys = rootRecord.GetObjectKeys(pointer);
+    keysTask.SetDataKeys(keys);
+    if (keys == none) {
+        keysTask.SetResult(DBR_InvalidData); // presumably `pointer` did not refer to a JSON object - TODO confirm
+    }
+    else {
+        keysTask.SetResult(DBR_Success);
+    }
+    return keysTask;
+}
+
+public function DBIncrementTask IncrementData(
+    JSONPointer pointer,
+    AcediaObject increment)
+{
+    local DBQueryResult queryResult;
+    local DBIncrementTask incrementTask;
+    incrementTask = DBIncrementTask(MakeNewTask(class'DBIncrementTask'));
+    if (!ValidatePointer(pointer, incrementTask)) return incrementTask;
+    if (!ValidateRootRecord(incrementTask)) return incrementTask;
+
+    queryResult = rootRecord.IncrementObject(pointer, increment);
+    incrementTask.SetResult(queryResult);
+    if (queryResult == DBR_Success) {
+        ScheduleDiskUpdate(); // increment mutates stored data
+    }
+    return incrementTask;
+}
+
+/**
+ * Initializes caller database with prepared config and root objects.
+ *
+ * This is an internal method and should not be called outside of `DBAPI`.
+ */
+public final function Initialize(LocalDatabase config, DBRecord root)
+{
+    if (configEntry != none) {
+        return; // already initialized - ignore repeated calls
+    }
+    configEntry = config;
+    rootRecord = root;
+}
+
+/**
+ * Returns config object that describes caller database.
+ *
+ * @return Config object that describes caller database.
+ * Returned value is the same value caller database uses,
+ * it IS NOT a copy and SHOULD NOT be deallocated or deleted.
+ */
+public final function LocalDatabase GetConfig()
+{
+    return configEntry;
+}
+
+defaultproperties
+{
+    usesObjectPool = false
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tasks/DBCheckTask.uc b/sources/Data/Database/Tasks/DBCheckTask.uc
new file mode 100644
index 0000000..a5af3e0
--- /dev/null
+++ b/sources/Data/Database/Tasks/DBCheckTask.uc
@@ -0,0 +1,45 @@
+/**
+ * Variant of `DBTask` for `CheckDataType()` query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBCheckTask extends DBTask;
+
+var private Database.DataType queryTypeResponse; // data type to report alongside the query result
+
+delegate connect(Database.DBQueryResult result, Database.DataType type) {}
+
+protected function Finalizer()
+{
+    super.Finalizer();
+    queryTypeResponse = JSON_Undefined; // clear stored state so a reused task object starts clean
+    connect = none;
+}
+
+public function SetDataType(Database.DataType type) // records type to later pass into `connect()`
+{
+    queryTypeResponse = type;
+}
+
+protected function CompleteSelf()
+{
+    connect(GetResult(), queryTypeResponse);
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tasks/DBIncrementTask.uc b/sources/Data/Database/Tasks/DBIncrementTask.uc
new file mode 100644
index 0000000..9507ff0
--- /dev/null
+++ b/sources/Data/Database/Tasks/DBIncrementTask.uc
@@ -0,0 +1,37 @@
+/**
+ * Variant of `DBTask` for `IncrementData()` query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBIncrementTask extends DBTask;
+
+delegate connect(Database.DBQueryResult result) {} // called once `IncrementData()` query completes
+
+protected function Finalizer()
+{
+    super.Finalizer();
+    connect = none; // drop handler so a reused task does not call a stale delegate
+}
+
+protected function CompleteSelf()
+{
+    connect(GetResult());
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tasks/DBKeysTask.uc b/sources/Data/Database/Tasks/DBKeysTask.uc
new file mode 100644
index 0000000..e7b2d37
--- /dev/null
+++ b/sources/Data/Database/Tasks/DBKeysTask.uc
@@ -0,0 +1,45 @@
+/**
+ * Variant of `DBTask` for `GetDataKeys()` query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBKeysTask extends DBTask;
+
+var private DynamicArray queryKeysResponse; // keys array to report alongside the query result
+
+delegate connect(Database.DBQueryResult result, DynamicArray keys) {}
+
+protected function Finalizer()
+{
+    super.Finalizer();
+    queryKeysResponse = none; // clear stored state so a reused task object starts clean
+    connect = none;
+}
+
+public function SetDataKeys(DynamicArray keys) // records keys to later pass into `connect()`
+{
+    queryKeysResponse = keys;
+}
+
+protected function CompleteSelf()
+{
+    connect(GetResult(), queryKeysResponse);
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tasks/DBReadTask.uc b/sources/Data/Database/Tasks/DBReadTask.uc
new file mode 100644
index 0000000..b51de6a
--- /dev/null
+++ b/sources/Data/Database/Tasks/DBReadTask.uc
@@ -0,0 +1,45 @@
+/**
+ * Variant of `DBTask` for `ReadData()` query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBReadTask extends DBTask;
+
+var private AcediaObject queryDataResponse; // data read by the query, reported alongside its result
+
+delegate connect(Database.DBQueryResult result, AcediaObject data) {}
+
+protected function Finalizer()
+{
+    super.Finalizer();
+    queryDataResponse = none; // clear stored state so a reused task object starts clean
+    connect = none;
+}
+
+public function SetReadData(AcediaObject data) // records data to later pass into `connect()`
+{
+    queryDataResponse = data;
+}
+
+protected function CompleteSelf()
+{
+    connect(GetResult(), queryDataResponse);
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tasks/DBRemoveTask.uc b/sources/Data/Database/Tasks/DBRemoveTask.uc
new file mode 100644
index 0000000..88cc4aa
--- /dev/null
+++ b/sources/Data/Database/Tasks/DBRemoveTask.uc
@@ -0,0 +1,37 @@
+/**
+ * Variant of `DBTask` for `RemoveData()` query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBRemoveTask extends DBTask;
+
+delegate connect(Database.DBQueryResult result) {} // called once `RemoveData()` query completes
+
+protected function Finalizer()
+{
+    super.Finalizer();
+    connect = none; // drop handler so a reused task does not call a stale delegate
+}
+
+protected function CompleteSelf()
+{
+    connect(GetResult());
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tasks/DBSizeTask.uc b/sources/Data/Database/Tasks/DBSizeTask.uc
new file mode 100644
index 0000000..8d31d1a
--- /dev/null
+++ b/sources/Data/Database/Tasks/DBSizeTask.uc
@@ -0,0 +1,45 @@
+/**
+ * Variant of `DBTask` for `GetDataSize()` query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBSizeTask extends DBTask;
+
+var private int querySizeResponse; // measured size to report alongside the query result
+
+delegate connect(Database.DBQueryResult result, int size) {}
+
+protected function Finalizer()
+{
+    super.Finalizer();
+    querySizeResponse = 0; // clear stored state so a reused task object starts clean
+    connect = none;
+}
+
+public function SetDataSize(int size) // records size to later pass into `connect()`
+{
+    querySizeResponse = size;
+}
+
+protected function CompleteSelf()
+{
+    connect(GetResult(), querySizeResponse);
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tasks/DBWriteTask.uc b/sources/Data/Database/Tasks/DBWriteTask.uc
new file mode 100644
index 0000000..7e499ba
--- /dev/null
+++ b/sources/Data/Database/Tasks/DBWriteTask.uc
@@ -0,0 +1,37 @@
+/**
+ * Variant of `DBTask` for `WriteData()` query.
+ * Copyright 2021 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class DBWriteTask extends DBTask;
+
+delegate connect(Database.DBQueryResult result) {} // called once `WriteData()` query completes
+
+protected function Finalizer()
+{
+    super.Finalizer();
+    connect = none; // drop handler so a reused task does not call a stale delegate
+}
+
+protected function CompleteSelf()
+{
+    connect(GetResult());
+}
+
+defaultproperties
+{
+}
\ No newline at end of file
diff --git a/sources/Data/Database/Tests/TEST_LocalDatabase.uc b/sources/Data/Database/Tests/TEST_LocalDatabase.uc
new file mode 100644
index 0000000..2e0f51c
--- /dev/null
+++ b/sources/Data/Database/Tests/TEST_LocalDatabase.uc
@@ -0,0 +1,1238 @@
+/**
+ * Set of tests for `DBRecord` class.
+ * Copyright 2020 Anton Tarasenko
+ *------------------------------------------------------------------------------
+ * This file is part of Acedia.
+ *
+ * Acedia is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * Acedia is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Acedia. If not, see <https://www.gnu.org/licenses/>.
+ */
+class TEST_LocalDatabase extends TestCase
+ abstract;
+
+// Results of the latest database query are copied into these `default.`
+// variables by the `DB...Handler()` callbacks below, so that the static
+// test functions can inspect them after `TryCompleting()` returns.
+var protected int resultSize;
+var protected DynamicArray resultKeys;
+var protected Database.DBQueryResult resultType;
+var protected Database.DataType resultDataType;
+var protected AssociativeArray resultData;
+var protected AcediaObject resultObject;
+
+// Delegate handlers for each query type. They are non-static, but write
+// into `default.` variables, so that the static test code can read the
+// results back.
+
+// Stores result of a `ReadData()` query; `resultData` is only non-`none`
+// when the read value is a JSON object (`AssociativeArray`).
+protected function DBReadingHandler(
+ Database.DBQueryResult result,
+ AcediaObject data)
+{
+ default.resultType = result;
+ default.resultObject = data;
+ default.resultData = AssociativeArray(data);
+}
+
+// Stores result of a `GetDataKeys()` query.
+protected function DBKeysHandler(
+ Database.DBQueryResult result,
+ DynamicArray keys)
+{
+ default.resultType = result;
+ default.resultKeys = keys;
+}
+
+// Stores result of a `CheckDataType()` query.
+protected function DBCheckHandler(
+ Database.DBQueryResult result,
+ Database.DataType type)
+{
+ default.resultType = result;
+ default.resultDataType = type;
+}
+
+// Stores result of a `GetDataSize()` query.
+protected function DBSizeHandler(
+ Database.DBQueryResult result,
+ int size)
+{
+ default.resultType = result;
+ default.resultSize = size;
+}
+
+// Stores result of a `WriteData()` query.
+protected function DBWritingHandler(Database.DBQueryResult result)
+{
+ default.resultType = result;
+}
+
+// Stores result of an `IncrementData()` query.
+protected function DBIncrementHandler(Database.DBQueryResult result)
+{
+ default.resultType = result;
+}
+
+// Stores result of a `RemoveData()` query.
+protected function DBRemoveHandler(Database.DBQueryResult result)
+{
+ default.resultType = result;
+}
+
+// Helper that reads the value at JSON path `pointer` from `db` and
+// synchronously completes the task, recording the outcome through
+// `DBReadingHandler()` into `default.resultType` / `resultObject` /
+// `resultData`.
+protected static function ReadFromDB(LocalDatabaseInstance db, string pointer)
+{
+ local DBReadTask task;
+ task = db.ReadData(__().json.Pointer(P(pointer)));
+ task.connect = DBReadingHandler;
+ task.TryCompleting();
+}
+
+// Counts `DBRecord` data objects stored inside given `package`.
+// Used by tests to detect garbage records left behind by database
+// operations.
+protected static function int CountRecordsInPackage(string package)
+{
+ local int counter;
+ local DBRecord nextRecord;
+ local GameInfo game;
+ game = __().unreal.GetGameType();
+ foreach game.AllDataObjects(class'DBRecord', nextRecord, package) {
+ counter += 1;
+ }
+ return counter;
+}
+
+/* JSON data written in the "MockLocalDBReadOnly" local database.
+This is the code that has been used to create the "TEST_ReadOnly" package that
+contains it:
+
+local string source;
+local Parser parser;
+local AssociativeArray root;
+local LocalDatabaseInstance db;
+source = GetJSONTemplateString();
+parser = __().text.ParseString(source);
+root = AssociativeArray(__().json.ParseWith(parser));
+db = class'LocalDatabaseInstance'.static.NewDatabase(P("TEST_ReadOnly"));
+*/
+// Returns the JSON text (see the comment above for how it was used to
+// prepare the "TEST_ReadOnly" package) as a single concatenated string.
+protected static function string GetJSONTemplateString()
+{
+ return "{\"web-app\": {"
+ @ " \"servlet\": [ "
+ @ " {"
+ @ " \"servlet-name\": \"cofaxCDS\","
+ @ " \"servlet-class\": \"org.cofax.cds.CDSServlet\","
+ @ " \"init-param\": {"
+ @ " \"configGlossary:installationAt\": \"Philadelphia, PA\","
+ @ " \"configGlossary:adminEmail\": \"ksm@pobox.com\","
+ @ " \"configGlossary:poweredBy\": \"Cofax\","
+ @ " \"configGlossary:poweredByIcon\": \"/images/cofax.gif\","
+ @ " \"configGlossary:staticPath\": \"/content/static\","
+ @ " \"templateProcessorClass\": \"org.cofax.WysiwygTemplate\","
+ @ " \"templateLoaderClass\": \"org.cofax.FilesTemplateLoader\","
+ @ " \"templatePath\": \"templates\","
+ @ " \"templateOverridePath\": \"\","
+ @ " \"defaultListTemplate\": \"listTemplate.htm\","
+ @ " \"defaultFileTemplate\": \"articleTemplate.htm\","
+ @ " \"useJSP\": false,"
+ @ " \"jspListTemplate\": \"listTemplate.jsp\","
+ @ " \"jspFileTemplate\": \"articleTemplate.jsp\","
+ @ " \"cachePackageTagsTrack\": 200,"
+ @ " \"cachePackageTagsStore\": 200,"
+ @ " \"cachePackageTagsRefresh\": 60,"
+ @ " \"cacheTemplatesTrack\": 100,"
+ @ " \"cacheTemplatesStore\": 50,"
+ @ " \"cacheTemplatesRefresh\": 15,"
+ @ " \"cachePagesTrack\": 200,"
+ @ " \"cachePagesStore\": 100,"
+ @ " \"cachePagesRefresh\": 10,"
+ @ " \"cachePagesDirtyRead\": 10,"
+ @ " \"searchEngineListTemplate\": \"forSearchEnginesList.htm\","
+ @ " \"searchEngineFileTemplate\": \"forSearchEngines.htm\","
+ @ " \"searchEngineRobotsDb\": \"WEB-INF/robots.db\","
+ @ " \"useDataStore\": true,"
+ @ " \"dataStoreClass\": \"org.cofax.SqlDataStore\","
+ @ " \"redirectionClass\": \"org.cofax.SqlRedirection\","
+ @ " \"dataStoreName\": \"cofax\","
+ @ " \"dataStoreDriver\": \"com.microsoft.jdbc.sqlserver.SQLServerDriver\","
+ @ " \"dataStoreUrl\": \"jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon\","
+ @ " \"dataStoreUser\": \"sa\","
+ @ " \"dataStorePassword\": \"dataStoreTestQuery\","
+ @ " \"dataStoreTestQuery\": \"SET NOCOUNT ON;select test='test';\","
+ @ " \"dataStoreLogFile\": \"/usr/local/tomcat/logs/datastore.log\","
+ @ " \"dataStoreInitConns\": 10,"
+ @ " \"dataStoreMaxConns\": 100,"
+ @ " \"dataStoreConnUsageLimit\": 100,"
+ @ " \"dataStoreLogLevel\": \"debug\","
+ @ " \"maxUrlLength\": 500}},"
+ @ " {"
+ @ " \"servlet-name\": \"cofaxEmail\","
+ @ " \"servlet-class\": \"org.cofax.cds.EmailServlet\","
+ @ " \"init-param\": {"
+ @ " \"mailHost\": \"mail1\","
+ @ " \"mailHostOverride\": \"mail2\"}},"
+ @ " {"
+ @ " \"servlet-name\": \"cofaxAdmin\","
+ @ " \"servlet-class\": \"org.cofax.cds.AdminServlet\"},"
+ @ " "
+ @ " {"
+ @ " \"servlet-name\": \"fileServlet\","
+ @ " \"servlet-class\": \"org.cofax.cds.FileServlet\"},"
+ @ " {"
+ @ " \"servlet-name\": \"cofaxTools\","
+ @ " \"servlet-class\": \"org.cofax.cms.CofaxToolsServlet\","
+ @ " \"init-param\": {"
+ @ " \"templatePath\": \"toolstemplates/\","
+ @ " \"log\": 1,"
+ @ " \"logLocation\": \"/usr/local/tomcat/logs/CofaxTools.log\","
+ @ " \"logMaxSize\": \"\","
+ @ " \"dataLog\": 1,"
+ @ " \"dataLogLocation\": \"/usr/local/tomcat/logs/dataLog.log\","
+ @ " \"dataLogMaxSize\": \"\","
+ @ " \"removePageCache\": \"/content/admin/remove?cache=pages&id=\","
+ @ " \"removeTemplateCache\": \"/content/admin/remove?cache=templates&id=\","
+ @ " \"fileTransferFolder\": \"/usr/local/tomcat/webapps/content/fileTransferFolder\","
+ @ " \"lookInContext\": 1,"
+ @ " \"adminGroupID\": 4,"
+ @ " \"betaServer\": true}}],"
+ @ " \"servlet-mapping\": {"
+ @ " \"cofaxCDS\": \"/\","
+ @ " \"cofaxEmail\": \"/cofaxutil/aemail/*\","
+ @ " \"cofaxAdmin\": \"/admin/*\","
+ @ " \"fileServlet\": \"/static/*\","
+ @ " \"cofaxTools\": \"/tools/*\"},"
+ @ " "
+ @ " \"taglib\": {"
+ @ " \"taglib-uri\": \"cofax.tld\","
+ @ " \"taglib-location\": \"/WEB-INF/tlds/cofax.tld\"}}}";
+}
+
+// Entry point: runs all local database test groups in order.
+// NOTE(review): later groups reuse databases set up by earlier ones,
+// so the order of these calls matters.
+protected static function TESTS()
+{
+ Test_LoadingPrepared();
+ Test_Writing();
+ Test_Recreate();
+ Test_TaskChaining();
+ Test_Removal();
+ Test_Increment();
+}
+
+// Tests read-only queries against the prepared "TEST_ReadOnly" database
+// (its contents are produced from `GetJSONTemplateString()`).
+protected static function Test_LoadingPrepared()
+{
+ local LocalDatabaseInstance db;
+ db = __().db.LoadLocal(P("TEST_ReadOnly"));
+ Context("Testing reading prepared data from the local database.");
+ Issue("Existing database reported as missing.");
+ TEST_ExpectTrue(__().db.ExistsLocal(P("TEST_ReadOnly")));
+
+ Issue("Loading same database several times produces different"
+ @ "`LocalDatabaseInstance` objects.");
+ TEST_ExpectTrue(__().db.LoadLocal(P("TEST_ReadOnly")) == db);
+ // Groups of read-only tests
+ SubTest_LoadingPreparedSuccessRoot(db);
+ SubTest_LoadingPreparedSuccessSubValues(db);
+ SubTest_LoadingPreparedFailure(db);
+ SubTest_LoadingPreparedCheckTypesSuccess(db);
+ SubTest_LoadingPreparedCheckTypesFail(db);
+ SubTest_LoadingPreparedGetSizePositive(db);
+ SubTest_LoadingPreparedGetSizeNegative(db);
+ SubTest_LoadingPreparedGetKeysSuccess(db);
+ SubTest_LoadingPreparedGetKeysFail(db);
+}
+
+// Checks that a full (root) read of the prepared database returns
+// the expected values at several depths.
+protected static function SubTest_LoadingPreparedSuccessRoot(
+ LocalDatabaseInstance db)
+{
+ Issue("Data is being read incorrectly.");
+ ReadFromDB(db, "");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultData.GetLength() == 1);
+ TEST_ExpectTrue(default.resultData
+ .GetAssociativeArrayByPointer(P("/web-app")).GetLength() == 3);
+ TEST_ExpectTrue(default.resultData
+ .GetDynamicArrayByPointer(P("/web-app/servlet")).GetLength() == 5);
+ TEST_ExpectTrue(default.resultData
+ .GetAssociativeArrayByPointer(P("/web-app/servlet/0/init-param"))
+ .GetLength() == 42);
+ TEST_ExpectTrue(default.resultData
+ .GetTextByPointer(P("/web-app/servlet/2/servlet-class"))
+ .ToPlainString() == "org.cofax.cds.AdminServlet");
+ TEST_ExpectFalse(default.resultData
+ .GetBoolByPointer(P("/web-app/servlet/0/init-param/useJSP")));
+ TEST_ExpectTrue(default.resultData
+ .GetIntByPointer(P("/web-app/servlet/0/init-param/dataStoreMaxConns"))
+ == 100);
+}
+
+// Checks reads that start below the root: a sub-object and two simple
+// (non-container) JSON values.
+protected static function SubTest_LoadingPreparedSuccessSubValues(
+ LocalDatabaseInstance db)
+{
+ Issue("Sub-objects are being read incorrectly.");
+ ReadFromDB(db, "/web-app/servlet-mapping");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultData.GetLength() == 5);
+ TEST_ExpectTrue(
+ default.resultData.GetText(P("cofaxCDS")).ToPlainString() == "/");
+ TEST_ExpectTrue(
+ default.resultData.GetText(P("cofaxEmail")).ToPlainString()
+ == "/cofaxutil/aemail/*");
+ TEST_ExpectTrue(
+ default.resultData.GetText(P("cofaxAdmin")).ToPlainString()
+ == "/admin/*");
+
+ Issue("Simple values are being read incorrectly.");
+ ReadFromDB(db, "/web-app/servlet/3/servlet-class");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(
+ Text(default.resultObject).ToPlainString()
+ == "org.cofax.cds.FileServlet");
+ ReadFromDB(db, "/web-app/servlet/4/init-param/adminGroupID");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(IntBox(default.resultObject).Get() == 4);
+}
+
+// Checks that reads with a `none` pointer, a missing key and an
+// out-of-bounds array index all fail with `DBR_InvalidPointer`.
+protected static function SubTest_LoadingPreparedFailure(
+ LocalDatabaseInstance db)
+{
+ local DBReadTask task;
+ Issue("Reading database values from incorrect path does not produce"
+ @ "`DBR_InvalidPointer` result.");
+ task = db.ReadData(none);
+ task.connect = DBReadingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+ ReadFromDB(db, "/web-app/servlet-mappings");
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+ ReadFromDB(db, "/web-app/servlet/5");
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+}
+
+// Checks `CheckDataType()` against existing values of each JSON type:
+// object, string, number and boolean.
+protected static function SubTest_LoadingPreparedCheckTypesSuccess(
+ LocalDatabaseInstance db)
+{
+ local DBCheckTask task;
+ Issue("`CheckDataType()` returns incorrect type for existing elements.");
+ task = db.CheckDataType(__().json.Pointer(P("/web-app/servlet-mapping")));
+ task.connect = DBCheckHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultDataType == JSON_Object);
+ task = db.CheckDataType(__().json.Pointer(P("/web-app/taglib/taglib-uri")));
+ task.connect = DBCheckHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultDataType == JSON_String);
+ task = db.CheckDataType(__().json.Pointer(
+ P("/web-app/servlet/0/init-param/cacheTemplatesRefresh")));
+ task.connect = DBCheckHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultDataType == JSON_Number);
+ task = db.CheckDataType(__().json.Pointer(
+ P("/web-app/servlet/0/init-param/useJSP")));
+ task.connect = DBCheckHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultDataType == JSON_Boolean);
+}
+
+// Checks `CheckDataType()` edge cases: a missing element still succeeds
+// (reporting `JSON_Undefined`), while a `none` pointer must fail.
+protected static function SubTest_LoadingPreparedCheckTypesFail(
+ LocalDatabaseInstance db)
+{
+ local DBCheckTask task;
+ Issue("`CheckDataType()` returns incorrect type for missing elements.");
+ task = db.CheckDataType(__().json.Pointer(P("/web-app/NothingHere")));
+ task.connect = DBCheckHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultDataType == JSON_Undefined);
+
+ Issue("`CheckDataType()` reports success for `none` pointer.");
+ task = db.CheckDataType(none);
+ task.connect = DBCheckHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+}
+
+// Checks that `GetDataSize()` reports element counts for JSON arrays
+// and objects.
+protected static function SubTest_LoadingPreparedGetSizePositive(
+ LocalDatabaseInstance db)
+{
+ local DBSizeTask task;
+ Issue("Local database incorrectly reports size of arrays.");
+ task = db.GetDataSize(__().json.Pointer(P("/web-app/servlet")));
+ task.connect = DBSizeHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultSize == 5);
+
+ Issue("Local database incorrectly reports size of objects.");
+ task = db.GetDataSize(
+ __().json.Pointer(P("/web-app/servlet/0/init-param")));
+ task.connect = DBSizeHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultSize == 42);
+}
+
+// Checks `GetDataSize()` failure modes: non-container values and missing
+// values report a negative size, while a `none` pointer fails outright.
+protected static function SubTest_LoadingPreparedGetSizeNegative(
+ LocalDatabaseInstance db)
+{
+ local DBSizeTask task;
+ Issue("Local database does not report negative size value for"
+ @ "non-array/object size.");
+ task = db.GetDataSize(__().json.Pointer(P("/web-app/taglib/taglib-uri")));
+ task.connect = DBSizeHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultSize < 0);
+
+ Issue("Local database does not report negative size value for non-existing"
+ @ "values.");
+ task = db.GetDataSize(__().json.Pointer(P("/web-app/whoops")));
+ task.connect = DBSizeHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultSize < 0);
+
+ // Fixed: this case previously duplicated the "/web-app/whoops" query
+ // above and asserted success, contradicting its `Issue()` message;
+ // a `none` pointer must fail with `DBR_InvalidPointer`, same as for
+ // `ReadData()` / `CheckDataType()` / `GetDataKeys()` queries.
+ Issue("Local database does not report failure for `none` pointer.");
+ task = db.GetDataSize(none);
+ task.connect = DBSizeHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+}
+
+// Checks that `GetDataKeys()` returns exactly the five keys of the
+// "/web-app/servlet-mapping" object (in any order).
+protected static function SubTest_LoadingPreparedGetKeysSuccess(
+ LocalDatabaseInstance db)
+{
+ local int i;
+ local bool rCDS, rEmail, rAdmin, rServlet, rTools;
+ local string nextKey;
+ local DBKeysTask task;
+ Issue("Object keys are read incorrectly.");
+ task = db.GetDataKeys(__().json.Pointer(P("/web-app/servlet-mapping")));
+ task.connect = DBKeysHandler;
+ task.TryCompleting();
+ // Order of returned keys is not guaranteed, so record each expected
+ // key with its own flag.
+ for (i = 0; i < default.resultKeys.GetLength(); i += 1)
+ {
+ nextKey = default.resultKeys.GetText(i).ToPlainString();
+ if (nextKey == "cofaxCDS") rCDS = true;
+ if (nextKey == "cofaxEmail") rEmail = true;
+ if (nextKey == "cofaxAdmin") rAdmin = true;
+ if (nextKey == "fileServlet") rServlet = true;
+ if (nextKey == "cofaxTools") rTools = true;
+ }
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultKeys.GetLength() == 5);
+ TEST_ExpectTrue(rCDS && rEmail && rAdmin && rServlet && rTools);
+}
+
+// Checks that `GetDataKeys()` fails for non-objects (arrays, simple
+// values), missing values and `none` pointers, always producing `none`
+// for the keys array.
+protected static function SubTest_LoadingPreparedGetKeysFail(
+ LocalDatabaseInstance db)
+{
+ local DBKeysTask task;
+ Issue("Non-objects do not correctly cause failure for getting their"
+ @ "key arrays.");
+ task = db.GetDataKeys(__().json.Pointer(P("/web-app/servlet")));
+ task.connect = DBKeysHandler;
+ task.TryCompleting();
+ // Fixed: the `resultType` assertion was missing here, unlike in
+ // the simple-value case below.
+ TEST_ExpectTrue(default.resultType == DBR_InvalidData);
+ TEST_ExpectNone(default.resultKeys);
+ task = db.GetDataKeys(
+ __().json.Pointer(P("/web-app/servlet/1/mailHostOverride")));
+ task.connect = DBKeysHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidData);
+ TEST_ExpectNone(default.resultKeys);
+
+ Issue("Missing values do not correctly cause failure for getting their"
+ @ "key arrays.");
+ task = db.GetDataKeys(__().json.Pointer(P("/web-app/a-what-now?")));
+ task.connect = DBKeysHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidData);
+ TEST_ExpectNone(default.resultKeys);
+
+ Issue("Obtaining key arrays for `none` JSON pointers does not"
+ @ "produce errors.");
+ task = db.GetDataKeys(none);
+ task.connect = DBKeysHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+ TEST_ExpectNone(default.resultKeys);
+}
+
+// Tests creating the "TEST_DB" database, filling it with values through
+// the writing subtests and finally deleting it.
+protected static function Test_Writing()
+{
+ local LocalDatabaseInstance db;
+ db = __().db.NewLocal(P("TEST_DB"));
+ Context("Testing (re-)creating and writing into a new local database.");
+ Issue("Cannot create a new database.");
+ TEST_ExpectNotNone(db);
+ TEST_ExpectTrue(__().db.ExistsLocal(P("TEST_DB")));
+
+ Issue("Freshly created database is not empty.");
+ TEST_ExpectTrue(CountRecordsInPackage("TEST_DB") == 1); // 1 root object
+
+ Issue("Loading just created database produces different"
+ @ "`LocalDatabaseInstance` object.");
+ TEST_ExpectTrue(__().db.LoadLocal(P("TEST_DB")) == db);
+ // This set of tests fills our test database with objects
+ SubTest_WritingSuccess(db);
+ SubTest_WritingDataCheck(db);
+ SubTest_WritingDataCheck_Immutable(db);
+ SubTest_WritingDataCheck_Mutable(db);
+ SubTest_WritingFailure(db);
+ SubTest_WritingIntoSimpleValues(db);
+
+ Issue("`DeleteLocal()` does not return `true` after deleting existing"
+ @ "local database.");
+ TEST_ExpectTrue(__().db.DeleteLocal(P("TEST_DB")));
+
+ Issue("Newly created database is reported to still exist after deletion.");
+ TEST_ExpectFalse(__().db.ExistsLocal(P("TEST_DB")));
+ TEST_ExpectFalse(db.IsAllocated());
+
+ Issue("`DeleteLocal()` does not return `false` after trying to delete"
+ @ "non-existing local database.");
+ TEST_ExpectFalse(__().db.DeleteLocal(P("TEST_DB")));
+}
+
+// Tests that "TEST_DB" can be recreated after `Test_Writing()` deleted
+// it, and that array-index writing works in the fresh database.
+protected static function Test_Recreate()
+{
+ local LocalDatabaseInstance db;
+ Issue("Freshly created database is not empty.");
+ db = __().db.NewLocal(P("TEST_DB"));
+ TEST_ExpectTrue(CountRecordsInPackage("TEST_DB") == 1);
+
+ Issue("Cannot create a database after database with the same name was"
+ @ "just deleted.");
+ TEST_ExpectNotNone(db);
+ TEST_ExpectTrue(__().db.ExistsLocal(P("TEST_DB")));
+ SubTest_WritingArrayIndicies(db);
+ __().db.DeleteLocal(P("TEST_DB"));
+ Issue("Newly created database is reported to still exist after deletion.");
+ TEST_ExpectFalse(__().db.ExistsLocal(P("TEST_DB")));
+ TEST_ExpectFalse(db.IsAllocated());
+}
+
+// Tests that several queued database tasks can be chained and completed
+// one after another without failures.
+protected static function Test_TaskChaining()
+{
+ local LocalDatabaseInstance db;
+ // Fixed `Context()` message: it was copy-pasted from `Test_Writing()`
+ // and misdescribed this test group.
+ Context("Testing task chaining for local databases.");
+ Issue("Freshly created database is not empty.");
+ db = __().db.NewLocal(P("TEST_DB"));
+ TEST_ExpectTrue(CountRecordsInPackage("TEST_DB") == 1);
+
+ Issue("Cannot create a database after database with the same name was"
+ @ "just deleted.");
+ TEST_ExpectNotNone(db);
+ TEST_ExpectTrue(__().db.ExistsLocal(P("TEST_DB")));
+ SubTest_TaskChaining(db);
+ __().db.DeleteLocal(P("TEST_DB"));
+}
+
+// Returns a fresh template JSON object: {"A":"simpleValue","B":11.12}.
+protected static function AssociativeArray GetJSONSubTemplateObject()
+{
+ local Parser parser;
+ parser = __().text.ParseString("{\"A\":\"simpleValue\",\"B\":11.12}");
+ return AssociativeArray(__().json.ParseWith(parser));
+}
+
+// Returns a fresh template JSON array: [true, null, "huh"].
+protected static function DynamicArray GetJSONSubTemplateArray()
+{
+ local Parser parser;
+ parser = __().text.ParseString("[true, null, \"huh\"]");
+ return DynamicArray(__().json.ParseWith(parser));
+}
+
+/*
+In the following function we construct the following JSON object inside
+the database by using templates provided by `GetJSONSubTemplateObject()` and
+`GetJSONSubTemplateArray()`:
+{
+ "A": "simpleValue",
+ "B": {
+ "A": [true, {
+ "A": "simpleValue",
+ "B": 11.12,
+ "": [true, null, "huh"]
+ }, "huh"],
+ "B": 11.12
+ }
+}
+*/
+// Fills "TEST_DB" with the JSON structure shown in the comment above,
+// checking that every `WriteData()` call succeeds and that rewriting
+// a value does not leave garbage records behind.
+protected static function SubTest_WritingSuccess(LocalDatabaseInstance db)
+{
+ local DBWriteTask task;
+ local DynamicArray templateArray;
+ local AssociativeArray templateObject;
+ templateObject = GetJSONSubTemplateObject();
+ templateArray = GetJSONSubTemplateArray();
+ Issue("`WriteData()` call that is supposed to succeed reports failure.");
+ task = db.WriteData(__().json.Pointer(P("")), templateObject);
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ task = db.WriteData(__().json.Pointer(P("/B")), templateObject);
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ task = db.WriteData(__().json.Pointer(P("/B/A")), templateArray);
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ // Rewrite object to test whether it will create trash
+ db.WriteData(__().json.Pointer(P("/B/A/1")), templateObject).TryCompleting();
+ db.WriteData(__().json.Pointer(P("/B/A/1")), templateArray).TryCompleting();
+ task = db.WriteData(__().json.Pointer(P("/B/A/1")), templateObject);
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ task = db.WriteData(__().json.Pointer(P("/B/A/1/")), templateArray);
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ Issue("`WriteData()` creates garbage objects inside database's package.");
+ TEST_ExpectTrue(CountRecordsInPackage("TEST_DB") == 5);
+}
+
+// Checks that the values written by `SubTest_WritingSuccess()` are read
+// back with the expected contents.
+protected static function SubTest_WritingDataCheck(LocalDatabaseInstance db)
+{
+ // Fixed `Issue()` message: this subtest verifies stored *values*;
+ // immutable/mutable type checks are done by the `_Immutable` and
+ // `_Mutable` variants below.
+ Issue("Created database does not contain expected values.");
+ // Full db read
+ ReadFromDB(db, "");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultData.GetLength() == 2);
+ TEST_ExpectTrue(
+ default.resultData.GetTextByPointer(P("/B/A/1//2")).ToPlainString()
+ == "huh");
+ TEST_ExpectTrue(
+ default.resultData.GetTextByPointer(P("/A")).ToPlainString()
+ == "simpleValue");
+ TEST_ExpectTrue(default.resultData.GetFloatByPointer(P("/B/B")) == 11.12);
+ TEST_ExpectTrue(default.resultData.GetBoolByPointer(P("/B/A/0"), false));
+ TEST_ExpectNone(default.resultData.GetItemByPointer(P("/B/A/1//1")));
+}
+
+// Checks that a default read (no `makeMutable` flag) produces immutable
+// classes: `Text`, `FloatBox`, `BoolBox`.
+protected static function SubTest_WritingDataCheck_Immutable(
+ LocalDatabaseInstance db)
+{
+ // Fixed `Issue()` message: it described the mutable (`makeMutable ==
+ // true`) case, while this subtest asserts *immutable* classes.
+ Issue("Created database does not load expected values as"
+ @ "immutable types with `makeMutable` parameter set to `false`.");
+ // Full db read
+ ReadFromDB(db, "");
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/B/A/1//2")).class
+ == class'Text');
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/A")).class
+ == class'Text');
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/B/B")).class
+ == class'FloatBox');
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/B/A/0")).class
+ == class'BoolBox');
+}
+
+// Checks that a read with `makeMutable == true` produces mutable
+// classes: `MutableText`, `FloatRef`, `BoolRef`.
+protected static function SubTest_WritingDataCheck_Mutable(
+ LocalDatabaseInstance db)
+{
+ local DBReadTask task;
+ // Fixed `Issue()` message: it claimed a plain value check, while this
+ // subtest asserts *mutable* classes for `makeMutable == true` reads.
+ Issue("Created database does not load expected values as"
+ @ "mutable types with `makeMutable` parameter set to `true`.");
+ // Full db read
+ task = db.ReadData(__().json.Pointer(P("")), true);
+ task.connect = DBReadingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/B/A/1//2")).class
+ == class'MutableText');
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/A")).class
+ == class'MutableText');
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/B/B")).class
+ == class'FloatRef');
+ TEST_ExpectTrue(
+ default.resultData.GetItemByPointer(P("/B/A/0")).class
+ == class'BoolRef');
+}
+
+// Checks that `WriteData()` fails for impossible paths and for non-object
+// root values (arrays and simple values cannot be the database root).
+protected static function SubTest_WritingFailure(LocalDatabaseInstance db)
+{
+ local DBWriteTask task;
+ local DynamicArray templateArray;
+ local AssociativeArray templateObject;
+ templateObject = GetJSONSubTemplateObject();
+ templateArray = GetJSONSubTemplateArray();
+ Issue("`WriteData()` does not report error when attempting writing data at"
+ @ "impossible path.");
+ task = db.WriteData(__().json.Pointer(P("/A/B/C/D")), templateObject);
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+
+ Issue("`WriteData()` does not report error when attempting to write"
+ @ "JSON array as the root value.");
+ task = db.WriteData(__().json.Pointer(P("")), templateArray);
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidData);
+
+ Issue("`WriteData()` does not report error when attempting to write"
+ @ "simple JSON value as the root value.");
+ task = db.WriteData(__().json.Pointer(P("")), __().box.int(14641));
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidData);
+}
+
+// Regression test: writing a sub-value inside a simple value ("/B/B" is
+// a number) must fail and must not create the value in the parent.
+protected static function SubTest_WritingIntoSimpleValues(
+ LocalDatabaseInstance db)
+{
+ local DBWriteTask task;
+ // This test is rather specific, but it was added because of the bug
+ Issue("Writing sub-value inside a simple value will cause operation to"
+ @ "report success and write new value into simple one's parent"
+ @ "structure.");
+ task = db.WriteData(__().json.Pointer(P("/B/B/new")), __().box.int(7));
+ task.connect = DBWritingHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+ ReadFromDB(db, "/B/new");
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+ TEST_ExpectNone(default.resultObject);
+}
+
+// Checks array-index writes: out-of-bounds indices extend the array
+// (padding with `none`), while negative indices must be rejected.
+// NOTE(review): "Indicies" typo kept in the name to avoid breaking
+// the caller in `Test_Recreate()`.
+protected static function SubTest_WritingArrayIndicies(LocalDatabaseInstance db)
+{
+ local DBWriteTask writeTask;
+ local DynamicArray resultArray;
+ local DynamicArray templateArray;
+ local AssociativeArray templateObject;
+ templateObject = GetJSONSubTemplateObject();
+ templateArray = GetJSONSubTemplateArray();
+ db.WriteData(__().json.Pointer(P("")), templateObject);
+ db.WriteData(__().json.Pointer(P("/A")), templateArray);
+ db.WriteData(__().json.Pointer(P("/A/100")), __().box.int(-342));
+
+ Issue("Database allows writing data into negative JSON array indices.");
+ writeTask = db.WriteData(__().json.Pointer(P("/A/-5")), __().box.int(1202));
+ writeTask.connect = DBWritingHandler;
+ writeTask.TryCompleting();
+ // Fixed: the result of the negative-index write was never asserted;
+ // a negative index is an invalid path into the array.
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+
+ Issue("Database cannot extend stored JSON array's length by assigning to"
+ @ "the out-of-bounds index.");
+ ReadFromDB(db, "/A");
+ resultArray = DynamicArray(default.resultObject);
+ TEST_ExpectTrue(resultArray.GetLength() == 101);
+ TEST_ExpectNone(resultArray.GetItem(99));
+ TEST_ExpectTrue(resultArray.GetInt(100) == -342);
+ TEST_ExpectTrue(resultArray.GetBool(0));
+}
+
+// Queues a read task and a write task for the same path before completing
+// them: the read (queued first) must see the old value ("huh"), and
+// a subsequent read must see the newly written array.
+protected static function SubTest_TaskChaining(LocalDatabaseInstance db)
+{
+ local DBWriteTask writeTask;
+ local DynamicArray templateArray;
+ local AssociativeArray templateObject;
+ templateObject = GetJSONSubTemplateObject();
+ templateArray = GetJSONSubTemplateArray();
+ db.WriteData(__().json.Pointer(P("")), templateObject);
+ db.WriteData(__().json.Pointer(P("/B")), templateArray);
+ db.ReadData(__().json.Pointer(P("/B/2"))).connect
+ = DBReadingHandler;
+ writeTask = db.WriteData(__().json.Pointer(P("/B/2")), templateArray);
+ writeTask.TryCompleting();
+
+ Issue("Chaining several tasks for the database leads to a failure.");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultObject.class == class'Text');
+ TEST_ExpectTrue(Text(default.resultObject).ToPlainString() == "huh");
+ ReadFromDB(db, "/B/2");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultObject.class == class'DynamicArray');
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetLength() == 3);
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetBool(0) );
+}
+
+// Tests removing data: sets up "TEST_DB" with the same nested structure
+// as `SubTest_WritingSuccess()`, then runs the removal subtests.
+protected static function Test_Removal()
+{
+ local LocalDatabaseInstance db;
+ local DynamicArray templateArray;
+ local AssociativeArray templateObject;
+ templateObject = GetJSONSubTemplateObject();
+ templateArray = GetJSONSubTemplateArray();
+ db = __().db.NewLocal(P("TEST_DB"));
+ db.WriteData(__().json.Pointer(P("")), templateObject);
+ db.WriteData(__().json.Pointer(P("/B")), templateObject);
+ db.WriteData(__().json.Pointer(P("/B/A")), templateArray);
+ db.WriteData(__().json.Pointer(P("/B/A/1")), templateObject);
+ db.WriteData(__().json.Pointer(P("/B/A/1/")), templateArray);
+
+ Context("Testing removing data from local database.");
+ SubTest_RemovalResult(db);
+ SubTest_RemovalCheckValuesAfter(db);
+ SubTest_RemovalRoot(db);
+ __().db.DeleteLocal(P("TEST_DB"));
+}
+
+// Checks `RemoveData()` results: missing paths fail with
+// `DBR_InvalidPointer`, existing paths are removed successfully.
+protected static function SubTest_RemovalResult(LocalDatabaseInstance db)
+{
+ local DBRemoveTask removeTask;
+ Issue("Removing data does not correctly fail when attempting to remove"
+ @ "non-existing objects.");
+ removeTask = db.RemoveData(__().json.Pointer(P("/C")));
+ removeTask.connect = DBRemoveHandler;
+ removeTask.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+ removeTask = db.RemoveData(__().json.Pointer(P("/B/A/1//")));
+ removeTask.connect = DBRemoveHandler;
+ removeTask.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidPointer);
+
+ Issue("Removing data does not succeed when it is expected to.");
+ removeTask = db.RemoveData(__().json.Pointer(P("/B/B")));
+ removeTask.connect = DBRemoveHandler;
+ removeTask.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ removeTask = db.RemoveData(__().json.Pointer(P("/B/A/1")));
+ removeTask.connect = DBRemoveHandler;
+ removeTask.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+}
+
+// Checks database contents after `SubTest_RemovalResult()` removed
+// "/B/B" and "/B/A/1", and that removal left no garbage records.
+protected static function SubTest_RemovalCheckValuesAfter(
+ LocalDatabaseInstance db)
+{
+ /* Expected data: {
+ "A": "simpleValue",
+ "B": {
+ "A": [true, "huh"]
+ }
+ }
+ */
+ Issue("`DeleteData()` leaves garbage objects behind.");
+ TEST_ExpectTrue(CountRecordsInPackage("TEST_DB") == 3);
+
+ Issue("Database values do not look like expected after data removal.");
+ ReadFromDB(db, "/B");
+ TEST_ExpectTrue(default.resultData.GetLength() == 1);
+ TEST_ExpectTrue(default.resultData.HasKey(P("A")));
+ TEST_ExpectTrue(
+ default.resultData.GetDynamicArray(P("A")).GetLength() == 2);
+ TEST_ExpectTrue(default.resultData.GetDynamicArray(P("A")).GetBool(0));
+ TEST_ExpectTrue(default.resultData.GetDynamicArray(P("A"))
+ .GetText(1).ToPlainString()
+ == "huh");
+}
+
+// Checks that removing the root succeeds and leaves an empty root
+// object (rather than deleting the database itself).
+protected static function SubTest_RemovalRoot(LocalDatabaseInstance db)
+{
+ local DBRemoveTask removeTask;
+ Issue("Removing root object from the database does not"
+ @ "work as expected.");
+ removeTask = db.RemoveData(__().json.Pointer(P("")));
+ removeTask.connect = DBRemoveHandler;
+ removeTask.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "");
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ TEST_ExpectTrue(default.resultData.GetLength() == 0);
+}
+
+protected static function Test_Increment()
+{
+ local LocalDatabaseInstance db;
+ local DynamicArray templateArray;
+ local AssociativeArray templateObject;
+ templateObject = GetJSONSubTemplateObject();
+ templateArray = GetJSONSubTemplateArray();
+ db = __().db.NewLocal(P("TEST_DB"));
+ db.WriteData(__().json.Pointer(P("")), templateObject);
+ db.WriteData(__().json.Pointer(P("/B")), templateObject);
+ db.WriteData(__().json.Pointer(P("/C")), __().box.int(-5));
+ db.WriteData(__().json.Pointer(P("/D")), __().box.bool(false));
+ db.WriteData(__().json.Pointer(P("/B/A")), templateArray);
+ db.WriteData(__().json.Pointer(P("/B/A/1")), templateObject);
+ db.WriteData(__().json.Pointer(P("/B/A/1/")), templateArray);
+ /* `db` now contains:
+ {
+ "A": "simpleValue",
+ "B": {
+ "A": [true, {
+ "A": "simpleValue",
+ "B": 11.12,
+ "": [true, null, "huh"]
+ }, "huh"],
+ "B": 11.12
+ },
+ "C": -5,
+ "D": false
+ }
+ */
+ // Constantly recreating `db` takes time, so we make test dependent
+ // on each other.
+ // Generally speaking this is not great, but we cannot run them in
+ // parallel anyway.
+ Context("Testing incrementing data inside local database.");
+ SubTest_IncrementNull(db);
+ SubTest_IncrementBool(db);
+ SubTest_IncrementNumeric(db);
+ SubTest_IncrementString(db);
+ SubTest_IncrementObject(db);
+ SubTest_IncrementArray(db);
+ SubTest_IncrementRewriteBool(db, templateArray, templateObject);
+ SubTest_IncrementRewriteNumeric(db, templateArray, templateObject);
+ SubTest_IncrementRewriteString(db, templateArray, templateObject);
+ SubTest_IncrementRewriteObject(db, templateArray, templateObject);
+ SubTest_IncrementRewriteArray(db, templateArray, templateObject);
+ SubTest_IncrementMissing(db);
+ Issue("Incrementing database values has created garbage objects.");
+    // 5 initial records + 1 made for a new array in `SubTest_IncrementNull()` = 6 total
+ TEST_ExpectTrue(CountRecordsInPackage("TEST_DB") == 6);
+ __().db.DeleteLocal(P("TEST_DB"));
+}
+
+protected static function SubTest_IncrementNull(LocalDatabaseInstance db)
+{
+ local DBIncrementTask task;
+ Issue("JSON null values are not incremented properly.");
+ task = db.IncrementData(__().json.Pointer(P("/B/A/1//1")), none);
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/B/A/1/");
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetLength() == 3);
+ TEST_ExpectNone(DynamicArray(default.resultObject).GetItem(1));
+ task = db.IncrementData(
+ __().json.Pointer(P("/B/A/1//1")), GetJSONSubTemplateArray());
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ task = db.IncrementData(
+ __().json.Pointer(P("/B/A/1//1/1")), __().box.int(2));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/B/A/1/");
+ TEST_ExpectTrue(default.resultObject.class == class'DynamicArray');
+ TEST_ExpectNotNone(DynamicArray(default.resultObject).GetDynamicArray(1));
+ TEST_ExpectTrue(
+ DynamicArray(default.resultObject).GetDynamicArray(1).GetInt(1) == 2);
+}
+
+protected static function SubTest_IncrementBool(LocalDatabaseInstance db)
+{
+ local DBIncrementTask task;
+ Issue("JSON's boolean values are not incremented properly.");
+ task = db.IncrementData(__().json.Pointer(P("/D")), __().box.bool(false));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/D");
+ TEST_ExpectNotNone(BoolBox(default.resultObject));
+ TEST_ExpectFalse(BoolBox(default.resultObject).Get());
+ task = db.IncrementData(__().json.Pointer(P("/D")), __().box.bool(true));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/D");
+ TEST_ExpectTrue(BoolBox(default.resultObject).Get());
+ task = db.IncrementData(__().json.Pointer(P("/D")), __().box.bool(false));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/D");
+ TEST_ExpectTrue(BoolBox(default.resultObject).Get());
+}
+
+protected static function SubTest_IncrementNumeric(LocalDatabaseInstance db)
+{
+ local DBIncrementTask task;
+ Issue("JSON's numeric values are not incremented properly.");
+ task = db.IncrementData(__().json.Pointer(P("/C")), __().box.int(10));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/C");
+ TEST_ExpectTrue(IntBox(default.resultObject).Get() == 5);
+ task = db.IncrementData(__().json.Pointer(P("/C")), __().box.float(0.5));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/C");
+ TEST_ExpectTrue(FloatBox(default.resultObject).Get() == 5.5);
+ task = db.IncrementData(__().json.Pointer(P("/B/B")), __().box.int(-1));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/B/B");
+ TEST_ExpectTrue(FloatBox(default.resultObject).Get() == 10.12);
+}
+
+protected static function SubTest_IncrementString(LocalDatabaseInstance db)
+{
+ local DBIncrementTask task;
+ Issue("JSON's string values are not incremented properly.");
+ task = db.IncrementData(__().json.Pointer(P("/A")),
+ __().text.FromString(""));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/A");
+ TEST_ExpectTrue(
+ Text(default.resultObject).ToPlainString() == "simpleValue");
+ task = db.IncrementData(__().json.Pointer(P("/A")),
+ __().text.FromString("!"));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/A");
+ TEST_ExpectTrue(
+ Text(default.resultObject).ToPlainString() == "simpleValue!");
+ task = db.IncrementData(__().json.Pointer(P("/A")),
+ __().text.FromStringM("?"));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/A");
+ TEST_ExpectTrue(
+ Text(default.resultObject).ToPlainString() == "simpleValue!?");
+}
+
+protected static function AssociativeArray GetHelperObject()
+{
+ local AssociativeArray result;
+ result = __().collections.EmptyAssociativeArray();
+ result.SetItem(P("A"), __().text.FromString("complexString"));
+ result.SetItem(P("E"), __().text.FromString("str"));
+ result.SetItem(P("F"), __().ref.float(45));
+ return result;
+}
+
+protected static function SubTest_IncrementObject(LocalDatabaseInstance db)
+{
+ local DBIncrementTask task;
+ Issue("JSON objects are not incremented properly.");
+ task = db.IncrementData(__().json.Pointer(P("")),
+ __().collections.EmptyAssociativeArray());
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "");
+ TEST_ExpectNotNone(default.resultData);
+ TEST_ExpectTrue(default.resultData.GetLength() == 4);
+ // Check that value was not overwritten
+ TEST_ExpectTrue(
+ default.resultData.GetText(P("A")).ToPlainString() == "simpleValue!?");
+ task = db.IncrementData(__().json.Pointer(P("")),
+ GetHelperObject());
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "");
+ TEST_ExpectNotNone(default.resultData);
+ TEST_ExpectTrue(default.resultData.GetLength() == 6);
+ TEST_ExpectTrue(
+ default.resultData.GetText(P("E")).ToPlainString() == "str");
+ TEST_ExpectTrue(default.resultData.GetFloat(P("F")) == 45);
+ TEST_ExpectTrue(
+ default.resultData.GetItem(P("B")).class == class'AssociativeArray');
+ Issue("Incrementing JSON objects can overwrite existing data.");
+ TEST_ExpectTrue(
+ default.resultData.GetText(P("A")).ToPlainString() == "simpleValue!?");
+}
+
+protected static function DynamicArray GetHelperArray()
+{
+ local DynamicArray result;
+ result = __().collections.EmptyDynamicArray();
+ result.AddItem(__().text.FromString("complexString"));
+ result.AddItem(__().ref.float(45));
+ result.AddItem(none);
+ result.AddItem(__().ref.bool(true));
+ return result;
+}
+
+protected static function SubTest_IncrementArray(LocalDatabaseInstance db)
+{
+ local DBIncrementTask task;
+ Issue("JSON arrays are not incremented properly.");
+ task = db.IncrementData(__().json.Pointer(P("/B/A")),
+ __().collections.EmptyDynamicArray());
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/B/A");
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetLength() == 3);
+ TEST_ExpectTrue(
+ DynamicArray(default.resultObject).GetText(2).ToPlainString() == "huh");
+ task = db.IncrementData(__().json.Pointer(P("/B/A")),
+ GetHelperArray());
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ ReadFromDB(db, "/B/A");
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetLength() == 7);
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetBool(0));
+ TEST_ExpectNotNone(
+ DynamicArray(default.resultObject).GetAssociativeArray(1));
+ TEST_ExpectTrue(
+ DynamicArray(default.resultObject).GetText(2).ToPlainString() == "huh");
+ TEST_ExpectTrue(
+ DynamicArray(default.resultObject).GetText(3).ToPlainString()
+ == "complexString");
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetFloat(4) == 45);
+ TEST_ExpectNone(DynamicArray(default.resultObject).GetItem(5));
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetBool(6));
+}
+
+protected static function CheckValuesAfterIncrement(AssociativeArray root)
+{
+ local DynamicArray jsonArray;
+ TEST_ExpectTrue(root.GetBoolByPointer(P("/D")));
+ TEST_ExpectTrue(root.GetFloatByPointer(P("/B/B")) == 10.12);
+ TEST_ExpectTrue(
+ root.GetTextByPointer(P("/A")).ToPlainString()
+ == "simpleValue!?");
+ jsonArray = root.GetDynamicArrayByPointer(P("/B/A"));
+ TEST_ExpectTrue(jsonArray.GetBool(0));
+ TEST_ExpectNotNone(jsonArray.GetAssociativeArray(1));
+ TEST_ExpectTrue(jsonArray.GetText(2).ToPlainString() == "huh");
+ TEST_ExpectTrue(jsonArray.GetText(3).ToPlainString() == "complexString");
+ TEST_ExpectTrue(jsonArray.GetFloat(4) == 45);
+ TEST_ExpectNone(jsonArray.GetItem(5));
+ TEST_ExpectTrue(jsonArray.GetBool(6));
+ // Test root itself
+ TEST_ExpectTrue(root.GetLength() == 6);
+ TEST_ExpectTrue(root.GetText(P("A")).ToPlainString() == "simpleValue!?");
+ TEST_ExpectTrue(root.GetItem(P("B")).class == class'AssociativeArray');
+ TEST_ExpectTrue(root.GetFloat(P("C")) == 5.5);
+ TEST_ExpectTrue(root.GetBool(P("D")));
+ TEST_ExpectTrue(root.GetText(P("E")).ToPlainString() == "str");
+ TEST_ExpectTrue(root.GetFloat(P("F")) == 45);
+}
+
+protected static function IncrementExpectingFail(
+ LocalDatabaseInstance db,
+ string pointer,
+ AcediaObject value)
+{
+ local Text pointerAsText;
+ local JSONPointer jsonPointer;
+ local DBIncrementTask task;
+ pointerAsText = __().text.FromString(pointer);
+ jsonPointer = __().json.Pointer(pointerAsText);
+ task = db.IncrementData(__().json.Pointer(pointerAsText), value);
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_InvalidData);
+ jsonPointer.FreeSelf();
+ pointerAsText.FreeSelf();
+}
+
+protected static function SubTest_IncrementRewriteBool(
+ LocalDatabaseInstance db,
+ DynamicArray templateArray,
+ AssociativeArray templateObject)
+{
+ Issue("JSON boolean values are rewritten by non-boolean values.");
+ IncrementExpectingFail(db, "/D", none);
+ IncrementExpectingFail(db, "/D", db);
+ IncrementExpectingFail(db, "/D", __().box.int(23));
+ IncrementExpectingFail(db, "/D", __().ref.float(-12));
+ IncrementExpectingFail(db, "/D", __().text.FromStringM("Random!"));
+ IncrementExpectingFail(db, "/D", templateArray);
+ IncrementExpectingFail(db, "/D", templateObject);
+ ReadFromDB(db, "");
+ CheckValuesAfterIncrement(default.resultData);
+}
+
+protected static function SubTest_IncrementRewriteNumeric(
+ LocalDatabaseInstance db,
+ DynamicArray templateArray,
+ AssociativeArray templateObject)
+{
+ Issue("JSON numeric values are rewritten by non-numeric values.");
+ IncrementExpectingFail(db, "/B/B", none);
+ IncrementExpectingFail(db, "/B/B", db);
+ IncrementExpectingFail(db, "/B/B", __().box.bool(true));
+ IncrementExpectingFail(db, "/B/B", __().text.FromStringM("Random!"));
+ IncrementExpectingFail(db, "/B/B", templateArray);
+ IncrementExpectingFail(db, "/B/B", templateObject);
+ ReadFromDB(db, "");
+ CheckValuesAfterIncrement(default.resultData);
+}
+
+protected static function SubTest_IncrementRewriteString(
+ LocalDatabaseInstance db,
+ DynamicArray templateArray,
+ AssociativeArray templateObject)
+{
+ Issue("JSON string values are rewritten by non-`Text`/`MutableText`"
+ @ "values.");
+ IncrementExpectingFail(db, "/A", none);
+ IncrementExpectingFail(db, "/A", db);
+ IncrementExpectingFail(db, "/A", __().box.bool(true));
+ IncrementExpectingFail(db, "/A", __().box.int(23));
+ IncrementExpectingFail(db, "/A", __().ref.float(-12));
+ IncrementExpectingFail(db, "/A", templateArray);
+ IncrementExpectingFail(db, "/A", templateObject);
+ ReadFromDB(db, "");
+ CheckValuesAfterIncrement(default.resultData);
+}
+
+protected static function SubTest_IncrementRewriteObject(
+ LocalDatabaseInstance db,
+ DynamicArray templateArray,
+ AssociativeArray templateObject)
+{
+ Issue("JSON objects are rewritten by non-`AssociativeArray` values.");
+ IncrementExpectingFail(db, "", none);
+ IncrementExpectingFail(db, "", db);
+ IncrementExpectingFail(db, "", __().box.bool(true));
+ IncrementExpectingFail(db, "", __().box.int(23));
+ IncrementExpectingFail(db, "", __().ref.float(-12));
+ IncrementExpectingFail(db, "", __().text.FromStringM("Random!"));
+ IncrementExpectingFail(db, "", templateArray);
+ ReadFromDB(db, "");
+ CheckValuesAfterIncrement(default.resultData);
+}
+
+protected static function SubTest_IncrementRewriteArray(
+ LocalDatabaseInstance db,
+ DynamicArray templateArray,
+ AssociativeArray templateObject)
+{
+ Issue("JSON arrays are rewritten by non-`DynamicArray` values.");
+ IncrementExpectingFail(db, "/B/A", none);
+ IncrementExpectingFail(db, "/B/A", db);
+ IncrementExpectingFail(db, "/B/A", __().box.bool(true));
+ IncrementExpectingFail(db, "/B/A", __().box.int(23));
+ IncrementExpectingFail(db, "/B/A", __().ref.float(-12));
+ IncrementExpectingFail(db, "/B/A", __().text.FromStringM("Random!"));
+ IncrementExpectingFail(db, "/B/A", templateObject);
+ ReadFromDB(db, "");
+ CheckValuesAfterIncrement(default.resultData);
+}
+
+protected static function SubTest_IncrementMissing(LocalDatabaseInstance db)
+{
+ local DBIncrementTask task;
+ Issue("New values are created in database after incrementing with path"
+ @ "pointing to non-existing value.");
+ task = db.IncrementData(__().json.Pointer(P("/L")), __().box.int(345));
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ task = db.IncrementData(__().json.Pointer(P("/B/A/1//10")), none);
+ task.connect = DBIncrementHandler;
+ task.TryCompleting();
+ TEST_ExpectTrue(default.resultType == DBR_Success);
+ db.CheckDataType(__().json.Pointer(P("/L"))).connect = DBCheckHandler;
+ ReadFromDB(db, "/B/A/1/");
+ TEST_ExpectTrue(default.resultDataType == JSON_Number);
+ TEST_ExpectTrue(DynamicArray(default.resultObject).GetLength() == 11);
+}
+
+defaultproperties
+{
+ caseGroup = "Database"
+ caseName = "Local database"
+}
\ No newline at end of file
diff --git a/sources/Global.uc b/sources/Global.uc
index 9a7de61..bf1a4e4 100644
--- a/sources/Global.uc
+++ b/sources/Global.uc
@@ -39,6 +39,7 @@ var public ColorAPI color;
var public UserAPI users;
var public PlayersAPI players;
var public JSONAPI json;
+var public DBAPI db;
var public KFFrontend kf;
@@ -72,6 +73,7 @@ protected function Initialize()
users = UserAPI(memory.Allocate(class'UserAPI'));
players = PlayersAPI(memory.Allocate(class'PlayersAPI'));
json = JSONAPI(memory.Allocate(class'JSONAPI'));
+ db = DBAPI(memory.Allocate(class'DBAPI'));
kf = KFFrontend(memory.Allocate(class'KF1_Frontend'));
json.StaticConstructor();
}
\ No newline at end of file
diff --git a/sources/Manifest.uc b/sources/Manifest.uc
index bb3b074..f0353db 100644
--- a/sources/Manifest.uc
+++ b/sources/Manifest.uc
@@ -54,4 +54,5 @@ defaultproperties
testCases(19) = class'TEST_Command'
testCases(20) = class'TEST_CommandDataBuilder'
testCases(21) = class'TEST_LogMessage'
+ testCases(22) = class'TEST_LocalDatabase'
}
\ No newline at end of file