Move files around

This commit is contained in:
Natsumi
2025-09-30 12:26:19 +13:00
parent d1b961aaf2
commit cc89d5d8c9
31 changed files with 83 additions and 107 deletions
+16 -13
View File
@@ -26,7 +26,7 @@ namespace VRCX
var userDataDir = Path.Join(Program.AppDataDirectory, "userdata");
// delete userdata if Cef version has been downgraded, fixes VRCX not opening after a downgrade
CheckCefVersion(userDataDir);
var cefSettings = new CefSettings
{
RootCachePath = userDataDir,
@@ -51,7 +51,7 @@ namespace VRCX
),
IsLocal = true
});
// cefSettings.CefCommandLineArgs.Add("ignore-certificate-errors");
// cefSettings.CefCommandLineArgs.Add("disable-plugins");
cefSettings.CefCommandLineArgs.Add("disable-spell-checking");
@@ -66,7 +66,7 @@ namespace VRCX
{
cefSettings.CefCommandLineArgs["proxy-server"] = WebApi.ProxyUrl;
}
if (VRCXStorage.Instance.Get("VRCX_DisableGpuAcceleration") == "true")
{
cefSettings.CefCommandLineArgs.Add("disable-gpu");
@@ -83,9 +83,9 @@ namespace VRCX
var extensionsPath = Path.Join(Program.AppDataDirectory, "extensions");
Directory.CreateDirectory(extensionsPath);
// extract Vue Devtools
var vueDevtoolsCrxPath = Path.Join(Program.BaseDirectory, @"..\..\build-tools\Vue-js-devtools.crx");
var vueDevtoolsCrxPath = Path.Join(Program.BaseDirectory, @"..\..\Dotnet\build-tools\Vue-js-devtools.crx");
if (File.Exists(vueDevtoolsCrxPath))
{
var vueDevtoolsPath = Path.Join(extensionsPath, "Vue-js-devtools");
@@ -93,7 +93,7 @@ namespace VRCX
{
if (Directory.Exists(vueDevtoolsPath))
Directory.Delete(vueDevtoolsPath, true);
Directory.CreateDirectory(vueDevtoolsPath);
ZipFile.ExtractToDirectory(vueDevtoolsCrxPath, vueDevtoolsPath);
}
@@ -102,7 +102,7 @@ namespace VRCX
logger.Error(ex, "Failed to extract Vue Devtools");
}
}
// load extensions
var folders = Directory.GetDirectories(extensionsPath);
foreach (var folder in folders)
@@ -110,10 +110,10 @@ namespace VRCX
cefSettings.CefCommandLineArgs.Add("load-extension", folder);
}
}
CefSharpSettings.ShutdownOnExit = false;
CefSharpSettings.ConcurrentTaskExecution = true;
if (Cef.Initialize(cefSettings, false) == false)
{
logger.Error("Cef failed to initialize");
@@ -153,15 +153,18 @@ namespace VRCX
File.WriteAllBytes(_lastCefVersionPath, Encoding.UTF8.GetBytes(currentVersion));
logger.Info("Cef version: {0}", currentVersion);
}
private static void DeleteUserData(string userDataDir)
{
if (!Directory.Exists(userDataDir))
return;
try {
try
{
Directory.Delete(userDataDir, true);
} catch (Exception ex) {
}
catch (Exception ex)
{
logger.Error(ex, "Failed to delete userdata directory: {0}", userDataDir);
}
}
+10
View File
@@ -0,0 +1,10 @@
namespace DBMerger
{
    /// <summary>
    /// Immutable bundle of command-line settings for a single merge run.
    /// </summary>
    public class Config
    {
        public Config(string newDBPath, string oldDBPath, bool debug, bool importConfig)
        {
            NewDBPath = newDBPath;
            OldDBPath = oldDBPath;
            Debug = debug;
            ImportConfig = importConfig;
        }

        /// <summary>Path of the database being merged into.</summary>
        public string NewDBPath { get; }

        /// <summary>Path of the database being merged from.</summary>
        public string OldDBPath { get; }

        /// <summary>Whether verbose/trace output was requested.</summary>
        public bool Debug { get; }

        /// <summary>Whether the old database's config table should be imported.</summary>
        public bool ImportConfig { get; }
    }
}
+45
View File
@@ -0,0 +1,45 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputPath>..\build\DBMerger\</OutputPath>
</PropertyGroup>
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0</TargetFramework>
<Configurations>Debug;Release</Configurations>
<Platforms>x64</Platforms>
<PlatformTarget>x64</PlatformTarget>
<ApplicationRevision>0</ApplicationRevision>
<ApplicationVersion>1.0.0.%2a</ApplicationVersion>
<AssemblyTitle>DBMerger</AssemblyTitle>
<Product>DBMerger</Product>
<Copyright>vrcx-team, loukylor, pypy, natsumi</Copyright>
<AppendTargetFrameworkToOutputPath>false</AppendTargetFrameworkToOutputPath>
<AppendRuntimeIdentifierToOutputPath>false</AppendRuntimeIdentifierToOutputPath>
<Title>DBMerger</Title>
<Description>DBMerger - Merges VRCX sqlite databases</Description>
<RepositoryUrl>https://github.com/vrcx-team/VRCX</RepositoryUrl>
<ResourceLanguages>en</ResourceLanguages>
<SatelliteResourceLanguages>en-US;en</SatelliteResourceLanguages>
<EnableWindowsTargeting>true</EnableWindowsTargeting>
<!-- Fix fail fast exception -->
<CETCompat>false</CETCompat>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
<DebugType>full</DebugType>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
<DebugType>pdbonly</DebugType>
</PropertyGroup>
<PropertyGroup>
<RuntimeIdentifier Condition="'$(RuntimeIdentifier)' == ''">win-x64</RuntimeIdentifier>
<SelfContained Condition="'$(SelfContained)' == ''">false</SelfContained>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="NLog" Version="6.0.3" />
<PackageReference Include="sqlite-net-pcl" Version="1.9.172" />
<PackageReference Include="System.CommandLine" Version="2.0.0-beta4.22272.1" />
</ItemGroup>
</Project>
+634
View File
@@ -0,0 +1,634 @@
using NLog;
using SQLite;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.RegularExpressions;
namespace DBMerger
{
    /// <summary>
    /// Merges the contents of an old VRCX sqlite database into a new one.
    /// <paramref name="dbConn"/> is a connection opened on the old database
    /// with the new database ATTACHed to it; <paramref name="oldDBName"/> and
    /// <paramref name="newDBName"/> are the schema names of each side.
    /// </summary>
    public partial class Merger(SQLiteConnection dbConn, string oldDBName, string newDBName, Config config)
    {
        // C#'s iso date string has millionths of a second but the db stores
        // dates with only thousandths of a second, so define our own format
        private const string JSDateTimeFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'.'fffzzz";
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();
        // Matches table names belonging to a specific user (see UserIDRegex below)
        private static readonly Regex userIDRegex = UserIDRegex();
        // This list of table names will be slowly emptied by specific
        // handlers until only unrecognized tables are left
        private List<string> unMergedTables;
/// <summary>
/// Runs the full merge inside a single transaction, rolling everything back
/// if any step throws, then VACUUMs the new database to reclaim space.
/// </summary>
public void Merge()
{
    logger.Info("Starting merge process...");
    logger.Debug("Creating transaction for databases");
    dbConn.BeginTransaction();
    try
    {
        MergeInternal();
    }
    catch
    {
        // Rethrow after rolling back so the caller still sees the original error
        logger.Warn("Encoutered error! Rolling back changes to databases");
        dbConn.Rollback();
        throw;
    }
    logger.Debug("Committing changes to database");
    dbConn.Commit();
    // VACUUM cannot run inside a transaction, hence after Commit
    logger.Info("Optimizing database size...");
    dbConn.Execute("VACUUM new_db;");
    logger.Info("Merge completed without any major issues!");
}
/// <summary>
/// Performs the actual merge: copies over tables missing from the new db,
/// then dispatches each known table family to its specific merge handler.
/// Any table still left in <see cref="unMergedTables"/> afterwards is
/// reported as unmerged.
/// </summary>
private void MergeInternal()
{
    unMergedTables = dbConn.QueryScalars<string>($"SELECT name FROM {oldDBName}.sqlite_schema WHERE type='table';");
    // Holds sensitive information. Burn it with fire so no sensitive
    // data gets leaked
    unMergedTables.Remove("cookies");
    // Get any tables in the old db that aren't in the new db
    logger.Info("Creating tables not present on new database that are present on old database...");
    var newDBTables = dbConn.QueryScalars<string>($"SELECT name FROM {newDBName}.sqlite_schema WHERE type='table';").ToHashSet();
    for (int i = 0; i < unMergedTables.Count; i++)
    {
        var table = unMergedTables[i];
        if (newDBTables.Contains(table) || table == "configs")
        {
            continue;
        }
        unMergedTables.RemoveAt(i);
        // Compensate the index for the removal on EVERY removal path.
        // Previously this only happened after the sqlite_ check below, so
        // skipping a reserved table also silently skipped the next table.
        i--;
        if (table.StartsWith("sqlite_"))
        {
            // Skip sqlite reserved tables
            logger.Debug($"Skipping sqlite reserved table: {table}");
            continue;
        }
        // Then just tack them on
        // Get command to create the table
        logger.Info($"Adding table: {table}...");
        var createQuery = dbConn.ExecuteScalar<string>($"SELECT sql FROM {oldDBName}.sqlite_schema WHERE type='table' AND name=?;", table);
        // Insert name of new database into create table query.
        // Index 13 is the length of "CREATE TABLE "; NOTE(review): this
        // assumes the stored sql never uses IF NOT EXISTS or quoted names.
        createQuery = createQuery[..13] + newDBName + "." + createQuery[13..];
        logger.Debug($"Using command: {createQuery}");
        dbConn.Execute(createQuery);
        // Then add the rows
        logger.Debug("Addings rows to table");
        dbConn.Execute($"INSERT INTO {newDBName}.{table} SELECT * FROM {oldDBName}.{table};");
    }
    logger.Info("Merging memos into new database...");
    MergeMemos();
    logger.Info("Merging favorites into new database...");
    MergeFavorites();
    logger.Info("Merging avatar and world cache into new database...");
    MergeCaches();
    logger.Info("Merging gamelog into new database...");
    MergeGamelog();
    logger.Info("Merging user feed tables into new database...");
    MergeUsers();
    if (config.ImportConfig)
    {
        logger.Info("Importing config from old database...");
        ImportConfig();
    }
    else
    {
        unMergedTables.Remove("configs");
    }
    ResetDatabaseVersion();
    foreach (var table in unMergedTables)
    {
        logger.Warn("Found unmerged table: " + table);
    }
}
/// <summary>
/// Merges memo tables (any table ending in "memos"), matching rows on the
/// memo owner id (column 0). On conflict the newer edited_at wins and the
/// memo bodies are concatenated unless one already contains the other.
/// </summary>
private void MergeMemos()
{
    MergeTable(
        table => table.EndsWith("memos"),
        [0],
        (old, existing) =>
        {
            if (existing == null)
            {
                logger.Trace("Inserting new memo");
                return old;
            }
            logger.Trace("Replacing memo");
            // Pick newer edited_at time
            var oldDateTime = DateTime.Parse((string)old[1]);
            var newDateTime = DateTime.Parse((string)existing[1]);
            old[1] = oldDateTime > newDateTime ? oldDateTime : newDateTime;
            old[1] = ((DateTime)old[1]).ToString(JSDateTimeFormat);
            // Don't concatenate memos if they're the exact same or
            // the new memo ends with the old one (suggesting import
            // has already been run).
            // Use Equals rather than ==: the rows are object[], so ==
            // would compare boxed string references, not their values.
            old[2] = Equals(existing[2], old[2]) || ((string)existing[2]).EndsWith((string)old[2])
                ? existing[2] : old[2] + "\n" + existing[2];
            return old;
        }
    );
}
/// <summary>
/// Merges favorite_* tables, matching rows on columns 2 and 3 (the favorite
/// identity). On conflict the older created_at time is kept.
/// </summary>
private void MergeFavorites()
{
    MergeTable(
        table => table.StartsWith("favorite_"),
        [2, 3],
        (old, existing) =>
        {
            if (existing == null)
            {
                logger.Trace("Inserting new favorite");
                // Let sqlite generate new pk
                old[0] = null;
                return old;
            }
            logger.Trace("Replacing favorite");
            // Favorites are the same, so just pick the older create
            // time and add it
            var oldDateTime = DateTime.Parse((string)old[1]);
            var newDateTime = DateTime.Parse((string)existing[1]);
            var updatedDateTime = oldDateTime < newDateTime ? oldDateTime : newDateTime;
            // Write the value back as a formatted string: every other
            // handler stores created_at as ISO text, and binding a boxed
            // DateTime would let sqlite-net serialize it in its own
            // storage format (ticks by default), corrupting the column.
            existing[1] = updatedDateTime.ToString(JSDateTimeFormat);
            return existing;
        }
    );
}
/// <summary>
/// Merges the cache_* tables (avatar/world cache), matching rows on the
/// primary key (column 0). Whichever side has the newer timestamp in
/// column 1 wins.
/// </summary>
private void MergeCaches()
{
    MergeTable(
        table => table.StartsWith("cache_"),
        [0],
        (oldRow, existingRow) =>
        {
            if (existingRow == null)
            {
                logger.Trace("Inserting new cache entry");
                return oldRow;
            }
            logger.Trace("Replacing cache entry");
            // Both rows share the same pk, so keep the fresher entry.
            var oldStamp = DateTime.Parse((string)oldRow[1]);
            var existingStamp = DateTime.Parse((string)existingRow[1]);
            if (oldStamp > existingStamp)
            {
                return oldRow;
            }
            return existingRow;
        }
    );
}
/// <summary>
/// Merges the gamelog_* tables. Rows are deduplicated by created_at plus
/// the first data column (join/leave uses column 3 instead), duplicates are
/// logged, and the merged tables are re-sorted by created_at afterwards.
/// </summary>
private void MergeGamelog()
{
    // While this could be handled through a single query, I would like to
    // log anything determined to be a duplicate in case this doesn't
    // work
    MergeTable(
        table => table.StartsWith("gamelog_") && table != "gamelog_join_leave",
        // These tables can be merged just fine by checking the created
        // date and first col of information because we just need to
        // know if 2 rows are the same
        [1, 2],
        // Literally just place back in what's already there
        // created_at times should be pretty consistent, so we can trust
        // that no duplicates will be created
        (old, existing) =>
        {
            if (existing != null)
            {
                logger.Trace("Determined these rows to be the same: ");
                logger.Trace(string.Join(", ", old));
                logger.Trace(string.Join(", ", existing));
            }
            // Null pk so sqlite assigns a fresh one on insert
            old[0] = null;
            // Return existing over old so we know that pk is unique
            return existing ?? old;
        },
        table => SortTable(dbConn, newDBName, table, 1)
    );
    // join/leave rows carry the same user at both ends, so match on the
    // created_at (col 1) and user id (col 3) instead
    MergeTable(
        table => table == "gamelog_join_leave",
        [1, 3],
        (old, existing) =>
        {
            if (existing != null)
            {
                logger.Trace("Determined these rows to be the same: ");
                logger.Trace(string.Join(", ", old));
                logger.Trace(string.Join(", ", existing));
            }
            old[0] = null;
            return existing ?? old;
        },
        table => SortTable(dbConn, newDBName, table, 1)
    );
}
/// <summary>
/// Merges all per-user feed tables. Notification/moderation and avatar
/// history tables are merged row-by-row on their primary key; all other
/// user tables are either concatenated (when their time ranges don't
/// overlap) or deferred to <see cref="MergeUsersOverlap"/> for an
/// interactive cutoff decision.
/// </summary>
private void MergeUsers()
{
    MergeTable(
        table => userIDRegex.IsMatch(table)
            && !table.EndsWith("_avatar_history")
            && (table.EndsWith("_notifications")
                || table.EndsWith("_moderation")),
        [0],
        (old, existing) =>
        {
            if (existing == null)
            {
                logger.Trace("Inserting new feed entry");
                return old;
            }
            logger.Trace("Replacing feed entry");
            // old and existing have the same pk, so pick the newer
            // cache entry
            var oldDateTime = DateTime.Parse((string)old[1]);
            var newDateTime = DateTime.Parse((string)existing[1]);
            return oldDateTime > newDateTime ? old : existing;
        }
    );
    MergeTable(
        table => userIDRegex.IsMatch(table) && table.EndsWith("_avatar_history"),
        [0],
        (old, existing) =>
        {
            if (existing == null)
            {
                logger.Trace("Inserting new avatar feed entry");
                return old;
            }
            logger.Trace("Replacing avatar feed entry");
            // old and existing have the same pk, so pick the newer
            // cache entry
            var oldDateTime = DateTime.Parse((string)old[1]);
            var newDateTime = DateTime.Parse((string)existing[1]);
            // Rows without a `time` column (fewer than 3 columns) have no
            // usage time to combine, so just keep the newer row.
            // (Was `existing.Length < 2`, which would still index
            // existing[2] below on a 2-column row and throw.)
            if (existing.Length < 3)
            {
                return oldDateTime > newDateTime ? old : existing;
            }
            var oldAvatarTime = old.Length >= 3 && old[2] != null ? (int)old[2] : 0;
            var newAvatarTime = existing[2] != null ? (int)existing[2] : 0;
            // NOTE(review): this overwrites `existing` with `old` when old
            // is the OLDER row — confirm the intended direction.
            if (oldDateTime <= newDateTime)
            {
                old.CopyTo(existing, 0);
            }
            existing[2] = oldAvatarTime + newAvatarTime;
            logger.Trace(
                "Combined avatar time: {} + {} = {}",
                oldAvatarTime, newAvatarTime, oldAvatarTime + newAvatarTime
            );
            return existing;
        }
    );
    var overlappingTables = new List<string>();
    DateTime? oldestInNewTables = null;
    DateTime? newestInOldTables = null;
    for (int i = 0; i < unMergedTables.Count; i++)
    {
        // All other feed tables shouldve been merged, so just by
        // matching user we should get all the rest of the user tables
        string table = unMergedTables[i];
        if (!userIDRegex.IsMatch(table))
        {
            continue;
        }
        unMergedTables.RemoveAt(i);
        i--;
        // Skip friend log current for obvious reasons
        if (table.EndsWith("_friend_log_current"))
        {
            continue;
        }
        logger.Debug($"Merging table `{table}` into new database");
        List<string> colNames = GetTableColumnNames(dbConn, oldDBName, table);
        // Find min value of new db table and max value of old db table
        var oldestInNew = dbConn.ExecuteScalar<string>($"SELECT MIN({colNames[1]}) FROM {newDBName}.{table};");
        DateTime? oldestInNewDT = oldestInNew != null ? DateTime.Parse(oldestInNew) : null;
        var newestInOld = dbConn.ExecuteScalar<string>($"SELECT MAX({colNames[1]}) FROM {oldDBName}.{table};");
        DateTime? newestInOldDT = newestInOld != null ? DateTime.Parse(newestInOld) : null;
        // If either tables are empty or the oldest value in the new
        // table is still newer than the newest value in the old
        // (the tables don't overlap in time at all)
        if (newestInOldDT == null || oldestInNewDT == null || oldestInNewDT > newestInOldDT)
        {
            logger.Debug($"User tables {table} has no overlap");
            // Then we can just combine them since there is no data
            // overlap
            var columnsClause = string.Join(", ", colNames.Skip(1));
            dbConn.Execute(
                $"INSERT INTO {newDBName}.{table}({columnsClause})" +
                $"SELECT {columnsClause} FROM {oldDBName}.{table};"
            );
            SortTable(dbConn, newDBName, table, 1);
        }
        else
        {
            // I don't think people will actually care to choose a date
            // for every single overlapping database
            // Although it could easily be argued that people would want
            // per user overlap control
            logger.Debug($"User tables {table} has overlap");
            if (oldestInNewTables == null || oldestInNewDT < oldestInNewTables)
            {
                oldestInNewTables = oldestInNewDT;
            }
            if (newestInOldTables == null || newestInOldDT > newestInOldTables)
            {
                newestInOldTables = newestInOldDT;
            }
            overlappingTables.Add(table);
        }
    }
    if (overlappingTables.Count > 0)
    {
        // The datetimes should not be null here since there are
        // overlapping tables
        MergeUsersOverlap(overlappingTables, oldestInNewTables.Value, newestInOldTables.Value);
    }
}
/// <summary>
/// Interactively resolves user feed tables whose time ranges overlap.
/// Prompts on the console for `keep old`, `keep new`, or a cutoff datetime;
/// data on the wrong side of the cutoff is discarded from each database
/// before the remainder is spliced together.
/// </summary>
/// <param name="tables">The user tables whose time ranges overlap.</param>
/// <param name="oldestInNew">Earliest timestamp across the new db's overlapping tables.</param>
/// <param name="newestInOld">Latest timestamp across the old db's overlapping tables.</param>
private void MergeUsersOverlap(List<string> tables, DateTime oldestInNew, DateTime newestInOld)
{
    PrintOverlapWarning(tables, oldestInNew, newestInOld);
    string userInput = null;
    bool datetimeParsed;
    DateTime cutoffTime;
    // Keep prompting until the input is a recognized keyword or a datetime
    do
    {
        if (userInput != null)
        {
            logger.Error("Unrecognized input: " + userInput);
        }
        userInput = Console.ReadLine();
        datetimeParsed = DateTime.TryParseExact(
            userInput, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.None, out cutoffTime
        );
    }
    while (userInput != "keep old" && userInput != "keep new" && !datetimeParsed);
    // If user wants to keep new one then do nothing
    if (userInput == "keep new")
    {
        logger.Info("Keeping new");
        return;
    }
    if (userInput == "keep old")
    {
        logger.Info("Keeping old");
        // For old we just delete all rows from new and reinsert rows
        // from old
        foreach (var table in tables)
        {
            logger.Debug($"Deleting all rows in new database's {table}");
            dbConn.Execute($"DELETE FROM {newDBName}.{table}");
            logger.Debug($"Adding rows from old database's {table}");
            dbConn.Execute($"INSERT INTO {newDBName}.{table} SELECT * FROM {oldDBName}.{table};");
            SortTable(dbConn, newDBName, table, 1);
        }
        return;
    }
    // Else we do the cutoff
    var cutoffStr = cutoffTime.ToString(JSDateTimeFormat);
    logger.Info("Merging at date: " + cutoffTime.ToString("yyyy-MM-dd HH:mm:ss"));
    foreach (var table in tables)
    {
        // Get column names from old db in case newdb has columns
        // old db doesn't
        var colNames = GetTableColumnNames(dbConn, oldDBName, table);
        // Cutoff data in new db thats older than cutoff
        logger.Debug($"Deleting rows in new database's {table} older than cutoff");
        dbConn.Execute($"DELETE FROM {newDBName}.{table} WHERE {colNames[1]}<?;", cutoffStr);
        // Insert old rows in to the new db
        logger.Debug($"Adding rows from old database's {table} older than cutoff");
        var columnsClause = string.Join(", ", colNames.Skip(1));
        dbConn.Execute(
            $"INSERT INTO {newDBName}.{table}({columnsClause})" +
            $"SELECT {columnsClause} FROM {oldDBName}.{table} " +
            $"WHERE {colNames[1]}<?;", cutoffStr
        );
        SortTable(dbConn, newDBName, table, 1);
    }
}
/// <summary>
/// Prints the interactive warning banner explaining why the listed user
/// feed tables cannot be merged automatically and what inputs are accepted.
/// </summary>
private void PrintOverlapWarning(List<string> tables, DateTime oldestInNew, DateTime newestInOld)
{
    var overlap = newestInOld - oldestInNew;
    var overlapString = $"{overlap.Days} days, {overlap.Hours} hours";
    logger.Warn(new string('=', 100));
    logger.Warn("WARNING:".PadLeft(46));
    // Fixed "has is unable" typo in the user-facing message
    logger.Warn("The merger is unable to automatically merge the following USER FEED tables:");
    foreach (var table in tables)
    {
        logger.Warn(table);
    }
    logger.Warn("");
    logger.Warn("This is because these USER FEED tables contain overlap that can't be resolved:");
    // Crude ASCII diagram of the two time ranges and their overlap
    logger.Warn("old database".PadRight(64, '-') + "|");
    logger.Warn(("|" + " new database".PadLeft(64, '-')).PadLeft(100));
    logger.Warn($"overlap ({overlapString})".PadRight(35) + "|" + new string('-', 28) + "|");
    logger.Warn("cutoff (^)".PadLeft(49));
    logger.Warn("");
    logger.Warn("Please choose a cutoff date and time. Data in the old USER FEED tables after this date");
    logger.Warn("will be discarded, and data in the new USER FEED tables before this date will be discarded as well.");
    logger.Warn("The remaining data will then be spliced together.");
    logger.Warn("Your input should be in this format: `YYYY-MM-DD HH:MM:SS`");
    logger.Warn("");
    logger.Warn("Alternatively, you can enter `keep new` to discard the old data or `keep old` to discard new data.");
    logger.Warn("");
    logger.Warn("Again, this only affects USER FEED tables, all other tables are merged properly.");
    // TODO: replace the <woops> placeholder with the real wiki link
    logger.Warn("To read more about this process please check the VRCX wiki: <woops>");
    logger.Warn(new string('=', 100));
}
/// <summary>
/// Copies the old database's configs table over the new one, replacing
/// conflicting keys, while deliberately skipping saved credentials.
/// </summary>
private void ImportConfig()
{
    unMergedTables.Remove("configs");
    // Skip saved credentials to avoid accidentally exposing sensitive
    // information somehow
    dbConn.Execute(
        $"INSERT OR REPLACE INTO {newDBName}.configs " +
        $"SELECT * FROM {oldDBName}.configs " +
        $"WHERE key!=?;", "config:savedcredentials"
    );
}
/// <summary>
/// Forces the database version config back to 0 so VRCX re-runs its schema
/// upgrade path on next launch.
/// </summary>
private void ResetDatabaseVersion()
{
    // Tell VRCX to add in any missing fields that the merger may have
    // missed, just as a precaution
    dbConn.Execute(
        $"INSERT OR REPLACE INTO {newDBName}.configs VALUES (?, 0)",
        "config:vrcx_databaseversion"
    );
}
/// <summary>
/// A method that automates various processes of merging.
///
/// It first finds a table that matches the `tableMatcher` predicate,
/// then removes it from `unMergedTables`.
/// Then it loops over every row in the old database table, checking if
/// the row exists in the new table. It does this by checking if the
/// column indices passed into `colIndicesToMatch` are the same.
/// Then for each row, it calls `rowTransformer`, passing in the old
/// rows and existing new rows. `rowTransformer` should return the row
/// to insert into the new database or null.
/// </summary>
/// <param name="tableMatcher">A predicate to check if a table is one to edit</param>
/// <param name="colIndicesToMatch">The column indices to match to see if old and new rows are the same</param>
/// <param name="rowTransformer">A func called on every row</param>
/// <param name="finalizer">An action called once per merged table after all its rows have been iterated</param>
private void MergeTable(
    Predicate<string> tableMatcher,
    int[] colIndicesToMatch,
    Func<object[], object[], object[]> rowTransformer,
    Action<string> finalizer = null
) {
    for (int i = 0; i < unMergedTables.Count; i++)
    {
        // Find table that we want to merge
        string table = unMergedTables[i];
        if (!tableMatcher(table))
        {
            continue;
        }
        unMergedTables.RemoveAt(i);
        i--;
        logger.Debug($"Merging table `{table}` into new database");
        // Prepare queries
        var colNames = GetTableColumnNames(dbConn, newDBName, table);
        var valuesClause = string.Join(',', new string('?', colNames.Count).ToCharArray());
        var insertQuery = $"INSERT INTO {newDBName}.{table} VALUES ({valuesClause});";
        // Lambda parameter renamed from `i` to `idx`: it previously reused
        // the enclosing loop index's name, which is confusing at best and
        // a CS0136 scope conflict at worst.
        var whereClause = string.Join(" AND ", colIndicesToMatch.Select(idx => colNames[idx] + "=?"));
        // DELETE ... RETURNING both removes and returns any matching row,
        // so the transformed replacement can be inserted in its place
        var existsQuery = $"DELETE FROM {newDBName}.{table} WHERE {whereClause} RETURNING *;";
        // Loop over every row on table in old database
        var rowsCommand = dbConn.CreateCommand($"SELECT * FROM {oldDBName}.{table};");
        foreach (object[] oldRow in rowsCommand.ExecuteQueryScalars(dbConn))
        {
            // Find existing row (if it exists at all) and remove it
            object[] colsToMatch = colIndicesToMatch.Select(idx => oldRow[idx]).ToArray();
            var existingRow = dbConn.QueryScalars(existsQuery, colsToMatch).FirstOrDefault();
            // Insert new row in place of the existing row
            var newRow = rowTransformer(oldRow, existingRow);
            if (newRow.Length < colNames.Count)
            {
                // Row count may not match between old and new db, so
                // just add null data
                var temp = new object[colNames.Count];
                newRow.CopyTo(temp, 0);
                newRow = temp;
            }
            dbConn.Execute(insertQuery, newRow);
        }
        finalizer?.Invoke(table);
    }
}
// Returns the column names of {db}.{table} in declared order via
// pragma_table_info(table, schema); callers assume index 0 is the pk column.
private static List<string> GetTableColumnNames(SQLiteConnection conn, string db, string table)
    => conn.QueryScalars<string>($"SELECT name FROM pragma_table_info(?, ?);", table, db);
/// <summary>
/// Rewrites {db}.{table} so its rows (and regenerated primary keys) are in
/// {sortCol} order: creates a temp copy of the table, inserts the rows
/// sorted, drops the original and renames the copy back.
/// </summary>
private static void SortTable(SQLiteConnection conn, string db, string table, int sortCol, bool isDesc = false)
{
    logger.Debug($"Sorting table {db}.{table}");
    // Just to ensure name is unique
    var time = DateTime.Now.Ticks;
    var newTableName = table + time.ToString();
    logger.Debug($"Creating new table: " + newTableName);
    // Split create query into words
    var createQuery = conn.ExecuteScalar<string>($"SELECT sql FROM {db}.sqlite_schema WHERE type='table' AND name=?;", table);
    var words = createQuery.Split(' ');
    // Third word is table name
    // NOTE(review): assumes the stored sql is exactly `CREATE TABLE <name> ...`
    // with an unquoted, space-free table name — confirm for all VRCX tables
    words[2] = db + "." + newTableName;
    createQuery = string.Join(' ', words);
    logger.Debug("Creating table with command: " + createQuery);
    conn.Execute(createQuery);
    logger.Debug("Adding rows...");
    var colNames = GetTableColumnNames(conn, db, table);
    // Skip pks so they get reassigned
    var selectClause = string.Join(',', colNames.Skip(1));
    conn.Execute(
        $"INSERT INTO {db}.{newTableName} ({selectClause}) " +
        $"SELECT {selectClause} FROM {db}.{table} " +
        $"ORDER BY {colNames[sortCol]} {(isDesc ? "DESC" : "ASC")};"
    );
    logger.Debug("Dropping old and renaming");
    conn.Execute($"DROP TABLE {db}.{table}");
    conn.Execute($"ALTER TABLE {db}.{newTableName} RENAME TO {table}");
}
// Matches table name prefixes that identify per-user tables: either a
// legacy 10-char alphanumeric id or a modern `usr` + 32 hex chars user id.
[GeneratedRegex("^([A-Za-z0-9]{10}|usr[0-9A-Fa-f]{32})")]
private static partial Regex UserIDRegex();
}
}
+207
View File
@@ -0,0 +1,207 @@
using NLog;
using NLog.Targets;
using SQLite;
using System;
// Use different command line parser for more standardized output
// (like help text)
using System.CommandLine;
using System.CommandLine.Parsing;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
namespace DBMerger
{
    /// <summary>
    /// Console entry point for DBMerger: parses arguments, configures
    /// logging, backs up the new database and runs <see cref="Merger"/>.
    /// </summary>
    public class Program
    {
        private static readonly Logger logger = LogManager.GetLogger("DBMerger");
        // TODO: Consider config class?
        // Connection opened on the OLD database; the new db is ATTACHed to it
        public static SQLiteConnection DBConn { get; private set; }
        // NOTE(review): never assigned anywhere in this file — confirm
        // whether this property is still needed
        public static SQLiteConnection OldDBConn { get; private set; }
        public static Config Config { get; private set; }
/// <summary>
/// Entry point: parses args, sets up logging, opens the old database,
/// attaches the new one, backs up the new database, then runs the merge.
/// </summary>
public static void Main(string[] args)
{
    ProcessArgs(args);
    ConfigureLogger();
    if (Config.Debug)
    {
        // Needed? mostly just covering my ass
        logger.Warn(new string('=', 100));
        logger.Warn("WARNING:".PadLeft(46));
        logger.Warn("Debug mode will output some sensitive information (friends list, friend history, etc.)");
        logger.Warn("Only use this mode for debug purposes. Enter `y` to confirm or anything else to exit.");
        logger.Warn(new string('=', 100));
        if (Console.ReadLine() != "y")
        {
            return;
        }
    }
    var asm = Assembly.GetExecutingAssembly();
    var versionInfo = FileVersionInfo.GetVersionInfo(asm.Location);
    logger.Info($"{versionInfo.ProductName}-{versionInfo.ProductVersion}");
    logger.Info($"by {versionInfo.LegalCopyright}\n");
    if (Path.GetFullPath(Config.NewDBPath) == Path.GetFullPath(Config.OldDBPath))
    {
        // Fixed "pathes" typo in the user-facing message
        logger.Fatal("Database paths cannot be the same!");
        return;
    }
    try
    {
        // The "main" schema is the OLD db; the new db is attached as `new_db`
        logger.Debug("Creating connection to old DB");
        try
        {
            DBConn = new SQLiteConnection(Config.OldDBPath) { Tracer = logger.Trace, Trace = true };
        }
        catch (SQLiteException)
        {
            logger.Fatal("Could not connect to old DB. Perhaps passed in db is corrupt or not a valid sqlite db?");
            return;
        }
        logger.Debug("Creating connection to new DB");
        try
        {
            DBConn.Execute("ATTACH DATABASE ? AS new_db", Config.NewDBPath);
        }
        catch (SQLiteException)
        {
            logger.Fatal("Could not connect to new DB. Perhaps passed in db is corrupt or not a valid sqlite db?");
            return;
        }
        logger.Info("Database connections created successfully!");
        CreateBackup();
        try
        {
            new Merger(DBConn, "main", "new_db", Config).Merge();
        }
        catch (Exception ex)
        {
            logger.Fatal(ex, "Merge process failed with error:\n");
        }
    }
    finally
    {
        logger.Debug("Closing database connection...");
        // Null-conditional: when the connection constructor throws, the
        // early return above still runs this finally block with DBConn
        // null, which previously crashed with a NullReferenceException
        DBConn?.Close();
    }
}
/// <summary>
/// Defines and parses the command line options, populating
/// <see cref="Config"/>. Exits the process if parsing or validation fails.
/// </summary>
private static void ProcessArgs(string[] args)
{
    // Validates a db path option: adds a .sqlite3 extension if missing,
    // rejects other extensions, and requires the file to exist. Returns
    // null (and sets ErrorMessage) on failure.
    static string validateDBPath(ArgumentResult arg)
    {
        string path = arg.Tokens[0].Value;
        Option option = arg.Argument.Parents.Single() as Option;
        string extension = Path.GetExtension(path);
        if (extension == "")
        {
            // Was `path += Path.ChangeExtension(path, "sqlite3")`, which
            // appended the whole converted path onto itself, producing a
            // garbage path that could never exist
            path = Path.ChangeExtension(path, "sqlite3");
        }
        else if (extension != ".sqlite3")
        {
            arg.ErrorMessage = $"File given to option `{option.Aliases.First()}` is not a sqlite database!";
            return null;
        }
        if (!File.Exists(path))
        {
            arg.ErrorMessage = $"File given to option `{option.Aliases.First()}` does not exist!";
            return null;
        }
        return path;
    }
    var rootCommand = new RootCommand("Merge an old and new VRCX sqlite database into one.");
    var newDBOption = new Option<string>(
        ["-n", "--new-db-path"],
        description: "The path of the new DB to merge the old onto.",
        parseArgument: validateDBPath
    ) { IsRequired = true };
    rootCommand.AddOption(newDBOption);
    var oldDBOption = new Option<string>(
        ["-o", "--old-db-path"],
        description: "The path of the old DB to merge into the new.",
        parseArgument: validateDBPath
    ) { IsRequired = true };
    rootCommand.AddOption(oldDBOption);
    // Add `debug` option to be consistent with args from the main exe
    var debugOption = new Option<bool>(["-v", "--verbose", "-d", "--debug"], () => false, "Add debug information to the output.");
    rootCommand.AddOption(debugOption);
    var importConfigOption = new Option<bool>(["--import-config"], () => false, "Imports the config values from the old database. This will override the config in the new database.");
    rootCommand.AddOption(importConfigOption);
    rootCommand.SetHandler((newDBPath, oldDBPath, debug, importConfig) =>
    {
        Config = new Config(newDBPath, oldDBPath, debug, importConfig);
    }, newDBOption, oldDBOption, debugOption, importConfigOption);
    // If the args weren't parsable or verifiable, exit
    if (rootCommand.Invoke(args) != 0)
    {
        Environment.Exit(0);
    }
}
/// <summary>
/// Sets up NLog with a file target (archiving the previous log on startup)
/// and a colored console target. Debug mode lowers both thresholds to Trace.
/// </summary>
private static void ConfigureLogger()
{
    LogManager.Setup().LoadConfiguration(builder =>
    {
        var fileTarget = new FileTarget("fileTarget")
        {
            FileName = "DBMerger.log",
            Layout = "${longdate} [${level:uppercase=true:padding=-5}] ${logger:padding=-20} - ${message} ${exception:format=tostring}",
            ArchiveSuffixFormat = "{1:yyyy-MM-dd.HH-mm-ss}",
            ArchiveOldFileOnStartup = true,
            KeepFileOpen = true,
            AutoFlush = true,
            Encoding = System.Text.Encoding.UTF8
        };
        var consoleTarget = new ColoredConsoleTarget()
        {
            Layout = "[${level:uppercase=true:padding=-5}] ${message} ${exception:format=tostring}",
            AutoFlush = true,
            Encoding = System.Text.Encoding.UTF8
        };
        // File log keeps Debug output even in normal mode; console stays quieter
        builder.ForLogger().FilterMinLevel(Config.Debug ? LogLevel.Trace : LogLevel.Debug).WriteTo(fileTarget);
        builder.ForLogger().FilterMinLevel(Config.Debug ? LogLevel.Trace : LogLevel.Info).WriteTo(consoleTarget);
    });
}
/// <summary>
/// Copies the new database to a uniquely-named backup file next to it
/// before the merge touches anything.
/// </summary>
private static void CreateBackup()
{
    // Get unique name for backup. Format matches the log file name format
    var stamp = DateTime.Now.ToString("yyyyMMdd");
    var targetDir = Path.GetDirectoryName(Config.NewDBPath);
    string backupPath;
    // Bump the suffix until we land on a file name that isn't taken yet
    for (var attempt = 0; ; attempt++)
    {
        backupPath = Path.Combine(targetDir, $"VRCX.back.{stamp}.{attempt}.sqlite3");
        if (!File.Exists(backupPath))
        {
            break;
        }
    }
    File.Copy(Config.NewDBPath, backupPath);
    logger.Info($"Created backup of new DB at {backupPath}");
}
}
}
+77
View File
@@ -0,0 +1,77 @@
using SQLite;
using SQLitePCL;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
namespace DBMerger
{
    // This class is made of mostly hardcoded copies from the sqlite lib.
    // Normally this would be very bad, but since the library is long since
    // unmaintained it shouldn't matter
    internal static class SqliteExtensions
    {
        // The prepare method is private, so fetch it here
        private static readonly MethodInfo _prepareMethod = typeof(SQLiteCommand).GetMethod("Prepare", BindingFlags.NonPublic | BindingFlags.Instance);
        /// <summary>
        /// Creates a SQLiteCommand given the command text (SQL) with arguments. Place a '?'
        /// in the command text for each of the arguments and then executes that command.
        /// It returns each row as an array of object primitives.
        /// </summary>
        /// <param name="query">
        /// The fully escaped SQL.
        /// </param>
        /// <param name="args">
        /// Arguments to substitute for the occurences of '?' in the query.
        /// </param>
        /// <returns>
        /// An enumerable with one object array for each row.
        /// </returns>
        public static List<object[]> QueryScalars(this SQLiteConnection conn, string query, params object[] args)
        {
            var cmd = conn.CreateCommand(query, args);
            return cmd.ExecuteQueryScalars(conn).ToList();
        }
        /// <summary>
        /// Steps through a prepared statement and yields each row as an
        /// object[] whose elements are boxed per sqlite column type.
        /// Lazily evaluated; the statement is finalized when the enumerator
        /// is disposed or exhausted.
        /// </summary>
        public static IEnumerable<object[]> ExecuteQueryScalars(this SQLiteCommand cmd, SQLiteConnection conn)
        {
            if (conn.Trace)
            {
                conn.Tracer?.Invoke("Executing Query: " + cmd);
            }
            // Prepare is private on SQLiteCommand, so invoke it via reflection
            var stmt = _prepareMethod.Invoke(cmd, []) as sqlite3_stmt;
            try
            {
                int columnCount = SQLite3.ColumnCount(stmt);
                if (SQLite3.ColumnCount(stmt) < 1)
                {
                    throw new InvalidOperationException("QueryScalars should return at least one column");
                }
                while (SQLite3.Step(stmt) == SQLite3.Result.Row)
                {
                    var row = new object[columnCount];
                    for (int i = 0; i < columnCount; i++)
                    {
                        var colType = SQLite3.ColumnType(stmt, i);
                        // NOTE(review): ColumnInt boxes a 32-bit int, which
                        // truncates sqlite 64-bit INTEGER values, and the
                        // float cast narrows REAL doubles — confirm no
                        // column needs the full range before relying on this
                        row[i] = colType switch
                        {
                            SQLite3.ColType.Integer => SQLite3.ColumnInt(stmt, i),
                            SQLite3.ColType.Float => (float)SQLite3.ColumnDouble(stmt, i),
                            SQLite3.ColType.Text => SQLite3.ColumnString(stmt, i),
                            SQLite3.ColType.Blob => SQLite3.ColumnByteArray(stmt, i),
                            SQLite3.ColType.Null or _ => null
                        };
                    }
                    yield return row;
                }
            }
            finally
            {
                // Always finalize the statement, even on partial enumeration
                SQLite3.Finalize(stmt);
            }
        }
    }
}
+6 -5
View File
@@ -18,7 +18,7 @@
<AppendRuntimeIdentifierToOutputPath>false</AppendRuntimeIdentifierToOutputPath>
<Title>VRCX</Title>
<Description>VRCX</Description>
<PackageIcon>..\VRCX.png</PackageIcon>
<PackageIcon>..\images\VRCX.png</PackageIcon>
<RepositoryUrl>https://github.com/vrcx-team/VRCX</RepositoryUrl>
<ResourceLanguages>en</ResourceLanguages>
<SatelliteResourceLanguages>en-US;en</SatelliteResourceLanguages>
@@ -31,6 +31,7 @@
<GenerateTargetFrameworkAttribute>false</GenerateTargetFrameworkAttribute>
<!-- Fix fail fast exception -->
<CETCompat>false</CETCompat>
<WarningLevel>0</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
@@ -41,7 +42,7 @@
</PropertyGroup>
<PropertyGroup>
<ApplicationIcon>..\VRCX.ico</ApplicationIcon>
<ApplicationIcon>..\images\VRCX.ico</ApplicationIcon>
</PropertyGroup>
<PropertyGroup>
@@ -80,10 +81,10 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<TargetPath>openvr_api.dll</TargetPath>
</None>
<Content Include="..\VRCX.ico">
<Content Include="..\images\VRCX.ico">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="..\VRCX.png">
<Content Include="..\images\VRCX.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>
@@ -112,7 +113,7 @@
</Target>
<Target Name="PostBuild" AfterTargets="PostBuildEvent">
<Exec Command="&quot;$(ProjectDir)..\build-tools\Topten.nvpatch.exe&quot; --enable &quot;$(ProjectDir)..\build\Cef\VRCX.exe&quot;" />
<Exec Command="&quot;$(ProjectDir)..\Dotnet\build-tools\Topten.nvpatch.exe&quot; --enable &quot;$(ProjectDir)..\build\Cef\VRCX.exe&quot;" />
</Target>
<ItemGroup>
+5 -4
View File
@@ -19,7 +19,7 @@
<AppendRuntimeIdentifierToOutputPath>false</AppendRuntimeIdentifierToOutputPath>
<Title>VRCX</Title>
<Description>VRCX</Description>
<PackageIcon>..\VRCX.png</PackageIcon>
<PackageIcon>..\images\VRCX.png</PackageIcon>
<RepositoryUrl>https://github.com/vrcx-team/VRCX</RepositoryUrl>
<ResourceLanguages>en</ResourceLanguages>
<SatelliteResourceLanguages>en-US;en</SatelliteResourceLanguages>
@@ -28,6 +28,7 @@
<CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<GenerateTargetFrameworkAttribute>false</GenerateTargetFrameworkAttribute>
<WarningLevel>0</WarningLevel>
</PropertyGroup>
<Import Sdk="Microsoft.NET.Sdk" Project="Sdk.targets" />
@@ -46,7 +47,7 @@
</PropertyGroup>
<PropertyGroup>
<ApplicationIcon>..\VRCX.ico</ApplicationIcon>
<ApplicationIcon>..\images\VRCX.ico</ApplicationIcon>
</PropertyGroup>
<ItemGroup>
@@ -60,10 +61,10 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<TargetPath>openvr_api.dll</TargetPath>
</None>
<Content Include="..\VRCX.ico">
<Content Include="..\images\VRCX.ico">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="..\VRCX.png">
<Content Include="..\images\VRCX.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>
+5 -4
View File
@@ -18,7 +18,7 @@
<AppendRuntimeIdentifierToOutputPath>false</AppendRuntimeIdentifierToOutputPath>
<Title>VRCX</Title>
<Description>VRCX</Description>
<PackageIcon>..\VRCX.png</PackageIcon>
<PackageIcon>..\images\VRCX.png</PackageIcon>
<RepositoryUrl>https://github.com/vrcx-team/VRCX</RepositoryUrl>
<ResourceLanguages>en</ResourceLanguages>
<SatelliteResourceLanguages>en-US;en</SatelliteResourceLanguages>
@@ -27,6 +27,7 @@
<CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<GenerateTargetFrameworkAttribute>false</GenerateTargetFrameworkAttribute>
<WarningLevel>0</WarningLevel>
</PropertyGroup>
<Import Sdk="Microsoft.NET.Sdk" Project="Sdk.targets" />
@@ -51,7 +52,7 @@
</PropertyGroup>
<PropertyGroup>
<ApplicationIcon>..\VRCX.ico</ApplicationIcon>
<ApplicationIcon>..\images\VRCX.ico</ApplicationIcon>
</PropertyGroup>
<ItemGroup>
@@ -65,10 +66,10 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<TargetPath>openvr_api.dll</TargetPath>
</None>
<Content Include="..\VRCX.ico">
<Content Include="..\images\VRCX.ico">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="..\VRCX.png">
<Content Include="..\images\VRCX.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>
+34
View File
@@ -0,0 +1,34 @@
https://github.com/toptensoftware/nvpatch
Copyright © 2014-2021 Topten Software.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License") you may not use this product except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
https://github.com/vuejs/devtools/blob/main/LICENSE
MIT License
Copyright (c) 2023 webfansplz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,72 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
  "$comment": "This schema is primarily for any developers that want to interface with this outside of VRCX and want an easy way to view/generate the format without crawling through the codebase. It's also for me a couple months from now when I come back to this and have no idea what was done.",
"title": "VRChat Screenshot JSON",
"description": "JSON object attached by VRCX to screenshot files taken by users in-game.",
"type": "object",
"required": ["application", "version", "author", "world", "players"],
"properties": {
"application": {
"type": "string",
      "default": "VRCX",
"description": "Name of the application writing to the screenshot. Should be VRCX."
},
"version": {
"type": "integer",
      "description": "The version of this schema. If the format changes, this number should change.",
"const": 1
},
"author": {
"type": "object",
"description": "The details of the user that took the picture.",
"required": ["id", "displayName"],
"properties": {
"id": {
"type": "string",
"description": "The ID of the user."
},
"displayName": {
"type": "string",
"description": "The display name of the user."
}
}
},
"world": {
"type": "object",
"description": "Information about the world the picture was taken in.",
"required": ["id", "name", "instanceId"],
"properties": {
"id": {
"type": "string",
"description": "The ID of the world."
},
"name": {
"type": "string",
"description": "The name of the world."
},
"instanceId": {
"type": "string",
"description": "The full ID of the game instance."
}
}
},
"players": {
"type": "array",
"description": "A list of players in the world at the time the picture was taken.",
"items": {
"type": "object",
"required": ["id", "displayName"],
"properties": {
"id": {
"type": "string",
"description": "The ID of the player in the world."
},
"displayName": {
"type": "string",
"description": "The display name of the player in the world."
}
}
}
}
}
}