mirror of
https://github.com/vrcx-team/VRCX.git
synced 2026-04-06 00:32:02 +02:00
refactor activity tab
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,96 @@
|
||||
/**
 * Builds the full ECharts option object for the weekly activity heatmap.
 *
 * @param {Object} args
 * @param {Array<[number, number, number]>} args.data - Cells as [hour, displayDayIndex, normalizedValue].
 * @param {number[]} args.rawBuckets - 168 per-hour totals indexed by originalDay * 24 + hour.
 * @param {string[]} args.dayLabels - Y-axis labels, already rotated for the configured week start.
 * @param {string[]} args.hourLabels - X-axis labels for hours 0-23.
 * @param {number} args.weekStartsOn - Day index (0-6) the displayed week starts on.
 * @param {boolean} args.isDarkMode - Selects the cell border color.
 * @param {string} args.emptyColor - Color for zero-valued cells.
 * @param {string[]} args.scaleColors - Five colors for the 0-1 intensity bands.
 * @param {string} args.unitLabel - Unit suffix shown in the tooltip.
 * @returns {Object} ECharts option.
 */
export function buildHeatmapOption({
    data,
    rawBuckets,
    dayLabels,
    hourLabels,
    weekStartsOn,
    isDarkMode,
    emptyColor,
    scaleColors,
    unitLabel
}) {
    // Zero gets a dedicated piece; the (0, 1] range is split into five bands.
    const bands = [
        [0, 0.2],
        [0.2, 0.4],
        [0.4, 0.6],
        [0.6, 0.8],
        [0.8, 1]
    ];
    const pieces = [
        { min: 0, max: 0, color: emptyColor },
        ...bands.map(([gt, lte], index) => ({ gt, lte, color: scaleColors[index] }))
    ];

    // Cell borders simulate gaps between cells; match them to the page background.
    const cellBorderColor = isDarkMode ? 'hsl(220, 15%, 8%)' : 'hsl(0, 0%, 100%)';

    const formatTooltip = (params) => {
        const [hour, dayIndex] = params.data;
        // Undo the week-start rotation to address the raw bucket array.
        const originalDay = (dayIndex + weekStartsOn) % 7;
        const slot = originalDay * 24 + hour;
        const minutes = Math.round(rawBuckets[slot] || 0);
        return `${dayLabels[dayIndex]} ${hourLabels[hour]}<br/><b>${minutes}</b> ${unitLabel}`;
    };

    return {
        tooltip: {
            position: 'top',
            formatter: formatTooltip
        },
        grid: {
            top: 6,
            left: 42,
            right: 16,
            bottom: 32
        },
        xAxis: {
            type: 'category',
            data: hourLabels,
            splitArea: { show: false },
            axisLabel: {
                interval: 2,
                fontSize: 10
            },
            axisTick: { show: false }
        },
        yAxis: {
            type: 'category',
            data: dayLabels,
            inverse: true,
            splitArea: { show: false },
            axisLabel: {
                fontSize: 11
            },
            axisTick: { show: false }
        },
        visualMap: {
            min: 0,
            max: 1,
            calculable: false,
            show: false,
            type: 'piecewise',
            dimension: 2,
            pieces
        },
        series: [
            {
                type: 'heatmap',
                data,
                emphasis: {
                    itemStyle: {
                        shadowBlur: 6,
                        shadowColor: 'rgba(0, 0, 0, 0.3)'
                    }
                },
                itemStyle: {
                    borderWidth: 1.5,
                    borderColor: cellBorderColor,
                    borderRadius: 2
                }
            }
        ],
        backgroundColor: 'transparent'
    };
}
|
||||
|
||||
/**
 * Converts the 168-slot normalized bucket array (indexed day * 24 + hour) into
 * ECharts heatmap tuples [hour, displayDayIndex, value], rotating the day rows
 * so the configured week-start day becomes display row 0.
 *
 * @param {number[]} normalizedBuckets - 168 values indexed by day * 24 + hour.
 * @param {number} weekStartsOn - Day index (0-6) shown as the first row.
 * @returns {Array<[number, number, number]>} One tuple per bucket, day-major order.
 */
export function toHeatmapSeriesData(normalizedBuckets, weekStartsOn) {
    return Array.from({ length: 7 * 24 }, (_, slot) => {
        const day = Math.floor(slot / 24);
        const hour = slot % 24;
        // Rotate so weekStartsOn maps to display row 0.
        const displayDay = (day - weekStartsOn + 7) % 7;
        return [hour, displayDay, normalizedBuckets[slot]];
    });
}
|
||||
@@ -1,225 +0,0 @@
|
||||
import { database } from '../services/database';
|
||||
import {
|
||||
buildSessionsFromGamelog,
|
||||
ONLINE_SESSION_MERGE_GAP_MS
|
||||
} from '../shared/utils/overlapCalculator';
|
||||
|
||||
/** @typedef {{ start: number, end: number }} Session */

/**
 * Module-level singleton cache for the current user's online sessions.
 * Lazy-loaded on first access, then incrementally updated.
 */

/** @type {Session[] | null} */
let cachedSessions = null;

// created_at strings of every gamelog_location row folded into the cache.
/** @type {string[] | null} */
let cachedTimestamps = null;

// Cursor for incremental fetches: created_at of the newest row seen so far.
/** @type {string | null} */
let lastRowCreatedAt = null;

/** @type {'idle' | 'loading' | 'ready'} */
let status = 'idle';

// In-flight full load, shared so concurrent callers await the same work.
/** @type {Promise<void> | null} */
let loadPromise = null;

/** @type {Array<() => void>} */
const onReadyCallbacks = [];

/** @type {ReturnType<typeof setInterval> | null} */
let refreshTimer = null;

const REFRESH_INTERVAL_MS = 30 * 60 * 1000; // 30 minutes

/**
 * Executes all onReady callbacks and clears the list.
 * Each callback is wrapped in try/catch so one failure cannot stop the rest.
 */
function flushCallbacks() {
    // splice(0) snapshots and empties the list; callbacks registered while
    // flushing land in the (now empty) array for a later flush.
    const cbs = onReadyCallbacks.splice(0);
    for (const cb of cbs) {
        try {
            cb();
        } catch (e) {
            console.error('useCurrentUserSessions onReady callback error:', e);
        }
    }
}

/**
 * Starts the periodic incremental refresh timer.
 * Only starts if not already running.
 */
function startRefreshTimer() {
    if (refreshTimer) return;
    refreshTimer = setInterval(async () => {
        // Skip ticks that fire while the cache is not ready (e.g. after invalidate()).
        if (status !== 'ready') return;
        try {
            await incrementalUpdate();
        } catch (e) {
            console.error('useCurrentUserSessions periodic refresh error:', e);
        }
    }, REFRESH_INTERVAL_MS);
}

/**
 * Full load: queries all gamelog_location rows and builds sessions.
 * Sets status to 'ready' on success; resets to 'idle' on failure so a
 * later call can retry.
 * @returns {Promise<void>}
 */
async function fullLoad() {
    // status flips to 'loading' synchronously, before the first await, so
    // concurrent callers observe the in-progress state immediately.
    status = 'loading';
    try {
        const rows = await database.getCurrentUserOnlineSessions();
        cachedTimestamps = rows.map((r) => r.created_at);
        cachedSessions = buildSessionsFromGamelog(rows);
        if (rows.length > 0) {
            // Rows are assumed ordered by created_at ascending — TODO confirm
            // against database.getCurrentUserOnlineSessions().
            lastRowCreatedAt = rows[rows.length - 1].created_at;
        }
        status = 'ready';
        startRefreshTimer();
        flushCallbacks();
    } catch (e) {
        status = 'idle';
        throw e;
    }
}

/**
 * Incremental update: only fetches rows newer than lastRowCreatedAt.
 * Merges new sessions into the cached sessions array.
 * @returns {Promise<void>}
 */
async function incrementalUpdate() {
    // Nothing to extend from if no baseline cursor or the cache is not ready.
    if (!lastRowCreatedAt || status !== 'ready') return;

    const newRows =
        await database.getCurrentUserOnlineSessionsAfter(lastRowCreatedAt);
    if (newRows.length === 0) return;

    lastRowCreatedAt = newRows[newRows.length - 1].created_at;
    cachedTimestamps.push(...newRows.map((r) => r.created_at));

    const newSessions = buildSessionsFromGamelog(newRows);
    if (newSessions.length === 0) return;

    // Merge: if last cached session and first new session overlap or are close, merge them
    if (cachedSessions.length > 0 && newSessions.length > 0) {
        const last = cachedSessions[cachedSessions.length - 1];
        const first = newSessions[0];
        if (first.start <= last.end + ONLINE_SESSION_MERGE_GAP_MS) {
            // Extend the tail session in place instead of appending a duplicate.
            last.end = Math.max(last.end, first.end);
            newSessions.shift();
        }
    }
    cachedSessions.push(...newSessions);
}

/**
 * Returns whether the cache is ready.
 * @returns {boolean}
 */
function isReady() {
    return status === 'ready';
}

/**
 * Returns whether the cache is currently loading.
 * @returns {boolean}
 */
function isLoading() {
    return status === 'loading';
}

/**
 * Gets the cached sessions. If not loaded yet, triggers a full load.
 * If already loaded, does an incremental update first.
 * @returns {Promise<Session[]>}
 */
async function getSessions() {
    if (status === 'ready') {
        await incrementalUpdate();
        return cachedSessions;
    }

    if (status === 'loading') {
        // Wait for existing load to complete
        await loadPromise;
        return cachedSessions;
    }

    // idle: trigger full load
    loadPromise = fullLoad();
    try {
        await loadPromise;
        return cachedSessions;
    } finally {
        loadPromise = null;
    }
}

/**
 * Gets the cached timestamps (created_at strings from gamelog_location).
 * Must be called after getSessions() or after onReady fires.
 * Returns an empty array if the cache has not been populated.
 * @returns {string[]}
 */
function getTimestamps() {
    return cachedTimestamps || [];
}

/**
 * Registers a callback to be called when the cache becomes ready.
 * If already ready, callback is invoked immediately.
 * @param {() => void} callback
 */
function onReady(callback) {
    if (status === 'ready') {
        callback();
        return;
    }
    onReadyCallbacks.push(callback);
}

/**
 * Triggers a full load if idle, or returns the existing promise if loading.
 * Does NOT block the caller — designed for fire-and-forget usage.
 * Returns the promise so callers can optionally await it.
 * @returns {Promise<void>}
 */
function triggerLoad() {
    if (status === 'ready') return Promise.resolve();
    if (status === 'loading') return loadPromise;

    loadPromise = fullLoad().finally(() => {
        loadPromise = null;
    });
    return loadPromise;
}

/**
 * Invalidates the cache and stops the refresh timer.
 * NOTE: onReadyCallbacks is not cleared here; callbacks registered before
 * invalidation will fire after the next successful load.
 */
function invalidate() {
    cachedSessions = null;
    cachedTimestamps = null;
    lastRowCreatedAt = null;
    status = 'idle';
    loadPromise = null;
    if (refreshTimer) {
        clearInterval(refreshTimer);
        refreshTimer = null;
    }
}

/**
 * Composable exposing the module-level session cache. All returned functions
 * close over the same singleton state.
 */
export function useCurrentUserSessions() {
    return {
        isReady,
        isLoading,
        getSessions,
        getTimestamps,
        onReady,
        triggerLoad,
        invalidate
    };
}
|
||||
@@ -1429,16 +1429,16 @@
|
||||
"header": "Activity",
|
||||
"load": "Load Activity",
|
||||
"load_hint": "Load activity data from the local database when needed.",
|
||||
"refresh_hint": "Refresh activity data",
|
||||
"total_events": "{count} online events",
|
||||
"times_online": "times online",
|
||||
"most_active_day": "Most active day:",
|
||||
"most_active_time": "Peak hours:",
|
||||
"period": "Period:",
|
||||
"period_all": "All Time",
|
||||
"period_365": "Last Year",
|
||||
"period_180": "Last 6 Months",
|
||||
"period_90": "Last 90 Days",
|
||||
"period_30": "Last 30 Days",
|
||||
"period_7": "Last 7 Days",
|
||||
"minutes_online": "min online",
|
||||
"no_data_in_period": "No activity data in selected period",
|
||||
"days": {
|
||||
"mon": "Mon",
|
||||
@@ -1456,13 +1456,14 @@
|
||||
"peak_overlap": "Peak overlap:",
|
||||
"exclude_hours": "Exclude hours",
|
||||
"no_data": "Not enough data to calculate overlap",
|
||||
"times_overlap": "times overlapping"
|
||||
"times_overlap": "times overlapping",
|
||||
"minutes_overlap": "min overlap"
|
||||
},
|
||||
"most_visited_worlds": {
|
||||
"header": "Most Visited Worlds"
|
||||
},
|
||||
"preparing_data": "Preparing activity data...",
|
||||
"preparing_data_hint": "This may take a moment on first load. You'll be notified when ready.",
|
||||
"preparing_data_hint": "This may take a moment on first load.",
|
||||
"data_ready": "Activity data is ready"
|
||||
},
|
||||
"note_memo": {
|
||||
|
||||
@@ -1,219 +0,0 @@
|
||||
import { dbVars } from '../database';
|
||||
|
||||
import sqliteService from '../sqlite.js';
|
||||
|
||||
// Data-access object for the per-user activity cache tables
// (<prefix>_activity_cache_meta and <prefix>_activity_cache_sessions).
const activityCache = {
    /**
     * Reads the single meta row describing the cache state for a user.
     * @param {string} userId
     * @returns {Promise<{
     *   userId: string,
     *   updatedAt: string,
     *   isSelf: boolean,
     *   sourceLastCreatedAt: string,
     *   pendingSessionStartAt: number | null
     * } | null>}
     */
    async getActivityCacheMeta(userId) {
        let row = null;
        await sqliteService.execute(
            (dbRow) => {
                row = {
                    userId: dbRow[0],
                    updatedAt: dbRow[1],
                    isSelf: Boolean(dbRow[2]),
                    sourceLastCreatedAt: dbRow[3] || '',
                    // Column may be NULL; anything non-numeric maps to null.
                    pendingSessionStartAt:
                        typeof dbRow[4] === 'number' ? dbRow[4] : null
                };
            },
            `SELECT user_id, updated_at, is_self, source_last_created_at, pending_session_start_at
            FROM ${dbVars.userPrefix}_activity_cache_meta
            WHERE user_id = @userId`,
            { '@userId': userId }
        );
        return row;
    },

    /**
     * Reads all cached sessions for a user, ordered by start time.
     * @param {string} userId
     * @returns {Promise<Array<{start: number, end: number}>>}
     */
    async getActivityCacheSessions(userId) {
        const sessions = [];
        await sqliteService.execute(
            (dbRow) => {
                sessions.push({
                    start: dbRow[0],
                    end: dbRow[1]
                });
            },
            `SELECT start_at, end_at
            FROM ${dbVars.userPrefix}_activity_cache_sessions
            WHERE user_id = @userId
            ORDER BY start_at`,
            { '@userId': userId }
        );
        return sessions;
    },

    /**
     * Convenience read combining meta and sessions; null when no meta row exists.
     * @param {string} userId
     * @returns {Promise<{
     *   userId: string,
     *   updatedAt: string,
     *   isSelf: boolean,
     *   sourceLastCreatedAt: string,
     *   pendingSessionStartAt: number | null,
     *   sessions: Array<{start: number, end: number}>
     * } | null>}
     */
    async getActivityCache(userId) {
        const meta = await this.getActivityCacheMeta(userId);
        if (!meta) {
            return null;
        }
        const sessions = await this.getActivityCacheSessions(userId);
        return {
            ...meta,
            sessions
        };
    },

    /**
     * Reads the most recent cached session (by start time) for a user.
     * @param {string} userId
     * @returns {Promise<{start: number, end: number} | null>}
     */
    async getLastActivityCacheSession(userId) {
        let row = null;
        await sqliteService.execute(
            (dbRow) => {
                row = {
                    start: dbRow[0],
                    end: dbRow[1]
                };
            },
            `SELECT start_at, end_at
            FROM ${dbVars.userPrefix}_activity_cache_sessions
            WHERE user_id = @userId
            ORDER BY start_at DESC
            LIMIT 1`,
            { '@userId': userId }
        );
        return row;
    },

    /**
     * Atomically replaces the user's whole cache: deletes all sessions, then
     * writes the given sessions and meta inside one transaction.
     * @param {{
     *   userId: string,
     *   updatedAt: string,
     *   isSelf: boolean,
     *   sourceLastCreatedAt: string,
     *   pendingSessionStartAt: number | null,
     *   sessions: Array<{start: number, end: number}>
     * }} entry
     * @returns {Promise<void>}
     */
    async replaceActivityCache(entry) {
        await sqliteService.executeNonQuery('BEGIN');
        try {
            await sqliteService.executeNonQuery(
                `DELETE FROM ${dbVars.userPrefix}_activity_cache_sessions WHERE user_id = @userId`,
                { '@userId': entry.userId }
            );
            await upsertSessions(entry.userId, entry.sessions);
            await upsertMeta(entry);
            await sqliteService.executeNonQuery('COMMIT');
        } catch (error) {
            // Roll back the partial write before surfacing the failure.
            await sqliteService.executeNonQuery('ROLLBACK');
            throw error;
        }
    },

    /**
     * Appends sessions and updates meta in one transaction. If
     * replaceLastSession is given, that exact (start, end) row is deleted
     * first — used to re-write an open-ended tail session with its final end.
     * @param {{
     *   userId: string,
     *   updatedAt: string,
     *   isSelf: boolean,
     *   sourceLastCreatedAt: string,
     *   pendingSessionStartAt: number | null,
     *   sessions: Array<{start: number, end: number}>,
     *   replaceLastSession?: {start: number, end: number} | null
     * }} entry
     * @returns {Promise<void>}
     */
    async appendActivityCache(entry) {
        await sqliteService.executeNonQuery('BEGIN');
        try {
            if (entry.replaceLastSession) {
                await sqliteService.executeNonQuery(
                    `DELETE FROM ${dbVars.userPrefix}_activity_cache_sessions
                    WHERE user_id = @userId AND start_at = @start AND end_at = @end`,
                    {
                        '@userId': entry.userId,
                        '@start': entry.replaceLastSession.start,
                        '@end': entry.replaceLastSession.end
                    }
                );
            }
            await upsertSessions(entry.userId, entry.sessions);
            await upsertMeta(entry);
            await sqliteService.executeNonQuery('COMMIT');
        } catch (error) {
            await sqliteService.executeNonQuery('ROLLBACK');
            throw error;
        }
    },

    /**
     * Writes only the meta row (no session changes, no transaction).
     * @param {{
     *   userId: string,
     *   updatedAt: string,
     *   isSelf: boolean,
     *   sourceLastCreatedAt: string,
     *   pendingSessionStartAt: number | null
     * }} entry
     * @returns {Promise<void>}
     */
    async touchActivityCacheMeta(entry) {
        await upsertMeta(entry);
    }
};
|
||||
|
||||
/**
 * Inserts or replaces the activity-cache meta row for a user.
 * @param {{ userId: string, updatedAt: string, isSelf: boolean,
 *           sourceLastCreatedAt: string, pendingSessionStartAt: number | null }} entry
 * @returns {Promise<void>}
 */
async function upsertMeta(entry) {
    const params = {
        '@user_id': entry.userId,
        '@updated_at': entry.updatedAt,
        '@is_self': entry.isSelf ? 1 : 0,
        '@source_last_created_at': entry.sourceLastCreatedAt || '',
        '@pending_session_start_at': entry.pendingSessionStartAt
    };
    const sql =
        `INSERT OR REPLACE INTO ${dbVars.userPrefix}_activity_cache_meta
        (user_id, updated_at, is_self, source_last_created_at, pending_session_start_at)
        VALUES (@user_id, @updated_at, @is_self, @source_last_created_at, @pending_session_start_at)`;
    await sqliteService.executeNonQuery(sql, params);
}
|
||||
|
||||
/**
 * Bulk-inserts (or replaces) session rows for a user, batching 250 rows per
 * statement to stay under parameter limits.
 * @param {string} userId
 * @param {Array<{start: number, end: number}>} [sessions]
 * @returns {Promise<void>}
 */
async function upsertSessions(userId, sessions = []) {
    const CHUNK_SIZE = 250;
    for (let offset = 0; offset < sessions.length; offset += CHUNK_SIZE) {
        const chunk = sessions.slice(offset, offset + CHUNK_SIZE);
        const args = {};
        const placeholders = [];
        chunk.forEach((session, i) => {
            // Global row index keeps parameter names unique across chunks.
            const suffix = `${offset + i}`;
            args[`@user_id_${suffix}`] = userId;
            args[`@start_at_${suffix}`] = session.start;
            args[`@end_at_${suffix}`] = session.end;
            placeholders.push(
                `(@user_id_${suffix}, @start_at_${suffix}, @end_at_${suffix})`
            );
        });

        await sqliteService.executeNonQuery(
            `INSERT OR REPLACE INTO ${dbVars.userPrefix}_activity_cache_sessions
            (user_id, start_at, end_at)
            VALUES ${placeholders.join(', ')}`,
            args
        );
    }
}
|
||||
|
||||
export { activityCache };
|
||||
504
src/services/database/activityV2.js
Normal file
504
src/services/database/activityV2.js
Normal file
@@ -0,0 +1,504 @@
|
||||
import { dbVars } from '../database';
|
||||
|
||||
import sqliteService from '../sqlite.js';
|
||||
|
||||
// View kinds whose bucket caches are stored and keyed separately.
const ACTIVITY_VIEW_KIND = {
    ACTIVITY: 'activity',
    OVERLAP: 'overlap'
};

// cache_kind discriminator values for rows in the range-cache table.
const ACTIVITY_RANGE_CACHE_KIND = {
    SESSIONS: 0,
    TOP_WORLDS: 1
};
|
||||
|
||||
// Per-user table-name builders for the Activity V2 schema. Computed lazily
// because dbVars.userPrefix is only known after login.
const syncStateTable = () => `${dbVars.userPrefix}_activity_sync_state_v2`;
const sessionsTable = () => `${dbVars.userPrefix}_activity_sessions_v2`;
const rangeCacheTable = () => `${dbVars.userPrefix}_activity_range_cache_v2`;
const bucketCacheTable = () => `${dbVars.userPrefix}_activity_bucket_cache_v2`;
const topWorldsCacheTable = () =>
    `${dbVars.userPrefix}_activity_top_worlds_cache_v2`;
|
||||
|
||||
/**
 * Parses a JSON string, returning the fallback for falsy input or on any
 * parse error.
 * @param {string | null | undefined} value
 * @param {*} fallback
 * @returns {*}
 */
function parseJson(value, fallback) {
    if (!value) return fallback;
    try {
        return JSON.parse(value);
    } catch {
        return fallback;
    }
}
|
||||
|
||||
/**
 * Activity V2 is the formal, stable schema for the refactored Activity tab.
 * Legacy activity_cache_* tables remain only for upgrade compatibility.
 */
const activityV2 = {
    ACTIVITY_VIEW_KIND,
    ACTIVITY_RANGE_CACHE_KIND,

    /**
     * Fetches the raw source rows for a [fromDays, toDays) day window counted
     * back from now, dispatching to the self (gamelog) or friend (feed) source.
     * @param {{userId: string, isSelf: boolean, fromDays: number, toDays?: number}} args
     */
    async getActivitySourceSliceV2({ userId, isSelf, fromDays, toDays = 0 }) {
        const fromDateIso = new Date(Date.now() - fromDays * 86400000).toISOString();
        // toDays === 0 means "up to now"; empty string disables the upper bound.
        const toDateIso = toDays > 0
            ? new Date(Date.now() - toDays * 86400000).toISOString()
            : '';
        return isSelf
            ? this.getCurrentUserLocationSliceV2(fromDateIso, toDateIso)
            : this.getFriendPresenceSliceV2(userId, fromDateIso, toDateIso);
    },

    /**
     * Fetches source rows strictly (or inclusively, self only) after a cursor,
     * dispatching to the self or friend source.
     * @param {{userId: string, isSelf: boolean, afterCreatedAt: string, inclusive?: boolean}} args
     */
    async getActivitySourceAfterV2({ userId, isSelf, afterCreatedAt, inclusive = false }) {
        return isSelf
            ? this.getCurrentUserLocationAfterV2(afterCreatedAt, inclusive)
            : this.getFriendPresenceAfterV2(userId, afterCreatedAt);
    },

    /**
     * Online/Offline feed events for a friend within [fromDateIso, toDateIso),
     * plus one carried-in event just before the window (sort_group 0) and,
     * when an upper bound is set, one event just after it — so session
     * reconstruction sees the state at both window edges.
     * @param {string} userId
     * @param {string} fromDateIso
     * @param {string} [toDateIso]
     * @returns {Promise<Array<{created_at: string, type: string}>>}
     */
    async getFriendPresenceSliceV2(userId, fromDateIso, toDateIso = '') {
        const rows = [];
        await sqliteService.execute(
            (dbRow) => {
                rows.push({ created_at: dbRow[0], type: dbRow[1] });
            },
            `
            SELECT created_at, type
            FROM (
                SELECT created_at, type, 0 AS sort_group
                FROM (
                    SELECT created_at, type
                    FROM ${dbVars.userPrefix}_feed_online_offline
                    WHERE user_id = @userId
                    AND (type = 'Online' OR type = 'Offline')
                    AND created_at < @fromDateIso
                    ORDER BY created_at DESC
                    LIMIT 1
                )
                UNION ALL
                SELECT created_at, type, 1 AS sort_group
                FROM ${dbVars.userPrefix}_feed_online_offline
                WHERE user_id = @userId
                AND (type = 'Online' OR type = 'Offline')
                AND created_at >= @fromDateIso
                ${toDateIso ? 'AND created_at < @toDateIso' : ''}
            )
            ORDER BY created_at ASC, sort_group ASC
            `,
            {
                '@userId': userId,
                '@fromDateIso': fromDateIso,
                '@toDateIso': toDateIso
            }
        );

        if (toDateIso) {
            // Fetch the first event at/after the upper bound as the closing edge.
            await sqliteService.execute(
                (dbRow) => {
                    rows.push({ created_at: dbRow[0], type: dbRow[1] });
                },
                `SELECT created_at, type
                FROM ${dbVars.userPrefix}_feed_online_offline
                WHERE user_id = @userId
                AND (type = 'Online' OR type = 'Offline')
                AND created_at >= @toDateIso
                ORDER BY created_at ASC
                LIMIT 1`,
                {
                    '@userId': userId,
                    '@toDateIso': toDateIso
                }
            );
        }

        // Re-sort because the closing-edge row was appended out of order.
        return rows.sort((left, right) => left.created_at.localeCompare(right.created_at));
    },

    /**
     * Online/Offline feed events for a friend strictly after a cursor,
     * ascending by created_at.
     * @param {string} userId
     * @param {string} afterCreatedAt
     * @returns {Promise<Array<{created_at: string, type: string}>>}
     */
    async getFriendPresenceAfterV2(userId, afterCreatedAt) {
        const rows = [];
        await sqliteService.execute(
            (dbRow) => {
                rows.push({ created_at: dbRow[0], type: dbRow[1] });
            },
            `SELECT created_at, type
            FROM ${dbVars.userPrefix}_feed_online_offline
            WHERE user_id = @userId
            AND (type = 'Online' OR type = 'Offline')
            AND created_at > @afterCreatedAt
            ORDER BY created_at`,
            {
                '@userId': userId,
                '@afterCreatedAt': afterCreatedAt
            }
        );
        return rows;
    },

    /**
     * gamelog_location rows for the current user in [fromDateIso, toDateIso),
     * including one row just before the window (sort_group 0) and, when an
     * upper bound is set, one row at/after it (sort_group 2) — the SQL keeps
     * everything ordered so no JS re-sort is needed.
     * @param {string} fromDateIso
     * @param {string} [toDateIso]
     * @returns {Promise<Array<{created_at: string, time: number}>>}
     */
    async getCurrentUserLocationSliceV2(fromDateIso, toDateIso = '') {
        const rows = [];
        await sqliteService.execute(
            (dbRow) => {
                // time may be NULL in the DB; coerce to 0.
                rows.push({ created_at: dbRow[0], time: dbRow[1] || 0 });
            },
            `
            SELECT created_at, time
            FROM (
                SELECT created_at, time, 0 AS sort_group
                FROM (
                    SELECT created_at, time
                    FROM gamelog_location
                    WHERE created_at < @fromDateIso
                    ORDER BY created_at DESC
                    LIMIT 1
                )
                UNION ALL
                SELECT created_at, time, 1 AS sort_group
                FROM gamelog_location
                WHERE created_at >= @fromDateIso
                ${toDateIso ? 'AND created_at < @toDateIso' : ''}
                ${toDateIso
                    ? `UNION ALL
                    SELECT created_at, time, 2 AS sort_group
                    FROM (
                        SELECT created_at, time
                        FROM gamelog_location
                        WHERE created_at >= @toDateIso
                        ORDER BY created_at
                        LIMIT 1
                    )`
                    : ''}
            )
            ORDER BY created_at ASC, sort_group ASC
            `,
            {
                '@fromDateIso': fromDateIso,
                '@toDateIso': toDateIso
            }
        );
        return rows;
    },

    /**
     * gamelog_location rows after (or at, when inclusive) a cursor, ascending.
     * @param {string} afterCreatedAt
     * @param {boolean} [inclusive]
     * @returns {Promise<Array<{created_at: string, time: number}>>}
     */
    async getCurrentUserLocationAfterV2(afterCreatedAt, inclusive = false) {
        const rows = [];
        const operator = inclusive ? '>=' : '>';
        await sqliteService.execute(
            (dbRow) => {
                rows.push({ created_at: dbRow[0], time: dbRow[1] || 0 });
            },
            `SELECT created_at, time
            FROM gamelog_location
            WHERE created_at ${operator} @afterCreatedAt
            ORDER BY created_at`,
            { '@afterCreatedAt': afterCreatedAt }
        );
        return rows;
    },

    /**
     * Reads the sync-state row for a user; null when absent.
     * @param {string} userId
     */
    async getActivitySyncStateV2(userId) {
        let row = null;
        await sqliteService.execute(
            (dbRow) => {
                row = {
                    userId: dbRow[0],
                    updatedAt: dbRow[1] || '',
                    isSelf: Boolean(dbRow[2]),
                    sourceLastCreatedAt: dbRow[3] || '',
                    // NULL column maps to null, not 0.
                    pendingSessionStartAt: typeof dbRow[4] === 'number' ? dbRow[4] : null,
                    cachedRangeDays: dbRow[5] || 0
                };
            },
            `SELECT user_id, updated_at, is_self, source_last_created_at, pending_session_start_at, cached_range_days
            FROM ${syncStateTable()}
            WHERE user_id = @userId`,
            { '@userId': userId }
        );
        return row;
    },

    /**
     * Inserts or replaces the sync-state row for a user.
     * @param {Object} entry
     */
    async upsertActivitySyncStateV2(entry) {
        await sqliteService.executeNonQuery(
            `INSERT OR REPLACE INTO ${syncStateTable()}
            (user_id, updated_at, is_self, source_last_created_at, pending_session_start_at, cached_range_days)
            VALUES (@userId, @updatedAt, @isSelf, @sourceLastCreatedAt, @pendingSessionStartAt, @cachedRangeDays)`,
            {
                '@userId': entry.userId,
                '@updatedAt': entry.updatedAt || '',
                '@isSelf': entry.isSelf ? 1 : 0,
                '@sourceLastCreatedAt': entry.sourceLastCreatedAt || '',
                '@pendingSessionStartAt': entry.pendingSessionStartAt,
                '@cachedRangeDays': entry.cachedRangeDays || 0
            }
        );
    },

    /**
     * Reads all cached sessions for a user, ordered by start time.
     * @param {string} userId
     * @returns {Promise<Array<{start: number, end: number, isOpenTail: boolean, sourceRevision: string}>>}
     */
    async getActivitySessionsV2(userId) {
        const sessions = [];
        await sqliteService.execute(
            (dbRow) => {
                sessions.push({
                    start: dbRow[0],
                    end: dbRow[1],
                    isOpenTail: Boolean(dbRow[2]),
                    sourceRevision: dbRow[3] || ''
                });
            },
            `SELECT start_at, end_at, is_open_tail, source_revision
            FROM ${sessionsTable()}
            WHERE user_id = @userId
            ORDER BY start_at`,
            { '@userId': userId }
        );
        return sessions;
    },

    /**
     * Atomically replaces every session row for a user (delete + bulk insert
     * in one transaction).
     * @param {string} userId
     * @param {Array<Object>} [sessions]
     */
    async replaceActivitySessionsV2(userId, sessions = []) {
        await sqliteService.executeNonQuery('BEGIN');
        try {
            await sqliteService.executeNonQuery(
                `DELETE FROM ${sessionsTable()} WHERE user_id = @userId`,
                { '@userId': userId }
            );
            await insertSessions(userId, sessions);
            await sqliteService.executeNonQuery('COMMIT');
        } catch (error) {
            // Roll back the partial write before surfacing the failure.
            await sqliteService.executeNonQuery('ROLLBACK');
            throw error;
        }
    },

    /**
     * Appends sessions in one transaction; when replaceFromStartAt is given,
     * every existing row starting at/after that time is deleted first (used to
     * re-write an open tail).
     * @param {{userId: string, sessions?: Array<Object>, replaceFromStartAt?: number | null}} args
     */
    async appendActivitySessionsV2({ userId, sessions = [], replaceFromStartAt = null }) {
        await sqliteService.executeNonQuery('BEGIN');
        try {
            if (replaceFromStartAt !== null) {
                await sqliteService.executeNonQuery(
                    `DELETE FROM ${sessionsTable()}
                    WHERE user_id = @userId AND start_at >= @replaceFromStartAt`,
                    {
                        '@userId': userId,
                        '@replaceFromStartAt': replaceFromStartAt
                    }
                );
            }
            await insertSessions(userId, sessions);
            await sqliteService.executeNonQuery('COMMIT');
        } catch (error) {
            await sqliteService.executeNonQuery('ROLLBACK');
            throw error;
        }
    },

    /**
     * Reads one range-cache bookkeeping row; null when absent.
     * @param {string} userId
     * @param {number} rangeDays
     * @param {number} cacheKind - One of ACTIVITY_RANGE_CACHE_KIND.
     */
    async getActivityRangeCacheV2(userId, rangeDays, cacheKind) {
        let row = null;
        await sqliteService.execute(
            (dbRow) => {
                row = {
                    userId: dbRow[0],
                    rangeDays: dbRow[1],
                    cacheKind: dbRow[2],
                    isComplete: Boolean(dbRow[3]),
                    builtFromCursor: dbRow[4] || '',
                    builtAt: dbRow[5] || ''
                };
            },
            `SELECT user_id, range_days, cache_kind, is_complete, built_from_cursor, built_at
            FROM ${rangeCacheTable()}
            WHERE user_id = @userId AND range_days = @rangeDays AND cache_kind = @cacheKind`,
            {
                '@userId': userId,
                '@rangeDays': rangeDays,
                '@cacheKind': cacheKind
            }
        );
        return row;
    },

    /**
     * Inserts or replaces a range-cache bookkeeping row.
     * @param {Object} entry
     */
    async upsertActivityRangeCacheV2(entry) {
        await sqliteService.executeNonQuery(
            `INSERT OR REPLACE INTO ${rangeCacheTable()}
            (user_id, range_days, cache_kind, is_complete, built_from_cursor, built_at)
            VALUES (@userId, @rangeDays, @cacheKind, @isComplete, @builtFromCursor, @builtAt)`,
            {
                '@userId': entry.userId,
                '@rangeDays': entry.rangeDays,
                '@cacheKind': entry.cacheKind,
                '@isComplete': entry.isComplete ? 1 : 0,
                '@builtFromCursor': entry.builtFromCursor || '',
                '@builtAt': entry.builtAt || ''
            }
        );
    },

    /**
     * Reads one bucket-cache row keyed by (owner, target, range, view,
     * exclude-key); JSON columns are parsed with safe fallbacks. Null when absent.
     * @param {{ownerUserId: string, targetUserId?: string, rangeDays: number,
     *          viewKind: string, excludeKey?: string}} args
     */
    async getActivityBucketCacheV2({
        ownerUserId,
        targetUserId = '',
        rangeDays,
        viewKind,
        excludeKey = ''
    }) {
        let row = null;
        await sqliteService.execute(
            (dbRow) => {
                row = {
                    ownerUserId: dbRow[0],
                    targetUserId: dbRow[1],
                    rangeDays: dbRow[2],
                    viewKind: dbRow[3],
                    excludeKey: dbRow[4] || '',
                    bucketVersion: dbRow[5] || 1,
                    builtFromCursor: dbRow[6] || '',
                    rawBuckets: parseJson(dbRow[7], []),
                    normalizedBuckets: parseJson(dbRow[8], []),
                    summary: parseJson(dbRow[9], {}),
                    builtAt: dbRow[10] || ''
                };
            },
            `SELECT user_id, target_user_id, range_days, view_kind, exclude_key, bucket_version, built_from_cursor, raw_buckets_json, normalized_buckets_json, summary_json, built_at
            FROM ${bucketCacheTable()}
            WHERE user_id = @ownerUserId AND target_user_id = @targetUserId AND range_days = @rangeDays AND view_kind = @viewKind AND exclude_key = @excludeKey`,
            {
                '@ownerUserId': ownerUserId,
                '@targetUserId': targetUserId,
                '@rangeDays': rangeDays,
                '@viewKind': viewKind,
                '@excludeKey': excludeKey
            }
        );
        return row;
    },

    /**
     * Inserts or replaces a bucket-cache row; array/object fields are
     * serialized to JSON columns.
     * @param {Object} entry
     */
    async upsertActivityBucketCacheV2(entry) {
        await sqliteService.executeNonQuery(
            `INSERT OR REPLACE INTO ${bucketCacheTable()}
            (user_id, target_user_id, range_days, view_kind, exclude_key, bucket_version, built_from_cursor, raw_buckets_json, normalized_buckets_json, summary_json, built_at)
            VALUES (@ownerUserId, @targetUserId, @rangeDays, @viewKind, @excludeKey, @bucketVersion, @builtFromCursor, @rawBucketsJson, @normalizedBucketsJson, @summaryJson, @builtAt)`,
            {
                '@ownerUserId': entry.ownerUserId,
                '@targetUserId': entry.targetUserId || '',
                '@rangeDays': entry.rangeDays,
                '@viewKind': entry.viewKind,
                '@excludeKey': entry.excludeKey || '',
                '@bucketVersion': entry.bucketVersion || 1,
                '@builtFromCursor': entry.builtFromCursor || '',
                '@rawBucketsJson': JSON.stringify(entry.rawBuckets || []),
                '@normalizedBucketsJson': JSON.stringify(entry.normalizedBuckets || []),
                '@summaryJson': JSON.stringify(entry.summary || {}),
                '@builtAt': entry.builtAt || ''
            }
        );
    },

    /**
     * Reads the top-worlds cache for a (user, range). The cursor/builtAt
     * bookkeeping columns are duplicated on every row; the last row read wins.
     * Returns null when no rows exist.
     * @param {string} userId
     * @param {number} rangeDays
     */
    async getActivityTopWorldsCacheV2(userId, rangeDays) {
        const worlds = [];
        let builtFromCursor = '';
        let builtAt = '';
        await sqliteService.execute(
            (dbRow) => {
                builtFromCursor = dbRow[0] || builtFromCursor;
                builtAt = dbRow[1] || builtAt;
                // dbRow[2] (rank_index) is implied by iteration order and skipped.
                worlds.push({
                    worldId: dbRow[3],
                    worldName: dbRow[4],
                    visitCount: dbRow[5] || 0,
                    totalTime: dbRow[6] || 0
                });
            },
            `SELECT built_from_cursor, built_at, rank_index, world_id, world_name, visit_count, total_time
            FROM ${topWorldsCacheTable()}
            WHERE user_id = @userId AND range_days = @rangeDays
            ORDER BY rank_index`,
            {
                '@userId': userId,
                '@rangeDays': rangeDays
            }
        );
        if (worlds.length === 0) {
            return null;
        }
        return {
            userId,
            rangeDays,
            builtFromCursor,
            builtAt,
            worlds
        };
    },

    /**
     * Atomically replaces the top-worlds cache for a (user, range): deletes
     * existing rows, then inserts one row per world with its rank index, all
     * in one transaction.
     * @param {Object} entry
     */
    async replaceActivityTopWorldsCacheV2(entry) {
        await sqliteService.executeNonQuery('BEGIN');
        try {
            await sqliteService.executeNonQuery(
                `DELETE FROM ${topWorldsCacheTable()} WHERE user_id = @userId AND range_days = @rangeDays`,
                {
                    '@userId': entry.userId,
                    '@rangeDays': entry.rangeDays
                }
            );

            for (let index = 0; index < entry.worlds.length; index++) {
                const world = entry.worlds[index];
                await sqliteService.executeNonQuery(
                    `INSERT OR REPLACE INTO ${topWorldsCacheTable()}
                    (user_id, range_days, rank_index, world_id, world_name, visit_count, total_time, built_from_cursor, built_at)
                    VALUES (@userId, @rangeDays, @rankIndex, @worldId, @worldName, @visitCount, @totalTime, @builtFromCursor, @builtAt)`,
                    {
                        '@userId': entry.userId,
                        '@rangeDays': entry.rangeDays,
                        '@rankIndex': index,
                        '@worldId': world.worldId,
                        '@worldName': world.worldName || world.worldId,
                        '@visitCount': world.visitCount || 0,
                        '@totalTime': world.totalTime || 0,
                        '@builtFromCursor': entry.builtFromCursor || '',
                        '@builtAt': entry.builtAt || ''
                    }
                );
            }

            await sqliteService.executeNonQuery('COMMIT');
        } catch (error) {
            await sqliteService.executeNonQuery('ROLLBACK');
            throw error;
        }
    }
};
|
||||
|
||||
async function insertSessions(userId, sessions = []) {
|
||||
if (sessions.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const chunkSize = 250;
|
||||
for (let chunkStart = 0; chunkStart < sessions.length; chunkStart += chunkSize) {
|
||||
const chunk = sessions.slice(chunkStart, chunkStart + chunkSize);
|
||||
const args = {};
|
||||
const values = chunk.map((session, index) => {
|
||||
const suffix = `${chunkStart + index}`;
|
||||
args[`@userId_${suffix}`] = userId;
|
||||
args[`@startAt_${suffix}`] = session.start;
|
||||
args[`@endAt_${suffix}`] = session.end;
|
||||
args[`@isOpenTail_${suffix}`] = session.isOpenTail ? 1 : 0;
|
||||
args[`@sourceRevision_${suffix}`] = session.sourceRevision || '';
|
||||
return `(@userId_${suffix}, @startAt_${suffix}, @endAt_${suffix}, @isOpenTail_${suffix}, @sourceRevision_${suffix})`;
|
||||
});
|
||||
|
||||
await sqliteService.executeNonQuery(
|
||||
`INSERT OR REPLACE INTO ${sessionsTable()}
|
||||
(user_id, start_at, end_at, is_open_tail, source_revision)
|
||||
VALUES ${values.join(', ')}`,
|
||||
args
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export { activityV2 };
|
||||
@@ -594,59 +594,6 @@ const feed = {
|
||||
return feedDatabase;
|
||||
},
|
||||
|
||||
async getOnlineFrequencyData(userId) {
|
||||
const data = [];
|
||||
await sqliteService.execute(
|
||||
(dbRow) => {
|
||||
data.push(dbRow[0]);
|
||||
},
|
||||
`SELECT created_at FROM ${dbVars.userPrefix}_feed_online_offline WHERE type = 'Online' AND user_id = @userId ORDER BY created_at`,
|
||||
{ '@userId': userId }
|
||||
);
|
||||
return data;
|
||||
},
|
||||
|
||||
/**
|
||||
* Get Online and Offline events for a user to build sessions
|
||||
* @param {string} userId
|
||||
* @returns {Promise<Array<{created_at: string, type: string}>>}
|
||||
*/
|
||||
async getOnlineOfflineSessions(userId) {
|
||||
const data = [];
|
||||
await sqliteService.execute(
|
||||
(dbRow) => {
|
||||
data.push({ created_at: dbRow[0], type: dbRow[1] });
|
||||
},
|
||||
`SELECT created_at, type FROM ${dbVars.userPrefix}_feed_online_offline WHERE user_id = @userId AND (type = 'Online' OR type = 'Offline') ORDER BY created_at`,
|
||||
{ '@userId': userId }
|
||||
);
|
||||
return data;
|
||||
},
|
||||
|
||||
/**
|
||||
* @param {string} userId
|
||||
* @param {string} afterCreatedAt
|
||||
* @returns {Promise<Array<{created_at: string, type: string}>>}
|
||||
*/
|
||||
async getOnlineOfflineSessionsAfter(userId, afterCreatedAt) {
|
||||
const data = [];
|
||||
await sqliteService.execute(
|
||||
(dbRow) => {
|
||||
data.push({ created_at: dbRow[0], type: dbRow[1] });
|
||||
},
|
||||
`SELECT created_at, type FROM ${dbVars.userPrefix}_feed_online_offline
|
||||
WHERE user_id = @userId
|
||||
AND (type = 'Online' OR type = 'Offline')
|
||||
AND created_at > @afterCreatedAt
|
||||
ORDER BY created_at`,
|
||||
{
|
||||
'@userId': userId,
|
||||
'@afterCreatedAt': afterCreatedAt
|
||||
}
|
||||
);
|
||||
return data;
|
||||
},
|
||||
|
||||
/**
|
||||
* @param {number} days - Number of days to look back
|
||||
* @param {number} limit - Max number of worlds to return
|
||||
|
||||
@@ -348,9 +348,12 @@ const gameLog = {
|
||||
};
|
||||
data.set(row.location, row);
|
||||
},
|
||||
`SELECT created_at, location, time, world_name, group_name FROM gamelog_location WHERE location LIKE '%${groupId}%' ORDER BY id DESC`,
|
||||
`SELECT created_at, location, time, world_name, group_name
|
||||
FROM gamelog_location
|
||||
WHERE location LIKE @groupId
|
||||
ORDER BY id DESC`,
|
||||
{
|
||||
'@groupId': groupId
|
||||
'@groupId': `%${groupId}%`
|
||||
}
|
||||
);
|
||||
return data;
|
||||
@@ -828,7 +831,7 @@ const gameLog = {
|
||||
checkString = `AND resource_type != 'StringLoad'`;
|
||||
}
|
||||
if (!resourceload_image) {
|
||||
checkString = `AND resource_type != 'ImageLoad'`;
|
||||
checkImage = `AND resource_type != 'ImageLoad'`;
|
||||
}
|
||||
selects.push(
|
||||
`SELECT * FROM (SELECT id, created_at, resource_type AS type, NULL AS display_name, location, NULL AS user_id, NULL AS time, NULL AS world_id, NULL AS world_name, NULL AS group_name, NULL AS instance_id, NULL AS video_url, NULL AS video_name, NULL AS video_id, resource_url, resource_type, NULL AS data, NULL AS message FROM gamelog_resource_load WHERE 1=1 ${checkString} ${checkImage} ORDER BY id DESC LIMIT @perTable)`
|
||||
@@ -1048,7 +1051,7 @@ const gameLog = {
|
||||
checkString = `AND resource_type != 'StringLoad'`;
|
||||
}
|
||||
if (!resourceload_image) {
|
||||
checkString = `AND resource_type != 'ImageLoad'`;
|
||||
checkImage = `AND resource_type != 'ImageLoad'`;
|
||||
}
|
||||
selects.push(
|
||||
`SELECT * FROM (SELECT id, created_at, resource_type AS type, NULL AS display_name, location, NULL AS user_id, NULL AS time, NULL AS world_id, NULL AS world_name, NULL AS group_name, NULL AS instance_id, NULL AS video_url, NULL AS video_name, NULL AS video_id, resource_url, resource_type, NULL AS data, NULL AS message FROM gamelog_resource_load WHERE resource_url LIKE @searchLike ${checkString} ${checkImage} ORDER BY id DESC LIMIT @perTable)`
|
||||
@@ -1376,28 +1379,60 @@ const gameLog = {
|
||||
* Get current user's online sessions from gamelog_location
|
||||
* Each row has created_at (leave time) and time (duration in ms)
|
||||
* Session start = created_at - time, Session end = created_at
|
||||
* @param {number} [fromDays=0] - How many days back to start (0 = all time)
|
||||
* @param {number} [toDays=0] - How many days back to stop (0 = now)
|
||||
* @returns {Promise<Array<{created_at: string, time: number}>>}
|
||||
*/
|
||||
async getCurrentUserOnlineSessions() {
|
||||
async getCurrentUserOnlineSessions(fromDays = 0, toDays = 0) {
|
||||
const data = [];
|
||||
await sqliteService.execute((dbRow) => {
|
||||
data.push({ created_at: dbRow[0], time: dbRow[1] || 0 });
|
||||
}, `SELECT created_at, time FROM gamelog_location ORDER BY created_at`);
|
||||
const now = new Date();
|
||||
const params = {};
|
||||
const where = [];
|
||||
|
||||
if (fromDays > 0) {
|
||||
const fromDate = new Date(now.getTime() - fromDays * 86400000).toISOString();
|
||||
params['@fromDate'] = fromDate;
|
||||
where.push('created_at >= @fromDate');
|
||||
|
||||
await sqliteService.execute(
|
||||
(dbRow) => {
|
||||
data.push({ created_at: dbRow[0], time: dbRow[1] || 0 });
|
||||
},
|
||||
'SELECT created_at, time FROM gamelog_location WHERE created_at < @fromDate ORDER BY created_at DESC LIMIT 1',
|
||||
{ '@fromDate': fromDate }
|
||||
);
|
||||
}
|
||||
if (toDays > 0) {
|
||||
const toDate = new Date(now.getTime() - toDays * 86400000).toISOString();
|
||||
params['@toDate'] = toDate;
|
||||
where.push('created_at < @toDate');
|
||||
}
|
||||
|
||||
const dateClause = where.length > 0 ? `WHERE ${where.join(' AND ')}` : '';
|
||||
await sqliteService.execute(
|
||||
(dbRow) => {
|
||||
data.push({ created_at: dbRow[0], time: dbRow[1] || 0 });
|
||||
},
|
||||
`SELECT created_at, time FROM gamelog_location ${dateClause} ORDER BY created_at`,
|
||||
params
|
||||
);
|
||||
return data;
|
||||
},
|
||||
|
||||
/**
|
||||
* Get current user's online sessions after a given timestamp (incremental).
|
||||
* @param {string} afterCreatedAt - Only return rows created after this timestamp
|
||||
* @param {boolean} [inclusive=false] - If true, use >= instead of > to re-read the last record
|
||||
* @returns {Promise<Array<{created_at: string, time: number}>>}
|
||||
*/
|
||||
async getCurrentUserOnlineSessionsAfter(afterCreatedAt) {
|
||||
async getCurrentUserOnlineSessionsAfter(afterCreatedAt, inclusive = false) {
|
||||
const data = [];
|
||||
const op = inclusive ? '>=' : '>';
|
||||
await sqliteService.execute(
|
||||
(dbRow) => {
|
||||
data.push({ created_at: dbRow[0], time: dbRow[1] || 0 });
|
||||
},
|
||||
`SELECT created_at, time FROM gamelog_location WHERE created_at > @after ORDER BY created_at`,
|
||||
`SELECT created_at, time FROM gamelog_location WHERE created_at ${op} @after ORDER BY created_at`,
|
||||
{ '@after': afterCreatedAt }
|
||||
);
|
||||
return data;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { activityCache } from './activityCache.js';
|
||||
import { activityV2 } from './activityV2.js';
|
||||
import { avatarFavorites } from './avatarFavorites.js';
|
||||
import { avatarTags } from './avatarTags.js';
|
||||
import { feed } from './feed.js';
|
||||
@@ -26,7 +26,7 @@ const dbVars = {
|
||||
|
||||
const database = {
|
||||
...feed,
|
||||
...activityCache,
|
||||
...activityV2,
|
||||
...gameLog,
|
||||
...notifications,
|
||||
...moderation,
|
||||
@@ -76,13 +76,71 @@ const database = {
|
||||
`CREATE INDEX IF NOT EXISTS ${dbVars.userPrefix}_feed_online_offline_user_created_idx ON ${dbVars.userPrefix}_feed_online_offline (user_id, created_at)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_activity_cache_meta (user_id TEXT PRIMARY KEY, updated_at TEXT, is_self INTEGER DEFAULT 0, source_last_created_at TEXT, pending_session_start_at INTEGER)`
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_activity_sync_state_v2 (
|
||||
user_id TEXT PRIMARY KEY,
|
||||
updated_at TEXT NOT NULL DEFAULT '',
|
||||
is_self INTEGER NOT NULL DEFAULT 0,
|
||||
source_last_created_at TEXT NOT NULL DEFAULT '',
|
||||
pending_session_start_at INTEGER,
|
||||
cached_range_days INTEGER NOT NULL DEFAULT 0
|
||||
)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_activity_cache_sessions (user_id TEXT NOT NULL, start_at INTEGER NOT NULL, end_at INTEGER NOT NULL, PRIMARY KEY (user_id, start_at, end_at))`
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_activity_sessions_v2 (
|
||||
session_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL,
|
||||
start_at INTEGER NOT NULL,
|
||||
end_at INTEGER NOT NULL,
|
||||
is_open_tail INTEGER NOT NULL DEFAULT 0,
|
||||
source_revision TEXT NOT NULL DEFAULT ''
|
||||
)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE INDEX IF NOT EXISTS ${dbVars.userPrefix}_activity_cache_sessions_user_start_idx ON ${dbVars.userPrefix}_activity_cache_sessions (user_id, start_at)`
|
||||
`CREATE INDEX IF NOT EXISTS ${dbVars.userPrefix}_activity_sessions_v2_user_start_idx ON ${dbVars.userPrefix}_activity_sessions_v2 (user_id, start_at)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE INDEX IF NOT EXISTS ${dbVars.userPrefix}_activity_sessions_v2_user_end_idx ON ${dbVars.userPrefix}_activity_sessions_v2 (user_id, end_at)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_activity_range_cache_v2 (
|
||||
user_id TEXT NOT NULL,
|
||||
range_days INTEGER NOT NULL,
|
||||
cache_kind INTEGER NOT NULL,
|
||||
is_complete INTEGER NOT NULL DEFAULT 0,
|
||||
built_from_cursor TEXT NOT NULL DEFAULT '',
|
||||
built_at TEXT NOT NULL DEFAULT '',
|
||||
PRIMARY KEY (user_id, range_days, cache_kind)
|
||||
)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_activity_bucket_cache_v2 (
|
||||
user_id TEXT NOT NULL,
|
||||
target_user_id TEXT NOT NULL DEFAULT '',
|
||||
range_days INTEGER NOT NULL,
|
||||
view_kind TEXT NOT NULL,
|
||||
exclude_key TEXT NOT NULL DEFAULT '',
|
||||
bucket_version INTEGER NOT NULL DEFAULT 1,
|
||||
raw_buckets_json TEXT NOT NULL DEFAULT '[]',
|
||||
normalized_buckets_json TEXT NOT NULL DEFAULT '[]',
|
||||
built_from_cursor TEXT NOT NULL DEFAULT '',
|
||||
summary_json TEXT NOT NULL DEFAULT '{}',
|
||||
built_at TEXT NOT NULL DEFAULT '',
|
||||
PRIMARY KEY (user_id, target_user_id, range_days, view_kind, exclude_key)
|
||||
)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_activity_top_worlds_cache_v2 (
|
||||
user_id TEXT NOT NULL,
|
||||
range_days INTEGER NOT NULL,
|
||||
rank_index INTEGER NOT NULL,
|
||||
world_id TEXT NOT NULL,
|
||||
world_name TEXT NOT NULL,
|
||||
visit_count INTEGER NOT NULL DEFAULT 0,
|
||||
total_time INTEGER NOT NULL DEFAULT 0,
|
||||
built_from_cursor TEXT NOT NULL DEFAULT '',
|
||||
built_at TEXT NOT NULL DEFAULT '',
|
||||
PRIMARY KEY (user_id, range_days, rank_index)
|
||||
)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${dbVars.userPrefix}_friend_log_current (user_id TEXT PRIMARY KEY, display_name TEXT, trust_level TEXT, friend_number INTEGER)`
|
||||
@@ -117,6 +175,9 @@ const database = {
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS gamelog_location (id INTEGER PRIMARY KEY, created_at TEXT, location TEXT, world_id TEXT, world_name TEXT, time INTEGER, group_name TEXT, UNIQUE(created_at, location))`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE INDEX IF NOT EXISTS gamelog_location_created_at_idx ON gamelog_location (created_at)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS gamelog_join_leave (id INTEGER PRIMARY KEY, created_at TEXT, type TEXT, display_name TEXT, location TEXT, user_id TEXT, time INTEGER, UNIQUE(created_at, type, display_name))`
|
||||
);
|
||||
|
||||
@@ -10,16 +10,10 @@ const tableAlter = {
|
||||
await this.updateTableForGroupNames();
|
||||
await this.addFriendLogFriendNumber();
|
||||
await this.updateTableForAvatarHistory();
|
||||
await this.ensureActivityCacheTables();
|
||||
// }
|
||||
// await sqliteService.executeNonQuery('PRAGMA user_version = 1');
|
||||
},
|
||||
|
||||
async updateActivityTabDatabaseVersion() {
|
||||
await this.ensureActivityCacheTables();
|
||||
await this.ensureFeedOnlineOfflineIndexes();
|
||||
},
|
||||
|
||||
async updateTableForGroupNames() {
|
||||
var tables = [];
|
||||
await sqliteService.execute((dbRow) => {
|
||||
@@ -88,36 +82,6 @@ const tableAlter = {
|
||||
}
|
||||
},
|
||||
|
||||
async ensureActivityCacheTables() {
|
||||
const tables = [];
|
||||
await sqliteService.execute((dbRow) => {
|
||||
tables.push(dbRow[0]);
|
||||
}, `SELECT name FROM sqlite_schema WHERE type='table' AND name LIKE '%_feed_online_offline'`);
|
||||
for (const tableName of tables) {
|
||||
const userPrefix = tableName.replace(/_feed_online_offline$/, '');
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${userPrefix}_activity_cache_meta (user_id TEXT PRIMARY KEY, updated_at TEXT, is_self INTEGER DEFAULT 0, source_last_created_at TEXT, pending_session_start_at INTEGER)`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE TABLE IF NOT EXISTS ${userPrefix}_activity_cache_sessions (user_id TEXT NOT NULL, start_at INTEGER NOT NULL, end_at INTEGER NOT NULL, PRIMARY KEY (user_id, start_at, end_at))`
|
||||
);
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE INDEX IF NOT EXISTS ${userPrefix}_activity_cache_sessions_user_start_idx ON ${userPrefix}_activity_cache_sessions (user_id, start_at)`
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
async ensureFeedOnlineOfflineIndexes() {
|
||||
const tables = [];
|
||||
await sqliteService.execute((dbRow) => {
|
||||
tables.push(dbRow[0]);
|
||||
}, `SELECT name FROM sqlite_schema WHERE type='table' AND name LIKE '%_feed_online_offline'`);
|
||||
for (const tableName of tables) {
|
||||
await sqliteService.executeNonQuery(
|
||||
`CREATE INDEX IF NOT EXISTS ${tableName}_user_created_idx ON ${tableName} (user_id, created_at)`
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export { tableAlter };
|
||||
|
||||
373
src/shared/utils/activityEngine.js
Normal file
373
src/shared/utils/activityEngine.js
Normal file
@@ -0,0 +1,373 @@
|
||||
export const ONLINE_SESSION_MERGE_GAP_MS = 5 * 60 * 1000;
|
||||
export const DEFAULT_MAX_SESSION_MS = 8 * 60 * 60 * 1000;
|
||||
const ONE_HOUR_MS = 60 * 60 * 1000;
|
||||
|
||||
export function buildSessionsFromEvents(events, initialStart = null) {
|
||||
const sessions = [];
|
||||
let currentStart = initialStart;
|
||||
|
||||
for (const event of events) {
|
||||
const ts = new Date(event.created_at).getTime();
|
||||
if (event.type === 'Online') {
|
||||
if (currentStart !== null) {
|
||||
sessions.push({ start: currentStart, end: ts });
|
||||
}
|
||||
currentStart = ts;
|
||||
} else if (event.type === 'Offline' && currentStart !== null) {
|
||||
sessions.push({ start: currentStart, end: ts });
|
||||
currentStart = null;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
pendingSessionStartAt: currentStart,
|
||||
sessions
|
||||
};
|
||||
}
|
||||
|
||||
export function buildSessionsFromGamelog(rows, mergeGapMs = ONLINE_SESSION_MERGE_GAP_MS, nowMs = Date.now()) {
|
||||
if (rows.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const rawSessions = [];
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
const start = new Date(rows[i].created_at).getTime();
|
||||
let duration = rows[i].time || 0;
|
||||
if (duration === 0) {
|
||||
if (i < rows.length - 1) {
|
||||
duration = new Date(rows[i + 1].created_at).getTime() - start;
|
||||
} else {
|
||||
duration = nowMs - start;
|
||||
}
|
||||
duration = Math.min(duration, 24 * 60 * 60 * 1000);
|
||||
}
|
||||
if (duration > 0) {
|
||||
rawSessions.push({ start, end: start + duration });
|
||||
}
|
||||
}
|
||||
|
||||
rawSessions.sort((a, b) => a.start - b.start);
|
||||
return mergeSessions([], rawSessions, mergeGapMs);
|
||||
}
|
||||
|
||||
export function mergeSessions(olderSessions, newerSessions, mergeGapMs = ONLINE_SESSION_MERGE_GAP_MS) {
|
||||
if (olderSessions.length === 0 && newerSessions.length === 0) {
|
||||
return [];
|
||||
}
|
||||
const all = [...olderSessions.map(cloneSession), ...newerSessions.map(cloneSession)];
|
||||
if (all.length === 0) {
|
||||
return [];
|
||||
}
|
||||
all.sort((a, b) => a.start - b.start);
|
||||
|
||||
const merged = [all[0]];
|
||||
for (let i = 1; i < all.length; i++) {
|
||||
const last = merged[merged.length - 1];
|
||||
const current = all[i];
|
||||
if (current.start <= last.end + mergeGapMs) {
|
||||
last.end = Math.max(last.end, current.end);
|
||||
last.isOpenTail = last.isOpenTail || current.isOpenTail;
|
||||
last.sourceRevision = current.sourceRevision || last.sourceRevision || '';
|
||||
} else {
|
||||
merged.push(current);
|
||||
}
|
||||
}
|
||||
return merged;
|
||||
}
|
||||
|
||||
export function clipSessionsToRange(sessions, rangeStartMs, rangeEndMs = Date.now()) {
|
||||
return sessions
|
||||
.filter((session) => session.end > rangeStartMs && session.start < rangeEndMs)
|
||||
.map((session) => ({
|
||||
...session,
|
||||
start: Math.max(session.start, rangeStartMs),
|
||||
end: Math.min(session.end, rangeEndMs)
|
||||
}))
|
||||
.filter((session) => session.end > session.start);
|
||||
}
|
||||
|
||||
export function buildHeatmapBuckets(
|
||||
sessions,
|
||||
windowStartMs,
|
||||
nowMs,
|
||||
maxSessionMs = DEFAULT_MAX_SESSION_MS
|
||||
) {
|
||||
const buckets = new Float64Array(168);
|
||||
|
||||
for (const session of sessions) {
|
||||
const effectiveEnd = Math.min(session.end, session.start + maxSessionMs);
|
||||
const start = Math.max(session.start, windowStartMs);
|
||||
const end = Math.min(effectiveEnd, nowMs);
|
||||
if (end <= start) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let cursor = start;
|
||||
while (cursor < end) {
|
||||
const date = new Date(cursor);
|
||||
const slot = date.getDay() * 24 + date.getHours();
|
||||
const nextHour = new Date(cursor);
|
||||
nextHour.setMinutes(0, 0, 0);
|
||||
nextHour.setTime(nextHour.getTime() + ONE_HOUR_MS);
|
||||
const segmentEnd = Math.min(nextHour.getTime(), end);
|
||||
buckets[slot] += (segmentEnd - cursor) / 60000;
|
||||
cursor = segmentEnd;
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(buckets);
|
||||
}
|
||||
|
||||
export function buildOverlapBuckets(
|
||||
selfSessions,
|
||||
targetSessions,
|
||||
windowStartMs,
|
||||
nowMs,
|
||||
maxSessionMs = DEFAULT_MAX_SESSION_MS
|
||||
) {
|
||||
const intersections = [];
|
||||
let leftIndex = 0;
|
||||
let rightIndex = 0;
|
||||
|
||||
while (leftIndex < selfSessions.length && rightIndex < targetSessions.length) {
|
||||
const left = selfSessions[leftIndex];
|
||||
const right = targetSessions[rightIndex];
|
||||
const leftEnd = Math.min(left.end, left.start + maxSessionMs);
|
||||
const rightEnd = Math.min(right.end, right.start + maxSessionMs);
|
||||
const start = Math.max(left.start, right.start);
|
||||
const end = Math.min(leftEnd, rightEnd);
|
||||
|
||||
if (start < end) {
|
||||
intersections.push({ start, end });
|
||||
}
|
||||
|
||||
if (leftEnd < rightEnd) {
|
||||
leftIndex++;
|
||||
} else {
|
||||
rightIndex++;
|
||||
}
|
||||
}
|
||||
|
||||
return buildHeatmapBuckets(intersections, windowStartMs, nowMs, maxSessionMs);
|
||||
}
|
||||
|
||||
export function normalizeBuckets(buckets, thresholdMinutes, capPercentile, mode) {
|
||||
const thresholded = buckets.map((value) => (value >= thresholdMinutes ? value : 0));
|
||||
const positiveValues = thresholded.filter((value) => value > 0).sort((a, b) => a - b);
|
||||
const cap = positiveValues.length > 0 ? percentile(positiveValues, capPercentile) : 1;
|
||||
const normalized = new Float64Array(168);
|
||||
|
||||
for (let index = 0; index < 168; index++) {
|
||||
const value = thresholded[index];
|
||||
if (value <= 0) {
|
||||
normalized[index] = 0;
|
||||
continue;
|
||||
}
|
||||
const scaled = mode === 'log'
|
||||
? Math.log1p(value) / Math.log1p(cap)
|
||||
: Math.sqrt(value / cap);
|
||||
normalized[index] = Math.min(Math.max(scaled, 0), 1);
|
||||
}
|
||||
|
||||
return Array.from(normalized);
|
||||
}
|
||||
|
||||
export function computePeaksFromBuckets(buckets, dayLabels) {
|
||||
const grid = bucketsToGrid(buckets);
|
||||
const daySums = new Array(7).fill(0);
|
||||
const hourSums = new Array(24).fill(0);
|
||||
for (let day = 0; day < 7; day++) {
|
||||
for (let hour = 0; hour < 24; hour++) {
|
||||
daySums[day] += grid[day][hour];
|
||||
hourSums[hour] += grid[day][hour];
|
||||
}
|
||||
}
|
||||
|
||||
const maxDaySum = Math.max(...daySums, 0);
|
||||
const peakDay = maxDaySum > 0 ? dayLabels[daySums.indexOf(maxDaySum)] : '';
|
||||
const maxHourSum = Math.max(...hourSums, 0);
|
||||
let peakTime = '';
|
||||
if (maxHourSum > 0) {
|
||||
const threshold = maxHourSum * 0.7;
|
||||
let startHour = hourSums.indexOf(maxHourSum);
|
||||
let endHour = startHour;
|
||||
while (startHour > 0 && hourSums[startHour - 1] >= threshold) {
|
||||
startHour--;
|
||||
}
|
||||
while (endHour < 23 && hourSums[endHour + 1] >= threshold) {
|
||||
endHour++;
|
||||
}
|
||||
peakTime = startHour === endHour
|
||||
? `${String(startHour).padStart(2, '0')}:00`
|
||||
: `${String(startHour).padStart(2, '0')}:00-${String(endHour + 1).padStart(2, '0')}:00`;
|
||||
}
|
||||
|
||||
return { peakDay, peakTime };
|
||||
}
|
||||
|
||||
export function findBestOverlapTimeFromBuckets(buckets, dayLabels) {
|
||||
const grid = bucketsToGrid(buckets);
|
||||
const hourSums = new Array(24).fill(0);
|
||||
for (let hour = 0; hour < 24; hour++) {
|
||||
for (let day = 0; day < 7; day++) {
|
||||
hourSums[hour] += grid[day][hour];
|
||||
}
|
||||
}
|
||||
|
||||
const maxHourSum = Math.max(...hourSums, 0);
|
||||
if (maxHourSum === 0) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const threshold = maxHourSum * 0.6;
|
||||
let startHour = hourSums.indexOf(maxHourSum);
|
||||
let endHour = startHour;
|
||||
while (startHour > 0 && hourSums[startHour - 1] >= threshold) {
|
||||
startHour--;
|
||||
}
|
||||
while (endHour < 23 && hourSums[endHour + 1] >= threshold) {
|
||||
endHour++;
|
||||
}
|
||||
|
||||
const daySums = new Array(7).fill(0);
|
||||
for (let day = 0; day < 7; day++) {
|
||||
for (let hour = startHour; hour <= endHour; hour++) {
|
||||
daySums[day] += grid[day][hour];
|
||||
}
|
||||
}
|
||||
const maxDaySum = Math.max(...daySums, 0);
|
||||
if (maxDaySum === 0) {
|
||||
return '';
|
||||
}
|
||||
const peakDayLabel = dayLabels[daySums.indexOf(maxDaySum)];
|
||||
return `${peakDayLabel}, ${String(startHour).padStart(2, '0')}:00-${String(endHour + 1).padStart(2, '0')}:00`;
|
||||
}
|
||||
|
||||
export function computeActivityView({
|
||||
sessions,
|
||||
dayLabels,
|
||||
rangeDays,
|
||||
nowMs = Date.now(),
|
||||
normalizeConfig,
|
||||
maxSessionMs = DEFAULT_MAX_SESSION_MS
|
||||
}) {
|
||||
const windowStartMs = nowMs - rangeDays * 86400000;
|
||||
const clippedSessions = clipSessionsToRange(sessions, windowStartMs, nowMs);
|
||||
const rawBuckets = buildHeatmapBuckets(clippedSessions, windowStartMs, nowMs, maxSessionMs);
|
||||
const normalizedBuckets = normalizeBuckets(
|
||||
rawBuckets,
|
||||
normalizeConfig.thresholdMinutes,
|
||||
normalizeConfig.capPercentile,
|
||||
normalizeConfig.mode
|
||||
);
|
||||
const { peakDay, peakTime } = computePeaksFromBuckets(rawBuckets, dayLabels);
|
||||
return {
|
||||
rangeDays,
|
||||
rawBuckets,
|
||||
normalizedBuckets,
|
||||
peakDay,
|
||||
peakTime,
|
||||
filteredEventCount: clippedSessions.length
|
||||
};
|
||||
}
|
||||
|
||||
export function computeOverlapView({
|
||||
selfSessions,
|
||||
targetSessions,
|
||||
dayLabels,
|
||||
rangeDays,
|
||||
excludeHours = null,
|
||||
nowMs = Date.now(),
|
||||
normalizeConfig,
|
||||
maxSessionMs = DEFAULT_MAX_SESSION_MS
|
||||
}) {
|
||||
const windowStartMs = nowMs - rangeDays * 86400000;
|
||||
const clippedSelf = clipSessionsToRange(selfSessions, windowStartMs, nowMs);
|
||||
const clippedTarget = clipSessionsToRange(targetSessions, windowStartMs, nowMs);
|
||||
const selfBuckets = buildHeatmapBuckets(clippedSelf, windowStartMs, nowMs, maxSessionMs);
|
||||
const targetBuckets = buildHeatmapBuckets(clippedTarget, windowStartMs, nowMs, maxSessionMs);
|
||||
const rawBuckets = buildOverlapBuckets(clippedSelf, clippedTarget, windowStartMs, nowMs, maxSessionMs);
|
||||
|
||||
if (excludeHours) {
|
||||
applyExcludeHours(rawBuckets, selfBuckets, targetBuckets, excludeHours);
|
||||
}
|
||||
|
||||
const overlapMinutes = sum(rawBuckets);
|
||||
const selfMinutes = sum(selfBuckets);
|
||||
const targetMinutes = sum(targetBuckets);
|
||||
const denominator = Math.min(selfMinutes, targetMinutes);
|
||||
const overlapPercent = denominator > 0 ? Math.round((overlapMinutes / denominator) * 100) : 0;
|
||||
const normalizedBuckets = normalizeBuckets(
|
||||
rawBuckets,
|
||||
normalizeConfig.thresholdMinutes,
|
||||
normalizeConfig.capPercentile,
|
||||
normalizeConfig.mode
|
||||
);
|
||||
const bestOverlapTime = overlapMinutes > 0
|
||||
? findBestOverlapTimeFromBuckets(rawBuckets, dayLabels)
|
||||
: '';
|
||||
|
||||
return {
|
||||
rangeDays,
|
||||
rawBuckets,
|
||||
normalizedBuckets,
|
||||
overlapPercent,
|
||||
bestOverlapTime
|
||||
};
|
||||
}
|
||||
|
||||
function cloneSession(session) {
|
||||
return {
|
||||
start: session.start,
|
||||
end: session.end,
|
||||
isOpenTail: Boolean(session.isOpenTail),
|
||||
sourceRevision: session.sourceRevision || ''
|
||||
};
|
||||
}
|
||||
|
||||
function percentile(sortedValues, percentileValue) {
|
||||
if (sortedValues.length === 0) {
|
||||
return 1;
|
||||
}
|
||||
const index = (percentileValue / 100) * (sortedValues.length - 1);
|
||||
const lower = Math.floor(index);
|
||||
const upper = Math.ceil(index);
|
||||
if (lower === upper) {
|
||||
return sortedValues[lower];
|
||||
}
|
||||
return sortedValues[lower] + (sortedValues[upper] - sortedValues[lower]) * (index - lower);
|
||||
}
|
||||
|
||||
function bucketsToGrid(buckets) {
|
||||
const grid = Array.from({ length: 7 }, () => new Array(24).fill(0));
|
||||
for (let slot = 0; slot < 168; slot++) {
|
||||
grid[Math.floor(slot / 24)][slot % 24] = buckets[slot];
|
||||
}
|
||||
return grid;
|
||||
}
|
||||
|
||||
function applyExcludeHours(rawBuckets, selfBuckets, targetBuckets, excludeHours) {
|
||||
const { startHour, endHour } = excludeHours;
|
||||
for (let day = 0; day < 7; day++) {
|
||||
if (startHour <= endHour) {
|
||||
zeroHourRange(day, startHour, endHour, rawBuckets, selfBuckets, targetBuckets);
|
||||
} else {
|
||||
zeroHourRange(day, startHour, 24, rawBuckets, selfBuckets, targetBuckets);
|
||||
zeroHourRange(day, 0, endHour, rawBuckets, selfBuckets, targetBuckets);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function zeroHourRange(day, startHour, endHour, rawBuckets, selfBuckets, targetBuckets) {
|
||||
for (let hour = startHour; hour < endHour; hour++) {
|
||||
const slot = day * 24 + hour;
|
||||
rawBuckets[slot] = 0;
|
||||
selfBuckets[slot] = 0;
|
||||
targetBuckets[slot] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
function sum(values) {
|
||||
return values.reduce((total, value) => total + value, 0);
|
||||
}
|
||||
@@ -1,262 +1,75 @@
|
||||
export const ONLINE_SESSION_MERGE_GAP_MS = 5 * 60 * 1000;
|
||||
import {
|
||||
ONLINE_SESSION_MERGE_GAP_MS,
|
||||
buildSessionsFromEvents as engineBuildSessionsFromEvents,
|
||||
buildSessionsFromGamelog as engineBuildSessionsFromGamelog,
|
||||
buildHeatmapBuckets,
|
||||
buildOverlapBuckets,
|
||||
clipSessionsToRange,
|
||||
computeActivityView,
|
||||
computeOverlapView,
|
||||
findBestOverlapTimeFromBuckets
|
||||
} from './activityEngine.js';
|
||||
|
||||
export { ONLINE_SESSION_MERGE_GAP_MS };
|
||||
|
||||
/**
|
||||
* Builds online sessions from Online/Offline events.
|
||||
* @param {Array<{created_at: string, type: string}>} events - Sorted by created_at
|
||||
* @returns {Array<{start: number, end: number}>} Sessions as Unix timestamps (ms)
|
||||
*/
|
||||
export function buildSessionsFromEvents(events) {
|
||||
const sessions = [];
|
||||
let currentStart = null;
|
||||
|
||||
for (const event of events) {
|
||||
const ts = new Date(event.created_at).getTime();
|
||||
if (event.type === 'Online') {
|
||||
if (currentStart !== null) {
|
||||
sessions.push({ start: currentStart, end: ts });
|
||||
}
|
||||
currentStart = ts;
|
||||
} else if (event.type === 'Offline' && currentStart !== null) {
|
||||
sessions.push({ start: currentStart, end: ts });
|
||||
currentStart = null;
|
||||
}
|
||||
}
|
||||
return sessions;
|
||||
return engineBuildSessionsFromEvents(events).sessions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds online sessions from gamelog_location rows.
|
||||
* Each row: created_at = enter time, time = duration in ms (updated on leave).
|
||||
* If time = 0, the user may still be in this instance or it wasn't updated.
|
||||
* For the last row with time=0, we estimate end = next row's start or now.
|
||||
* Merges adjacent sessions within mergeGapMs.
|
||||
* @param {Array<{created_at: string, time: number}>} rows
|
||||
* @param {number} [mergeGapMs] - Merge gap threshold (default 5 min)
|
||||
* @returns {Array<{start: number, end: number}>}
|
||||
*/
|
||||
export function buildSessionsFromGamelog(
|
||||
rows,
|
||||
mergeGapMs = ONLINE_SESSION_MERGE_GAP_MS
|
||||
) {
|
||||
if (rows.length === 0) return [];
|
||||
|
||||
const rawSessions = [];
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
const start = new Date(rows[i].created_at).getTime();
|
||||
let duration = rows[i].time || 0;
|
||||
|
||||
if (duration === 0) {
|
||||
// time not yet updated: estimate end as next row's start, or now for the last row
|
||||
if (i < rows.length - 1) {
|
||||
duration = new Date(rows[i + 1].created_at).getTime() - start;
|
||||
} else {
|
||||
// Last row, user may still be online - use current time
|
||||
duration = Date.now() - start;
|
||||
}
|
||||
// Cap at 24h to avoid unreasonable durations from stale data
|
||||
duration = Math.min(duration, 24 * 60 * 60 * 1000);
|
||||
}
|
||||
|
||||
if (duration > 0) {
|
||||
rawSessions.push({ start, end: start + duration });
|
||||
}
|
||||
}
|
||||
|
||||
if (rawSessions.length === 0) return [];
|
||||
|
||||
rawSessions.sort((a, b) => a.start - b.start);
|
||||
|
||||
const merged = [{ ...rawSessions[0] }];
|
||||
for (let i = 1; i < rawSessions.length; i++) {
|
||||
const last = merged[merged.length - 1];
|
||||
const curr = rawSessions[i];
|
||||
if (curr.start <= last.end + mergeGapMs) {
|
||||
last.end = Math.max(last.end, curr.end);
|
||||
} else {
|
||||
merged.push({ ...curr });
|
||||
}
|
||||
}
|
||||
return merged;
|
||||
export function buildSessionsFromGamelog(rows, mergeGapMs = ONLINE_SESSION_MERGE_GAP_MS) {
|
||||
return engineBuildSessionsFromGamelog(rows, mergeGapMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes intersection intervals between two sorted, non-overlapping session arrays.
|
||||
* @param {Array<{start: number, end: number}>} sessionsA
|
||||
* @param {Array<{start: number, end: number}>} sessionsB
|
||||
* @returns {Array<{start: number, end: number}>}
|
||||
*/
|
||||
function computeIntersections(sessionsA, sessionsB) {
|
||||
const result = [];
|
||||
let i = 0;
|
||||
let j = 0;
|
||||
while (i < sessionsA.length && j < sessionsB.length) {
|
||||
const a = sessionsA[i];
|
||||
const b = sessionsB[j];
|
||||
const start = Math.max(a.start, b.start);
|
||||
const end = Math.min(a.end, b.end);
|
||||
if (start < end) {
|
||||
result.push({ start, end });
|
||||
}
|
||||
if (a.end < b.end) {
|
||||
i++;
|
||||
} else {
|
||||
j++;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments a 7×24 grid for each hour-slot covered by the given time range.
|
||||
* @param {number[][]} grid - 7×24 array (dayOfWeek × hour)
|
||||
* @param {number} startMs
|
||||
* @param {number} endMs
|
||||
*/
|
||||
function incrementGrid(grid, startMs, endMs) {
|
||||
// Walk hour by hour from start to end
|
||||
const ONE_HOUR = 3600000;
|
||||
let cursor = startMs;
|
||||
|
||||
while (cursor < endMs) {
|
||||
const d = new Date(cursor);
|
||||
const day = d.getDay(); // 0=Sun
|
||||
const hour = d.getHours();
|
||||
grid[day][hour]++;
|
||||
|
||||
// Move to next hour boundary
|
||||
const nextHour = new Date(cursor);
|
||||
nextHour.setMinutes(0, 0, 0);
|
||||
nextHour.setTime(nextHour.getTime() + ONE_HOUR);
|
||||
cursor = nextHour.getTime();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters sessions to only include those overlapping with the given time range.
|
||||
* @param {Array<{start: number, end: number}>} sessions
|
||||
* @param {number} cutoffMs - Only include sessions that end after this timestamp
|
||||
* @returns {Array<{start: number, end: number}>}
|
||||
*/
|
||||
export function filterSessionsByPeriod(sessions, cutoffMs) {
|
||||
return sessions
|
||||
.filter((s) => s.end > cutoffMs)
|
||||
.map((s) => ({
|
||||
start: Math.max(s.start, cutoffMs),
|
||||
end: s.end
|
||||
}));
|
||||
return clipSessionsToRange(sessions, cutoffMs, Date.now());
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates overlap grid and statistics between two users' sessions.
|
||||
* @param {Array<{start: number, end: number}>} sessionsA - Current user
|
||||
* @param {Array<{start: number, end: number}>} sessionsB - Target user
|
||||
* @returns {{
|
||||
* grid: number[][],
|
||||
* maxVal: number,
|
||||
* totalOverlapMs: number,
|
||||
* totalUserAMs: number,
|
||||
* totalUserBMs: number,
|
||||
* overlapPercent: number
|
||||
* }}
|
||||
*/
|
||||
export function calculateOverlapGrid(sessionsA, sessionsB) {
|
||||
const overlapSessions = computeIntersections(sessionsA, sessionsB);
|
||||
|
||||
const nowMs = Date.now();
|
||||
const rawBuckets = buildOverlapBuckets(sessionsA, sessionsB, 0, nowMs);
|
||||
const grid = Array.from({ length: 7 }, () => new Array(24).fill(0));
|
||||
for (const session of overlapSessions) {
|
||||
incrementGrid(grid, session.start, session.end);
|
||||
}
|
||||
|
||||
const totalOverlapMs = overlapSessions.reduce((sum, s) => sum + (s.end - s.start), 0);
|
||||
const totalUserAMs = sessionsA.reduce((sum, s) => sum + (s.end - s.start), 0);
|
||||
const totalUserBMs = sessionsB.reduce((sum, s) => sum + (s.end - s.start), 0);
|
||||
const minOnline = Math.min(totalUserAMs, totalUserBMs);
|
||||
const overlapPercent = minOnline > 0 ? Math.round((totalOverlapMs / minOnline) * 100) : 0;
|
||||
|
||||
let maxVal = 0;
|
||||
for (let d = 0; d < 7; d++) {
|
||||
for (let h = 0; h < 24; h++) {
|
||||
if (grid[d][h] > maxVal) maxVal = grid[d][h];
|
||||
for (let slot = 0; slot < 168; slot++) {
|
||||
const value = rawBuckets[slot];
|
||||
if (value > maxVal) {
|
||||
maxVal = value;
|
||||
}
|
||||
grid[Math.floor(slot / 24)][slot % 24] = value;
|
||||
}
|
||||
|
||||
const totalOverlapMs = rawBuckets.reduce((sum, value) => sum + value, 0) * 60000;
|
||||
const totalUserAMs = sumDurations(sessionsA);
|
||||
const totalUserBMs = sumDurations(sessionsB);
|
||||
const minOnlineMs = Math.min(totalUserAMs, totalUserBMs);
|
||||
const overlapPercent = minOnlineMs > 0 ? Math.round((totalOverlapMs / minOnlineMs) * 100) : 0;
|
||||
return { grid, maxVal, totalOverlapMs, totalUserAMs, totalUserBMs, overlapPercent };
|
||||
}
|
||||
|
||||
/**
|
||||
* Aggregates sessions into a 7×24 grid (dayOfWeek × hour).
|
||||
* For each session, increments all hour slots it covers.
|
||||
* @param {Array<{start: number, end: number}>} sessions
|
||||
* @returns {{ grid: number[][], maxVal: number }}
|
||||
*/
|
||||
export function aggregateSessionsToGrid(sessions) {
|
||||
const rawBuckets = buildHeatmapBuckets(sessions, 0, Date.now());
|
||||
const grid = Array.from({ length: 7 }, () => new Array(24).fill(0));
|
||||
for (const session of sessions) {
|
||||
incrementGrid(grid, session.start, session.end);
|
||||
}
|
||||
let maxVal = 0;
|
||||
for (let d = 0; d < 7; d++) {
|
||||
for (let h = 0; h < 24; h++) {
|
||||
if (grid[d][h] > maxVal) maxVal = grid[d][h];
|
||||
for (let slot = 0; slot < 168; slot++) {
|
||||
const value = rawBuckets[slot];
|
||||
if (value > maxVal) {
|
||||
maxVal = value;
|
||||
}
|
||||
grid[Math.floor(slot / 24)][slot % 24] = value;
|
||||
}
|
||||
return { grid, maxVal };
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the best time to meet based on the overlap grid.
|
||||
* @param {number[][]} grid - 7×24 (dayOfWeek × hour)
|
||||
* @param {string[]} dayLabels - ['Sun', 'Mon', ..., 'Sat']
|
||||
* @returns {string} e.g. "Sat, 20:00–23:00" or empty string
|
||||
*/
|
||||
export function findBestOverlapTime(grid, dayLabels) {
|
||||
const hourSums = new Array(24).fill(0);
|
||||
for (let h = 0; h < 24; h++) {
|
||||
for (let d = 0; d < 7; d++) {
|
||||
hourSums[h] += grid[d][h];
|
||||
const buckets = [];
|
||||
for (let day = 0; day < 7; day++) {
|
||||
for (let hour = 0; hour < 24; hour++) {
|
||||
buckets.push(grid[day][hour]);
|
||||
}
|
||||
}
|
||||
|
||||
let maxHourSum = 0;
|
||||
let maxHourIdx = 0;
|
||||
for (let h = 0; h < 24; h++) {
|
||||
if (hourSums[h] > maxHourSum) {
|
||||
maxHourSum = hourSums[h];
|
||||
maxHourIdx = h;
|
||||
}
|
||||
}
|
||||
if (maxHourSum === 0) return '';
|
||||
|
||||
const threshold = maxHourSum * 0.6;
|
||||
let startHour = maxHourIdx;
|
||||
let endHour = maxHourIdx;
|
||||
while (startHour > 0 && hourSums[startHour - 1] >= threshold) {
|
||||
startHour--;
|
||||
}
|
||||
while (endHour < 23 && hourSums[endHour + 1] >= threshold) {
|
||||
endHour++;
|
||||
}
|
||||
|
||||
const daySums = new Array(7).fill(0);
|
||||
for (let d = 0; d < 7; d++) {
|
||||
for (let h = startHour; h <= endHour; h++) {
|
||||
daySums[d] += grid[d][h];
|
||||
}
|
||||
}
|
||||
let maxDaySum = 0;
|
||||
let maxDayIdx = 0;
|
||||
for (let d = 0; d < 7; d++) {
|
||||
if (daySums[d] > maxDaySum) {
|
||||
maxDaySum = daySums[d];
|
||||
maxDayIdx = d;
|
||||
}
|
||||
}
|
||||
|
||||
const dayName = dayLabels[maxDayIdx];
|
||||
const timeRange =
|
||||
startHour === endHour
|
||||
? `${String(startHour).padStart(2, '0')}:00`
|
||||
: `${String(startHour).padStart(2, '0')}:00\u2013${String(endHour + 1).padStart(2, '0')}:00`;
|
||||
|
||||
return `${dayName}, ${timeRange}`;
|
||||
return findBestOverlapTimeFromBuckets(buckets, dayLabels);
|
||||
}
|
||||
|
||||
export { computeActivityView, computeOverlapView };
|
||||
|
||||
function sumDurations(sessions) {
|
||||
return sessions.reduce((sum, session) => sum + (session.end - session.start), 0);
|
||||
}
|
||||
|
||||
@@ -1,119 +1,600 @@
|
||||
import { defineStore } from 'pinia';
|
||||
|
||||
import { database } from '../services/database';
|
||||
import { ONLINE_SESSION_MERGE_GAP_MS } from '../shared/utils/overlapCalculator';
|
||||
const refreshJobs = new Map();
|
||||
import { mergeSessions } from '../shared/utils/activityEngine.js';
|
||||
import { runActivityWorkerTask } from '../workers/activityWorkerRunner.js';
|
||||
|
||||
function buildSessionsAndPendingFromEvents(events, initialStart = null) {
|
||||
const sessions = [];
|
||||
let currentStart = initialStart;
|
||||
const snapshotMap = new Map();
|
||||
const inFlightJobs = new Map();
|
||||
const workerCall = runActivityWorkerTask;
|
||||
const MAX_SNAPSHOT_ENTRIES = 12;
|
||||
let deferredWriteQueue = Promise.resolve();
|
||||
|
||||
for (const event of events) {
|
||||
const ts = new Date(event.created_at).getTime();
|
||||
if (event.type === 'Online') {
|
||||
// Treat consecutive Online events as reconnect boundaries.
|
||||
if (currentStart !== null) {
|
||||
sessions.push({ start: currentStart, end: ts });
|
||||
}
|
||||
currentStart = ts;
|
||||
} else if (event.type === 'Offline' && currentStart !== null) {
|
||||
sessions.push({ start: currentStart, end: ts });
|
||||
currentStart = null;
|
||||
}
|
||||
function deferWrite(task) {
|
||||
const run = () => {
|
||||
deferredWriteQueue = deferredWriteQueue
|
||||
.catch(() => {})
|
||||
.then(task)
|
||||
.catch((error) => {
|
||||
console.error('[Activity] deferred write failed:', error);
|
||||
});
|
||||
return deferredWriteQueue;
|
||||
};
|
||||
if (typeof requestIdleCallback === 'function') {
|
||||
requestIdleCallback(run);
|
||||
return;
|
||||
}
|
||||
setTimeout(run, 0);
|
||||
}
|
||||
|
||||
function createSnapshot(userId, isSelf) {
|
||||
return {
|
||||
pendingSessionStartAt: currentStart,
|
||||
sessions
|
||||
userId,
|
||||
isSelf,
|
||||
sync: {
|
||||
userId,
|
||||
updatedAt: '',
|
||||
isSelf,
|
||||
sourceLastCreatedAt: '',
|
||||
pendingSessionStartAt: null,
|
||||
cachedRangeDays: 0
|
||||
},
|
||||
sessions: [],
|
||||
activityViews: new Map(),
|
||||
overlapViews: new Map(),
|
||||
topWorldsViews: new Map()
|
||||
};
|
||||
}
|
||||
|
||||
function getSnapshot(userId, isSelf) {
|
||||
let snapshot = snapshotMap.get(userId);
|
||||
if (!snapshot) {
|
||||
snapshot = createSnapshot(userId, isSelf);
|
||||
snapshotMap.set(userId, snapshot);
|
||||
} else if (typeof isSelf === 'boolean') {
|
||||
snapshot.isSelf = isSelf;
|
||||
snapshot.sync.isSelf = isSelf;
|
||||
}
|
||||
touchSnapshot(userId, snapshot);
|
||||
pruneSnapshots();
|
||||
return snapshot;
|
||||
}
|
||||
|
||||
function touchSnapshot(userId, snapshot) {
|
||||
snapshotMap.delete(userId);
|
||||
snapshotMap.set(userId, snapshot);
|
||||
}
|
||||
|
||||
function pruneSnapshots() {
|
||||
if (snapshotMap.size <= MAX_SNAPSHOT_ENTRIES) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const [userId] of snapshotMap) {
|
||||
if (isUserInFlight(userId)) {
|
||||
continue;
|
||||
}
|
||||
snapshotMap.delete(userId);
|
||||
if (snapshotMap.size <= MAX_SNAPSHOT_ENTRIES) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isUserInFlight(userId) {
|
||||
for (const key of inFlightJobs.keys()) {
|
||||
if (key.startsWith(`${userId}:`)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function clearDerivedViews(snapshot) {
|
||||
snapshot.activityViews.clear();
|
||||
snapshot.overlapViews.clear();
|
||||
snapshot.topWorldsViews.clear();
|
||||
}
|
||||
|
||||
function overlapExcludeKey(excludeHours) {
|
||||
if (!excludeHours?.enabled) {
|
||||
return '';
|
||||
}
|
||||
return `${excludeHours.startHour}-${excludeHours.endHour}`;
|
||||
}
|
||||
|
||||
function pairCursor(leftCursor, rightCursor) {
|
||||
return `${leftCursor || ''}|${rightCursor || ''}`;
|
||||
}
|
||||
|
||||
export const useActivityStore = defineStore('Activity', () => {
|
||||
function getCache(userId) {
|
||||
return database.getActivityCache(userId);
|
||||
async function getCache(userId, isSelf = false) {
|
||||
const snapshot = await hydrateSnapshot(userId, isSelf);
|
||||
return {
|
||||
userId: snapshot.userId,
|
||||
isSelf: snapshot.isSelf,
|
||||
updatedAt: snapshot.sync.updatedAt,
|
||||
sourceLastCreatedAt: snapshot.sync.sourceLastCreatedAt,
|
||||
pendingSessionStartAt: snapshot.sync.pendingSessionStartAt,
|
||||
cachedRangeDays: snapshot.sync.cachedRangeDays,
|
||||
sessions: snapshot.sessions
|
||||
};
|
||||
}
|
||||
|
||||
function getCachedDays(userId) {
|
||||
return snapshotMap.get(userId)?.sync.cachedRangeDays || 0;
|
||||
}
|
||||
|
||||
function isRefreshing(userId) {
|
||||
return refreshJobs.has(userId);
|
||||
for (const key of inFlightJobs.keys()) {
|
||||
if (key.startsWith(`${userId}:`)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function fullRefresh(userId) {
|
||||
const events = await database.getOnlineOfflineSessions(userId);
|
||||
const { sessions, pendingSessionStartAt } =
|
||||
buildSessionsAndPendingFromEvents(events);
|
||||
const sourceLastCreatedAt =
|
||||
events.length > 0 ? events[events.length - 1].created_at : '';
|
||||
async function loadActivity(userId, { isSelf = false, rangeDays = 30, normalizeConfig, dayLabels, forceRefresh = false }) {
|
||||
const snapshot = await ensureSnapshot(userId, { isSelf, rangeDays, forceRefresh });
|
||||
const cacheKey = String(rangeDays);
|
||||
const currentCursor = snapshot.sync.sourceLastCreatedAt || '';
|
||||
|
||||
let view = snapshot.activityViews.get(cacheKey);
|
||||
if (!forceRefresh && view?.builtFromCursor === currentCursor) {
|
||||
return buildActivityResponse(snapshot, view);
|
||||
}
|
||||
|
||||
if (!forceRefresh) {
|
||||
const persisted = await database.getActivityBucketCacheV2({
|
||||
ownerUserId: userId,
|
||||
rangeDays,
|
||||
viewKind: database.ACTIVITY_VIEW_KIND.ACTIVITY
|
||||
});
|
||||
if (persisted?.builtFromCursor === currentCursor) {
|
||||
view = {
|
||||
...persisted.summary,
|
||||
rawBuckets: persisted.rawBuckets,
|
||||
normalizedBuckets: persisted.normalizedBuckets,
|
||||
builtFromCursor: persisted.builtFromCursor,
|
||||
builtAt: persisted.builtAt
|
||||
};
|
||||
snapshot.activityViews.set(cacheKey, view);
|
||||
return buildActivityResponse(snapshot, view);
|
||||
}
|
||||
}
|
||||
|
||||
const computed = await workerCall('computeActivityView', {
|
||||
sessions: snapshot.sessions,
|
||||
dayLabels,
|
||||
rangeDays,
|
||||
normalizeConfig
|
||||
});
|
||||
view = {
|
||||
...computed,
|
||||
builtFromCursor: currentCursor,
|
||||
builtAt: new Date().toISOString()
|
||||
};
|
||||
snapshot.activityViews.set(cacheKey, view);
|
||||
deferWrite(() => database.upsertActivityBucketCacheV2({
|
||||
ownerUserId: userId,
|
||||
rangeDays,
|
||||
viewKind: database.ACTIVITY_VIEW_KIND.ACTIVITY,
|
||||
builtFromCursor: currentCursor,
|
||||
rawBuckets: view.rawBuckets,
|
||||
normalizedBuckets: view.normalizedBuckets,
|
||||
summary: {
|
||||
peakDay: view.peakDay,
|
||||
peakTime: view.peakTime,
|
||||
filteredEventCount: view.filteredEventCount
|
||||
},
|
||||
builtAt: view.builtAt
|
||||
}));
|
||||
return buildActivityResponse(snapshot, view);
|
||||
}
|
||||
|
||||
async function loadOverlap(currentUserId, targetUserId, {
|
||||
rangeDays = 30,
|
||||
dayLabels,
|
||||
normalizeConfig,
|
||||
excludeHours,
|
||||
forceRefresh = false
|
||||
}) {
|
||||
const [selfSnapshot, targetSnapshot] = await Promise.all([
|
||||
ensureSnapshot(currentUserId, { isSelf: true, rangeDays, forceRefresh }),
|
||||
ensureSnapshot(targetUserId, { isSelf: false, rangeDays, forceRefresh })
|
||||
]);
|
||||
|
||||
const excludeKey = overlapExcludeKey(excludeHours);
|
||||
const cacheKey = `${targetUserId}:${rangeDays}:${excludeKey}`;
|
||||
const cursor = pairCursor(selfSnapshot.sync.sourceLastCreatedAt, targetSnapshot.sync.sourceLastCreatedAt);
|
||||
|
||||
let view = targetSnapshot.overlapViews.get(cacheKey);
|
||||
if (view?.builtFromCursor === cursor) {
|
||||
return view;
|
||||
}
|
||||
|
||||
const persisted = await database.getActivityBucketCacheV2({
|
||||
ownerUserId: currentUserId,
|
||||
targetUserId,
|
||||
rangeDays,
|
||||
viewKind: database.ACTIVITY_VIEW_KIND.OVERLAP,
|
||||
excludeKey
|
||||
});
|
||||
if (persisted?.builtFromCursor === cursor) {
|
||||
view = {
|
||||
...persisted.summary,
|
||||
rawBuckets: persisted.rawBuckets,
|
||||
normalizedBuckets: persisted.normalizedBuckets,
|
||||
builtFromCursor: persisted.builtFromCursor,
|
||||
builtAt: persisted.builtAt
|
||||
};
|
||||
targetSnapshot.overlapViews.set(cacheKey, view);
|
||||
return view;
|
||||
}
|
||||
|
||||
view = await workerCall('computeOverlapView', {
|
||||
selfSessions: selfSnapshot.sessions,
|
||||
targetSessions: targetSnapshot.sessions,
|
||||
dayLabels,
|
||||
rangeDays,
|
||||
excludeHours: excludeHours?.enabled ? excludeHours : null,
|
||||
normalizeConfig
|
||||
});
|
||||
view = {
|
||||
...view,
|
||||
builtFromCursor: cursor,
|
||||
builtAt: new Date().toISOString()
|
||||
};
|
||||
targetSnapshot.overlapViews.set(cacheKey, view);
|
||||
deferWrite(() => database.upsertActivityBucketCacheV2({
|
||||
ownerUserId: currentUserId,
|
||||
targetUserId,
|
||||
rangeDays,
|
||||
viewKind: database.ACTIVITY_VIEW_KIND.OVERLAP,
|
||||
excludeKey,
|
||||
builtFromCursor: cursor,
|
||||
rawBuckets: view.rawBuckets,
|
||||
normalizedBuckets: view.normalizedBuckets,
|
||||
summary: {
|
||||
overlapPercent: view.overlapPercent,
|
||||
bestOverlapTime: view.bestOverlapTime
|
||||
},
|
||||
builtAt: view.builtAt
|
||||
}));
|
||||
return view;
|
||||
}
|
||||
|
||||
async function loadTopWorlds(userId, { rangeDays = 30, limit = 5, isSelf = true, forceRefresh = false }) {
|
||||
const snapshot = await ensureSnapshot(userId, { isSelf, rangeDays, forceRefresh });
|
||||
const cacheKey = `${rangeDays}:${limit}`;
|
||||
const currentCursor = snapshot.sync.sourceLastCreatedAt || '';
|
||||
|
||||
let cached = snapshot.topWorldsViews.get(cacheKey);
|
||||
if (!forceRefresh && cached?.builtFromCursor === currentCursor) {
|
||||
return cached.worlds;
|
||||
}
|
||||
|
||||
if (!forceRefresh) {
|
||||
const persisted = await database.getActivityTopWorldsCacheV2(userId, rangeDays);
|
||||
if (persisted?.builtFromCursor === currentCursor) {
|
||||
snapshot.topWorldsViews.set(cacheKey, persisted);
|
||||
return persisted.worlds;
|
||||
}
|
||||
}
|
||||
|
||||
const worlds = await database.getMyTopWorlds(rangeDays, limit);
|
||||
const entry = {
|
||||
userId,
|
||||
updatedAt: new Date().toISOString(),
|
||||
isSelf: false,
|
||||
sourceLastCreatedAt,
|
||||
pendingSessionStartAt,
|
||||
sessions
|
||||
rangeDays,
|
||||
worlds,
|
||||
builtFromCursor: currentCursor,
|
||||
builtAt: new Date().toISOString()
|
||||
};
|
||||
await database.replaceActivityCache(entry);
|
||||
return database.getActivityCache(userId);
|
||||
snapshot.topWorldsViews.set(cacheKey, entry);
|
||||
deferWrite(() => database.replaceActivityTopWorldsCacheV2(entry));
|
||||
deferWrite(() => database.upsertActivityRangeCacheV2({
|
||||
userId,
|
||||
rangeDays,
|
||||
cacheKind: database.ACTIVITY_RANGE_CACHE_KIND.TOP_WORLDS,
|
||||
isComplete: true,
|
||||
builtFromCursor: currentCursor,
|
||||
builtAt: entry.builtAt
|
||||
}));
|
||||
return worlds;
|
||||
}
|
||||
|
||||
async function incrementalRefresh(meta) {
|
||||
const updatedAt = new Date().toISOString();
|
||||
|
||||
if (!meta.sourceLastCreatedAt) {
|
||||
return fullRefresh(meta.userId);
|
||||
}
|
||||
|
||||
const events = await database.getOnlineOfflineSessionsAfter(
|
||||
meta.userId,
|
||||
meta.sourceLastCreatedAt
|
||||
);
|
||||
if (events.length === 0) {
|
||||
await database.touchActivityCacheMeta({
|
||||
...meta,
|
||||
updatedAt
|
||||
});
|
||||
return database.getActivityCache(meta.userId);
|
||||
}
|
||||
|
||||
const { sessions, pendingSessionStartAt } =
|
||||
buildSessionsAndPendingFromEvents(
|
||||
events,
|
||||
meta.pendingSessionStartAt
|
||||
);
|
||||
const sourceLastCreatedAt = events[events.length - 1].created_at;
|
||||
|
||||
await database.appendActivityCache({
|
||||
...meta,
|
||||
updatedAt,
|
||||
sourceLastCreatedAt,
|
||||
pendingSessionStartAt,
|
||||
sessions
|
||||
});
|
||||
return database.getActivityCache(meta.userId);
|
||||
async function refreshActivity(userId, options) {
|
||||
return loadActivity(userId, { ...options, forceRefresh: true });
|
||||
}
|
||||
|
||||
function refreshActivityCache(userId) {
|
||||
const existing = refreshJobs.get(userId);
|
||||
if (existing) {
|
||||
return existing;
|
||||
}
|
||||
|
||||
const job = (async () => {
|
||||
const meta = await database.getActivityCacheMeta(userId);
|
||||
if (!meta || meta.isSelf) {
|
||||
return fullRefresh(userId);
|
||||
}
|
||||
return incrementalRefresh(meta);
|
||||
})().finally(() => {
|
||||
refreshJobs.delete(userId);
|
||||
async function loadActivityView({ userId, isSelf = false, rangeDays = 30, dayLabels, forceRefresh = false }) {
|
||||
const response = await loadActivity(userId, {
|
||||
isSelf,
|
||||
rangeDays,
|
||||
dayLabels,
|
||||
forceRefresh,
|
||||
normalizeConfig: pickActivityNormalizeConfig(isSelf, rangeDays)
|
||||
});
|
||||
return {
|
||||
hasAnyData: response.sessions.length > 0,
|
||||
filteredEventCount: response.view.filteredEventCount,
|
||||
peakDay: response.view.peakDay,
|
||||
peakTime: response.view.peakTime,
|
||||
rawBuckets: response.view.rawBuckets,
|
||||
normalizedBuckets: response.view.normalizedBuckets
|
||||
};
|
||||
}
|
||||
|
||||
refreshJobs.set(userId, job);
|
||||
return job;
|
||||
async function loadOverlapView({ currentUserId, targetUserId, rangeDays = 30, dayLabels, excludeHours, forceRefresh = false }) {
|
||||
const response = await loadOverlap(currentUserId, targetUserId, {
|
||||
rangeDays,
|
||||
dayLabels,
|
||||
excludeHours,
|
||||
forceRefresh,
|
||||
normalizeConfig: pickOverlapNormalizeConfig(rangeDays)
|
||||
});
|
||||
return {
|
||||
hasOverlapData: response.rawBuckets.some((value) => value > 0),
|
||||
overlapPercent: response.overlapPercent,
|
||||
bestOverlapTime: response.bestOverlapTime,
|
||||
rawBuckets: response.rawBuckets,
|
||||
normalizedBuckets: response.normalizedBuckets
|
||||
};
|
||||
}
|
||||
|
||||
async function loadTopWorldsView({ userId, rangeDays = 30, limit = 5, forceRefresh = false }) {
|
||||
return loadTopWorlds(userId, {
|
||||
rangeDays,
|
||||
limit,
|
||||
isSelf: true,
|
||||
forceRefresh
|
||||
});
|
||||
}
|
||||
|
||||
function invalidateUser(userId) {
|
||||
if (!userId) {
|
||||
return;
|
||||
}
|
||||
snapshotMap.delete(userId);
|
||||
}
|
||||
|
||||
return {
|
||||
getCache,
|
||||
getCachedDays,
|
||||
isRefreshing,
|
||||
refreshActivityCache
|
||||
loadActivity,
|
||||
loadActivityView,
|
||||
loadOverlap,
|
||||
loadOverlapView,
|
||||
loadTopWorlds,
|
||||
loadTopWorldsView,
|
||||
refreshActivity,
|
||||
invalidateUser,
|
||||
workerCall: runActivityWorkerTask
|
||||
};
|
||||
});
|
||||
|
||||
function buildActivityResponse(snapshot, view) {
|
||||
return {
|
||||
userId: snapshot.userId,
|
||||
isSelf: snapshot.isSelf,
|
||||
cachedRangeDays: snapshot.sync.cachedRangeDays,
|
||||
sessions: snapshot.sessions,
|
||||
view
|
||||
};
|
||||
}
|
||||
|
||||
async function hydrateSnapshot(userId, isSelf) {
|
||||
const snapshot = getSnapshot(userId, isSelf);
|
||||
if (snapshot.sync.updatedAt || snapshot.sessions.length > 0) {
|
||||
return snapshot;
|
||||
}
|
||||
|
||||
const [syncState, sessions] = await Promise.all([
|
||||
database.getActivitySyncStateV2(userId),
|
||||
database.getActivitySessionsV2(userId)
|
||||
]);
|
||||
|
||||
if (syncState) {
|
||||
snapshot.sync = {
|
||||
...snapshot.sync,
|
||||
...syncState,
|
||||
isSelf: typeof syncState.isSelf === 'boolean' ? syncState.isSelf : snapshot.isSelf
|
||||
};
|
||||
}
|
||||
if (sessions.length > 0) {
|
||||
snapshot.sessions = sessions;
|
||||
}
|
||||
return snapshot;
|
||||
}
|
||||
|
||||
async function ensureSnapshot(userId, { isSelf, rangeDays, forceRefresh = false }) {
|
||||
const jobKey = `${userId}:${isSelf}:${rangeDays}:${forceRefresh ? 'force' : 'normal'}`;
|
||||
const existingJob = inFlightJobs.get(jobKey);
|
||||
if (existingJob) {
|
||||
return existingJob;
|
||||
}
|
||||
|
||||
const job = (async () => {
|
||||
const snapshot = await hydrateSnapshot(userId, isSelf);
|
||||
if (forceRefresh || !snapshot.sync.updatedAt) {
|
||||
await fullRefresh(snapshot, rangeDays);
|
||||
} else {
|
||||
await incrementalRefresh(snapshot);
|
||||
if (rangeDays > snapshot.sync.cachedRangeDays) {
|
||||
await expandRange(snapshot, rangeDays);
|
||||
}
|
||||
}
|
||||
return snapshot;
|
||||
})().finally(() => {
|
||||
inFlightJobs.delete(jobKey);
|
||||
});
|
||||
|
||||
inFlightJobs.set(jobKey, job);
|
||||
return job;
|
||||
}
|
||||
|
||||
async function fullRefresh(snapshot, rangeDays) {
|
||||
const sourceItems = await database.getActivitySourceSliceV2({
|
||||
userId: snapshot.userId,
|
||||
isSelf: snapshot.isSelf,
|
||||
fromDays: rangeDays
|
||||
});
|
||||
const sourceLastCreatedAt = sourceItems.length > 0 ? sourceItems[sourceItems.length - 1].created_at : '';
|
||||
const result = await workerCall('computeSessionsSnapshot', {
|
||||
sourceType: snapshot.isSelf ? 'self_gamelog' : 'friend_presence',
|
||||
rows: snapshot.isSelf ? sourceItems : undefined,
|
||||
events: snapshot.isSelf ? undefined : sourceItems,
|
||||
initialStart: null,
|
||||
nowMs: Date.now(),
|
||||
mayHaveOpenTail: snapshot.isSelf,
|
||||
sourceRevision: sourceLastCreatedAt
|
||||
});
|
||||
|
||||
snapshot.sessions = result.sessions;
|
||||
snapshot.sync = {
|
||||
...snapshot.sync,
|
||||
updatedAt: new Date().toISOString(),
|
||||
isSelf: snapshot.isSelf,
|
||||
sourceLastCreatedAt,
|
||||
pendingSessionStartAt: result.pendingSessionStartAt,
|
||||
cachedRangeDays: rangeDays
|
||||
};
|
||||
clearDerivedViews(snapshot);
|
||||
|
||||
deferWrite(() => database.replaceActivitySessionsV2(snapshot.userId, snapshot.sessions));
|
||||
deferWrite(() => database.upsertActivitySyncStateV2(snapshot.sync));
|
||||
deferWrite(() => database.upsertActivityRangeCacheV2({
|
||||
userId: snapshot.userId,
|
||||
rangeDays,
|
||||
cacheKind: database.ACTIVITY_RANGE_CACHE_KIND.SESSIONS,
|
||||
isComplete: true,
|
||||
builtFromCursor: snapshot.sync.sourceLastCreatedAt,
|
||||
builtAt: snapshot.sync.updatedAt
|
||||
}));
|
||||
}
|
||||
|
||||
async function incrementalRefresh(snapshot) {
|
||||
if (!snapshot.sync.sourceLastCreatedAt) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceItems = await database.getActivitySourceAfterV2({
|
||||
userId: snapshot.userId,
|
||||
isSelf: snapshot.isSelf,
|
||||
afterCreatedAt: snapshot.sync.sourceLastCreatedAt,
|
||||
inclusive: snapshot.isSelf
|
||||
});
|
||||
if (sourceItems.length === 0) {
|
||||
snapshot.sync.updatedAt = new Date().toISOString();
|
||||
deferWrite(() => database.upsertActivitySyncStateV2(snapshot.sync));
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceLastCreatedAt = sourceItems[sourceItems.length - 1].created_at;
|
||||
const result = await workerCall('computeSessionsSnapshot', {
|
||||
sourceType: snapshot.isSelf ? 'self_gamelog' : 'friend_presence',
|
||||
rows: snapshot.isSelf ? sourceItems : undefined,
|
||||
events: snapshot.isSelf ? undefined : sourceItems,
|
||||
initialStart: snapshot.isSelf ? null : snapshot.sync.pendingSessionStartAt,
|
||||
nowMs: Date.now(),
|
||||
mayHaveOpenTail: snapshot.isSelf,
|
||||
sourceRevision: sourceLastCreatedAt
|
||||
});
|
||||
|
||||
const replaceFromStartAt = snapshot.sessions.length > 0
|
||||
? snapshot.sessions[Math.max(snapshot.sessions.length - 1, 0)].start
|
||||
: null;
|
||||
const merged = mergeSessions(snapshot.sessions, result.sessions);
|
||||
snapshot.sessions = merged;
|
||||
snapshot.sync = {
|
||||
...snapshot.sync,
|
||||
updatedAt: new Date().toISOString(),
|
||||
sourceLastCreatedAt,
|
||||
pendingSessionStartAt: result.pendingSessionStartAt
|
||||
};
|
||||
clearDerivedViews(snapshot);
|
||||
|
||||
const tailSessions = replaceFromStartAt === null
|
||||
? merged
|
||||
: merged.filter((session) => session.start >= replaceFromStartAt);
|
||||
deferWrite(() => database.appendActivitySessionsV2({
|
||||
userId: snapshot.userId,
|
||||
sessions: tailSessions,
|
||||
replaceFromStartAt
|
||||
}));
|
||||
deferWrite(() => database.upsertActivitySyncStateV2(snapshot.sync));
|
||||
}
|
||||
|
||||
async function expandRange(snapshot, rangeDays) {
|
||||
const currentDays = snapshot.sync.cachedRangeDays || 0;
|
||||
if (rangeDays <= currentDays) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceItems = await database.getActivitySourceSliceV2({
|
||||
userId: snapshot.userId,
|
||||
isSelf: snapshot.isSelf,
|
||||
fromDays: rangeDays,
|
||||
toDays: currentDays
|
||||
});
|
||||
const result = await workerCall('computeSessionsSnapshot', {
|
||||
sourceType: snapshot.isSelf ? 'self_gamelog' : 'friend_presence',
|
||||
rows: snapshot.isSelf ? sourceItems : undefined,
|
||||
events: snapshot.isSelf ? undefined : sourceItems,
|
||||
initialStart: null,
|
||||
nowMs: Date.now(),
|
||||
mayHaveOpenTail: false,
|
||||
sourceRevision: snapshot.sync.sourceLastCreatedAt
|
||||
});
|
||||
|
||||
if (result.sessions.length > 0) {
|
||||
snapshot.sessions = mergeSessions(result.sessions, snapshot.sessions);
|
||||
deferWrite(() => database.replaceActivitySessionsV2(snapshot.userId, snapshot.sessions));
|
||||
}
|
||||
snapshot.sync.cachedRangeDays = rangeDays;
|
||||
snapshot.sync.updatedAt = new Date().toISOString();
|
||||
clearDerivedViews(snapshot);
|
||||
|
||||
deferWrite(() => database.upsertActivitySyncStateV2(snapshot.sync));
|
||||
deferWrite(() => database.upsertActivityRangeCacheV2({
|
||||
userId: snapshot.userId,
|
||||
rangeDays,
|
||||
cacheKind: database.ACTIVITY_RANGE_CACHE_KIND.SESSIONS,
|
||||
isComplete: true,
|
||||
builtFromCursor: snapshot.sync.sourceLastCreatedAt,
|
||||
builtAt: snapshot.sync.updatedAt
|
||||
}));
|
||||
}
|
||||
|
||||
function pickActivityNormalizeConfig(isSelf, rangeDays) {
|
||||
const role = isSelf ? 'self' : 'friend';
|
||||
return {
|
||||
self: {
|
||||
7: { thresholdMinutes: 0, capPercentile: 95, mode: 'sqrt' },
|
||||
30: { thresholdMinutes: 10, capPercentile: 95, mode: 'sqrt' },
|
||||
90: { thresholdMinutes: 20, capPercentile: 90, mode: 'log' }
|
||||
},
|
||||
friend: {
|
||||
7: { thresholdMinutes: 0, capPercentile: 95, mode: 'sqrt' },
|
||||
30: { thresholdMinutes: 10, capPercentile: 95, mode: 'sqrt' },
|
||||
90: { thresholdMinutes: 20, capPercentile: 90, mode: 'log' }
|
||||
}
|
||||
}[role][rangeDays] || {
|
||||
thresholdMinutes: 10,
|
||||
capPercentile: 95,
|
||||
mode: 'sqrt'
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Picks the bucket-normalization config for the self/friend overlap heatmap.
 *
 * @param {number} rangeDays - window length in days (7 / 30 / 90).
 * @returns {{thresholdMinutes: number, capPercentile: number, mode: string}}
 *     A fresh config object; unknown ranges get a moderate default.
 */
function pickOverlapNormalizeConfig(rangeDays) {
    switch (rangeDays) {
        case 7:
            // Short window: keep every minute, gentle sqrt scaling.
            return { thresholdMinutes: 0, capPercentile: 95, mode: 'sqrt' };
        case 30:
            return { thresholdMinutes: 5, capPercentile: 95, mode: 'sqrt' };
        case 90:
            // Long window: stronger noise floor and log compression.
            return { thresholdMinutes: 10, capPercentile: 90, mode: 'log' };
        default:
            return { thresholdMinutes: 5, capPercentile: 95, mode: 'sqrt' };
    }
}
|
||||
|
||||
@@ -208,9 +208,6 @@ export const useVrcxStore = defineStore('Vrcx', () => {
|
||||
await database.fixCancelFriendRequestTypo(); // fix CancelFriendRequst typo
|
||||
await database.fixBrokenGameLogDisplayNames(); // fix gameLog display names "DisplayName (userId)"
|
||||
await database.upgradeDatabaseVersion(); // update database version
|
||||
if (state.databaseVersion < 15) {
|
||||
await database.updateActivityTabDatabaseVersion(); // improve activity tab performance, ver 15
|
||||
}
|
||||
await database.vacuum(); // succ
|
||||
await database.optimize();
|
||||
await configRepository.setInt(
|
||||
|
||||
107
src/workers/activityWorker.js
Normal file
107
src/workers/activityWorker.js
Normal file
@@ -0,0 +1,107 @@
|
||||
import {
|
||||
buildSessionsFromEvents,
|
||||
buildSessionsFromGamelog,
|
||||
buildHeatmapBuckets,
|
||||
buildOverlapBuckets,
|
||||
computeActivityView,
|
||||
computeOverlapView,
|
||||
normalizeBuckets
|
||||
} from '../shared/utils/activityEngine.js';
|
||||
|
||||
// Maps each task name to a function that produces the task's result payload.
// Keeping this as a lookup table (instead of a switch) makes the supported
// task set visible at a glance and keeps the listener body trivial.
const activityTaskHandlers = {
    computeSessionsSnapshot: (payload) => computeSessionsSnapshot(payload),
    computeActivityView: (payload) => computeActivityView(payload),
    computeOverlapView: (payload) => computeOverlapView(payload),
    buildSessionsFromGamelog: (payload) => ({
        sessions: buildSessionsFromGamelog(
            payload.rows || [],
            payload.mergeGapMs,
            payload.nowMs
        )
    }),
    buildSessionsFromEvents: (payload) =>
        buildSessionsFromEvents(payload.events || [], payload.initialStart ?? null),
    buildHeatmapBuckets: (payload) => ({
        buckets: buildHeatmapBuckets(
            payload.sessions || [],
            payload.windowStartMs,
            payload.nowMs,
            payload.maxSessionMs
        )
    }),
    buildOverlapBuckets: (payload) => ({
        buckets: buildOverlapBuckets(
            payload.selfSessions || [],
            payload.friendSessions || [],
            payload.windowStartMs,
            payload.nowMs,
            payload.maxSessionMs
        )
    }),
    normalizeHeatmapBuckets: (payload) => ({
        normalized: normalizeBuckets(
            payload.buckets || [],
            payload.thresholdMinutes,
            payload.capPercentile,
            payload.mode
        )
    })
};

// Worker entry point: each request carries a task `type`, a `seq` number used
// by the main thread to correlate the response, and a task-specific `payload`.
// Every request gets exactly one reply: either `result` or `error`.
self.addEventListener('message', (event) => {
    const { type, seq, payload } = event.data;
    const handler = activityTaskHandlers[type];

    try {
        if (!handler) {
            throw new Error(`Unknown activity worker task: ${type}`);
        }
        self.postMessage({ type: 'result', seq, payload: handler(payload) });
    } catch (error) {
        // Normalize anything thrown into a plain message string — Error
        // instances are not structured-cloneable across all runtimes.
        self.postMessage({
            type: 'error',
            seq,
            payload: { message: error instanceof Error ? error.message : String(error) }
        });
    }
});
|
||||
|
||||
/**
 * Builds a tagged session list for one user from either gamelog rows (self)
 * or presence events (friend).
 *
 * @param {object} payload
 * @param {string} payload.sourceType - 'self_gamelog' or a friend-presence kind.
 * @param {string} [payload.sourceRevision] - revision stamped onto every session.
 * @param {Array} [payload.rows] - gamelog rows (self path).
 * @param {Array} [payload.events] - presence events (friend path).
 * @param {number} [payload.mergeGapMs] - gap tolerance for merging gamelog rows.
 * @param {number} [payload.nowMs] - "now" used to close open sessions.
 * @param {*} [payload.initialStart] - carry-over start for the events path.
 * @param {boolean} [payload.mayHaveOpenTail] - whether the newest self session
 *     may still be in progress.
 * @returns {{sessions: Array, pendingSessionStartAt: *}}
 */
function computeSessionsSnapshot(payload) {
    const revision = payload.sourceRevision || '';

    if (payload.sourceType === 'self_gamelog') {
        const built = buildSessionsFromGamelog(payload.rows, payload.mergeGapMs, payload.nowMs);
        const lastIndex = built.length - 1;
        const tailAllowed = payload.mayHaveOpenTail === true;
        return {
            // Only the newest session can be an open tail, and only when the
            // caller says the source may still be in progress.
            sessions: built.map((session, i) => ({
                ...session,
                isOpenTail: tailAllowed && i === lastIndex,
                sourceRevision: revision
            })),
            pendingSessionStartAt: null
        };
    }

    // Friend path: event-derived sessions are always closed; an unfinished
    // session is surfaced via pendingSessionStartAt instead.
    const { sessions, pendingSessionStartAt } = buildSessionsFromEvents(
        payload.events,
        payload.initialStart
    );
    return {
        pendingSessionStartAt,
        sessions: sessions.map((session) => ({
            ...session,
            isOpenTail: false,
            sourceRevision: revision
        }))
    };
}
|
||||
34
src/workers/activityWorkerRunner.js
Normal file
34
src/workers/activityWorkerRunner.js
Normal file
@@ -0,0 +1,34 @@
|
||||
import ActivityWorker from './activityWorker.js?worker&inline';
|
||||
|
||||
// Lazily-created singleton activity worker.
let worker = null;
// Monotonic sequence number used to correlate requests with responses.
let workerSeq = 0;
// seq -> { resolve, reject } for requests that have not been answered yet.
const pendingWorkerCallbacks = new Map();

/**
 * Returns the shared activity worker, creating and wiring it on first use.
 *
 * Responses carry the `seq` of the request they answer; the matching pending
 * callback is resolved (type 'result') or rejected (type 'error').
 *
 * @returns {Worker} the shared worker instance.
 */
function getWorker() {
    if (!worker) {
        worker = new ActivityWorker();
        worker.onmessage = (event) => {
            const { type, seq, payload } = event.data;
            const callback = pendingWorkerCallbacks.get(seq);
            if (!callback) {
                // Response for a request we no longer track; ignore it.
                return;
            }
            pendingWorkerCallbacks.delete(seq);
            if (type === 'error') {
                callback.reject(new Error(payload.message));
                return;
            }
            callback.resolve(payload);
        };
        // If the worker itself dies (script error, failed load), no
        // per-request 'error' message will ever arrive — reject everything
        // still pending so callers don't await forever.
        worker.onerror = (event) => {
            const error = new Error(event?.message || 'activity worker error');
            for (const callback of pendingWorkerCallbacks.values()) {
                callback.reject(error);
            }
            pendingWorkerCallbacks.clear();
        };
    }
    return worker;
}
|
||||
|
||||
/**
 * Runs one task on the shared activity worker.
 *
 * @param {string} type - task name understood by the worker.
 * @param {object} payload - task-specific arguments (must be structured-cloneable).
 * @returns {Promise<object>} resolves with the worker's result payload, or
 *     rejects with an Error carrying the worker-reported message.
 */
export function runActivityWorkerTask(type, payload) {
    return new Promise((resolve, reject) => {
        // Register the callback before posting so the response can never race
        // ahead of the bookkeeping.
        workerSeq += 1;
        const requestId = workerSeq;
        pendingWorkerCallbacks.set(requestId, { resolve, reject });
        getWorker().postMessage({ type, seq: requestId, payload });
    });
}
|
||||
|
||||
Reference in New Issue
Block a user