From 728e4cff5ba85a7686ea6eb48e51be6477ef2dc3 Mon Sep 17 00:00:00 2001 From: Mike Cao Date: Sat, 1 Apr 2023 17:38:35 -0700 Subject: [PATCH] Updated queries to use cache. --- lib/auth.ts | 7 ++-- lib/query.ts | 34 ++++++++++++++++++- lib/session.ts | 22 +++--------- lib/types.ts | 1 + queries/analytics/event/getEventMetrics.ts | 7 ++-- queries/analytics/eventData/getEventData.ts | 7 ++-- .../analytics/pageview/getPageviewStats.ts | 7 ++-- .../analytics/session/getSessionMetrics.ts | 9 +++-- queries/analytics/stats/getWebsiteStats.ts | 7 ++-- 9 files changed, 59 insertions(+), 42 deletions(-) diff --git a/lib/auth.ts b/lib/auth.ts index 614a47ef..6cc48ff2 100644 --- a/lib/auth.ts +++ b/lib/auth.ts @@ -7,6 +7,7 @@ import { getTeamUser, getTeamUserById } from 'queries'; import { getTeamWebsite, getTeamWebsiteByTeamMemberId } from 'queries/admin/teamWebsite'; import { validate } from 'uuid'; import { Auth } from './types'; +import { loadWebsite } from './query'; const log = debug('umami:auth'); @@ -66,7 +67,7 @@ export async function canViewWebsite({ user, shareToken }: Auth, websiteId: stri return true; } - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); if (website.userId) { return user.id === website.userId; @@ -98,7 +99,7 @@ export async function canUpdateWebsite({ user }: Auth, websiteId: string) { return false; } - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); if (website.userId) { return user.id === website.userId; @@ -112,7 +113,7 @@ export async function canDeleteWebsite({ user }: Auth, websiteId: string) { return true; } - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); if (website.userId) { return user.id === website.userId; diff --git a/lib/query.ts b/lib/query.ts index 9b1104fd..0fa50595 100644 --- a/lib/query.ts +++ b/lib/query.ts @@ -1,5 +1,5 @@ import cache from 'lib/cache'; -import { getWebsite 
} from 'queries'; +import { getWebsite, getSession, getUser } from 'queries'; import { Website } from './types'; export async function loadWebsite(websiteId: string): Promise { @@ -17,3 +17,35 @@ export async function loadWebsite(websiteId: string): Promise { return website; } + +export async function loadSession(sessionId: string): Promise { + let session; + + if (cache.enabled) { + session = await cache.fetchSession(sessionId); + } else { + session = await getSession({ id: sessionId }); + } + + if (!session) { + return null; + } + + return session; +} + +export async function loadUser(userId: string): Promise { + let user; + + if (cache.enabled) { + user = await cache.fetchUser(userId); + } else { + user = await getUser({ id: userId }); + } + + if (!user || user.deletedAt) { + return null; + } + + return user; +} diff --git a/lib/session.ts b/lib/session.ts index ffc16e23..19c2e405 100644 --- a/lib/session.ts +++ b/lib/session.ts @@ -1,11 +1,11 @@ -import cache from 'lib/cache'; import clickhouse from 'lib/clickhouse'; import { secret, uuid } from 'lib/crypto'; import { getClientInfo, getJsonBody } from 'lib/detect'; import { parseToken } from 'next-basics'; import { CollectRequestBody, NextApiRequestCollect } from 'pages/api/send'; -import { createSession, getSession, getWebsite } from 'queries'; +import { createSession } from 'queries'; import { validate } from 'uuid'; +import { loadSession, loadWebsite } from './query'; export async function findSession(req: NextApiRequestCollect) { const { payload } = getJsonBody(req); @@ -33,15 +33,9 @@ export async function findSession(req: NextApiRequestCollect) { } // Find website - let website; + const website = await loadWebsite(websiteId); - if (cache.enabled) { - website = await cache.fetchWebsite(websiteId); - } else { - website = await getWebsite({ id: websiteId }); - } - - if (!website || website.deletedAt) { + if (!website) { throw new Error(`Website not found: ${websiteId}`); } @@ -68,13 +62,7 @@ export async 
function findSession(req: NextApiRequestCollect) { } // Find session - let session; - - if (cache.enabled) { - session = await cache.fetchSession(sessionId); - } else { - session = await getSession({ id: sessionId }); - } + let session = await loadSession(sessionId); // Create a session if not found if (!session) { diff --git a/lib/types.ts b/lib/types.ts index 36c9c56a..034326f0 100644 --- a/lib/types.ts +++ b/lib/types.ts @@ -14,6 +14,7 @@ export type KafkaTopics = ObjectValues; export interface EventData { [key: string]: number | string | EventData | number[] | string[] | EventData[]; } + export interface Auth { user?: { id: string; diff --git a/queries/analytics/event/getEventMetrics.ts b/queries/analytics/event/getEventMetrics.ts index 388ac1ab..662e072b 100644 --- a/queries/analytics/event/getEventMetrics.ts +++ b/queries/analytics/event/getEventMetrics.ts @@ -1,10 +1,9 @@ import prisma from 'lib/prisma'; import clickhouse from 'lib/clickhouse'; import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db'; -import cache from 'lib/cache'; import { WebsiteEventMetric } from 'lib/types'; import { EVENT_TYPE } from 'lib/constants'; -import { getWebsite } from 'queries'; +import { loadWebsite } from 'lib/query'; export async function getEventMetrics( ...args: [ @@ -49,7 +48,7 @@ async function relationalQuery( }, ) { const { toUuid, rawQuery, getDateQuery, getFilterQuery } = prisma; - const website = await getWebsite({ id: websiteId }); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params: any = [websiteId, resetDate, startDate, endDate]; const filterQuery = getFilterQuery(filters, params); @@ -91,7 +90,7 @@ async function clickhouseQuery( }, ) { const { rawQuery, getDateQuery, getDateFormat, getBetweenDates, getFilterQuery } = clickhouse; - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const 
params = { websiteId }; diff --git a/queries/analytics/eventData/getEventData.ts b/queries/analytics/eventData/getEventData.ts index cd44d29a..5b72b111 100644 --- a/queries/analytics/eventData/getEventData.ts +++ b/queries/analytics/eventData/getEventData.ts @@ -1,9 +1,8 @@ -import cache from 'lib/cache'; import clickhouse from 'lib/clickhouse'; import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db'; import prisma from 'lib/prisma'; import { WebsiteEventDataMetric } from 'lib/types'; -import { getWebsite } from 'queries'; +import { loadWebsite } from 'lib/query'; export async function getEventData( ...args: [ @@ -49,7 +48,7 @@ async function relationalQuery( ) { const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data; const { toUuid, rawQuery, getEventDataFilterQuery, getDateQuery } = prisma; - const website = await getWebsite({ id: websiteId }); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params: any = [websiteId, resetDate, startDate, endDate, eventName || '']; @@ -99,7 +98,7 @@ async function clickhouseQuery( const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data; const { rawQuery, getDateFormat, getBetweenDates, getDateQuery, getEventDataFilterQuery } = clickhouse; - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params = { websiteId }; diff --git a/queries/analytics/pageview/getPageviewStats.ts b/queries/analytics/pageview/getPageviewStats.ts index be3e6eff..275af58b 100644 --- a/queries/analytics/pageview/getPageviewStats.ts +++ b/queries/analytics/pageview/getPageviewStats.ts @@ -1,9 +1,8 @@ -import cache from 'lib/cache'; import clickhouse from 'lib/clickhouse'; import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db'; import prisma from 'lib/prisma'; import { EVENT_TYPE } from 'lib/constants'; -import { getWebsite } from 'queries'; 
+import { loadWebsite } from 'lib/query'; export async function getPageviewStats( ...args: [ @@ -47,7 +46,7 @@ async function relationalQuery( sessionKey = 'session_id', } = criteria; const { toUuid, getDateQuery, parseFilters, rawQuery } = prisma; - const website = await getWebsite({ id: websiteId }); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params: any = [websiteId, resetDate, startDate, endDate]; const { filterQuery, joinSession } = parseFilters(filters, params); @@ -95,7 +94,7 @@ async function clickhouseQuery( getDateQuery, getBetweenDates, } = clickhouse; - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params = { websiteId }; const { filterQuery } = parseFilters(filters, params); diff --git a/queries/analytics/session/getSessionMetrics.ts b/queries/analytics/session/getSessionMetrics.ts index 4a155b61..bfd96e8f 100644 --- a/queries/analytics/session/getSessionMetrics.ts +++ b/queries/analytics/session/getSessionMetrics.ts @@ -1,9 +1,8 @@ import prisma from 'lib/prisma'; import clickhouse from 'lib/clickhouse'; import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db'; -import cache from 'lib/cache'; -import { EVENT_TYPE, FILTER_COLUMNS } from 'lib/constants'; -import { getWebsite } from 'queries'; +import { EVENT_TYPE } from 'lib/constants'; +import { loadWebsite } from 'lib/query'; export async function getSessionMetrics( ...args: [ @@ -21,7 +20,7 @@ async function relationalQuery( websiteId: string, criteria: { startDate: Date; endDate: Date; column: string; filters: object }, ) { - const website = await getWebsite({ id: websiteId }); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const { startDate, endDate, column, filters = {} } = criteria; const { toUuid, parseFilters, rawQuery } = prisma; @@ -55,7 +54,7 @@ async 
function clickhouseQuery( ) { const { startDate, endDate, column, filters = {} } = data; const { getDateFormat, parseFilters, getBetweenDates, rawQuery } = clickhouse; - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params = { websiteId }; const { filterQuery } = parseFilters(filters, params); diff --git a/queries/analytics/stats/getWebsiteStats.ts b/queries/analytics/stats/getWebsiteStats.ts index 3fddb9eb..73f2bdab 100644 --- a/queries/analytics/stats/getWebsiteStats.ts +++ b/queries/analytics/stats/getWebsiteStats.ts @@ -1,9 +1,8 @@ import prisma from 'lib/prisma'; import clickhouse from 'lib/clickhouse'; import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db'; -import cache from 'lib/cache'; import { EVENT_TYPE } from 'lib/constants'; -import { getWebsite } from 'queries'; +import { loadWebsite } from 'lib/query'; export async function getWebsiteStats( ...args: [ @@ -23,7 +22,7 @@ async function relationalQuery( ) { const { startDate, endDate, filters = {} } = criteria; const { toUuid, getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma; - const website = await getWebsite({ id: websiteId }); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params: any = [websiteId, resetDate, startDate, endDate]; const { filterQuery, joinSession } = parseFilters(filters, params); @@ -58,7 +57,7 @@ async function clickhouseQuery( ) { const { startDate, endDate, filters = {} } = criteria; const { rawQuery, getDateFormat, getDateQuery, getBetweenDates, parseFilters } = clickhouse; - const website = await cache.fetchWebsite(websiteId); + const website = await loadWebsite(websiteId); const resetDate = website?.resetAt || website?.createdAt; const params = { websiteId }; const { filterQuery } = parseFilters(filters, params);