pull/2040/merge
Joseph Lee 2023-05-22 01:19:51 +00:00 committed by GitHub
commit d509782940
20 changed files with 1067 additions and 42 deletions

db/mongodb/schema.prisma (new file, 155 lines)

@ -0,0 +1,155 @@
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "mongodb"
url = env("DATABASE_URL")
relationMode = "prisma"
}
model User {
id String @id @map("_id") @db.String
username String @unique @db.String
password String @db.String
role String @map("role") @db.String
createdAt DateTime? @default(now()) @map("created_at") @db.Date
updatedAt DateTime? @updatedAt @map("updated_at") @db.Date
deletedAt DateTime? @map("deleted_at") @db.Date
website Website[]
teamUser TeamUser[]
@@map("user")
}
model Session {
id String @id @map("_id") @db.String
websiteId String @map("website_id") @db.String
hostname String? @db.String
browser String? @db.String
os String? @db.String
device String? @db.String
screen String? @db.String
language String? @db.String
country String? @db.String
subdivision1 String? @db.String
subdivision2 String? @db.String
city String? @db.String
createdAt DateTime? @default(now()) @map("created_at") @db.Date
websiteEvent WebsiteEvent[]
@@index([createdAt])
@@index([websiteId])
@@map("session")
}
model Website {
id String @id @map("_id") @db.String
name String @db.String
domain String? @db.String
shareId String? @unique @map("share_id") @db.String
resetAt DateTime? @map("reset_at") @db.Date
userId String? @map("user_id") @db.String
createdAt DateTime? @default(now()) @map("created_at") @db.Date
updatedAt DateTime? @updatedAt @map("updated_at") @db.Date
deletedAt DateTime? @map("deleted_at") @db.Date
user User? @relation(fields: [userId], references: [id])
teamWebsite TeamWebsite[]
eventData EventData[]
@@index([userId])
@@index([createdAt])
@@map("website")
}
model WebsiteEvent {
id String @id() @map("_id") @db.String
websiteId String @map("website_id") @db.String
sessionId String @map("session_id") @db.String
createdAt DateTime? @default(now()) @map("created_at") @db.Date
urlPath String @map("url_path") @db.String
urlQuery String? @map("url_query") @db.String
referrerPath String? @map("referrer_path") @db.String
referrerQuery String? @map("referrer_query") @db.String
referrerDomain String? @map("referrer_domain") @db.String
pageTitle String? @map("page_title") @db.String
eventType Int @default(1) @map("event_type") @db.Int
eventName String? @map("event_name") @db.String
eventData EventData[]
session Session @relation(fields: [sessionId], references: [id])
@@index([createdAt])
@@index([sessionId])
@@index([websiteId])
@@index([websiteId, createdAt])
@@index([websiteId, sessionId, createdAt])
@@map("website_event")
}
model EventData {
id String @id() @map("_id") @db.String
websiteEventId String @map("website_event_id") @db.String
websiteId String @map("website_id") @db.String
eventKey String @map("event_key") @db.String
eventStringValue String? @map("event_string_value") @db.String
eventNumericValue Float? @map("event_numeric_value") @db.Double // (19, 4)
eventDateValue DateTime? @map("event_date_value") @db.Date
eventDataType Int @map("event_data_type") @db.Int
createdAt DateTime? @default(now()) @map("created_at") @db.Date
website Website @relation(fields: [websiteId], references: [id])
websiteEvent WebsiteEvent @relation(fields: [websiteEventId], references: [id])
@@index([createdAt])
@@index([websiteId])
@@index([websiteEventId])
@@index([websiteId, websiteEventId, createdAt])
@@map("event_data")
}
model Team {
id String @id() @map("_id") @db.String
name String @db.String
accessCode String? @unique @map("access_code") @db.String
createdAt DateTime? @default(now()) @map("created_at") @db.Date
updatedAt DateTime? @updatedAt @map("updated_at") @db.Date
teamUser TeamUser[]
teamWebsite TeamWebsite[]
@@map("team")
}
model TeamUser {
id String @id() @map("_id") @db.String
teamId String @map("team_id") @db.String
userId String @map("user_id") @db.String
role String @map("role") @db.String
createdAt DateTime? @default(now()) @map("created_at") @db.Date
updatedAt DateTime? @updatedAt @map("updated_at") @db.Date
team Team @relation(fields: [teamId], references: [id])
user User @relation(fields: [userId], references: [id])
@@index([teamId])
@@index([userId])
@@map("team_user")
}
model TeamWebsite {
id String @id() @map("_id") @db.String
teamId String @map("team_id") @db.String
websiteId String @map("website_id") @db.String
createdAt DateTime? @default(now()) @map("created_at") @db.Date
team Team @relation(fields: [teamId], references: [id])
website Website @relation(fields: [websiteId], references: [id])
@@index([teamId])
@@index([websiteId])
@@map("team_website")
}
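For local testing, a minimal usage sketch against this schema; the connection string is a placeholder, and uuid mirrors the helper the event writers below import from lib/crypto:

// Assumes DATABASE_URL looks like: mongodb+srv://user:pass@cluster.example.net/umami
import { PrismaClient } from '@prisma/client';
import { uuid } from 'lib/crypto';

const client = new PrismaClient();

// Ids are plain strings mapped to Mongo's _id, so callers supply them explicitly.
async function createAdminUser(passwordHash: string) {
  return client.user.create({
    data: {
      id: uuid(),
      username: 'admin',
      password: passwordHash,
      role: 'admin',
    },
  });
}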


@ -4,6 +4,7 @@ export const MYSQL = 'mysql';
export const CLICKHOUSE = 'clickhouse';
export const KAFKA = 'kafka';
export const KAFKA_PRODUCER = 'kafka-producer';
export const MONGODB = 'mongodb';
// Fixes issue with converting bigint values
BigInt.prototype.toJSON = function () {
@ -15,6 +16,8 @@ export function getDatabaseType(url = process.env.DATABASE_URL) {
if (type === 'postgres') {
return POSTGRESQL;
} else if (type === 'mongodb+srv') {
return MONGODB;
}
return type;
@ -26,6 +29,9 @@ export async function runQuery(queries) {
if (db === POSTGRESQL || db === MYSQL) {
return queries[PRISMA]();
}
if (db === MONGODB) {
return queries[MONGODB]();
}
if (db === CLICKHOUSE) {
if (queries[KAFKA]) {


@ -1,6 +1,6 @@
import prisma from '@umami/prisma-client';
import moment from 'moment-timezone';
import { MYSQL, POSTGRESQL, getDatabaseType } from 'lib/db';
import { MYSQL, POSTGRESQL, getDatabaseType, MONGODB } from 'lib/db';
import { getEventDataType } from './eventData';
import { FILTER_COLUMNS } from './constants';
@ -51,6 +51,10 @@ function getDateQuery(field: string, unit: string, timezone?: string): string {
return `date_format(${field}, '${MYSQL_DATE_FORMATS[unit]}')`;
}
if (db === MONGODB) {
return MYSQL_DATE_FORMATS[unit];
}
}
function getTimestampInterval(field: string): string {
@ -138,6 +142,25 @@ function parseFilters(
};
}
function parseMongoFilter(filters: { [key: string]: any } = {}) {
const query = {};
for (let k in filters) {
const v = filters[k];
if (v !== undefined) {
const tempK = FILTER_COLUMNS[k];
if (tempK !== undefined) {
k = tempK;
}
if (k === 'browser' || k === 'os' || k === 'device' || k === 'language') {
k = 'session.' + k;
}
query[k] = v;
}
}
return { $match: query };
}
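A quick sketch of what parseMongoFilter yields for a typical filter object; the url-to-url_path mapping is an assumption based on how FILTER_COLUMNS is used by the relational queries:

// Hypothetical input: undefined values are dropped, and session-level columns get the
// 'session.' prefix so they resolve against the joined session document.
const stage = parseMongoFilter({ url: '/pricing', browser: 'chrome', country: undefined });
// => { $match: { url_path: '/pricing', 'session.browser': 'chrome' } }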
async function rawQuery(query: string, params: never[] = []): Promise<any> {
const db = getDatabaseType(process.env.DATABASE_URL);
@ -152,11 +175,13 @@ async function rawQuery(query: string, params: never[] = []): Promise<any> {
export default {
...prisma,
getDatabaseType: () => getDatabaseType(process.env.DATABASE_URL),
getDateQuery,
getTimestampInterval,
getFilterQuery,
getEventDataFilterQuery,
toUuid,
parseFilters,
parseMongoFilter,
rawQuery,
};


@ -75,7 +75,7 @@ export default async (
city,
},
});
const prevPeriod = await getWebsiteStats(websiteId, {
let prevPeriod = await getWebsiteStats(websiteId, {
startDate: prevStartDate,
endDate: prevEndDate,
filters: {
@ -92,14 +92,31 @@ export default async (
city,
},
});
const stats = Object.keys(metrics[0]).reduce((obj, key) => {
obj[key] = {
value: Number(metrics[0][key]) || 0,
change: Number(metrics[0][key]) - Number(prevPeriod[0][key]) || 0,
};
return obj;
}, {});
if (prevPeriod.length === 0) {
prevPeriod = [
{
pageviews: 0,
uniques: 0,
bounces: 0,
totaltime: 0,
},
];
}
let stats: object = {
pageviews: 0,
uniques: 0,
bounces: 0,
totaltime: 0,
};
if (metrics.length != 0) {
stats = Object.keys(metrics[0]).reduce((obj, key) => {
obj[key] = {
value: Number(metrics[0][key]) || 0,
change: Number(metrics[0][key]) - Number(prevPeriod[0][key]) || 0,
};
return obj;
}, {});
}
return ok(res, stats);
}
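When metrics are present, the handler now responds with a value/change pair per metric; a hedged sketch of the payload shape (numbers invented):

const exampleStats = {
  pageviews: { value: 120, change: 15 },
  uniques: { value: 80, change: -3 },
  bounces: { value: 24, change: 2 },
  totaltime: { value: 5400, change: -120 },
};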


@ -3,6 +3,16 @@ import cache from 'lib/cache';
import { ROLES } from 'lib/constants';
import prisma from 'lib/prisma';
import { Website, User, Roles } from 'lib/types';
import { getDatabaseType } from '../../lib/db';
function whereDeletedAtNotNull() {
const db = getDatabaseType(process.env.DATABASE_URL);
if (db === 'mongodb') {
return { isSet: false };
} else {
return null;
}
}
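A sketch of the filter this helper yields per provider; the difference is why the plain null check is replaced below:

// Soft-deleted rows carry a deletedAt value; everything else should match.
const notDeleted = { deletedAt: whereDeletedAtNotNull() };
// postgresql / mysql => { deletedAt: null }              (column stays NULL until soft-deleted)
// mongodb            => { deletedAt: { isSet: false } }  (field is absent until soft-deleted)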
export async function getUser(
where: Prisma.UserWhereInput | Prisma.UserWhereUniqueInput,
@ -11,7 +21,14 @@ export async function getUser(
const { includePassword = false, showDeleted = false } = options;
return prisma.client.user.findFirst({
where: { ...where, ...(showDeleted ? {} : { deletedAt: null }) },
where: {
...where,
...(showDeleted
? {}
: {
deletedAt: whereDeletedAtNotNull(),
}),
},
select: {
id: true,
username: true,
@ -26,7 +43,7 @@ export async function getUsers(): Promise<User[]> {
return prisma.client.user.findMany({
take: 100,
where: {
deletedAt: null,
deletedAt: whereDeletedAtNotNull(),
},
orderBy: [
{
@ -76,7 +93,7 @@ export async function getUserWebsites(userId: string): Promise<Website[]> {
return prisma.client.website.findMany({
where: {
userId,
deletedAt: null,
deletedAt: whereDeletedAtNotNull(),
},
orderBy: [
{


@ -1,6 +1,6 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { runQuery, CLICKHOUSE, PRISMA, MONGODB } from 'lib/db';
import { WebsiteEventMetric } from 'lib/types';
import { EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
@ -23,6 +23,7 @@ export async function getEventMetrics(
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
[MONGODB]: () => mongodbQuery(...args),
});
}
@ -50,7 +51,6 @@ async function relationalQuery(
const resetDate = new Date(website?.resetAt || website?.createdAt);
const params: any = [websiteId, resetDate, startDate, endDate];
const filterQuery = getFilterQuery(filters, params);
return rawQuery(
`select
event_name x,
@ -108,3 +108,120 @@ async function clickhouseQuery(
params,
);
}
async function mongodbQuery(
websiteId: string,
{
startDate,
endDate,
timezone = 'utc',
unit = 'day',
filters,
}: {
startDate: Date;
endDate: Date;
timezone: string;
unit: string;
filters: {
url: string;
eventName: string;
};
},
) {
const { getDateQuery, parseMongoFilter, client } = prisma;
const website = await loadWebsite(websiteId);
const resetDate = new Date(website?.resetAt || website?.createdAt);
const mongoFilter = parseMongoFilter(filters);
return await client.websiteEvent.aggregateRaw({
pipeline: [
mongoFilter,
{
$match: {
$expr: {
$and: [
{
$eq: ['$event_type', EVENT_TYPE.customEvent],
},
{
$eq: ['$website_id', websiteId],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: resetDate.toISOString(),
},
},
],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: startDate.toISOString(),
},
},
],
},
{
$lte: [
'$created_at',
{
$dateFromString: {
dateString: endDate.toISOString(),
},
},
],
},
],
},
},
},
{
$project: {
t: {
$dateTrunc: {
date: '$created_at',
unit: unit,
timezone: timezone,
},
},
event_name: 1,
},
},
{
$group: {
_id: {
t: '$t',
name: '$event_name',
},
y: {
$sum: 1,
},
},
},
{
$project: {
x: '$_id.name',
t: {
$dateToString: {
date: '$_id.t',
format: getDateQuery('', unit, timezone),
timezone: timezone,
},
},
y: 1,
_id: 0,
},
},
{
$sort: {
t: 1,
},
},
],
});
}
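For reference, a hedged sketch of the row shape the final $project and $sort stages emit (values invented; aggregateRaw returns these as raw JSON documents):

type EventMetricRow = { x: string; t: string; y: number };
const exampleRows: EventMetricRow[] = [
  { x: 'signup-click', t: '2023-05-01', y: 12 },
  { x: 'signup-click', t: '2023-05-02', y: 9 },
];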


@ -1,11 +1,11 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { EVENT_TYPE } from 'lib/constants';
import { runQuery, CLICKHOUSE, PRISMA, MONGODB } from 'lib/db';
export function getEvents(...args: [websiteId: string, startAt: Date, eventType: number]) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => relationalQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}


@ -1,5 +1,5 @@
import { EVENT_NAME_LENGTH, URL_LENGTH, EVENT_TYPE } from 'lib/constants';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
import kafka from 'lib/kafka';
import prisma from 'lib/prisma';
import { uuid } from 'lib/crypto';
@ -29,6 +29,7 @@ export async function saveEvent(args: {
}) {
return runQuery({
[PRISMA]: () => relationalQuery(args),
[MONGODB]: () => relationalQuery(args),
[CLICKHOUSE]: () => clickhouseQuery(args),
});
}


@ -1,5 +1,5 @@
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
import { WebsiteEventDataMetric } from 'lib/types';
import { loadWebsite } from 'lib/query';
@ -23,6 +23,7 @@ export async function getEventData(
): Promise<WebsiteEventDataMetric[]> {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => mongoQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
@ -120,3 +121,137 @@ async function clickhouseQuery(
params,
);
}
async function mongoQuery(
websiteId: string,
data: {
startDate: Date;
endDate: Date;
timeSeries?: {
unit: string;
timezone: string;
};
eventName: string;
urlPath?: string;
filters: [
{
eventKey?: string;
eventValue?: string | number | boolean | Date;
},
];
},
) {
const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
const { client, parseMongoFilter } = prisma;
const website = await loadWebsite(websiteId);
const resetDate = new Date(website?.resetAt || website?.createdAt);
const mongoFilter = parseMongoFilter(filters);
// Build the optional lookup/match/projection stages up front so the pipeline below
// stays declarative; created_at keeps the $project stage non-empty when no optional
// fields are requested.
let joinAggregation: any = { $match: {} };
let matchAggregation: any = { $match: {} };
const projectProperties: any = { created_at: 1 };
if (eventName || urlPath) {
joinAggregation = {
$lookup: {
from: 'website_event',
localField: 'website_event_id',
foreignField: '_id',
as: 'result',
},
};
projectProperties.event_name = { $arrayElemAt: ['$result.event_name', 0] };
}
if (eventName) {
matchAggregation = {
$match: {
'result.event_name': eventName,
},
};
}
if (urlPath) {
projectProperties.url_path = { $arrayElemAt: ['$result.url_path', 0] };
}
if (timeSeries) {
projectProperties.t = {
$dateTrunc: { date: '$created_at', unit: timeSeries.unit, timezone: timeSeries.timezone },
};
}
return await client.websiteEvent.aggregateRaw({
pipeline: [
mongoFilter,
{
$match: {
$expr: {
$and: [
{
$eq: ['$website_id', websiteId],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: resetDate.toISOString(),
},
},
],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: startDate.toISOString(),
},
},
],
},
{
$lte: [
'$created_at',
{
$dateFromString: {
dateString: endDate.toISOString(),
},
},
],
},
],
},
},
},
joinAggregation,
matchAggregation,
{
$project: projectProperties,
},
{
$group: {
_id: {
url_path: '$url_path',
event_name: '$event_name',
t: '$t',
},
x: {
$sum: 1,
},
},
},
{
$project: {
url_path: '$_id.url_path',
urlPath: '$_id.url_path',
event_name: '$_id.event_name',
eventName: '$_id.event_name',
x: 1,
t: '$_id.t',
_id: 0,
},
},
],
});
}


@ -1,7 +1,7 @@
import { Prisma } from '@prisma/client';
import { EVENT_DATA_TYPE } from 'lib/constants';
import { uuid } from 'lib/crypto';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
import { flattenJSON } from 'lib/eventData';
import kafka from 'lib/kafka';
import prisma from 'lib/prisma';
@ -18,6 +18,7 @@ export async function saveEventData(args: {
}) {
return runQuery({
[PRISMA]: () => relationalQuery(args),
[MONGODB]: () => relationalQuery(args),
[CLICKHOUSE]: () => clickhouseQuery(args),
});
}
@ -30,7 +31,9 @@ async function relationalQuery(data: {
const { websiteId, eventId, eventData } = data;
const jsonKeys = flattenJSON(eventData);
if (jsonKeys.length == 0) {
return Promise.resolve({ count: 0 });
}
//id, websiteEventId, eventStringValue
const flattendData = jsonKeys.map(a => ({
id: uuid(),


@ -1,6 +1,6 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { runQuery, CLICKHOUSE, PRISMA, MONGODB } from 'lib/db';
import { EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
@ -17,6 +17,7 @@ export async function getPageviewMetrics(
) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => mongodbQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
@ -50,7 +51,6 @@ async function relationalQuery(
}
const { filterQuery, joinSession } = parseFilters(filters, params);
return rawQuery(
`select ${column} x, count(*) y
from website_event
@ -111,3 +111,113 @@ async function clickhouseQuery(
params,
);
}
async function mongodbQuery(
websiteId: string,
criteria: {
startDate: Date;
endDate: Date;
column: string;
filters: object;
},
) {
const { startDate, endDate, filters = {}, column } = criteria;
const { parseMongoFilter, client } = prisma;
const website = await loadWebsite(websiteId);
const resetDate = new Date(website?.resetAt || website?.createdAt);
const params: any = [
websiteId,
resetDate,
startDate,
endDate,
column === 'event_name' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
];
let excludeDomainMongo: any = '';
if (column === 'referrer_domain') {
excludeDomainMongo = {
$ne: ['$referrer_domain', website.domain],
};
params.push(website.domain);
}
const mongoFilter = parseMongoFilter(filters);
return await client.websiteEvent.aggregateRaw({
pipeline: [
mongoFilter,
{
$match: {
$expr: {
$and: [
{
$eq: ['$event_type', params[4]],
},
{
$eq: ['$website_id', websiteId],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: resetDate.toISOString(),
},
},
],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: startDate.toISOString(),
},
},
],
},
{
$lte: [
'$created_at',
{
$dateFromString: {
dateString: endDate.toISOString(),
},
},
],
},
excludeDomainMongo,
],
},
},
},
{
$group: {
_id: '$' + column,
y: {
$sum: 1,
},
},
},
{
$project: {
x: '$_id',
y: 1,
_id: 0,
},
},
{
$sort: {
x: 1,
},
},
{
$sort: {
y: -1,
},
},
{
$limit: 100,
},
],
});
}


@ -1,5 +1,5 @@
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
import { EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
@ -20,6 +20,7 @@ export async function getPageviewStats(
) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => mongodbQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
@ -118,3 +119,149 @@ async function clickhouseQuery(
params,
);
}
async function mongodbQuery(
websiteId: string,
criteria: {
startDate: Date;
endDate: Date;
timezone?: string;
unit?: string;
count?: string;
filters: object;
sessionKey?: string;
},
) {
const {
startDate,
endDate,
timezone = 'utc',
unit = 'day',
count = '*',
filters = {},
} = criteria;
const { getDateQuery, client, parseMongoFilter } = prisma;
const website = await loadWebsite(websiteId);
const resetDate = new Date(website?.resetAt || website?.createdAt);
const mongoFilter = parseMongoFilter(filters);
let sessionInclude: any = {};
let sessionGroupAggregation: any = { $match: {} };
const sessionLookUpAggregation: any = {
$lookup: {
from: 'session',
foreignField: '_id',
localField: 'session_id',
as: 'session',
},
};
let sessionProjectAggregation: any = { $match: {} };
if (count !== '*') {
// Count unique sessions instead of raw pageviews: project session_id so the extra
// group/dedup stages below can collapse repeat views within each time bucket.
sessionInclude = { session_id: 1 };
sessionGroupAggregation = {
$group: {
_id: {
t: '$t',
session_id: '$session_id',
},
},
};
sessionProjectAggregation = {
$project: {
t: '$_id.t',
},
};
}
return await client.websiteEvent.aggregateRaw({
pipeline: [
sessionLookUpAggregation,
mongoFilter,
{
$match: {
$expr: {
$and: [
{
$eq: ['$event_type', EVENT_TYPE.pageView],
},
{
$eq: ['$website_id', websiteId],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: resetDate.toISOString(),
},
},
],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: startDate.toISOString(),
},
},
],
},
{
$lte: [
'$created_at',
{
$dateFromString: {
dateString: endDate.toISOString(),
},
},
],
},
],
},
},
},
{
$project: {
t: {
$dateTrunc: {
date: '$created_at',
unit: unit,
},
},
...sessionInclude,
},
},
sessionGroupAggregation,
sessionProjectAggregation,
{
$group: {
_id: {
t: '$t',
session_id: '$session_id',
},
y: {
$sum: 1,
},
},
},
{
$project: {
x: {
$dateToString: {
date: '$_id.t',
format: getDateQuery('', unit, timezone),
timezone: timezone,
},
},
y: 1,
_id: 0,
},
},
{
$sort: {
x: 1,
},
},
],
});
}


@ -1,4 +1,4 @@
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
import kafka from 'lib/kafka';
import prisma from 'lib/prisma';
import cache from 'lib/cache';
@ -7,6 +7,7 @@ import { Prisma } from '@prisma/client';
export async function createSession(args: Prisma.SessionCreateInput) {
return runQuery({
[PRISMA]: () => relationalQuery(args),
[MONGODB]: () => relationalQuery(args),
[CLICKHOUSE]: () => clickhouseQuery(args),
}).then(async data => {
if (cache.enabled) {


@ -1,11 +1,12 @@
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
import { Prisma } from '@prisma/client';
export async function getSession(args: { id: string }) {
return runQuery({
[PRISMA]: () => relationalQuery(args),
[MONGODB]: () => relationalQuery(args),
[CLICKHOUSE]: () => clickhouseQuery(args),
});
}


@ -1,6 +1,6 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { runQuery, CLICKHOUSE, PRISMA, MONGODB } from 'lib/db';
import { EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
@ -12,6 +12,7 @@ export async function getSessionMetrics(
) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => mongodbQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
@ -47,7 +48,6 @@ async function relationalQuery(
params,
);
}
async function clickhouseQuery(
websiteId: string,
data: { startDate: Date; endDate: Date; column: string; filters: object },
@ -73,3 +73,103 @@ async function clickhouseQuery(
params,
);
}
async function mongodbQuery(
websiteId: string,
criteria: { startDate: Date; endDate: Date; column: string; filters: object },
) {
const website = await loadWebsite(websiteId);
const resetDate = new Date(website?.resetAt || website?.createdAt);
const { startDate, endDate, column, filters = {} } = criteria;
const { parseMongoFilter, client } = prisma;
const mongoFilter = parseMongoFilter(filters);
return await client.websiteEvent.aggregateRaw({
pipeline: [
mongoFilter,
{
$match: {
$expr: {
$and: [
{
$eq: ['$website_id', websiteId],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: resetDate.toISOString(),
},
},
],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: startDate.toISOString(),
},
},
],
},
{
$lte: [
'$created_at',
{
$dateFromString: {
dateString: endDate.toISOString(),
},
},
],
},
],
},
},
},
{
$group: {
_id: '$session_id',
},
},
{
$lookup: {
from: 'session',
localField: '_id',
foreignField: '_id',
as: 'session',
},
},
{
$project: {
session: {
$arrayElemAt: ['$session', 0],
},
},
},
{
$group: {
_id: '$session.' + column,
sum: {
$sum: 1,
},
},
},
{
$project: {
x: '$_id',
y: '$sum',
_id: 0,
},
},
{
$sort: {
sum: -1,
},
},
{
$limit: 100,
},
],
});
}


@ -1,10 +1,11 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, PRISMA, CLICKHOUSE } from 'lib/db';
import { runQuery, PRISMA, CLICKHOUSE, MONGODB } from 'lib/db';
export async function getSessions(...args: [websiteId: string, startAt: Date]) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => relationalQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}


@ -1,11 +1,12 @@
import { subMinutes } from 'date-fns';
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { runQuery, CLICKHOUSE, PRISMA, MONGODB } from 'lib/db';
export async function getActiveVisitors(...args: [websiteId: string]) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => mongodbQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
@ -18,11 +19,11 @@ async function relationalQuery(websiteId: string) {
return rawQuery(
`select count(distinct session_id) x
from website_event
join website
on website_event.website_id = website.website_id
where website.website_id = $1${toUuid()}
and website_event.created_at >= $2`,
params,
);
}
@ -39,3 +40,48 @@ async function clickhouseQuery(websiteId: string) {
params,
);
}
async function mongodbQuery(websiteId: string) {
const { client } = prisma;
const date = subMinutes(new Date(), 5);
const result: any = await client.websiteEvent.aggregateRaw({
pipeline: [
{
$match: {
$expr: {
$and: [
{
$eq: ['$website_id', websiteId],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: date.toISOString(),
},
},
],
},
],
},
},
},
{
$group: {
_id: '$session_id',
},
},
{
$count: 'x',
},
],
});
if (result.length > 0) {
return { x: result[0].x };
} else {
return { x: 0 };
}
}
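A usage sketch; the website id is a placeholder and the return shape follows the branch above:

// Hypothetical call: counts distinct session_id values seen in the last five minutes.
const active = await getActiveVisitors('website-id-placeholder');
// => { x: 3 }, or { x: 0 } when the pipeline returns no documents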


@ -1,6 +1,6 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { runQuery, CLICKHOUSE, PRISMA, MONGODB } from 'lib/db';
import { EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
@ -12,6 +12,7 @@ export async function getWebsiteStats(
) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[MONGODB]: () => mongodbQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
@ -26,7 +27,6 @@ async function relationalQuery(
const resetDate = new Date(website?.resetAt || website?.createdAt);
const params: any = [websiteId, resetDate, startDate, endDate];
const { filterQuery, joinSession } = parseFilters(filters, params);
return rawQuery(
`select sum(t.c) as "pageviews",
count(distinct t.session_id) as "uniques",
@ -86,3 +86,124 @@ async function clickhouseQuery(
params,
);
}
async function mongodbQuery(
websiteId: string,
criteria: { startDate: Date; endDate: Date; filters: object },
) {
const { startDate, endDate, filters = {} } = criteria;
const { parseMongoFilter, client } = prisma;
const website = await loadWebsite(websiteId);
const resetDate = new Date(website?.resetAt || website?.createdAt);
const mongoFilter = parseMongoFilter(filters);
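// Mirrors the relational stats query: group pageviews by session and hour, flag
// single-view groups as bounces, derive time on site from min/max timestamps,
// then roll everything up into a single totals document.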
return await client.websiteEvent.aggregateRaw({
pipeline: [
mongoFilter,
{
$match: {
$expr: {
$and: [
{
$eq: ['$event_type', EVENT_TYPE.pageView],
},
{
$eq: ['$website_id', websiteId],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: resetDate.toISOString(),
},
},
],
},
{
$gte: [
'$created_at',
{
$dateFromString: {
dateString: startDate.toISOString(),
},
},
],
},
{
$lte: [
'$created_at',
{
$dateFromString: {
dateString: endDate.toISOString(),
},
},
],
},
],
},
},
},
{
$project: {
session_id: '$session_id',
hour: {
$toString: { $hour: '$created_at' },
},
created_at: '$created_at',
},
},
{
$group: {
_id: {
$concat: ['$session_id', ':', '$hour'],
},
session_id: { $first: '$session_id' },
hour: { $first: '$hour' },
count: { $sum: 1 },
timeMax: { $max: '$created_at' },
timeMin: { $min: '$created_at' },
},
},
{
$project: {
_id: '$_id',
session_id: '$session_id',
hour: '$hour',
count: '$count',
time: {
$dateDiff: {
endDate: '$timeMax',
startDate: '$timeMin',
unit: 'second',
},
},
bounce: {
$cond: {
if: { $eq: ['$count', 1] },
then: 1,
else: 0,
},
},
},
},
{
$group: {
_id: '$session_id',
pageviews: { $sum: '$count' },
bounces: { $sum: '$bounce' },
totaltime: { $sum: '$time' },
},
},
{
$group: {
_id: '',
pageviews: { $sum: '$pageviews' },
uniques: { $sum: 1 },
bounces: { $sum: '$bounces' },
totaltime: { $sum: '$totaltime' },
},
},
],
});
}


@ -15,6 +15,8 @@ function getDatabaseType(url = process.env.DATABASE_URL) {
if (type === 'postgres') {
return 'postgresql';
} else if (type === 'mongodb+srv') {
return 'mongodb';
}
return type;
@ -50,10 +52,19 @@ async function checkConnection() {
}
async function checkDatabaseVersion(databaseType) {
const query = await prisma.$queryRaw`select version() as version`;
const version = semver.valid(semver.coerce(query[0].version));
let version;
if (databaseType === 'mongodb') {
const query = await prisma.$runCommandRaw({
buildInfo: 1,
});
version = semver.valid(query.version);
} else {
const query = await prisma.$queryRaw`select version() as version`;
version = semver.valid(semver.coerce(query[0].version));
}
const minVersion = databaseType === 'postgresql' ? '9.4.0' : '5.7.0';
const minVersion =
databaseType === 'postgresql' ? '9.4.0' : databaseType === 'mongodb' ? '3.0.0' : '5.7.0';
if (semver.lt(version, minVersion)) {
throw new Error(
@ -65,6 +76,11 @@ async function checkDatabaseVersion(databaseType) {
}
async function checkV1Tables() {
if (databaseType === 'mongodb') {
// Ignore
return;
}
try {
await prisma.$queryRaw`select * from account limit 1`;
@ -78,7 +94,11 @@ async function checkV1Tables() {
}
async function applyMigration() {
console.log(execSync('prisma migrate deploy').toString());
if (databaseType === 'mongodb') {
console.log(execSync('prisma db push').toString());
} else {
console.log(execSync('prisma migrate deploy').toString());
}
success('Database is up to date.');
}


@ -9,6 +9,8 @@ function getDatabaseType(url = process.env.DATABASE_URL) {
if (type === 'postgres') {
return 'postgresql';
} else if (type === 'mongodb+srv') {
return 'mongodb';
}
return type;
@ -16,7 +18,7 @@ function getDatabaseType(url = process.env.DATABASE_URL) {
const databaseType = getDatabaseType();
if (!databaseType || !['mysql', 'postgresql'].includes(databaseType)) {
if (!databaseType || !['mysql', 'postgresql', 'mongodb'].includes(databaseType)) {
throw new Error('Missing or invalid database');
}