import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { WebsiteEventMetric } from 'lib/types';
import { EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
export async function getEventMetrics(
|
|
|
|
...args: [
|
|
|
|
websiteId: string,
|
|
|
|
data: {
|
|
|
|
startDate: Date;
|
|
|
|
endDate: Date;
|
|
|
|
timezone: string;
|
|
|
|
unit: string;
|
|
|
|
filters: {
|
|
|
|
url: string;
|
|
|
|
eventName: string;
|
|
|
|
};
|
|
|
|
},
|
|
|
|
]
|
|
|
|
): Promise<WebsiteEventMetric[]> {
|
|
|
|
return runQuery({
|
|
|
|
[PRISMA]: () => relationalQuery(...args),
|
|
|
|
[CLICKHOUSE]: () => clickhouseQuery(...args),
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
async function relationalQuery(
|
|
|
|
websiteId: string,
|
|
|
|
{
|
|
|
|
startDate,
|
|
|
|
endDate,
|
|
|
|
timezone = 'utc',
|
|
|
|
unit = 'day',
|
|
|
|
filters,
|
|
|
|
}: {
|
|
|
|
startDate: Date;
|
|
|
|
endDate: Date;
|
|
|
|
timezone: string;
|
|
|
|
unit: string;
|
|
|
|
filters: {
|
|
|
|
url: string;
|
|
|
|
eventName: string;
|
|
|
|
};
|
|
|
|
},
|
|
|
|
) {
|
2023-01-11 20:01:44 +01:00
|
|
|
const { toUuid, rawQuery, getDateQuery, getFilterQuery } = prisma;
|
2023-04-02 02:38:35 +02:00
|
|
|
const website = await loadWebsite(websiteId);
|
2023-04-20 06:16:56 +02:00
|
|
|
const resetDate = new Date(website?.resetAt || website?.createdAt);
|
2023-03-27 20:25:16 +02:00
|
|
|
const params: any = [websiteId, resetDate, startDate, endDate];
|
2023-04-02 00:44:30 +02:00
|
|
|
const filterQuery = getFilterQuery(filters, params);
|
2022-12-13 04:45:38 +01:00
|
|
|
|
|
|
|
return rawQuery(
|
2023-05-07 19:56:19 +02:00
|
|
|
`with event_data as (
|
|
|
|
select d.website_event_id,
|
|
|
|
jsonb_object_agg(
|
|
|
|
d.event_key,
|
|
|
|
case
|
|
|
|
d.event_data_type
|
|
|
|
when 1 then to_jsonb(d.event_string_value) -- string
|
|
|
|
when 2 then to_jsonb(d.event_numeric_value) -- number
|
|
|
|
when 3 then to_jsonb(d.event_bool_value) -- boolean
|
|
|
|
when 4 then to_jsonb(d.event_date_value) -- date
|
|
|
|
when 5 then d.event_string_value::jsonb -- array
|
|
|
|
end
|
|
|
|
) filter (
|
|
|
|
where d.event_key is not null
|
|
|
|
) as event_data
|
|
|
|
from event_data d
|
|
|
|
group by d.website_event_id
|
|
|
|
)
|
|
|
|
select
|
|
|
|
w.event_name x,
|
|
|
|
e.event_data d,
|
|
|
|
${getDateQuery('w.created_at', unit, timezone)} t,
|
2022-12-13 04:45:38 +01:00
|
|
|
count(*) y
|
2023-05-07 19:56:19 +02:00
|
|
|
from website_event w
|
|
|
|
left join event_data d on w.event_id = d.website_event_id
|
2023-01-11 20:01:44 +01:00
|
|
|
where website_id = $1${toUuid()}
|
2023-03-27 20:25:16 +02:00
|
|
|
and created_at >= $2
|
|
|
|
and created_at between $3 and $4
|
2022-12-13 04:45:38 +01:00
|
|
|
and event_type = ${EVENT_TYPE.customEvent}
|
2023-04-02 00:44:30 +02:00
|
|
|
${filterQuery}
|
2023-05-07 19:56:19 +02:00
|
|
|
group by 1, 2, 3
|
2023-05-04 21:10:49 +02:00
|
|
|
order by 3`,
|
2022-12-13 04:45:38 +01:00
|
|
|
params,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
async function clickhouseQuery(
|
|
|
|
websiteId: string,
|
|
|
|
{
|
|
|
|
startDate,
|
|
|
|
endDate,
|
|
|
|
timezone = 'utc',
|
|
|
|
unit = 'day',
|
|
|
|
filters,
|
|
|
|
}: {
|
|
|
|
startDate: Date;
|
|
|
|
endDate: Date;
|
|
|
|
timezone: string;
|
|
|
|
unit: string;
|
|
|
|
filters: {
|
|
|
|
url: string;
|
|
|
|
eventName: string;
|
|
|
|
};
|
|
|
|
},
|
|
|
|
) {
|
2023-03-27 20:25:16 +02:00
|
|
|
const { rawQuery, getDateQuery, getDateFormat, getBetweenDates, getFilterQuery } = clickhouse;
|
2023-04-02 02:38:35 +02:00
|
|
|
const website = await loadWebsite(websiteId);
|
2023-04-20 06:16:56 +02:00
|
|
|
const resetDate = new Date(website?.resetAt || website?.createdAt);
|
2023-03-27 20:25:16 +02:00
|
|
|
const params = { websiteId };
|
2022-12-13 04:45:38 +01:00
|
|
|
|
|
|
|
return rawQuery(
|
2023-05-07 19:56:19 +02:00
|
|
|
`with event_data as (
|
|
|
|
select d.website_event_id,
|
|
|
|
jsonb_object_agg(
|
|
|
|
d.event_key,
|
|
|
|
case
|
|
|
|
d.event_data_type
|
|
|
|
when 1 then to_jsonb(d.event_string_value) -- string
|
|
|
|
when 2 then to_jsonb(d.event_numeric_value) -- number
|
|
|
|
when 3 then to_jsonb(d.event_bool_value) -- boolean
|
|
|
|
when 4 then to_jsonb(d.event_date_value) -- date
|
|
|
|
when 5 then d.event_string_value::jsonb -- array
|
|
|
|
end
|
|
|
|
) filter (
|
|
|
|
where d.event_key is not null
|
|
|
|
) as event_data
|
|
|
|
from event_data d
|
|
|
|
group by d.website_event_id
|
|
|
|
)
|
|
|
|
select
|
|
|
|
w.event_name x,
|
|
|
|
d.event_data d,
|
|
|
|
${getDateQuery('w.created_at', unit, timezone)} t,
|
2022-12-13 04:45:38 +01:00
|
|
|
count(*) y
|
2023-05-07 19:56:19 +02:00
|
|
|
from website_event w
|
|
|
|
left join event_data d on w.event_id = d.website_event_id
|
2023-01-12 09:02:12 +01:00
|
|
|
where website_id = {websiteId:UUID}
|
2023-05-07 19:56:19 +02:00
|
|
|
and w.event_type = ${EVENT_TYPE.customEvent}
|
|
|
|
and w.created_at >= ${getDateFormat(resetDate)}
|
|
|
|
and ${getBetweenDates('w.created_at', startDate, endDate)}
|
2022-12-13 04:45:38 +01:00
|
|
|
${getFilterQuery(filters, params)}
|
2023-05-07 19:56:19 +02:00
|
|
|
group by x, d, t
|
2022-12-13 04:45:38 +01:00
|
|
|
order by t`,
|
|
|
|
params,
|
|
|
|
);
|
|
|
|
}
|