fix: add last right value to series and refactor general stats
parent 2e3402cb1c
commit 38eec37140
@@ -489,7 +489,7 @@ SELECT
     CASE
         WHEN COALESCE(os.open_count, 0) > 0 THEN COALESCE(rs.true_result_count, 0)::float / COALESCE(os.open_count, 0)::float
         ELSE 0
-    END AS conversion,
+    END::float AS conversion,
     COALESCE(at.avg_time, 0) AS avg_time
 FROM
     TimeBucket tb
@@ -1,6 +1,6 @@
 // Code generated by sqlc. DO NOT EDIT.
 // versions:
-//   sqlc v1.26.0
+//   sqlc v1.25.0
 
 package sqlcgen
 
@@ -1,6 +1,6 @@
 // Code generated by sqlc. DO NOT EDIT.
 // versions:
-//   sqlc v1.26.0
+//   sqlc v1.25.0
 
 package sqlcgen
 
@@ -1,6 +1,6 @@
 // Code generated by sqlc. DO NOT EDIT.
 // versions:
-//   sqlc v1.26.0
+//   sqlc v1.25.0
 // source: queries.sql
 
 package sqlcgen
@@ -556,18 +556,15 @@ func (q *Queries) DuplicateQuestion(ctx context.Context, id int64) (DuplicateQue
 const generalStatistics = `-- name: GeneralStatistics :many
 WITH TimeBucket AS (
     SELECT
-        CASE
-            WHEN EXTRACT(epoch FROM $2::timestamp) - EXTRACT(epoch FROM $1::timestamp) > 172800 THEN date_trunc('day', timestamp_bucket)
-            ELSE date_trunc('hour', timestamp_bucket)
-        END::TIMESTAMP AS time_interval_start,
-        LEAD(
-            CASE
-                WHEN EXTRACT(epoch FROM $2::timestamp) - EXTRACT(epoch FROM $1::timestamp) > 172800 THEN date_trunc('day', timestamp_bucket)
-                ELSE date_trunc('hour', timestamp_bucket)
-            END::TIMESTAMP
-        ) OVER (ORDER BY timestamp_bucket) AS time_interval_end
+        date_trunc('hour', timestamp_bucket)::TIMESTAMP AS time_interval_start,
+        COALESCE(LEAD(
+            date_trunc('hour', timestamp_bucket)::TIMESTAMP
+        ) OVER (ORDER BY timestamp_bucket), NOW()) AS time_interval_end
     FROM
-        generate_series($1::timestamp with time zone, $2::timestamp with time zone, '1 hour'::interval) AS timestamp_bucket
+        generate_series(TO_TIMESTAMP($1), TO_TIMESTAMP($2), CASE
+            WHEN EXTRACT(epoch FROM TO_TIMESTAMP($2)) - EXTRACT(epoch FROM TO_TIMESTAMP($1)) > 172800 THEN '1 day'::interval
+            ELSE '1 hour'::interval
+        END) AS timestamp_bucket
 ),
 OpenStats AS (
     SELECT
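Note: LEAD() returns NULL for the last row of the generated series, so the final bucket previously had no right boundary; wrapping it in COALESCE(..., NOW()) is what supplies the "last right value" from the commit title. A minimal standalone sketch of that behaviour (illustrative query, not taken from the repository):

-- Hypothetical illustration: the last row's LEAD() is NULL until COALESCE closes it with NOW().
SELECT
    ts                                           AS bucket_start,
    LEAD(ts) OVER (ORDER BY ts)                  AS bucket_end_raw,  -- NULL on the last row
    COALESCE(LEAD(ts) OVER (ORDER BY ts), NOW()) AS bucket_end       -- closed with NOW()
FROM generate_series(NOW() - interval '3 hours', NOW(), interval '1 hour') AS ts;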
@@ -584,8 +581,8 @@ WITH TimeBucket AS (
     WHERE
         answer.quiz_id = $3
         AND start = TRUE
-        AND created_at >= $1::timestamp
-        AND created_at <= $2::timestamp
+        AND created_at >= TO_TIMESTAMP($1)
+        AND created_at <= TO_TIMESTAMP($2)
     GROUP BY
         session
 ) AS first_starts
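Note: TO_TIMESTAMP() takes epoch seconds (double precision) and returns a timestamptz, which is why the bounds are now passed as raw Unix timestamps and, further down, sqlc regenerates the query parameters as float64 instead of time.Time. A standalone sketch of the conversion (illustrative values only):

-- Hypothetical illustration: epoch seconds in, timestamptz out.
SELECT TO_TIMESTAMP(1700000000);                             -- 2023-11-14 22:13:20+00
SELECT TO_TIMESTAMP(1700003600) - TO_TIMESTAMP(1700000000);  -- 01:00:00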
@@ -609,8 +606,8 @@ WITH TimeBucket AS (
     WHERE
         answer.quiz_id = $3
         AND result = TRUE
-        AND created_at >= $1::timestamp
-        AND created_at <= $2::timestamp
+        AND created_at >= TO_TIMESTAMP($1)
+        AND created_at <= TO_TIMESTAMP($2)
     GROUP BY
         session
 ) AS first_results
@@ -623,7 +620,7 @@ WITH TimeBucket AS (
     SELECT
         tb.time_interval_start,
         tb.time_interval_end,
-        AVG(EXTRACT(epoch FROM (a.created_at - b.created_at))) AS avg_time
+        AVG(EXTRACT(epoch FROM (a.created_at)) - EXTRACT(epoch FROM (b.created_at))) AS avg_time
     FROM
         answer a
         JOIN answer b ON a.session = b.session
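Note: for two timestamps the rewritten expression is numerically equivalent to the old one, since the difference of the two epoch values equals the epoch of their interval. A quick standalone check with illustrative literals:

-- Hypothetical illustration: both expressions return 2700 seconds (45 minutes).
SELECT EXTRACT(epoch FROM (TIMESTAMP '2024-01-01 01:00' - TIMESTAMP '2024-01-01 00:15'))                   AS via_interval,
       EXTRACT(epoch FROM TIMESTAMP '2024-01-01 01:00') - EXTRACT(epoch FROM TIMESTAMP '2024-01-01 00:15') AS via_epoch_diff;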
@@ -634,10 +631,10 @@ WITH TimeBucket AS (
         AND a.result = TRUE
         AND b.start = TRUE
         AND b.quiz_id = $3
-        AND a.created_at >= $1::timestamp
-        AND a.created_at <= $2::timestamp
-        AND b.created_at >= $1::timestamp
-        AND b.created_at <= $2::timestamp
+        AND a.created_at >= TO_TIMESTAMP($1)
+        AND a.created_at <= TO_TIMESTAMP($2)
+        AND b.created_at >= TO_TIMESTAMP($1)
+        AND b.created_at <= TO_TIMESTAMP($2)
     GROUP BY
         tb.time_interval_start, tb.time_interval_end
 )
@@ -646,9 +643,9 @@ SELECT
     COALESCE(os.open_count, 0) AS open_count,
     COALESCE(rs.true_result_count, 0) AS true_result_count,
     CASE
-        WHEN COALESCE(os.open_count, 0) > 0 THEN COALESCE(rs.true_result_count, 0) / COALESCE(os.open_count, 0)
+        WHEN COALESCE(os.open_count, 0) > 0 THEN COALESCE(rs.true_result_count, 0)::float / COALESCE(os.open_count, 0)::float
         ELSE 0
-    END AS conversion,
+    END::float AS conversion,
     COALESCE(at.avg_time, 0) AS avg_time
 FROM
     TimeBucket tb
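Note: without the casts PostgreSQL applies integer division, so true_result_count / open_count truncates to 0 whenever conversion is below 100%; casting the operands (and the CASE result) to float keeps the fractional ratio, which is also why the generated Conversion field changes from int32 to float64 below. A minimal standalone check:

-- Hypothetical illustration: integer division truncates, float division does not.
SELECT 3 / 10        AS int_division,    -- 0
       3::float / 10 AS float_division;  -- 0.3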
@@ -664,21 +661,21 @@ FROM
 `
 
 type GeneralStatisticsParams struct {
-    Column1 time.Time `db:"column_1" json:"column_1"`
-    Column2 time.Time `db:"column_2" json:"column_2"`
-    QuizID  int64     `db:"quiz_id" json:"quiz_id"`
+    ToTimestamp   float64 `db:"to_timestamp" json:"to_timestamp"`
+    ToTimestamp_2 float64 `db:"to_timestamp_2" json:"to_timestamp_2"`
+    QuizID        int64   `db:"quiz_id" json:"quiz_id"`
 }
 
 type GeneralStatisticsRow struct {
     TimeBucket      time.Time `db:"time_bucket" json:"time_bucket"`
     OpenCount       int64     `db:"open_count" json:"open_count"`
     TrueResultCount int64     `db:"true_result_count" json:"true_result_count"`
-    Conversion      int32     `db:"conversion" json:"conversion"`
+    Conversion      float64   `db:"conversion" json:"conversion"`
     AvgTime         float64   `db:"avg_time" json:"avg_time"`
 }
 
 func (q *Queries) GeneralStatistics(ctx context.Context, arg GeneralStatisticsParams) ([]GeneralStatisticsRow, error) {
-    rows, err := q.db.QueryContext(ctx, generalStatistics, arg.Column1, arg.Column2, arg.QuizID)
+    rows, err := q.db.QueryContext(ctx, generalStatistics, arg.ToTimestamp, arg.ToTimestamp_2, arg.QuizID)
     if err != nil {
         return nil, err
     }
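Note: the ToTimestamp / ToTimestamp_2 field names are simply what sqlc derives from the positional TO_TIMESTAMP($1) / TO_TIMESTAMP($2) calls. If clearer names are wanted, sqlc's named parameters could be used instead; a hedged sketch (assuming sqlc.arg() is available in this project's sqlc configuration, which the diff does not show):

-- Hypothetical sketch, not from the repository: named parameters instead of positional ones,
-- which would make sqlc generate fields like FromTs / ToTs rather than ToTimestamp / ToTimestamp_2.
AND created_at >= TO_TIMESTAMP(sqlc.arg(from_ts))
AND created_at <= TO_TIMESTAMP(sqlc.arg(to_ts))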
@@ -4,7 +4,6 @@ import (
     "context"
     "database/sql"
     "penahub.gitlab.yandexcloud.net/backend/quiz/common.git/dal/sqlcgen"
-    "time"
 )
 
 type Deps struct {
@@ -67,7 +66,7 @@ type GeneralStatsResp struct {
     Open       map[int64]int64   // number of answers with start == true in the period between one split point and the next
     Result     map[int64]int64   // number of answers with result == true in the period between one split point and the next
     AvTime     map[int64]uint64  // average time between the answer with result == true and the one with start == true, within a session
-    Conversion map[int64]int32   // Result/Open for the period between one split point and the next
+    Conversion map[int64]float64 // Result/Open for the period between one split point and the next
 }
 
 func (r *StatisticsRepository) GetGeneralStatistics(ctx context.Context, req DeviceStatReq) (GeneralStatsResp, error) {
@@ -75,13 +74,13 @@ func (r *StatisticsRepository) GetGeneralStatistics(ctx context.Context, req Dev
         Open:       make(map[int64]int64),
         Result:     make(map[int64]int64),
         AvTime:     make(map[int64]uint64),
-        Conversion: make(map[int64]int32),
+        Conversion: make(map[int64]float64),
     }
     // todo: the query still needs testing; update it for the case when one true result answer corresponds to one true start
     allStatistics, err := r.queries.GeneralStatistics(ctx, sqlcgen.GeneralStatisticsParams{
         QuizID: req.QuizId,
-        Column1: time.Unix(int64(req.From), 0),
-        Column2: time.Unix(int64(req.To), 0),
+        ToTimestamp:   float64(req.From),
+        ToTimestamp_2: float64(req.To),
     })
     if err != nil {
         return resp, err