merge ref_excel to leadTarget

Pavel 2024-09-17 14:21:26 +03:00
commit 21941cee89
7 changed files with 151 additions and 243 deletions

@@ -75,6 +75,7 @@ type Options struct {
RedisHost string `env:"REDIS_HOST" default:"localhost:6379"`
RedisPassword string `env:"REDIS_PASSWORD" default:"admin"`
RedisDB uint64 `env:"REDIS_DB" default:"2"`
S3Prefix string `env:"S3_PREFIX"`
}
func New(ctx context.Context, opts interface{}, ver appInit.Version) (appInit.CommonApp, error) {
@@ -210,6 +211,7 @@ func New(ctx context.Context, opts interface{}, ver appInit.Version) (appInit.Co
ChDAL: chDal,
TelegramClient: tgClient,
RedisClient: redisClient,
S3Prefix: options.S3Prefix,
})
svc.Register(app)

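For orientation, a minimal sketch of how the new prefix is plumbed through: the env variable fills Options.S3Prefix, which is copied into Deps.S3Prefix and stored on the Service as s3Prefix. The snippet below is illustrative only; the package name and loader call are assumptions, and only the field names come from this diff.

// sketch: S3_PREFIX -> Options.S3Prefix -> Deps.S3Prefix -> Service.s3Prefix
options := Options{S3Prefix: os.Getenv("S3_PREFIX")} // the real code fills this via the `env:"S3_PREFIX"` tag
app := service.New(service.Deps{
    S3Prefix: options.S3Prefix, // later handed to tools.WriteDataToExcel as the base URL for answer files
})
_ = app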
@@ -1,3 +1,4 @@
version: "3"
services:
core:
hostname: squiz-core
@@ -15,5 +16,13 @@ services:
PUBLIC_KEY: $PEM_PUB_USERID
PRIVATE_KEY: $PEM_PRIV_USERID
REDIRECT_URL: 'https://quiz.pena.digital'
KAFKA_BROKERS: 10.8.0.6:9092
KAFKA_TOPIC: "mailnotifier"
GRPC_HOST: "0.0.0.0"
TRASH_LOG_HOST: "10.8.0.15:7113"
MODULE_LOGGER: "quiz-core-main"
CLICK_HOUSE_CRED: "clickhouse://10.8.0.15:9000/default?sslmode=disable"
S3_PREFIX: "https://s3.timeweb.cloud/3c580be9-cf31f296-d055-49cf-b39e-30c7959dc17b/squizimages/"
ports:
- 10.8.0.9:1488:1488
- 10.8.0.9:9000:9000

@@ -15,6 +15,7 @@ services:
AUTH_URL: 'http://10.8.0.6:59300/user'
PUBLIC_KEY: $PEM_PUB_USERID
PRIVATE_KEY: $PEM_PRIV_USERID
REDIRECT_URL: 'https://quiz.pena.digital'
KAFKA_BROKERS: 10.8.0.6:9092
KAFKA_TOPIC: "mailnotifier"
GRPC_HOST: "0.0.0.0"

@@ -9,6 +9,7 @@ import (
"penahub.gitlab.yandexcloud.net/backend/quiz/core/models"
"time"
"unicode/utf8"
"fmt"
)
type CreateQuizReq struct {
@@ -505,6 +506,7 @@ func (s *Service) TemplateCopy(ctx *fiber.Ctx) error {
qizID, err := s.dal.QuizRepo.TemplateCopy(ctx.Context(), accountID, req.Qid)
if err != nil {
fmt.Println("TEMPLERR", err)
return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error())
}

@@ -159,6 +159,11 @@ func (s *Service) ExportResultsToCSV(ctx *fiber.Ctx) error {
}
}
quiz, err := s.dal.QuizRepo.GetQuizById(ctx.Context(), accountID, quizID)
if err != nil {
return ctx.Status(fiber.StatusInternalServerError).SendString("failed to get quiz")
}
questions, err := s.dal.ResultRepo.GetQuestions(ctx.Context(), quizID)
if err != nil {
return ctx.Status(fiber.StatusInternalServerError).SendString("failed to get questions")
@@ -177,7 +182,7 @@ func (s *Service) ExportResultsToCSV(ctx *fiber.Ctx) error {
buffer := new(bytes.Buffer)
if err := tools.WriteDataToExcel(buffer, questions, answers); err != nil {
if err := tools.WriteDataToExcel(buffer, questions, answers, s.s3Prefix + quiz.Qid + "/"); err != nil {
return ctx.Status(fiber.StatusInternalServerError).SendString("failed to write data to Excel")
}

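To make the new argument concrete, here is a small runnable example of the prefix ExportResultsToCSV now hands to the Excel writer; the quiz id is made up, and the base URL is the S3_PREFIX value from docker-compose above.

package main

import "fmt"

func main() {
    s3Prefix := "https://s3.timeweb.cloud/3c580be9-cf31f296-d055-49cf-b39e-30c7959dc17b/squizimages/" // S3_PREFIX
    qid := "b2f1c9d4"                 // hypothetical quiz.Qid
    fmt.Println(s3Prefix + qid + "/") // the value passed as s3Prefix into WriteDataToExcel
}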
@@ -18,6 +18,7 @@ type Service struct {
chDAL *dal.ClickHouseDAL
telegramClient *telegram.TelegramClient
redisClient *redis.Client
s3Prefix string
}
type Deps struct {
@@ -28,6 +29,7 @@ type Deps struct {
ChDAL *dal.ClickHouseDAL
TelegramClient *telegram.TelegramClient
RedisClient *redis.Client
S3Prefix string
}
func New(deps Deps) *Service {
@@ -39,6 +41,7 @@ func New(deps Deps) *Service {
chDAL: deps.ChDAL,
telegramClient: deps.TelegramClient,
redisClient: deps.RedisClient,
s3Prefix: deps.S3Prefix,
}
}

@@ -1,6 +1,7 @@
package tools
import (
"encoding/json"
"fmt"
"github.com/xuri/excelize/v2"
_ "image/gif"
@@ -27,7 +28,7 @@ const (
bucketAnswers = "squizanswer"
)
func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []model.Answer) error {
func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []model.Answer, s3Prefix string) error {
file := excelize.NewFile()
sheet := "Sheet1"
@@ -40,6 +41,36 @@ func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []mo
return questions[i].Page < questions[j].Page
})
headers, mapQueRes := prepareHeaders(questions)
for col, header := range headers {
cell := ToAlphaString(col+1) + "1"
if err := file.SetCellValue(sheet, cell, header); err != nil {
return err
}
}
sort.Slice(answers, func(i, j int) bool {
return answers[i].QuestionId < answers[j].QuestionId
})
standart, results := categorizeAnswers(answers)
var wg sync.WaitGroup
row := 2
for session := range results {
wg.Add(1)
go func(session string, response []model.Answer, row int) {
defer wg.Done()
processSession(file, sheet, session, s3Prefix, response, results, questions, mapQueRes, headers, row)
}(session, standart[session], row)
row++
}
wg.Wait()
return file.Write(buffer)
}
func prepareHeaders(questions []model.Question) ([]string, map[uint64]string) {
headers := []string{"Данные респондента"}
mapQueRes := make(map[uint64]string)
@@ -52,27 +83,14 @@ func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []mo
}
}
}
headers = append(headers, "Результат")
return headers, mapQueRes
}
for col, header := range headers {
cell := ToAlphaString(col+1) + "1"
if err := file.SetCellValue(sheet, cell, header); err != nil {
return err
}
}
sort.Slice(answers, func(i, j int) bool {
return answers[i].QuestionId < answers[j].QuestionId
})
// map for storing respondents' regular answers
func categorizeAnswers(answers []model.Answer) (map[string][]model.Answer, map[string]model.Answer) {
standart := make(map[string][]model.Answer)
// map for storing respondent data
results := make(map[string]model.Answer)
// fill the maps with answers and respondent data
for _, answer := range answers {
if answer.Result {
results[answer.Session] = answer
@@ -80,104 +98,110 @@ func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []mo
standart[answer.Session] = append(standart[answer.Session], answer)
}
}
return standart, results
}
processSession := func(session string, response []model.Answer, row int) {
defer func() {
if r := recover(); r != nil {
fmt.Println("Recovered from panic:", r)
}
}()
if err := file.SetCellValue(sheet, "A"+strconv.Itoa(row), results[session].Content); err != nil {
fmt.Println(err.Error())
func processSession(file *excelize.File, sheet, session, s3Prefix string, response []model.Answer, results map[string]model.Answer, questions []model.Question, mapQueRes map[uint64]string, headers []string, row int) {
defer func() {
if r := recover(); r != nil {
fmt.Println("Recovered from panic:", r)
}
count := 2
for _, q := range questions {
if !q.Deleted && q.Type != model.TypeResult {
index := binarySearch(response, q.Id)
if index != -1 {
cell := ToAlphaString(count) + strconv.Itoa(row)
tipe := FileSearch(response[index].Content)
noAccept := make(map[string]struct{})
todoMap := make(map[string]string)
if tipe != "Text" && q.Type == model.TypeImages || q.Type == model.TypeVarImages {
urle := ExtractImageURL(response[index].Content)
urlData := strings.Split(urle, " ")
if len(urlData) == 1 {
u, err := url.Parse(urle)
if err == nil && u.Scheme != "" && u.Host != "" {
picture, err := downloadImage(urle)
if err != nil {
fmt.Println(err.Error())
}
file.SetColWidth(sheet, ToAlphaString(count), ToAlphaString(count), 50)
file.SetRowHeight(sheet, row, 150)
if err := file.AddPictureFromBytes(sheet, cell, picture); err != nil {
fmt.Println(err.Error())
}
noAccept[response[index].Content] = struct{}{}
} else {
todoMap[response[index].Content] = cell
}
} else {
todoMap[response[index].Content] = cell
}
} else if tipe != "Text" && q.Type == model.TypeFile {
urle := ExtractImageURL(response[index].Content)
display, tooltip := urle, urle
if err := file.SetCellValue(sheet, cell, response[index].Content); err != nil {
fmt.Println(err.Error())
}
if err := file.SetCellHyperLink(sheet, cell, urle, "External", excelize.HyperlinkOpts{
Display: &display,
Tooltip: &tooltip,
}); err != nil {
fmt.Println(err.Error())
}
noAccept[response[index].Content] = struct{}{}
} else {
todoMap[response[index].Content] = cell
}
for cnt, cel := range todoMap {
if _, ok := noAccept[cnt]; !ok {
if err := file.SetCellValue(sheet, cel, cnt); err != nil {
fmt.Println(err.Error())
}
}
}
}()
} else {
cell := ToAlphaString(count) + strconv.Itoa(row)
if err := file.SetCellValue(sheet, cell, "-"); err != nil {
fmt.Println(err.Error())
}
if err := file.SetCellValue(sheet, "A"+strconv.Itoa(row), results[session].Content); err != nil {
fmt.Println(err.Error())
}
count := 2
for _, q := range questions {
if !q.Deleted && q.Type != model.TypeResult {
cell := ToAlphaString(count) + strconv.Itoa(row)
index := binarySearch(response, q.Id)
if index != -1 {
handleAnswer(file, sheet, cell, s3Prefix, response[index], q, count, row)
} else {
if err := file.SetCellValue(sheet, cell, "-"); err != nil {
fmt.Println(err.Error())
}
count++
}
count++
}
}
cell := ToAlphaString(len(headers)) + strconv.Itoa(row)
if err := file.SetCellValue(sheet, cell, mapQueRes[results[session].QuestionId]); err != nil {
fmt.Println(err.Error())
}
}
func handleAnswer(file *excelize.File, sheet, cell, s3Prefix string, answer model.Answer, question model.Question, count, row int) {
tipe := FileSearch(answer.Content)
noAccept := make(map[string]struct{})
todoMap := make(map[string]string)
if tipe != "Text" && question.Type == model.TypeImages || question.Type == model.TypeVarImages {
handleImage(file, sheet, cell, answer.Content, count, row, noAccept, todoMap)
} else if question.Type == model.TypeFile {
handleFile(file, sheet, cell, answer.Content, s3Prefix, noAccept)
} else {
todoMap[answer.Content] = cell
}
for cnt, cel := range todoMap {
if _, ok := noAccept[cnt]; !ok {
if err := file.SetCellValue(sheet, cel, cnt); err != nil {
fmt.Println(err.Error())
}
}
cell := ToAlphaString(len(headers)) + strconv.Itoa(row)
if err := file.SetCellValue(sheet, cell, mapQueRes[results[session].QuestionId]); err != nil {
fmt.Println(err.Error())
}
}
func handleImage(file *excelize.File, sheet, cell, content string, count, row int, noAccept map[string]struct{}, todoMap map[string]string) {
var res model.ImageContent
err := json.Unmarshal([]byte(content), &res)
if err != nil {
res.Image = content
}
urle := ExtractImageURL(res.Image)
urlData := strings.Split(urle, " ")
if len(urlData) == 1 {
u, err := url.Parse(urle)
if err == nil && u.Scheme != "" && u.Host != "" {
picture, err := downloadImage(urle)
if err != nil {
fmt.Println(err.Error())
}
file.SetColWidth(sheet, ToAlphaString(count), ToAlphaString(count), 50)
file.SetRowHeight(sheet, row, 150)
if err := file.AddPictureFromBytes(sheet, cell, picture); err != nil {
fmt.Println(err.Error())
}
noAccept[content] = struct{}{}
} else {
todoMap[content] = cell
}
} else {
todoMap[content] = cell
}
}
func handleFile(file *excelize.File, sheet, cell, content, s3Prefix string, noAccept map[string]struct{}) {
urle := content
if urle != "" && !strings.HasPrefix(urle, "https") {
urle = s3Prefix + urle
}
row := 2
var wg sync.WaitGroup
for session, _ := range results {
wg.Add(1)
go func(session string, response []model.Answer, row int) {
defer wg.Done()
processSession(session, standart[session], row)
}(session, standart[session], row)
row++
}
wg.Wait()
fmt.Println("ORRRRR", urle, s3Prefix)
display, tooltip := urle, urle
if err := file.Write(buffer); err != nil {
return err
if err := file.SetCellValue(sheet, cell, urle); err != nil {
fmt.Println(err.Error())
}
return nil
if err := file.SetCellHyperLink(sheet, cell, urle, "External", excelize.HyperlinkOpts{
Display: &display,
Tooltip: &tooltip,
}); err != nil {
fmt.Println(err.Error())
}
noAccept[content] = struct{}{}
}
func binarySearch(answers []model.Answer, questionID uint64) int {
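Below is a self-contained sketch of the prefixing rule handleFile applies to file answers: bare object names get the per-quiz S3 prefix prepended, while links that already start with "https" are kept as-is. The file names and the prefix value are hypothetical; only the condition mirrors the code above.

package main

import (
    "fmt"
    "strings"
)

func main() {
    s3Prefix := "https://s3.timeweb.cloud/3c580be9-cf31f296-d055-49cf-b39e-30c7959dc17b/squizimages/b2f1c9d4/"
    for _, content := range []string{
        "cv.pdf",                           // bare object name stored in the answer
        "https://example.com/old-link.pdf", // already-absolute link, left untouched
    } {
        urle := content
        if urle != "" && !strings.HasPrefix(urle, "https") {
            urle = s3Prefix + urle
        }
        fmt.Println(urle)
    }
}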
@@ -283,141 +307,3 @@ func ExtractImageURL(htmlContent string) string {
}
return htmlContent
}
//func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []model.Answer) error {
// file := excelize.NewFile()
// sheet := "Sheet1"
//
// _, err := file.NewSheet(sheet)
// if err != nil {
// return err
// }
//
// sort.Slice(questions, func(i, j int) bool {
// return questions[i].Page > questions[j].Page
// })
//
// headers := []string{"Данные респондента"}
// mapQueRes := make(map[uint64]string)
//
// for _, q := range questions {
// if !q.Deleted {
// if q.Type == model.TypeResult {
// mapQueRes[q.Id] = q.Title + "\n" + q.Description
// } else {
// headers = append(headers, q.Title)
// }
// }
// }
//
// headers = append(headers, "Результат")
//
// // write the headers into the first row
// for col, header := range headers {
// cell := ToAlphaString(col+1) + "1"
// if err := file.SetCellValue(sheet, cell, header); err != nil {
// return err
// }
// }
//
// // map for storing respondents' regular answers
// standart := make(map[string][]model.Answer)
//
// // map for storing respondent data
// results := make(map[string]model.Answer)
//
// // fill the maps with answers and respondent data
// for _, answer := range answers {
// if answer.Result {
// results[answer.Session] = answer
// } else {
// standart[answer.Session] = append(standart[answer.Session], answer)
// }
// }
//
// // write the data to the file
// row := 2
// for session, _ := range results {
// response := standart[session]
// if err := file.SetCellValue(sheet, "A"+strconv.Itoa(row), results[session].Content); err != nil {
// return err
// }
// count := 2
// for _, q := range questions {
// if !q.Deleted && q.Type != model.TypeResult {
// sort.Slice(response, func(i, j int) bool {
// return response[i].QuestionId < response[j].QuestionId
// })
// index := binarySearch(response, q.Id)
// if index != -1 {
// cell := ToAlphaString(count) + strconv.Itoa(row)
// typeMap := FileSearch(response[index].Content)
// noAccept := make(map[string]struct{})
// todoMap := make(map[string]string)
// for _, tipe := range typeMap {
// if tipe != "Text" && q.Type == model.TypeImages || q.Type == model.TypeVarImages {
// urle := ExtractImageURL(response[index].Content)
// urlData := strings.Split(urle, " ")
// for _, k := range urlData {
// u, err := url.Parse(k)
// if err == nil && u.Scheme != "" && u.Host != "" {
// picture, err := downloadImage(k)
// if err != nil {
// return err
// }
// file.SetColWidth(sheet, ToAlphaString(count), ToAlphaString(count), 50)
// file.SetRowHeight(sheet, row, 150)
// if err := file.AddPictureFromBytes(sheet, cell, picture); err != nil {
// return err
// }
// noAccept[response[index].Content] = struct{}{}
// }
// }
// } else if tipe != "Text" && q.Type == model.TypeFile {
// urle := ExtractImageURL(response[index].Content)
// display, tooltip := urle, urle
// if err := file.SetCellValue(sheet, cell, response[index].Content); err != nil {
// return err
// }
// if err := file.SetCellHyperLink(sheet, cell, urle, "External", excelize.HyperlinkOpts{
// Display: &display,
// Tooltip: &tooltip,
// }); err != nil {
// return err
// }
// noAccept[response[index].Content] = struct{}{}
// } else {
// todoMap[response[index].Content] = cell
// }
// }
// for cnt, cel := range todoMap {
// if _, ok := noAccept[cnt]; !ok {
// if err := file.SetCellValue(sheet, cel, cnt); err != nil {
// return err
// }
// }
// }
//
// } else {
// cell := ToAlphaString(count) + strconv.Itoa(row)
// if err := file.SetCellValue(sheet, cell, "-"); err != nil {
// return err
// }
// }
// count++
// }
// }
// cell := ToAlphaString(len(headers)) + strconv.Itoa(row)
// if err := file.SetCellValue(sheet, cell, mapQueRes[results[session].QuestionId]); err != nil {
// return err
// }
// row++
// }
//
// // save the data to the buffer
// if err := file.Write(buffer); err != nil {
// return err
// }
//
// return nil
//}