diff --git a/.gitea/workflows/deployProd.yml b/.gitea/workflows/deployProd.yml index d2d57e8..11e2694 100644 --- a/.gitea/workflows/deployProd.yml +++ b/.gitea/workflows/deployProd.yml @@ -15,9 +15,18 @@ jobs: secrets: REGISTRY_USER: ${{ secrets.REGISTRY_USER }} REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} + ValidateConfig: + runs-on: [squizprod] + uses: https://gitea.pena/PenaDevops/actions.git/.gitea/workflows/validate_config.yml@v1.2.1 + needs: CreateImage + with: + runner: hubstaging + secrets: + REGISTRY_USER: ${{ secrets.REGISTRY_USER }} + REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} DeployService: runs-on: [squizprod] - needs: CreateImage + needs: ValidateConfig uses: https://gitea.pena/PenaDevops/actions.git/.gitea/workflows/deploy.yml@v1.1.4-p7 with: runner: hubprod diff --git a/.gitea/workflows/deployStaging.yml b/.gitea/workflows/deployStaging.yml index a9c04e4..7d4f337 100644 --- a/.gitea/workflows/deployStaging.yml +++ b/.gitea/workflows/deployStaging.yml @@ -8,16 +8,35 @@ on: jobs: CreateImage: - runs-on: [hubstaging] + runs-on: [squizstaging] uses: http://gitea.pena/PenaDevops/actions.git/.gitea/workflows/build-image.yml@v1.1.6-p with: runner: hubstaging secrets: REGISTRY_USER: ${{ secrets.REGISTRY_USER }} REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} - DeployService: - runs-on: [hubstaging] + ValidateConfig: + runs-on: [squizstaging] + uses: https://gitea.pena/PenaDevops/actions.git/.gitea/workflows/validate_config.yml@v1.2.1 needs: CreateImage + with: + runner: hubstaging + secrets: + REGISTRY_USER: ${{ secrets.REGISTRY_USER }} + REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} + MigrateDatabase: + runs-on: [squizstaging] + uses: http://gitea.pena/PenaDevops/actions.git/.gitea/workflows/migrate.yml@9263e22095fa40bcb36881ad81722d3049acd07f + needs: ValidateConfig + with: + runner: hubstaging + branch_name: ${{ github.ref_name }} + secrets: + REGISTRY_USER: ${{ secrets.REGISTRY_USER }} + REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }} + DeployService: + runs-on: [squizstaging] + needs: MigrateDatabase uses: http://gitea.pena/PenaDevops/actions.git/.gitea/workflows/deploy.yml@v1.1.4-p7 with: runner: hubstaging diff --git a/.gitea/workflows/lint.yml b/.gitea/workflows/lint.yml index 20e6298..a474401 100644 --- a/.gitea/workflows/lint.yml +++ b/.gitea/workflows/lint.yml @@ -8,7 +8,7 @@ on: jobs: Lint: - runs-on: [hubstaging] - uses: http://gitea.pena/PenaDevops/actions.git/.gitea/workflows/lint.yml@v1.1.0 + runs-on: [squizstaging] + uses: http://gitea.pena/PenaDevops/actions.git/.gitea/workflows/lint.yml@v1.1.2 with: - runner: hubstaging + runner: squizstaging diff --git a/.gitignore b/.gitignore index ca20f47..e503e41 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,5 @@ worker/worker storer/storer answerer/answerer core +/.tdlib/ +/unsetrecover.bolt diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..d36a5e0 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +#v1.0.0 + +- В статистику по воронкам добавлена статистика по формам контактов diff --git a/Dockerfile b/Dockerfile index 5e73187..4d51e98 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,13 @@ FROM gitea.pena/penadevops/container-images/golang:main as build WORKDIR /app -RUN apk add git COPY . . 
RUN go mod download -RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o core +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o core ./cmd/main.go +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o validator ./cmd/validator/main.go -FROM gitea.pena/penadevops/container-images/alpine:main as prod + +FROM gitea.pena/penadevops/container-images/alpine:main COPY --from=build /app/core . -COPY --from=build /app/schema /schema +COPY --from=build /app/validator . RUN apk add tzdata CMD ["/core"] diff --git a/Makefile b/Makefile deleted file mode 100644 index 845ad9d..0000000 --- a/Makefile +++ /dev/null @@ -1,64 +0,0 @@ -GOCMD=go -GOBUILD=$(GOCMD) build -GOCLEAN=$(GOCMD) clean -GOTEST=$(GOCMD) test -COMMIT?=$(shell git rev-parse --short HEAD) -BUILD_TIME?=$(shell date -u '+%Y-%m-%d_%H:%M:%S') -GOOS?=linux -GOARCH?=amd64 -DOCKER_REGISTRY=yourRegistryHost: -BINARY_NAME=$(shell basename `pwd`) -PORT?=1488 -SHELL = /bin/bash -LDFLAGS=-s -w -X github.com/skeris/appInit/version.Release=${shell git describe --tags --abbrev=0} \ - -X github.com/skeris/appInit/version.Commit=${COMMIT} -X github.com/skeris/appInit/version.BuildTime=${BUILD_TIME} - -all: compile run -clean: - rm -f $(BINARY_NAME) - rm -f ./worker/worker -compile: clean - CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} $(GOBUILD) -ldflags "${LDFLAGS}" -o ${BINARY_NAME} - CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} $(GOBUILD) -ldflags "${LDFLAGS}" -o ./worker/worker -run: compile - ./$(BINARY_NAME) -container: compile - docker build -t $(BINARY_NAME):${shell git describe --tags --abbrev=0} . - docker build -t $(BINARY_NAME)-worker:${shell git describe --tags --abbrev=0} ./worker -docker-push: container - docker tag $(BINARY_NAME) $(DOCKER_REGISTRY)/$(BINARY_NAME) - docker tag $(BINARY_NAME)-worker $(DOCKER_REGISTRY)/$(BINARY_NAME)-worker -pull: - docker pull $(DOCKER_REGISTRY)/$(BINARY_NAME) - docker pull $(DOCKER_REGISTRY)/$(BINARY_NAME)-worker - docker tag $(DOCKER_REGISTRY)/$(BINARY_NAME) $(BINARY_NAME) - docker tag $(DOCKER_REGISTRY)/$(BINARY_NAME)-worker $(BINARY_NAME)-worker -run-container: - docker run --rm --name squiz --network host -p 1488:1488 $(BINARY_NAME):latest -test: - $(GOTEST) -v -race ./... -commit-all: - git add -A - git commit -a - git push -push-new-release: commit-all - git tag ${shell git describe --tags --abbrev=0 | awk -F '.' '{print "v"$$1+1".0.0"}'} - git push --tags -push-new-feature: commit-all - git tag ${shell git describe --tags --abbrev=0 | awk -F '.' '{print $$1"."$$2+1".0"}'} - git push --tags -push-new-state: commit-all - git tag ${shell git describe --tags --abbrev=0 | awk -F '.' '{print $$1"."$$2"."$$3+1}'} - git push --tags -benchmark: - mv ./tests/new.txt ./tests/old.txt - go test -run=NONE -bench=. 
-benchmem ./tests -test.short > ./tests/new.txt - benchstat -html ./tests/old.txt ./tests/new.txt > benchmark.html - -# show full set of messages -test-in-docker-debug: - docker-compose -f deployments/test/docker-compose.yaml up --build --force-recreate - -# show only relevant messages -test-in-docker: - docker-compose -f deployments/test/docker-compose.yaml up --build --force-recreate --exit-code-from test-squiz 2>/dev/null | grep ^test-squiz diff --git a/Taskfile.dist.yml b/Taskfile.dist.yml new file mode 100644 index 0000000..efeeef7 --- /dev/null +++ b/Taskfile.dist.yml @@ -0,0 +1,8 @@ +tasks: + update-linter: + cmds: + - go get -u gitea.pena/PenaSide/linters-golang + lint: + cmds: + - task: update-linter + - cmd: golangci-lint run -v -c $(go list -f '{{"{{"}}.Dir{{"}}"}}' -m gitea.pena/PenaSide/linters-golang)/.golangci.yml diff --git a/openapi.yaml b/api/openapi.yaml similarity index 81% rename from openapi.yaml rename to api/openapi.yaml index d6fc8fb..13460cc 100644 --- a/openapi.yaml +++ b/api/openapi.yaml @@ -734,10 +734,22 @@ components: PipeLineStatsResp: type: object - additionalProperties: - type: array - items: - $ref: '#/components/schemas/Statistic' + properties: + PipelineStatistic: + type: object + additionalProperties: + type: array + items: + $ref: '#/components/schemas/Statistic' + description: Статистика по воронкам + + ContactFormStatistic: + type: object + additionalProperties: + type: integer + format: int64 + description: Количество ответов на вопрос формы контактов + description: Статистика форм контакта Answer: type: object properties: @@ -772,8 +784,72 @@ components: Deleted: type: boolean description: удален? - - + LeadTarget: + type: object + properties: + ID: + type: integer + format: int64 + AccountID: + type: string + Type: + type: string + QuizID: + type: integer + format: int32 + Target: + type: string + InviteLink: + type: string + Deleted: + type: boolean + CreatedAt: + type: string + TgAccountStatus: + type: string + enum: + - active + - inactive + - ban + TgAccount: + type: object + properties: + ID: + type: integer + format: int64 + ApiID: + type: integer + format: int32 + ApiHash: + type: string + PhoneNumber: + type: string + Password: + type: string + Status: + $ref: '#/components/schemas/TgAccountStatus' + Deleted: + type: boolean + CreatedAt: + type: string + format: date-time + AuthTgUserReq: + type: object + required: + - ApiID + - ApiHash + - PhoneNumber + - Password + properties: + ApiID: + type: integer + format: int32 + ApiHash: + type: string + PhoneNumber: + type: string + Password: + type: string paths: /liveness: get: @@ -1546,6 +1622,211 @@ paths: properties: message: type: string + /account/leadtarget: + post: + description: Метод для добавления целевых мест, куда будут посылаться заявки клиенту. + security: + - Bearer: [ ] + requestBody: + content: + 'application/json': + schema: + type: object + required: + - type + - quizID + - target + properties: + type: + type: string + description: Тип цели (mail, telegram, whatsapp). + enum: + - mail + - telegram + - whatsapp + quizID: + type: integer + format: int32 + description: ID квиза, к которому прикреплено это правило (приоритет). Передавать как 0, если правило не прикрепляется к квизу и является общим. + target: + type: string + description: Адресат, куда конкретно слать (для mail - email, для telegram - ID канала, передавать не нужно канал сам создаться, для whatsapp - номер телефона, наверное). 
+ name: + type: string + description: имя например для тг канала + responses: + '200': + description: ОК, парвило добавлено если тип mail о сразу добавляется если тг то будет добавленно в воркере если ватсап пока тодо +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/LeadTarget' + '400': + description: Bad request, ошибка в теле запроса + content: + application/json: + schema: + type: object + properties: + message: + type: string + '401': + description: Unauthorized, не авторизован + content: + application/json: + schema: + type: object + properties: + message: + type: string + '500': + description: Internal Srv Error + content: + application/json: + schema: + type: object + properties: + message: + type: string + put: + description: Метод для обновления целевого места, куда будут посылаться заявки клиенту. + security: + - Bearer: [ ] + requestBody: + content: + 'application/json': + schema: + type: object + required: + - id + - target + properties: + id: + type: integer + format: int64 + description: id этой самой цели, primary key. + target: + type: string + description: Адресат, куда конкретно слать (для mail - email, для telegram - ID чата, для whatsapp - номер телефона, наверное). + responses: + '200': + description: ОК, парвило обновлено + content: + application/json: + schema: + $ref: '#/components/schemas/LeadTarget' + '400': + description: Bad request, ошибка в теле запроса + content: + application/json: + schema: + type: object + properties: + message: + type: string + '401': + description: Unauthorized, не авторизован + content: + application/json: + schema: + type: object + properties: + message: + type: string + '404': + description: NotFound, такого не существует + content: + application/json: + schema: + type: object + properties: + message: + type: string + '500': + description: Internal Srv Error + content: + application/json: + schema: + type: object + properties: + message: + type: string + /account/leadtarget/{id}: + delete: + description: удаление правила по id, primary key + security: + - Bearer: [ ] + responses: + '200': + description: ОК, парвило удалено + '400': + description: Bad request, ошибка в теле запроса + content: + application/json: + schema: + type: object + properties: + message: + type: string + '500': + description: Internal Srv Error + content: + application/json: + schema: + type: object + properties: + message: + type: string + /account/leadtarget/{quizID}: + get: + description: получение правила по quizID, так же стоит передавать 0 если правило не было привязано к определенному квизу, возвращает массив + security: + - Bearer: [ ] + responses: + '200': + description: ОК, парвила получены + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/LeadTarget' + '400': + description: Bad request, ошибка в теле запроса + content: + application/json: + schema: + type: object + properties: + message: + type: string + '401': + description: Unauthorized, не авторизован + content: + application/json: + schema: + type: object + properties: + message: + type: string + '404': + description: NotFound, такого не существует + content: + application/json: + schema: + type: object + properties: + message: + type: string + '500': + description: Internal Srv Error + content: + application/json: + schema: + type: object + properties: + message: + type: string /statistics/:quizID/pipelines: get: description: получение статистики по векторам прохождения респондентами опроса с ветвлением и без, на выход отдается 
мапа с ключем последний вопрос и массивом точек "точек прохождения пользователем вопросов" грубо говоря массив с векторами как двигался респондент по возможным путям, в этом массиве question id и count прошедших сессий через него @@ -1569,3 +1850,94 @@ paths: description: Bad Request '500': description: Internal Server Error + /telegram/pool: + get: + description: возвращает все неудаленные аккаунты тг, активные, не активные и баны, тело пустое + responses: + '200': + description: успех + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/TgAccount' + /telegram/create: + post: + description: метод для автторизации сервера в тг аккаунте + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/AuthTgUserReq' + responses: + '200': + description: возвращает подпись, которая является идентификатором текущей сессии авторизации нужно для метода отправки кода + content: + application/json: + schema: + type: object + properties: + signature: + type: string + example: b7gh83j2k4l0 + '400': + description: неверные данные запроса + '409': + description: аккаунт уже существует и активен + '500': + description: внутренняя ошибка сервера + /telegram/{id}: + delete: + description: метод мягкого удаления аккаунта по id primary key + parameters: + - in: path + name: id + required: true + description: id primary key + schema: + type: integer + format: int64 + responses: + '200': + description: успех + '400': + description: неверные данные запроса + '500': + description: внутренняя ошибка сервера + + /telegram/setCode: + post: + description: метод для отправки кода авторизации, который пришел от телеграмма + requestBody: + content: + application/json: + schema: + type: object + required: + - code + - signature + properties: + code: + type: string + signature: + type: string + responses: + '200': + description: возвращает id primary авторизованного аккаунта + content: + application/json: + schema: + type: object + properties: + id: + type: integer + format: int64 + '204': + description: state канал закрылся до того как перешел в состояние логина или отказа от логина, возможно стоит другой статус указывать или как то побороть эту беду + '400': + description: неверные данные запроса + '403': + description: что то пошло не так связано с тг + '500': + description: внутренняя ошибка сервера \ No newline at end of file diff --git a/app/app.go b/app/app.go index a82404f..e69de29 100644 --- a/app/app.go +++ b/app/app.go @@ -1,239 +0,0 @@ -package app - -import ( - "context" - "errors" - "fmt" - "gitea.pena/PenaSide/common/log_mw" - "gitea.pena/PenaSide/common/privilege" - "gitea.pena/PenaSide/hlog" - "gitea.pena/PenaSide/trashlog/wrappers/zaptrashlog" - "gitea.pena/SQuiz/common/dal" - "gitea.pena/SQuiz/common/healthchecks" - "gitea.pena/SQuiz/common/middleware" - "gitea.pena/SQuiz/common/model" - "gitea.pena/SQuiz/core/brokers" - "gitea.pena/SQuiz/core/clients/auth" - "gitea.pena/SQuiz/core/initialize" - "gitea.pena/SQuiz/core/models" - "gitea.pena/SQuiz/core/server" - "gitea.pena/SQuiz/core/service" - "gitea.pena/SQuiz/core/tools" - "github.com/gofiber/fiber/v2" - "github.com/skeris/appInit" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - "time" -) - -type App struct { - logger *zap.Logger - err chan error -} - -func (a App) GetLogger() *zap.Logger { - return a.logger -} - -func (a App) GetErr() chan error { - return a.err -} - -var ( - errInvalidOptions = errors.New("invalid options") -) - -var zapOptions = []zap.Option{ - zap.AddCaller(), - 
zap.AddCallerSkip(2), - zap.AddStacktrace(zap.ErrorLevel), -} - -var _ appInit.CommonApp = (*App)(nil) - -type Options struct { - LoggerProdMode bool `env:"IS_PROD_LOG" default:"false"` - IsProd bool `env:"IS_PROD" default:"false"` - NumberPort string `env:"PORT" default:"1488"` - CrtFile string `env:"CRT" default:"server.crt"` - KeyFile string `env:"KEY" default:"server.key"` - PostgresCredentials string `env:"PG_CRED" default:"host=localhost port=35432 user=squiz password=Redalert2 dbname=squiz sslmode=disable"` - HubAdminUrl string `env:"HUB_ADMIN_URL" default:"http://localhost:8001/"` - ServiceName string `env:"SERVICE_NAME" default:"squiz"` - AuthServiceURL string `env:"AUTH_URL" default:"http://localhost:8000/"` - GrpcHost string `env:"GRPC_HOST" default:"localhost"` - GrpcPort string `env:"GRPC_PORT" default:"9000"` - KafkaBrokers string `env:"KAFKA_BROKERS" default:"localhost:9092"` - KafkaTopic string `env:"KAFKA_TOPIC" default:"test-topic"` - KafkaGroup string `env:"KAFKA_GROUP" default:"mailnotifier"` - TrashLogHost string `env:"TRASH_LOG_HOST" default:"localhost:7113"` - ModuleLogger string `env:"MODULE_LOGGER" default:"core-local"` - ClickHouseCred string `env:"CLICK_HOUSE_CRED" default:"tcp://10.8.0.15:9000/default?sslmode=disable"` - S3Prefix string `env:"S3_PREFIX"` - KafkaGroupGigaChat string `env:"KAFKA_GROUP_GIGA_CHAT" default:"gigachat"` - KafkaTopicGigaChat string `env:"KAFKA_TOPIC_GIGA_CHAT"` -} - -func New(ctx context.Context, opts interface{}, ver appInit.Version) (appInit.CommonApp, error) { - var ( - err, workerErr error - zapLogger *zap.Logger - errChan = make(chan error) - options Options - ok bool - ) - - if options, ok = opts.(Options); !ok { - return App{}, errInvalidOptions - } - - if options.LoggerProdMode { - zapLogger, err = zap.NewProduction(zapOptions...) - if err != nil { - return nil, err - } - } else { - zapLogger, err = zap.NewDevelopment(zapOptions...) 
- if err != nil { - return nil, err - } - } - - zapLogger = zapLogger.With( - zap.String("SvcCommit", ver.Commit), - zap.String("SvcVersion", ver.Release), - zap.String("SvcBuildTime", ver.BuildTime), - ) - - clickHouseLogger, err := zaptrashlog.NewCore(ctx, zap.InfoLevel, options.TrashLogHost, ver.Release, ver.Commit, time.Now().Unix()) - if err != nil { - panic(err) - } - - loggerForHlog := zapLogger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core { - return zapcore.NewTee(core, clickHouseLogger) - })) - - loggerHlog := hlog.New(loggerForHlog).Module(options.ModuleLogger) - loggerHlog.With(models.AllFields{}) - loggerHlog.Emit(InfoSvcStarted{}) - - authClient := auth.NewAuthClient(options.AuthServiceURL) - - pgdal, err := dal.New(ctx, options.PostgresCredentials, nil) - if err != nil { - fmt.Println("NEW", err) - return nil, err - } - - chDal, err := dal.NewClickHouseDAL(ctx, options.ClickHouseCred) - if err != nil { - fmt.Println("failed init clickhouse", err) - return nil, err - } - - kafkaClient, err := initialize.KafkaInit(ctx, initialize.KafkaDeps{ - KafkaGroup: options.KafkaGroup, - KafkaBrokers: options.KafkaBrokers, - KafkaTopic: options.KafkaTopic, - }) - if err != nil { - return nil, err - } - - kafkaClientGigaChat, err := initialize.KafkaInit(ctx, initialize.KafkaDeps{ - KafkaGroup: options.KafkaGroupGigaChat, - KafkaBrokers: options.KafkaBrokers, - KafkaTopic: options.KafkaTopicGigaChat, - }) - - producer := brokers.NewProducer(brokers.ProducerDeps{ - KafkaClient: kafkaClient, - Logger: zapLogger, - }) - - producerGigaChat := brokers.NewProducer(brokers.ProducerDeps{ - KafkaClient: kafkaClientGigaChat, - Logger: zapLogger, - }) - - clientData := privilege.Client{ - URL: options.HubAdminUrl, - ServiceName: options.ServiceName, - Privileges: model.Privileges, - } - fiberClient := &fiber.Client{} - privilegeController := privilege.NewPrivilege(clientData, fiberClient) - go tools.PublishPrivilege(privilegeController, 10, 5*time.Minute) - - // todo подумать над реализацией всего а то пока мне кажется что немного каша получается такой предикт что через некоторое время - // сложно будет разобраться что есть где - grpcControllers := initialize.InitRpcControllers(pgdal) - grpc, err := server.NewGRPC(zapLogger) - if err != nil { - fmt.Println("error:", err) - panic("err init grpc server") - } - grpc.Register(grpcControllers) - go grpc.Run(server.DepsGrpcRun{ - Host: options.GrpcHost, - Port: options.GrpcPort, - }) - - app := fiber.New() - app.Use(middleware.JWTAuth()) - app.Use(log_mw.ContextLogger(loggerHlog)) - app.Get("/liveness", healthchecks.Liveness) - app.Get("/readiness", healthchecks.Readiness(&workerErr)) //todo parametrized readiness. 
should discuss ready reason - - svc := service.New(service.Deps{ - Dal: pgdal, - AuthClient: authClient, - Producer: producer, - ServiceName: options.ServiceName, - ChDAL: chDal, - S3Prefix: options.S3Prefix, - ProducerGigaChat: producerGigaChat, - }) - - svc.Register(app) - - loggerHlog.Emit(InfoSvcReady{}) - - go func() { - defer func() { - if pgdal != nil { - pgdal.Close() - } - if chDal != nil { - chDal.Close(ctx) - } - err := grpc.Stop(ctx) - err = app.Shutdown() - loggerHlog.Emit(InfoSvcShutdown{Signal: err.Error()}) - }() - - if options.IsProd { - if err := app.ListenTLS(fmt.Sprintf(":%s", options.NumberPort), options.CrtFile, options.KeyFile); err != nil { - loggerHlog.Emit(ErrorCanNotServe{ - Err: err, - }) - errChan <- err - } - } else { - if err := app.Listen(fmt.Sprintf(":%s", options.NumberPort)); err != nil { - loggerHlog.Emit(ErrorCanNotServe{ - Err: err, - }) - errChan <- err - } - } - - errChan <- nil - }() - // todo implement helper func for service app type. such as server preparing, logger preparing, healthchecks and etc. - return &App{ - logger: zapLogger, - err: errChan, - }, err -} diff --git a/benchmarks/pagination_test.go b/benchmarks/pagination_test.go new file mode 100644 index 0000000..96fb4fc --- /dev/null +++ b/benchmarks/pagination_test.go @@ -0,0 +1,329 @@ +package benchmarks + +import ( + "database/sql" + "log" + "testing" + + _ "github.com/lib/pq" +) + +const ( + accountID = "64f2cd7a7047f28fdabf6d9e" + connStr = "host=localhost port=35432 user=squiz password=Redalert2 dbname=squiz sslmode=disable" + queryTotal = ` + WITH user_data AS ( + SELECT AmoID FROM accountsAmo WHERE accountsAmo.AccountID = $1 AND accountsAmo.Deleted = false + ) + SELECT f.*, COUNT(*) OVER() as total_count + FROM fields f JOIN user_data u ON f.AccountID = u.AmoID + WHERE f.Deleted = false + ORDER BY f.ID OFFSET ($2 - 1) * $3 LIMIT $3; + ` + queryCount = ` + WITH user_data AS ( + SELECT AmoID FROM accountsAmo WHERE accountsAmo.AccountID = $1 AND accountsAmo.Deleted = false + ) + SELECT COUNT(*) + FROM fields f JOIN user_data u ON f.AccountID = u.AmoID + WHERE f.Deleted = false; + ` + queryData = ` + WITH user_data AS ( + SELECT AmoID FROM accountsAmo WHERE accountsAmo.AccountID = $1 AND accountsAmo.Deleted = false + ) + SELECT f.* + FROM fields f JOIN user_data u ON f.AccountID = u.AmoID + WHERE f.Deleted = false + ORDER BY f.ID OFFSET ($2 - 1) * $3 LIMIT $3; + ` +) + +type GetFieldsWithPaginationRow struct { + ID int64 `db:"id" json:"id"` + Amoid int32 `db:"amoid" json:"amoid"` + Code string `db:"code" json:"code"` + Accountid int32 `db:"accountid" json:"accountid"` + Name string `db:"name" json:"name"` + Entity interface{} `db:"entity" json:"entity"` + Type interface{} `db:"type" json:"type"` + Deleted bool `db:"deleted" json:"deleted"` + Createdat sql.NullTime `db:"createdat" json:"createdat"` + TotalCount int64 `db:"total_count" json:"total_count"` +} + +func initDB() *sql.DB { + db, err := sql.Open("postgres", connStr) + if err != nil { + log.Fatal(err) + } + return db +} + +// Все получаем в одном запросе не аллоцируя при этом массив +func BenchmarkAllOne(b *testing.B) { + db := initDB() + defer db.Close() + for i := 0; i < b.N; i++ { + page := 1 + size := 25 + rows, err := db.Query(queryTotal, accountID, page, size) + if err != nil { + b.Fatal(err) + } + defer rows.Close() + + var results []GetFieldsWithPaginationRow + for rows.Next() { + var row GetFieldsWithPaginationRow + if err := rows.Scan( + &row.ID, + &row.Amoid, + &row.Code, + &row.Accountid, + &row.Name, + 
&row.Entity, + &row.Type, + &row.Deleted, + &row.Createdat, + &row.TotalCount, + ); err != nil { + b.Fatal(err) + } + results = append(results, row) + } + + if err := rows.Err(); err != nil { + b.Fatal(err) + } + } +} + +// Все получаем в одном запросе аллоцируя при этом массив +func BenchmarkAllOnePreAllocation(b *testing.B) { + db := initDB() + defer db.Close() + for i := 0; i < b.N; i++ { + page := 1 + size := 25 + rows, err := db.Query(queryTotal, accountID, page, size) + if err != nil { + b.Fatal(err) + } + defer rows.Close() + + results := make([]GetFieldsWithPaginationRow, size) + for rows.Next() { + var row GetFieldsWithPaginationRow + if err := rows.Scan( + &row.ID, + &row.Amoid, + &row.Code, + &row.Accountid, + &row.Name, + &row.Entity, + &row.Type, + &row.Deleted, + &row.Createdat, + &row.TotalCount, + ); err != nil { + b.Fatal(err) + } + results = append(results, row) + } + + if err := rows.Err(); err != nil { + b.Fatal(err) + } + } +} + +// Считается сначала количество потом получаются данные длину и емкость массиву не меняем +func BenchmarkCountThenGetData(b *testing.B) { + db := initDB() + defer db.Close() + for i := 0; i < b.N; i++ { + page := 1 + size := 25 + + row := db.QueryRow(queryCount, accountID) + var totalCount int + if err := row.Scan(&totalCount); err != nil { + b.Fatal(err) + } + var results []GetFieldsWithPaginationRow + rows, err := db.Query(queryData, accountID, page, size) + if err != nil { + b.Fatal(err) + } + defer rows.Close() + + for rows.Next() { + var row GetFieldsWithPaginationRow + if err := rows.Scan( + &row.ID, + &row.Amoid, + &row.Code, + &row.Accountid, + &row.Name, + &row.Entity, + &row.Type, + &row.Deleted, + &row.Createdat, + ); err != nil { + b.Fatal(err) + } + results = append(results, row) + } + + if err := rows.Err(); err != nil { + b.Fatal(err) + } + } +} + +// Параллельное вычисление данных и общего количество при этом длина слайса = size +func BenchmarkParallel(b *testing.B) { + db := initDB() + defer db.Close() + + for i := 0; i < b.N; i++ { + page := 1 + size := 25 + results := make([]GetFieldsWithPaginationRow, size) + channel := make(chan error, 2) + + go func() { + row := db.QueryRow(queryCount, accountID) + var totalCount int + channel <- row.Scan(&totalCount) + }() + + go func() { + rows, err := db.Query(queryData, accountID, page, size) + if err != nil { + channel <- err + return + } + defer rows.Close() + + index := 0 + for rows.Next() { + if err := rows.Scan( + &results[index].ID, + &results[index].Amoid, + &results[index].Code, + &results[index].Accountid, + &results[index].Name, + &results[index].Entity, + &results[index].Type, + &results[index].Deleted, + &results[index].Createdat, + ); err != nil { + channel <- err + return + } + index++ + } + channel <- rows.Err() + }() + + for i := 0; i < 2; i++ { + if err := <-channel; err != nil { + b.Fatal(err) + } + } + } +} + +// Считается сначала количество потом получаются данные создаем слайс через маке указывая ему длину начальную кап = лен +func BenchmarkWithPreAllocation(b *testing.B) { + db := initDB() + defer db.Close() + + for i := 0; i < b.N; i++ { + page := 1 + size := 25 + results := make([]GetFieldsWithPaginationRow, size) + + row := db.QueryRow(queryCount, accountID) + var totalCount int + if err := row.Scan(&totalCount); err != nil { + b.Fatal(err) + } + rows, err := db.Query(queryData, accountID, page, size) + if err != nil { + b.Fatal(err) + } + defer rows.Close() + + index := 0 + for rows.Next() { + if err := rows.Scan( + &results[index].ID, + 
&results[index].Amoid, + &results[index].Code, + &results[index].Accountid, + &results[index].Name, + &results[index].Entity, + &results[index].Type, + &results[index].Deleted, + &results[index].Createdat, + ); err != nil { + b.Fatal(err) + } + index++ + } + + if err := rows.Err(); err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkWithPreAllocationAndMonitoringTotalCount(b *testing.B) { + db := initDB() + defer db.Close() + + for i := 0; i < b.N; i++ { + page := 1 + size := 50 + + row := db.QueryRow(queryCount, accountID) + var totalCount int + if err := row.Scan(&totalCount); err != nil { + b.Fatal(err) + } + if totalCount < size { + size = totalCount + } + results := make([]GetFieldsWithPaginationRow, size) + rows, err := db.Query(queryData, accountID, page, size) + if err != nil { + b.Fatal(err) + } + defer rows.Close() + + index := 0 + for rows.Next() { + if err := rows.Scan( + &results[index].ID, + &results[index].Amoid, + &results[index].Code, + &results[index].Accountid, + &results[index].Name, + &results[index].Entity, + &results[index].Type, + &results[index].Deleted, + &results[index].Createdat, + ); err != nil { + b.Fatal(err) + } + index++ + } + + if err := rows.Err(); err != nil { + b.Fatal(err) + } + } +} diff --git a/cmd/main.go b/cmd/main.go new file mode 100644 index 0000000..ef2dd8a --- /dev/null +++ b/cmd/main.go @@ -0,0 +1,35 @@ +package main + +import ( + "context" + "go.uber.org/zap" + "log" + "os" + "os/signal" + "gitea.pena/SQuiz/core/internal/app" + "gitea.pena/SQuiz/core/internal/initialize" + "syscall" +) + +var ( + commit string = os.Getenv("COMMIT") + buildTime string = os.Getenv("BUILD_TIME") + version string = os.Getenv("VERSION") +) + +func main() { + config, err := initialize.LoadConfig() + if err != nil { + log.Fatal("Failed to load config", zap.Error(err)) + } + + ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + + if err = app.Run(ctx, *config, app.Build{ + Commit: commit, + Version: version, + }); err != nil { + log.Fatal("App exited with error", zap.Error(err)) + } +} diff --git a/cmd/validator/main.go b/cmd/validator/main.go new file mode 100644 index 0000000..6c9605e --- /dev/null +++ b/cmd/validator/main.go @@ -0,0 +1,85 @@ +package main + +import ( + "context" + "errors" + "gitea.pena/PenaSide/common/validate" + "gitea.pena/SQuiz/common/dal" + "gitea.pena/SQuiz/core/internal/initialize" + "github.com/caarlos0/env/v8" + "log" +) + +func main() { + cfg, err := loadConfig() + if err != nil { + log.Fatalf("error loading config: %v", err) + } + + err = validateNotEmpty(cfg) + if err != nil { + log.Fatalf("error validating config: %v", err) + } + + _, err = dal.New(context.TODO(), cfg.PostgresURL, nil) + if err != nil { + log.Fatalf("error connecting to database postgres: %v", err) + } + + _, err = dal.NewClickHouseDAL(context.TODO(), cfg.ClickhouseURL) + if err != nil { + log.Fatalf("error connecting to database clickhouse: %v", err) + } + + err = validate.ValidateKafka([]string{cfg.KafkaBrokers}, cfg.KafkaTopicNotifyer) + if err != nil { + log.Fatalf("error validating kafka: %v", err) + } + + err = validate.ValidateRedis(cfg.RedisHost, cfg.RedisPassword, int(cfg.RedisDB)) + if err != nil { + log.Fatalf("error validating redis: %v", err) + } + return +} + +func loadConfig() (initialize.Config, error) { + var cfg initialize.Config + + if err := env.Parse(&cfg); err != nil { + return cfg, err + } + return cfg, nil +} + +func validateNotEmpty(cfg initialize.Config) error { + if cfg.ClientHttpURL 
== "" { + return errors.New("client http url dont be empty") + } + + if cfg.GrpcURL == "" { + return errors.New("grpc url dont be empty") + } + + if cfg.HubadminMicroserviceURL == "" { + return errors.New("hubadmin microservice url dont be empty") + } + + if cfg.AuthMicroserviceURL == "" { + return errors.New("auth microservice url dont be empty") + } + + if cfg.TrashLogHost == "" { + return errors.New("trash log host dont be empty") + } + + if cfg.S3Prefix == "" { + return errors.New("s3 prefix dont be empty") + } + + if cfg.ServiceName == "" { + return errors.New("service name dont be empty") + } + + return nil +} diff --git a/deployments/staging/config.env b/deployments/staging/config.env new file mode 100644 index 0000000..e45cb34 --- /dev/null +++ b/deployments/staging/config.env @@ -0,0 +1,22 @@ +IS_PROD_LOG="false" +IS_PROD="false" +CLIENT_HTTP_URL="0.0.0.0:1488" +GRPC_URL="0.0.0.0:9000" +POSTGRES_URL="host=10.7.0.10 port=5432 user=squiz password=Redalert2 dbname=squiz sslmode=disable" +CLICKHOUSE_URL="clickhouse://10.7.0.5:9000/default?sslmode=disable" +HUBADMIN_MICROSERVICE_URL="http://10.7.0.6:59303" +AUTH_MICROSERVICE_URL="http://10.7.0.6:59300/user" +KAFKA_BROKERS="10.7.0.6:9092" +KAFKA_TOPIC="mailnotifier" +KAFKA_GROUP="mailnotifier" +TRASH_LOG_HOST="10.7.0.5:7113" +S3_PREFIX="3c580be9-cf31f296-d055-49cf-b39e-30c7959dc17b" +REDIS_HOST="10.7.0.6:6379" +REDIS_PASSWORD="Redalert2" +REDIS_DB=2 +PUBLIC_ACCESS_SECRET_KEY="-----BEGIN PUBLIC KEY----- +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCLW1tlHyKC9AG0hGpmkksET2DE +r7ojSPemxFWAgFgcPJWQ7x3uNbsdJ3bIZFoA/FClaWKMCZmjnH9tv0bKZtY/CDhM +ZEyHpMruRSn6IKrxjtQZWy4uv/w6MzUeyBYG0OvNCiYpdvz5SkAGAUHD5ZNFqn2w +KKFD0I2Dr59BFVSGJwIDAQAB +-----END PUBLIC KEY-----" diff --git a/deployments/staging/docker-compose.yaml b/deployments/staging/docker-compose.yaml index e0928cd..6a0436f 100644 --- a/deployments/staging/docker-compose.yaml +++ b/deployments/staging/docker-compose.yaml @@ -1,27 +1,14 @@ -version: "3" services: core: - hostname: squiz-core - container_name: squiz-core - image: $CI_REGISTRY_IMAGE/staging-core:$CI_COMMIT_REF_SLUG.$CI_PIPELINE_ID + hostname: squiz + container_name: squiz tty: true - environment: - HUB_ADMIN_URL: 'http://10.8.0.6:59303' - IS_PROD_LOG: 'false' - IS_PROD: 'false' - PORT: 1488 - PUBLIC_ACCESS_SECRET_KEY: $JWT_PUBLIC_KEY - PG_CRED: 'host=10.8.0.5 port=5433 user=squiz password=Redalert2 dbname=squiz sslmode=disable' - AUTH_URL: 'http://10.8.0.6:59300/user' - PUBLIC_KEY: $PEM_PUB_USERID - PRIVATE_KEY: $PEM_PRIV_USERID - REDIRECT_URL: 'https://quiz.pena.digital' - KAFKA_BROKERS: 10.8.0.6:9092 - KAFKA_TOPIC: "mailnotifier" - GRPC_HOST: "0.0.0.0" - TRASH_LOG_HOST: "10.8.0.15:7113" - MODULE_LOGGER: "quiz-core-staging" - CLICK_HOUSE_CRED: "clickhouse://10.8.0.15:9000/default?sslmode=disable" + image: gitea.pena/squiz/core/staging:$GITHUB_RUN_NUMBER + labels: + com.pena.allowed_headers: content-type,authorization,device,browser,os,devicetype,response-type + env_file: config.env ports: - - 10.8.0.5:1488:1488 - - 10.8.0.5:9000:9000 + - 10.7.0.10:1488:1488 + - 10.7.0.10:9000:9000 + - 10.7.0.10:2346:2345 + command: dlv --listen=:2345 --continue --headless=true --log=true --log-output=debugger,debuglineerr,gdbwire,lldbout,rpc --accept-multiclient --api-version=2 exec /core diff --git a/deployments/staging/validate_config.yml b/deployments/staging/validate_config.yml new file mode 100644 index 0000000..682bbeb --- /dev/null +++ b/deployments/staging/validate_config.yml @@ -0,0 +1,6 @@ +services: + validator: + tty: true + command: 
./validator + image: gitea.pena/squiz/core/staging:$GITHUB_RUN_NUMBER + env_file: config.env diff --git a/go.mod b/go.mod index da30d12..770fca9 100644 --- a/go.mod +++ b/go.mod @@ -38,6 +38,7 @@ require ( github.com/go-redis/redis/v8 v8.11.5 // indirect github.com/goccy/go-json v0.10.5 // indirect github.com/golang/protobuf v1.5.4 // indirect + github.com/golang/snappy v0.0.1 // indirect github.com/google/uuid v1.6.0 // indirect github.com/klauspost/compress v1.18.0 // indirect github.com/klauspost/cpuid/v2 v2.2.10 // indirect diff --git a/go.sum b/go.sum index 0fa200c..74aa30a 100644 --- a/go.sum +++ b/go.sum @@ -35,7 +35,6 @@ github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58 h1:F1EaeKL/ta07PY/k9Os/UFtwERei2/XzGemhpGnBKNg= github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80= github.com/cloudflare/golz4 v0.0.0-20240916140612-caecf3c00c06 h1:6aQNgrBLzcUBaJHQjMk4X+jDo9rQtu5E0XNLhRV6pOk= github.com/cloudflare/golz4 v0.0.0-20240916140612-caecf3c00c06/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80= @@ -59,6 +58,10 @@ github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= @@ -84,7 +87,10 @@ github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= @@ -139,6 +145,8 @@ github.com/minio/minio-go/v7 v7.0.91 h1:tWLZnEfo3OZl5PoXQwcwTAPNNrjyWwOh6cbZitW5 github.com/minio/minio-go/v7 v7.0.91/go.mod h1:uvMUcGrpgeSAAI6+sD3818508nUyMULw94j2Nxku/Go= 
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe h1:iruDEfMl2E6fbMZ9s0scYfZQ84/6SPL6zC8ACM2oIL0= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= @@ -251,8 +259,8 @@ golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7 golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/image v0.14.0 h1:tNgSxAFe3jC4uYqvZdTr84SZoM1KfwdC9SKIFrLjFn4= -golang.org/x/image v0.14.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= +golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ= +golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= @@ -260,6 +268,7 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -288,9 +297,12 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= @@ -314,6 +326,7 @@ golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/initialize/rpc_controllers.go b/initialize/rpc_controllers.go index b7f40e5..e69de29 100644 --- a/initialize/rpc_controllers.go +++ b/initialize/rpc_controllers.go @@ -1,16 +0,0 @@ -package initialize - -import ( - "gitea.pena/SQuiz/common/dal" - "gitea.pena/SQuiz/core/rpc_service" -) - -type RpcRegister struct { - MailNotify *rpc_service.MailNotify -} - -func InitRpcControllers(dal *dal.DAL) *RpcRegister { - return &RpcRegister{ - MailNotify: rpc_service.NewMailNotify(dal), - } -} diff --git a/internal/app/app.go b/internal/app/app.go new file mode 100644 index 0000000..ed4f786 --- /dev/null +++ b/internal/app/app.go @@ -0,0 +1,185 @@ +package app + +import ( + "context" + "errors" + "gitea.pena/PenaSide/common/privilege" + "gitea.pena/PenaSide/hlog" + "gitea.pena/PenaSide/trashlog/wrappers/zaptrashlog" + "gitea.pena/SQuiz/common/model" + "gitea.pena/SQuiz/core/internal/brokers" + "gitea.pena/SQuiz/core/internal/initialize" + "gitea.pena/SQuiz/core/internal/models" + server "gitea.pena/SQuiz/core/internal/server/grpc" + "gitea.pena/SQuiz/core/internal/server/http" + "gitea.pena/SQuiz/core/internal/tools" + "gitea.pena/SQuiz/core/internal/workers" + "gitea.pena/SQuiz/core/pkg/closer" + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/log" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" + "time" +) + +type Build struct { + Commit string + Version string +} + +var zapOptions = []zap.Option{ + zap.AddCaller(), + zap.AddCallerSkip(2), + zap.AddStacktrace(zap.ErrorLevel), +} + +func Run(ctx context.Context, cfg initialize.Config, build Build) error { + var ( + err error + zapLogger *zap.Logger + ) + + defer func() { + if r := recover(); r != nil { + log.Error("Recovered from a panic", zap.Any("error", r)) + } + }() + + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + if cfg.LoggerProdMode { + zapLogger, err = zap.NewProduction(zapOptions...) + if err != nil { + return err + } + } else { + zapLogger, err = zap.NewDevelopment(zapOptions...) 
+ if err != nil { + return err + } + } + + zapLogger = zapLogger.With( + zap.String("SvcCommit", build.Commit), + zap.String("SvcVersion", build.Version), + zap.String("SvcBuildTime", time.Now().String()), + ) + + clickHouseLogger, err := zaptrashlog.NewCore(ctx, zap.InfoLevel, cfg.TrashLogHost, build.Version, build.Commit, time.Now().Unix()) + if err != nil { + panic(err) + } + + loggerForHlog := zapLogger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core { + return zapcore.NewTee(core, clickHouseLogger) + })) + + loggerHlog := hlog.New(loggerForHlog).Module(initialize.ModuleLogger) + loggerHlog.With(models.AllFields{}) + loggerHlog.Emit(InfoSvcStarted{}) + + shutdownGroup := closer.NewCloserGroup() + + dalS, err := initialize.NewDALs(ctx, cfg) + if err != nil { + zapLogger.Error("Error initializing dals", zap.Error(err)) + return err + } + + kafkaClient, err := initialize.KafkaInit(ctx, initialize.KafkaDeps{ + KafkaGroup: cfg.KafkaGroup, + KafkaBrokers: cfg.KafkaBrokers, + KafkaTopic: cfg.KafkaTopicNotifyer, + }) + if err != nil { + zapLogger.Error("Error initializing kafka", zap.Error(err)) + return err + } + + producer := brokers.NewProducer(brokers.ProducerDeps{ + KafkaClient: kafkaClient, + Logger: zapLogger, + }) + + redisClient, err := initialize.Redis(ctx, cfg) + if err != nil { + zapLogger.Error("Error initializing redis", zap.Error(err)) + return err + } + + go tools.PublishPrivilege(privilege.NewPrivilege(privilege.Client{ + URL: cfg.HubadminMicroserviceURL, + ServiceName: cfg.ServiceName, + Privileges: model.Privileges, + }, &fiber.Client{}), 10, 5*time.Minute) + + clients, err := initialize.NewClients(ctx, cfg, dalS.PgDAL) + if err != nil { + zapLogger.Error("Error initializing clients", zap.Error(err)) + return err + } + + tgWC := workers.NewTgListenerWC(workers.Deps{ + BotID: int64(6712573453), // todo убрать + Redis: redisClient, + Dal: dalS.PgDAL, + //TgClient: clients.TgClient, + }) + + go tgWC.Start(ctx) + + controllers := initialize.NewControllers(initialize.ControllerDeps{ + Clients: clients, + DALs: dalS, + Config: cfg, + Producer: producer, + RedisClient: redisClient, + }) + + grpc, err := server.NewGRPC(zapLogger) + if err != nil { + zapLogger.Error("Error initializing grpc", zap.Error(err)) + return err + } + grpc.Register(controllers.GRpcControllers) + + srv := http.NewServer(http.ServerConfig{ + Logger: zapLogger, + Controllers: []http.Controller{controllers.HttpControllers.Account, controllers.HttpControllers.Telegram, controllers.HttpControllers.Result, + controllers.HttpControllers.Question, controllers.HttpControllers.Quiz, controllers.HttpControllers.Statistic}, + Hlogger: loggerHlog, + }) + + go func() { + if err := srv.Start(cfg.ClientHttpURL); err != nil { + zapLogger.Error("HTTP server startup error", zap.Error(err)) + cancel() + } + }() + + go grpc.Run(cfg.GrpcURL) + + srv.ListRoutes() + + shutdownGroup.Add(closer.CloserFunc(srv.Shutdown)) + shutdownGroup.Add(closer.CloserFunc(grpc.Stop)) + shutdownGroup.Add(closer.CloserFunc(dalS.PgDAL.Close)) + shutdownGroup.Add(closer.CloserFunc(dalS.ChDAL.Close)) + + <-ctx.Done() + + timeoutCtx, timeoutCancel := context.WithTimeout(context.Background(), 10*time.Second) + defer timeoutCancel() + if err := shutdownGroup.Call(timeoutCtx); err != nil { + if errors.Is(err, context.DeadlineExceeded) { + zapLogger.Error("Shutdown timed out", zap.Error(err)) + } else { + zapLogger.Error("Failed to shutdown services gracefully", zap.Error(err)) + } + return err + } + + zapLogger.Info("Application has stopped") + 
return nil +} diff --git a/app/logrecords.go b/internal/app/logrecords.go similarity index 100% rename from app/logrecords.go rename to internal/app/logrecords.go diff --git a/brokers/producer.go b/internal/brokers/producer.go similarity index 100% rename from brokers/producer.go rename to internal/brokers/producer.go diff --git a/clients/auth/auth.go b/internal/clients/auth/auth.go similarity index 100% rename from clients/auth/auth.go rename to internal/clients/auth/auth.go diff --git a/internal/clients/telegram/tg.go b/internal/clients/telegram/tg.go new file mode 100644 index 0000000..9d62e29 --- /dev/null +++ b/internal/clients/telegram/tg.go @@ -0,0 +1,246 @@ +package telegram +// +// import ( +// "context" +// "errors" +// "fmt" +// "path/filepath" +// "penahub.gitlab.yandexcloud.net/backend/quiz/common.git/dal" +// "penahub.gitlab.yandexcloud.net/backend/quiz/common.git/model" +// "penahub.gitlab.yandexcloud.net/backend/quiz/common.git/pj_errors" +// "penahub.gitlab.yandexcloud.net/backend/tdlib/client" +// "sync" +// "time" +// ) +// +// type TelegramClient struct { +// repo *dal.DAL +// TgClients map[int64]*client.Client +// WaitingClients map[string]WaitingClient +// mu sync.Mutex +// } +// +// type WaitingClient struct { +// PreviousReq AuthTgUserReq +// Authorizer *client.ClientAuthorizer +// } +// +// func NewTelegramClient(ctx context.Context, repo *dal.DAL) (*TelegramClient, error) { +// tgClient := &TelegramClient{ +// repo: repo, +// TgClients: make(map[int64]*client.Client), +// WaitingClients: make(map[string]WaitingClient), +// } +// +// allTgAccounts, err := repo.TgRepo.GetAllTgAccounts(ctx) +// if err != nil { +// if errors.Is(err, pj_errors.ErrNotFound) { +// return tgClient, nil +// } +// return nil, err +// } +// +// for _, account := range allTgAccounts { +// if account.Status == model.ActiveTg { +// authorizer := client.ClientAuthorizerr() +// authorizer.TdlibParameters <- &client.SetTdlibParametersRequest{ +// UseTestDc: false, +// DatabaseDirectory: filepath.Join(".tdlib", "database"), +// FilesDirectory: filepath.Join(".tdlib", "files"), +// UseFileDatabase: true, +// UseChatInfoDatabase: true, +// UseMessageDatabase: true, +// UseSecretChats: true, +// ApiId: account.ApiID, +// ApiHash: account.ApiHash, +// SystemLanguageCode: "en", +// DeviceModel: "Server", +// SystemVersion: "1.0.0", +// ApplicationVersion: "1.0.0", +// } +// +// _, err := client.SetLogVerbosityLevel(&client.SetLogVerbosityLevelRequest{ +// NewVerbosityLevel: 1, +// }) +// if err != nil { +// return nil, err +// } +// +// var tdlibClient *client.Client +// var goErr error +// go func() { +// tdlibClient, goErr = client.NewClient(authorizer) +// if goErr != nil { +// fmt.Println("new client failed", err) +// return +// } +// fmt.Println("i am down") +// }() +// if goErr != nil { +// return nil, goErr +// } +// +// for { +// state, ok := <-authorizer.State +// if !ok { +// break +// } +// fmt.Println("currnet state:", state) +// switch state.AuthorizationStateType() { +// case client.TypeAuthorizationStateWaitPhoneNumber: +// authorizer.PhoneNumber <- account.PhoneNumber +// case client.TypeAuthorizationStateWaitCode: +// err := tgClient.repo.TgRepo.UpdateStatusTg(ctx, account.ID, model.InactiveTg) +// if err != nil { +// return nil, err +// } +// case client.TypeAuthorizationStateLoggingOut, client.TypeAuthorizationStateClosing, client.TypeAuthorizationStateClosed: +// err := tgClient.repo.TgRepo.UpdateStatusTg(ctx, account.ID, model.InactiveTg) +// if err != nil { +// return nil, err +// 
} +// case client.TypeAuthorizationStateReady: +// // костыль так как в либе тож костыль стоит пока там ьд обновиться будет ниловый всегда клиент +// time.Sleep(3 * time.Second) +// me, err := tdlibClient.GetMe() +// if err != nil { +// return nil, err +// } +// fmt.Printf("Me: %s %s [%v]", me.FirstName, me.LastName, me.Usernames) +// tgClient.mu.Lock() +// tgClient.TgClients[account.ID] = tdlibClient +// tgClient.mu.Unlock() +// break +// case client.TypeAuthorizationStateWaitPassword: +// authorizer.Password <- account.Password +// } +// } +// } +// } +// return tgClient, nil +// } +// +// type AuthTgUserReq struct { +// ApiID int32 `json:"api_id"` +// ApiHash string `json:"api_hash"` +// PhoneNumber string `json:"phone_number"` +// Password string `json:"password"` +// } +// +// func (tg *TelegramClient) AddedToMap(data WaitingClient, id string) { +// fmt.Println("AddedToMap") +// tg.mu.Lock() +// defer tg.mu.Unlock() +// tg.WaitingClients[id] = data +// } +// +// func (tg *TelegramClient) GetFromMap(id string) (WaitingClient, bool) { +// fmt.Println("GetFromMap") +// tg.mu.Lock() +// defer tg.mu.Unlock() +// if data, ok := tg.WaitingClients[id]; ok { +// delete(tg.WaitingClients, id) +// return data, true +// } +// return WaitingClient{}, false +// } +// +// func (tg *TelegramClient) SaveTgAccount(appID int32, appHash string, tdLibClient *client.Client) { +// account, err := tg.repo.TgRepo.SearchIDByAppIDanAppHash(context.Background(), appID, appHash) +// if err != nil { +// fmt.Println("err SaveTgAccount", err) +// return +// } +// if account.Status == model.ActiveTg { +// tg.mu.Lock() +// defer tg.mu.Unlock() +// tg.TgClients[account.ID] = tdLibClient +// } +// } +// +// func (tg *TelegramClient) CreateChannel(channelName string, botID int64) (string, int64, error) { +// tg.mu.Lock() +// defer tg.mu.Unlock() +// if len(tg.TgClients) == 0 { +// return "", 0, errors.New("no active Telegram clients") +// } +// var lastError error +// var inviteLink string +// var channelId int64 +// for _, activeClient := range tg.TgClients { +// // todo пока не понимаю это какой то рандом? 
в один день бот норм находится в другой уже не находится хотя абсолютно с точки зрения тг кода этой функции и бота не менялось +// _, err := activeClient.GetUser(&client.GetUserRequest{ +// UserId: botID, +// }) +// if err != nil { +// lastError = fmt.Errorf("not found this bot, make privacy off: %v", err) +// continue +// } +// +// // todo нужно поймать ошибку, при которой либо бан либо медленный редим включается для того чтобы прервать +// // исполнение клиента текущего аккаунта и дать задачу следующему пока поймал 1 раз и не запомнил больше не получается +// channel, err := activeClient.CreateNewSupergroupChat(&client.CreateNewSupergroupChatRequest{ +// Title: channelName, +// IsChannel: true, +// Description: "private channel", +// }) +// if err != nil { +// lastError = fmt.Errorf("failed to create channel: %s", err.Error()) +// continue +// } +// +// _, err = activeClient.SetChatMemberStatus(&client.SetChatMemberStatusRequest{ +// ChatId: channel.Id, +// MemberId: &client.MessageSenderUser{UserId: botID}, +// Status: &client.ChatMemberStatusAdministrator{ +// CustomTitle: "bot", +// Rights: &client.ChatAdministratorRights{ +// CanManageChat: true, +// CanChangeInfo: true, +// CanPostMessages: true, +// CanEditMessages: true, +// CanDeleteMessages: true, +// CanInviteUsers: true, +// CanRestrictMembers: true, +// CanPinMessages: true, +// CanManageTopics: true, +// CanPromoteMembers: true, +// CanManageVideoChats: true, +// CanPostStories: true, +// CanEditStories: true, +// CanDeleteStories: true, +// }, +// }, +// }) +// if err != nil { +// lastError = fmt.Errorf("failed to make bot admin: %s", err.Error()) +// continue +// } +// +// inviteLinkResp, err := activeClient.CreateChatInviteLink(&client.CreateChatInviteLinkRequest{ +// ChatId: channel.Id, +// Name: channelName, +// ExpirationDate: 0, +// MemberLimit: 0, +// CreatesJoinRequest: false, +// }) +// if err != nil { +// lastError = fmt.Errorf("failed to get invite link: %s", err.Error()) +// continue +// } +// +// _, err = activeClient.LeaveChat(&client.LeaveChatRequest{ +// ChatId: channel.Id, +// }) +// if err != nil { +// lastError = fmt.Errorf("failed to leave the channel: %s", err.Error()) +// continue +// } +// +// inviteLink = inviteLinkResp.InviteLink +// channelId = channel.Id +// return inviteLink, channelId, nil +// } +// +// return "", 0, lastError +// } diff --git a/internal/controllers/http_controllers/account/account.go b/internal/controllers/http_controllers/account/account.go new file mode 100644 index 0000000..6ca1b07 --- /dev/null +++ b/internal/controllers/http_controllers/account/account.go @@ -0,0 +1,420 @@ +package account + +import ( + "database/sql" + "encoding/json" + "errors" + "fmt" + "gitea.pena/PenaSide/common/log_mw" + "gitea.pena/SQuiz/common/dal" + "gitea.pena/SQuiz/common/middleware" + "gitea.pena/SQuiz/common/model" + "gitea.pena/SQuiz/common/pj_errors" + "gitea.pena/SQuiz/core/internal/brokers" + "gitea.pena/SQuiz/core/internal/clients/auth" + "gitea.pena/SQuiz/core/internal/models" + "github.com/go-redis/redis/v8" + "github.com/gofiber/fiber/v2" + "strconv" + "time" +) + +type Deps struct { + Dal *dal.DAL + AuthClient *auth.AuthClient + Producer *brokers.Producer + ServiceName string + RedisClient *redis.Client +} + +type Account struct { + dal *dal.DAL + authClient *auth.AuthClient + producer *brokers.Producer + serviceName string + redisClient *redis.Client +} + +func NewAccountController(deps Deps) *Account { + return &Account{ + dal: deps.Dal, + authClient: deps.AuthClient, + producer: 
deps.Producer, + serviceName: deps.ServiceName, + redisClient: deps.RedisClient, + } +} + +type CreateAccountReq struct { + UserID string `json:"userId"` +} + +type CreateAccountResp struct { + CreatedAccount model.Account `json:"created_account"` +} + +type DeleteAccountResp struct { + DeletedAccountID string `json:"account_Id"` +} + +type GetPrivilegeByUserIDReq struct { + UserID string `json:"userId"` +} + +type DeleteAccountByUserIDReq struct { + UserID string `json:"userId"` +} + +type DeleteAccountByUserIDResp struct { + DeletedAccountUserID string `json:"userId"` +} + +type GetAccountsReq struct { + Limit uint64 `json:"limit"` + Page uint64 `json:"page"` +} + +type GetAccountsResp struct { + Count uint64 `json:"count"` + Items []model.Account `json:"items"` +} + +// getCurrentAccount обработчик для получения текущего аккаунта +func (r *Account) GetCurrentAccount(ctx *fiber.Ctx) error { + accountID, ok := middleware.GetAccountId(ctx) + if !ok { + return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") + } + + account, err := r.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) + if err != nil && err != sql.ErrNoRows { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + //TODO: fix this later + if account.ID == "" { + return ctx.Status(fiber.StatusNotFound).SendString("no account") + } + + return ctx.Status(fiber.StatusOK).JSON(account) +} + +// createAccount обработчик для создания нового аккаунта +func (r *Account) CreateAccount(ctx *fiber.Ctx) error { + accountID, ok := middleware.GetAccountId(ctx) + if !ok { + return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") + } + hlogger := log_mw.ExtractLogger(ctx) + + existingAccount, err := r.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) + if err != nil && err != sql.ErrNoRows { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + if existingAccount.ID != "" { + return ctx.Status(fiber.StatusConflict).SendString("user with this ID already exists") + } + + email, err := r.authClient.GetUserEmail(accountID) + if err != nil { + return err + } + + newAccount := model.Account{ + UserID: accountID, + CreatedAt: time.Now(), + Deleted: false, + Privileges: map[string]model.ShortPrivilege{ + "quizUnlimTime": { + PrivilegeID: "quizUnlimTime", + PrivilegeName: "Безлимит Опросов", + Amount: 14, + CreatedAt: time.Now(), + }, + }, + } + + createdAcc, err := r.dal.AccountRepo.CreateAccount(ctx.Context(), &newAccount) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + _, err = r.dal.AccountRepo.PostLeadTarget(ctx.Context(), model.LeadTarget{ + AccountID: accountID, + Target: email, + Type: model.LeadTargetEmail, + QuizID: 0, + }) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + hlogger.Emit(models.InfoAccountCreated{ + CtxUserID: accountID, + CtxAccountID: createdAcc.ID, + }) + + err = r.producer.ToMailNotify(ctx.Context(), brokers.Message{ + AccountID: accountID, + Email: email, + ServiceKey: r.serviceName, + SendAt: time.Now(), + }) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + return ctx.JSON(CreateAccountResp{ + CreatedAccount: newAccount, + }) +} + +// deleteAccount обработчик для удаления текущего аккаунта +func (r *Account) DeleteAccount(ctx *fiber.Ctx) error { + accountID, ok := middleware.GetAccountId(ctx) + if !ok { + return 
ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") + } + + account, err := r.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + if err := r.dal.AccountRepo.DeleteAccount(ctx.Context(), account.ID); err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + return ctx.JSON(DeleteAccountResp{ + DeletedAccountID: accountID, + }) +} + +// getPrivilegeByUserID обработчик для получения привилегий аккаунта по ID пользователя +func (r *Account) GetPrivilegeByUserID(ctx *fiber.Ctx) error { + var req GetPrivilegeByUserIDReq + if err := ctx.BodyParser(&req); err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + } + + privilege, err := r.dal.AccountRepo.GetPrivilegesByAccountID(ctx.Context(), req.UserID) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + return ctx.Status(fiber.StatusOK).JSON(privilege) +} + +// deleteAccountByUserID обработчик для удаления аккаунта по ID пользователя +func (r *Account) DeleteAccountByUserID(ctx *fiber.Ctx) error { + var req DeleteAccountByUserIDReq + if err := ctx.BodyParser(&req); err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + } + + existingAccount, err := r.dal.AccountRepo.GetAccountByID(ctx.Context(), req.UserID) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + if existingAccount.ID == "" { + return ctx.Status(fiber.StatusInternalServerError).SendString("user with this ID not found") + } + + if err := r.dal.AccountRepo.DeleteAccount(ctx.Context(), existingAccount.ID); err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + return ctx.JSON(DeleteAccountByUserIDResp{ + DeletedAccountUserID: req.UserID, + }) +} + +// getAccounts обработчик для получения списка аккаунтов с пагинацией +func (r *Account) GetAccounts(ctx *fiber.Ctx) error { + var req GetAccountsReq + if err := ctx.BodyParser(&req); err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + } + + _, ok := middleware.GetAccountId(ctx) + if !ok { + return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") + } + + accounts, totalCount, err := r.dal.AccountRepo.GetAccounts(ctx.Context(), req.Limit, req.Page) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + response := GetAccountsResp{ + Count: totalCount, + Items: accounts, + } + + return ctx.Status(fiber.StatusOK).JSON(response) +} + +func (r *Account) ManualDone(ctx *fiber.Ctx) error { + var req struct { + Id string `json:"id"` + } + if err := ctx.BodyParser(&req); err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + } + + if req.Id == "" { + return ctx.Status(fiber.StatusBadRequest).SendString("User id is required") + } + + err := r.dal.AccountRepo.ManualDone(ctx.Context(), req.Id) + if err != nil { + if errors.Is(err, pj_errors.ErrNotFound) { + return ctx.Status(fiber.StatusNotFound).SendString("user don't have this privilege") + } + return ctx.Status(fiber.StatusInternalServerError).SendString("Internal Server Error") + } + + return ctx.SendStatus(fiber.StatusOK) +} + +func (r *Account) PostLeadTarget(ctx *fiber.Ctx) error { + var req struct { + Type string `json:"type"` + QuizID int32 `json:"quizID"` + 
Target string `json:"target"` + Name string `json:"name"` + } + if err := ctx.BodyParser(&req); err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + } + + accountID, ok := middleware.GetAccountId(ctx) + if !ok { + return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") + } + + //accountID := "64f2cd7a7047f28fdabf6d9e" + + if _, ok := model.ValidLeadTargetTypes[req.Type]; !ok { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid type") + } + + if req.Type == "" || (req.Target == "" && req.Type != string(model.LeadTargetTg)) { + return ctx.Status(fiber.StatusBadRequest).SendString("Type and Target don't be nil") + } + + switch req.Type { + case "mail": + _, err := r.dal.AccountRepo.PostLeadTarget(ctx.Context(), model.LeadTarget{ + AccountID: accountID, + Target: req.Target, + Type: model.LeadTargetType(req.Type), + QuizID: req.QuizID, + }) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + return ctx.SendStatus(fiber.StatusOK) + case "telegram": + targets, err := r.dal.AccountRepo.GetLeadTarget(ctx.Context(), accountID, req.QuizID) + if err != nil && !errors.Is(err, pj_errors.ErrNotFound) { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + if !errors.Is(err, pj_errors.ErrNotFound) { + for _, t := range targets { + if t.Type == model.LeadTargetTg { + return ctx.Status(fiber.StatusAlreadyReported).SendString("LeadTarget for this quiz already exist") + } + } + } + + task := model.TgRedisTask{ + Name: req.Name, + QuizID: req.QuizID, + AccountID: accountID, + } + + taskKey := fmt.Sprintf("telegram_task:%d", time.Now().UnixNano()) + taskData, err := json.Marshal(task) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + + if err := r.redisClient.Set(ctx.Context(), taskKey, taskData, 0).Err(); err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + case "whatsapp": + return ctx.Status(fiber.StatusOK).SendString("todo") + } + + return nil +} + +func (r *Account) DeleteLeadTarget(ctx *fiber.Ctx) error { + leadIDStr := ctx.Params("id") + leadID, err := strconv.ParseInt(leadIDStr, 10, 64) + if err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid lead ID format") + } + + err = r.dal.AccountRepo.DeleteLeadTarget(ctx.Context(), leadID) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + return ctx.SendStatus(fiber.StatusOK) +} + +func (r *Account) GetLeadTarget(ctx *fiber.Ctx) error { + accountID, ok := middleware.GetAccountId(ctx) + if !ok { + return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") + } + + quizIDStr := ctx.Params("quizID") + quizID, err := strconv.ParseInt(quizIDStr, 10, 64) + if err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid quiz ID format") + } + + result, err := r.dal.AccountRepo.GetLeadTarget(ctx.Context(), accountID, int32(quizID)) + if err != nil { + switch { + case errors.Is(err, pj_errors.ErrNotFound): + return ctx.Status(fiber.StatusNotFound).SendString("this lead target not found") + default: + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + } + + return ctx.Status(fiber.StatusOK).JSON(result) +} + +func (r *Account) UpdateLeadTarget(ctx *fiber.Ctx) error { + var req struct { + ID int64 `json:"id"` + Target string `json:"target"` + } + + if err := ctx.BodyParser(&req); err != 
nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + } + + if req.ID == 0 || req.Target == "" { + return ctx.Status(fiber.StatusBadRequest).SendString("ID and Target don't be nil") + } + + result, err := r.dal.AccountRepo.UpdateLeadTarget(ctx.Context(), model.LeadTarget{ + ID: req.ID, + Target: req.Target, + }) + if err != nil { + switch { + case errors.Is(err, pj_errors.ErrNotFound): + return ctx.Status(fiber.StatusNotFound).SendString("this lead target not found") + default: + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + } + + return ctx.Status(fiber.StatusOK).JSON(result) +} diff --git a/internal/controllers/http_controllers/account/route.go b/internal/controllers/http_controllers/account/route.go new file mode 100644 index 0000000..8276adf --- /dev/null +++ b/internal/controllers/http_controllers/account/route.go @@ -0,0 +1,21 @@ +package account + +import "github.com/gofiber/fiber/v2" + +func (r *Account) Register(router fiber.Router) { + router.Get("/account/get", r.GetCurrentAccount) + router.Post("/account/create", r.CreateAccount) + router.Delete("/account/delete", r.DeleteAccount) + router.Get("/accounts", r.GetAccounts) + router.Get("/privilege/:userId", r.GetPrivilegeByUserID) + router.Delete("/account/:userId", r.DeleteAccountByUserID) + router.Post("/account/manualdone", r.ManualDone) + router.Post("/account/leadtarget", r.PostLeadTarget) + router.Delete("/account/leadtarget/:id", r.DeleteLeadTarget) + router.Get("/account/leadtarget/:quizID", r.GetLeadTarget) + router.Put("/account/leadtarget", r.UpdateLeadTarget) +} + +func (r *Account) Name() string { + return "" +} diff --git a/service/question_svc.go b/internal/controllers/http_controllers/question/question.go similarity index 88% rename from service/question_svc.go rename to internal/controllers/http_controllers/question/question.go index db214f5..9edec6e 100644 --- a/service/question_svc.go +++ b/internal/controllers/http_controllers/question/question.go @@ -1,15 +1,28 @@ -package service +package question import ( "gitea.pena/PenaSide/common/log_mw" + "gitea.pena/SQuiz/common/dal" "gitea.pena/SQuiz/common/middleware" "gitea.pena/SQuiz/common/model" - "gitea.pena/SQuiz/core/models" + "gitea.pena/SQuiz/core/internal/models" "github.com/gofiber/fiber/v2" "github.com/lib/pq" "unicode/utf8" ) +type Deps struct { + DAL *dal.DAL +} + +type Question struct { + dal *dal.DAL +} + +func NewQuestionController(deps Deps) *Question { + return &Question{dal: deps.DAL} +} + // QuestionCreateReq request structure for creating Question type QuestionCreateReq struct { QuizId uint64 `json:"quiz_id"` // relation to quiz @@ -23,7 +36,7 @@ type QuestionCreateReq struct { } // CreateQuestion service handler for creating question for quiz -func (s *Service) CreateQuestion(ctx *fiber.Ctx) error { +func (r *Question) CreateQuestion(ctx *fiber.Ctx) error { accountID, ok := middleware.GetAccountId(ctx) if !ok { return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") @@ -63,7 +76,8 @@ func (s *Service) CreateQuestion(ctx *fiber.Ctx) error { Page: req.Page, Content: req.Content, } - questionID, err := s.dal.QuestionRepo.CreateQuestion(ctx.Context(), &result) + + questionID, err := r.dal.QuestionRepo.CreateQuestion(ctx.Context(), &result) if err != nil { if e, ok := err.(*pq.Error); ok { if e.Constraint == "quiz_relation" { @@ -103,7 +117,7 @@ type GetQuestionListResp struct { } // GetQuestionList handler for paginated list question -func (s *Service) 
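For the telegram lead-target type, the PostLeadTarget handler above only enqueues a model.TgRedisTask in Redis under a `telegram_task:<unix-nano>` key; the worker that consumes those keys is not part of this diff. A minimal sketch of such a consumer, assuming go-redis v8 and that the task is read back with the same JSON shape it was written with (the local TgRedisTask stand-in and its json tags below are guesses; a real worker would import the model package):

```go
package leadworker

import (
	"context"
	"encoding/json"
	"log"
	"time"

	"github.com/go-redis/redis/v8"
)

// Local stand-in for model.TgRedisTask; field names mirror the handler above, tags are assumed.
type TgRedisTask struct {
	Name      string `json:"name"`
	QuizID    int32  `json:"quizID"`
	AccountID string `json:"accountID"`
}

// ConsumeTelegramTasks polls the telegram_task:* keys written by PostLeadTarget.
func ConsumeTelegramTasks(ctx context.Context, rdb *redis.Client) error {
	for {
		var cursor uint64
		for {
			keys, next, err := rdb.Scan(ctx, cursor, "telegram_task:*", 100).Result()
			if err != nil {
				return err
			}
			for _, key := range keys {
				raw, err := rdb.Get(ctx, key).Bytes()
				if err != nil {
					continue // key may already have been consumed by another worker
				}
				var task TgRedisTask
				if err := json.Unmarshal(raw, &task); err != nil {
					log.Printf("bad telegram task %s: %v", key, err)
					continue
				}
				// here the real worker would create the channel and the telegram lead target
				log.Printf("picked up telegram task for account %s, quiz %d", task.AccountID, task.QuizID)
				rdb.Del(ctx, key)
			}
			cursor = next
			if cursor == 0 {
				break
			}
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(time.Second): // finished one pass, poll again
		}
	}
}
```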
GetQuestionList(ctx *fiber.Ctx) error { +func (r *Question) GetQuestionList(ctx *fiber.Ctx) error { var req GetQuestionListReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -126,7 +140,7 @@ func (s *Service) GetQuestionList(ctx *fiber.Ctx) error { "'test','none','file', 'button','select','checkbox'") } - res, cnt, err := s.dal.QuestionRepo.GetQuestionList(ctx.Context(), + res, cnt, err := r.dal.QuestionRepo.GetQuestionList(ctx.Context(), req.Limit, req.Page*req.Limit, uint64(req.From), @@ -165,7 +179,7 @@ type UpdateResp struct { } // UpdateQuestion handler for update question -func (s *Service) UpdateQuestion(ctx *fiber.Ctx) error { +func (r *Question) UpdateQuestion(ctx *fiber.Ctx) error { var req UpdateQuestionReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -195,7 +209,7 @@ func (s *Service) UpdateQuestion(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusNotAcceptable).SendString("type must be only test,button,file,checkbox,select, none or empty string") } - question, err := s.dal.QuestionRepo.MoveToHistoryQuestion(ctx.Context(), req.Id) + question, err := r.dal.QuestionRepo.MoveToHistoryQuestion(ctx.Context(), req.Id) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -227,7 +241,7 @@ func (s *Service) UpdateQuestion(ctx *fiber.Ctx) error { question.Content = req.Content } - if err := s.dal.QuestionRepo.UpdateQuestion(ctx.Context(), question); err != nil { + if err := r.dal.QuestionRepo.UpdateQuestion(ctx.Context(), question); err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -243,7 +257,7 @@ type CopyQuestionReq struct { } // CopyQuestion handler for copy question -func (s *Service) CopyQuestion(ctx *fiber.Ctx) error { +func (r *Question) CopyQuestion(ctx *fiber.Ctx) error { var req CopyQuestionReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -253,7 +267,7 @@ func (s *Service) CopyQuestion(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusFailedDependency).SendString("no id provided") } - question, err := s.dal.QuestionRepo.CopyQuestion(ctx.Context(), req.Id, req.QuizId) + question, err := r.dal.QuestionRepo.CopyQuestion(ctx.Context(), req.Id, req.QuizId) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -271,8 +285,8 @@ type GetQuestionHistoryReq struct { } // GetQuestionHistory handler for history of quiz -func (s *Service) GetQuestionHistory(ctx *fiber.Ctx) error { - var req GetQuizHistoryReq +func (r *Question) GetQuestionHistory(ctx *fiber.Ctx) error { + var req GetQuestionHistoryReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") } @@ -281,7 +295,7 @@ func (s *Service) GetQuestionHistory(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusFailedDependency).SendString("no id provided") } - history, err := s.dal.QuestionRepo.QuestionHistory(ctx.Context(), req.Id, req.Limit, req.Page*req.Limit) + history, err := r.dal.QuestionRepo.QuestionHistory(ctx.Context(), req.Id, req.Limit, req.Page*req.Limit) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -289,15 +303,22 @@ func (s *Service) GetQuestionHistory(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusOK).JSON(history) } +type 
DeactivateResp struct { + Deactivated uint64 `json:"deactivated"` +} + // DeleteQuestion handler for fake delete question -func (s *Service) DeleteQuestion(ctx *fiber.Ctx) error { +func (r *Question) DeleteQuestion(ctx *fiber.Ctx) error { accountID, ok := middleware.GetAccountId(ctx) if !ok { return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") } hlogger := log_mw.ExtractLogger(ctx) - var req DeactivateReq + var req struct { + Id uint64 `json:"id"` + } + if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") } @@ -306,7 +327,7 @@ func (s *Service) DeleteQuestion(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusFailedDependency).SendString("id for deleting question is required") } - deleted, err := s.dal.QuestionRepo.DeleteQuestion(ctx.Context(), req.Id) + deleted, err := r.dal.QuestionRepo.DeleteQuestion(ctx.Context(), req.Id) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } diff --git a/internal/controllers/http_controllers/question/route.go b/internal/controllers/http_controllers/question/route.go new file mode 100644 index 0000000..bc152d6 --- /dev/null +++ b/internal/controllers/http_controllers/question/route.go @@ -0,0 +1,16 @@ +package question + +import "github.com/gofiber/fiber/v2" + +func (r *Question) Register(router fiber.Router) { + router.Post("/create", r.CreateQuestion) + router.Post("/getList", r.GetQuestionList) + router.Patch("/edit", r.UpdateQuestion) + router.Post("/copy", r.CopyQuestion) + router.Post("/history", r.GetQuestionHistory) + router.Delete("/delete", r.DeleteQuestion) +} + +func (r *Question) Name() string { + return "question" +} diff --git a/service/quiz_svc.go b/internal/controllers/http_controllers/quiz/quiz.go similarity index 92% rename from service/quiz_svc.go rename to internal/controllers/http_controllers/quiz/quiz.go index f1719af..b6adf5f 100644 --- a/service/quiz_svc.go +++ b/internal/controllers/http_controllers/quiz/quiz.go @@ -1,19 +1,31 @@ -package service +package quiz import ( "fmt" "gitea.pena/PenaSide/common/log_mw" + "gitea.pena/SQuiz/common/dal" "gitea.pena/SQuiz/common/middleware" "gitea.pena/SQuiz/common/model" "gitea.pena/SQuiz/common/repository/quiz" "gitea.pena/SQuiz/core/brokers" - "gitea.pena/SQuiz/core/models" + "gitea.pena/SQuiz/core/internal/models" "github.com/gofiber/fiber/v2" - "strconv" "time" "unicode/utf8" ) +type Deps struct { + DAL *dal.DAL +} + +type Quiz struct { + dal *dal.DAL +} + +func NewQuizController(deps Deps) *Quiz { + return &Quiz{dal: deps.DAL} +} + type CreateQuizReq struct { Fingerprinting bool `json:"fingerprinting"` // true if you need to store device id Repeatable bool `json:"repeatable"` // make it true for allow more than one quiz checkouting @@ -38,7 +50,7 @@ type CreateQuizReq struct { } // CreateQuiz handler for quiz creating request -func (s *Service) CreateQuiz(ctx *fiber.Ctx) error { +func (r *Quiz) CreateQuiz(ctx *fiber.Ctx) error { var req CreateQuizReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -98,7 +110,7 @@ func (s *Service) CreateQuiz(ctx *fiber.Ctx) error { GroupId: req.GroupId, } - quizID, err := s.dal.QuizRepo.CreateQuiz(ctx.Context(), &record) + quizID, err := r.dal.QuizRepo.CreateQuiz(ctx.Context(), &record) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -132,7 +144,7 @@ type GetQuizListResp struct { } // 
GetQuizList handler for paginated list quiz -func (s *Service) GetQuizList(ctx *fiber.Ctx) error { +func (r *Quiz) GetQuizList(ctx *fiber.Ctx) error { var req GetQuizListReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -155,7 +167,7 @@ func (s *Service) GetQuizList(ctx *fiber.Ctx) error { "'stop','start','draft', 'template','timeout','offlimit'") } - res, cnt, err := s.dal.QuizRepo.GetQuizList(ctx.Context(), + res, cnt, err := r.dal.QuizRepo.GetQuizList(ctx.Context(), quiz.GetQuizListDeps{ Limit: req.Limit, Offset: req.Limit * req.Page, @@ -199,7 +211,11 @@ type UpdateQuizReq struct { GroupId uint64 `json:"group_id"` } -func (s *Service) UpdateQuiz(ctx *fiber.Ctx) error { +type UpdateResp struct { + Updated uint64 `json:"updated"` +} + +func (r *Quiz) UpdateQuiz(ctx *fiber.Ctx) error { var req UpdateQuizReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -241,7 +257,7 @@ func (s *Service) UpdateQuiz(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusConflict).SendString("you can pause quiz only if it has deadline for passing") } - quiz, err := s.dal.QuizRepo.MoveToHistoryQuiz(ctx.Context(), req.Id, accountId) + quiz, err := r.dal.QuizRepo.MoveToHistoryQuiz(ctx.Context(), req.Id, accountId) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -313,7 +329,7 @@ func (s *Service) UpdateQuiz(ctx *fiber.Ctx) error { quiz.ParentIds = append(quiz.ParentIds, int32(quiz.Id)) - if err := s.dal.QuizRepo.UpdateQuiz(ctx.Context(), accountId, quiz); err != nil { + if err := r.dal.QuizRepo.UpdateQuiz(ctx.Context(), accountId, quiz); err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -341,7 +357,7 @@ type CopyQuizReq struct { } // CopyQuiz request handler for copy quiz -func (s *Service) CopyQuiz(ctx *fiber.Ctx) error { +func (r *Quiz) CopyQuiz(ctx *fiber.Ctx) error { var req CopyQuizReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -356,7 +372,7 @@ func (s *Service) CopyQuiz(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusFailedDependency).SendString("no id provided") } - quiz, err := s.dal.QuizRepo.CopyQuiz(ctx.Context(), accountId, req.Id) + quiz, err := r.dal.QuizRepo.CopyQuiz(ctx.Context(), accountId, req.Id) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -374,7 +390,7 @@ type GetQuizHistoryReq struct { } // GetQuizHistory handler for history of quiz -func (s *Service) GetQuizHistory(ctx *fiber.Ctx) error { +func (r *Quiz) GetQuizHistory(ctx *fiber.Ctx) error { var req GetQuizHistoryReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -388,7 +404,7 @@ func (s *Service) GetQuizHistory(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusFailedDependency).SendString("no id provided") } - history, err := s.dal.QuizRepo.QuizHistory(ctx.Context(), quiz.QuizHistoryDeps{ + history, err := r.dal.QuizRepo.QuizHistory(ctx.Context(), quiz.QuizHistoryDeps{ Id: req.Id, Limit: req.Limit, Offset: req.Page * req.Limit, @@ -411,7 +427,7 @@ type DeactivateResp struct { } // DeleteQuiz handler for fake delete quiz -func (s *Service) DeleteQuiz(ctx *fiber.Ctx) error { +func (r *Quiz) DeleteQuiz(ctx *fiber.Ctx) error { var req DeactivateReq if err := ctx.BodyParser(&req); 
err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -427,7 +443,7 @@ func (s *Service) DeleteQuiz(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusFailedDependency).SendString("id for deleting is required") } - deleted, err := s.dal.QuizRepo.DeleteQuiz(ctx.Context(), accountId, req.Id) + deleted, err := r.dal.QuizRepo.DeleteQuiz(ctx.Context(), accountId, req.Id) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -443,7 +459,7 @@ func (s *Service) DeleteQuiz(ctx *fiber.Ctx) error { } // ArchiveQuiz handler for archiving quiz -func (s *Service) ArchiveQuiz(ctx *fiber.Ctx) error { +func (r *Quiz) ArchiveQuiz(ctx *fiber.Ctx) error { var req DeactivateReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -458,7 +474,7 @@ func (s *Service) ArchiveQuiz(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusFailedDependency).SendString("id for archive quiz is required") } - archived, err := s.dal.QuizRepo.DeleteQuiz(ctx.Context(), accountId, req.Id) + archived, err := r.dal.QuizRepo.DeleteQuiz(ctx.Context(), accountId, req.Id) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -472,7 +488,7 @@ type QuizMoveReq struct { Qid, AccountID string } -func (s *Service) QuizMove(ctx *fiber.Ctx) error { +func (r *Quiz) QuizMove(ctx *fiber.Ctx) error { var req QuizMoveReq if err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -482,7 +498,7 @@ func (s *Service) QuizMove(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request qid and accountID is required") } - resp, err := s.dal.QuizRepo.QuizMove(ctx.Context(), req.Qid, req.AccountID) + resp, err := r.dal.QuizRepo.QuizMove(ctx.Context(), req.Qid, req.AccountID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -490,7 +506,7 @@ func (s *Service) QuizMove(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusOK).JSON(resp) } -func (s *Service) TemplateCopy(ctx *fiber.Ctx) error { +func (r *Quiz) TemplateCopy(ctx *fiber.Ctx) error { accountID, ok := middleware.GetAccountId(ctx) if !ok { return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") @@ -509,7 +525,7 @@ func (s *Service) TemplateCopy(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request qid is required") } - qizID, err := s.dal.QuizRepo.TemplateCopy(ctx.Context(), accountID, req.Qid) + qizID, err := r.dal.QuizRepo.TemplateCopy(ctx.Context(), accountID, req.Qid) if err != nil { fmt.Println("TEMPLERR", err) return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) diff --git a/internal/controllers/http_controllers/quiz/route.go b/internal/controllers/http_controllers/quiz/route.go new file mode 100644 index 0000000..a267542 --- /dev/null +++ b/internal/controllers/http_controllers/quiz/route.go @@ -0,0 +1,19 @@ +package quiz + +import "github.com/gofiber/fiber/v2" + +func (r *Quiz) Register(router fiber.Router) { + router.Post("/create", r.CreateQuiz) + router.Post("/getList", r.GetQuizList) + router.Patch("/edit", r.UpdateQuiz) + router.Post("/copy", r.CopyQuiz) + router.Post("/history", r.GetQuizHistory) + router.Delete("/delete", r.DeleteQuiz) + router.Patch("/archive", r.ArchiveQuiz) + router.Post("/move", r.QuizMove) + router.Post("/template", r.TemplateCopy) +} + 
+func (r *Quiz) Name() string { + return "quiz" +} diff --git a/service/result_svc.go b/internal/controllers/http_controllers/result/result.go similarity index 78% rename from service/result_svc.go rename to internal/controllers/http_controllers/result/result.go index 1c2d94f..5b8f930 100644 --- a/service/result_svc.go +++ b/internal/controllers/http_controllers/result/result.go @@ -1,16 +1,34 @@ -package service +package result import ( "bytes" - "github.com/gofiber/fiber/v2" + "gitea.pena/SQuiz/common/dal" "gitea.pena/SQuiz/common/middleware" "gitea.pena/SQuiz/common/model" "gitea.pena/SQuiz/common/repository/result" - "gitea.pena/SQuiz/core/tools" + "github.com/gofiber/fiber/v2" + "gitea.pena/SQuiz/core/internal/tools" "strconv" "time" ) +type Deps struct { + DAL *dal.DAL + S3Prefix string +} + +type Result struct { + dal *dal.DAL + s3Prefix string +} + +func NewResultController(deps Deps) *Result { + return &Result{ + dal: deps.DAL, + s3Prefix: deps.S3Prefix, + } +} + type ReqExport struct { To, From time.Time New bool @@ -23,7 +41,7 @@ type ReqExportResponse struct { Results []model.AnswerExport `json:"results"` } -func (s *Service) GetResultsByQuizID(ctx *fiber.Ctx) error { +func (r *Result) GetResultsByQuizID(ctx *fiber.Ctx) error { payment := true // параметр для определения существования текущих привилегий юзера accountID, ok := middleware.GetAccountId(ctx) @@ -42,7 +60,7 @@ func (s *Service) GetResultsByQuizID(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid quiz ID format") } - account, err := s.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) + account, err := r.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -52,7 +70,7 @@ func (s *Service) GetResultsByQuizID(ctx *fiber.Ctx) error { } } - results, totalCount, err := s.dal.ResultRepo.GetQuizResults(ctx.Context(), quizID, result.GetQuizResDeps{ + results, totalCount, err := r.dal.ResultRepo.GetQuizResults(ctx.Context(), quizID, result.GetQuizResDeps{ To: req.To, From: req.From, New: req.New, @@ -71,7 +89,7 @@ func (s *Service) GetResultsByQuizID(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusOK).JSON(resp) } -func (s *Service) DelResultByID(ctx *fiber.Ctx) error { +func (r *Result) DelResultByID(ctx *fiber.Ctx) error { accountID, ok := middleware.GetAccountId(ctx) if !ok { return ctx.Status(fiber.StatusUnauthorized).SendString("could not get account ID from token") @@ -83,7 +101,7 @@ func (s *Service) DelResultByID(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid result ID format") } - isOwner, err := s.dal.ResultRepo.CheckResultOwner(ctx.Context(), resultID, accountID) + isOwner, err := r.dal.ResultRepo.CheckResultOwner(ctx.Context(), resultID, accountID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -92,7 +110,7 @@ func (s *Service) DelResultByID(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusUnauthorized).SendString("not the owner of the result") } - if err := s.dal.ResultRepo.SoftDeleteResultByID(ctx.Context(), resultID); err != nil { + if err := r.dal.ResultRepo.SoftDeleteResultByID(ctx.Context(), resultID); err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -104,7 +122,7 @@ type ReqSeen struct { Answers []int64 } -func (s *Service) SetStatus(ctx *fiber.Ctx) error { +func (r *Result) SetStatus(ctx *fiber.Ctx) error { var req ReqSeen if 
err := ctx.BodyParser(&req); err != nil { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") @@ -115,7 +133,7 @@ func (s *Service) SetStatus(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusUnauthorized).SendString("could not get account ID from token") } - answers, err := s.dal.ResultRepo.CheckResultsOwner(ctx.Context(), req.Answers, accountID) + answers, err := r.dal.ResultRepo.CheckResultsOwner(ctx.Context(), req.Answers, accountID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -124,14 +142,14 @@ func (s *Service) SetStatus(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusNotAcceptable).SendString("could not update some answers because you don't have rights") } - if err := s.dal.ResultRepo.UpdateAnswersStatus(ctx.Context(), accountID, answers); err != nil { + if err := r.dal.ResultRepo.UpdateAnswersStatus(ctx.Context(), accountID, answers); err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } return ctx.Status(fiber.StatusOK).JSON(nil) } -func (s *Service) ExportResultsToCSV(ctx *fiber.Ctx) error { +func (r *Result) ExportResultsToCSV(ctx *fiber.Ctx) error { accountID, ok := middleware.GetAccountId(ctx) if !ok { return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") @@ -148,7 +166,7 @@ func (s *Service) ExportResultsToCSV(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("invalid request body") } - account, err := s.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) + account, err := r.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -159,17 +177,17 @@ func (s *Service) ExportResultsToCSV(ctx *fiber.Ctx) error { } } - quiz, err := s.dal.QuizRepo.GetQuizById(ctx.Context(), accountID, quizID) + quiz, err := r.dal.QuizRepo.GetQuizById(ctx.Context(), accountID, quizID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString("failed to get quiz") } - questions, err := s.dal.ResultRepo.GetQuestions(ctx.Context(), quizID) + questions, err := r.dal.ResultRepo.GetQuestions(ctx.Context(), quizID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString("failed to get questions") } - answers, err := s.dal.ResultRepo.GetQuizResultsCSV(ctx.Context(), quizID, result.GetQuizResDeps{ + answers, err := r.dal.ResultRepo.GetQuizResultsCSV(ctx.Context(), quizID, result.GetQuizResDeps{ To: req.To, From: req.From, New: req.New, @@ -182,7 +200,7 @@ func (s *Service) ExportResultsToCSV(ctx *fiber.Ctx) error { buffer := new(bytes.Buffer) - if err := tools.WriteDataToExcel(buffer, questions, answers, s.s3Prefix + quiz.Qid + "/"); err != nil { + if err := tools.WriteDataToExcel(buffer, questions, answers, r.s3Prefix+quiz.Qid+"/"); err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString("failed to write data to Excel") } @@ -192,7 +210,7 @@ func (s *Service) ExportResultsToCSV(ctx *fiber.Ctx) error { return ctx.Send(buffer.Bytes()) } -func (s *Service) GetResultAnswers(ctx *fiber.Ctx) error { +func (r *Result) GetResultAnswers(ctx *fiber.Ctx) error { accountID, ok := middleware.GetAccountId(ctx) if !ok { return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") @@ -203,7 +221,7 @@ func (s *Service) GetResultAnswers(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("invalid quiz ID") } - account, err := 
s.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) + account, err := r.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -214,11 +232,11 @@ func (s *Service) GetResultAnswers(ctx *fiber.Ctx) error { } } - answers, err := s.dal.ResultRepo.GetResultAnswers(ctx.Context(), resultID) + answers, err := r.dal.ResultRepo.GetResultAnswers(ctx.Context(), resultID) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString("failed to get result answers") } - sortedAnswers, err := s.dal.QuestionRepo.ForSortingResults(ctx.Context(), answers) + sortedAnswers, err := r.dal.QuestionRepo.ForSortingResults(ctx.Context(), answers) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString("failed sort result answers") } diff --git a/internal/controllers/http_controllers/result/route.go b/internal/controllers/http_controllers/result/route.go new file mode 100644 index 0000000..a70af9d --- /dev/null +++ b/internal/controllers/http_controllers/result/route.go @@ -0,0 +1,15 @@ +package result + +import "github.com/gofiber/fiber/v2" + +func (r *Result) Register(router fiber.Router) { + router.Post("/results/getResults/:quizId", r.GetResultsByQuizID) + router.Delete("/results/delete/:resultId", r.DelResultByID) + router.Patch("/result/seen", r.SetStatus) + router.Post("/results/:quizID/export", r.ExportResultsToCSV) + router.Get("/result/:resultID", r.GetResultAnswers) +} + +func (r *Result) Name() string { + return "" +} diff --git a/internal/controllers/http_controllers/statistic/route.go b/internal/controllers/http_controllers/statistic/route.go new file mode 100644 index 0000000..ce7d661 --- /dev/null +++ b/internal/controllers/http_controllers/statistic/route.go @@ -0,0 +1,15 @@ +package statistic + +import "github.com/gofiber/fiber/v2" + +func (r *Statistic) Register(router fiber.Router) { + router.Post("/statistic/:quizID/devices", r.GetDeviceStatistics) + router.Post("/statistic/:quizID/general", r.GetGeneralStatistics) + router.Post("/statistic/:quizID/questions", r.GetQuestionsStatistics) + router.Post("/statistic", r.AllServiceStatistics) + router.Get("/statistics/:quizID/pipelines", r.GetPipelinesStatistics) +} + +func (r *Statistic) Name() string { + return "" +} diff --git a/service/statistic_svc.go b/internal/controllers/http_controllers/statistic/statistic.go similarity index 70% rename from service/statistic_svc.go rename to internal/controllers/http_controllers/statistic/statistic.go index 2a3e300..59de503 100644 --- a/service/statistic_svc.go +++ b/internal/controllers/http_controllers/statistic/statistic.go @@ -1,17 +1,35 @@ -package service +package statistic import ( - "github.com/gofiber/fiber/v2" + "gitea.pena/SQuiz/common/dal" "gitea.pena/SQuiz/common/repository/statistics" + "github.com/gofiber/fiber/v2" "strconv" ) +type Deps struct { + DAL *dal.DAL + ChDAL *dal.ClickHouseDAL +} + +type Statistic struct { + dal *dal.DAL + chDAL *dal.ClickHouseDAL +} + +func NewStatisticController(deps Deps) *Statistic { + return &Statistic{ + dal: deps.DAL, + chDAL: deps.ChDAL, + } +} + type DeviceStatReq struct { From uint64 // временные границы выбора статистики To uint64 } -func (s *Service) GetDeviceStatistics(ctx *fiber.Ctx) error { +func (r *Statistic) GetDeviceStatistics(ctx *fiber.Ctx) error { quizIDStr := ctx.Params("quizID") quizID, err := strconv.ParseInt(quizIDStr, 10, 64) @@ -24,7 +42,7 @@ func (s *Service) GetDeviceStatistics(ctx *fiber.Ctx) 
error { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") } - deviceStats, err := s.dal.StatisticsRepo.GetDeviceStatistics(ctx.Context(), statistics.DeviceStatReq{ + deviceStats, err := r.dal.StatisticsRepo.GetDeviceStatistics(ctx.Context(), statistics.DeviceStatReq{ QuizId: quizID, From: req.From, To: req.To, @@ -40,7 +58,7 @@ type GeneralStatsResp struct { Open, Result, AvTime, Conversion map[uint64]uint64 } -func (s *Service) GetGeneralStatistics(ctx *fiber.Ctx) error { +func (r *Statistic) GetGeneralStatistics(ctx *fiber.Ctx) error { quizIDStr := ctx.Params("quizID") quizID, err := strconv.ParseInt(quizIDStr, 10, 64) if err != nil { @@ -52,7 +70,7 @@ func (s *Service) GetGeneralStatistics(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") } - generalStats, err := s.dal.StatisticsRepo.GetGeneralStatistics(ctx.Context(), statistics.DeviceStatReq{ + generalStats, err := r.dal.StatisticsRepo.GetGeneralStatistics(ctx.Context(), statistics.DeviceStatReq{ QuizId: quizID, From: req.From, To: req.To, @@ -64,7 +82,7 @@ func (s *Service) GetGeneralStatistics(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusOK).JSON(generalStats) } -func (s *Service) GetQuestionsStatistics(ctx *fiber.Ctx) error { +func (r *Statistic) GetQuestionsStatistics(ctx *fiber.Ctx) error { quizIDStr := ctx.Params("quizID") quizID, err := strconv.ParseInt(quizIDStr, 0, 64) if err != nil { @@ -73,10 +91,10 @@ func (s *Service) GetQuestionsStatistics(ctx *fiber.Ctx) error { var req DeviceStatReq if err := ctx.BodyParser(&req); err != nil { - ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") } - questionsStats, err := s.dal.StatisticsRepo.GetQuestionsStatistics(ctx.Context(), statistics.DeviceStatReq{ + questionsStats, err := r.dal.StatisticsRepo.GetQuestionsStatistics(ctx.Context(), statistics.DeviceStatReq{ QuizId: quizID, From: req.From, To: req.To, @@ -92,13 +110,13 @@ type StatisticReq struct { From, To uint64 // временные границы выбора статистики } -func (s *Service) AllServiceStatistics(ctx *fiber.Ctx) error { +func (r *Statistic) AllServiceStatistics(ctx *fiber.Ctx) error { var req StatisticReq if err := ctx.BodyParser(&req); err != nil { - ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") } - allSvcStats, err := s.dal.StatisticsRepo.AllServiceStatistics(ctx.Context(), req.From, req.To) + allSvcStats, err := r.dal.StatisticsRepo.AllServiceStatistics(ctx.Context(), req.From, req.To) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } @@ -106,10 +124,10 @@ func (s *Service) AllServiceStatistics(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusOK).JSON(allSvcStats) } -func (s *Service) GetPipelinesStatistics(ctx *fiber.Ctx) error { +func (r *Statistic) GetPipelinesStatistics(ctx *fiber.Ctx) error { var req StatisticReq if err := ctx.BodyParser(&req); err != nil { - ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") } quizIDStr := ctx.Params("quizID") @@ -118,7 +136,7 @@ func (s *Service) GetPipelinesStatistics(ctx *fiber.Ctx) error { return ctx.Status(fiber.StatusBadRequest).SendString("Invalid quiz ID format") } - result, err := s.chDAL.StatisticClickRepo.GetPipelinesStatistics(ctx.Context(), quizID, 
req.From, req.To) + result, err := r.chDAL.StatisticClickRepo.GetPipelinesStatistics(ctx.Context(), quizID, req.From, req.To) if err != nil { return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) } diff --git a/internal/controllers/http_controllers/telegram/route.go b/internal/controllers/http_controllers/telegram/route.go new file mode 100644 index 0000000..811b963 --- /dev/null +++ b/internal/controllers/http_controllers/telegram/route.go @@ -0,0 +1,14 @@ +package telegram + +import "github.com/gofiber/fiber/v2" + +func (r *Telegram) Register(router fiber.Router) { + router.Get("/pool", r.GetPoolTgAccounts) + router.Post("/create", r.AddingTgAccount) + router.Delete("/:id", r.DeleteTgAccountByID) + router.Post("/setCode", r.SettingTgCode) +} + +func (r *Telegram) Name() string { + return "telegram" +} diff --git a/internal/controllers/http_controllers/telegram/telegram.go b/internal/controllers/http_controllers/telegram/telegram.go new file mode 100644 index 0000000..6a76069 --- /dev/null +++ b/internal/controllers/http_controllers/telegram/telegram.go @@ -0,0 +1,194 @@ +package telegram + +import ( + "errors" + "gitea.pena/SQuiz/common/dal" + "gitea.pena/SQuiz/common/pj_errors" + + //"fmt" + "github.com/gofiber/fiber/v2" + // "github.com/rs/xid" + //"path/filepath" + // "penahub.gitlab.yandexcloud.net/backend/quiz/common.git/model" + // "gitea.pena/SQuiz/core/clients/telegram" + // "penahub.gitlab.yandexcloud.net/backend/tdlib/client" + "strconv" +) + +type Deps struct { + DAL *dal.DAL + //TelegramClient *telegram.TelegramClient +} + +type Telegram struct { + dal *dal.DAL + //telegramClient *telegram.TelegramClient +} + +func NewTelegramController(deps Deps) *Telegram { + return &Telegram{ + dal: deps.DAL, + //telegramClient: deps.TelegramClient, + } +} + +type Message struct { + Type string `json:"type"` + Data string `json:"data"` +} + +func (r *Telegram) GetPoolTgAccounts(ctx *fiber.Ctx) error { + allAccounts, err := r.dal.TgRepo.GetAllTgAccounts(ctx.Context()) + if err != nil { + switch { + case errors.Is(err, pj_errors.ErrNotFound): + return ctx.Status(fiber.StatusNotFound).SendString("not found") + default: + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + } + return ctx.Status(fiber.StatusOK).JSON(allAccounts) +} + +func (r *Telegram) AddingTgAccount(ctx *fiber.Ctx) error { + // var req telegram.AuthTgUserReq + // if err := ctx.BodyParser(&req); err != nil { + // return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + // } + // if req.ApiID == 0 || req.ApiHash == "" || req.Password == "" || req.PhoneNumber == "" { + // return ctx.Status(fiber.StatusBadRequest).SendString("empty required fields") + // } + // allAccounts, err := s.dal.TgRepo.GetAllTgAccounts(ctx.Context()) + // if err != nil && !errors.Is(err, pj_errors.ErrNotFound) { + // return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + // } + // if !errors.Is(err, pj_errors.ErrNotFound) { + // for _, account := range allAccounts { + // if account.ApiID == req.ApiID && account.ApiHash == req.ApiHash && account.Status == model.ActiveTg { + // return ctx.Status(fiber.StatusConflict).SendString("this account already exist and active") + // } + // } + // } + // authorizer := client.ClientAuthorizerr() + // authorizer.TdlibParameters <- &client.SetTdlibParametersRequest{ + // UseTestDc: false, + // DatabaseDirectory: filepath.Join(".tdlib", "database"), + // FilesDirectory: filepath.Join(".tdlib", "files"), + // UseFileDatabase: 
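Most of the tdlib authorization flow in this controller is commented out in this revision; the endpoints that actually do work are GET /pool and DELETE /:id, mounted under the "telegram" group by route.go above. A hedged example of listing the account pool from another service, assuming the default CLIENT_HTTP_URL port and that middleware.JWTAuth() accepts a standard bearer token (both assumptions, neither is defined in this hunk):

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Base URL and token are placeholders; the route itself is GET /telegram/pool.
	req, err := http.NewRequest(http.MethodGet, "http://localhost:1488/telegram/pool", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", "Bearer <jwt>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// 200 with the pool of tg accounts, 404 when the pool is empty, 500 otherwise
	fmt.Println(resp.Status, string(body))
}
```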
true, + // UseChatInfoDatabase: true, + // UseMessageDatabase: true, + // UseSecretChats: true, + // ApiId: req.ApiID, + // ApiHash: req.ApiHash, + // SystemLanguageCode: "en", + // DeviceModel: "Server", + // SystemVersion: "1.0.0", + // ApplicationVersion: "1.0.0", + // } + // + // _, err = client.SetLogVerbosityLevel(&client.SetLogVerbosityLevelRequest{ + // NewVerbosityLevel: 1, + // }) + // if err != nil { + // return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + // } + // + // var tdlibClient *client.Client + // // the authorization flow completes in another controller + // var goErr error + // // todo: need to think through terminating the goroutine if the user never submits the code + // go func() { + // tdlibClient, goErr = client.NewClient(authorizer) + // if goErr != nil { + // fmt.Println("new client failed", err) + // return + // } + // s.telegramClient.SaveTgAccount(req.ApiID, req.ApiHash, tdlibClient) + // fmt.Println("i am down") + // }() + // if goErr != nil { + // return ctx.Status(fiber.StatusInternalServerError).SendString(goErr.Error()) + // } + // + // for { + // state, ok := <-authorizer.State + // if !ok { + // return ctx.Status(fiber.StatusOK).SendString("state chan is close auth maybe ok") + // } + // fmt.Println("current state:", state) + // switch state.AuthorizationStateType() { + // case client.TypeAuthorizationStateWaitPhoneNumber: + // authorizer.PhoneNumber <- req.PhoneNumber + // case client.TypeAuthorizationStateWaitCode: + // signature := xid.New() + // s.telegramClient.AddedToMap(telegram.WaitingClient{ + // PreviousReq: req, + // Authorizer: authorizer, + // }, signature.String()) + // return ctx.Status(fiber.StatusOK).JSON(fiber.Map{"signature": signature.String()}) + // + // case client.TypeAuthorizationStateLoggingOut, client.TypeAuthorizationStateClosing, client.TypeAuthorizationStateClosed: + // return ctx.Status(fiber.StatusForbidden).SendString(fmt.Sprintf("auth failed, last state is %s", state)) + // } + // } return nil +} + +func (r *Telegram) SettingTgCode(ctx *fiber.Ctx) error { + var req struct { + Code string `json:"code"` + Signature string `json:"signature"` + } + if err := ctx.BodyParser(&req); err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") + } + + if req.Code == "" || req.Signature == "" { + return ctx.Status(fiber.StatusBadRequest).SendString("empty required fields") + } + + // data, ok := s.telegramClient.GetFromMap(req.Signature) + // if !ok { + // return ctx.Status(fiber.StatusBadRequest).SendString("Invalid id, don't have data") + // } + // data.Authorizer.Code <- req.Code + // for { + // state, ok := <-data.Authorizer.State + // if !ok { + // return ctx.Status(fiber.StatusNoContent).SendString("state chan is close auth maybe ok") + // } + // fmt.Println("current state:", state) + // } return nil + // switch state.AuthorizationStateType() { + // case client.TypeAuthorizationStateReady: + // id, err := s.dal.TgRepo.CreateTgAccount(ctx.Context(), model.TgAccount{ + // ApiID: data.PreviousReq.ApiID, + // ApiHash: data.PreviousReq.ApiHash, + // PhoneNumber: data.PreviousReq.PhoneNumber, + // Status: model.ActiveTg, + // Password: data.PreviousReq.Password, + // }) + // if err != nil { + // return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + // } + // return ctx.Status(fiber.StatusOK).JSON(fiber.Map{"id": id}) + // case client.TypeAuthorizationStateWaitPassword: + // data.Authorizer.Password <- data.PreviousReq.Password + // case client.TypeAuthorizationStateLoggingOut, 
client.TypeAuthorizationStateClosing, client.TypeAuthorizationStateClosed: + // return ctx.Status(fiber.StatusForbidden).SendString(fmt.Sprintf("auth failed, last state is %s", state)) + // } + // } +} + +func (r *Telegram) DeleteTgAccountByID(ctx *fiber.Ctx) error { + id, err := strconv.ParseInt(ctx.Params("id"), 10, 64) + if err != nil { + return ctx.Status(fiber.StatusBadRequest).SendString("invalid id format") + } + err = r.dal.TgRepo.SoftDeleteTgAccount(ctx.Context(), id) + if err != nil { + return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) + } + return ctx.SendStatus(fiber.StatusOK) +} diff --git a/rpc_service/mail_notify.go b/internal/controllers/rpc_controllers/mail_notify.go similarity index 97% rename from rpc_service/mail_notify.go rename to internal/controllers/rpc_controllers/mail_notify.go index 8a76e2f..b92d826 100644 --- a/rpc_service/mail_notify.go +++ b/internal/controllers/rpc_controllers/mail_notify.go @@ -1,4 +1,4 @@ -package rpc_service +package rpc_controllers import ( "context" diff --git a/internal/initialize/clients.go b/internal/initialize/clients.go new file mode 100644 index 0000000..3450bd3 --- /dev/null +++ b/internal/initialize/clients.go @@ -0,0 +1,24 @@ +package initialize + +import ( + "context" + "gitea.pena/SQuiz/common/dal" + "gitea.pena/SQuiz/core/internal/clients/auth" +) + +type Clients struct { + AuthClient *auth.AuthClient + //TgClient *telegram.TelegramClient +} + +func NewClients(ctx context.Context, cfg Config, pgDAL *dal.DAL) (*Clients, error) { + //tgClient, err := telegram.NewTelegramClient(ctx, pgDAL) + //if err != nil { + // return nil, err + //} + + return &Clients{ + //TgClient: tgClient, + AuthClient: auth.NewAuthClient(cfg.AuthMicroserviceURL), + }, nil +} diff --git a/internal/initialize/config.go b/internal/initialize/config.go new file mode 100644 index 0000000..e960b3e --- /dev/null +++ b/internal/initialize/config.go @@ -0,0 +1,43 @@ +package initialize + +import ( + "github.com/caarlos0/env/v8" + "github.com/joho/godotenv" + "log" +) + +type Config struct { + LoggerProdMode bool `env:"IS_PROD_LOG" envDefault:"false"` + IsProd bool `env:"IS_PROD" envDefault:"false"` + ClientHttpURL string `env:"CLIENT_HTTP_URL" envDefault:"0.0.0.0:1488"` + GrpcURL string `env:"GRPC_URL" envDefault:"localhost:9000"` + PostgresURL string `env:"POSTGRES_URL" envDefault:"host=localhost port=35432 user=squiz password=Redalert2 dbname=squiz sslmode=disable"` + ClickhouseURL string `env:"CLICKHOUSE_URL" envDefault:"tcp://10.8.0.15:9000/default?sslmode=disable"` + HubadminMicroserviceURL string `env:"HUBADMIN_MICROSERVICE_URL" envDefault:"http://localhost:8001/"` + AuthMicroserviceURL string `env:"AUTH_MICROSERVICE_URL" envDefault:"http://localhost:8000/"` + KafkaBrokers string `env:"KAFKA_BROKERS" envDefault:"localhost:9092"` + KafkaGroup string `env:"KAFKA_GROUP" envDefault:"mailnotifier"` + KafkaTopicNotifyer string `env:"KAFKA_TOPIC" envDefault:"test-topic"` + TrashLogHost string `env:"TRASH_LOG_HOST" envDefault:"localhost:7113"` + S3Prefix string `env:"S3_PREFIX"` + RedisHost string `env:"REDIS_HOST" envDefault:"localhost:6379"` + RedisPassword string `env:"REDIS_PASSWORD" envDefault:"admin"` + RedisDB uint64 `env:"REDIS_DB" envDefault:"2"` + + CrtFile string `env:"CRT" envDefault:"server.crt"` + KeyFile string `env:"KEY" envDefault:"server.key"` + ServiceName string `env:"SERVICE_NAME" envDefault:"squiz"` +} + +func LoadConfig() (*Config, error) { + if err := godotenv.Load(); err != nil { + log.Print("No .env file found") + 
} + var config Config + if err := env.Parse(&config); err != nil { + return nil, err + } + return &config, nil +} + +const ModuleLogger = "core" diff --git a/internal/initialize/controllers.go b/internal/initialize/controllers.go new file mode 100644 index 0000000..9945ae1 --- /dev/null +++ b/internal/initialize/controllers.go @@ -0,0 +1,73 @@ +package initialize + +import ( + "github.com/go-redis/redis/v8" + "gitea.pena/SQuiz/core/internal/brokers" + "gitea.pena/SQuiz/core/internal/controllers/http_controllers/account" + "gitea.pena/SQuiz/core/internal/controllers/http_controllers/question" + "gitea.pena/SQuiz/core/internal/controllers/http_controllers/quiz" + "gitea.pena/SQuiz/core/internal/controllers/http_controllers/result" + "gitea.pena/SQuiz/core/internal/controllers/http_controllers/statistic" + "gitea.pena/SQuiz/core/internal/controllers/http_controllers/telegram" + "gitea.pena/SQuiz/core/internal/controllers/rpc_controllers" +) + +type ControllerDeps struct { + Clients *Clients + DALs *DALs + Config Config + Producer *brokers.Producer + RedisClient *redis.Client +} + +type Controller struct { + GRpcControllers GRpcControllers + HttpControllers HttpControllers +} + +type GRpcControllers struct { + MailNotify *rpc_controllers.MailNotify +} +type HttpControllers struct { + Account *account.Account + Question *question.Question + Quiz *quiz.Quiz + Result *result.Result + Statistic *statistic.Statistic + Telegram *telegram.Telegram +} + +func NewControllers(deps ControllerDeps) *Controller { + return &Controller{ + GRpcControllers: GRpcControllers{ + MailNotify: rpc_controllers.NewMailNotify(deps.DALs.PgDAL), + }, + HttpControllers: HttpControllers{ + Account: account.NewAccountController(account.Deps{ + Dal: deps.DALs.PgDAL, + AuthClient: deps.Clients.AuthClient, + Producer: deps.Producer, + ServiceName: deps.Config.ServiceName, + RedisClient: deps.RedisClient, + }), + Question: question.NewQuestionController(question.Deps{ + DAL: deps.DALs.PgDAL, + }), + Quiz: quiz.NewQuizController(quiz.Deps{ + DAL: deps.DALs.PgDAL, + }), + Result: result.NewResultController(result.Deps{ + DAL: deps.DALs.PgDAL, + S3Prefix: deps.Config.S3Prefix, + }), + Statistic: statistic.NewStatisticController(statistic.Deps{ + DAL: deps.DALs.PgDAL, + ChDAL: deps.DALs.ChDAL, + }), + Telegram: telegram.NewTelegramController(telegram.Deps{ + DAL: deps.DALs.PgDAL, + //TelegramClient: deps.Clients.TgClient, + }), + }, + } +} diff --git a/internal/initialize/dals.go b/internal/initialize/dals.go new file mode 100644 index 0000000..c1c342d --- /dev/null +++ b/internal/initialize/dals.go @@ -0,0 +1,28 @@ +package initialize + +import ( + "context" + "gitea.pena/SQuiz/common/dal" +) + +type DALs struct { + PgDAL *dal.DAL + ChDAL *dal.ClickHouseDAL +} + +func NewDALs(ctx context.Context, cfg Config) (*DALs, error) { + pgDal, err := dal.New(ctx, cfg.PostgresURL, nil) + if err != nil { + return nil, err + } + + chDal, err := dal.NewClickHouseDAL(ctx, cfg.ClickhouseURL) + if err != nil { + return nil, err + } + + return &DALs{ + PgDAL: pgDal, + ChDAL: chDal, + }, nil +} diff --git a/initialize/kafka.go b/internal/initialize/kafka.go similarity index 100% rename from initialize/kafka.go rename to internal/initialize/kafka.go diff --git a/internal/initialize/redis.go b/internal/initialize/redis.go new file mode 100644 index 0000000..eb14f05 --- /dev/null +++ b/internal/initialize/redis.go @@ -0,0 +1,21 @@ +package initialize + +import ( + "context" + "github.com/go-redis/redis/v8" +) + +func Redis(ctx context.Context, cfg 
Config) (*redis.Client, error) { + rdb := redis.NewClient(&redis.Options{ + Addr: cfg.RedisHost, + Password: cfg.RedisPassword, + DB: int(cfg.RedisDB), + }) + + status := rdb.Ping(ctx) + if err := status.Err(); err != nil { + return nil, err + } + + return rdb, nil +} diff --git a/models/hlog_events.go b/internal/models/hlog_events.go similarity index 100% rename from models/hlog_events.go rename to internal/models/hlog_events.go diff --git a/proto/notifyer/notifyer.pb.go b/internal/proto/notifyer/notifyer.pb.go similarity index 100% rename from proto/notifyer/notifyer.pb.go rename to internal/proto/notifyer/notifyer.pb.go diff --git a/proto/notifyer/notifyer_grpc.pb.go b/internal/proto/notifyer/notifyer_grpc.pb.go similarity index 100% rename from proto/notifyer/notifyer_grpc.pb.go rename to internal/proto/notifyer/notifyer_grpc.pb.go diff --git a/server/grpc.go b/internal/server/grpc/rpc_server.go similarity index 83% rename from server/grpc.go rename to internal/server/grpc/rpc_server.go index 63caa6e..001139e 100644 --- a/server/grpc.go +++ b/internal/server/grpc/rpc_server.go @@ -2,15 +2,14 @@ package server import ( "context" - "fmt" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" grpc_zap "github.com/grpc-ecosystem/go-grpc-middleware/logging/zap" grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" "go.uber.org/zap" "google.golang.org/grpc" "net" - "gitea.pena/SQuiz/core/initialize" - "gitea.pena/SQuiz/core/proto/notifyer" + "gitea.pena/SQuiz/core/internal/initialize" + "gitea.pena/SQuiz/core/internal/proto/notifyer" "time" ) @@ -36,14 +35,7 @@ func NewGRPC(logger *zap.Logger) (*GRPC, error) { }, nil } -type DepsGrpcRun struct { - Host string - Port string -} - -func (g *GRPC) Run(config DepsGrpcRun) { - connectionString := fmt.Sprintf("%s:%s", config.Host, config.Port) - +func (g *GRPC) Run(connectionString string) { g.logger.Info("Starting GRPC Server", zap.String("host", connectionString)) if err := g.listen(connectionString); err != nil && err != grpc.ErrServerStopped { @@ -58,7 +50,7 @@ func (g *GRPC) Stop(_ context.Context) error { return nil } -func (g *GRPC) Register(reg *initialize.RpcRegister) *GRPC { +func (g *GRPC) Register(reg initialize.GRpcControllers) *GRPC { notifyer.RegisterQuizServiceServer(g.grpc, reg.MailNotify) // another return g diff --git a/internal/server/http/http_server.go b/internal/server/http/http_server.go new file mode 100644 index 0000000..121c9b1 --- /dev/null +++ b/internal/server/http/http_server.go @@ -0,0 +1,73 @@ +package http + +import ( + "context" + "fmt" + "gitea.pena/PenaSide/common/log_mw" + "gitea.pena/PenaSide/hlog" + "gitea.pena/SQuiz/common/middleware" + "github.com/gofiber/fiber/v2" + "go.uber.org/zap" +) + +type ServerConfig struct { + Logger *zap.Logger + Controllers []Controller + Hlogger hlog.Logger +} + +type Server struct { + Logger *zap.Logger + Controllers []Controller + app *fiber.App +} + +func NewServer(config ServerConfig) *Server { + app := fiber.New() + app.Use(middleware.JWTAuth()) + app.Use(log_mw.ContextLogger(config.Hlogger)) + //app.Get("/liveness", healthchecks.Liveness) + //app.Get("/readiness", healthchecks.Readiness(&workerErr)) //todo parametrized readiness. 
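cmd/main.go is not included in this hunk, but the new initialize package implies a composition order: load the config, open the Postgres and ClickHouse DALs, connect Redis, build the auth client, then hand everything to NewControllers. A sketch of that wiring, assuming main simply chains the constructors shown above (the Kafka producer constructor is outside this hunk, so it is left as a placeholder):

```go
package main

import (
	"context"
	"log"

	"gitea.pena/SQuiz/core/internal/brokers"
	"gitea.pena/SQuiz/core/internal/initialize"
)

func main() {
	ctx := context.Background()

	cfg, err := initialize.LoadConfig()
	if err != nil {
		log.Fatal(err)
	}

	dals, err := initialize.NewDALs(ctx, *cfg)
	if err != nil {
		log.Fatal(err)
	}

	rdb, err := initialize.Redis(ctx, *cfg)
	if err != nil {
		log.Fatal(err)
	}

	clients, err := initialize.NewClients(ctx, *cfg, dals.PgDAL)
	if err != nil {
		log.Fatal(err)
	}

	// built from cfg.KafkaBrokers / cfg.KafkaTopicNotifyer; the constructor is not shown in this hunk
	var producer *brokers.Producer

	controllers := initialize.NewControllers(initialize.ControllerDeps{
		Clients:     clients,
		DALs:        dals,
		Config:      *cfg,
		Producer:    producer,
		RedisClient: rdb,
	})
	_ = controllers // handed on to the HTTP and gRPC servers below
}
```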
should discuss ready reason + s := &Server{ + Logger: config.Logger, + Controllers: config.Controllers, + app: app, + } + + s.registerRoutes() + + return s +} + +func (s *Server) Start(addr string) error { + if err := s.app.Listen(addr); err != nil { + s.Logger.Error("Failed to start server", zap.Error(err)) + return err + } + return nil +} + +func (s *Server) Shutdown(ctx context.Context) error { + return s.app.Shutdown() +} + +func (s *Server) registerRoutes() { + for _, c := range s.Controllers { + router := s.app.Group(c.Name()) + c.Register(router) + } +} + +type Controller interface { + Register(router fiber.Router) + Name() string +} + +func (s *Server) ListRoutes() { + fmt.Println("Registered routes:") + for _, stack := range s.app.Stack() { + for _, route := range stack { + fmt.Printf("%s %s\n", route.Method, route.Path) + } + } +} diff --git a/tools/publishPriv.go b/internal/tools/publishPriv.go similarity index 100% rename from tools/publishPriv.go rename to internal/tools/publishPriv.go diff --git a/internal/tools/tools.go b/internal/tools/tools.go new file mode 100644 index 0000000..7a1ef43 --- /dev/null +++ b/internal/tools/tools.go @@ -0,0 +1,434 @@ +package tools + +import ( + "encoding/json" + "fmt" + "gitea.pena/SQuiz/common/model" + "github.com/xuri/excelize/v2" + _ "image/gif" + _ "image/jpeg" + _ "image/png" + "io" + "io/ioutil" + "net/http" + "net/url" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + "sync" +) + +const ( + bucketImages = "squizimages" + bucketFonts = "squizfonts" + bucketScripts = "squizscript" + bucketStyle = "squizstyle" + bucketAnswers = "squizanswer" +) + +func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []model.Answer, s3Prefix string) error { + file := excelize.NewFile() + sheet := "Sheet1" + + _, err := file.NewSheet(sheet) + if err != nil { + return err + } + + sort.Slice(questions, func(i, j int) bool { + return questions[i].Page < questions[j].Page + }) + + headers, mapQueRes := prepareHeaders(questions) + headers = append([]string{"Дата и время"}, headers...) 
+ + for col, header := range headers { + cell := ToAlphaString(col+1) + "1" + if err := file.SetCellValue(sheet, cell, header); err != nil { + return err + } + } + + sort.Slice(answers, func(i, j int) bool { + return answers[i].QuestionId < answers[j].QuestionId + }) + standart, results := categorizeAnswers(answers) + + var wg sync.WaitGroup + row := 2 + for session := range results { + wg.Add(1) + go func(session string, response []model.Answer, row int) { + defer wg.Done() + processSession(file, sheet, session, s3Prefix, response, results, questions, mapQueRes, headers, row) + }(session, standart[session], row) + row++ + } + wg.Wait() + + return file.Write(buffer) +} + +func prepareHeaders(questions []model.Question) ([]string, map[uint64]string) { + headers := []string{"Данные респондента"} + mapQueRes := make(map[uint64]string) + + for _, q := range questions { + if !q.Deleted { + if q.Type == model.TypeResult { + mapQueRes[q.Id] = q.Title + "\n" + q.Description + } else { + headers = append(headers, q.Title) + } + } + } + headers = append(headers, "Результат") + return headers, mapQueRes +} + +func categorizeAnswers(answers []model.Answer) (map[string][]model.Answer, map[string]model.Answer) { + standart := make(map[string][]model.Answer) + results := make(map[string]model.Answer) + + for _, answer := range answers { + if answer.Result { + results[answer.Session] = answer + } else { + standart[answer.Session] = append(standart[answer.Session], answer) + } + } + return standart, results +} + +func processSession(file *excelize.File, sheet, session, s3Prefix string, response []model.Answer, results map[string]model.Answer, questions []model.Question, mapQueRes map[uint64]string, headers []string, row int) { + defer func() { + if r := recover(); r != nil { + fmt.Println("Recovered from panic:", r) + } + }() + + if err := file.SetCellValue(sheet, "A"+strconv.Itoa(row), results[session].CreatedAt.Format("2006-01-02 15:04:05")); err != nil { + fmt.Println(err.Error()) + } + + if err := file.SetCellValue(sheet, "B"+strconv.Itoa(row), results[session].Content); err != nil { + fmt.Println(err.Error()) + } + + count := 3 + for _, q := range questions { + if !q.Deleted && q.Type != model.TypeResult { + cell := ToAlphaString(count) + strconv.Itoa(row) + index := binarySearch(response, q.Id) + if index != -1 { + handleAnswer(file, sheet, cell, s3Prefix, response[index], q, count, row) + } else { + if err := file.SetCellValue(sheet, cell, "-"); err != nil { + fmt.Println(err.Error()) + } + } + count++ + } + } + cell := ToAlphaString(len(headers)) + strconv.Itoa(row) + if err := file.SetCellValue(sheet, cell, mapQueRes[results[session].QuestionId]); err != nil { + fmt.Println(err.Error()) + } +} + +func handleAnswer(file *excelize.File, sheet, cell, s3Prefix string, answer model.Answer, question model.Question, count, row int) { + tipe := FileSearch(answer.Content) + noAccept := make(map[string]struct{}) + todoMap := make(map[string]string) + + if tipe != "Text" && (question.Type == model.TypeImages || question.Type == model.TypeVarImages) { + handleImage(file, sheet, cell, answer.Content, count, row, noAccept, todoMap, question.Title) + } else if question.Type == model.TypeFile { + handleFile(file, sheet, cell, answer.Content, s3Prefix, noAccept) + } else { + todoMap[answer.Content] = cell + } + + for cnt, cel := range todoMap { + if _, ok := noAccept[cnt]; !ok { + cntArr := strings.Split(cnt, "`,`") + resultCnt := cnt + if len(cntArr) > 1 { + resultCnt = strings.Join(cntArr, "\n") + } + + if 
len(resultCnt) > 1 && resultCnt[0] == '`' && resultCnt[len(resultCnt)-1] == '`' { + resultCnt = resultCnt[1 : len(resultCnt)-1] + } + + if len(resultCnt) > 1 && resultCnt[0] == '`' { + resultCnt = resultCnt[1:] + } + + if len(resultCnt) > 1 && resultCnt[len(resultCnt)-1] == '`' { + resultCnt = resultCnt[:len(resultCnt)-1] + } + + if err := file.SetCellValue(sheet, cel, resultCnt); err != nil { + fmt.Println(err.Error()) + } + } + } +} + +func handleImage(file *excelize.File, sheet, cell, content string, count, row int, noAccept map[string]struct{}, todoMap map[string]string, questionTitle string) { + multiImgArr := strings.Split(content, "`,`") + if len(multiImgArr) > 1 { + var descriptions []string + mediaSheet := "Media" + flag, err := file.GetSheetIndex(mediaSheet) + if err != nil { + fmt.Println(err.Error()) + } + if flag == -1 { + _, _ = file.NewSheet(mediaSheet) + err = file.SetCellValue(mediaSheet, "A1", "Вопрос") + if err != nil { + fmt.Println(err.Error()) + } + } + + mediaRow := row + for i, imgContent := range multiImgArr { + if i == 0 && len(imgContent) > 1 && imgContent[0] == '`' { + imgContent = imgContent[1:] + } + + if i == len(multiImgArr)-1 && len(imgContent) > 1 && imgContent[len(imgContent)-1] == '`' { + imgContent = imgContent[:len(imgContent)-1] + } + + var res model.ImageContent + err := json.Unmarshal([]byte(imgContent), &res) + if err != nil { + res.Image = imgContent + } + + // check for an empty description; if it is empty, label the entry as answer option number i + if res.Description != "" { + descriptions = append(descriptions, res.Description) + } else { + descriptions = append(descriptions, fmt.Sprintf("Вариант ответа №%d", i+1)) + } + + urle := ExtractImageURL(res.Image) + urlData := strings.Split(urle, " ") + if len(urlData) == 1 { + u, err := url.Parse(urle) + if err == nil && u.Scheme != "" && u.Host != "" { + picture, err := downloadImage(urle) + if err != nil { + fmt.Println(err.Error()) + continue + } + err = file.SetCellValue(mediaSheet, "A"+strconv.Itoa(mediaRow), questionTitle) + if err != nil { + fmt.Println(err.Error()) + } + + col := ToAlphaString(i + 2) + err = file.SetColWidth(mediaSheet, col, col, 50) + if err != nil { + fmt.Println(err.Error()) + } + err = file.SetRowHeight(mediaSheet, mediaRow, 150) + if err != nil { + fmt.Println(err.Error()) + } + if err := file.AddPictureFromBytes(mediaSheet, col+strconv.Itoa(mediaRow), picture); err != nil { + fmt.Println(err.Error()) + } + noAccept[content] = struct{}{} + } else { + todoMap[content] = cell + } + } else { + todoMap[imgContent] = cell + } + + descriptionsStr := strings.Join(descriptions, "\n") + linkText := fmt.Sprintf("%s\n Перейти в приложение %s!A%d", descriptionsStr, mediaSheet, mediaRow) + + if err := file.SetCellValue(sheet, cell, linkText); err != nil { + fmt.Println(err.Error()) + } + //if err := file.SetCellHyperLink(sheet, cell, fmt.Sprintf("%s!A%d", mediaSheet, mediaRow), "Location", excelize.HyperlinkOpts{ + // Display: &linkText, + //}); err != nil { + // fmt.Println(err.Error()) + //} + } + } else { + if len(content) > 1 && content[0] == '`' && content[len(content)-1] == '`' { + content = content[1 : len(content)-1] + } + var res model.ImageContent + err := json.Unmarshal([]byte(content), &res) + if err != nil { + res.Image = content + } + urle := ExtractImageURL(res.Image) + urlData := strings.Split(urle, " ") + if len(urlData) == 1 { + u, err := url.Parse(urle) + if err == nil && u.Scheme != "" && u.Host != "" { + picture, err := downloadImage(urle) + if err != nil { +
fmt.Println(err.Error()) + } + err = file.SetColWidth(sheet, ToAlphaString(count), ToAlphaString(count), 50) + if err != nil { + fmt.Println(err.Error()) + } + err = file.SetRowHeight(sheet, row, 150) + if err != nil { + fmt.Println(err.Error()) + } + if err := file.AddPictureFromBytes(sheet, cell, picture); err != nil { + fmt.Println(err.Error()) + } + noAccept[content] = struct{}{} + } else { + todoMap[content] = cell + } + } else { + todoMap[content] = cell + } + } +} +func handleFile(file *excelize.File, sheet, cell, content, s3Prefix string, noAccept map[string]struct{}) { + urle := content + if urle != "" && !strings.HasPrefix(urle, "https") { + urle = s3Prefix + urle + } + + fmt.Println("ORRRRR", urle, s3Prefix) + display, tooltip := urle, urle + + if err := file.SetCellValue(sheet, cell, urle); err != nil { + fmt.Println(err.Error()) + } + if err := file.SetCellHyperLink(sheet, cell, urle, "External", excelize.HyperlinkOpts{ + Display: &display, + Tooltip: &tooltip, + }); err != nil { + fmt.Println(err.Error()) + } + noAccept[content] = struct{}{} +} + +func binarySearch(answers []model.Answer, questionID uint64) int { + left := 0 + right := len(answers) - 1 + for left <= right { + mid := left + (right-left)/2 + if answers[mid].QuestionId == questionID { + return mid + } else if answers[mid].QuestionId < questionID { + left = mid + 1 + } else { + right = mid - 1 + } + } + return -1 +} + +func FileSearch(content string) string { + if strings.Contains(content, bucketImages) { + return FileType(content) + } else if strings.Contains(content, bucketFonts) { + return FileType(content) + } else if strings.Contains(content, bucketScripts) { + return FileType(content) + } else if strings.Contains(content, bucketStyle) { + return FileType(content) + } else if strings.Contains(content, bucketAnswers) { + return FileType(content) + } + + return "Text" +} + +func FileType(filename string) string { + parts := strings.Split(filename, ".") + extension := parts[len(parts)-1] + + switch extension { + case "png", "jpg", "jpeg", "gif", "bmp", "svg", "webp", "tiff", "ico": + return "Image" + default: + return "File" + } +} + +func downloadImage(url string) (*excelize.Picture, error) { + resp, err := http.Get(url) + if err != nil { + return nil, err + } + + defer func() { + if derr := resp.Body.Close(); derr != nil { + fmt.Printf("error close response body in downloadImage: %v", derr) + } + }() + + imgData, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + ext := filepath.Ext(url) + if ext == "" { + contentType := resp.Header.Get("Content-Type") + switch { + case strings.HasPrefix(contentType, "image/jpeg"): + ext = ".jpg" + case strings.HasPrefix(contentType, "image/png"): + ext = ".png" + default: + ext = ".png" + } + } + + pic := &excelize.Picture{ + Extension: ext, + File: imgData, + Format: &excelize.GraphicOptions{ + AutoFit: true, + Positioning: "oneCell", + }, + } + return pic, nil +} + +func ToAlphaString(col int) string { + var result string + for col > 0 { + col-- + result = string(rune('A'+col%26)) + result + col /= 26 + } + return result +} + +func ExtractImageURL(htmlContent string) string { + re := regexp.MustCompile(`(?:]*src="([^"]+)"[^>]*>)|(?:]*>.*?]*src="([^"]+)"[^>]*>.*?)|(?:]*>.*?]*>.*?]*src="([^"]+)"[^>]*>.*?.*?)|(?:]*\s+download[^>]*>([^<]+)<\/a>)`) + matches := re.FindAllStringSubmatch(htmlContent, -1) + + for _, match := range matches { + for i := 1; i < len(match); i++ { + if match[i] != "" { + return strings.TrimSpace(match[i]) + } + } + } + return 
htmlContent +} diff --git a/internal/workers/tg_worker.go b/internal/workers/tg_worker.go new file mode 100644 index 0000000..ceac311 --- /dev/null +++ b/internal/workers/tg_worker.go @@ -0,0 +1,107 @@ +package workers + +import ( + "context" + "gitea.pena/SQuiz/common/dal" + "github.com/go-redis/redis/v8" + "time" +) + +type Deps struct { + BotID int64 + Redis *redis.Client + Dal *dal.DAL + //TgClient *telegram.TelegramClient +} + +type TgListenerWorker struct { + botID int64 + redis *redis.Client + dal *dal.DAL + //tgClient *telegram.TelegramClient +} + +func NewTgListenerWC(deps Deps) *TgListenerWorker { + return &TgListenerWorker{ + botID: deps.BotID, + redis: deps.Redis, + dal: deps.Dal, + //tgClient: deps.TgClient, + } +} + +func (wc *TgListenerWorker) Start(ctx context.Context) { + ticker := time.NewTicker(10 * time.Second) //time.Minute + defer ticker.Stop() + + for { + select { + case <-ticker.C: + wc.processTasks(ctx) + case <-ctx.Done(): + return + } + } +} + +func (wc *TgListenerWorker) processTasks(ctx context.Context) { + //var cursor uint64 + //for { + // var keys []string + // var err error + // keys, cursor, err = wc.redis.Scan(ctx, cursor, "telegram_task:*", 0).Result() + // if err != nil { + // fmt.Println("Failed scan for telegram tasks:", err) + // break + // } + // + // for _, key := range keys { + // func() { + // taskBytes, err := wc.redis.GetDel(ctx, key).Result() + // if err == redis.Nil { + // return + // } else if err != nil { + // fmt.Println("Failed getdel telegram task:", err) + // return + // } + // // todo logging into tg with trashlog + // var aimErr error + // defer func() { + // if r := recover(); r != nil || aimErr != nil { + // fmt.Println("recovering from panic or error setting redis value:", r, aimErr) + // _ = wc.redis.Set(ctx, key, taskBytes, 0).Err() + // } + // }() + // + // var task model.TgRedisTask + // if err = json.Unmarshal([]byte(taskBytes), &task); err != nil { + // fmt.Println("Failed unmarshal telegram task:", err) + // return + // } + // + // var inviteLink string + // var chatID int64 + // inviteLink, chatID, aimErr = wc.tgClient.CreateChannel(task.Name, wc.botID) + // if aimErr != nil { + // fmt.Println("Failed create tg channel:", aimErr) + // return + // } + // + // _, aimErr = wc.dal.AccountRepo.PostLeadTarget(ctx, model.LeadTarget{ + // AccountID: task.AccountID, + // Type: model.LeadTargetTg, + // QuizID: task.QuizID, + // Target: strconv.Itoa(int(chatID)), + // InviteLink: inviteLink, + // }) + // if aimErr != nil { + // fmt.Println("Failed create lead target in db:", aimErr) + // return + // } + // }() + // } + // if cursor == 0 { + // break + // } + //} +} diff --git a/main.go b/main.go index c1b985b..e69de29 100644 --- a/main.go +++ b/main.go @@ -1,10 +0,0 @@ -package main - -import ( - "github.com/skeris/appInit" - "gitea.pena/SQuiz/core/app" -) - -func main() { - appInit.Initialize(app.New, app.Options{}) -} diff --git a/pkg/closer/closer.go b/pkg/closer/closer.go new file mode 100644 index 0000000..fdfbaf1 --- /dev/null +++ b/pkg/closer/closer.go @@ -0,0 +1,37 @@ +package closer + +import ( + "context" +) + +type Closer interface { + Close(ctx context.Context) error +} + +type CloserFunc func(ctx context.Context) error + +func (cf CloserFunc) Close(ctx context.Context) error { + return cf(ctx) +} + +type CloserGroup struct { + closers []Closer +} + +func NewCloserGroup() *CloserGroup { + return &CloserGroup{} +} + +func (cg *CloserGroup) Add(c Closer) { + cg.closers = append(cg.closers, c) +} + +func (cg *CloserGroup) 
Call(ctx context.Context) error { + var closeErr error + for i := len(cg.closers) - 1; i >= 0; i-- { + if err := cg.closers[i].Close(ctx); err != nil && closeErr == nil { + closeErr = err + } + } + return closeErr +} diff --git a/schema/000001_init.down.sql b/schema/000001_init.down.sql deleted file mode 100644 index 54966a0..0000000 --- a/schema/000001_init.down.sql +++ /dev/null @@ -1,24 +0,0 @@ --- Drop indexes -DROP INDEX IF EXISTS subquizes; -DROP INDEX IF EXISTS birthtime; -DROP INDEX IF EXISTS groups; -DROP INDEX IF EXISTS timeouted; -DROP INDEX IF EXISTS active ON quiz; -DROP INDEX IF EXISTS questiontype; -DROP INDEX IF EXISTS required; -DROP INDEX IF EXISTS relation; -DROP INDEX IF EXISTS active ON question; - --- Drop tables -DROP TABLE IF EXISTS privileges; -DROP TABLE IF EXISTS answer; -DROP TABLE IF EXISTS question; -DROP TABLE IF EXISTS quiz; -DROP TABLE IF EXISTS account; - --- Drop types -DO $$ -BEGIN -DROP TYPE IF EXISTS question_type; -DROP TYPE IF EXISTS quiz_status; -END$$; \ No newline at end of file diff --git a/schema/000001_init.up.sql b/schema/000001_init.up.sql deleted file mode 100644 index 9aefa67..0000000 --- a/schema/000001_init.up.sql +++ /dev/null @@ -1,120 +0,0 @@ --- Create types -DO $$ -BEGIN - IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'question_type') THEN -CREATE TYPE question_type AS ENUM ( - 'variant', - 'images', - 'varimg', - 'emoji', - 'text', - 'select', - 'date', - 'number', - 'file', - 'page', - 'rating' - ); -END IF; - - IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'quiz_status') THEN -CREATE TYPE quiz_status AS ENUM ( - 'draft', - 'template', - 'stop', - 'start', - 'timeout', - 'offlimit' - ); -END IF; - - CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; -END$$; - --- Create tables -CREATE TABLE IF NOT EXISTS account ( - id UUID PRIMARY KEY, - user_id VARCHAR(24), - email VARCHAR(50), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - deleted BOOLEAN DEFAULT false - ); - -CREATE TABLE IF NOT EXISTS quiz ( - id bigserial UNIQUE NOT NULL PRIMARY KEY, - qid uuid DEFAULT uuid_generate_v4(), - accountid varchar(30) NOT NULL, - deleted boolean DEFAULT false, - archived boolean DEFAULT false, - fingerprinting boolean DEFAULT false, - repeatable boolean DEFAULT false, - note_prevented boolean DEFAULT false, - mail_notifications boolean DEFAULT false, - unique_answers boolean DEFAULT false, - super boolean DEFAULT false, - group_id bigint DEFAULT 0, - name varchar(280), - description text, - config text, - status quiz_status DEFAULT 'draft', - limit_answers integer DEFAULT 0, - due_to integer DEFAULT 0, - time_of_passing integer DEFAULT 0, - pausable boolean DEFAULT false, - version smallint DEFAULT 0, - version_comment text DEFAULT '', - parent_ids integer[], - created_at timestamp DEFAULT CURRENT_TIMESTAMP, - updated_at timestamp DEFAULT CURRENT_TIMESTAMP, - questions_count integer DEFAULT 0, - answers_count integer DEFAULT 0, - average_time_passing integer DEFAULT 0 - ); - -CREATE TABLE IF NOT EXISTS question ( - id bigserial UNIQUE NOT NULL PRIMARY KEY, - quiz_id bigint NOT NULL, - title varchar(512) NOT NULL, - description text, - questiontype question_type DEFAULT 'text', - required boolean DEFAULT false, - deleted boolean DEFAULT false, - page smallint DEFAULT 0, - content text, - version smallint DEFAULT 0, - parent_ids integer[], - created_at timestamp DEFAULT CURRENT_TIMESTAMP, - updated_at timestamp DEFAULT CURRENT_TIMESTAMP, - CONSTRAINT quiz_relation FOREIGN KEY(quiz_id) REFERENCES quiz(id) - ); - -CREATE TABLE IF NOT EXISTS 
answer ( - id bigserial UNIQUE NOT NULL PRIMARY KEY, - content text, - quiz_id bigint NOT NULL REFERENCES quiz(id), - question_id bigint NOT NULL REFERENCES question(id), - fingerprint varchar(1024), - session varchar(20), - created_at timestamp DEFAULT CURRENT_TIMESTAMP - ); - -CREATE TABLE IF NOT EXISTS privileges ( - id SERIAL PRIMARY KEY, - privilegeID VARCHAR(50), - account_id UUID, - privilege_name VARCHAR(255), - amount INT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (account_id) REFERENCES account (id) - ); - --- Create indexes -CREATE INDEX IF NOT EXISTS active ON question(deleted) WHERE deleted=false; -CREATE INDEX IF NOT EXISTS relation ON question(quiz_id DESC); -CREATE INDEX IF NOT EXISTS required ON question(required DESC); -CREATE INDEX IF NOT EXISTS questiontype ON question(questiontype); -CREATE INDEX IF NOT EXISTS active ON quiz(deleted, archived, status) WHERE deleted = false AND archived = false AND status = 'start'; -CREATE INDEX IF NOT EXISTS timeouted ON quiz(due_to DESC) WHERE deleted = false AND due_to <> 0 AND status <> 'timeout'; -CREATE INDEX IF NOT EXISTS groups ON quiz(super) WHERE super = true; -CREATE INDEX IF NOT EXISTS birthtime ON quiz(created_at DESC); -CREATE INDEX IF NOT EXISTS subquizes ON quiz(group_id DESC) WHERE group_id <> 0; diff --git a/schema/000002_init.down.sql b/schema/000002_init.down.sql deleted file mode 100644 index 0aa3d9e..0000000 --- a/schema/000002_init.down.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE answer DROP COLUMN IF EXISTS result; diff --git a/schema/000002_init.up.sql b/schema/000002_init.up.sql deleted file mode 100644 index 4083153..0000000 --- a/schema/000002_init.up.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE answer ADD COLUMN result BOOLEAN DEFAULT FALSE; \ No newline at end of file diff --git a/schema/000003_init.down.sql b/schema/000003_init.down.sql deleted file mode 100644 index 94e352f..0000000 --- a/schema/000003_init.down.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE quiz DROP COLUMN IF EXISTS sessions_count; diff --git a/schema/000003_init.up.sql b/schema/000003_init.up.sql deleted file mode 100644 index a292d62..0000000 --- a/schema/000003_init.up.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE quiz ADD COLUMN sessions_count integer; diff --git a/schema/000004_init.down.sql b/schema/000004_init.down.sql deleted file mode 100644 index b6d5ec4..0000000 --- a/schema/000004_init.down.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE quiz DROP COLUMN IF EXISTS new; -ALTER TABLE quiz DROP COLUMN IF EXISTS deleted; \ No newline at end of file diff --git a/schema/000004_init.up.sql b/schema/000004_init.up.sql deleted file mode 100644 index 9dc5591..0000000 --- a/schema/000004_init.up.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE answer ADD COLUMN new BOOLEAN DEFAULT TRUE; -ALTER TABLE answer ADD COLUMN deleted BOOLEAN DEFAULT FALSE; \ No newline at end of file diff --git a/schema/000005_init.down.sql b/schema/000005_init.down.sql deleted file mode 100644 index ccae4f5..0000000 --- a/schema/000005_init.down.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE answer DROP COLUMN IF EXISTS email; -DROP INDEX IF EXISTS answer_email_unique_idx; \ No newline at end of file diff --git a/schema/000005_init.up.sql b/schema/000005_init.up.sql deleted file mode 100644 index f2bfe97..0000000 --- a/schema/000005_init.up.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE answer ADD COLUMN email VARCHAR(50) NOT NULL DEFAULT ''; -CREATE UNIQUE INDEX IF NOT EXISTS answer_email_unique_idx ON answer (quiz_id, email) WHERE email <> 
''; \ No newline at end of file diff --git a/schema/000006_init.down.sql b/schema/000006_init.down.sql deleted file mode 100644 index 355ba3b..0000000 --- a/schema/000006_init.down.sql +++ /dev/null @@ -1,6 +0,0 @@ -ALTER TABLE answer -DROP COLUMN device_type, -DROP COLUMN device, -DROP COLUMN os, -DROP COLUMN browser, -DROP COLUMN ip; \ No newline at end of file diff --git a/schema/000006_init.up.sql b/schema/000006_init.up.sql deleted file mode 100644 index 4dc1487..0000000 --- a/schema/000006_init.up.sql +++ /dev/null @@ -1,6 +0,0 @@ -ALTER TABLE answer -ADD COLUMN device_type VARCHAR(50) NOT NULL DEFAULT '', -ADD COLUMN device VARCHAR(100) NOT NULL DEFAULT '', -ADD COLUMN os VARCHAR(100) NOT NULL DEFAULT '', -ADD COLUMN browser VARCHAR(100) NOT NULL DEFAULT '', -ADD COLUMN ip VARCHAR(50) NOT NULL DEFAULT ''; \ No newline at end of file diff --git a/schema/000007_init.down.sql b/schema/000007_init.down.sql deleted file mode 100644 index 374c55d..0000000 --- a/schema/000007_init.down.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE answer -DROP COLUMN start; \ No newline at end of file diff --git a/schema/000007_init.up.sql b/schema/000007_init.up.sql deleted file mode 100644 index 6b41425..0000000 --- a/schema/000007_init.up.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE answer -ADD COLUMN start BOOLEAN NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/schema/000008_init.down.sql b/schema/000008_init.down.sql deleted file mode 100644 index f2dbd88..0000000 --- a/schema/000008_init.down.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE answer -ALTER COLUMN device TYPE VARCHAR(100), -ALTER COLUMN os TYPE VARCHAR(100), -ALTER COLUMN browser TYPE VARCHAR(100); diff --git a/schema/000008_init.up.sql b/schema/000008_init.up.sql deleted file mode 100644 index fcba321..0000000 --- a/schema/000008_init.up.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE answer -ALTER COLUMN device TYPE VARCHAR(1024), -ALTER COLUMN os TYPE VARCHAR(1024), -ALTER COLUMN browser TYPE VARCHAR(1024); diff --git a/schema/000009_init.down.sql b/schema/000009_init.down.sql deleted file mode 100644 index 342ea06..0000000 --- a/schema/000009_init.down.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE quiz -ALTER COLUMN name TYPE VARCHAR(280); diff --git a/schema/000009_init.up.sql b/schema/000009_init.up.sql deleted file mode 100644 index 0c3038e..0000000 --- a/schema/000009_init.up.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE quiz -ALTER COLUMN name TYPE VARCHAR(1024); diff --git a/service/account_svc.go b/service/account_svc.go index 6888c56..e69de29 100644 --- a/service/account_svc.go +++ b/service/account_svc.go @@ -1,238 +0,0 @@ -package service - -import ( - "database/sql" - "errors" - "github.com/gofiber/fiber/v2" - "gitea.pena/PenaSide/common/log_mw" - "gitea.pena/SQuiz/common/middleware" - "gitea.pena/SQuiz/common/model" - "gitea.pena/SQuiz/common/pj_errors" - "gitea.pena/SQuiz/core/brokers" - "gitea.pena/SQuiz/core/models" - "time" -) - -type CreateAccountReq struct { - UserID string `json:"userId"` -} - -type CreateAccountResp struct { - CreatedAccount model.Account `json:"created_account"` -} - -type DeleteAccountResp struct { - DeletedAccountID string `json:"account_Id"` -} - -type GetPrivilegeByUserIDReq struct { - UserID string `json:"userId"` -} - -type DeleteAccountByUserIDReq struct { - UserID string `json:"userId"` -} - -type DeleteAccountByUserIDResp struct { - DeletedAccountUserID string `json:"userId"` -} - -type GetAccountsReq struct { - Limit uint64 `json:"limit"` - Page uint64 `json:"page"` 
-} - -type GetAccountsResp struct { - Count uint64 `json:"count"` - Items []model.Account `json:"items"` -} - -// getCurrentAccount обработчик для получения текущего аккаунта -func (s *Service) getCurrentAccount(ctx *fiber.Ctx) error { - accountID, ok := middleware.GetAccountId(ctx) - if !ok { - return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") - } - - account, err := s.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) - if err != nil && err != sql.ErrNoRows { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - //TODO: fix this later - if account.ID == "" { - return ctx.Status(fiber.StatusNotFound).SendString("no account") - } - - return ctx.Status(fiber.StatusOK).JSON(account) -} - -// createAccount обработчик для создания нового аккаунта -func (s *Service) createAccount(ctx *fiber.Ctx) error { - accountID, ok := middleware.GetAccountId(ctx) - if !ok { - return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") - } - hlogger := log_mw.ExtractLogger(ctx) - - existingAccount, err := s.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) - if err != nil && err != sql.ErrNoRows { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - if existingAccount.ID != "" { - return ctx.Status(fiber.StatusConflict).SendString("user with this ID already exists") - } - - email, err := s.authClient.GetUserEmail(accountID) - if err != nil { - return err - } - - newAccount := model.Account{ - UserID: accountID, - CreatedAt: time.Now(), - Email: email, - Deleted: false, - Privileges: map[string]model.ShortPrivilege{ - "quizUnlimTime": { - PrivilegeID: "quizUnlimTime", - PrivilegeName: "Безлимит Опросов", - Amount: 14, - CreatedAt: time.Now(), - }, - }, - } - - createdAcc, err := s.dal.AccountRepo.CreateAccount(ctx.Context(), &newAccount) - if err != nil { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - hlogger.Emit(models.InfoAccountCreated{ - CtxUserID: accountID, - CtxAccountID: createdAcc.ID, - }) - - err = s.producer.ToMailNotify(ctx.Context(), brokers.Message{ - AccountID: accountID, - Email: email, - ServiceKey: s.serviceName, - SendAt: time.Now(), - }) - if err != nil { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - return ctx.JSON(CreateAccountResp{ - CreatedAccount: newAccount, - }) -} - -// deleteAccount обработчик для удаления текущего аккаунта -func (s *Service) deleteAccount(ctx *fiber.Ctx) error { - accountID, ok := middleware.GetAccountId(ctx) - if !ok { - return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") - } - - account, err := s.dal.AccountRepo.GetAccountByID(ctx.Context(), accountID) - if err != nil { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - if err := s.dal.AccountRepo.DeleteAccount(ctx.Context(), account.ID); err != nil { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - return ctx.JSON(DeleteAccountResp{ - DeletedAccountID: accountID, - }) -} - -// getPrivilegeByUserID обработчик для получения привилегий аккаунта по ID пользователя -func (s *Service) getPrivilegeByUserID(ctx *fiber.Ctx) error { - var req GetPrivilegeByUserIDReq - if err := ctx.BodyParser(&req); err != nil { - return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") - } - - privilege, err := s.dal.AccountRepo.GetPrivilegesByAccountID(ctx.Context(), req.UserID) - if err != nil { - return 
ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - return ctx.Status(fiber.StatusOK).JSON(privilege) -} - -// deleteAccountByUserID обработчик для удаления аккаунта по ID пользователя -func (s *Service) deleteAccountByUserID(ctx *fiber.Ctx) error { - var req DeleteAccountByUserIDReq - if err := ctx.BodyParser(&req); err != nil { - return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") - } - - existingAccount, err := s.dal.AccountRepo.GetAccountByID(ctx.Context(), req.UserID) - if err != nil { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - if existingAccount.ID == "" { - return ctx.Status(fiber.StatusInternalServerError).SendString("user with this ID not found") - } - - if err := s.dal.AccountRepo.DeleteAccount(ctx.Context(), existingAccount.ID); err != nil { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - return ctx.JSON(DeleteAccountByUserIDResp{ - DeletedAccountUserID: req.UserID, - }) -} - -// getAccounts обработчик для получения списка аккаунтов с пагинацией -func (s *Service) getAccounts(ctx *fiber.Ctx) error { - var req GetAccountsReq - if err := ctx.BodyParser(&req); err != nil { - return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") - } - - _, ok := middleware.GetAccountId(ctx) - if !ok { - return ctx.Status(fiber.StatusUnauthorized).SendString("account id is required") - } - - accounts, totalCount, err := s.dal.AccountRepo.GetAccounts(ctx.Context(), req.Limit, req.Page) - if err != nil { - return ctx.Status(fiber.StatusInternalServerError).SendString(err.Error()) - } - - response := GetAccountsResp{ - Count: totalCount, - Items: accounts, - } - - return ctx.Status(fiber.StatusOK).JSON(response) -} - -func (s *Service) ManualDone(ctx *fiber.Ctx) error { - var req struct { - Id string `json:"id"` - } - if err := ctx.BodyParser(&req); err != nil { - return ctx.Status(fiber.StatusBadRequest).SendString("Invalid request data") - } - - if req.Id == "" { - return ctx.Status(fiber.StatusBadRequest).SendString("User id is required") - } - - err := s.dal.AccountRepo.ManualDone(ctx.Context(), req.Id) - if err != nil { - if errors.Is(err, pj_errors.ErrNotFound) { - return ctx.Status(fiber.StatusNotFound).SendString("user don't have this privilege") - } - return ctx.Status(fiber.StatusInternalServerError).SendString("Internal Server Error") - } - - return ctx.SendStatus(fiber.StatusOK) -} diff --git a/service/service.go b/service/service.go index 52a0307..e69de29 100644 --- a/service/service.go +++ b/service/service.go @@ -1,89 +0,0 @@ -package service - -import ( - "gitea.pena/SQuiz/common/dal" - "gitea.pena/SQuiz/core/brokers" - "gitea.pena/SQuiz/core/clients/auth" - "github.com/gofiber/fiber/v2" -) - -// Service is an entity for http requests handling -type Service struct { - dal *dal.DAL - authClient *auth.AuthClient - producer *brokers.Producer - serviceName string - chDAL *dal.ClickHouseDAL - s3Prefix string - producerGigaChat *brokers.Producer -} - -type Deps struct { - Dal *dal.DAL - AuthClient *auth.AuthClient - Producer *brokers.Producer - ServiceName string - ChDAL *dal.ClickHouseDAL - S3Prefix string - ProducerGigaChat *brokers.Producer -} - -func New(deps Deps) *Service { - return &Service{ - dal: deps.Dal, - authClient: deps.AuthClient, - producer: deps.Producer, - serviceName: deps.ServiceName, - chDAL: deps.ChDAL, - s3Prefix: deps.S3Prefix, - } -} - -// Register is a function for add handlers of service to external multiplexer -func 
(s *Service) Register(app *fiber.App) { - // quiz manipulating handlers - app.Post("/quiz/create", s.CreateQuiz) - app.Post("/quiz/getList", s.GetQuizList) - app.Patch("/quiz/edit", s.UpdateQuiz) - app.Post("/quiz/copy", s.CopyQuiz) - app.Post("/quiz/history", s.GetQuizHistory) - app.Delete("/quiz/delete", s.DeleteQuiz) - app.Patch("/quiz/archive", s.ArchiveQuiz) - app.Post("/quiz/move", s.QuizMove) - app.Post("/quiz/template", s.TemplateCopy) - - app.Post("/quiz/:quizID/auditory", s.CreateQuizAuditory) - app.Get("/quiz/:quizID/auditory", s.GetQuizAuditory) - app.Delete("/quiz/:quizID/auditory", s.DeleteQuizAuditory) - - // question manipulating handlers - app.Post("/question/create", s.CreateQuestion) - app.Post("/question/getList", s.GetQuestionList) - app.Patch("/question/edit", s.UpdateQuestion) - app.Post("/question/copy", s.CopyQuestion) - app.Post("/question/history", s.GetQuestionHistory) - app.Delete("/question/delete", s.DeleteQuestion) - - // account handlers - app.Get("/account/get", s.getCurrentAccount) - app.Post("/account/create", s.createAccount) - app.Delete("/account/delete", s.deleteAccount) - app.Get("/accounts", s.getAccounts) - app.Get("/privilege/:userId", s.getPrivilegeByUserID) - app.Delete("/account/:userId", s.deleteAccountByUserID) - app.Post("/account/manualdone", s.ManualDone) - - // result handlers - app.Post("/results/getResults/:quizId", s.GetResultsByQuizID) - app.Delete("/results/delete/:resultId", s.DelResultByID) - app.Patch("/result/seen", s.SetStatus) - app.Post("/results/:quizID/export", s.ExportResultsToCSV) - app.Get("/result/:resultID", s.GetResultAnswers) - - // statistics handlers - app.Post("/statistic/:quizID/devices", s.GetDeviceStatistics) - app.Post("/statistic/:quizID/general", s.GetGeneralStatistics) - app.Post("/statistic/:quizID/questions", s.GetQuestionsStatistics) - app.Post("/statistic", s.AllServiceStatistics) - app.Get("/statistics/:quizID/pipelines", s.GetPipelinesStatistics) -} diff --git a/tests/mailNotify_test.go b/tests/mailNotify_test.go index d125f83..4a7466e 100644 --- a/tests/mailNotify_test.go +++ b/tests/mailNotify_test.go @@ -2,11 +2,11 @@ package tests import ( "context" + "gitea.pena/SQuiz/core/internal/brokers" + "gitea.pena/SQuiz/core/internal/initialize" "github.com/pioz/faker" "go.uber.org/zap" "log" - "gitea.pena/SQuiz/core/brokers" - "gitea.pena/SQuiz/core/initialize" "testing" "time" ) diff --git a/tests/publish_test.go b/tests/publish_test.go index 6b99aa0..87aa333 100644 --- a/tests/publish_test.go +++ b/tests/publish_test.go @@ -1,10 +1,10 @@ package tests import ( - "github.com/gofiber/fiber/v2" - "github.com/stretchr/testify/assert" "gitea.pena/PenaSide/common/privilege" "gitea.pena/SQuiz/common/model" + "github.com/gofiber/fiber/v2" + "github.com/stretchr/testify/assert" "testing" ) diff --git a/tests/smtp_test.go b/tests/smtp_test.go index a744de0..908b01b 100644 --- a/tests/smtp_test.go +++ b/tests/smtp_test.go @@ -2,12 +2,11 @@ package tests import ( _ "embed" + "gitea.pena/SQuiz/common/clients" + "gitea.pena/SQuiz/common/model" "github.com/gofiber/fiber/v2" "github.com/pioz/faker" "github.com/stretchr/testify/assert" - "gitea.pena/SQuiz/common/model" - "gitea.pena/SQuiz/worker/answerwc" - "gitea.pena/SQuiz/worker/clients/mailclient" "testing" "time" ) @@ -19,16 +18,16 @@ var toClientTemplate string var reminderTemplate string func TestProcessMessageToSMTP(t *testing.T) { - clientDeps := mailclient.ClientDeps{ - Host: "connect.mailclient.bz", - Port: "587", - Sender: "skeris@mailing.pena.digital", - 
Auth: &mailclient.PlainAuth{Username: "kotilion.95@gmail.com", Password: "vWwbCSg4bf0p"}, + clientDeps := clients.Deps{ + SmtpHost: "connect.mailclient.bz", + SmtpPort: "587", + SmtpSender: "skeris@mailing.pena.digital", ApiKey: "P0YsjUB137upXrr1NiJefHmXVKW1hmBWlpev", FiberClient: &fiber.Client{}, + SmtpApiUrl: "", } - client := mailclient.NewClient(clientDeps) + client := clients.NewSmtpClient(clientDeps) recipient := "mullinp@internet.ru" subject := "Test" diff --git a/tests/utils.go b/tests/utils.go index 4d904f3..01c54a9 100644 --- a/tests/utils.go +++ b/tests/utils.go @@ -111,7 +111,11 @@ func registerUser(login string) *jwt.Token { if err != nil { panic(err) } - defer resp.Body.Close() + defer func() { + if derr := resp.Body.Close(); derr != nil { + fmt.Printf("error close response body in registerUser: %v", derr) + } + }() bytes, err := io.ReadAll(resp.Body) if err != nil { diff --git a/tools/migrate b/tools/migrate new file mode 100755 index 0000000..573af5c Binary files /dev/null and b/tools/migrate differ diff --git a/tools/tools.go b/tools/tools.go index b9e8790..e69de29 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -1,449 +0,0 @@ -package tools - -import ( - "encoding/json" - "fmt" - "github.com/xuri/excelize/v2" - _ "image/gif" - _ "image/jpeg" - _ "image/png" - "io" - "io/ioutil" - "net/http" - "net/url" - "path/filepath" - "gitea.pena/SQuiz/common/model" - "regexp" - "sort" - "strconv" - "strings" - "sync" -) - -const ( - bucketImages = "squizimages" - bucketFonts = "squizfonts" - bucketScripts = "squizscript" - bucketStyle = "squizstyle" - bucketAnswers = "squizanswer" -) - -func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []model.Answer, s3Prefix string) error { - file := excelize.NewFile() - sheet := "Sheet1" - - _, err := file.NewSheet(sheet) - if err != nil { - return err - } - - sort.Slice(questions, func(i, j int) bool { - return questions[i].Page < questions[j].Page - }) - - headers := []string{"Данные респондента"} - headers = append([]string{"Дата и время"}, headers...) 
- mapQueRes := make(map[uint64]string) - - for _, q := range questions { - if !q.Deleted { - if q.Type == model.TypeResult { - mapQueRes[q.Id] = q.Title + "\n" + q.Description - } else { - headers = append(headers, q.Title) - } - } - } - - headers = append(headers, "Результат") - - for col, header := range headers { - cell := ToAlphaString(col+1) + "1" - if err := file.SetCellValue(sheet, cell, header); err != nil { - return err - } - } - - sort.Slice(answers, func(i, j int) bool { - return answers[i].QuestionId < answers[j].QuestionId - }) - - // мапа для хранения обычных ответов респондентов - standart := make(map[string][]model.Answer) - - // мапа для хранения данных респондентов - results := make(map[string]model.Answer) - - // заполняем мапу ответами и данными респондентов - for _, answer := range answers { - if answer.Result { - results[answer.Session] = answer - } else { - standart[answer.Session] = append(standart[answer.Session], answer) - } - } - - processSession := func(session string, response []model.Answer, row int) { - defer func() { - if r := recover(); r != nil { - fmt.Println("Recovered from panic:", r) - } - }() - - if err := file.SetCellValue(sheet, "A"+strconv.Itoa(row), results[session].CreatedAt.Format("2006-01-02 15:04:05")); err != nil { - fmt.Println(err.Error()) - } - if err := file.SetCellValue(sheet, "B"+strconv.Itoa(row), results[session].Content); err != nil { - fmt.Println(err.Error()) - } - - count := 3 - - for _, q := range questions { - if !q.Deleted && q.Type != model.TypeResult { - index := binarySearch(response, q.Id) - if index != -1 { - cell := ToAlphaString(count) + strconv.Itoa(row) - tipe := FileSearch(response[index].Content) - noAccept := make(map[string]struct{}) - todoMap := make(map[string]string) - if tipe != "Text" && q.Type == model.TypeImages || q.Type == model.TypeVarImages { - var res model.ImageContent - err := json.Unmarshal([]byte(response[index].Content), &res) - if err != nil { - res.Image = response[index].Content - } - urle := ExtractImageURL(res.Image) - urlData := strings.Split(urle, " ") - if len(urlData) == 1 { - u, err := url.Parse(urle) - if err == nil && u.Scheme != "" && u.Host != "" { - picture, err := downloadImage(urle) - if err != nil { - fmt.Println(err.Error()) - } - file.SetColWidth(sheet, ToAlphaString(count), ToAlphaString(count), 50) - file.SetRowHeight(sheet, row, 150) - if err := file.AddPictureFromBytes(sheet, cell, picture); err != nil { - fmt.Println(err.Error()) - } - noAccept[response[index].Content] = struct{}{} - } else { - todoMap[response[index].Content] = cell - } - } else { - todoMap[response[index].Content] = cell - } - } else if q.Type == model.TypeFile { - urle := response[index].Content - if urle != "" && !strings.HasPrefix(urle, "https") { - urle = strings.ReplaceAll(s3Prefix,bucketImages, bucketAnswers) + fmt.Sprint(q.Id)+ "/" + urle - } - fmt.Println("ORRRRR", urle, s3Prefix) - display, tooltip := urle, urle - if err := file.SetCellValue(sheet, cell, urle); err != nil { - fmt.Println(err.Error()) - } - if err := file.SetCellHyperLink(sheet, cell, urle, "External", excelize.HyperlinkOpts{ - Display: &display, - Tooltip: &tooltip, - }); err != nil { - fmt.Println(err.Error()) - } - noAccept[response[index].Content] = struct{}{} - } else { - todoMap[response[index].Content] = cell - } - for cnt, cel := range todoMap { - if _, ok := noAccept[cnt]; !ok { - if err := file.SetCellValue(sheet, cel, cnt); err != nil { - fmt.Println(err.Error()) - } - } - } - - } else { - cell := ToAlphaString(count) + 
strconv.Itoa(row) - if err := file.SetCellValue(sheet, cell, "-"); err != nil { - fmt.Println(err.Error()) - } - } - count++ - } - } - index := binarySearch(response, results[session].QuestionId) - content := response[index].Content - score , err := strconv.ParseInt(content, 10, 64) - cell := ToAlphaString(len(headers)) + strconv.Itoa(row) - if err != nil { - if err := file.SetCellValue(sheet, cell, mapQueRes[results[session].QuestionId]); err != nil { - fmt.Println(err.Error()) - } - } else { - if err := file.SetCellValue(sheet, cell, score); err != nil { - fmt.Println(err.Error()) - } - } - - } - - row := 2 - var wg sync.WaitGroup - for session, _ := range results { - wg.Add(1) - go func(session string, response []model.Answer, row int) { - defer wg.Done() - processSession(session, standart[session], row) - }(session, standart[session], row) - row++ - } - wg.Wait() - - if err := file.Write(buffer); err != nil { - return err - } - - return nil -} - -func binarySearch(answers []model.Answer, questionID uint64) int { - left := 0 - right := len(answers) - 1 - for left <= right { - mid := left + (right-left)/2 - if answers[mid].QuestionId == questionID { - return mid - } else if answers[mid].QuestionId < questionID { - left = mid + 1 - } else { - right = mid - 1 - } - } - return -1 -} - -func FileSearch(content string) string { - if strings.Contains(content, bucketImages) { - return FileType(content) - } else if strings.Contains(content, bucketFonts) { - return FileType(content) - } else if strings.Contains(content, bucketScripts) { - return FileType(content) - } else if strings.Contains(content, bucketStyle) { - return FileType(content) - } else if strings.Contains(content, bucketAnswers) { - return FileType(content) - } - - return "Text" -} - -func FileType(filename string) string { - parts := strings.Split(filename, ".") - extension := parts[len(parts)-1] - - switch extension { - case "png", "jpg", "jpeg", "gif", "bmp", "svg", "webp", "tiff", "ico": - return "Image" - default: - return "File" - } -} - -func downloadImage(url string) (*excelize.Picture, error) { - resp, err := http.Get(url) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - imgData, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - ext := filepath.Ext(url) - if ext == "" { - contentType := resp.Header.Get("Content-Type") - switch { - case strings.HasPrefix(contentType, "image/jpeg"): - ext = ".jpg" - case strings.HasPrefix(contentType, "image/png"): - ext = ".png" - default: - ext = ".png" - } - } - - pic := &excelize.Picture{ - Extension: ext, - File: imgData, - Format: &excelize.GraphicOptions{ - AutoFit: true, - Positioning: "oneCell", - }, - } - return pic, nil -} - -func ToAlphaString(col int) string { - var result string - for col > 0 { - col-- - result = string(rune('A'+col%26)) + result - col /= 26 - } - return result -} - -func ExtractImageURL(htmlContent string) string { - re := regexp.MustCompile(`(?:]*src="([^"]+)"[^>]*>)|(?:]*>.*?]*src="([^"]+)"[^>]*>.*?)|(?:]*>.*?]*>.*?]*src="([^"]+)"[^>]*>.*?.*?)|(?:]*\s+download[^>]*>([^<]+)<\/a>)`) - matches := re.FindAllStringSubmatch(htmlContent, -1) - - for _, match := range matches { - for i := 1; i < len(match); i++ { - if match[i] != "" { - return strings.TrimSpace(match[i]) - } - } - } - return htmlContent -} - -//func WriteDataToExcel(buffer io.Writer, questions []model.Question, answers []model.Answer) error { -// file := excelize.NewFile() -// sheet := "Sheet1" -// -// _, err := file.NewSheet(sheet) -// if err != nil { 
-// return err -// } -// -// sort.Slice(questions, func(i, j int) bool { -// return questions[i].Page > questions[j].Page -// }) -// -// headers := []string{"Данные респондента"} -// mapQueRes := make(map[uint64]string) -// -// for _, q := range questions { -// if !q.Deleted { -// if q.Type == model.TypeResult { -// mapQueRes[q.Id] = q.Title + "\n" + q.Description -// } else { -// headers = append(headers, q.Title) -// } -// } -// } -// -// headers = append(headers, "Результат") -// -// // добавляем заголовки в первую строку -// for col, header := range headers { -// cell := ToAlphaString(col+1) + "1" -// if err := file.SetCellValue(sheet, cell, header); err != nil { -// return err -// } -// } -// -// // мапа для хранения обычных ответов респондентов -// standart := make(map[string][]model.Answer) -// -// // мапа для хранения данных респондентов -// results := make(map[string]model.Answer) -// -// // заполняем мапу ответами и данными респондентов -// for _, answer := range answers { -// if answer.Result { -// results[answer.Session] = answer -// } else { -// standart[answer.Session] = append(standart[answer.Session], answer) -// } -// } -// -// // записываем данные в файл -// row := 2 -// for session, _ := range results { -// response := standart[session] -// if err := file.SetCellValue(sheet, "A"+strconv.Itoa(row), results[session].Content); err != nil { -// return err -// } -// count := 2 -// for _, q := range questions { -// if !q.Deleted && q.Type != model.TypeResult { -// sort.Slice(response, func(i, j int) bool { -// return response[i].QuestionId < response[j].QuestionId -// }) -// index := binarySearch(response, q.Id) -// if index != -1 { -// cell := ToAlphaString(count) + strconv.Itoa(row) -// typeMap := FileSearch(response[index].Content) -// noAccept := make(map[string]struct{}) -// todoMap := make(map[string]string) -// for _, tipe := range typeMap { -// if tipe != "Text" && q.Type == model.TypeImages || q.Type == model.TypeVarImages { -// urle := ExtractImageURL(response[index].Content) -// urlData := strings.Split(urle, " ") -// for _, k := range urlData { -// u, err := url.Parse(k) -// if err == nil && u.Scheme != "" && u.Host != "" { -// picture, err := downloadImage(k) -// if err != nil { -// return err -// } -// file.SetColWidth(sheet, ToAlphaString(count), ToAlphaString(count), 50) -// file.SetRowHeight(sheet, row, 150) -// if err := file.AddPictureFromBytes(sheet, cell, picture); err != nil { -// return err -// } -// noAccept[response[index].Content] = struct{}{} -// } -// } -// } else if tipe != "Text" && q.Type == model.TypeFile { -// urle := ExtractImageURL(response[index].Content) -// display, tooltip := urle, urle -// if err := file.SetCellValue(sheet, cell, response[index].Content); err != nil { -// return err -// } -// if err := file.SetCellHyperLink(sheet, cell, urle, "External", excelize.HyperlinkOpts{ -// Display: &display, -// Tooltip: &tooltip, -// }); err != nil { -// return err -// } -// noAccept[response[index].Content] = struct{}{} -// } else { -// todoMap[response[index].Content] = cell -// } -// } -// for cnt, cel := range todoMap { -// if _, ok := noAccept[cnt]; !ok { -// if err := file.SetCellValue(sheet, cel, cnt); err != nil { -// return err -// } -// } -// } -// -// } else { -// cell := ToAlphaString(count) + strconv.Itoa(row) -// if err := file.SetCellValue(sheet, cell, "-"); err != nil { -// return err -// } -// } -// count++ -// } -// } -// cell := ToAlphaString(len(headers)) + strconv.Itoa(row) -// if err := file.SetCellValue(sheet, cell, 
mapQueRes[results[session].QuestionId]); err != nil { -// return err -// } -// row++ -// } -// -// // cохраняем данные в буфер -// if err := file.Write(buffer); err != nil { -// return err -// } -// -// return nil -//}