commit bad198ce2975d157efe6c9cc475d732725444173 Author: poignatov-home Date: Sun Feb 8 17:01:36 2026 +0300 Первоначальный коммит Co-authored-by: Cursor diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..187b392 Binary files /dev/null and b/.DS_Store differ diff --git a/.cursor/commands.json b/.cursor/commands.json new file mode 100644 index 0000000..d777428 --- /dev/null +++ b/.cursor/commands.json @@ -0,0 +1,33 @@ +{ + "commands": [ + { + "name": "init", + "description": "Инициализация Play Life: остановка контейнеров, поднятие сервисов, создание дампа с продакшена и восстановление в локальную базу", + "command": "./init.sh", + "type": "shell", + "cwd": "${workspaceFolder}" + }, + { + "name": "run", + "description": "Перезапуск Play Life: перезапуск всех контейнеров", + "command": "./run.sh", + "type": "shell", + "cwd": "${workspaceFolder}" + }, + { + "name": "backupFromProd", + "description": "Создание дампа базы данных с продакшена", + "command": "./dump-db.sh", + "type": "shell", + "cwd": "${workspaceFolder}" + }, + { + "name": "restoreToLocal", + "description": "Восстановление базы данных из самого свежего дампа в локальную базу (автоматически выбирает последний дамп)", + "command": "./restore-db.sh", + "type": "shell", + "cwd": "${workspaceFolder}" + } + ] +} + diff --git a/.cursor/plans/изменить_сортировку_заблокированных_желаний_по_времени_разблокировки_4987d56a.plan.md b/.cursor/plans/изменить_сортировку_заблокированных_желаний_по_времени_разблокировки_4987d56a.plan.md new file mode 100644 index 0000000..0174406 --- /dev/null +++ b/.cursor/plans/изменить_сортировку_заблокированных_желаний_по_времени_разблокировки_4987d56a.plan.md @@ -0,0 +1,260 @@ +# План: Изменить сортировку заблокированных желаний по времени разблокировки + +## Цель +Изменить сортировку желаний: +1. Разблокированные - по цене от меньшего к большему +2. Заблокированные без целей-задач - по сроку разблокировки (максимальное время среди проектов) +3. 
Заблокированные с целями-задачами - по сроку разблокировки (максимальное время среди проектов) + +## Статус реализации + +**Уже реализовано:** +- ✅ `calculateProjectUnlockWeeks` - функция расчета недель разблокировки +- ✅ `calculateLockedSortValue` - использует `calculateProjectUnlockWeeks` и возвращает недели +- ✅ `getProjectMedian` - упрощенная версия без fallback (используется как есть) + +**Требуется реализовать:** +- ⏳ Создать миграцию для `projects_median_mv` (миграции нет, но используется в коде) +- ⏳ В `getWishlistHandler`: заменить `calculateUnlockedSortValue` на прямую сортировку по цене для разблокированных +- ⏳ В `getWishlistHandler`: разделить заблокированные на группы (с задачами/без задач) и сортировать каждую группу +- ⏳ В `getBoardItemsHandler`: заменить `calculateUnlockedSortValue` на прямую сортировку по цене для разблокированных +- ⏳ В `getBoardItemsHandler`: разделить заблокированные на группы (с задачами/без задач) и сортировать каждую группу + +## Изменения + +### 1. Создать миграцию для projects_median_mv + +**Статус:** `getProjectMedian` уже использует `projects_median_mv`, но миграции для неё нет в списке миграций. Нужно создать миграцию. 
+ +**Файл:** `play-life-backend/migrations/000007_add_projects_median_mv.up.sql` + +Убедиться, что materialized view включает `user_id`: +```sql +CREATE MATERIALIZED VIEW projects_median_mv AS +SELECT + p.id AS project_id, + p.user_id, + PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score +FROM ( + SELECT + project_id, + normalized_total_score, + report_year, + report_week, + ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn + FROM weekly_report_mv + WHERE + (report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER) + OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER) +) sub +JOIN projects p ON p.id = sub.project_id +WHERE rn <= 12 AND p.deleted = FALSE +GROUP BY p.id, p.user_id +WITH DATA; + +CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id); +CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id); +``` + +**Файл:** `play-life-backend/migrations/000007_add_projects_median_mv.down.sql` + +```sql +DROP MATERIALIZED VIEW IF EXISTS projects_median_mv; +``` + +### 2. Изменить calculateLockedSortValue для расчета времени + +**Файл:** `play-life-backend/main.go` (строки 12488-12561) + +**Статус:** Функция уже реализована и использует `calculateProjectUnlockWeeks`. Проверить, что логика соответствует требованиям: +- Учитывает только условия типа `project_points` +- Использует правильного владельца условия (`conditionOwnerID`) +- Возвращает максимальное количество недель среди всех условий проектов +- Возвращает 999999.0 если нет условий по проектам или все выполнены + +**Текущая реализация уже корректна**, изменения не требуются. + +**Важно:** +- Функция уже использует `calculateProjectUnlockWeeks` для расчета (уже реализовано) +- Функция НЕ должна учитывать задачи, только проекты. Разделение на группы с задачами и без задач будет в сортировке. 
+- Функция уже правильно обрабатывает владельца условия через `conditionOwnerID` (не использует `userID` напрямую) + +### 3. Обновить сортировку в getWishlistHandler + +**Файл:** `play-life-backend/main.go` (строки 9933-9951) + +**Текущее состояние:** +- Разблокированные: используют `calculateUnlockedSortValue` (сумма баллов) - **нужно заменить на цену** +- Заблокированные: сортируются по `calculateLockedSortValue` (недели) - **нужно разделить на группы** + +**Изменить:** +1. Разблокированные: сортировка по цене от меньшего к большему (заменить `calculateUnlockedSortValue`) +2. Заблокированные: разделить на группы (с задачами/без задач) и сортировать каждую группу по времени + +```go +// Сортируем разблокированные по цене от меньшего к большему +// ЗАМЕНА: было calculateUnlockedSortValue, стало прямая сортировка по цене +sort.Slice(unlocked, func(i, j int) bool { + priceI := 0.0 + priceJ := 0.0 + if unlocked[i].Price != nil { + priceI = *unlocked[i].Price + } + if unlocked[j].Price != nil { + priceJ = *unlocked[j].Price + } + if priceI == priceJ { + return unlocked[i].ID < unlocked[j].ID + } + return priceI < priceJ // Сортировка по цене от меньшего к большему (заменяет calculateUnlockedSortValue) +}) + +// Разделяем заблокированные на группы +lockedWithoutTasks := []WishlistItem{} +lockedWithTasks := []WishlistItem{} + +for _, item := range locked { + hasUncompletedTasks := false + for _, cond := range item.UnlockConditions { + if cond.Type == "task_completion" && (cond.TaskCompleted == nil || !*cond.TaskCompleted) { + hasUncompletedTasks = true + break + } + } + if hasUncompletedTasks { + lockedWithTasks = append(lockedWithTasks, item) + } else { + lockedWithoutTasks = append(lockedWithoutTasks, item) + } +} + +// Сортируем каждую группу по времени разблокировки +sort.Slice(lockedWithoutTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithoutTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithoutTasks[j], userID) + if 
valueI == valueJ { + return lockedWithoutTasks[i].ID < lockedWithoutTasks[j].ID + } + return valueI < valueJ +}) + +sort.Slice(lockedWithTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithTasks[j], userID) + if valueI == valueJ { + return lockedWithTasks[i].ID < lockedWithTasks[j].ID + } + return valueI < valueJ +}) + +// Объединяем: сначала без задач, потом с задачами +locked = append(lockedWithoutTasks, lockedWithTasks...) +``` + +### 4. Обновить сортировку в getBoardItemsHandler + +**Файл:** `play-life-backend/main.go` (строки 12222-12240) + +**Текущее состояние:** +- Разблокированные: используют `calculateUnlockedSortValue` (сумма баллов) - **нужно заменить на цену** +- Заблокированные: сортируются по `calculateLockedSortValue` (недели) - **нужно разделить на группы** + +**Изменить аналогично getWishlistHandler:** +1. Разблокированные: сортировка по цене от меньшего к большему (заменить `calculateUnlockedSortValue`) +2. 
Заблокированные: разделить на группы (с задачами/без задач) и сортировать каждую группу по времени + +```go +// Сортируем разблокированные по цене от меньшего к большему +// ЗАМЕНА: было calculateUnlockedSortValue, стало прямая сортировка по цене +sort.Slice(unlocked, func(i, j int) bool { + priceI := 0.0 + priceJ := 0.0 + if unlocked[i].Price != nil { + priceI = *unlocked[i].Price + } + if unlocked[j].Price != nil { + priceJ = *unlocked[j].Price + } + if priceI == priceJ { + return unlocked[i].ID < unlocked[j].ID + } + return priceI < priceJ +}) + +// РАЗДЕЛЕНИЕ НА ГРУППЫ: Заблокированные с задачами и без задач +// ЗАМЕНА: было просто sort.Slice(locked, ...), стало разделение на группы +lockedWithoutTasks := []WishlistItem{} +lockedWithTasks := []WishlistItem{} + +for _, item := range locked { + hasUncompletedTasks := false + for _, cond := range item.UnlockConditions { + if cond.Type == "task_completion" && (cond.TaskCompleted == nil || !*cond.TaskCompleted) { + hasUncompletedTasks = true + break + } + } + if hasUncompletedTasks { + lockedWithTasks = append(lockedWithTasks, item) + } else { + lockedWithoutTasks = append(lockedWithoutTasks, item) + } +} + +// Сортируем каждую группу по времени разблокировки +sort.Slice(lockedWithoutTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithoutTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithoutTasks[j], userID) + if valueI == valueJ { + return lockedWithoutTasks[i].ID < lockedWithoutTasks[j].ID + } + return valueI < valueJ +}) + +sort.Slice(lockedWithTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithTasks[j], userID) + if valueI == valueJ { + return lockedWithTasks[i].ID < lockedWithTasks[j].ID + } + return valueI < valueJ +}) + +// Объединяем: сначала без задач, потом с задачами +locked = append(lockedWithoutTasks, lockedWithTasks...) +``` + +## Итоговый порядок элементов + +1. 
**Разблокированные** - отсортированы по цене от меньшего к большему +2. **Заблокированные без целей-задач** - отсортированы по максимальному времени разблокировки (среди всех проектов) от меньшего к большему +3. **Заблокированные с целями-задачами** - отсортированы по максимальному времени разблокировки (среди всех проектов) от меньшего к большему + +## Обработка краевых случаев + +- **Если медиана проекта = 0 или отсутствует**: `calculateProjectUnlockWeeks` возвращает 99999, что обрабатывается в `calculateLockedSortValue` (не учитывается в maxWeeks, если >= 99999) +- **Если нет условий**: возвращать 999999.0 (отсутствие условий = все условия выполнены) +- **Если все условия выполнены**: возвращать 999999.0 +- **Если цена не указана (NULL)**: считать как 0.0 +- **Если нет условий по проектам** (только задачи или нет условий): возвращать 999999.0 + +## Зависимости + +- `projects_median_mv` должна существовать (проверить наличие миграции или создать при необходимости) +- Функция `getProjectMedian` уже реализована (упрощенная версия без fallback) +- Функция `calculateProjectUnlockWeeks` уже реализована и используется в `calculateLockedSortValue` + +## Финальный шаг: Перезапуск приложения + +**После выполнения всех изменений:** + +Выполнить команду для перезапуска фронтенда и бэкенда: +```bash +./run.sh +``` + +Это пересоберет и перезапустит: +- Backend сервер (с пересборкой) +- Frontend приложение (с пересборкой) +- База данных diff --git a/.cursor/plans/создать_общие_функции_расчета_и_форматирования_срока_разблокировки_8a3f4b2c.plan.md b/.cursor/plans/создать_общие_функции_расчета_и_форматирования_срока_разблокировки_8a3f4b2c.plan.md new file mode 100644 index 0000000..fc97fae --- /dev/null +++ b/.cursor/plans/создать_общие_функции_расчета_и_форматирования_срока_разблокировки_8a3f4b2c.plan.md @@ -0,0 +1,392 @@ +# План: Создать общие функции расчета и форматирования срока разблокировки + +## Цель + +Создать универсальные функции для расчета и форматирования срока 
разблокировки проекта, которые будут использоваться везде где необходимо считать остаточный срок. + +## Изменения + +### 1. Создать функцию расчета срока разблокировки (бэкенд) + +**Файл:** `play-life-backend/main.go` + +Создать функцию `calculateProjectUnlockWeeks`: + +```go +// calculateProjectUnlockWeeks рассчитывает срок разблокировки проекта в неделях +// projectID - ID проекта +// requiredPoints - необходимое количество баллов +// startDate - дата начала подсчета (может быть nil - за всё время) +// userID - ID пользователя (владельца условия) +// Возвращает количество недель (float64): +// - > 0: условие не выполнено, возвращает количество недель +// - 0: условие уже выполнено (remaining <= 0) +// - 99999: медиана отсутствует или равна 0 (нельзя рассчитать) +func (a *App) calculateProjectUnlockWeeks(projectID int, requiredPoints float64, startDate sql.NullTime, userID int) float64 { + // 1. Получаем текущие баллы от startDate + currentPoints, err := a.calculateProjectPointsFromDate(projectID, startDate, userID) + if err != nil { + log.Printf("Error calculating project points for project %d, user %d: %v", projectID, userID, err) + return 99999 // Ошибка расчета - возвращаем 99999 + } + + // 2. Вычисляем остаток + remaining := requiredPoints - currentPoints + if remaining <= 0 { + // Условие уже выполнено + return 0 + } + + // 3. Получаем медиану проекта + median, err := a.getProjectMedian(projectID) + if err != nil || median <= 0 { + // Если медиана отсутствует или равна 0, возвращаем 99999 (нельзя рассчитать) + // Это нормальная ситуация, не логируем + return 99999 + } + + // 4. Рассчитываем недели + weeks := remaining / median + return weeks +} +``` + +**Примечание:** Функция возвращает: + +- `0`: условие уже выполнено (remaining <= 0) +- `> 0 && < 99999`: количество недель до выполнения условия +- `99999`: медиана отсутствует или равна 0 (нельзя рассчитать) или ошибка расчета + +```` + +### 2. 
Создать функцию форматирования срока (бэкенд) + +**Файл:** `play-life-backend/main.go` + +Создать функцию `formatWeeksText`: + +```go +// formatWeeksText форматирует количество недель в текстовый формат +// weeks - количество недель (float64) +// Возвращает строку: "2 недели", "<1 недели", "5 недель", "∞ недель" и т.д. +func formatWeeksText(weeks float64) string { + // Если weeks == 0, условие уже выполнено - не показываем срок + if weeks == 0 { + return "" + } + + // Если weeks >= 99999, это означает что медиана отсутствует или нельзя рассчитать + if weeks >= 99999 { + return "∞ недель" + } + + if weeks < 0 { + return "" + } + + if weeks < 1 { + return "<1 недели" + } + + weeksRounded := math.Round(weeks) + weeksInt := int(weeksRounded) + + // Правильное склонение для русского языка + var weekWord string + lastDigit := weeksInt % 10 + lastTwoDigits := weeksInt % 100 + + if lastTwoDigits >= 11 && lastTwoDigits <= 14 { + weekWord = "недель" + } else if lastDigit == 1 { + weekWord = "неделя" + } else if lastDigit >= 2 && lastDigit <= 4 { + weekWord = "недели" + } else { + weekWord = "недель" + } + + return fmt.Sprintf("%d %s", weeksInt, weekWord) +} +``` + +**Примечание:** + +- Форматирование на бэкенде, так как сортировка происходит на бэкенде. Фронтенд получает уже отформатированную строку. +- При `weeks == 0` (условие выполнено) возвращается пустая строка (не показываем срок) +- При `weeks >= 99999` (медиана отсутствует, нельзя рассчитать или ошибка расчета) возвращается "∞ недель" + +### 3. 
Использовать функции в calculateLockedSortValue + +**Файл:** `play-life-backend/main.go` (строки 12314-12337) + +Обновить функцию для использования `calculateProjectUnlockWeeks`: + +```go +func (a *App) calculateLockedSortValue(item WishlistItem, userID int) float64 { + // Если нет условий, возвращаем большое значение (отсутствие условий = все выполнены) + if len(item.UnlockConditions) == 0 { + return 999999.0 + } + + maxWeeks := 0.0 + hasProjectConditions := false + + for _, condition := range item.UnlockConditions { + if condition.Type == "project_points" { + hasProjectConditions = true + if condition.RequiredPoints != nil { + var startDate sql.NullTime + if condition.StartDate != nil { + date, err := time.Parse("2006-01-02", *condition.StartDate) + if err == nil { + startDate = sql.NullTime{Time: date, Valid: true} + } + } + + // ВАЖНО: Используем владельца условия из condition.UserID + // Если condition.UserID есть - это владелец условия + // Если нет - получаем владельца желания из БД (для старых условий) + // НЕ используем текущего пользователя (userID), так как условие может принадлежать другому пользователю + conditionOwnerID := 0 + if condition.UserID != nil { + conditionOwnerID = *condition.UserID + } else { + // Если нет владельца условия, получаем владельца желания из БД + var itemOwnerID int + err := a.DB.QueryRow(`SELECT user_id FROM wishlist_items WHERE id = $1`, item.ID).Scan(&itemOwnerID) + if err != nil { + log.Printf("Error getting wishlist item owner for item %d: %v", item.ID, err) + continue // Пропускаем условие, если не можем получить владельца + } + conditionOwnerID = itemOwnerID + } + + // Получаем projectID из условия + if condition.ProjectID != nil { + weeks := a.calculateProjectUnlockWeeks( + *condition.ProjectID, + *condition.RequiredPoints, + startDate, + conditionOwnerID, // Владелец условия, а не текущий пользователь + ) + // weeks > 0 && < 99999 означает, что условие еще не выполнено и расчет успешен + // weeks == 0 означает условие 
выполнено + // weeks == 99999 означает медиана отсутствует (нельзя рассчитать) или ошибка расчета + if weeks > 0 && weeks < 99999 { + if weeks > maxWeeks { + maxWeeks = weeks + } + } + } + } + } + } + + // Если были условия по проектам, но все выполнены (maxWeeks = 0) + if hasProjectConditions && maxWeeks == 0.0 { + return 999999.0 + } + + // Если не было условий по проектам (только задачи или нет условий) + if !hasProjectConditions { + return 999999.0 + } + + return maxWeeks +} +``` + +### 4. Использовать функции в API endpoint для расчета недель + +**Файл:** `play-life-backend/main.go` + +Обновить endpoint `/api/wishlist/calculate-weeks` (из плана "добавить расчет недель в форму"): + +**Важно:** Использовать владельца условия, а не текущего пользователя! + +```go +func (a *App) calculateWeeksHandler(w http.ResponseWriter, r *http.Request) { + // ... валидация и получение параметров ... + + // Определяем владельца условия: + // 1. Если передан condition_user_id в запросе - используем его (для существующего условия) + // 2. 
Иначе используем текущего пользователя (для нового условия) + conditionOwnerID := userID // userID из контекста (текущий пользователь) + if req.ConditionUserID != nil && *req.ConditionUserID > 0 { + conditionOwnerID = *req.ConditionUserID + } + + var startDate sql.NullTime + if req.StartDate != "" { + date, err := time.Parse("2006-01-02", req.StartDate) + if err == nil { + startDate = sql.NullTime{Time: date, Valid: true} + } + } + + // Используем владельца условия, а не текущего пользователя + weeks := a.calculateProjectUnlockWeeks(req.ProjectID, req.RequiredPoints, startDate, conditionOwnerID) + + response := map[string]interface{}{ + "weeks_text": formatWeeksText(weeks), // Отформатированная строка для отображения + } + + // weeks используется только для сортировки на бэкенде, на клиент не отправляется + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} +``` + +**Структура запроса:** + +```go +type CalculateWeeksRequest struct { + ProjectID int `json:"project_id"` + RequiredPoints float64 `json:"required_points"` + StartDate string `json:"start_date,omitempty"` + ConditionUserID *int `json:"condition_user_id,omitempty"` // Владелец условия (если условие существует) +} +``` + +### 5. Добавить weeks_text в UnlockConditionDisplay + +**Файл:** `play-life-backend/main.go` + +Добавить поле `WeeksText *string` в структуру `UnlockConditionDisplay`: + +```go +type UnlockConditionDisplay struct { + // ... существующие поля ... 
+ WeeksText *string `json:"weeks_text,omitempty"` // Отформатированный текст срока разблокировки +} +``` + +При загрузке условий типа `project_points` рассчитывать и форматировать срок: + +```go +if condition.Type == "project_points" && condition.RequiredPoints != nil && condition.ProjectID != nil { + var startDate sql.NullTime + if condition.StartDate != nil { + date, err := time.Parse("2006-01-02", *condition.StartDate) + if err == nil { + startDate = sql.NullTime{Time: date, Valid: true} + } + } + + // ВАЖНО: Используем владельца условия из condition.UserID, а не текущего пользователя + // Если condition.UserID есть - это владелец условия + // Если нет - используем владельца желания (itemOwnerID), но НЕ текущего пользователя (userID) + conditionOwnerID := itemOwnerID // Владелец желания как fallback + if condition.UserID != nil { + conditionOwnerID = *condition.UserID // Владелец условия (приоритет) + } + + weeks := a.calculateProjectUnlockWeeks( + *condition.ProjectID, + *condition.RequiredPoints, + startDate, + conditionOwnerID, // Владелец условия, а не текущий пользователь + ) + + // Форматируем всегда (при weeks == 0 вернет пустую строку, при weeks >= 99999 вернет "∞ недель") + weeksText := formatWeeksText(weeks) + condition.WeeksText = &weeksText +} +``` + +**Важно:** + +- `condition.UserID` - это владелец условия (из `wishlist_conditions.user_id`) +- `itemOwnerID` - это владелец желания (fallback для старых условий) +- `userID` (текущий пользователь) НЕ используется, так как условие может принадлежать другому пользователю + +### 6. 
Использовать weeks_text на фронтенде + +**Файл:** `play-life-web/src/components/WishlistDetail.jsx` + +Использовать готовый `weeks_text` из условия (приходит уже отформатированным из API): + +```javascript +// В renderUnlockConditions: +{progress.remaining > 0 && condition.weeks_text && ( + + Осталось: {Math.round(progress.remaining)} ({condition.weeks_text}) + +)} +``` + +**Файл:** `play-life-web/src/components/WishlistForm.jsx` + +Использовать `weeks_text` из ответа API для отображения недель в форме редактирования условия. Форматирование уже выполнено на бэкенде. + +### 7. Обновить загрузку медианы в условиях (опционально) + +**Файл:** `play-life-backend/main.go` + +При загрузке условий типа `project_points` медиана не нужна отдельно, так как `calculateProjectUnlockWeeks` сама получит её и вернет уже отформатированный `weeks_text`. + +## Места использования функций + +1. **calculateProjectUnlockWeeks** (бэкенд): + + - `calculateLockedSortValue` - для сортировки заблокированных желаний (использует числовое значение) + - `calculateWeeksHandler` - API endpoint для расчета недель (использует для расчета, но на клиент отправляется только отформатированная строка) + - При загрузке условий для расчета `weeks_text` (используется внутри, на клиент не отправляется) + - Любые другие места, где нужно рассчитать срок разблокировки + +2. **formatWeeksText** (бэкенд): + + - При загрузке условий в `UnlockConditionDisplay.WeeksText` (отправляется на клиент для отображения) + - В API endpoint `/api/wishlist/calculate-weeks` (отправляется на клиент для отображения в форме) + - Форматирование на бэкенде, так как сортировка происходит на бэкенде по числовому значению `weeks` + +## Выявленные и исправленные проблемы + +1. **Проблема с userID в calculateLockedSortValue**: + + - **Проблема**: Использовался текущий пользователь (`userID`), но условие может принадлежать другому пользователю + - **Исправление**: Используется `conditionOwnerID` из `condition.UserID` (владелец условия). 
Если `condition.UserID` отсутствует, условие пропускается (некорректное состояние) + +2. **Обработка отсутствия медианы**: + + - **Решение**: При отсутствии медианы возвращается `99999` (нельзя рассчитать). В `formatWeeksText` это значение преобразуется в "∞ недель". Такие условия не учитываются при сортировке по времени разблокировки (проверка `weeks > 0 && weeks < 99999`) + +3. **Форматирование и передача данных**: + + - **Решение**: Форматирование на бэкенде, так как сортировка происходит на бэкенде по числовому значению `weeks` + - Числовое значение `weeks` используется только на бэкенде для сортировки, на клиент не отправляется + - На клиент отправляется только отформатированная строка `weeks_text` для отображения + - Фронтенд просто отображает готовую строку без дополнительного форматирования + - Это исключает дублирование логики и обеспечивает единообразие форматирования + +4. **Использование правильного userID (владельца условия)**: + + - **Проблема**: В функцию `calculateProjectUnlockWeeks` может передаваться текущий пользователь вместо владельца условия + - **Решение**: + - В `calculateLockedSortValue`: используется `condition.UserID` (владелец условия) + - В `calculateWeeksHandler`: используется `condition_user_id` из запроса (если передан) или текущий пользователь (для нового условия) + - При загрузке условий: используется `condition.UserID` или `itemOwnerID` (владелец желания), но НЕ текущий пользователь + - **Важно**: Условие может принадлежать другому пользователю (на общих досках), поэтому нужно использовать именно владельца условия + +## Зависимости + +- Функция `getProjectMedian` должна быть создана (из плана сортировки) +- Функция `calculateProjectPointsFromDate` уже существует + +## Финальный шаг: Перезапуск приложения + +**После выполнения всех изменений:** + +Выполнить команду для перезапуска фронтенда и бэкенда: + +```bash +./run.sh +``` + +Это пересоберет и перезапустит: + +- Backend сервер (с пересборкой) +- Frontend приложение (с 
пересборкой) +- База данных \ No newline at end of file diff --git a/.cursor/rules/migrations.mdc b/.cursor/rules/migrations.mdc new file mode 100644 index 0000000..8e8aaac --- /dev/null +++ b/.cursor/rules/migrations.mdc @@ -0,0 +1,8 @@ +--- +description: "Запрет доработок старых миграций" +alwaysApply: true +--- + +**ВАЖНО:** Если ты меняешь структуру базы данных - напиши НОВУЮ миграцию. +НИ В КОЕМ СЛУЧАЕ не меняй старые миграции, можно добавлять только новые. +Старой миграцией считается та что была уже ранее закомичена diff --git a/.cursor/rules/restart_on_changes.mdc b/.cursor/rules/restart_on_changes.mdc new file mode 100644 index 0000000..0b8cd8f --- /dev/null +++ b/.cursor/rules/restart_on_changes.mdc @@ -0,0 +1,16 @@ +--- +description: "Перезапуск приложения после изменений в бэкенде или фронтенде" +alwaysApply: true +--- + +## Правило перезапуска приложения + +**ВАЖНО:** После применения всех изменений в бэкенде (`play-life-backend/`) или фронтенде (`play-life-web/`), а также после изменений в `docker-compose.yml`, **ОБЯЗАТЕЛЬНО** выполни команду `./run.sh` для перезапуска всех сервисов приложения. + +Это правило применяется при работе с: +- Go кодом в `play-life-backend/` +- Миграциями базы данных в `play-life-backend/migrations/` +- React компонентами и стилями в `play-life-web/src/` +- Docker конфигурациями (`docker-compose.yml`, `Dockerfile`) + +**Команда для перезапуска:** `./run.sh` или `bash run.sh` в корне проекта. diff --git a/.cursor/rules/version_bump_and_push.mdc b/.cursor/rules/version_bump_and_push.mdc new file mode 100644 index 0000000..a10e6f6 --- /dev/null +++ b/.cursor/rules/version_bump_and_push.mdc @@ -0,0 +1,71 @@ +--- +description: "Правило для поднятия версии и пуша в git" +alwaysApply: true +--- + +## Правило поднятия версии и пуша + +Когда пользователь просит **поднять версию и запушить**, выполни следующие шаги: + +### 1. 
Определи тип версии + +Определи по сообщению пользователя, какую часть версии нужно поднять: +- **major** (мажор) - первая цифра (например: 1.1.1 → 2.0.0), минор и патч должны обнулиться +- **minor** (минор) - вторая цифра (например: 1.0.1 → 1.1.0), патч должна обнулиться +- **patch** (патч) - третья цифра (например: 1.0.0 → 1.0.1) +Любая часть версии может быть больше 9, то есть может быть версия 10, 11, 12 и тд. + +**Если тип версии непонятен из контекста — обязательно спроси у пользователя!** + +### 2. Обнови версию в файлах + +Обнови версию в двух файлах: +- `VERSION` (в корне проекта) +- `play-life-web/package.json` (поле `"version"`) + +### 3. Проанализируй git diff + +Выполни `git diff --staged` и `git diff` для анализа изменений. На основе изменений составь **короткий commit message** (максимум 50 символов) на русском языке, описывающий суть изменений. В начале commit message должна быть указана версия на которую осуществился переход в формате "1.2.3: Коммит мессадж" + +### 4. Закоммить изменения + +Выполни: +```bash +git add -A +git commit -m "" +``` + +### 5. Запушь в репозиторий + +Выполни: +```bash +git push +``` + +## Правило пуша без поднятия версии + +### 1. Проанализируй git diff + +Выполни `git diff --staged` и `git diff` для анализа изменений. На основе изменений составь **короткий commit message** (максимум 50 символов) на русском языке, описывающий суть изменений + +### 2. Закоммить изменения + +Выполни: +```bash +git add -A +git commit -m "" +``` + +### 3. 
Запушь в репозиторий + +Выполни: +```bash +git push +``` +--- + +**Пример использования:** +- "Подними патч и запушь" → поднять patch версию +- "Bump minor and push" → поднять minor версию +- "Подними версию и запушь" → спросить какой тип версии поднять +- "Запуш именения" → запушить без изменения версии diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..d41e1bf --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +# Игнорируем node_modules при копировании +play-life-web/node_modules +play-life-web/dist +play-life-web/.git +play-life-backend/.git +*.md +.git +.gitignore + diff --git a/.env.test b/.env.test new file mode 100644 index 0000000..48672d3 --- /dev/null +++ b/.env.test @@ -0,0 +1,85 @@ +# ============================================ +# Единый файл конфигурации для всех проектов +# Backend и Play-Life-Web +# ============================================ + +# ============================================ +# Database Configuration +# ============================================ +DB_HOST=localhost +DB_PORT=5432 +DB_USER=playeng +DB_PASSWORD=playeng +DB_NAME=playeng_migration_test_1769347550 + +# ============================================ +# Backend Server Configuration +# ============================================ +# Порт для backend сервера (по умолчанию: 8080) +# В production всегда используется порт 8080 внутри контейнера +PORT=8080 + +# ============================================ +# Play Life Web Configuration +# ============================================ +# Порт для frontend приложения play-life-web +WEB_PORT=3001 + +# ============================================ +# Telegram Bot Configuration +# ============================================ +# Токен единого бота для всех пользователей +# Получить у @BotFather: https://t.me/botfather +TELEGRAM_BOT_TOKEN=your-bot-token-here + +# Base URL для автоматической настройки webhook +# Примеры: +# - Для production с HTTPS: https://your-domain.com +# - Для локальной разработки с ngrok: 
https://abc123.ngrok.io +# - Для прямого доступа на нестандартном порту: http://your-server:8080 +# Webhook будет настроен автоматически при старте сервера на: /webhook/telegram +# Если не указан, webhook нужно настраивать вручную +WEBHOOK_BASE_URL=https://your-domain.com + +# ============================================ +# Todoist Integration Configuration +# ============================================ +# Единое Todoist приложение для всех пользователей Play Life +# Настроить в: https://developer.todoist.com/appconsole.html +# +# В настройках Todoist приложения указать: +# - OAuth Redirect URL: /api/integrations/todoist/oauth/callback +# - Webhooks callback URL: /webhook/todoist +# - Watched events: item:completed + +# Client ID единого Todoist приложения +TODOIST_CLIENT_ID= + +# Client Secret единого Todoist приложения +TODOIST_CLIENT_SECRET= + +# Секрет для проверки подлинности webhook от Todoist (опционально) +# Получить в Developer Console: "Client secret for webhooks" +TODOIST_WEBHOOK_SECRET= + +# ============================================ +# Authentication Configuration +# ============================================ +# Секретный ключ для подписи JWT токенов +# ВАЖНО: Обязательно задайте свой уникальный секретный ключ для production! +# Если не задан, будет использован случайно сгенерированный (не рекомендуется для production) +# Можно сгенерировать с помощью: openssl rand -base64 32 +JWT_SECRET=your-super-secret-jwt-key-change-in-production + +# ============================================ +# Scheduler Configuration +# ============================================ +# Часовой пояс для планировщика задач (например: Europe/Moscow, America/New_York, UTC) +# Используется для: +# - Автоматической фиксации целей на неделю каждый понедельник в 6:00 +# - Отправки ежедневного отчёта в 23:59 +# ВАЖНО: Укажите правильный часовой пояс, иначе задачи будут срабатывать в UTC! 
+# Список доступных часовых поясов: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones +TIMEZONE=Europe/Moscow + +DB_NAME=playeng_migration_test_1769347550 diff --git a/.gitea/workflows/build-and-push.yml b/.gitea/workflows/build-and-push.yml new file mode 100644 index 0000000..dae2d1b --- /dev/null +++ b/.gitea/workflows/build-and-push.yml @@ -0,0 +1,215 @@ +name: Build and Push Docker Image + +on: + push: + branches: + - main + +jobs: + build-and-push: + runs-on: ubuntu-latest # Убедитесь, что у вашего раннера есть этот тег + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Get versions and check change + id: version_check + run: | + # Извлекаем текущую версию + CUR=$(cat VERSION | tr -d '[:space:]') + echo "current=$CUR" >> $GITHUB_OUTPUT + + # Извлекаем сообщение последнего коммита + COMMIT_MSG=$(git log -1 --pretty=%B | head -1) + echo "commit_message=$COMMIT_MSG" >> $GITHUB_OUTPUT + + # Безопасно извлекаем старую версию + PREV=$(git show HEAD~1:VERSION 2>/dev/null | tr -d '[:space:]' || echo "none") + + if [ "$CUR" != "$PREV" ]; then + echo "changed=true" >> $GITHUB_OUTPUT + else + echo "changed=false" >> $GITHUB_OUTPUT + fi + + - name: Patch DNS for Local Network + run: | + # Записываем IP Synology прямо в контейнер сборки + echo "192.168.50.55 dungeonsiege.synology.me" | sudo tee -a /etc/hosts + + - name: Build Docker Image + id: build + run: | + REGISTRY="dungeonsiege.synology.me/poignatov/play-life" + VER="${{ steps.version_check.outputs.current }}" + + echo "Building Docker image..." + echo "Registry: $REGISTRY" + echo "Tag: latest" + + # Собираем образ + docker build -t $REGISTRY:latest . 
+ + echo "✅ Successfully built image: $REGISTRY:latest" + + - name: Log in to Gitea Registry + if: steps.version_check.outputs.changed == 'true' + run: | + echo "${{ secrets.GIT_TOKEN }}" | docker login dungeonsiege.synology.me -u ${{ secrets.GIT_USERNAME }} --password-stdin + + - name: Push Docker Image + id: push + if: steps.version_check.outputs.changed == 'true' + run: | + REGISTRY="dungeonsiege.synology.me/poignatov/play-life" + VER="${{ steps.version_check.outputs.current }}" + + # Тегируем образ версией + docker tag $REGISTRY:latest $REGISTRY:$VER + + # Пушим оба тега + echo "Pushing image to registry..." + docker push $REGISTRY:latest + docker push $REGISTRY:$VER + + echo "✅ Successfully pushed to registry:" + echo " - $REGISTRY:latest" + echo " - $REGISTRY:$VER" + + - name: Send Telegram notification (build success) + if: success() && steps.version_check.outputs.changed == 'false' + uses: appleboy/telegram-action@master + with: + to: ${{ secrets.TELEGRAM_TO }} + token: ${{ secrets.TELEGRAM_TOKEN }} + format: markdown + message: | + *play-life* + `${{ steps.version_check.outputs.commit_message }}` + + Build: ✅ + Registration: ⏭️ + Deploy: ⏭️ + + - name: Deploy to Production Server + id: deploy + if: steps.version_check.outputs.changed == 'true' + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.DEPLOY_HOST }} + username: ${{ secrets.DEPLOY_USER }} + password: ${{ secrets.DEPLOY_PASSWORD }} + script: | + set -e + + # Расширяем PATH для Synology (при SSH сессии PATH минимальный) + export PATH="/usr/local/bin:/usr/syno/bin:$PATH" + + REGISTRY="dungeonsiege.synology.me/poignatov/play-life" + DEPLOY_PATH="/volume1/docker/play-life" + + echo "🚀 Начинаю деплой на production сервер..." + echo "PATH: $PATH" + + # Проверяем наличие docker + if ! command -v docker >/dev/null 2>&1; then + echo "❌ Docker не найден в PATH!" + echo "Пробуем найти docker..." 
+ which docker || find /usr -name "docker" -type f 2>/dev/null | head -5 + exit 1 + fi + + DOCKER_CMD="docker" + + # Определяем docker-compose (может быть docker compose или docker-compose) + if command -v docker-compose >/dev/null 2>&1; then + DOCKER_COMPOSE_CMD="docker-compose" + elif docker compose version >/dev/null 2>&1; then + DOCKER_COMPOSE_CMD="docker compose" + else + echo "❌ Docker Compose не найден!" + exit 1 + fi + + echo "Используем: $DOCKER_CMD и $DOCKER_COMPOSE_CMD" + + # Переходим в директорию проекта + cd $DEPLOY_PATH + + # Логинимся в registry + echo "${{ secrets.GIT_TOKEN }}" | $DOCKER_CMD login dungeonsiege.synology.me -u ${{ secrets.GIT_USERNAME }} --password-stdin + + # Обновляем образ + echo "📥 Обновляю образ из registry..." + $DOCKER_CMD pull $REGISTRY:latest + + # Перезапускаем контейнеры + echo "🔄 Перезапускаю контейнеры..." + $DOCKER_COMPOSE_CMD -f docker-compose.prod.yml up -d --force-recreate + + # Проверяем статус + echo "✅ Деплой завершен успешно" + $DOCKER_COMPOSE_CMD -f docker-compose.prod.yml ps + + - name: Send Telegram notification (publish success) + if: steps.build.outcome == 'success' && steps.version_check.outputs.changed == 'true' && steps.push.outcome == 'success' && steps.deploy.outcome == 'success' + uses: appleboy/telegram-action@master + with: + to: ${{ secrets.TELEGRAM_TO }} + token: ${{ secrets.TELEGRAM_TOKEN }} + format: markdown + message: | + *play-life* + `${{ steps.version_check.outputs.commit_message }}` + + Build: ✅ + Registration: ✅ + Deploy: ✅ + + - name: Send Telegram notification (push failure) + if: steps.build.outcome == 'success' && steps.version_check.outputs.changed == 'true' && steps.push.outcome == 'failure' + uses: appleboy/telegram-action@master + with: + to: ${{ secrets.TELEGRAM_TO }} + token: ${{ secrets.TELEGRAM_TOKEN }} + format: markdown + message: | + *play-life* + `${{ steps.version_check.outputs.commit_message }}` + + Build: ✅ + Registration: ❌ + Deploy: ⏭️ + + - name: Send Telegram 
notification (deploy failure) + if: steps.build.outcome == 'success' && steps.push.outcome == 'success' && steps.version_check.outputs.changed == 'true' && steps.deploy.outcome == 'failure' + uses: appleboy/telegram-action@master + with: + to: ${{ secrets.TELEGRAM_TO }} + token: ${{ secrets.TELEGRAM_TOKEN }} + format: markdown + message: | + *play-life* + `${{ steps.version_check.outputs.commit_message }}` + + Build: ✅ + Registration: ✅ + Deploy: ❌ + + - name: Send Telegram notification (build failure) + if: steps.build.outcome == 'failure' + uses: appleboy/telegram-action@master + with: + to: ${{ secrets.TELEGRAM_TO }} + token: ${{ secrets.TELEGRAM_TOKEN }} + format: markdown + message: | + *play-life* + `${{ steps.version_check.outputs.commit_message }}` + + Build: ❌ + Registration: ⏭️ + Deploy: ⏭️ \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..448efdd --- /dev/null +++ b/.gitignore @@ -0,0 +1,16 @@ +.env +.env.local +.env.prod +*.log +main +dist/ +node_modules/ +*.tar + +# Database dumps +database-dumps/*.sql +database-dumps/*.sql.gz +!database-dumps/.gitkeep + +# Uploaded files +uploads/ diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..e913111 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,4 @@ +{ + "version": "0.2.0", + "configurations": [] +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..82e2630 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,82 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "init", + "type": "shell", + "command": "./init.sh", + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "echo": true, + "reveal": "always", + "focus": false, + "panel": "shared", + "showReuseMessage": true, + "clear": false + }, + "problemMatcher": [], + "detail": "Инициализация Play Life: остановка контейнеров, поднятие сервисов, создание дампа с продакшена и восстановление в локальную базу" 
+ }, + { + "label": "run", + "type": "shell", + "command": "./run.sh", + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "echo": true, + "reveal": "always", + "focus": false, + "panel": "shared", + "showReuseMessage": true, + "clear": false + }, + "problemMatcher": [], + "detail": "Перезапуск Play Life: перезапуск всех контейнеров" + }, + { + "label": "backupFromProd", + "type": "shell", + "command": "./dump-db.sh", + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "echo": true, + "reveal": "always", + "focus": false, + "panel": "shared", + "showReuseMessage": true, + "clear": false + }, + "problemMatcher": [], + "detail": "Создание дампа базы данных с продакшена" + }, + { + "label": "restoreToLocal", + "type": "shell", + "command": "./restore-db.sh", + "group": { + "kind": "build", + "isDefault": false + }, + "presentation": { + "echo": true, + "reveal": "always", + "focus": false, + "panel": "shared", + "showReuseMessage": true, + "clear": false + }, + "problemMatcher": [], + "detail": "Восстановление базы данных из самого свежего дампа в локальную базу (автоматически выбирает последний дамп)" + } + ] +} + diff --git a/BUILD_INSTRUCTIONS.md b/BUILD_INSTRUCTIONS.md new file mode 100644 index 0000000..940f3c4 --- /dev/null +++ b/BUILD_INSTRUCTIONS.md @@ -0,0 +1,80 @@ +# Инструкция по сборке единого Docker образа + +Этот проект содержит единый Dockerfile для сборки frontend и backend в один образ. 
+ +## Структура + +- `Dockerfile` - единый Dockerfile для сборки frontend и backend +- `nginx-unified.conf` - конфигурация nginx для единого образа +- `supervisord.conf` - конфигурация supervisor для запуска nginx и backend +- `build-and-save.sh` - скрипт для сборки и сохранения в tar (Linux/Mac) +- `build-and-save.ps1` - скрипт для сборки и сохранения в tar (Windows PowerShell) + +## Сборка образа + +### Linux/Mac: +```bash +./build-and-save.sh +``` + +### Windows PowerShell: +```powershell +.\build-and-save.ps1 +``` + +### Вручную: +```bash +# Сборка образа +docker build -t play-life-unified:latest . + +# Сохранение в tar +docker save play-life-unified:latest -o play-life-unified.tar +``` + +## Загрузка образа на другой машине + +```bash +docker load -i play-life-unified.tar +``` + +## Запуск контейнера + +```bash +docker run -d \ + -p 80:80 \ + --env-file .env \ + --name play-life \ + play-life-unified:latest +``` + +## Переменные окружения + +Создайте файл `.env` на основе `env.example` с необходимыми переменными: + +- `DB_HOST` - хост базы данных (по умолчанию: localhost) +- `DB_PORT` - порт базы данных (по умолчанию: 5432) +- `DB_USER` - пользователь БД +- `DB_PASSWORD` - пароль БД +- `DB_NAME` - имя БД +- `WEBHOOK_BASE_URL` - базовый URL для webhook (опционально) + - Bot Token и Chat ID настраиваются через UI приложения в разделе "Интеграции" -> "Telegram" +- `TODOIST_WEBHOOK_SECRET` - секрет для Todoist webhook (опционально) + +**Важно:** Backend внутри контейнера всегда работает на порту 8080. Nginx проксирует запросы с порта 80 на backend. 
+ +## Проверка работы + +После запуска контейнера: + +- Frontend доступен по адресу: `http://localhost` +- API доступен через nginx: `http://localhost/api/...` +- Admin панель: `http://localhost/admin.html` + +## Логи + +Логи доступны через supervisor: +```bash +docker exec play-life cat /var/log/supervisor/backend.out.log +docker exec play-life cat /var/log/supervisor/nginx.out.log +``` + diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..7eee930 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,71 @@ +# Multi-stage build для единого образа frontend + backend + +# Stage 1: Build Frontend +FROM node:20-alpine AS frontend-builder +WORKDIR /app/frontend +COPY play-life-web/package*.json ./ +RUN npm ci +# Копируем исходники (node_modules исключены через .dockerignore) +COPY play-life-web/ . +RUN npm run build + +# Stage 2: Build Backend +FROM golang:1.24-alpine AS backend-builder +WORKDIR /app/backend +# Устанавливаем GOPROXY для более надежной загрузки модулей +ENV GOPROXY=https://proxy.golang.org,direct +ENV GOSUMDB=sum.golang.org +COPY play-life-backend/go.mod play-life-backend/go.sum ./ +RUN go mod download +COPY play-life-backend/ . +RUN go mod tidy +RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o main . 
+ +# Stage 3: Final image +FROM alpine:latest + +# Устанавливаем необходимые пакеты +# tzdata — данные о часовых поясах для корректной работы планировщика +RUN apk --no-cache add \ + ca-certificates \ + nginx \ + supervisor \ + curl \ + tzdata + +# Создаем директории +WORKDIR /app + +# Создаем директорию для загруженных файлов +RUN mkdir -p /app/uploads/wishlist && \ + chmod 755 /app/uploads + +# Копируем собранный frontend +COPY --from=frontend-builder /app/frontend/dist /usr/share/nginx/html + +# Копируем собранный backend +COPY --from=backend-builder /app/backend/main /app/backend/main +COPY play-life-backend/admin.html /app/backend/admin.html +# Копируем миграции для применения при запуске +COPY play-life-backend/migrations /migrations +# Копируем файл версии +COPY VERSION /app/VERSION + +# Копируем конфигурацию nginx +COPY nginx.conf /etc/nginx/nginx.conf +COPY nginx-unified.conf /etc/nginx/conf.d/default.conf + +# Копируем конфигурацию supervisor для запуска backend +COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf + +# Создаем директории для логов +RUN mkdir -p /var/log/supervisor && \ + mkdir -p /var/log/nginx && \ + mkdir -p /var/run + +# Открываем порт 80 +EXPOSE 80 + +# Запускаем supervisor, который запустит nginx и backend +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] + diff --git a/ENV_SETUP.md b/ENV_SETUP.md new file mode 100644 index 0000000..4628061 --- /dev/null +++ b/ENV_SETUP.md @@ -0,0 +1,298 @@ +# Настройка единого .env файла + +Все приложения проекта используют единый файл `.env` в корне проекта. + +## Быстрый старт + +1. Скопируйте файл `.env.example` в `.env`: + ```bash + cp .env.example .env + ``` + +2. Отредактируйте `.env` и укажите свои значения: + ```bash + nano .env + # или + vim .env + ``` + +3. **ВАЖНО**: Файл `.env` уже добавлен в `.gitignore` и не будет попадать в git. 
+ +## Структура переменных окружения + +### Database Configuration +- `DB_HOST` - хост базы данных (по умолчанию: localhost) +- `DB_PORT` - порт базы данных (по умолчанию: 5432) +- `DB_USER` - пользователь БД (по умолчанию: playeng) +- `DB_PASSWORD` - пароль БД (по умолчанию: playeng) +- `DB_NAME` - имя БД (по умолчанию: playeng) + +### Backend Server Configuration +- `PORT` - порт бэкенд сервера (по умолчанию: 8080) + - В production всегда используется порт 8080 внутри контейнера + - Nginx автоматически проксирует запросы к `http://backend:8080` + +### Frontend Configuration (play-life-web) +- `VITE_PORT` - порт для dev-сервера Vite (по умолчанию: 3000) +- `WEB_PORT` - порт для production контейнера (по умолчанию: 3001) + +**Примечание:** API запросы автоматически проксируются к бэкенду. В development режиме Vite проксирует запросы к `http://localhost:8080`. В production nginx проксирует запросы к бэкенд контейнеру. Не требуется настройка `VITE_API_BASE_URL`. + +### Telegram Bot Configuration +- `WEBHOOK_BASE_URL` - базовый URL для автоматической настройки webhook. Webhook будет настроен автоматически при сохранении bot token через UI на `/webhook/telegram`. +- Bot Token и Chat ID настраиваются через UI приложения в разделе "Интеграции" -> "Telegram" + + **Примеры значений:** + - Production с HTTPS: `https://your-domain.com` (порт не нужен для стандартных 80/443) + - Локальная разработка с ngrok: `https://abc123.ngrok.io` (порт не нужен) + - Прямой доступ на нестандартном порту: `http://your-server:8080` (порт обязателен) + +### Todoist Webhook Configuration (опционально) +- `TODOIST_WEBHOOK_SECRET` - секрет для проверки подлинности webhook от Todoist (если задан, все запросы должны содержать заголовок `X-Todoist-Webhook-Secret` с этим значением) + +## Настройка интеграции с Todoist + +Интеграция с Todoist позволяет автоматически обрабатывать закрытые задачи и добавлять их в базу данных play-life. + +### Как это работает + +1. 
При закрытии задачи в Todoist отправляется webhook на ваш сервер +2. Сервер извлекает `title` (content) и `description` из закрытой задачи +3. Склеивает их в один текст: `title + "\n" + description` +4. Обрабатывает текст через существующую логику `processMessage`, которая: + - Парсит ноды в формате `**[Project][+/-][Score]**` + - Сохраняет данные в базу данных + - Отправляет уведомление в Telegram (если настроено) + +### Настройка webhook в Todoist + +1. Откройте настройки Todoist: https://todoist.com/app/settings/integrations +2. Перейдите в раздел "Webhooks" или "Integrations" +3. Создайте новый webhook: + - **URL**: `http://your-server:8080/webhook/todoist` + - Для локальной разработки: `http://localhost:8080/webhook/todoist` + - Для production: укажите публичный URL вашего сервера + - **Event**: выберите `item:completed` (закрытие задачи) +4. Сохраните webhook + +### Безопасность (опционально) + +Для защиты webhook от несанкционированного доступа: + +1. Установите секрет в `.env`: + ```bash + TODOIST_WEBHOOK_SECRET=your_secret_key_here + ``` + +2. Настройте Todoist для отправки секрета в заголовке: + - В настройках webhook добавьте заголовок: `X-Todoist-Webhook-Secret: your_secret_key_here` + - Или используйте встроенные механизмы безопасности Todoist, если они доступны + +**Примечание**: Если `TODOIST_WEBHOOK_SECRET` не задан, проверка секрета не выполняется. + +### Формат задач в Todoist + +Для корректной обработки задачи должны содержать ноды в формате: +``` +**[ProjectName][+/-][Score]** +``` + +Примеры: +- `**[Work]+5.5**` - добавить 5.5 баллов к проекту "Work" +- `**[Health]-2.0**` - вычесть 2.0 баллов из проекта "Health" + +Ноды можно размещать как в `title` (content), так и в `description` задачи. Они будут обработаны при закрытии задачи. + +### Тестирование + +Для тестирования интеграции: + +1. Создайте задачу в Todoist с нодами, например: + - Title: `Test task` + - Description: `**[TestProject]+10.0**` + +2. Закройте задачу в Todoist + +3. 
Проверьте логи сервера - должно появиться сообщение: + ``` + Processing Todoist task: title='Test task', description='**[TestProject]+10.0**' + Successfully processed Todoist task, found 1 nodes + ``` + +4. Проверьте базу данных или веб-интерфейс - данные должны быть добавлены + + +## Использование + +### Локальная разработка + +Все приложения автоматически читают переменные из корневого `.env` файла: + +- **play-life-backend**: читает из `../.env` и `.env` (локальный имеет приоритет) +- **play-life-web**: читает из `../.env` и `.env` (локальный имеет приоритет) + +### Docker Compose + +Для запуска всех приложений в одном образе используйте корневой `docker-compose.yml`: + +```bash +docker-compose up --build +``` + +Все сервисы автоматически загружают переменные из корневого `.env` файла. + +### Отдельные приложения + +Если нужно запустить отдельные приложения, они также будут использовать корневой `.env`: + +```bash +# Backend +cd play-life-backend +docker-compose up + +# Frontend +cd play-life-web +docker-compose up +``` + +## Приоритет переменных окружения + +1. Переменные окружения системы (высший приоритет) +2. Локальный `.env` в директории приложения +3. Корневой `.env` файл +4. Значения по умолчанию в коде + +## Примеры использования + +### Изменение порта базы данных + +```bash +# В .env +DB_PORT=5433 +``` + +### Изменение порта бэкенда + +```bash +# В .env +PORT=9090 +``` + +### Изменение порта фронтенда + +```bash +# В .env +VITE_PORT=4000 # для development +WEB_PORT=4001 # для production Docker контейнера +``` + +После изменения `.env` файла перезапустите соответствующие сервисы. + +## Настройка интеграции с Telegram (webhook для сообщений пользователя) + +Интеграция с Telegram позволяет автоматически обрабатывать сообщения, отправленные пользователем в чат бота, и добавлять их в базу данных play-life. + +### Как это работает + +1. Пользователь отправляет сообщение в чат с ботом в Telegram +2. 
Telegram отправляет webhook на ваш сервер с информацией о сообщении и entities (форматирование) +3. Сервер извлекает жирный текст из entities (type === 'bold') +4. Парсит жирный текст по формату `project+/-score` (без `**`) +5. Обрабатывает текст и сохраняет данные в базу данных +6. **НЕ отправляет сообщение обратно в Telegram** (в отличие от других интеграций) + +### Отличия от других интеграций + +- **Формат нод**: `project+/-score` (без `**`), например: `Work+5.5` или `Health-2.0` +- **Определение жирного текста**: через entities от Telegram, а не через markdown `**` +- **Без обратной отправки**: сообщение не отправляется обратно в Telegram + +### Настройка webhook в Telegram + +#### Автоматическая настройка (рекомендуется) + +1. Создайте бота через [@BotFather](https://t.me/botfather) в Telegram +2. Получите токен бота +3. Добавьте `WEBHOOK_BASE_URL` в `.env`: + ```bash + WEBHOOK_BASE_URL=https://your-domain.com + ``` +4. Откройте приложение и перейдите в раздел "Интеграции" -> "Telegram" +5. Введите Bot Token в поле и нажмите "Сохранить" +6. Отправьте первое сообщение боту в Telegram - Chat ID будет сохранён автоматически + + **Важно о портах:** + - Если сервер доступен на стандартных портах (HTTP 80 или HTTPS 443), порт можно не указывать + - Если сервер работает на нестандартном порту и доступен напрямую, укажите порт: `http://your-server:8080` + - Если используется reverse proxy (nginx, etc.), указывайте внешний URL без порта: `https://your-domain.com` + +3. Запустите сервер - webhook будет настроен автоматически при старте! + + Для локальной разработки можно использовать ngrok или аналогичный сервис: + ```bash + # Установите ngrok: https://ngrok.com/ + ngrok http 8080 + # Используйте полученный URL в WEBHOOK_BASE_URL (без порта) + # Например: WEBHOOK_BASE_URL=https://abc123.ngrok.io + ``` + +4. 
Проверьте логи сервера - должно появиться сообщение: + ``` + Telegram webhook configured successfully: https://abc123.ngrok.io/webhook/telegram + ``` + +#### Ручная настройка (если не указан WEBHOOK_BASE_URL) + +Если вы не указали `WEBHOOK_BASE_URL`, webhook нужно настроить вручную (подставьте токен вашего бота вместо `<YOUR_BOT_TOKEN>`): + +```bash +curl -X POST "https://api.telegram.org/bot<YOUR_BOT_TOKEN>/setWebhook" \ + -H "Content-Type: application/json" \ + -d '{ + "url": "http://your-server:8080/webhook/telegram" + }' +``` + +Проверьте, что webhook установлен: +```bash +curl "https://api.telegram.org/bot<YOUR_BOT_TOKEN>/getWebhookInfo" +``` + +### Формат сообщений в Telegram + +Для корректной обработки сообщения должны содержать жирный текст в формате: +``` +project+/-score +``` + +Примеры: +- `Work+5.5` (жирным) - добавить 5.5 баллов к проекту "Work" +- `Health-2.0` (жирным) - вычесть 2.0 баллов из проекта "Health" + +**Важно**: Текст должен быть выделен жирным шрифтом в Telegram (через форматирование сообщения, не через `**`). + +### Тестирование + +Для тестирования интеграции: + +1. Откройте чат с вашим ботом в Telegram +2. Отправьте сообщение с жирным текстом в формате `project+/-score`, например: + - Напишите: `Test message` + - Выделите `Work+10.0` жирным шрифтом (через форматирование) + - Отправьте сообщение + +3. Проверьте логи сервера - должно появиться сообщение: + ``` + Processing Telegram message: text='Test message', entities count=1 + Successfully processed Telegram message, found 1 nodes + ``` + +4. 
Проверьте базу данных или веб-интерфейс - данные должны быть добавлены + +### Примечания + +- Webhook должен быть доступен из интернета (для production используйте публичный URL) +- Для локальной разработки используйте ngrok или аналогичный сервис для туннелирования +- Сообщения обрабатываются только если содержат жирный текст в правильном формате +- Сообщения **не отправляются обратно** в Telegram (в отличие от других интеграций) + diff --git a/IMPACT_ANALYSIS.md b/IMPACT_ANALYSIS.md new file mode 100644 index 0000000..db0158a --- /dev/null +++ b/IMPACT_ANALYSIS.md @@ -0,0 +1,184 @@ +# Импакт-анализ: Редизайн экрана редактирования доски желаний + +## Дата анализа +2025-01-21 + +## Созданные компоненты (дизайн-система) + +### 1. `SubmitButton.jsx` +- **Путь**: `play-life-web/src/components/SubmitButton.jsx` +- **Назначение**: Переиспользуемый компонент кнопки сохранения с градиентным фоном +- **Пропсы**: `loading`, `disabled`, `children`, `onClick`, `type` +- **Стили**: Градиент от #6366f1 до #8b5cf6, hover эффект с тенью +- **Использование**: Заменяет все кнопки сохранения в формах + +### 2. `DeleteButton.jsx` +- **Путь**: `play-life-web/src/components/DeleteButton.jsx` +- **Назначение**: Переиспользуемый компонент кнопки удаления с красным фоном и иконкой корзины +- **Пропсы**: `loading`, `disabled`, `onClick`, `title` +- **Стили**: Красный фон #ef4444, квадратная кнопка 44x44px +- **Использование**: Заменяет все кнопки удаления в формах + +### 3. `Buttons.css` +- **Путь**: `play-life-web/src/components/Buttons.css` +- **Назначение**: Общие стили для кнопок дизайн-системы +- **Содержимое**: + - `.form-actions` - flex-контейнер для группировки кнопок + - `.submit-button` - стили для кнопки сохранения + - `.delete-button` - стили для кнопки удаления + +## Измененные компоненты + +### 1. 
`BoardForm.jsx` +**Путь**: `play-life-web/src/components/BoardForm.jsx` + +**Изменения**: +- ✅ Заменена эмодзи копирования (📋) на SVG иконку в кнопке копирования ссылки +- ✅ Удалена кнопка "Отмена" из блока `form-actions` +- ✅ Кнопка удаления перемещена в блок `form-actions` справа от кнопки "Сохранить" +- ✅ Добавлено состояние `isDeleting` для отслеживания процесса удаления +- ✅ Удалена кнопка "Перегенерить ссылку" +- ✅ Удалена функция `handleRegenerateLink` (заменена на `generateInviteLink` для внутреннего использования) +- ✅ Интегрированы компоненты `SubmitButton` и `DeleteButton` +- ✅ Добавлен импорт `Buttons.css` + +**Затронутые места в компоненте**: +- Строки 1-5: Добавлены импорты новых компонентов и стилей +- Строка 14: Добавлено состояние `isDeleting` +- Строки 89-105: Удалена функция `handleRegenerateLink` +- Строки 114-132: Обновлена функция `handleToggleInvite` (использует `generateInviteLink`) +- Строки 134-151: Обновлена функция `handleDelete` (добавлено состояние `isDeleting`) +- Строки 216-222: Заменена эмодзи на SVG иконку копирования +- Строки 224-229: Удалена кнопка "Перегенерить ссылку" +- Строки 247-258: Обновлен блок `form-actions` (удалена кнопка "Отмена", добавлены новые компоненты) +- Строки 261-265: Удален отдельный блок с кнопкой удаления + +### 2. `BoardForm.css` +**Путь**: `play-life-web/src/components/BoardForm.css` + +**Изменения**: +- ✅ Удалены стили `.regenerate-btn` (строки 128-143) +- ✅ Удалены стили `.delete-board-btn` (строки 152-169) +- ✅ Стили кнопок теперь импортируются из `Buttons.css` + +**Затронутые места**: +- Удалено 42 строки неиспользуемых стилей + +### 3. 
`TaskForm.jsx` +**Путь**: `play-life-web/src/components/TaskForm.jsx` + +**Изменения**: +- ✅ Интегрированы компоненты `SubmitButton` и `DeleteButton` +- ✅ Добавлен импорт `Buttons.css` (через компоненты) +- ✅ Заменены нативные кнопки на компоненты дизайн-системы + +**Затронутые места в компоненте**: +- Строки 1-4: Добавлены импорты новых компонентов +- Строки 1170-1195: Заменены кнопки на компоненты `SubmitButton` и `DeleteButton` + +## Затронутые экраны + +### 1. Экран редактирования доски желаний (`board-form`) +**Компонент**: `BoardForm` +**Навигация**: Открывается из экрана списка желаний (`wishlist`) при нажатии на кнопку редактирования доски + +**Изменения в UI**: +- ✅ Кнопка копирования ссылки: эмодзи 📋 заменена на SVG иконку (два перекрывающихся прямоугольника) +- ✅ При успешном копировании показывается SVG иконка галочки вместо текста ✓ +- ✅ Удалена кнопка "Отмена" - теперь закрытие происходит только через крестик в правом верхнем углу +- ✅ Кнопка "Удалить доску" перемещена в блок действий справа от кнопки "Сохранить" +- ✅ Кнопка удаления теперь имеет красный фон и иконку корзины (как в экране редактирования задачи) +- ✅ Удалена кнопка "Перегенерить ссылку" - ссылка теперь генерируется автоматически при включении доступа +- ✅ Кнопка "Сохранить" имеет градиентный фон и hover эффект (как в экране редактирования задачи) + +**Функциональные изменения**: +- Ссылка для приглашения теперь генерируется автоматически при включении переключателя "Разрешить присоединение по ссылке" +- Кнопка удаления показывает состояние загрузки (три точки) во время удаления +- Кнопка сохранения показывает "Сохранение..." во время процесса сохранения + +**Путь навигации**: +- `wishlist` → `board-form` (при нажатии на кнопку редактирования доски) + +### 2. 
Экран редактирования задачи (`task-form`) +**Компонент**: `TaskForm` +**Навигация**: Открывается из списка задач (`tasks`) или из деталей желания (`wishlist-detail`) + +**Изменения в UI**: +- ✅ Кнопки сохранения и удаления теперь используют компоненты дизайн-системы +- ✅ Визуально идентичны кнопкам на экране редактирования доски + +**Функциональные изменения**: +- Нет функциональных изменений, только рефакторинг кода + +**Путь навигации**: +- `tasks` → `task-form` (при создании/редактировании задачи) +- `wishlist-detail` → `task-form` (при создании задачи из желания) + +## Потенциальные места для рефакторинга + +Следующие компоненты используют похожие кнопки и могут быть обновлены для использования новых компонентов дизайн-системы: + +### 1. `WishlistForm.jsx` +- **Текущее состояние**: Использует нативную кнопку с классом `submit-button` +- **Потенциал**: Можно заменить на `SubmitButton` +- **Расположение**: Строки 836-838, 1246-1248 + +### 2. `AddWords.jsx` +- **Текущее состояние**: Использует нативную кнопку с классом `submit-button` +- **Потенциал**: Можно заменить на `SubmitButton` +- **Расположение**: Строка 187 + +### 3. Другие формы +- Компоненты с кнопками удаления могут использовать `DeleteButton` +- Компоненты с кнопками сохранения могут использовать `SubmitButton` + +## Файлы, созданные/измененные + +### Созданные файлы: +1. `play-life-web/src/components/SubmitButton.jsx` (новый) +2. `play-life-web/src/components/DeleteButton.jsx` (новый) +3. `play-life-web/src/components/Buttons.css` (новый) + +### Измененные файлы: +1. `play-life-web/src/components/BoardForm.jsx` (обновлен) +2. `play-life-web/src/components/BoardForm.css` (обновлен) +3. 
`play-life-web/src/components/TaskForm.jsx` (обновлен) + +## Визуальные изменения + +### До изменений: +- Эмодзи в кнопке копирования +- Кнопка "Отмена" в блоке действий +- Кнопка удаления отдельно внизу формы +- Кнопка "Перегенерить ссылку" под полем ссылки +- Разные стили кнопок в разных формах + +### После изменений: +- SVG иконки в кнопке копирования +- Только кнопка "Сохранить" и "Удалить" в блоке действий +- Кнопка удаления справа от кнопки сохранения +- Автоматическая генерация ссылки +- Единый стиль кнопок во всех формах (дизайн-система) + +## Технические детали + +### Зависимости +- Новые компоненты не добавляют внешних зависимостей +- Используют только React и существующие стили + +### Обратная совместимость +- ✅ Все изменения обратно совместимы +- ✅ Функциональность не нарушена +- ✅ API компонентов не изменился + +### Производительность +- ✅ Нет влияния на производительность +- ✅ Компоненты легковесные +- ✅ Стили оптимизированы + +## Рекомендации + +1. **Рефакторинг других форм**: Рассмотреть возможность замены кнопок в `WishlistForm` и `AddWords` на компоненты дизайн-системы +2. **Расширение дизайн-системы**: Добавить другие типы кнопок (например, `CancelButton`, `IconButton`) +3. **Документация**: Создать документацию по использованию компонентов дизайн-системы +4. **Тестирование**: Протестировать все затронутые экраны после развертывания diff --git a/TODOIST_REFACTOR_PLAN.md b/TODOIST_REFACTOR_PLAN.md new file mode 100644 index 0000000..931b2f7 --- /dev/null +++ b/TODOIST_REFACTOR_PLAN.md @@ -0,0 +1,727 @@ +# План рефакторинга интеграции с Todoist + +## Цель +Переделать интеграцию с Todoist для использования **единого приложения**, созданного в Todoist Developer Platform. Все пользователи Play Life используют одно Todoist приложение с единым webhook URL. 
+ +## Текущая реализация +- Каждый пользователь имеет уникальный `webhook_token` в таблице `todoist_integrations` +- Webhook URL: `/webhook/todoist/{token}` (токен в URL) +- Пользователь определяется по токену из URL +- Пользователь должен вручную копировать webhook URL + +## Новая реализация (Единое приложение) +- **Единое Todoist приложение** для всех пользователей Play Life +- **Единый Webhook URL:** `/webhook/todoist` (без токена!) +- Webhook настроен в Todoist Developer Console на уровне приложения +- Пользователь определяется по `todoist_user_id` из `event_data` webhook +- OAuth используется для привязки Todoist аккаунта к Play Life аккаунту +- **Пользователю не нужно ничего настраивать** — просто нажать "Подключить Todoist"! + +## Краткое резюме изменений + +### База данных: +- **Удалить** поле `webhook_token` (больше не нужно!) +- Добавить поля: `todoist_user_id`, `todoist_email`, `access_token` + +### Переменные окружения: +- `TODOIST_CLIENT_ID` - Client ID приложения +- `TODOIST_CLIENT_SECRET` - Client Secret приложения +- `WEBHOOK_BASE_URL` - для формирования OAuth Redirect URI + +### Backend: +- **Изменить webhook handler** — идентификация по `todoist_user_id` +- Добавить OAuth endpoints для подключения/отключения +- Убрать логику с токенами в URL + +### Frontend: +- **Убрать отображение webhook URL** (не нужно!) +- Показать кнопку "Подключить Todoist" +- После подключения показать email и статус + +--- + +## 1. Изменения в базе данных + +### Миграция: `013_refactor_todoist_single_app.sql` + +**Изменения в таблице `todoist_integrations`:** + +1. **Удалить:** + - `webhook_token` — больше не нужен! Webhook единый для всего приложения. + +2. **Добавить:** + - `todoist_user_id` (BIGINT) — ID пользователя в Todoist (из OAuth, для идентификации в webhook) + - `todoist_email` (VARCHAR(255)) — Email пользователя в Todoist (из OAuth) + - `access_token` (TEXT) — OAuth access token (бессрочный в Todoist) + +3. 
**Индексы:** + - **Уникальный** индекс на `todoist_user_id` — ключевой для идентификации в webhook! + - Уникальный индекс на `todoist_email` + - Удалить индекс на `webhook_token` + +**Структура после миграции:** +```sql +CREATE TABLE todoist_integrations ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + todoist_user_id BIGINT, -- ID пользователя в Todoist (КЛЮЧЕВОЕ для webhook!) + todoist_email VARCHAR(255), -- Email пользователя в Todoist + access_token TEXT, -- OAuth access token (бессрочный) + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT todoist_integrations_user_id_unique UNIQUE (user_id) +); + +-- Индексы +CREATE UNIQUE INDEX IF NOT EXISTS idx_todoist_integrations_todoist_user_id +ON todoist_integrations(todoist_user_id) +WHERE todoist_user_id IS NOT NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS idx_todoist_integrations_todoist_email +ON todoist_integrations(todoist_email) +WHERE todoist_email IS NOT NULL; +``` + +**Ключевое изменение:** `todoist_user_id` теперь используется для идентификации пользователя при получении webhook от Todoist. + +--- + +## 2. Переменные окружения (.env) + +### Добавить в `env.example`: + +```env +# ============================================ +# Todoist OAuth Configuration +# ============================================ +# Client ID единого Todoist приложения +# Получить в: https://developer.todoist.com/appconsole.html +TODOIST_CLIENT_ID=your-todoist-client-id + +# Client Secret единого Todoist приложения +TODOIST_CLIENT_SECRET=your-todoist-client-secret + +# Секрет для проверки подлинности webhook от Todoist (опционально) +# Если задан, все запросы должны содержать заголовок X-Todoist-Webhook-Secret с этим значением +TODOIST_WEBHOOK_SECRET= +``` + +**Что нужно получить из Todoist приложения:** +1. `TODOIST_CLIENT_ID` - Client ID приложения +2. `TODOIST_CLIENT_SECRET` - Client Secret приложения +3. 
`TODOIST_WEBHOOK_SECRET` (опционально) - для дополнительной безопасности webhook + +**Важно:** В настройках Todoist приложения нужно указать Redirect URI: +- Используйте: `/api/integrations/todoist/oauth/callback` +- Например, если `WEBHOOK_BASE_URL=https://your-domain.com`, то Redirect URI: `https://your-domain.com/api/integrations/todoist/oauth/callback` + +--- + +## 3. Изменения в Backend (main.go) + +### 3.1. Обновить структуру `TodoistIntegration`: +```go +type TodoistIntegration struct { + ID int `json:"id"` + UserID int `json:"user_id"` + TodoistUserID *int64 `json:"todoist_user_id,omitempty"` // Ключевое для webhook! + TodoistEmail *string `json:"todoist_email,omitempty"` + AccessToken *string `json:"-"` // Не отдавать в JSON! + CreatedAt *time.Time `json:"created_at,omitempty"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` +} +``` + +**Важно:** +- `AccessToken` не должен отдаваться в JSON ответах (используйте `json:"-"`) +- `TodoistUserID` — ключевое поле для идентификации пользователя в webhook + +### 3.2. Webhook handler (`todoistWebhookHandler`) - КЛЮЧЕВОЕ ИЗМЕНЕНИЕ: + +**Новый подход:** +- URL: `/webhook/todoist` (БЕЗ токена!) 
+
+- Webhook настроен в Todoist Developer Console для всего приложения
+- Извлекает `user_id` из `event_data` webhook
+- Находит пользователя по `todoist_user_id`
+
+**Новая логика:**
+```go
+func (a *App) todoistWebhookHandler(w http.ResponseWriter, r *http.Request) {
+    // CORS, OPTIONS handling
+    if r.Method == "OPTIONS" {
+        setCORSHeaders(w)
+        w.WriteHeader(http.StatusOK)
+        return
+    }
+    setCORSHeaders(w)
+
+    // Проверка webhook secret (если настроен).
+    // NOTE: Todoist подписывает RAW-тело запроса (HMAC-SHA256) и передает
+    // подпись в заголовке X-Todoist-Hmac-SHA256, поэтому тело нужно прочитать
+    // (io.ReadAll) ДО json.Decode — иначе декодер его израсходует.
+    todoistWebhookSecret := getEnv("TODOIST_WEBHOOK_SECRET", "")
+    if todoistWebhookSecret != "" {
+        providedSignature := r.Header.Get("X-Todoist-Hmac-SHA256")
+        // TODO: вычислить HMAC-SHA256 от raw body с ключом todoistWebhookSecret
+        // и сравнить с providedSignature через hmac.Equal (константное время)
+        _ = providedSignature
+    }
+
+    // Парсим webhook
+    var webhook TodoistWebhook
+    if err := json.NewDecoder(r.Body).Decode(&webhook); err != nil {
+        log.Printf("Todoist webhook: error decoding: %v", err)
+        w.WriteHeader(http.StatusOK)
+        return
+    }
+
+    log.Printf("Todoist webhook: event=%s", webhook.EventName)
+
+    // Обрабатываем только item:completed
+    if webhook.EventName != "item:completed" {
+        log.Printf("Todoist webhook: ignoring event %s", webhook.EventName)
+        w.WriteHeader(http.StatusOK)
+        return
+    }
+
+    // Извлекаем user_id из event_data (это Todoist user_id!)
+ // Может приходить как string или float64 + var todoistUserID int64 + switch v := webhook.EventData["user_id"].(type) { + case float64: + todoistUserID = int64(v) + case string: + todoistUserID, _ = strconv.ParseInt(v, 10, 64) + default: + log.Printf("Todoist webhook: user_id not found or invalid type in event_data") + w.WriteHeader(http.StatusOK) + return + } + + // Находим пользователя Play Life по todoist_user_id + var userID int + err := a.DB.QueryRow(` + SELECT user_id FROM todoist_integrations + WHERE todoist_user_id = $1 + `, todoistUserID).Scan(&userID) + + if err == sql.ErrNoRows { + // Пользователь не подключил Play Life — игнорируем + log.Printf("Todoist webhook: no user found for todoist_user_id=%d (ignoring)", todoistUserID) + w.WriteHeader(http.StatusOK) + return + } + if err != nil { + log.Printf("Todoist webhook: DB error: %v", err) + w.WriteHeader(http.StatusOK) + return + } + + log.Printf("Todoist webhook: todoist_user_id=%d -> user_id=%d", todoistUserID, userID) + + // ... остальная логика обработки события (как раньше) ... +} +``` + +### 3.3. Маршрут webhook - ИЗМЕНИТЬ: +```go +// Было: +r.HandleFunc("/webhook/todoist/{token}", app.todoistWebhookHandler).Methods("POST", "OPTIONS") + +// Стало: +r.HandleFunc("/webhook/todoist", app.todoistWebhookHandler).Methods("POST", "OPTIONS") +``` + +**Важно:** Этот URL нужно указать в Todoist Developer Console при настройке приложения! + +### 3.4. Добавить OAuth endpoints: + +1. 
**Инициация OAuth:** + - `GET /api/integrations/todoist/oauth/connect` - перенаправляет на Todoist OAuth + - **ВАЖНО:** Требует авторизацию пользователя (JWT token в cookie или header) + - Генерирует `state` параметр с user_id (JWT подписанный jwtSecret) + - Формирует `redirect_uri` из `WEBHOOK_BASE_URL`: + ```go + baseURL := getEnv("WEBHOOK_BASE_URL", "") + if baseURL == "" { + sendErrorWithCORS(w, "WEBHOOK_BASE_URL must be configured", http.StatusInternalServerError) + return + } + redirectURI := strings.TrimRight(baseURL, "/") + "/api/integrations/todoist/oauth/callback" + + // Генерируем state с user_id + state := generateOAuthState(userID, jwtSecret) // JWT с user_id и exp + + // Формируем URL для редиректа + authURL := fmt.Sprintf( + "https://todoist.com/oauth/authorize?client_id=%s&scope=data:read_write&state=%s&redirect_uri=%s", + url.QueryEscape(todoistClientID), + url.QueryEscape(state), + url.QueryEscape(redirectURI), + ) + + http.Redirect(w, r, authURL, http.StatusTemporaryRedirect) + ``` + +2. **OAuth callback:** + - `GET /api/integrations/todoist/oauth/callback` - обрабатывает callback от Todoist + - **ПУБЛИЧНЫЙ ENDPOINT** (без авторизации, так как пользователь приходит от Todoist) + - Логика: + 1. Проверяет `state` параметр (JWT с user_id, exp = 1 день) + 2. Извлекает `code` из query parameters + 3. Обменивает `code` на `access_token` через POST запрос к Todoist + 4. Получает информацию о пользователе через Sync API + 5. Сохраняет/обновляет данные в БД + 6. Перенаправляет пользователя на страницу интеграций + + ```go + func (a *App) todoistOAuthCallbackHandler(w http.ResponseWriter, r *http.Request) { + frontendURL := getEnv("WEBHOOK_BASE_URL", "") + redirectSuccess := frontendURL + "/?integration=todoist&status=connected" + redirectError := frontendURL + "/?integration=todoist&status=error" + + // 1. 
Проверяем state (JWT с user_id, exp = 1 день) + state := r.URL.Query().Get("state") + userID, err := validateOAuthState(state, jwtSecret) + if err != nil { + log.Printf("Todoist OAuth: invalid state: %v", err) + http.Redirect(w, r, redirectError+"&message=invalid_state", http.StatusTemporaryRedirect) + return + } + + // 2. Получаем code + code := r.URL.Query().Get("code") + if code == "" { + log.Printf("Todoist OAuth: no code in callback") + http.Redirect(w, r, redirectError+"&message=no_code", http.StatusTemporaryRedirect) + return + } + + // 3. Обмениваем code на access_token + accessToken, err := exchangeCodeForToken(code, redirectURI) + if err != nil { + log.Printf("Todoist OAuth: token exchange failed: %v", err) + http.Redirect(w, r, redirectError+"&message=token_exchange_failed", http.StatusTemporaryRedirect) + return + } + + // 4. Получаем информацию о пользователе + todoistUser, err := getTodoistUserInfo(accessToken) + if err != nil { + log.Printf("Todoist OAuth: get user info failed: %v", err) + http.Redirect(w, r, redirectError+"&message=user_info_failed", http.StatusTemporaryRedirect) + return + } + + log.Printf("Todoist OAuth: user_id=%d connected todoist_user_id=%d email=%s", + userID, todoistUser.ID, todoistUser.Email) + + // 5. Сохраняем в БД (INSERT или UPDATE) + _, err = a.DB.Exec(` + INSERT INTO todoist_integrations (user_id, todoist_user_id, todoist_email, access_token) + VALUES ($1, $2, $3, $4) + ON CONFLICT (user_id) DO UPDATE SET + todoist_user_id = $2, + todoist_email = $3, + access_token = $4, + updated_at = CURRENT_TIMESTAMP + `, userID, todoistUser.ID, todoistUser.Email, accessToken) + + if err != nil { + log.Printf("Todoist OAuth: DB error: %v", err) + http.Redirect(w, r, redirectError+"&message=db_error", http.StatusTemporaryRedirect) + return + } + + // 6. Редирект на страницу интеграций + http.Redirect(w, r, redirectSuccess, http.StatusTemporaryRedirect) + } + ``` + +3. 
**Получение статуса интеграции:** + - `GET /api/integrations/todoist/status` - возвращает статус подключения + - Требует авторизацию (protected endpoint) + - Возвращает: + ```json + { + "connected": true, + "todoist_email": "user@example.com" + } + ``` + или если не подключено: + ```json + { + "connected": false + } + ``` + - **Примечание:** webhook_url больше не нужен — он единый для всего приложения! + +4. **Отключение интеграции:** + - `DELETE /api/integrations/todoist/disconnect` - отключает интеграцию + - Требует авторизацию (protected endpoint) + - **Удаляет запись** из `todoist_integrations` полностью + - Возвращает: `{"success": true, "message": "Todoist disconnected"}` + +### 3.5. Новые маршруты: +```go +// OAuth endpoints +protected.HandleFunc("/api/integrations/todoist/oauth/connect", app.todoistOAuthConnectHandler).Methods("GET") +r.HandleFunc("/api/integrations/todoist/oauth/callback", app.todoistOAuthCallbackHandler).Methods("GET") // Публичный! +protected.HandleFunc("/api/integrations/todoist/status", app.getTodoistStatusHandler).Methods("GET", "OPTIONS") +protected.HandleFunc("/api/integrations/todoist/disconnect", app.todoistDisconnectHandler).Methods("DELETE", "OPTIONS") + +// Webhook (единый для всего приложения) +r.HandleFunc("/webhook/todoist", app.todoistWebhookHandler).Methods("POST", "OPTIONS") + +// УДАЛИТЬ старый endpoint: +// protected.HandleFunc("/api/integrations/todoist/webhook-url", ...) // Больше не нужен! +``` + +**Важно:** +- OAuth callback должен быть публичным (пользователь приходит от Todoist без JWT) +- Webhook тоже публичный (Todoist отправляет события) +- `/api/integrations/todoist/webhook-url` — **УДАЛИТЬ**, больше не нужен! + +--- + +## 4. Изменения в Frontend (TodoistIntegration.jsx) + +### 4.1. Добавить проверку статуса подключения: +- При загрузке компонента вызывать `GET /api/integrations/todoist/status` +- Определять, подключен ли Todoist + +### 4.2. 
Добавить OAuth flow: +- **Если не подключено:** + - Показать кнопку "Подключить Todoist" + - При клике: `window.location.href = '/api/integrations/todoist/oauth/connect'` + - После OAuth callback backend перенаправит на `/?integration=todoist&status=connected` + - При загрузке проверять URL параметры и показывать соответствующее сообщение + +- **Если подключено:** + - Показать email пользователя Todoist + - Показать статус: "✅ Todoist подключен" + - Кнопка "Отключить Todoist" (вызывает `DELETE /api/integrations/todoist/disconnect`) + - **Webhook URL не нужен** — всё работает автоматически! + +### 4.3. Обновить инструкции: +- **Если не подключено:** + - Инструкция: "Нажмите кнопку 'Подключить Todoist' для авторизации" + +- **Если подключено:** + - Инструкция: "✅ Todoist подключен! Закрывайте задачи в Todoist — они автоматически появятся в Play Life." + - **Никаких дополнительных настроек не требуется!** + +### 4.4. Удалить: +- Отображение webhook URL +- Кнопку "Копировать" +- Инструкции по настройке webhook в Todoist + +--- + +## 5. Порядок выполнения изменений + +### Шаг 1: Создать миграцию БД +- Создать файл `013_refactor_todoist_single_app.sql` +- Содержимое миграции: +```sql +-- Migration: Refactor todoist_integrations for single Todoist app +-- Webhook теперь единый для всего приложения, токены в URL больше не нужны + +-- 1. Добавляем новые поля +ALTER TABLE todoist_integrations +ADD COLUMN IF NOT EXISTS todoist_user_id BIGINT; + +ALTER TABLE todoist_integrations +ADD COLUMN IF NOT EXISTS todoist_email VARCHAR(255); + +ALTER TABLE todoist_integrations +ADD COLUMN IF NOT EXISTS access_token TEXT; + +-- 2. Удаляем webhook_token (больше не нужен!) +ALTER TABLE todoist_integrations +DROP COLUMN IF EXISTS webhook_token; + +-- 3. Удаляем старый индекс на webhook_token +DROP INDEX IF EXISTS idx_todoist_integrations_webhook_token; + +-- 4. 
Создаем новые индексы +CREATE UNIQUE INDEX IF NOT EXISTS idx_todoist_integrations_todoist_user_id +ON todoist_integrations(todoist_user_id) +WHERE todoist_user_id IS NOT NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS idx_todoist_integrations_todoist_email +ON todoist_integrations(todoist_email) +WHERE todoist_email IS NOT NULL; + +-- 5. Комментарии +COMMENT ON COLUMN todoist_integrations.todoist_user_id IS 'Todoist user ID (from OAuth) - used to identify user in webhooks'; +COMMENT ON COLUMN todoist_integrations.todoist_email IS 'Todoist user email (from OAuth)'; +COMMENT ON COLUMN todoist_integrations.access_token IS 'Todoist OAuth access token (permanent)'; +``` +- Применить миграцию + +**Важно:** После миграции старые записи с `webhook_token` будут работать пока не применится миграция. После миграции все пользователи должны переподключить Todoist через OAuth. + +### Шаг 2: Обновить .env +- Добавить новые переменные окружения +- Получить данные из Todoist приложения + +### Шаг 3: Обновить Backend +- Обновить структуру `TodoistIntegration` +- Изменить webhook handler +- Добавить OAuth endpoints +- Обновить маршруты + +### Шаг 4: Обновить Frontend +- Обновить компонент `TodoistIntegration.jsx` +- Добавить OAuth flow + +### Шаг 5: Тестирование +- Протестировать OAuth flow +- Протестировать webhook с новым способом идентификации +- Проверить миграцию данных + +--- + +## 6. Важные замечания + +### 6.1. Идентификация пользователя в webhook +**Новый подход:** +- Используется `todoist_user_id` из `event_data` webhook +- `todoist_user_id` сохраняется при OAuth подключении +- Webhook приходит на единый URL `/webhook/todoist` +- Находим пользователя Play Life по `todoist_user_id` + +### 6.2. 
Миграция существующих данных +- **Удаляем `webhook_token`** — больше не нужен +- Все существующие записи будут работать после миграции, но без OAuth данных +- Пользователям нужно **переподключить Todoist через OAuth** для работы интеграции +- После миграции старый endpoint `/webhook/todoist/{token}` перестанет работать + +### 6.3. Обратная совместимость +- **НЕТ обратной совместимости** — это breaking change +- Старый endpoint `/webhook/todoist/{token}` удаляется +- Все пользователи должны переподключить Todoist +- **Рекомендация:** Уведомить пользователей о необходимости переподключения + +### 6.3.1. Удаляемый код +**Удалить полностью:** +- Endpoint `GET /api/integrations/todoist/webhook-url` +- Handler `getTodoistWebhookURLHandler` +- Маршрут `/webhook/todoist/{token}` +- Функция генерации webhook_token для Todoist + +### 6.4. Безопасность +- OAuth токен (`access_token`) не отдавать в JSON ответах (json:"-") +- Использовать `TODOIST_WEBHOOK_SECRET` для проверки подлинности webhook (если настроен в Todoist) +- Todoist access_token бессрочный, но пользователь может отозвать его в настройках Todoist +- User-Agent для запросов к Todoist API: `PlayLife` + +### 6.5. OAuth Flow (детально) +1. Пользователь нажимает "Подключить Todoist" +2. Backend генерирует `state` (случайная строка или JWT с user_id) и сохраняет его +3. Перенаправление на Todoist OAuth: + ``` + https://todoist.com/oauth/authorize? + client_id=& + scope=data:read_write& + state=& + redirect_uri=/api/integrations/todoist/oauth/callback + ``` +4. Пользователь авторизуется в Todoist +5. Todoist перенаправляет на `redirect_uri` с `code` и `state` +6. Backend проверяет `state` и обменивает `code` на `access_token`: + ``` + POST https://todoist.com/oauth/access_token + Content-Type: application/x-www-form-urlencoded + + client_id=& + client_secret=& + code=& + redirect_uri= + ``` +7. 
Backend получает информацию о пользователе через Todoist Sync API: + ``` + POST https://api.todoist.com/sync/v9/sync + Authorization: Bearer + Content-Type: application/x-www-form-urlencoded + User-Agent: PlayLife + + sync_token=*&resource_types=["user"] + ``` + Ответ содержит `user.id` и `user.email` +8. Backend сохраняет `todoist_user_id`, `todoist_email`, `access_token` в БД +9. Перенаправление пользователя на страницу интеграций + +### 6.6. Хранение state для OAuth +Используем JWT токен (не требует хранения в БД): + +```go +// Генерация state (таймаут = 1 день) +func generateOAuthState(userID int, jwtSecret string) string { + state := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{ + "user_id": userID, + "type": "todoist_oauth", + "exp": time.Now().Add(24 * time.Hour).Unix(), // 1 день + }) + stateString, _ := state.SignedString([]byte(jwtSecret)) + return stateString +} + +// Проверка state в callback +func validateOAuthState(stateString string, jwtSecret string) (int, error) { + token, err := jwt.Parse(stateString, func(token *jwt.Token) (interface{}, error) { + return []byte(jwtSecret), nil + }) + if err != nil { + return 0, err + } + + claims, ok := token.Claims.(jwt.MapClaims) + if !ok || !token.Valid { + return 0, fmt.Errorf("invalid token") + } + + if claims["type"] != "todoist_oauth" { + return 0, fmt.Errorf("wrong token type") + } + + userID := int(claims["user_id"].(float64)) + return userID, nil +} +``` + +### 6.7. Особенности Todoist OAuth +- **Scope:** `data:read_write` — полный доступ к данным пользователя +- **Access Token:** Todoist выдает бессрочный access_token +- **Refresh Token:** Todoist НЕ использует refresh_token +- **Отзыв токена:** Пользователь может отозвать доступ в настройках Todoist + +### 6.8. 
Обработка ошибок + +**Если todoist_user_id не найден в webhook:** +- Логировать: `log.Printf("Todoist webhook: no user found for todoist_user_id=%d", todoistUserID)` +- Возвращать `200 OK` (чтобы Todoist не делал retry) +- Игнорировать событие + +**Если токен отозван пользователем:** +- При попытке использовать access_token Todoist вернет ошибку +- Автоматически отключить интеграцию (удалить запись из БД) +- Логировать: `log.Printf("Todoist: token revoked for user_id=%d, disconnecting", userID)` + +**При disconnect:** +- Просто удалить запись из БД +- НЕ отзывать токен через Todoist API (упрощение) + +### 6.9. События Todoist +Подписываемся только на: **`item:completed`** + +Другие события (`item:added`, `item:updated`, `item:deleted`) не нужны. + +--- + +## 7. Архитектура: Единый Webhook + +**Ключевое решение:** Используем единый webhook URL для всего приложения. + +### Как это работает: + +1. **Настройка в Todoist Developer Console:** + - Создать приложение в https://developer.todoist.com/appconsole.html + - Указать Webhook URL: `/webhook/todoist` + - Указать OAuth Redirect URI: `/api/integrations/todoist/oauth/callback` + - Выбрать события: `item:completed` + +2. **При OAuth подключении:** + - Пользователь нажимает "Подключить Todoist" + - Авторизуется в Todoist + - Play Life получает `access_token` и информацию о пользователе + - Сохраняем `todoist_user_id` — это ключ для идентификации в webhook + +3. **При получении webhook:** + - Todoist отправляет POST на `/webhook/todoist` + - В `event_data` есть `user_id` (это Todoist user_id) + - Находим пользователя Play Life по `todoist_user_id` + - Обрабатываем событие + +### Преимущества: +- ✅ Пользователю не нужно ничего настраивать! +- ✅ Нет токенов в URL +- ✅ Простая архитектура +- ✅ Webhook настраивается один раз в Developer Console + +--- + +## 8. Настройка Todoist приложения в Developer Console + +### Шаги настройки: +1. Зайти в https://developer.todoist.com/appconsole.html +2. 
Создать новое приложение или открыть существующее +3. Заполнить: + - **App name:** Play Life + - **App description:** Интеграция с Play Life для отслеживания прогресса + - **OAuth Redirect URL:** `/api/integrations/todoist/oauth/callback` + - **Webhooks callback URL:** `/webhook/todoist` + - **Watched events:** `item:completed` (только это событие!) +4. Скопировать: + - **Client ID** → `TODOIST_CLIENT_ID` + - **Client Secret** → `TODOIST_CLIENT_SECRET` + - **Client secret for webhooks** (если есть) → `TODOIST_WEBHOOK_SECRET` + +### Важные настройки: +- **OAuth scope:** `data:read_write` +- **Watched events:** только `item:completed` +- Другие события НЕ подписывать + +### Формат webhook от Todoist: +```json +{ + "event_name": "item:completed", + "user_id": "12345678", // ← Это todoist_user_id для идентификации! + "event_data": { + "id": "task_id", + "content": "Task title", + "description": "Task description", + "user_id": "12345678", // ← Тоже здесь + ... + } +} +``` + +**Важно:** `user_id` приходит как string, нужно конвертировать в int64. + +--- + +## 9. Краткая сводка для быстрого старта + +### Настройка Todoist приложения: +1. Зайти в https://developer.todoist.com/appconsole.html +2. Создать приложение +3. Настроить: + - **OAuth Redirect URL:** `/api/integrations/todoist/oauth/callback` + - **Webhooks callback URL:** `/webhook/todoist` + - **Watched events:** `item:completed` +4. Скопировать Client ID и Client Secret + +### Что добавить в .env: +```env +TODOIST_CLIENT_ID=your-client-id-here +TODOIST_CLIENT_SECRET=your-client-secret-here +TODOIST_WEBHOOK_SECRET= # опционально, из Developer Console +``` + +### Что изменится в базе данных: +- Добавятся поля: `todoist_user_id`, `todoist_email`, `access_token` +- **Удалится поле:** `webhook_token` + +### Что изменится для пользователей: +- Пользователи нажимают "Подключить Todoist" +- Авторизуются в Todoist +- **Готово!** Никаких дополнительных настроек! 
+- Закрытые задачи в Todoist автоматически появляются в Play Life + +### Порядок реализации: +1. ⬜ Настроить Todoist приложение в Developer Console +2. ⬜ Создать миграцию БД (`013_refactor_todoist_single_app.sql`) +3. ⬜ Обновить `.env` с новыми переменными +4. ⬜ Реализовать OAuth endpoints в Backend +5. ⬜ Обновить webhook handler (идентификация по todoist_user_id) +6. ⬜ Обновить Frontend компонент +7. ⬜ Удалить старый код (webhook-url endpoint, токены) +8. ⬜ Протестировать OAuth flow и webhook + diff --git a/VERSION b/VERSION new file mode 100644 index 0000000..0062ac9 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +5.0.0 diff --git a/build-and-save.ps1 b/build-and-save.ps1 new file mode 100644 index 0000000..1f5d8f5 --- /dev/null +++ b/build-and-save.ps1 @@ -0,0 +1,25 @@ +# PowerShell скрипт для сборки единого Docker образа и сохранения в tar + +$ErrorActionPreference = "Stop" + +$IMAGE_NAME = "play-life-unified" +$IMAGE_TAG = if ($env:IMAGE_TAG) { $env:IMAGE_TAG } else { "latest" } +$TAR_FILE = if ($env:TAR_FILE) { $env:TAR_FILE } else { "play-life-unified.tar" } + +Write-Host "🔨 Сборка единого Docker образа..." -ForegroundColor Cyan +docker build -t "${IMAGE_NAME}:${IMAGE_TAG}" . + +Write-Host "💾 Сохранение образа в tar файл..." 
-ForegroundColor Cyan +docker save "${IMAGE_NAME}:${IMAGE_TAG}" -o "${TAR_FILE}" + +$fileSize = (Get-Item "${TAR_FILE}").Length / 1MB +Write-Host "✅ Образ успешно сохранен в ${TAR_FILE}" -ForegroundColor Green +Write-Host "📦 Размер файла: $([math]::Round($fileSize, 2)) MB" -ForegroundColor Green + +Write-Host "" +Write-Host "Для загрузки образа на другой машине используйте:" -ForegroundColor Yellow +Write-Host " docker load -i ${TAR_FILE}" -ForegroundColor White +Write-Host "" +Write-Host "Для запуска контейнера используйте:" -ForegroundColor Yellow +Write-Host " docker run -d -p 80:80 --env-file .env ${IMAGE_NAME}:${IMAGE_TAG}" -ForegroundColor White + diff --git a/build-and-save.sh b/build-and-save.sh new file mode 100644 index 0000000..f0ecf72 --- /dev/null +++ b/build-and-save.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +# Скрипт для сборки единого Docker образа и сохранения в tar + +set -e + +IMAGE_NAME="play-life-unified" +IMAGE_TAG="${IMAGE_TAG:-latest}" +TAR_FILE="${TAR_FILE:-play-life-unified.tar}" + +echo "🔨 Сборка единого Docker образа..." +docker build -t "${IMAGE_NAME}:${IMAGE_TAG}" . + +echo "💾 Сохранение образа в tar файл..." 
+docker save "${IMAGE_NAME}:${IMAGE_TAG}" -o "${TAR_FILE}" + +echo "✅ Образ успешно сохранен в ${TAR_FILE}" +echo "📦 Размер файла: $(du -h ${TAR_FILE} | cut -f1)" + +echo "" +echo "Для загрузки образа на другой машине используйте:" +echo " docker load -i ${TAR_FILE}" +echo "" +echo "Для запуска контейнера используйте:" +echo " docker run -d -p 80:80 --env-file .env ${IMAGE_NAME}:${IMAGE_TAG}" + diff --git a/check-repo-fs.sh b/check-repo-fs.sh new file mode 100755 index 0000000..f5a68f4 --- /dev/null +++ b/check-repo-fs.sh @@ -0,0 +1,75 @@ +#!/bin/bash +# Скрипт для проверки файловой системы репозитория Gitea +# Выполните на сервере с административным доступом + +REPO_PATH="/poignatov/play-life.git" +GITEA_USER="git" # или пользователь, под которым работает Gitea + +echo "=== Проверка существования репозитория ===" +if [ -d "$REPO_PATH" ]; then + echo "✓ Репозиторий существует" +else + echo "✗ Репозиторий НЕ найден: $REPO_PATH" + exit 1 +fi + +echo "" +echo "=== Проверка прав доступа ===" +ls -ld "$REPO_PATH" + +echo "" +echo "=== Проверка владельца ===" +OWNER=$(stat -c '%U:%G' "$REPO_PATH" 2>/dev/null || stat -f '%Su:%Sg' "$REPO_PATH" 2>/dev/null) +echo "Владелец: $OWNER" + +echo "" +echo "=== Проверка размера репозитория ===" +du -sh "$REPO_PATH" + +echo "" +echo "=== Проверка свободного места ===" +df -h "$REPO_PATH" | tail -1 + +echo "" +echo "=== Проверка ключевых файлов Git ===" +if [ -f "$REPO_PATH/config" ]; then + echo "✓ config существует" +else + echo "✗ config НЕ найден" +fi + +if [ -d "$REPO_PATH/objects" ]; then + echo "✓ objects/ существует" + echo " Количество объектов: $(find "$REPO_PATH/objects" -type f | wc -l)" +else + echo "✗ objects/ НЕ найден" +fi + +if [ -f "$REPO_PATH/HEAD" ]; then + echo "✓ HEAD существует" + echo " Текущая ветка: $(cat "$REPO_PATH/HEAD")" +else + echo "✗ HEAD НЕ найден" +fi + +if [ -f "$REPO_PATH/refs/heads/main" ]; then + echo "✓ refs/heads/main существует" + echo " Последний коммит: $(cat "$REPO_PATH/refs/heads/main")" 
+
+else
+    echo "✗ refs/heads/main НЕ найден"
+fi
+
+echo ""
+echo "=== Проверка целостности репозитория ==="
+cd "$REPO_PATH"
+if FSCK_OUTPUT=$(git fsck --no-progress 2>&1); then
+    echo "$FSCK_OUTPUT" | head -20; echo "✓ Репозиторий цел"
+else
+    echo "$FSCK_OUTPUT" | head -20; echo "✗ Обнаружены проблемы с целостностью"
+fi
+
+echo ""
+echo "=== Проверка логов Gitea ==="
+echo "Проверьте логи Gitea на наличие ошибок:"
+echo "  - /var/log/gitea/gitea.log"
+echo "  - или в директории, указанной в конфиге Gitea"
diff --git a/database-dumps/.gitkeep b/database-dumps/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/database-dumps/README.md b/database-dumps/README.md
new file mode 100644
index 0000000..34579df
--- /dev/null
+++ b/database-dumps/README.md
@@ -0,0 +1,59 @@
+# Database Dumps
+
+Эта директория содержит дампы базы данных для разработки и тестирования.
+
+## Использование
+
+### Создание дампа
+
+```bash
+# Дамп из БД (по умолчанию .env.prod)
+./dump-db.sh
+
+# Дамп с именем
+./dump-db.sh production-backup
+
+# Дамп из другого окружения
+./dump-db.sh --env-file .env.prod my-backup
+```
+
+### Просмотр дампов
+
+```bash
+./list-dumps.sh
+```
+
+### Восстановление дампа
+
+```bash
+# Восстановление в БД (по умолчанию .env)
+./restore-db.sh dump_20240101_120000.sql.gz
+
+# Восстановление в другое окружение
+./restore-db.sh --env-file .env.prod dump_20240101_120000.sql.gz
+
+# Можно указать имя без расширения
+./restore-db.sh dump_20240101_120000
+```
+
+## Поведение по умолчанию
+
+- **Создание дампа**: использует `.env.prod`
+- **Восстановление**: использует `.env`
+
+Это можно изменить с помощью параметра `--env-file`.
+
+## Важно
+
+⚠️ **Восстановление дампа удалит все данные в целевой базе данных!**
+
+Всегда проверяйте, в какую БД вы восстанавливаете данные.
+
+## Формат файлов
+
+Дампы сохраняются в формате:
+- `dump_YYYYMMDD_HHMMSS.sql.gz` - автоматическое имя с датой/временем
+- `имя_дампа.sql.gz` - именованный дамп
+
+Все дампы автоматически сжимаются с помощью gzip.
+ diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml new file mode 100644 index 0000000..4ab1ca1 --- /dev/null +++ b/docker-compose.prod.yml @@ -0,0 +1,27 @@ +version: '3.8' + +# Production конфигурация для Synology +# Использует образ из registry вместо локальной сборки +# База данных postgres запущена отдельно (не в этом compose) + +services: + play-life: + image: dungeonsiege.synology.me/poignatov/play-life:latest + container_name: play-life-prod + ports: + - "3080:80" + volumes: + - /volume1/docker/play-life/uploads:/app/uploads:rw + restart: always + env_file: + - .env + # Подключаемся к общей сети playlife-net + # Перед первым запуском нужно создать сеть и подключить postgres: + # docker network create playlife-net + # docker network connect playlife-net postgres1 + networks: + - playlife-net + +networks: + playlife-net: + external: true diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..6b1b341 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,83 @@ +version: '3.8' + +# Единый docker-compose для всех приложений в одном образе +# Использует корневой .env файл + +services: + # База данных PostgreSQL + db: + image: postgres:15-alpine + restart: unless-stopped + environment: + POSTGRES_USER: ${DB_USER:-playeng} + POSTGRES_PASSWORD: ${DB_PASSWORD:-playeng} + POSTGRES_DB: ${DB_NAME:-playeng} + ports: + - "${DB_PORT:-5432}:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-playeng}"] + interval: 10s + timeout: 5s + retries: 5 + env_file: + - .env + + # Backend сервер (Go) + backend: + build: + context: . 
+ dockerfile: ./play-life-backend/Dockerfile + ports: + - "${PORT:-8080}:8080" + environment: + DB_HOST: db + DB_PORT: 5432 + DB_USER: ${DB_USER:-playeng} + DB_PASSWORD: ${DB_PASSWORD:-playeng} + DB_NAME: ${DB_NAME:-playeng} + PORT: ${PORT:-8080} + depends_on: + db: + condition: service_healthy + volumes: + - ./play-life-backend/migrations:/migrations + - ./uploads:/app/uploads + env_file: + - .env + + # Frontend приложение play-life-web + play-life-web: + build: + context: ./play-life-web + dockerfile: Dockerfile + container_name: play-life-web + ports: + - "${WEB_PORT:-3001}:80" + restart: unless-stopped + depends_on: + - backend + env_file: + - .env + + # LLM сервис (Ollama + Tavily), свой Docker и свой env + llm: + build: + context: ./play-life-llm + dockerfile: Dockerfile + container_name: play-life-llm + ports: + - "8090:8090" + restart: unless-stopped + env_file: + - ./play-life-llm/.env + +volumes: + postgres_data: + name: play-life_postgres_data + +networks: + default: + name: play-life-network + diff --git a/dump-db.sh b/dump-db.sh new file mode 100755 index 0000000..2833d42 --- /dev/null +++ b/dump-db.sh @@ -0,0 +1,138 @@ +#!/bin/bash + +# Скрипт для создания дампа базы данных +# Использование: +# ./dump-db.sh [имя_дампа] # Дамп из .env.prod +# ./dump-db.sh --env-file .env [имя] # Дамп из указанного файла +# ./dump-db.sh production-backup # Именованный дамп из .env.prod + +set -e + +# Значения по умолчанию +DEFAULT_ENV_FILE=".env.prod" +ENV_FILE="$DEFAULT_ENV_FILE" +DUMP_NAME="" + +# Парсим аргументы +while [[ $# -gt 0 ]]; do + case $1 in + --env-file) + ENV_FILE="$2" + shift 2 + ;; + *) + if [ -z "$DUMP_NAME" ]; then + DUMP_NAME="$1" + else + echo "❌ Ошибка: Неизвестный аргумент: $1" + echo "Использование: ./dump-db.sh [--env-file FILE] [имя_дампа]" + exit 1 + fi + shift + ;; + esac +done + +# Загружаем переменные окружения из указанного файла +if [ -f "$ENV_FILE" ]; then + export $(cat "$ENV_FILE" | grep -v '^#' | grep -v '^$' | xargs) + echo "📋 
Используется файл окружения: $ENV_FILE" +else + echo "⚠️ Файл $ENV_FILE не найден, используются значения по умолчанию" +fi + +DB_HOST=${DB_HOST:-localhost} +DB_PORT=${DB_PORT:-5432} +DB_USER=${DB_USER:-playeng} +DB_PASSWORD=${DB_PASSWORD:-playeng} +DB_NAME=${DB_NAME:-playeng} + +# Создаем директорию для дампов, если её нет +mkdir -p database-dumps + +# Генерируем имя файла с датой и временем, если не указано +if [ -z "$DUMP_NAME" ]; then + DUMP_NAME="dump_$(date +%Y%m%d_%H%M%S).sql" +else + DUMP_NAME="$DUMP_NAME.sql" +fi + +DUMP_PATH="database-dumps/$DUMP_NAME" + +echo "🗄️ Создание дампа базы данных..." +echo " База: $DB_NAME" +echo " Хост: $DB_HOST:$DB_PORT" +echo " Пользователь: $DB_USER" +echo " Файл: $DUMP_PATH" + +# Создаем дамп через docker-compose, если контейнер запущен И хост локальный +if [ "$DB_HOST" = "localhost" ] || [ "$DB_HOST" = "127.0.0.1" ] || [ -z "$DB_HOST" ]; then + if docker-compose ps db 2>/dev/null | grep -q "Up"; then + echo " Используется docker-compose..." + docker-compose exec -T db pg_dump -U "$DB_USER" -d "$DB_NAME" > "$DUMP_PATH" + elif command -v pg_dump &> /dev/null; then + # Или напрямую через pg_dump, если БД доступна локально + echo " Используется локальный pg_dump..." + PGPASSWORD="$DB_PASSWORD" pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" > "$DUMP_PATH" + elif command -v docker &> /dev/null; then + # Используем Docker образ postgres для создания дампа + echo " Используется Docker (postgres:latest)..." + docker run --rm -i --network host \ + -e PGPASSWORD="$DB_PASSWORD" \ + postgres:latest \ + pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" > "$DUMP_PATH" + else + echo "❌ Ошибка: pg_dump не найден, docker-compose не запущен и Docker недоступен" + echo " Установите PostgreSQL клиент или Docker" + exit 1 + fi +else + # Для удаленных хостов используем pg_dump или Docker + if command -v pg_dump &> /dev/null; then + echo " Используется локальный pg_dump..." 
+ PGPASSWORD="$DB_PASSWORD" pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" > "$DUMP_PATH" + elif command -v docker &> /dev/null; then + # Используем Docker образ postgres для создания дампа + # Используем latest для совместимости с разными версиями сервера + echo " Используется Docker (postgres:latest)..." + # Используем --network host для доступа к удаленным хостам + docker run --rm -i --network host \ + -e PGPASSWORD="$DB_PASSWORD" \ + postgres:latest \ + pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" > "$DUMP_PATH" + else + echo "❌ Ошибка: pg_dump не найден и Docker недоступен" + echo " Установите PostgreSQL клиент или Docker" + exit 1 + fi +fi + +# Сжимаем дамп +echo " Сжатие дампа..." +gzip -f "$DUMP_PATH" +DUMP_PATH="${DUMP_PATH}.gz" + +echo "✅ Дамп успешно создан: $DUMP_PATH" +echo " Размер: $(du -h "$DUMP_PATH" | cut -f1)" + +# Ограничиваем количество дампов (максимум 10) +MAX_DUMPS=10 +DUMP_COUNT=$(ls -1 database-dumps/*.sql.gz 2>/dev/null | wc -l | tr -d ' ') + +if [ "$DUMP_COUNT" -gt "$MAX_DUMPS" ]; then + echo "" + echo "🧹 Очистка старых дампов (максимум $MAX_DUMPS)..." 
+ # Сортируем по дате модификации (новые первыми) и удаляем самые старые + OLD_DUMPS=$(ls -1t database-dumps/*.sql.gz 2>/dev/null | tail -n +$((MAX_DUMPS + 1))) + if [ -n "$OLD_DUMPS" ]; then + REMOVED_COUNT=0 + for old_dump in $OLD_DUMPS; do + rm -f "$old_dump" + REMOVED_COUNT=$((REMOVED_COUNT + 1)) + echo " Удален: $(basename "$old_dump")" + done + echo " Удалено дампов: $REMOVED_COUNT" + echo " Осталось дампов: $MAX_DUMPS" + fi +fi + diff --git a/env.example b/env.example new file mode 100644 index 0000000..fb24704 --- /dev/null +++ b/env.example @@ -0,0 +1,104 @@ +# ============================================ +# Единый файл конфигурации для всех проектов +# Backend и Play-Life-Web +# ============================================ + +# ============================================ +# Database Configuration +# ============================================ +DB_HOST=localhost +DB_PORT=5432 +DB_USER=playeng +DB_PASSWORD=playeng +DB_NAME=playeng + +# ============================================ +# Backend Server Configuration +# ============================================ +# Порт для backend сервера (по умолчанию: 8080) +# В production всегда используется порт 8080 внутри контейнера +PORT=8080 + +# ============================================ +# Play Life Web Configuration +# ============================================ +# Порт для frontend приложения play-life-web +WEB_PORT=3001 + +# ============================================ +# Telegram Bot Configuration +# ============================================ +# Токен единого бота для всех пользователей +# Получить у @BotFather: https://t.me/botfather +TELEGRAM_BOT_TOKEN=your-bot-token-here + +# Base URL для автоматической настройки webhook +# Примеры: +# - Для production с HTTPS: https://your-domain.com +# - Для локальной разработки с ngrok: https://abc123.ngrok.io +# - Для прямого доступа на нестандартном порту: http://your-server:8080 +# Webhook будет настроен автоматически при старте сервера на: /webhook/telegram +# 
Если не указан, webhook нужно настраивать вручную +WEBHOOK_BASE_URL=https://your-domain.com + +# ============================================ +# Todoist Integration Configuration +# ============================================ +# Единое Todoist приложение для всех пользователей Play Life +# Настроить в: https://developer.todoist.com/appconsole.html +# +# В настройках Todoist приложения указать: +# - OAuth Redirect URL: /api/integrations/todoist/oauth/callback +# - Webhooks callback URL: /webhook/todoist +# - Watched events: item:completed + +# Client ID единого Todoist приложения +TODOIST_CLIENT_ID= + +# Client Secret единого Todoist приложения +TODOIST_CLIENT_SECRET= + +# Секрет для проверки подлинности webhook от Todoist (опционально) +# Получить в Developer Console: "Client secret for webhooks" +TODOIST_WEBHOOK_SECRET= + +# ============================================ +# Fitbit Integration Configuration +# ============================================ +# Fitbit приложение для интеграции с Play Life +# Настроить в: https://dev.fitbit.com/apps +# +# В настройках Fitbit приложения указать: +# - OAuth 2.0 Application Type: Server +# - Callback URL: /api/integrations/fitbit/oauth/callback +# - Default Access Type: Read-Only +# - Scopes: activity, profile +# - Terms of Service URL: /terms +# - Privacy Policy URL: /privacy + +# Client ID Fitbit приложения +FITBIT_CLIENT_ID= + +# Client Secret Fitbit приложения +FITBIT_CLIENT_SECRET= + +# ============================================ +# Authentication Configuration +# ============================================ +# Секретный ключ для подписи JWT токенов +# ВАЖНО: Обязательно задайте свой уникальный секретный ключ для production! 
+# Если не задан, будет использован случайно сгенерированный (не рекомендуется для production) +# Можно сгенерировать с помощью: openssl rand -base64 32 +JWT_SECRET=your-super-secret-jwt-key-change-in-production + +# ============================================ +# Scheduler Configuration +# ============================================ +# Часовой пояс для планировщика задач (например: Europe/Moscow, America/New_York, UTC) +# Используется для: +# - Автоматической фиксации целей на неделю каждый понедельник в 6:00 +# - Отправки ежедневного отчёта в 23:59 +# ВАЖНО: Укажите правильный часовой пояс, иначе задачи будут срабатывать в UTC! +# Список доступных часовых поясов: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones +TIMEZONE=Europe/Moscow + diff --git a/init.sh b/init.sh new file mode 100755 index 0000000..6a31379 --- /dev/null +++ b/init.sh @@ -0,0 +1,160 @@ +#!/bin/bash + +# Скрипт для первоначальной настройки и запуска приложения +# Использование: ./init.sh + +set -e + +# Цвета для вывода +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +# Проверка наличия .env файла +if [ ! -f ".env" ]; then + echo -e "${RED}❌ Файл .env не найден!${NC}" + echo " Создайте файл .env на основе env.example" + exit 1 +fi + +# Загружаем переменные окружения +export $(cat .env | grep -v '^#' | grep -v '^$' | xargs) + +# Значения по умолчанию +DB_USER=${DB_USER:-playeng} +DB_PASSWORD=${DB_PASSWORD:-playeng} +DB_NAME=${DB_NAME:-playeng} +DB_PORT=${DB_PORT:-5432} +PORT=${PORT:-8080} +WEB_PORT=${WEB_PORT:-3001} + +echo -e "${GREEN}🚀 Инициализация Play Life...${NC}" +echo "" + +# 1. Остановка и удаление существующих контейнеров +echo -e "${YELLOW}1. 
Остановка существующих контейнеров...${NC}" +docker-compose down -v 2>/dev/null || true +echo -e "${GREEN} ✅ Контейнеры остановлены${NC}" + +# Удаляем старые образы postgres, если они есть +echo -e "${YELLOW} Удаление старых образов postgres...${NC}" +docker images | grep -E "postgres:(15|16|17|18|latest)" | awk '{print $3}' | xargs -r docker rmi -f 2>/dev/null || true +echo -e "${GREEN} ✅ Старые образы postgres удалены${NC}" +echo "" + +# 2. Поднятие всех сервисов +echo -e "${YELLOW}2. Поднятие сервисов через Docker Compose...${NC}" +echo " - База данных PostgreSQL 18.0 (порт: $DB_PORT)" +echo " - Backend сервер (порт: $PORT)" +echo " - Frontend приложение (порт: $WEB_PORT)" +docker-compose up -d --build +echo -e "${GREEN} ✅ Сервисы запущены${NC}" +echo "" + +# 3. Ожидание готовности базы данных +echo -e "${YELLOW}3. Ожидание готовности базы данных...${NC}" +MAX_WAIT=60 +WAIT_COUNT=0 +while ! docker-compose exec -T db pg_isready -U "$DB_USER" >/dev/null 2>&1; do + if [ $WAIT_COUNT -ge $MAX_WAIT ]; then + echo -e "${RED} ❌ База данных не готова за $MAX_WAIT секунд${NC}" + exit 1 + fi + echo -n "." + sleep 1 + WAIT_COUNT=$((WAIT_COUNT + 1)) +done +echo "" +echo -e "${GREEN} ✅ База данных готова${NC}" +echo "" + +# 4. Поиск самого свежего дампа +echo -e "${YELLOW}4. Поиск самого свежего дампа...${NC}" +DUMP_DIR="database-dumps" + +if [ ! -d "$DUMP_DIR" ]; then + echo -e "${YELLOW} ⚠️ Директория дампов не найдена, создаём...${NC}" + mkdir -p "$DUMP_DIR" +fi + +# Ищем все дампы (сначала .sql.gz, потом .sql) +LATEST_DUMP=$(ls -t "$DUMP_DIR"/*.{sql.gz,sql} 2>/dev/null | head -n 1) + +if [ -z "$LATEST_DUMP" ]; then + echo -e "${YELLOW} ⚠️ Дампы не найдены${NC}" + echo "" + + # Создаём дамп с продакшена используя креденшелы из .env + echo -e "${YELLOW}5. 
Создание дампа с продакшена...${NC}" + echo -e "${BLUE} 📦 Используются креденшелы из .env${NC}" + echo " Используется скрипт dump-db.sh" + + if [ -f "./dump-db.sh" ]; then + chmod +x ./dump-db.sh + DUMP_NAME="prod_backup_$(date +%Y%m%d_%H%M%S)" + + # Временно останавливаем контейнер db, чтобы dump-db.sh не использовал docker-compose exec + # и подключился напрямую к продакшен базе по креденшелам из .env + echo -e "${BLUE} ⏸️ Временно останавливаем локальный контейнер db для создания дампа с продакшена...${NC}" + docker-compose stop db 2>/dev/null || true + + # Используем dump-db.sh с креденшелами из .env (по умолчанию) + # Теперь он подключится напрямую к продакшен базе, а не через docker-compose + ./dump-db.sh "$DUMP_NAME" + + # Запускаем контейнер db обратно + echo -e "${BLUE} ▶️ Запускаем локальный контейнер db обратно...${NC}" + docker-compose start db 2>/dev/null || docker-compose up -d db + + # Проверяем, был ли создан дамп + CREATED_DUMP=$(ls -t "$DUMP_DIR"/"$DUMP_NAME".sql.gz 2>/dev/null | head -n 1) + if [ -n "$CREATED_DUMP" ]; then + echo -e "${GREEN} ✅ Дамп с продакшена создан: $(basename "$CREATED_DUMP")${NC}" + LATEST_DUMP="$CREATED_DUMP" + # Продолжаем с восстановлением ниже + else + echo -e "${RED} ❌ Не удалось создать дамп с продакшена${NC}" + echo -e "${YELLOW} ⚠️ Проверьте креденшелы в .env и доступность базы данных${NC}" + exit 1 + fi + else + echo -e "${RED} ❌ Скрипт dump-db.sh не найден${NC}" + exit 1 + fi +fi + +# Если дамп найден или создан, восстанавливаем его +if [ -n "$LATEST_DUMP" ]; then + LATEST_DUMP_NAME=$(basename "$LATEST_DUMP") + echo -e "${GREEN} ✅ Найден дамп: $LATEST_DUMP_NAME${NC}" + echo "" + + # 6. Восстановление базы данных + echo -e "${YELLOW}6. 
Восстановление базы данных из дампа...${NC}" + echo " Файл: $LATEST_DUMP_NAME" + echo " Используется скрипт restore-db.sh (восстановление в локальную базу)" + + # Используем restore-db.sh, который автоматически восстанавливает в локальную базу при использовании .env + # restore-db.sh автоматически выберет самый свежий дамп, если имя не указано + if [ -f "./restore-db.sh" ]; then + chmod +x ./restore-db.sh + # Автоматически подтверждаем восстановление + # restore-db.sh сам выберет самый свежий дамп из database-dumps/ + echo "yes" | ./restore-db.sh + else + echo -e "${RED} ❌ Скрипт restore-db.sh не найден${NC}" + exit 1 + fi +fi + +echo "" +echo -e "${GREEN}✅ Инициализация завершена!${NC}" +echo "" +echo -e "${BLUE}📋 Статус сервисов:${NC}" +docker-compose ps + diff --git a/list-dumps.sh b/list-dumps.sh new file mode 100755 index 0000000..a07eac8 --- /dev/null +++ b/list-dumps.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +# Скрипт для просмотра списка доступных дампов + +DUMP_DIR="database-dumps" + +if [ ! 
-d "$DUMP_DIR" ]; then + echo "❌ Директория дампов не найдена: $DUMP_DIR" + exit 1 +fi + +echo "📦 Доступные дампы базы данных:" +echo "" + +# Показываем дампы с информацией о размере и дате +if ls "$DUMP_DIR"/*.sql.gz 2>/dev/null | grep -q .; then + ls -lh "$DUMP_DIR"/*.sql.gz 2>/dev/null | awk '{ + filename = $9 + gsub(/.*\//, "", filename) + printf " %-40s %8s %s %s %s\n", filename, $5, $6, $7, $8 + }' + echo "" + echo "Всего дампов: $(ls -1 "$DUMP_DIR"/*.sql.gz 2>/dev/null | wc -l | tr -d ' ')" + echo "" + echo "Для восстановления используйте:" + echo " ./restore-db.sh <имя_дампа.sql.gz> # В .env" + echo " ./restore-db.sh --env-file .env.prod <имя_дампа> # В указанный файл" +else + echo " (нет дампов)" + echo "" + echo "Для создания дампа используйте:" + echo " ./dump-db.sh # Из .env" + echo " ./dump-db.sh --env-file .env.prod [имя] # Из указанного файла" +fi + diff --git a/nginx-unified.conf b/nginx-unified.conf new file mode 100644 index 0000000..9af225d --- /dev/null +++ b/nginx-unified.conf @@ -0,0 +1,127 @@ +server { + listen 80; + server_name localhost; + root /usr/share/nginx/html; + index index.html; + + # Gzip compression + gzip on; + gzip_vary on; + gzip_min_length 1024; + gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json; + + # Proxy API requests to backend (localhost внутри контейнера) + location /api/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy webhook endpoints to backend + location /webhook/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + 
proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy daily-report endpoints to backend + location /daily-report/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy admin panel to backend (must be before location /) + location ^~ /admin { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy project endpoints to backend (must be before location /) + location ^~ /project/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy other API endpoints to backend + location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|weekly_goals/setup|project_score_sample_mv/refresh)$ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + 
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Service Worker должен быть без кэширования + location /sw.js { + add_header Cache-Control "no-cache"; + expires 0; + } + + # Manifest тоже без долгого кэширования + location /manifest.webmanifest { + add_header Cache-Control "no-cache"; + expires 0; + } + + # Раздача загруженных файлов (картинки wishlist) - проксируем через backend + # Используем ^~ чтобы этот location имел приоритет над regex locations + location ^~ /uploads/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + expires 30d; + add_header Cache-Control "public, immutable"; + } + + # Handle React Router (SPA) + location / { + try_files $uri $uri/ /index.html; + } + + # Cache static assets + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ { + expires 1y; + add_header Cache-Control "public, immutable"; + } +} + diff --git a/nginx.conf b/nginx.conf new file mode 100644 index 0000000..60a79af --- /dev/null +++ b/nginx.conf @@ -0,0 +1,29 @@ +user nginx; +worker_processes auto; +error_log /var/log/nginx/error.log warn; +pid /var/run/nginx.pid; + +events { + worker_connections 1024; +} + +http { + include /etc/nginx/mime.types; + default_type application/octet-stream; + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + access_log /var/log/nginx/access.log main; + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + types_hash_max_size 2048; + + # Include server configurations + include /etc/nginx/conf.d/*.conf; +} + diff --git a/package.json b/package.json new file mode 100644 index 0000000..8b7c317 --- /dev/null +++ 
b/package.json @@ -0,0 +1,9 @@ +{ + "name": "play-life", + "version": "1.0.0", + "description": "Play Life application", + "scripts": { + "db:dump": "./dump-db.sh", + "db:restore": "./restore-db.sh" + } +} diff --git a/play-life-backend/.gitignore b/play-life-backend/.gitignore new file mode 100644 index 0000000..e8f54ae --- /dev/null +++ b/play-life-backend/.gitignore @@ -0,0 +1,34 @@ +# Environment variables with secrets +.env + +# Go build artifacts +main +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool +*.out + +# Dependency directories +vendor/ + +# Go workspace file +go.work + +# IDE +.idea/ +.vscode/ +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db + diff --git a/play-life-backend/Dockerfile b/play-life-backend/Dockerfile new file mode 100644 index 0000000..10968d4 --- /dev/null +++ b/play-life-backend/Dockerfile @@ -0,0 +1,66 @@ +# Multi-stage build для единого образа frontend + backend + +# Stage 1: Build Frontend +FROM node:20-alpine AS frontend-builder +WORKDIR /app/frontend +COPY play-life-web/package*.json ./ +RUN npm ci +# Копируем РёСЃС…РѕРґРЅРёРєРё (node_modules исключены через .dockerignore) +COPY play-life-web/ . +RUN npm run build + +# Stage 2: Build Backend +FROM golang:1.24-alpine AS backend-builder +WORKDIR /app/backend +# Устанавливаем GOPROXY для более надежной загрузки модулей +ENV GOPROXY=https://proxy.golang.org,direct +ENV GOSUMDB=sum.golang.org +COPY play-life-backend/go.mod play-life-backend/go.sum ./ +RUN go mod download +COPY play-life-backend/ . +RUN go mod tidy +RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o main . 
+ +# Stage 3: Final image +FROM alpine:latest + +# Устанавливаем необходимые пакеты +RUN apk --no-cache add \ + ca-certificates \ + nginx \ + supervisor \ + curl \ + tzdata \ + chromium \ + chromium-chromedriver \ + udev \ + ttf-freefont \ + font-noto-emoji + +# Создаем директории +WORKDIR /app + +# Копируем собранный frontend +COPY --from=frontend-builder /app/frontend/dist /usr/share/nginx/html + +# Копируем собранный backend +COPY --from=backend-builder /app/backend/main /app/backend/main +COPY play-life-backend/admin.html /app/backend/admin.html + +# Копируем конфигурацию nginx +COPY nginx.conf /etc/nginx/nginx.conf +COPY nginx-unified.conf /etc/nginx/conf.d/default.conf + +# Копируем конфигурацию supervisor для запуска backend +COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf + +# Создаем директории для логов +RUN mkdir -p /var/log/supervisor && \ + mkdir -p /var/log/nginx && \ + mkdir -p /var/run + +# Открываем РїРѕСЂС‚ 80 +EXPOSE 80 + +# Запускаем supervisor, который запустит nginx Рё backend +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] \ No newline at end of file diff --git a/play-life-backend/ENV_SETUP.md b/play-life-backend/ENV_SETUP.md new file mode 100644 index 0000000..a6c917b --- /dev/null +++ b/play-life-backend/ENV_SETUP.md @@ -0,0 +1,75 @@ +# Настройка переменных окружения + +## Быстрый старт + +1. Скопируйте файл `env.example` в `.env`: + ```bash + cp env.example .env + ``` + +2. Откройте `.env` и заполните реальные значения: + ```bash + nano .env + # или + vim .env + ``` + +3. **ВАЖНО**: Файл `.env` уже добавлен в `.gitignore` и не будет попадать в git. 
+ +## Переменные окружения + +### Обязательные (для работы приложения) + +- `DB_HOST` - хост базы данных (по умолчанию: localhost) +- `DB_PORT` - порт базы данных (по умолчанию: 5432) +- `DB_USER` - пользователь БД (по умолчанию: playeng) +- `DB_PASSWORD` - пароль БД (по умолчанию: playeng) +- `DB_NAME` - имя БД (по умолчанию: playeng) +- `PORT` - порт сервера (по умолчанию: 8080) + +### Опциональные (для Telegram интеграции) + +- `WEBHOOK_BASE_URL` - базовый URL для автоматической настройки webhook +- Bot Token и Chat ID настраиваются через UI приложения в разделе "Интеграции" -> "Telegram" + +## Использование в коде + +Приложение автоматически читает переменные окружения через `os.Getenv()`. + +Для загрузки `.env` файла в локальной разработке можно использовать: + +### Вариант 1: Установить переменные вручную +```bash +export DB_PASSWORD=your_password +go run main.go +``` + +### Вариант 2: Использовать библиотеку godotenv (рекомендуется) + +1. Установить библиотеку: + ```bash + go get github.com/joho/godotenv + ``` + +2. Добавить в начало `main()`: + ```go + import "github.com/joho/godotenv" + + func main() { + // Загрузить .env файл + godotenv.Load() + // ... остальной код + } + ``` + +### Вариант 3: Использовать docker-compose + +В `docker-compose.yml` уже настроена передача переменных окружения из `.env` файла. + +## Безопасность + +- ✅ Файл `.env` добавлен в `.gitignore` +- ✅ Файл `env.example` содержит только шаблоны без реальных значений +- ✅ Никогда не коммитьте `.env` в git +- ✅ Используйте разные токены для dev/prod окружений + diff --git a/play-life-backend/MIGRATION_BASELINE.md b/play-life-backend/MIGRATION_BASELINE.md new file mode 100644 index 0000000..5dcc0f1 --- /dev/null +++ b/play-life-backend/MIGRATION_BASELINE.md @@ -0,0 +1,120 @@ +# Инструкция по применению baseline миграции + +## Обзор + +После перехода на `golang-migrate` текущая схема БД была зафиксирована как baseline миграция `000001_baseline.up.sql`. 
Для существующих баз данных baseline миграция **не должна применяться автоматически** - вместо этого нужно использовать команду `migrate force` для установки текущей версии миграции. + +## Для существующих баз данных + +### Шаг 1: Создание backup + +**ОБЯЗАТЕЛЬНО** создайте backup базы данных перед применением baseline: + +```bash +# Используйте существующий скрипт dump-db.sh +./dump-db.sh + +# Или вручную: +pg_dump -h $DB_HOST -U $DB_USER -d $DB_NAME > backup_$(date +%Y%m%d_%H%M%S).sql +``` + +### Шаг 2: Установка версии миграции + +Для существующих баз данных нужно установить версию миграции в `1` (baseline), не применяя саму миграцию: + +```bash +# Установите переменные окружения +export DB_HOST=localhost +export DB_PORT=5432 +export DB_USER=playeng +export DB_PASSWORD=playeng +export DB_NAME=playeng + +# Установите версию миграции в 1 (baseline) +migrate -path ./play-life-backend/migrations \ + -database "postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME?sslmode=disable" \ + force 1 +``` + +**Важно:** Команда `force 1` устанавливает версию миграции в `1`, но **не выполняет** SQL из baseline миграции. Это правильно, так как схема уже существует. + +### Шаг 3: Проверка + +Проверьте, что версия миграции установлена правильно: + +```bash +migrate -path ./play-life-backend/migrations \ + -database "postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME?sslmode=disable" \ + version +``` + +Должно вывести: `1 (dirty)` + +Если выводит `1 (dirty)`, это нормально - это означает, что версия установлена, но миграция не была применена (что и требуется для baseline). 
+ +### Шаг 4: Очистка dirty флага (опционально) + +Если нужно убрать dirty флаг: + +```bash +migrate -path ./play-life-backend/migrations \ + -database "postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME?sslmode=disable" \ + force 1 +``` + +## Для новых баз данных + +Для новых баз данных baseline миграция применится автоматически при первом запуске приложения через функцию `runMigrations()`. + +Или вручную: + +```bash +migrate -path ./play-life-backend/migrations \ + -database "postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME?sslmode=disable" \ + up +``` + +## Проверка схемы + +После применения baseline (или установки версии для существующих БД) можно проверить схему: + +```bash +# Экспорт схемы +pg_dump -h $DB_HOST -U $DB_USER -d $DB_NAME --schema-only > current_schema.sql + +# Сравнение с baseline (если нужно) +diff current_schema.sql play-life-backend/migrations/000001_baseline.up.sql +``` + +## Важные замечания + +1. **Никогда не применяйте baseline миграцию на существующих БД** - используйте только `migrate force 1` +2. **Всегда создавайте backup** перед любыми операциями с миграциями +3. **Проверяйте версию миграции** после установки baseline +4. **Новые миграции** будут применяться автоматически при запуске приложения + +## Устранение проблем + +### Ошибка "dirty database version" + +Если база данных находится в состоянии "dirty", исправьте это: + +```bash +migrate -path ./play-life-backend/migrations \ + -database "postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME?sslmode=disable" \ + force +``` + +Где `` - текущая версия миграции (обычно 1 для baseline). + +### Ошибка "no change" + +Если при применении миграций вы видите "no change", это нормально - база данных уже на актуальной версии. + +### Проблемы с путями к миграциям + +Убедитесь, что путь к миграциям правильный: +- Локально: `./play-life-backend/migrations` +- В Docker: `/migrations` + +Приложение автоматически проверяет оба пути. 
diff --git a/play-life-backend/MIGRATION_RISKS_AND_SOLUTIONS.md b/play-life-backend/MIGRATION_RISKS_AND_SOLUTIONS.md new file mode 100644 index 0000000..f208489 --- /dev/null +++ b/play-life-backend/MIGRATION_RISKS_AND_SOLUTIONS.md @@ -0,0 +1,458 @@ +# Анализ рисков миграции на golang-migrate с baseline + +## Критические риски + +### 1. Потеря данных при неправильном применении baseline + +**Риск**: При применении baseline миграции на существующую БД может произойти: +- Попытка создать уже существующие таблицы (ошибка) +- Потеря данных при DROP/CREATE операциях +- Конфликты с существующими данными + +**Вероятность**: Средняя +**Влияние**: Критическое + +**Решения**: + +1. **Обязательный backup перед применением** + ```bash + # Создать backup перед миграцией + ./dump-db.sh --env-file .env baseline-migration-backup + ``` + +2. **Использование `migrate force` вместо `migrate up` для существующих БД** + ```bash + # Для существующих БД - установить версию без применения + migrate -path ./migrations -database "postgres://..." force 1 + ``` + +3. **Проверка существования таблиц в baseline миграции** + - Использовать `CREATE TABLE IF NOT EXISTS` (но это не идеально для baseline) + - Или создать скрипт проверки перед применением + +4. **Тестирование на dev окружении** + - Сначала применить на dev БД + - Проверить целостность данных + - Только потом применять на production + +--- + +### 2. Ошибки в baseline миграции (неполная схема) + +**Риск**: Baseline миграция может не включать: +- Некоторые таблицы или колонки +- Индексы или constraints +- Materialized views +- Начальные данные (словарь с id=0) +- Sequences с правильными значениями + +**Вероятность**: Высокая +**Влияние**: Критическое + +**Решения**: + +1. 
**Автоматическая проверка полноты схемы** + ```bash + # Создать скрипт для сравнения текущей схемы с baseline + # Использовать pg_dump --schema-only для сравнения + pg_dump --schema-only -h $DB_HOST -U $DB_USER -d $DB_NAME > current_schema.sql + # Сравнить с baseline миграцией + ``` + +2. **Пошаговая сборка baseline** + - Собрать схему из всех init*DB функций + - Добавить все миграции 012-029 + - Проверить через `pg_dump --schema-only` на актуальной БД + +3. **Тестирование baseline на чистой БД** + ```bash + # Создать тестовую БД + createdb test_baseline + # Применить baseline + migrate -path ./migrations -database "postgres://.../test_baseline" up + # Сравнить схему с production + ``` + +4. **Валидация через SQL проверки** + - Добавить в baseline проверки существования всех таблиц + - Использовать `DO $$ BEGIN ... END $$;` блоки для валидации + +--- + +### 3. Проблемы с sequences и начальными данными + +**Риск**: +- Sequences могут быть не синхронизированы +- Начальные данные (словарь id=0) могут конфликтовать +- Автоинкременты могут начаться с неправильного значения + +**Вероятность**: Средняя +**Влияние**: Среднее + +**Решения**: + +1. **Правильная настройка sequences в baseline** + ```sql + -- После создания таблицы и вставки данных + SELECT setval('dictionaries_id_seq', + (SELECT MAX(id) FROM dictionaries), + true); + ``` + +2. **Использование ON CONFLICT для начальных данных** + ```sql + INSERT INTO dictionaries (id, name) + VALUES (0, 'Все слова') + ON CONFLICT (id) DO NOTHING; + ``` + +3. **Проверка sequences после baseline** + ```sql + -- Скрипт для проверки всех sequences + SELECT schemaname, sequencename, last_value + FROM pg_sequences; + ``` + +--- + +### 4. Проблемы с materialized views + +**Риск**: +- Materialized view может не создаться корректно +- Зависимости от таблиц могут быть нарушены +- Данные в MV могут быть неактуальными + +**Вероятность**: Средняя +**Влияние**: Среднее + +**Решения**: + +1. 
**Создание MV после всех таблиц** + - Убедиться, что все таблицы созданы до создания MV + - Использовать `DROP MATERIALIZED VIEW IF EXISTS` перед созданием + +2. **Обновление данных после создания** + ```sql + -- После создания MV + REFRESH MATERIALIZED VIEW weekly_report_mv; + ``` + +3. **Проверка зависимостей** + ```sql + -- Проверить зависимости MV + SELECT * FROM pg_depend + WHERE objid = 'weekly_report_mv'::regclass; + ``` + +--- + +### 5. Конфликты версий миграций + +**Риск**: +- Таблица `schema_migrations` может быть в неправильном состоянии +- Версия может быть установлена неправильно +- Конфликт между старой и новой системой миграций + +**Вероятность**: Средняя +**Влияние**: Высокое + +**Решения**: + +1. **Проверка состояния schema_migrations перед применением** + ```go + // Проверить, существует ли таблица schema_migrations + // Если да - проверить текущую версию + var version uint + err := db.QueryRow("SELECT version FROM schema_migrations LIMIT 1").Scan(&version) + ``` + +2. **Очистка старой таблицы (если была)** + ```sql + -- Если была старая таблица миграций + DROP TABLE IF EXISTS old_migrations_table; + ``` + +3. **Использование `migrate force` только для существующих БД** + - Новые БД должны использовать `migrate up` + - Существующие БД - `migrate force 1` + +--- + +### 6. Проблемы с окружениями (dev/prod различия) + +**Риск**: +- Различия в схемах между dev и prod +- Разные версии PostgreSQL +- Разные настройки БД + +**Вероятность**: Средняя +**Влияние**: Высокое + +**Решения**: + +1. **Проверка версии PostgreSQL** + ```sql + SELECT version(); + ``` + +2. **Тестирование на всех окружениях** + - Dev окружение + - Staging (если есть) + - Production (после успешного тестирования) + +3. **Документирование различий** + - Зафиксировать версию PostgreSQL + - Зафиксировать настройки БД + +--- + +### 7. 
Проблемы с откатом (rollback) + +**Риск**: +- Baseline миграция не может быть откачена +- Ошибки при откате последующих миграций +- Потеря данных при откате + +**Вероятность**: Низкая +**Влияние**: Высокое + +**Решения**: + +1. **Baseline не откатывается (по дизайну)** + - Пустой `000001_baseline.down.sql` + - Документировать это ограничение + +2. **Правильные down миграции для новых миграций** + - Каждая новая миграция должна иметь корректный down файл + - Тестировать откат на dev окружении + +3. **Backup перед откатом** + - Всегда создавать backup перед откатом + - Особенно на production + +--- + +### 8. Проблемы при старте приложения + +**Риск**: +- Миграции могут не примениться при старте +- Ошибки подключения к БД во время миграций +- Таймауты при применении миграций + +**Вероятность**: Средняя +**Влияние**: Высокое + +**Решения**: + +1. **Обработка ошибок миграций** + ```go + m, err := migrate.NewWithDatabaseInstance( + "file://migrations", + "postgres", driver) + if err != nil { + log.Fatal("Failed to initialize migrations:", err) + } + + if err := m.Up(); err != nil { + if err != migrate.ErrNoChange { + log.Fatal("Failed to apply migrations:", err) + } + log.Println("Database is up to date") + } + ``` + +2. **Retry логика для подключения к БД** + - Уже есть в коде (10 попыток) + - Применить перед миграциями + +3. **Таймауты для миграций** + ```go + // Установить таймаут для миграций + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + ``` + +4. **Логирование процесса миграций** + - Логировать каждую применяемую миграцию + - Логировать ошибки с деталями + +--- + +### 9. Проблемы с Docker и путями к миграциям + +**Риск**: +- Миграции могут не найтись в контейнере +- Неправильные пути к файлам миграций +- Проблемы с правами доступа + +**Вероятность**: Низкая +**Влияние**: Среднее + +**Решения**: + +1. 
**Проверка путей в Dockerfile** + ```dockerfile + # Убедиться, что миграции копируются + COPY play-life-backend/migrations /migrations + ``` + +2. **Использование абсолютных путей** + ```go + migrationsPath := "/migrations" + if _, err := os.Stat(migrationsPath); os.IsNotExist(err) { + // Fallback для локальной разработки + migrationsPath = "play-life-backend/migrations" + } + ``` + +3. **Проверка доступности миграций при старте** + ```go + // Проверить, что папка миграций существует + if _, err := os.Stat(migrationsPath); os.IsNotExist(err) { + log.Fatal("Migrations directory not found:", migrationsPath) + } + ``` + +--- + +### 10. Проблемы с параллельным доступом + +**Риск**: +- Несколько инстансов приложения могут пытаться применить миграции одновременно +- Конфликты при применении миграций + +**Вероятность**: Низкая +**Влияние**: Высокое + +**Решения**: + +1. **Блокировки на уровне БД** + - golang-migrate использует транзакции + - PostgreSQL блокирует таблицу schema_migrations + +2. **Применение миграций только в одном инстансе** + - Использовать флаг `--migrate` для запуска миграций + - Или применять миграции отдельным процессом + +3. **Проверка версии перед применением** + ```go + version, dirty, err := m.Version() + if dirty { + log.Fatal("Database is in dirty state, manual intervention required") + } + ``` + +--- + +## План митигации рисков + +### Этап 1: Подготовка (до применения baseline) + +1. ✅ Создать backup всех БД (dev, staging, prod) +2. ✅ Собрать полную схему через `pg_dump --schema-only` +3. ✅ Создать baseline миграцию на основе схемы +4. ✅ Протестировать baseline на чистой БД +5. ✅ Сравнить схему после baseline с текущей схемой + +### Этап 2: Тестирование (на dev окружении) + +1. ✅ Применить baseline через `migrate force 1` +2. ✅ Проверить целостность данных +3. ✅ Проверить работу приложения +4. ✅ Проверить sequences и начальные данные +5. ✅ Проверить materialized views + +### Этап 3: Применение (на production) + +1. 
✅ Создать backup production БД +2. ✅ Применить baseline через `migrate force 1` +3. ✅ Проверить работу приложения +4. ✅ Мониторинг в течение первых часов + +### Этап 4: Мониторинг (после применения) + +1. ✅ Проверить логи приложения +2. ✅ Проверить ошибки БД +3. ✅ Проверить производительность +4. ✅ Собрать обратную связь от пользователей + +--- + +## Чеклист перед применением baseline + +- [ ] Backup всех БД создан и проверен +- [ ] Baseline миграция протестирована на чистой БД +- [ ] Схема после baseline совпадает с текущей схемой +- [ ] Тестирование на dev окружении успешно +- [ ] Инструкции по применению baseline готовы +- [ ] Команда проинформирована о миграции +- [ ] Окно для миграции запланировано (для production) +- [ ] План отката подготовлен (если что-то пойдет не так) + +--- + +## Скрипты для проверки + +### Скрипт проверки схемы + +```bash +#!/bin/bash +# check_schema.sh - Проверка полноты baseline миграции + +DB_HOST=${DB_HOST:-localhost} +DB_PORT=${DB_PORT:-5432} +DB_USER=${DB_USER:-playeng} +DB_PASSWORD=${DB_PASSWORD:-playeng} +DB_NAME=${DB_NAME:-playeng} + +echo "Проверка схемы БД..." + +# Получить список всех таблиц +PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -t -c " +SELECT tablename +FROM pg_tables +WHERE schemaname = 'public' +ORDER BY tablename; +" > current_tables.txt + +echo "Таблицы в БД сохранены в current_tables.txt" +echo "Сравните с таблицами в baseline миграции" +``` + +### Скрипт применения baseline + +```bash +#!/bin/bash +# apply_baseline.sh - Безопасное применение baseline + +set -e + +DB_HOST=${DB_HOST:-localhost} +DB_PORT=${DB_PORT:-5432} +DB_USER=${DB_USER:-playeng} +DB_PASSWORD=${DB_PASSWORD:-playeng} +DB_NAME=${DB_NAME:-playeng} + +DATABASE_URL="postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME?sslmode=disable" + +echo "⚠️ ВНИМАНИЕ: Это применит baseline миграцию!" +echo "База данных: $DB_NAME" +echo "Хост: $DB_HOST:$DB_PORT" +read -p "Вы уверены? 
(yes/no): " confirm + +if [ "$confirm" != "yes" ]; then + echo "Отменено" + exit 1 +fi + +# Создать backup +echo "Создание backup..." +./dump-db.sh --env-file .env baseline-backup-$(date +%Y%m%d_%H%M%S) + +# Применить baseline +echo "Применение baseline..." +migrate -path ./play-life-backend/migrations -database "$DATABASE_URL" force 1 + +echo "✅ Baseline применен успешно" +echo "Проверьте работу приложения" +``` diff --git a/play-life-backend/admin.html b/play-life-backend/admin.html new file mode 100644 index 0000000..8de5fe5 --- /dev/null +++ b/play-life-backend/admin.html @@ -0,0 +1,394 @@ + + + + + + Play Life Backend - Admin Panel + + + + + +
+

🎯 Play Life Backend - Admin Panel

+ +
+ +
+

+ 🎯 Weekly Goals Setup + +

+

+ Нажмите кнопку для установки целей на текущую неделю на основе медианы за последние 3 месяца (с отправкой в чат). Обычно срабатывает автоматически в начале недели. +

+ +
+
+ + +
+

+ 📊 project_score_sample_mv + +

+

+ Обновить материализованное представление и показать данные текущего пользователя (по одному представителю на вариант баллов проекта). +

+ +
+
+
+
+ + + + + diff --git a/play-life-backend/apply_baseline.sh b/play-life-backend/apply_baseline.sh new file mode 100755 index 0000000..3a6ee74 --- /dev/null +++ b/play-life-backend/apply_baseline.sh @@ -0,0 +1,168 @@ +#!/bin/bash + +# Безопасный скрипт для применения baseline миграции к существующим БД +# Включает создание backup, проверки и применение baseline + +set -e + +# Цвета для вывода +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +# Получаем переменные окружения +DB_HOST=${DB_HOST:-localhost} +DB_PORT=${DB_PORT:-5432} +DB_USER=${DB_USER:-playeng} +DB_PASSWORD=${DB_PASSWORD:-playeng} +DB_NAME=${DB_NAME:-playeng} + +MIGRATIONS_PATH="play-life-backend/migrations" +BACKUP_DIR="../database-dumps" + +echo "=== Применение baseline миграции ===" +echo "" + +# Проверяем наличие необходимых инструментов +if ! command -v migrate &> /dev/null; then + echo -e "${RED}Ошибка: migrate не найден. Установите golang-migrate:${NC}" + echo " brew install golang-migrate" + echo " или" + echo " go install -tags 'postgres' github.com/golang-migrate/migrate/v4/cmd/migrate@latest" + exit 1 +fi + +if ! command -v pg_dump &> /dev/null; then + echo -e "${RED}Ошибка: pg_dump не найден. Установите PostgreSQL client tools.${NC}" + exit 1 +fi + +# Проверяем наличие директории миграций +if [ ! -d "$MIGRATIONS_PATH" ]; then + echo -e "${RED}Ошибка: Директория миграций не найдена: $MIGRATIONS_PATH${NC}" + exit 1 +fi + +# Проверяем наличие baseline миграции +if [ ! -f "$MIGRATIONS_PATH/000001_baseline.up.sql" ]; then + echo -e "${RED}Ошибка: Baseline миграция не найдена: $MIGRATIONS_PATH/000001_baseline.up.sql${NC}" + exit 1 +fi + +echo "Параметры подключения:" +echo " Host: $DB_HOST" +echo " Port: $DB_PORT" +echo " User: $DB_USER" +echo " Database: $DB_NAME" +echo "" + +# Проверяем подключение к БД +echo "1. Проверка подключения к БД..." 
+PGPASSWORD=$DB_PASSWORD psql \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d $DB_NAME \ + -c "SELECT 1;" > /dev/null 2>&1 + +if [ $? -ne 0 ]; then + echo -e "${RED}Ошибка: Не удалось подключиться к БД${NC}" + exit 1 +fi + +echo -e "${GREEN}✓ Подключение успешно${NC}" +echo "" + +# Проверяем текущую версию миграции +echo "2. Проверка текущей версии миграции..." +DATABASE_URL="postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME?sslmode=disable" + +CURRENT_VERSION=$(migrate -path "$MIGRATIONS_PATH" -database "$DATABASE_URL" version 2>&1 || echo "none") + +if echo "$CURRENT_VERSION" | grep -q "dirty"; then + echo -e "${YELLOW}⚠ База данных находится в состоянии 'dirty'${NC}" + echo " Это нормально для baseline - будет исправлено" +elif echo "$CURRENT_VERSION" | grep -q "^[0-9]"; then + VERSION_NUM=$(echo "$CURRENT_VERSION" | grep -oE "^[0-9]+" || echo "0") + if [ "$VERSION_NUM" -ge 1 ]; then + echo -e "${GREEN}✓ Версия миграции уже установлена: $VERSION_NUM${NC}" + echo " Baseline уже применен, дальнейшие действия не требуются" + exit 0 + fi +fi + +echo " Текущая версия: $CURRENT_VERSION" +echo "" + +# Создаем backup +echo "3. Создание backup БД..." +mkdir -p "$BACKUP_DIR" +BACKUP_FILE="$BACKUP_DIR/baseline_backup_$(date +%Y%m%d_%H%M%S).sql.gz" + +PGPASSWORD=$DB_PASSWORD pg_dump \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d $DB_NAME \ + | gzip > "$BACKUP_FILE" + +if [ $? -ne 0 ]; then + echo -e "${RED}Ошибка: Не удалось создать backup${NC}" + exit 1 +fi + +BACKUP_SIZE=$(du -h "$BACKUP_FILE" | cut -f1) +echo -e "${GREEN}✓ Backup создан: $BACKUP_FILE (размер: $BACKUP_SIZE)${NC}" +echo "" + +# Подтверждение +echo "4. Подтверждение применения baseline..." +echo "" +echo -e "${YELLOW}ВНИМАНИЕ:${NC}" +echo " Будет установлена версия миграции в 1 (baseline)" +echo " Сама миграция НЕ будет применена (схема уже существует)" +echo " Backup сохранен в: $BACKUP_FILE" +echo "" +read -p "Продолжить? 
(yes/no): " CONFIRM + +if [ "$CONFIRM" != "yes" ]; then + echo "Отменено пользователем" + exit 0 +fi + +# Применяем baseline (force 1) +echo "" +echo "5. Установка версии миграции в 1 (baseline)..." +migrate -path "$MIGRATIONS_PATH" \ + -database "$DATABASE_URL" \ + force 1 + +if [ $? -ne 0 ]; then + echo -e "${RED}Ошибка: Не удалось установить версию миграции${NC}" + echo " Backup доступен в: $BACKUP_FILE" + exit 1 +fi + +echo -e "${GREEN}✓ Версия миграции установлена${NC}" +echo "" + +# Проверяем результат +echo "6. Проверка результата..." +FINAL_VERSION=$(migrate -path "$MIGRATIONS_PATH" -database "$DATABASE_URL" version 2>&1) +echo " Версия миграции: $FINAL_VERSION" + +if echo "$FINAL_VERSION" | grep -qE "^1"; then + echo -e "${GREEN}✓ Baseline успешно применен!${NC}" +else + echo -e "${YELLOW}⚠ Версия миграции: $FINAL_VERSION${NC}" + echo " Это может быть нормально, если база в состоянии 'dirty'" +fi + +echo "" +echo "=== Готово ===" +echo "" +echo "Backup сохранен в: $BACKUP_FILE" +echo "Версия миграции установлена в: 1 (baseline)" +echo "" +echo "Теперь приложение будет автоматически применять новые миграции при запуске." diff --git a/play-life-backend/docker-compose.yml b/play-life-backend/docker-compose.yml new file mode 100644 index 0000000..060fc1c --- /dev/null +++ b/play-life-backend/docker-compose.yml @@ -0,0 +1,41 @@ +version: '3.8' + +services: + db: + image: postgres:15-alpine + restart: unless-stopped + environment: + POSTGRES_USER: ${DB_USER:-playeng} + POSTGRES_PASSWORD: ${DB_PASSWORD:-playeng} + POSTGRES_DB: ${DB_NAME:-playeng} + ports: + - "${DB_PORT:-5432}:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-playeng}"] + interval: 10s + timeout: 5s + retries: 5 + env_file: + - ../.env + - .env # Локальный .env имеет приоритет + + backend: + build: . 
+ ports: + - "${PORT:-8080}:8080" + environment: + DB_HOST: db + DB_PORT: 5432 + DB_USER: ${DB_USER:-playeng} + DB_PASSWORD: ${DB_PASSWORD:-playeng} + DB_NAME: ${DB_NAME:-playeng} + PORT: ${PORT:-8080} + depends_on: + db: + condition: service_healthy + volumes: + - ./migrations:/migrations + env_file: + - ../.env + - .env # Локальный .env имеет приоритет + diff --git a/play-life-backend/go.mod b/play-life-backend/go.mod new file mode 100644 index 0000000..1edcd6c --- /dev/null +++ b/play-life-backend/go.mod @@ -0,0 +1,27 @@ +module play-eng-backend + +go 1.24.0 + +require ( + github.com/chromedp/chromedp v0.14.2 + github.com/disintegration/imaging v1.6.2 + github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1 + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/golang-migrate/migrate/v4 v4.19.1 + github.com/gorilla/mux v1.8.1 + github.com/joho/godotenv v1.5.1 + github.com/lib/pq v1.10.9 + github.com/robfig/cron/v3 v3.0.1 + golang.org/x/crypto v0.45.0 +) + +require ( + github.com/chromedp/cdproto v0.0.0-20250724212937-08a3db8b4327 // indirect + github.com/chromedp/sysutil v1.1.0 // indirect + github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 // indirect + github.com/gobwas/httphead v0.1.0 // indirect + github.com/gobwas/pool v0.2.1 // indirect + github.com/gobwas/ws v1.4.0 // indirect + golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 // indirect + golang.org/x/sys v0.38.0 // indirect +) diff --git a/play-life-backend/go.sum b/play-life-backend/go.sum new file mode 100644 index 0000000..23888dd --- /dev/null +++ b/play-life-backend/go.sum @@ -0,0 +1,98 @@ +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod 
h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/chromedp/cdproto v0.0.0-20250724212937-08a3db8b4327 h1:UQ4AU+BGti3Sy/aLU8KVseYKNALcX9UXY6DfpwQ6J8E= +github.com/chromedp/cdproto v0.0.0-20250724212937-08a3db8b4327/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k= +github.com/chromedp/chromedp v0.14.2 h1:r3b/WtwM50RsBZHMUm9fsNhhzRStTHrKdr2zmwbZSzM= +github.com/chromedp/chromedp v0.14.2/go.mod h1:rHzAv60xDE7VNy/MYtTUrYreSc0ujt2O1/C3bzctYBo= +github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM= +github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8= +github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI= +github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M= +github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE= +github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dhui/dktest v0.4.6 h1:+DPKyScKSEp3VLtbMDHcUq6V5Lm5zfZZVb0Sk7Ahom4= +github.com/dhui/dktest v0.4.6/go.mod h1:JHTSYDtKkvFNFHJKqCzVzqXecyv+tKt8EzceOmQOgbU= +github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c= +github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI= +github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= 
+github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1 h1:wG8n/XJQ07TmjbITcGiUaOtXxdrINDz1b0J1w0SzqDc= +github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1/go.mod h1:A2S0CWkNylc2phvKXWBBdD3K0iGnDBGbzRpISP2zBl8= +github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= +github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.4.0 h1:CTaoG1tojrh4ucGPcoJFiAQUAsEWekEWvLy7GsVNqGs= +github.com/gobwas/ws v1.4.0/go.mod h1:G3gNqMNtPppf5XUz7O4shetPpcZ1VJ7zt18dlUeakrc= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod 
h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang-migrate/migrate/v4 v4.19.1 h1:OCyb44lFuQfYXYLx1SCxPZQGU7mcaZ7gH9yH4jSFbBA= +github.com/golang-migrate/migrate/v4 v4.19.1/go.mod h1:CTcgfjxhaUtsLipnLoQRWCrjYXycRz/g5+RWDuYgPrE= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80 h1:6Yzfa6GP0rIo/kULo2bwGEkFvCePZ3qHDDTC3/J9Swo= +github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod 
h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde h1:x0TT0RDC7UhAVbbWWBzr41ElhJx5tXPWkIHA2HWPRuw= +github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= 
+golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U= +golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/play-life-backend/main.go b/play-life-backend/main.go new file mode 100644 index 0000000..a8dd052 --- /dev/null +++ b/play-life-backend/main.go @@ -0,0 +1,16617 @@ +package main + +import ( + "bytes" + "compress/gzip" + "context" + "crypto/rand" + "database/sql" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "html" + "io" + "log" + "math" + "net/http" + "net/url" + "os" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + "sync" + "time" + "unicode/utf16" + + "image/jpeg" + + mathrand "math/rand" + + "github.com/chromedp/chromedp" + "github.com/disintegration/imaging" + tgbotapi "github.com/go-telegram-bot-api/telegram-bot-api/v5" + "github.com/golang-jwt/jwt/v5" + "github.com/golang-migrate/migrate/v4" + _ "github.com/golang-migrate/migrate/v4/database/postgres" + _ "github.com/golang-migrate/migrate/v4/source/file" + "github.com/gorilla/mux" + "github.com/joho/godotenv" + "github.com/lib/pq" + _ "github.com/lib/pq" + "github.com/robfig/cron/v3" + "golang.org/x/crypto/bcrypt" +) + +// Палитра из 30 контрастных цветов для проектов (HEX формат) +// Используется для генерации случайного цвета при 
создании проекта +// Должна быть синхронизирована с frontend (projectUtils.js) +var projectColorsPalette = []string{ + "#EF4444", // Красный + "#F97316", // Оранжевый + "#F59E0B", // Янтарный + "#EAB308", // Желтый + "#84CC16", // Лайм + "#22C55E", // Зеленый + "#10B981", // Изумрудный + "#14B8A6", // Бирюзовый + "#06B6D4", // Голубой + "#0EA5E9", // Небесный + "#3B82F6", // Синий + "#6366F1", // Индиго + "#8B5CF6", // Фиолетовый + "#A855F7", // Пурпурный + "#D946EF", // Фуксия + "#EC4899", // Розовый + "#F43F5E", // Розово-красный + "#DC2626", // Темно-красный + "#EA580C", // Темно-оранжевый + "#CA8A04", // Темно-желтый + "#65A30D", // Темно-лайм + "#16A34A", // Темно-зеленый + "#059669", // Темно-изумрудный + "#0D9488", // Темно-бирюзовый + "#0891B2", // Темно-голубой + "#0284C7", // Темно-небесный + "#2563EB", // Темно-синий + "#4F46E5", // Темно-индиго + "#7C3AED", // Темно-фиолетовый + "#9333EA", // Темно-пурпурный +} + +type Word struct { + ID int `json:"id"` + Name string `json:"name"` + Translation string `json:"translation"` + Description string `json:"description"` + Success int `json:"success"` + Failure int `json:"failure"` + LastSuccess *string `json:"last_success_at,omitempty"` + LastFailure *string `json:"last_failure_at,omitempty"` +} + +type WordRequest struct { + Name string `json:"name"` + Translation string `json:"translation"` + Description string `json:"description"` + DictionaryID *int `json:"dictionary_id,omitempty"` +} + +type WordsRequest struct { + Words []WordRequest `json:"words"` +} + +type TestProgressUpdate struct { + ID int `json:"id"` + Success int `json:"success"` + Failure int `json:"failure"` + LastSuccessAt *string `json:"last_success_at,omitempty"` + LastFailureAt *string `json:"last_failure_at,omitempty"` +} + +type TestProgressRequest struct { + Words []TestProgressUpdate `json:"words"` + ConfigID *int `json:"config_id,omitempty"` +} + +type Config struct { + ID int `json:"id"` + WordsCount int `json:"words_count"` + 
MaxCards *int `json:"max_cards,omitempty"` +} + +type ConfigRequest struct { + WordsCount int `json:"words_count"` + MaxCards *int `json:"max_cards,omitempty"` + DictionaryIDs []int `json:"dictionary_ids,omitempty"` +} + +type Dictionary struct { + ID int `json:"id"` + Name string `json:"name"` + WordsCount int `json:"wordsCount"` +} + +type DictionaryRequest struct { + Name string `json:"name"` +} + +type TestConfigsAndDictionariesResponse struct { + Configs []Config `json:"configs"` + Dictionaries []Dictionary `json:"dictionaries"` +} + +type WeeklyProjectStats struct { + ProjectName string `json:"project_name"` + TotalScore float64 `json:"total_score"` + MinGoalScore float64 `json:"min_goal_score"` + MaxGoalScore *float64 `json:"max_goal_score,omitempty"` + Priority *int `json:"priority,omitempty"` + CalculatedScore float64 `json:"calculated_score"` + TodayChange *float64 `json:"today_change,omitempty"` + Color string `json:"color"` +} + +type GroupsProgress struct { + Group1 *float64 `json:"group1,omitempty"` + Group2 *float64 `json:"group2,omitempty"` + Group0 *float64 `json:"group0,omitempty"` +} + +type WeeklyStatsResponse struct { + Total *float64 `json:"total,omitempty"` + GroupProgress1 *float64 `json:"group_progress_1,omitempty"` + GroupProgress2 *float64 `json:"group_progress_2,omitempty"` + GroupProgress0 *float64 `json:"group_progress_0,omitempty"` + Projects []WeeklyProjectStats `json:"projects"` +} + +type MessagePostRequest struct { + Body struct { + Text string `json:"text"` + } `json:"body"` +} + +type ProcessedNode struct { + Project string `json:"project"` + Score float64 `json:"score"` +} + +type ProcessedEntry struct { + Text string `json:"text"` + CreatedDate string `json:"createdDate"` + Nodes []ProcessedNode `json:"nodes"` + Raw string `json:"raw"` + Markdown string `json:"markdown"` +} + +type WeeklyGoalSetup struct { + ProjectName string `json:"project_name"` + MinGoalScore float64 `json:"min_goal_score"` + MaxGoalScore float64 
	`json:"max_goal_score"`
}

// ProjectScoreSampleMvRow represents one row from project_score_sample_mv
type ProjectScoreSampleMvRow struct {
	ProjectID    int       `json:"project_id"`
	Score        float64   `json:"score"`
	EntryMessage string    `json:"entry_message"`
	UserID       *int      `json:"user_id,omitempty"`
	CreatedDate  time.Time `json:"created_date"`
}

// Project is a user project with an optional manual priority and display color.
type Project struct {
	ProjectID   int    `json:"project_id"`
	ProjectName string `json:"project_name"`
	Priority    *int   `json:"priority,omitempty"`
	Color       string `json:"color"`
}

type ProjectPriorityUpdate struct {
	ID       int  `json:"id"`
	Priority *int `json:"priority"`
}

type ProjectPriorityRequest struct {
	Body []ProjectPriorityUpdate `json:"body"`
}

// FullStatisticsItem is one per-project, per-week statistics row.
type FullStatisticsItem struct {
	ProjectName          string  `json:"project_name"`
	ReportYear           int     `json:"report_year"`
	ReportWeek           int     `json:"report_week"`
	TotalScore           float64 `json:"total_score"`
	MinGoalScore         float64 `json:"min_goal_score"`
	MaxGoalScore         float64 `json:"max_goal_score"`
	NormalizedTotalScore float64 `json:"normalized_total_score"`
	Color                string  `json:"color"`
}

type TodayEntryNode struct {
	ProjectName string  `json:"project_name"`
	Score       float64 `json:"score"`
	Index       int     `json:"index"`
}

type TodayEntry struct {
	ID          int              `json:"id"`
	Text        string           `json:"text"`
	CreatedDate string           `json:"created_date"`
	Nodes       []TodayEntryNode `json:"nodes"`
}

// TodoistWebhook is the inbound Todoist webhook envelope.
type TodoistWebhook struct {
	EventName string                 `json:"event_name"`
	EventData map[string]interface{} `json:"event_data"`
}

// Telegram webhook payload structures (subset of the Bot API objects).
type TelegramEntity struct {
	Type   string `json:"type"`
	Offset int    `json:"offset"`
	Length int    `json:"length"`
}

type TelegramChat struct {
	ID int64 `json:"id"`
}

type TelegramUser struct {
	ID int64 `json:"id"`
}

type TelegramMessage struct {
	Text     string           `json:"text"`
	Entities []TelegramEntity `json:"entities"`
	Chat     TelegramChat     `json:"chat"`
	From     *TelegramUser    `json:"from,omitempty"`
}

type TelegramWebhook struct {
	Message TelegramMessage `json:"message"`
}

// TelegramUpdate - structure for the Telegram webhook (this is normally an Update object)
type TelegramUpdate struct {
	UpdateID      int              `json:"update_id"`
	Message       *TelegramMessage `json:"message,omitempty"`
	EditedMessage *TelegramMessage `json:"edited_message,omitempty"`
}

// Tracking structures
type TrackingUserStats struct {
	UserID        int                    `json:"user_id"`
	UserName      string                 `json:"user_name"`
	IsCurrentUser bool                   `json:"is_current_user"`
	Total         *float64               `json:"total,omitempty"`
	Projects      []TrackingProjectStats `json:"projects"`
}

type TrackingProjectStats struct {
	ProjectName     string  `json:"project_name"`
	CalculatedScore float64 `json:"calculated_score"` // completion percentage
	Priority        *int    `json:"priority,omitempty"`
}

type TrackingStatsResponse struct {
	WeekNumber int                 `json:"week_number"`
	Year       int                 `json:"year"`
	Users      []TrackingUserStats `json:"users"`
}

type TrackingAccessResponse struct {
	Trackers []TrackingUser `json:"trackers"` // users who track me
	Tracked  []TrackingUser `json:"tracked"`  // users I track
}

type TrackingUser struct {
	ID         int       `json:"id"`
	RelationID int       `json:"relation_id"` // id of the user_tracking row, used for deletion
	Name       string    `json:"name"`
	Email      string    `json:"email"`
	CreatedAt  time.Time `json:"created_at"`
}

type TrackingInviteInfo struct {
	UserID   int    `json:"user_id"`
	UserName string `json:"user_name"`
}

type TrackingInviteResponse struct {
	InviteURL string `json:"invite_url"`
}

// Task structures
type Task struct {
	ID               int      `json:"id"`
	Name             string   `json:"name"`
	Completed        int      `json:"completed"`
	LastCompletedAt  *string  `json:"last_completed_at,omitempty"`
	NextShowAt       *string  `json:"next_show_at,omitempty"`
	RewardMessage    *string  `json:"reward_message,omitempty"`
	ProgressionBase  *float64 `json:"progression_base,omitempty"`
	RepetitionPeriod *string  `json:"repetition_period,omitempty"`
	RepetitionDate   *string  `json:"repetition_date,omitempty"`
	WishlistID       *int     `json:"wishlist_id,omitempty"`
	ConfigID         *int     `json:"config_id,omitempty"`
	RewardPolicy     *string  `json:"reward_policy,omitempty"` // "personal" or "general" for tasks linked to wishlist items
	Position         *int     `json:"position,omitempty"`      // Position for subtasks
	// Extra fields for the task list (no omitempty so they are always sent)
	ProjectNames   []string `json:"project_names"`
	GroupName      *string  `json:"group_name,omitempty"` // Task group name
	SubtasksCount  int      `json:"subtasks_count"`
	HasProgression bool     `json:"has_progression"`
	AutoComplete   bool     `json:"auto_complete"`
}

type Reward struct {
	ID             int     `json:"id"`
	Position       int     `json:"position"`
	ProjectName    string  `json:"project_name"`
	Value          float64 `json:"value"`
	UseProgression bool    `json:"use_progression"`
}

type Subtask struct {
	Task    Task     `json:"task"`
	Rewards []Reward `json:"rewards"`
}

type WishlistInfo struct {
	ID       int    `json:"id"`
	Name     string `json:"name"`
	Unlocked bool   `json:"unlocked"`
}

// TaskDetail is the full task view: task, rewards, subtasks, and optional
// wishlist/test/draft data.
type TaskDetail struct {
	Task         Task          `json:"task"`
	Rewards      []Reward      `json:"rewards"`
	Subtasks     []Subtask     `json:"subtasks"`
	WishlistInfo *WishlistInfo `json:"wishlist_info,omitempty"`
	// Test-specific fields (only present if task has config_id)
	WordsCount    *int  `json:"words_count,omitempty"`
	MaxCards      *int  `json:"max_cards,omitempty"`
	DictionaryIDs []int `json:"dictionary_ids,omitempty"`
	// Draft fields (only present if draft exists)
	DraftProgressionValue *float64       `json:"draft_progression_value,omitempty"`
	DraftSubtasks         []DraftSubtask `json:"draft_subtasks,omitempty"`
}

type RewardRequest struct {
	Position       int     `json:"position"`
	ProjectName    string  `json:"project_name"`
	Value          float64 `json:"value"`
	UseProgression bool    `json:"use_progression"`
}

type SubtaskRequest struct {
	ID            *int            `json:"id,omitempty"`
	Name          *string         `json:"name,omitempty"`
	RewardMessage *string         `json:"reward_message,omitempty"`
	Position      *int            `json:"position,omitempty"`
	Rewards       []RewardRequest `json:"rewards,omitempty"`
}

type TaskRequest struct {
	Name             string           `json:"name"`
	ProgressionBase  *float64         `json:"progression_base,omitempty"`
	RewardMessage    *string          `json:"reward_message,omitempty"`
	RepetitionPeriod *string          `json:"repetition_period,omitempty"`
	RepetitionDate   *string          `json:"repetition_date,omitempty"`
	WishlistID       *int             `json:"wishlist_id,omitempty"`
	RewardPolicy     *string          `json:"reward_policy,omitempty"` // "personal" or "general" for tasks linked to wishlist items
	GroupName        *string          `json:"group_name,omitempty"`    // Task group name
	Rewards          []RewardRequest  `json:"rewards,omitempty"`
	Subtasks         []SubtaskRequest `json:"subtasks,omitempty"`
	// Test-specific fields
	IsTest        bool  `json:"is_test,omitempty"`
	WordsCount    *int  `json:"words_count,omitempty"`
	MaxCards      *int  `json:"max_cards,omitempty"`
	DictionaryIDs []int `json:"dictionary_ids,omitempty"`
}

type CompleteTaskRequest struct {
	Value           *float64 `json:"value,omitempty"`
	ChildrenTaskIDs []int    `json:"children_task_ids,omitempty"`
}

type PostponeTaskRequest struct {
	NextShowAt *string `json:"next_show_at"`
}

// ============================================
// Task Draft structures
// ============================================

type SaveDraftRequest struct {
	ProgressionValue *float64 `json:"progression_value,omitempty"`
	ChildrenTaskIDs  []int    `json:"children_task_ids,omitempty"` // only checked subtasks
	AutoComplete     bool     `json:"auto_complete"`
}

// TaskDraft mirrors a row of the task-draft table (not serialized to JSON).
type TaskDraft struct {
	ID               int
	TaskID           int
	UserID           int
	ProgressionValue *float64
	AutoComplete     bool
	CreatedAt        time.Time
	UpdatedAt        time.Time
}

type TaskDraftSubtask struct {
	ID          int
	TaskDraftID int
	SubtaskID   int
}

type DraftSubtask struct {
	SubtaskID int `json:"subtask_id"`
}

// ============================================
// Wishlist structures
// ============================================

type LinkedTask struct {
	ID         int     `json:"id"`
	Name       string  `json:"name"`
	Completed  int     `json:"completed"`
	NextShowAt *string `json:"next_show_at,omitempty"`
	UserID     *int    `json:"user_id,omitempty"` // ID of the task's owner
}

type WishlistItem struct {
	ID                    int                      `json:"id"`
	Name                  string                   `json:"name"`
	Price                 *float64                 `json:"price,omitempty"`
	ImageURL              *string                  `json:"image_url,omitempty"`
	Link                  *string                  `json:"link,omitempty"`
	Unlocked              bool                     `json:"unlocked"`
	Completed             bool                     `json:"completed"`
	FirstLockedCondition  *UnlockConditionDisplay  `json:"first_locked_condition,omitempty"`
	MoreLockedConditions  int                      `json:"more_locked_conditions,omitempty"`
	LockedConditionsCount int                      `json:"locked_conditions_count,omitempty"` // Total number of locked conditions
	UnlockConditions      []UnlockConditionDisplay `json:"unlock_conditions,omitempty"`
	LinkedTask            *LinkedTask              `json:"linked_task,omitempty"`
	TasksCount            int                      `json:"tasks_count,omitempty"` // Number of tasks for this wishlist item
	GroupName             *string                  `json:"group_name,omitempty"`  // Wishlist item group name
}

type UnlockConditionDisplay struct {
	ID             int      `json:"id"`
	Type           string   `json:"type"`
	TaskID         *int     `json:"task_id,omitempty"` // Task ID (for task_completion)
	TaskName       *string  `json:"task_name,omitempty"`
	TaskNextShowAt *string  `json:"task_next_show_at,omitempty"` // Task's next show date (for task_completion)
	ProjectID      *int     `json:"project_id,omitempty"`        // Project ID (for project_points)
	ProjectName    *string  `json:"project_name,omitempty"`
	RequiredPoints *float64 `json:"required_points,omitempty"`
	StartDate      *string  `json:"start_date,omitempty"` // Counting start date (YYYY-MM-DD), NULL = all time
	DisplayOrder   int      `json:"display_order"`
	// Completion progress
	CurrentPoints *float64 `json:"current_points,omitempty"` // Current points (for project_points)
	TaskCompleted *bool    `json:"task_completed,omitempty"` // Whether the task is completed (for task_completion)
	// Personal goals
	UserID   *int    `json:"user_id,omitempty"`   // User ID for personal goals
	UserName *string `json:"user_name,omitempty"` // User name for personal goals
	// Unlock time
	WeeksText *string `json:"weeks_text,omitempty"` // Formatted unlock-time text
}

type WishlistRequest struct {
	Name             string                   `json:"name"`
	Price            *float64                 `json:"price,omitempty"`
	Link             *string                  `json:"link,omitempty"`
	GroupName        *string                  `json:"group_name,omitempty"` // Wishlist item group name
	UnlockConditions []UnlockConditionRequest `json:"unlock_conditions,omitempty"`
}

type UnlockConditionRequest struct {
	ID             *int     `json:"id,omitempty"` // ID of an existing condition (to preserve other users' conditions)
	Type           string   `json:"type"`
	TaskID         *int     `json:"task_id,omitempty"`
	ProjectID      *int     `json:"project_id,omitempty"`
	RequiredPoints *float64 `json:"required_points,omitempty"`
	StartDate      *string  `json:"start_date,omitempty"` // Counting start date (YYYY-MM-DD), NULL = all time
	DisplayOrder   *int     `json:"display_order,omitempty"`
}

type WishlistResponse struct {
	Unlocked       []WishlistItem `json:"unlocked"`
	Locked         []WishlistItem `json:"locked"`
	Completed      []WishlistItem `json:"completed,omitempty"`
	CompletedCount int            `json:"completed_count"` // Number of completed wishlist items
}

// ============================================
// Wishlist Boards
// ============================================

type WishlistBoard struct {
	ID            int       `json:"id"`
	OwnerID       int       `json:"owner_id"`
	OwnerName     string    `json:"owner_name,omitempty"`
	Name          string    `json:"name"`
	InviteEnabled bool      `json:"invite_enabled"`
	InviteToken   *string   `json:"invite_token,omitempty"`
	InviteURL     *string   `json:"invite_url,omitempty"`
	MemberCount   int       `json:"member_count"`
	IsOwner       bool      `json:"is_owner"`
	CreatedAt     time.Time `json:"created_at"`
}
+type BoardMember struct { + ID int `json:"id"` + UserID int `json:"user_id"` + Name string `json:"name"` + Email string `json:"email"` + JoinedAt time.Time `json:"joined_at"` +} + +type BoardRequest struct { + Name string `json:"name"` + InviteEnabled *bool `json:"invite_enabled,omitempty"` +} + +type BoardInviteInfo struct { + BoardID int `json:"board_id"` + Name string `json:"name"` + OwnerName string `json:"owner_name"` + MemberCount int `json:"member_count"` +} + +type JoinBoardResponse struct { + Board WishlistBoard `json:"board"` + Message string `json:"message"` +} + +// ============================================ +// Helper functions for repetition_date +// ============================================ + +// calculateNextShowAtFromRepetitionDate calculates the next occurrence date based on repetition_date pattern +// Formats: +// - "N week" - Nth day of week (1=Monday, 7=Sunday) +// - "N month" - Nth day of month (1-31) +// - "MM-DD year" - specific date each year +func calculateNextShowAtFromRepetitionDate(repetitionDate string, fromDate time.Time) *time.Time { + if repetitionDate == "" { + return nil + } + + parts := strings.Fields(strings.TrimSpace(repetitionDate)) + if len(parts) < 2 { + return nil + } + + value := parts[0] + unit := strings.ToLower(parts[1]) + + // Start from tomorrow at midnight + nextDate := time.Date(fromDate.Year(), fromDate.Month(), fromDate.Day(), 0, 0, 0, 0, fromDate.Location()) + nextDate = nextDate.AddDate(0, 0, 1) + + switch unit { + case "week": + // N-th day of week (1=Monday, 7=Sunday) + dayOfWeek, err := strconv.Atoi(value) + if err != nil || dayOfWeek < 1 || dayOfWeek > 7 { + return nil + } + // Go: Sunday=0, Monday=1, ..., Saturday=6 + // Our format: Monday=1, ..., Sunday=7 + // Convert our format to Go format + targetGoDay := dayOfWeek % 7 // Monday(1)->1, Sunday(7)->0 + + currentGoDay := int(nextDate.Weekday()) + daysUntil := (targetGoDay - currentGoDay + 7) % 7 + if daysUntil == 0 { + daysUntil = 7 // If same day, 
go to next week + } + nextDate = nextDate.AddDate(0, 0, daysUntil) + + case "month": + // N-th day of month + dayOfMonth, err := strconv.Atoi(value) + if err != nil || dayOfMonth < 1 || dayOfMonth > 31 { + return nil + } + + // Find the next occurrence of this day + for i := 0; i < 12; i++ { // Check up to 12 months ahead + // Get the last day of the current month + year, month, _ := nextDate.Date() + lastDayOfMonth := time.Date(year, month+1, 0, 0, 0, 0, 0, nextDate.Location()).Day() + + // Use the actual day (capped at last day of month if needed) + actualDay := dayOfMonth + if actualDay > lastDayOfMonth { + actualDay = lastDayOfMonth + } + + candidateDate := time.Date(year, month, actualDay, 0, 0, 0, 0, nextDate.Location()) + + // If this date is in the future (after fromDate), use it + if candidateDate.After(fromDate) { + nextDate = candidateDate + break + } + + // Otherwise, try next month + nextDate = time.Date(year, month+1, 1, 0, 0, 0, 0, nextDate.Location()) + } + + case "year": + // MM-DD format (e.g., "02-01" for February 1st) + dateParts := strings.Split(value, "-") + if len(dateParts) != 2 { + return nil + } + month, err1 := strconv.Atoi(dateParts[0]) + day, err2 := strconv.Atoi(dateParts[1]) + if err1 != nil || err2 != nil || month < 1 || month > 12 || day < 1 || day > 31 { + return nil + } + + // Find the next occurrence of this date + year := nextDate.Year() + candidateDate := time.Date(year, time.Month(month), day, 0, 0, 0, 0, nextDate.Location()) + + // If this year's date has passed, use next year + if !candidateDate.After(fromDate) { + candidateDate = time.Date(year+1, time.Month(month), day, 0, 0, 0, 0, nextDate.Location()) + } + nextDate = candidateDate + + default: + return nil + } + + return &nextDate +} + +// calculateNextShowAtFromRepetitionPeriod calculates the next show date by adding repetition_period to fromDate +// Format: PostgreSQL INTERVAL string (e.g., "1 day", "2 weeks", "3 months" or "3 mons") +// Note: PostgreSQL may return 
weeks as days (e.g., "7 days" instead of "1 week") +func calculateNextShowAtFromRepetitionPeriod(repetitionPeriod string, fromDate time.Time) *time.Time { + if repetitionPeriod == "" { + return nil + } + + parts := strings.Fields(strings.TrimSpace(repetitionPeriod)) + if len(parts) < 2 { + log.Printf("calculateNextShowAtFromRepetitionPeriod: invalid format, parts=%v", parts) + return nil + } + + value, err := strconv.Atoi(parts[0]) + if err != nil { + log.Printf("calculateNextShowAtFromRepetitionPeriod: failed to parse value '%s': %v", parts[0], err) + return nil + } + + unit := strings.ToLower(parts[1]) + log.Printf("calculateNextShowAtFromRepetitionPeriod: value=%d, unit='%s'", value, unit) + + // Start from fromDate at midnight + nextDate := time.Date(fromDate.Year(), fromDate.Month(), fromDate.Day(), 0, 0, 0, 0, fromDate.Location()) + + switch unit { + case "minute", "minutes", "mins", "min": + nextDate = nextDate.Add(time.Duration(value) * time.Minute) + case "hour", "hours", "hrs", "hr": + nextDate = nextDate.Add(time.Duration(value) * time.Hour) + case "day", "days": + // PostgreSQL может возвращать недели как дни (например, "7 days" вместо "1 week") + // Если количество дней кратно 7, обрабатываем как недели + if value%7 == 0 && value >= 7 { + weeks := value / 7 + nextDate = nextDate.AddDate(0, 0, weeks*7) + } else { + nextDate = nextDate.AddDate(0, 0, value) + } + case "week", "weeks", "wks", "wk": + nextDate = nextDate.AddDate(0, 0, value*7) + case "month", "months", "mons", "mon": + nextDate = nextDate.AddDate(0, value, 0) + log.Printf("calculateNextShowAtFromRepetitionPeriod: added %d months, result=%v", value, nextDate) + case "year", "years", "yrs", "yr": + nextDate = nextDate.AddDate(value, 0, 0) + default: + log.Printf("calculateNextShowAtFromRepetitionPeriod: unknown unit '%s'", unit) + return nil + } + + return &nextDate +} + +// ============================================ +// Auth types +// ============================================ + +type 
User struct {
	ID           int        `json:"id"`
	Email        string     `json:"email"`
	Name         *string    `json:"name,omitempty"`
	PasswordHash string     `json:"-"` // never serialized to clients
	CreatedAt    time.Time  `json:"created_at"`
	UpdatedAt    time.Time  `json:"updated_at"`
	IsActive     bool       `json:"is_active"`
	IsAdmin      bool       `json:"is_admin"`
	LastLoginAt  *time.Time `json:"last_login_at,omitempty"`
}

type LoginRequest struct {
	Email    string `json:"email"`
	Password string `json:"password"`
}

type RegisterRequest struct {
	Email    string  `json:"email"`
	Password string  `json:"password"`
	Name     *string `json:"name,omitempty"`
}

// TokenResponse is the payload returned by register/login/refresh.
type TokenResponse struct {
	AccessToken  string `json:"access_token"`
	RefreshToken string `json:"refresh_token"`
	ExpiresIn    int    `json:"expires_in"`
	User         User   `json:"user"`
}

type RefreshRequest struct {
	RefreshToken string `json:"refresh_token"`
}

type UserResponse struct {
	User User `json:"user"`
}

// JWTClaims embeds the standard registered claims plus the user id.
type JWTClaims struct {
	UserID int `json:"user_id"`
	jwt.RegisteredClaims
}

// Context key for user ID
type contextKey string

const userIDKey contextKey = "user_id"

// App bundles the shared application state handed to every handler.
type App struct {
	DB                  *sql.DB
	webhookMutex        sync.Mutex
	lastWebhookTime     map[int]time.Time // config_id -> last webhook time
	telegramBot         *tgbotapi.BotAPI
	telegramBotUsername string
	jwtSecret           []byte
}

// setCORSHeaders allows any origin; the API relies on bearer tokens, not
// cookies, so the wildcard origin carries no credentials.
func setCORSHeaders(w http.ResponseWriter) {
	w.Header().Set("Access-Control-Allow-Origin", "*")
	w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
	w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
}

// ============================================
// Auth helper functions
// ============================================

// hashPassword returns the bcrypt hash of password at the default cost.
func hashPassword(password string) (string, error) {
	bytes, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
	return string(bytes), err
}

// checkPasswordHash reports whether password matches the stored bcrypt hash.
func checkPasswordHash(password, hash string) bool {
	err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password))
	return err == nil
}

// generateRefreshToken returns 32 bytes of crypto randomness, base64url-encoded.
func generateRefreshToken() (string, error) {
	b := make([]byte, 32)
	_, err := rand.Read(b)
	if err != nil {
		return "", err
	}
	return base64.URLEncoding.EncodeToString(b), nil
}

// generateWebhookToken generates a unique token for webhook URL identification
func generateWebhookToken() (string, error) {
	b := make([]byte, 24) // 24 bytes = 32 chars in base64
	_, err := rand.Read(b)
	if err != nil {
		return "", err
	}
	return base64.URLEncoding.EncodeToString(b), nil
}

// generateRandomProjectColor returns a random color from the predefined palette.
// Uses math/rand: colors are cosmetic, not security-sensitive.
func generateRandomProjectColor() string {
	if len(projectColorsPalette) == 0 {
		return "#3B82F6" // Fallback color
	}
	return projectColorsPalette[mathrand.Intn(len(projectColorsPalette))]
}

// generateAccessToken issues an HS256 JWT for userID valid for 24 hours.
func (a *App) generateAccessToken(userID int) (string, error) {
	claims := JWTClaims{
		UserID: userID,
		RegisteredClaims: jwt.RegisteredClaims{
			ExpiresAt: jwt.NewNumericDate(time.Now().Add(24 * time.Hour)),
			IssuedAt:  jwt.NewNumericDate(time.Now()),
		},
	}
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
	return token.SignedString(a.jwtSecret)
}

// validateAccessToken parses and verifies a JWT, rejecting any token not
// signed with an HMAC method (prevents alg-substitution attacks).
func (a *App) validateAccessToken(tokenString string) (*JWTClaims, error) {
	token, err := jwt.ParseWithClaims(tokenString, &JWTClaims{}, func(token *jwt.Token) (interface{}, error) {
		if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"])
		}
		return a.jwtSecret, nil
	})
	if err != nil {
		return nil, err
	}
	if claims, ok := token.Claims.(*JWTClaims); ok && token.Valid {
		return claims, nil
	}
	return nil, fmt.Errorf("invalid token")
}

// getUserIDFromContext extracts user ID from request context
func getUserIDFromContext(r *http.Request) (int, bool) {
	userID, ok := r.Context().Value(userIDKey).(int)
	return userID, ok
}

// ============================================
// Auth middleware
//
// ============================================

// authMiddleware validates the "Authorization: Bearer <jwt>" header and, on
// success, stores the authenticated user's ID in the request context under
// userIDKey. OPTIONS preflight requests are answered immediately without auth.
func (a *App) authMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Handle CORS preflight
		if r.Method == "OPTIONS" {
			setCORSHeaders(w)
			w.WriteHeader(http.StatusOK)
			return
		}

		authHeader := r.Header.Get("Authorization")
		if authHeader == "" {
			sendErrorWithCORS(w, "Authorization header required", http.StatusUnauthorized)
			return
		}

		parts := strings.Split(authHeader, " ")
		if len(parts) != 2 || parts[0] != "Bearer" {
			sendErrorWithCORS(w, "Invalid authorization header format", http.StatusUnauthorized)
			return
		}

		claims, err := a.validateAccessToken(parts[1])
		if err != nil {
			sendErrorWithCORS(w, "Invalid or expired token", http.StatusUnauthorized)
			return
		}

		// Add user_id to context
		ctx := context.WithValue(r.Context(), userIDKey, claims.UserID)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}

// adminMiddleware requires users.is_admin for the authenticated user; it must
// be chained AFTER authMiddleware since it reads the user ID from context.
func (a *App) adminMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Handle CORS preflight
		if r.Method == "OPTIONS" {
			setCORSHeaders(w)
			w.WriteHeader(http.StatusOK)
			return
		}

		// Get user_id from context (should be set by authMiddleware)
		userID, ok := getUserIDFromContext(r)
		if !ok {
			sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
			return
		}

		// Check if user is admin
		var isAdmin bool
		err := a.DB.QueryRow("SELECT is_admin FROM users WHERE id = $1", userID).Scan(&isAdmin)
		if err != nil {
			if err == sql.ErrNoRows {
				sendErrorWithCORS(w, "User not found", http.StatusNotFound)
				return
			}
			log.Printf("Error checking admin status: %v", err)
			sendErrorWithCORS(w, "Database error", http.StatusInternalServerError)
			return
		}

		if !isAdmin {
			sendErrorWithCORS(w, "Forbidden: Admin access required", http.StatusForbidden)
			return
		}

		next.ServeHTTP(w, r)
	})
}
// ============================================
// Auth handlers
// ============================================

// registerHandler creates a new account: validates the payload, rejects
// duplicate emails, stores a bcrypt hash of the password, and returns a
// fresh access/refresh token pair.
func (a *App) registerHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	var req RegisterRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest)
		return
	}

	if req.Email == "" || req.Password == "" {
		sendErrorWithCORS(w, "Email and password are required", http.StatusBadRequest)
		return
	}

	if len(req.Password) < 6 {
		sendErrorWithCORS(w, "Password must be at least 6 characters", http.StatusBadRequest)
		return
	}

	// Check if email already exists
	var existingID int
	err := a.DB.QueryRow("SELECT id FROM users WHERE email = $1", req.Email).Scan(&existingID)
	if err == nil {
		sendErrorWithCORS(w, "Email already registered", http.StatusConflict)
		return
	}
	if err != sql.ErrNoRows {
		log.Printf("Error checking existing user: %v", err)
		sendErrorWithCORS(w, "Database error", http.StatusInternalServerError)
		return
	}

	// Hash password
	passwordHash, err := hashPassword(req.Password)
	if err != nil {
		log.Printf("Error hashing password: %v", err)
		sendErrorWithCORS(w, "Error processing password", http.StatusInternalServerError)
		return
	}

	// Insert user
	var user User
	err = a.DB.QueryRow(`
		INSERT INTO users (email, password_hash, name, created_at, updated_at, is_active, is_admin)
		VALUES ($1, $2, $3, NOW(), NOW(), true, false)
		RETURNING id, email, name, created_at, updated_at, is_active, is_admin, last_login_at
	`, req.Email, passwordHash, req.Name).Scan(
		&user.ID, &user.Email, &user.Name, &user.CreatedAt, &user.UpdatedAt, &user.IsActive, &user.IsAdmin, &user.LastLoginAt,
	)
	if err != nil {
		log.Printf("Error inserting user: %v", err)
		sendErrorWithCORS(w, "Error creating user", http.StatusInternalServerError)
		return
	}

	// Check if this is the first user - if so, claim all orphaned data
	// NOTE(review): the COUNT(*) check after INSERT is racy under concurrent
	// first registrations — confirm this is acceptable for this deployment.
	var userCount int
	a.DB.QueryRow("SELECT COUNT(*) FROM users").Scan(&userCount)
	if userCount == 1 {
		log.Printf("First user registered (ID: %d), claiming all orphaned data", user.ID)
		a.claimOrphanedData(user.ID)
	}

	// Generate tokens
	accessToken, err := a.generateAccessToken(user.ID)
	if err != nil {
		log.Printf("Error generating access token: %v", err)
		sendErrorWithCORS(w, "Error generating token", http.StatusInternalServerError)
		return
	}

	refreshToken, err := generateRefreshToken()
	if err != nil {
		log.Printf("Error generating refresh token: %v", err)
		sendErrorWithCORS(w, "Error generating token", http.StatusInternalServerError)
		return
	}

	// Hash and store refresh token (expires_at = NULL → token never expires)
	refreshTokenHash, _ := hashPassword(refreshToken)
	_, err = a.DB.Exec(`
		INSERT INTO refresh_tokens (user_id, token_hash, expires_at)
		VALUES ($1, $2, $3)
	`, user.ID, refreshTokenHash, nil)
	if err != nil {
		log.Printf("Error storing refresh token: %v", err)
	}

	// Update last login
	a.DB.Exec("UPDATE users SET last_login_at = NOW() WHERE id = $1", user.ID)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(TokenResponse{
		AccessToken:  accessToken,
		RefreshToken: refreshToken,
		ExpiresIn:    86400, // 24 hours
		User:         user,
	})
}

// loginHandler verifies email/password, claims any orphaned (user_id IS NULL)
// data for the logging-in user, and issues a new access/refresh token pair.
func (a *App) loginHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	var req LoginRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest)
		return
	}

	if req.Email == "" || req.Password == "" {
		sendErrorWithCORS(w, "Email and password are required", http.StatusBadRequest)
		return
	}

	// Find user
	var user User
	err := a.DB.QueryRow(`
		SELECT id, email, password_hash, name, created_at, updated_at, is_active, is_admin, last_login_at
		FROM users WHERE email = $1
	`, req.Email).Scan(
		&user.ID, &user.Email, &user.PasswordHash, &user.Name,
		&user.CreatedAt, &user.UpdatedAt, &user.IsActive, &user.IsAdmin, &user.LastLoginAt,
	)
	if err == sql.ErrNoRows {
		sendErrorWithCORS(w, "Invalid email or password", http.StatusUnauthorized)
		return
	}
	if err != nil {
		log.Printf("Error finding user: %v", err)
		sendErrorWithCORS(w, "Database error", http.StatusInternalServerError)
		return
	}

	if !user.IsActive {
		sendErrorWithCORS(w, "Account is disabled", http.StatusForbidden)
		return
	}

	// Check password
	if !checkPasswordHash(req.Password, user.PasswordHash) {
		sendErrorWithCORS(w, "Invalid email or password", http.StatusUnauthorized)
		return
	}

	// Check if there is any orphaned data - claim it for this user
	// NOTE(review): this assigns ALL orphaned rows to whichever user logs in
	// first — verify this single-tenant-migration behavior is still intended.
	var orphanedDataCount int
	a.DB.QueryRow(`
		SELECT COUNT(*) FROM (
			SELECT 1 FROM projects WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM entries WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM nodes WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM dictionaries WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM words WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM progress WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM configs WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM telegram_integrations WHERE user_id IS NULL
			UNION ALL SELECT 1 FROM weekly_goals WHERE user_id IS NULL
			LIMIT 1
		) orphaned
	`).Scan(&orphanedDataCount)
	if orphanedDataCount > 0 {
		log.Printf("User %d logging in, claiming orphaned data from all tables", user.ID)
		a.claimOrphanedData(user.ID)
	}

	// Generate tokens
	accessToken, err := a.generateAccessToken(user.ID)
	if err != nil {
		log.Printf("Error generating access token: %v", err)
		sendErrorWithCORS(w, "Error generating token", http.StatusInternalServerError)
		return
	}

	refreshToken, err := generateRefreshToken()
	if err != nil {
		log.Printf("Error generating refresh token: %v", err)
		sendErrorWithCORS(w, "Error generating token", http.StatusInternalServerError)
		return
	}

	// Hash and store refresh token
	refreshTokenHash, _ := hashPassword(refreshToken)
	_, err = a.DB.Exec(`
		INSERT INTO refresh_tokens (user_id, token_hash, expires_at)
		VALUES ($1, $2, $3)
	`, user.ID, refreshTokenHash, nil)
	if err != nil {
		log.Printf("Error storing refresh token: %v", err)
	}

	// Update last login
	a.DB.Exec("UPDATE users SET last_login_at = NOW() WHERE id = $1", user.ID)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(TokenResponse{
		AccessToken:  accessToken,
		RefreshToken: refreshToken,
		ExpiresIn:    86400, // 24 hours
		User:         user,
	})
}

// refreshTokenHandler rotates a refresh token: it locates the matching stored
// hash, issues new tokens, and only then deletes the old token.
// NOTE(review): tokens are stored bcrypt-hashed, so every non-expired token
// must be bcrypt-compared in a loop — O(n) bcrypt work per refresh. Consider
// storing a deterministic lookup key alongside the hash if this grows.
func (a *App) refreshTokenHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	var req RefreshRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest)
		return
	}

	if req.RefreshToken == "" {
		sendErrorWithCORS(w, "Refresh token is required", http.StatusBadRequest)
		return
	}

	// Find valid refresh token (expires_at is NULL for tokens without expiration)
	rows, err := a.DB.Query(`
		SELECT rt.id, rt.user_id, rt.token_hash, u.email, u.name, u.created_at, u.updated_at, u.is_active, u.is_admin, u.last_login_at
		FROM refresh_tokens rt
		JOIN users u ON rt.user_id = u.id
		WHERE rt.expires_at IS NULL OR rt.expires_at > NOW()
	`)
	if err != nil {
		log.Printf("Error querying refresh tokens: %v", err)
		sendErrorWithCORS(w, "Database error", http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	var foundTokenID int
	var user User
	var tokenFound bool

	for rows.Next() {
		var tokenID int
		var tokenHash string
		err := rows.Scan(&tokenID, &user.ID, &tokenHash, &user.Email, &user.Name,
			&user.CreatedAt, &user.UpdatedAt, &user.IsActive, &user.IsAdmin, &user.LastLoginAt)
		if err != nil {
			continue
		}
		if checkPasswordHash(req.RefreshToken, tokenHash) {
			foundTokenID = tokenID
			tokenFound = true
			break
		}
	}

	if !tokenFound {
		sendErrorWithCORS(w, "Invalid or expired refresh token", http.StatusUnauthorized)
		return
	}

	if !user.IsActive {
		sendErrorWithCORS(w, "Account is disabled", http.StatusForbidden)
		return
	}

	// Generate new tokens FIRST before deleting old one to prevent race condition
	accessToken, err := a.generateAccessToken(user.ID)
	if err != nil {
		log.Printf("Error generating access token: %v", err)
		sendErrorWithCORS(w, "Error generating token", http.StatusInternalServerError)
		return
	}

	refreshToken, err := generateRefreshToken()
	if err != nil {
		log.Printf("Error generating refresh token: %v", err)
		sendErrorWithCORS(w, "Error generating token", http.StatusInternalServerError)
		return
	}

	// Store new refresh token FIRST
	refreshTokenHash, _ := hashPassword(refreshToken)
	_, err = a.DB.Exec(`
		INSERT INTO refresh_tokens (user_id, token_hash, expires_at)
		VALUES ($1, $2, $3)
	`, user.ID, refreshTokenHash, nil)
	if err != nil {
		log.Printf("Error storing new refresh token: %v", err)
		sendErrorWithCORS(w, "Error generating token", http.StatusInternalServerError)
		return
	}

	// Delete old refresh token AFTER new one is successfully stored
	// This prevents race condition where multiple refresh requests might use the same token
	a.DB.Exec("DELETE FROM refresh_tokens WHERE id = $1", foundTokenID)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(TokenResponse{
		AccessToken:  accessToken,
		RefreshToken: refreshToken,
		ExpiresIn:    86400, // 24 hours
		User:         user,
	})
}

// logoutHandler revokes ALL refresh tokens of the authenticated user
// (logs the user out of every device).
func (a *App) logoutHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Delete all refresh tokens for this user
	a.DB.Exec("DELETE FROM refresh_tokens WHERE user_id = $1", userID)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]string{"message": "Logged out successfully"})
}

// getMeHandler returns the authenticated user's profile.
func (a *App) getMeHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	var user User
	err := a.DB.QueryRow(`
		SELECT id, email, name, created_at, updated_at, is_active, is_admin, last_login_at
		FROM users WHERE id = $1
	`, userID).Scan(
		&user.ID, &user.Email, &user.Name, &user.CreatedAt, &user.UpdatedAt, &user.IsActive, &user.IsAdmin, &user.LastLoginAt,
	)
	if err != nil {
		log.Printf("Error finding user: %v", err)
		sendErrorWithCORS(w, "User not found", http.StatusNotFound)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(UserResponse{User: user})
}

// claimOrphanedData assigns all data with NULL user_id to the specified user
func (a *App) claimOrphanedData(userID int) {
	tables := []string{"projects", "entries", "nodes", "dictionaries", "words", "progress", "configs", "telegram_integrations", "weekly_goals"}
	for _, table := range tables {
		// First check if user_id column exists
		var columnExists bool
		err := a.DB.QueryRow(`
			SELECT EXISTS (
				SELECT 1 FROM information_schema.columns
				WHERE table_name = $1 AND column_name = 'user_id'
			)
		`, table).Scan(&columnExists)

		if err != nil || !columnExists {
			log.Printf("Skipping %s: user_id column does not exist (run migrations as table owner)", table)
			continue
		}

		// Table names come from the hardcoded list above, so the Sprintf is
		// safe from SQL injection.
		result, err := a.DB.Exec(fmt.Sprintf("UPDATE %s SET user_id = $1 WHERE user_id IS NULL", table), userID)
		if err != nil {
			log.Printf("Error claiming orphaned data in %s: %v", table, err)
		} else {
			rowsAffected, _ := result.RowsAffected()
			if rowsAffected > 0 {
				log.Printf("Claimed %d orphaned rows in %s for user %d", rowsAffected, table, userID)
			}
		}
	}
}

// sendErrorWithCORS writes a JSON {"error": message} body with CORS headers
// and the given HTTP status code.
func sendErrorWithCORS(w http.ResponseWriter, message string, statusCode int) {
	setCORSHeaders(w)
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(statusCode)
	json.NewEncoder(w).Encode(map[string]interface{}{
		"error": message,
	})
}

// getWordsHandler lists the user's words (optionally filtered by
// ?dictionary_id=N) joined with the user's per-word progress counters.
func (a *App) getWordsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Get dictionary_id from query parameter
	dictionaryIDStr := r.URL.Query().Get("dictionary_id")
	var dictionaryID *int
	if dictionaryIDStr != "" {
		if id, err := strconv.Atoi(dictionaryIDStr); err == nil {
			dictionaryID = &id
		}
	}

	query := `
		SELECT
			w.id,
			w.name,
			w.translation,
			w.description,
			COALESCE(p.success, 0) as success,
			COALESCE(p.failure, 0) as failure,
			CASE WHEN p.last_success_at IS NOT NULL THEN p.last_success_at::text ELSE NULL END as last_success_at,
			CASE WHEN p.last_failure_at IS NOT NULL THEN p.last_failure_at::text ELSE NULL END as last_failure_at
		FROM words w
		JOIN dictionaries d ON w.dictionary_id = d.id
		LEFT JOIN progress p ON w.id = p.word_id AND p.user_id = $1
		WHERE d.user_id = $1 AND ($2::INTEGER IS NULL OR w.dictionary_id = $2)
		ORDER BY w.id
	`

	rows, err := a.DB.Query(query, userID, dictionaryID)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	words := make([]Word, 0)
	for rows.Next() {
		var word Word
		var lastSuccess, lastFailure sql.NullString

		err := rows.Scan(
			&word.ID,
			&word.Name,
			&word.Translation,
			&word.Description,
			&word.Success,
			&word.Failure,
			&lastSuccess,
			&lastFailure,
		)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}

		if lastSuccess.Valid {
			word.LastSuccess = &lastSuccess.String
		}
		if lastFailure.Valid {
			word.LastFailure = &lastFailure.String
		}

		words = append(words, word)
	}

	setCORSHeaders(w)
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(words)
}

// addWordsHandler inserts a batch of words for the authenticated user,
// creating a default dictionary on first use. (Continues past this chunk.)
func (a *App) addWordsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	var req WordsRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		log.Printf("Error decoding addWords request: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusBadRequest)
		return
	}

	log.Printf("addWords: user_id=%d, words_count=%d", userID, len(req.Words))

	tx, err := a.DB.Begin()
	if err != nil {
		log.Printf("Error beginning transaction: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer tx.Rollback()

	// Create default dictionary for user if needed
	var defaultDictID int
	err = tx.QueryRow(`
		SELECT id FROM dictionaries WHERE user_id = $1 ORDER BY id LIMIT 1
	`, userID).Scan(&defaultDictID)
	if err == sql.ErrNoRows {
		// Create default dictionary for user
		log.Printf("Creating default dictionary for user_id=%d", userID)
		err = tx.QueryRow(`
			INSERT INTO dictionaries (name, user_id) VALUES ('Все слова', $1) RETURNING id
		`, userID).Scan(&defaultDictID)
		if err != nil {
			log.Printf("Error creating default dictionary: %v", err)
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
+ } + log.Printf("Created default dictionary id=%d for user_id=%d", defaultDictID, userID) + } else if err != nil { + log.Printf("Error finding default dictionary: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } else { + log.Printf("Using default dictionary id=%d for user_id=%d", defaultDictID, userID) + } + + stmt, err := tx.Prepare(` + INSERT INTO words (name, translation, description, dictionary_id, user_id) + VALUES ($1, $2, $3, $4, $5) + RETURNING id + `) + if err != nil { + log.Printf("Error preparing insert statement: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer stmt.Close() + + var addedCount int + for i, wordReq := range req.Words { + var id int + dictionaryID := defaultDictID + if wordReq.DictionaryID != nil { + dictionaryID = *wordReq.DictionaryID + // Проверяем, что словарь принадлежит пользователю + var dictUserID int + err := tx.QueryRow(` + SELECT user_id FROM dictionaries WHERE id = $1 + `, dictionaryID).Scan(&dictUserID) + if err == sql.ErrNoRows { + log.Printf("Dictionary %d not found for word %d", dictionaryID, i) + sendErrorWithCORS(w, fmt.Sprintf("Dictionary %d not found", dictionaryID), http.StatusBadRequest) + return + } else if err != nil { + log.Printf("Error checking dictionary ownership: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + if dictUserID != userID { + log.Printf("Dictionary %d belongs to user %d, but request from user %d", dictionaryID, dictUserID, userID) + sendErrorWithCORS(w, fmt.Sprintf("Dictionary %d does not belong to user", dictionaryID), http.StatusForbidden) + return + } + } + err := stmt.QueryRow(wordReq.Name, wordReq.Translation, wordReq.Description, dictionaryID, userID).Scan(&id) + if err != nil { + log.Printf("Error inserting word %d (name='%s', dict_id=%d, user_id=%d): %v", i, wordReq.Name, dictionaryID, userID, err) + sendErrorWithCORS(w, err.Error(), 
http.StatusInternalServerError) + return + } + addedCount++ + log.Printf("Successfully added word id=%d: name='%s', dict_id=%d", id, wordReq.Name, dictionaryID) + } + + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + log.Printf("Successfully added %d words for user_id=%d", addedCount, userID) + + setCORSHeaders(w) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": fmt.Sprintf("Added %d words", addedCount), + "added": addedCount, + }) +} + +func (a *App) deleteWordHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + wordID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid word ID", http.StatusBadRequest) + return + } + + // Verify ownership - check that word belongs to user + var ownerID int + err = a.DB.QueryRow("SELECT user_id FROM words WHERE id = $1", wordID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Word not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking word ownership: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + if ownerID != userID { + sendErrorWithCORS(w, "Word not found", http.StatusNotFound) + return + } + + // Delete the word (progress will be deleted automatically due to CASCADE) + result, err := a.DB.Exec("DELETE FROM words WHERE id = $1", wordID) + if err != nil { + log.Printf("Error deleting word: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + log.Printf("Error 
getting rows affected: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if rowsAffected == 0 { + sendErrorWithCORS(w, "Word not found", http.StatusNotFound) + return + } + + setCORSHeaders(w) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Word deleted successfully", + }) +} + +func (a *App) resetWordProgressHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + wordID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid word ID", http.StatusBadRequest) + return + } + + // Verify ownership - check that word belongs to user + var ownerID int + err = a.DB.QueryRow("SELECT user_id FROM words WHERE id = $1", wordID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Word not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking word ownership: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + if ownerID != userID { + sendErrorWithCORS(w, "Word not found", http.StatusNotFound) + return + } + + // Reset progress for this word and user + _, err = a.DB.Exec(` + UPDATE progress + SET success = 0, + failure = 0, + last_success_at = NULL, + last_failure_at = NULL + WHERE word_id = $1 AND user_id = $2 + `, wordID, userID) + if err != nil { + log.Printf("Error resetting word progress: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + setCORSHeaders(w) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Word progress reset successfully", + }) +} + +func (a *App) getTestWordsHandler(w 
http.ResponseWriter, r *http.Request) {
	log.Printf("getTestWordsHandler called: %s %s", r.Method, r.URL.Path)
	setCORSHeaders(w)

	if r.Method == "OPTIONS" {
		w.WriteHeader(http.StatusOK)
		return
	}

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Get config_id from query parameter (required).
	configIDStr := r.URL.Query().Get("config_id")
	if configIDStr == "" {
		sendErrorWithCORS(w, "config_id parameter is required", http.StatusBadRequest)
		return
	}

	configID, err := strconv.Atoi(configIDStr)
	if err != nil {
		sendErrorWithCORS(w, "invalid config_id parameter", http.StatusBadRequest)
		return
	}

	// Get words_count from config (verify ownership).
	var wordsCount int
	err = a.DB.QueryRow("SELECT words_count FROM configs WHERE id = $1 AND user_id = $2", configID, userID).Scan(&wordsCount)
	if err != nil {
		if err == sql.ErrNoRows {
			sendErrorWithCORS(w, "config not found", http.StatusNotFound)
			return
		}
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Get dictionary IDs for this config.
	var dictionaryIDs []int
	dictQuery := `
		SELECT dictionary_id
		FROM config_dictionaries
		WHERE config_id = $1
	`
	dictRows, err := a.DB.Query(dictQuery, configID)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer dictRows.Close()

	for dictRows.Next() {
		var dictID int
		if err := dictRows.Scan(&dictID); err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
		dictionaryIDs = append(dictionaryIDs, dictID)
	}
	// Fix: check for an iteration error before building the filter.
	if err := dictRows.Err(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// If no dictionaries are selected for config, use all dictionaries (no filter).
	var dictFilter string
	var dictArgs []interface{}
	if len(dictionaryIDs) > 0 {
		placeholders := make([]string, len(dictionaryIDs))
		for i := range dictionaryIDs {
			placeholders[i] = fmt.Sprintf("$%d", i+1)
		}
		dictFilter = fmt.Sprintf("w.dictionary_id IN (%s)", strings.Join(placeholders, ","))
		for _, dictID := range dictionaryIDs {
			dictArgs = append(dictArgs, dictID)
		}
	} else {
		dictFilter = "1=1" // No filter
	}

	// Calculate group sizes (use ceiling to ensure we don't lose words due to rounding).
	group1Count := int(float64(wordsCount) * 0.3) // 30%
	group2Count := int(float64(wordsCount) * 0.4) // 40%
	// group3Count is calculated dynamically based on actual words collected from groups 1 and 2.

	// Base query parts.
	baseSelect := `
		w.id,
		w.name,
		w.translation,
		w.description,
		COALESCE(p.success, 0) as success,
		COALESCE(p.failure, 0) as failure,
		CASE WHEN p.last_success_at IS NOT NULL THEN p.last_success_at::text ELSE NULL END as last_success_at,
		CASE WHEN p.last_failure_at IS NOT NULL THEN p.last_failure_at::text ELSE NULL END as last_failure_at
	`
	// NOTE(review): userID is interpolated with %d rather than bound as a
	// parameter. Safe only because it is an int from the auth context, but
	// binding it would be the idiomatic form — consider refactoring.
	baseFrom := fmt.Sprintf(`
		FROM words w
		JOIN dictionaries d ON w.dictionary_id = d.id AND d.user_id = %d
		LEFT JOIN progress p ON w.id = p.word_id AND p.user_id = %d
		WHERE `, userID, userID) + dictFilter

	// Group 1: success <= 3, sorted by success ASC, then last_success_at ASC (NULL first).
	group1Query := `
		SELECT ` + baseSelect + `
		` + baseFrom + `
		AND COALESCE(p.success, 0) <= 3
		ORDER BY
			COALESCE(p.success, 0) ASC,
			CASE WHEN p.last_success_at IS NULL THEN 0 ELSE 1 END,
			p.last_success_at ASC
		LIMIT $` + fmt.Sprintf("%d", len(dictArgs)+1)

	group1Args := append(dictArgs, group1Count*2) // Get more to ensure uniqueness
	group1Rows, err := a.DB.Query(group1Query, group1Args...)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer group1Rows.Close()

	group1Words := make([]Word, 0)
	group1WordIDs := make(map[int]bool)
	for group1Rows.Next() && len(group1Words) < group1Count {
		var word Word
		var lastSuccess, lastFailure sql.NullString

		err := group1Rows.Scan(
			&word.ID,
			&word.Name,
			&word.Translation,
			&word.Description,
			&word.Success,
			&word.Failure,
			&lastSuccess,
			&lastFailure,
		)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}

		if lastSuccess.Valid {
			word.LastSuccess = &lastSuccess.String
		}
		if lastFailure.Valid {
			word.LastFailure = &lastFailure.String
		}

		group1Words = append(group1Words, word)
		group1WordIDs[word.ID] = true
	}
	// Fix: a driver error would otherwise just look like a short group.
	if err := group1Rows.Err(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Group 2: sorted by (failure + 1)/(success + 1) DESC, take top 40%.
	// Exclude words already in group1.
	group2Exclude := ""
	group2Args := make([]interface{}, 0)
	group2Args = append(group2Args, dictArgs...)
	if len(group1WordIDs) > 0 {
		excludePlaceholders := make([]string, 0, len(group1WordIDs))
		idx := len(dictArgs) + 1
		for wordID := range group1WordIDs {
			excludePlaceholders = append(excludePlaceholders, fmt.Sprintf("$%d", idx))
			group2Args = append(group2Args, wordID)
			idx++
		}
		group2Exclude = " AND w.id NOT IN (" + strings.Join(excludePlaceholders, ",") + ")"
	}

	group2Query := `
		SELECT ` + baseSelect + `
		` + baseFrom + `
		` + group2Exclude + `
		ORDER BY
			(COALESCE(p.failure, 0) + 1.0) / (COALESCE(p.success, 0) + 1.0) DESC,
			CASE WHEN p.last_success_at IS NULL THEN 0 ELSE 1 END,
			p.last_success_at ASC
		LIMIT $` + fmt.Sprintf("%d", len(group2Args)+1)

	group2Args = append(group2Args, group2Count)
	group2Rows, err := a.DB.Query(group2Query, group2Args...)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer group2Rows.Close()

	group2Words := make([]Word, 0)
	group2WordIDs := make(map[int]bool)
	for group2Rows.Next() {
		var word Word
		var lastSuccess, lastFailure sql.NullString

		err := group2Rows.Scan(
			&word.ID,
			&word.Name,
			&word.Translation,
			&word.Description,
			&word.Success,
			&word.Failure,
			&lastSuccess,
			&lastFailure,
		)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}

		if lastSuccess.Valid {
			word.LastSuccess = &lastSuccess.String
		}
		if lastFailure.Valid {
			word.LastFailure = &lastFailure.String
		}

		group2Words = append(group2Words, word)
		group2WordIDs[word.ID] = true
	}
	if err := group2Rows.Err(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Group 3: All remaining words, sorted by last_success_at ASC (NULL first).
	// Exclude words already in group1 and group2.
	allExcludedIDs := make(map[int]bool)
	for id := range group1WordIDs {
		allExcludedIDs[id] = true
	}
	for id := range group2WordIDs {
		allExcludedIDs[id] = true
	}

	group3Exclude := ""
	group3Args := make([]interface{}, 0)
	group3Args = append(group3Args, dictArgs...)
	if len(allExcludedIDs) > 0 {
		excludePlaceholders := make([]string, 0, len(allExcludedIDs))
		idx := len(dictArgs) + 1
		for wordID := range allExcludedIDs {
			excludePlaceholders = append(excludePlaceholders, fmt.Sprintf("$%d", idx))
			group3Args = append(group3Args, wordID)
			idx++
		}
		group3Exclude = " AND w.id NOT IN (" + strings.Join(excludePlaceholders, ",") + ")"
	}

	// Calculate how many words we still need from group 3.
	wordsCollected := len(group1Words) + len(group2Words)
	group3Needed := wordsCount - wordsCollected

	log.Printf("Word selection: wordsCount=%d, group1=%d, group2=%d, collected=%d, group3Needed=%d",
		wordsCount, len(group1Words), len(group2Words), wordsCollected, group3Needed)

	group3Words := make([]Word, 0)
	if group3Needed > 0 {
		group3Query := `
			SELECT ` + baseSelect + `
			` + baseFrom + `
			` + group3Exclude + `
			ORDER BY
				CASE WHEN p.last_success_at IS NULL THEN 0 ELSE 1 END,
				p.last_success_at ASC
			LIMIT $` + fmt.Sprintf("%d", len(group3Args)+1)

		group3Args = append(group3Args, group3Needed)
		group3Rows, err := a.DB.Query(group3Query, group3Args...)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
		defer group3Rows.Close()

		for group3Rows.Next() {
			var word Word
			var lastSuccess, lastFailure sql.NullString

			err := group3Rows.Scan(
				&word.ID,
				&word.Name,
				&word.Translation,
				&word.Description,
				&word.Success,
				&word.Failure,
				&lastSuccess,
				&lastFailure,
			)
			if err != nil {
				sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
				return
			}

			if lastSuccess.Valid {
				word.LastSuccess = &lastSuccess.String
			}
			if lastFailure.Valid {
				word.LastFailure = &lastFailure.String
			}

			group3Words = append(group3Words, word)
		}
		if err := group3Rows.Err(); err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}

	// Combine all groups in priority order.
	words := make([]Word, 0)
	words = append(words, group1Words...)
	words = append(words, group2Words...)
	words = append(words, group3Words...)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(words)
}

// updateTestProgressHandler upserts per-word progress rows for the
// authenticated user in a single transaction (continued below).
func (a *App) updateTestProgressHandler(w http.ResponseWriter, r *http.Request) {
	log.Printf("updateTestProgressHandler called: %s %s", r.Method, r.URL.Path)
	setCORSHeaders(w)

	if r.Method == "OPTIONS" {
		w.WriteHeader(http.StatusOK)
		return
	}

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	var req TestProgressRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		log.Printf("Error decoding request: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusBadRequest)
		return
	}

	log.Printf("Received %d word updates, config_id: %v, user_id: %d", len(req.Words), req.ConfigID, userID)

	tx, err := a.DB.Begin()
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer tx.Rollback()

	// Create unique constraint for (word_id, user_id) if not exists.
	// NOTE(review): running DDL on every request (with the error ignored) is a
	// code smell — this belongs in a schema migration, not the request path.
	tx.Exec("CREATE UNIQUE INDEX IF NOT EXISTS progress_word_user_unique ON progress(word_id, user_id)")

	stmt, err := tx.Prepare(`
		INSERT INTO progress (word_id, user_id, success, failure, last_success_at, last_failure_at)
		VALUES ($1, $2, $3, $4, $5, $6)
		ON CONFLICT (word_id, user_id)
		DO UPDATE SET
			success = EXCLUDED.success,
			failure = EXCLUDED.failure,
			last_success_at = COALESCE(EXCLUDED.last_success_at, progress.last_success_at),
			last_failure_at = COALESCE(EXCLUDED.last_failure_at, progress.last_failure_at)
	`)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer stmt.Close()

	for _, wordUpdate := range req.Words {
		// Convert pointers to values for logging.
		lastSuccessStr := "nil"
		if wordUpdate.LastSuccessAt != nil {
			lastSuccessStr = *wordUpdate.LastSuccessAt
		}
		lastFailureStr := "nil"
		if wordUpdate.LastFailureAt != nil {
			lastFailureStr = *wordUpdate.LastFailureAt
		}
		log.Printf("Updating word %d: success=%d, failure=%d, last_success_at=%s, last_failure_at=%s",
			wordUpdate.ID, wordUpdate.Success, wordUpdate.Failure, lastSuccessStr, lastFailureStr)

		// Convert pointers to plain values / nil for proper SQL NULL handling;
		// an empty string is treated the same as absent.
		var lastSuccess, lastFailure interface{}
		if wordUpdate.LastSuccessAt != nil && *wordUpdate.LastSuccessAt != "" {
			lastSuccess = *wordUpdate.LastSuccessAt
		} else {
			lastSuccess = nil
		}
		if wordUpdate.LastFailureAt != nil && *wordUpdate.LastFailureAt != "" {
			lastFailure = *wordUpdate.LastFailureAt
		} else {
			lastFailure = nil
		}

		_, err := stmt.Exec(
			wordUpdate.ID,
			userID,
			wordUpdate.Success,
			wordUpdate.Failure,
			lastSuccess,
			lastFailure,
		)
		if err != nil {
			log.Printf("Error executing update for word %d: %v", wordUpdate.ID, err)
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}

	if err := tx.Commit(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Note: Reward message is now sent via completeTaskHandler when the test task is automatically completed.
	// The config_id is kept in the request for potential future use, but we no longer send messages here
	// to avoid duplicate messages (one from test completion, one from task completion).
+ + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Progress updated successfully", + }) +} + +func (a *App) getConfigsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + query := ` + SELECT id, words_count, max_cards + FROM configs + WHERE user_id = $1 + ORDER BY id + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + configs := make([]Config, 0) + for rows.Next() { + var config Config + var maxCards sql.NullInt64 + err := rows.Scan( + &config.ID, + &config.WordsCount, + &maxCards, + ) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + if maxCards.Valid { + maxCardsVal := int(maxCards.Int64) + config.MaxCards = &maxCardsVal + } + configs = append(configs, config) + } + + setCORSHeaders(w) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(configs) +} + +func (a *App) getDictionariesHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + query := ` + SELECT + d.id, + d.name, + COALESCE(COUNT(w.id), 0) as words_count + FROM dictionaries d + LEFT JOIN words w ON d.id = w.dictionary_id + WHERE d.user_id = $1 + GROUP BY d.id, d.name + ORDER BY d.id + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + 
dictionaries := make([]Dictionary, 0) + for rows.Next() { + var dict Dictionary + err := rows.Scan( + &dict.ID, + &dict.Name, + &dict.WordsCount, + ) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + dictionaries = append(dictionaries, dict) + } + + setCORSHeaders(w) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(dictionaries) +} + +func (a *App) addDictionaryHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var req DictionaryRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Name == "" { + sendErrorWithCORS(w, "Имя словаря обязательно", http.StatusBadRequest) + return + } + + var id int + err := a.DB.QueryRow(` + INSERT INTO dictionaries (name, user_id) + VALUES ($1, $2) + RETURNING id + `, req.Name, userID).Scan(&id) + + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "id": id, + "name": req.Name, + }) +} + +func (a *App) updateDictionaryHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + dictionaryID := vars["id"] + + var req DictionaryRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Name 
== "" { + sendErrorWithCORS(w, "Имя словаря обязательно", http.StatusBadRequest) + return + } + + result, err := a.DB.Exec(` + UPDATE dictionaries + SET name = $1 + WHERE id = $2 AND user_id = $3 + `, req.Name, dictionaryID, userID) + + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + if rowsAffected == 0 { + http.Error(w, "Dictionary not found", http.StatusNotFound) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Dictionary updated successfully", + }) +} + +func (a *App) deleteDictionaryHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + dictionaryID := vars["id"] + + // Prevent deletion of default dictionary (id = 0) + if dictionaryID == "0" { + sendErrorWithCORS(w, "Cannot delete default dictionary", http.StatusBadRequest) + return + } + + // Verify ownership + var ownerID int + err := a.DB.QueryRow("SELECT user_id FROM dictionaries WHERE id = $1", dictionaryID).Scan(&ownerID) + if err != nil || ownerID != userID { + sendErrorWithCORS(w, "Dictionary not found", http.StatusNotFound) + return + } + + tx, err := a.DB.Begin() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Delete all words from this dictionary (progress will be deleted automatically due to CASCADE) + _, err = tx.Exec(` + DELETE FROM words + WHERE dictionary_id = $1 + `, dictionaryID) + if err != nil { + sendErrorWithCORS(w, err.Error(), 
http.StatusInternalServerError) + return + } + + // Delete all config-dictionary associations (will be deleted automatically due to CASCADE, but doing explicitly for clarity) + _, err = tx.Exec(` + DELETE FROM config_dictionaries + WHERE dictionary_id = $1 + `, dictionaryID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Delete the dictionary + result, err := tx.Exec("DELETE FROM dictionaries WHERE id = $1", dictionaryID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if rowsAffected == 0 { + sendErrorWithCORS(w, "Dictionary not found", http.StatusNotFound) + return + } + + if err := tx.Commit(); err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Dictionary deleted successfully. 
All words and configuration associations have been deleted.", + }) +} + +func (a *App) getConfigDictionariesHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + vars := mux.Vars(r) + configID := vars["id"] + + query := ` + SELECT dictionary_id + FROM config_dictionaries + WHERE config_id = $1 + ORDER BY dictionary_id + ` + + rows, err := a.DB.Query(query, configID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + dictionaryIDs := make([]int, 0) + for rows.Next() { + var dictID int + err := rows.Scan(&dictID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + dictionaryIDs = append(dictionaryIDs, dictID) + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "dictionary_ids": dictionaryIDs, + }) +} + +func (a *App) getTestConfigsAndDictionariesHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + log.Printf("getTestConfigsAndDictionariesHandler: Unauthorized request") + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + log.Printf("getTestConfigsAndDictionariesHandler called, user: %d", userID) + + // Get configs + configsQuery := ` + SELECT id, words_count, max_cards + FROM configs + WHERE user_id = $1 + ORDER BY id + ` + + configsRows, err := a.DB.Query(configsQuery, userID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer configsRows.Close() + + configs := 
make([]Config, 0)
	for configsRows.Next() {
		var config Config
		var maxCards sql.NullInt64 // max_cards is nullable in the schema
		err := configsRows.Scan(
			&config.ID,
			&config.WordsCount,
			&maxCards,
		)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
		if maxCards.Valid {
			maxCardsVal := int(maxCards.Int64)
			config.MaxCards = &maxCardsVal
		}
		configs = append(configs, config)
	}
	// Fix: report iteration errors instead of returning a truncated list.
	if err := configsRows.Err(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Get dictionaries.
	dictsQuery := `
		SELECT
			d.id,
			d.name,
			COALESCE(COUNT(w.id), 0) as words_count
		FROM dictionaries d
		LEFT JOIN words w ON d.id = w.dictionary_id
		WHERE d.user_id = $1
		GROUP BY d.id, d.name
		ORDER BY d.id
	`

	dictsRows, err := a.DB.Query(dictsQuery, userID)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer dictsRows.Close()

	dictionaries := make([]Dictionary, 0)
	for dictsRows.Next() {
		var dict Dictionary
		err := dictsRows.Scan(
			&dict.ID,
			&dict.Name,
			&dict.WordsCount,
		)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
		dictionaries = append(dictionaries, dict)
	}
	if err := dictsRows.Err(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	response := TestConfigsAndDictionariesResponse{
		Configs:      configs,
		Dictionaries: dictionaries,
	}

	w.Header().Set("Content-Type", "application/json")
	w.Header().Set("Access-Control-Allow-Origin", "*")
	json.NewEncoder(w).Encode(response)
}

// addConfigHandler creates a test configuration (and its dictionary
// associations) for the authenticated user in one transaction.
func (a *App) addConfigHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	var req ConfigRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusBadRequest)
		return
	}

	if req.WordsCount <= 0 {
		sendErrorWithCORS(w, "Количество слов должно быть больше 0", http.StatusBadRequest)
		return
	}

	tx, err := a.DB.Begin()
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer tx.Rollback()

	var id int
	err = tx.QueryRow(`
		INSERT INTO configs (words_count, max_cards, user_id)
		VALUES ($1, $2, $3)
		RETURNING id
	`, req.WordsCount, req.MaxCards, userID).Scan(&id)

	if err != nil {
		// Fix: consistent JSON+CORS error responses (was plain http.Error).
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Insert dictionary associations if provided.
	if len(req.DictionaryIDs) > 0 {
		stmt, err := tx.Prepare(`
			INSERT INTO config_dictionaries (config_id, dictionary_id)
			VALUES ($1, $2)
		`)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
		defer stmt.Close()

		for _, dictID := range req.DictionaryIDs {
			_, err := stmt.Exec(id, dictID)
			if err != nil {
				sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
				return
			}
		}
	}

	if err := tx.Commit(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	w.Header().Set("Access-Control-Allow-Origin", "*")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"message": "Config created successfully",
		"id":      id,
	})
}

// updateConfigHandler updates a config's fields and replaces its dictionary
// associations in one transaction.
func (a *App) updateConfigHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	vars := mux.Vars(r)
	configID := vars["id"]

	// Verify ownership. NOTE(review): this check runs outside the transaction
	// below, and the UPDATE itself does not filter on user_id — consider
	// adding "AND user_id = $N" to the UPDATE to close the race window.
	var ownerID int
	err := a.DB.QueryRow("SELECT user_id FROM configs WHERE id = $1", configID).Scan(&ownerID)
	if err != nil || ownerID != userID {
		sendErrorWithCORS(w, "Config not found", http.StatusNotFound)
		return
	}

	var req ConfigRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusBadRequest)
		return
	}

	if req.WordsCount <= 0 {
		sendErrorWithCORS(w, "Количество слов должно быть больше 0", http.StatusBadRequest)
		return
	}

	tx, err := a.DB.Begin()
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer tx.Rollback()

	result, err := tx.Exec(`
		UPDATE configs
		SET words_count = $1, max_cards = $2
		WHERE id = $3
	`, req.WordsCount, req.MaxCards, configID)

	if err != nil {
		// Fix: consistent JSON+CORS error responses (was plain http.Error).
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	rowsAffected, err := result.RowsAffected()
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if rowsAffected == 0 {
		sendErrorWithCORS(w, "Config not found", http.StatusNotFound)
		return
	}

	// Delete existing dictionary associations.
	_, err = tx.Exec("DELETE FROM config_dictionaries WHERE config_id = $1", configID)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Insert new dictionary associations if provided.
	if len(req.DictionaryIDs) > 0 {
		stmt, err := tx.Prepare(`
			INSERT INTO config_dictionaries (config_id, dictionary_id)
			VALUES ($1, $2)
		`)
		if err != nil {
			sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
			return
		}
		defer stmt.Close()

		for _, dictID := range req.DictionaryIDs {
			_, err := stmt.Exec(configID, dictID)
			if err != nil {
				sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
				return
			}
		}
	}

	if err := tx.Commit(); err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	w.Header().Set("Access-Control-Allow-Origin", "*")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"message": "Config updated successfully",
	})
}

// deleteConfigHandler deletes a config owned by the authenticated user;
// ownership is enforced directly in the DELETE predicate.
func (a *App) deleteConfigHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	vars := mux.Vars(r)
	configID := vars["id"]

	result, err := a.DB.Exec("DELETE FROM configs WHERE id = $1 AND user_id = $2", configID, userID)
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	rowsAffected, err := result.RowsAffected()
	if err != nil {
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if rowsAffected == 0 {
		sendErrorWithCORS(w, "Config not found", http.StatusNotFound)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"message": "Config deleted successfully",
	})
}

// getWeeklyStatsHandler aggregates the user's weekly scores
// (definition continues past this chunk).
func (a *App) getWeeklyStatsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	log.Printf("getWeeklyStatsHandler called from %s, path: %s, user: %d", r.RemoteAddr, r.URL.Path, userID)

	// Fetch the current week's data directly from nodes.
	currentWeekScores, err := a.getCurrentWeekScores(userID)
	if err != nil {
		log.Printf("Error getting current week scores: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Fetch today's score increments.
	todayScores, err := a.getTodayScores(userID)
	if err != nil {
		log.Printf("Error getting today scores: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	query := `
		SELECT
			p.id AS project_id,
			p.name AS project_name,
			-- Используем COALESCE для установки total_score в 0.0000, если нет данных в weekly_report_mv
			COALESCE(wr.total_score, 0.0000) AS total_score,
			wg.min_goal_score,
			wg.max_goal_score,
			wg.priority
AS priority, + p.color + FROM + projects p + LEFT JOIN + weekly_goals wg ON wg.project_id = p.id + AND wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + LEFT JOIN + weekly_report_mv wr + ON p.id = wr.project_id + AND EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER = wr.report_year + AND EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER = wr.report_week + WHERE + p.deleted = FALSE AND p.user_id = $1 + ORDER BY + total_score DESC + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + log.Printf("Error querying weekly stats: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + projects := make([]WeeklyProjectStats, 0) + // Группы для расчета среднего по priority + groups := make(map[int][]float64) + + for rows.Next() { + var project WeeklyProjectStats + var projectID int + var minGoalScore sql.NullFloat64 + var maxGoalScore sql.NullFloat64 + var priority sql.NullInt64 + + err := rows.Scan( + &projectID, + &project.ProjectName, + &project.TotalScore, + &minGoalScore, + &maxGoalScore, + &priority, + &project.Color, + ) + if err != nil { + log.Printf("Error scanning weekly stats row: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Объединяем данные: если есть данные текущей недели, используем их вместо MV + if currentWeekScore, exists := currentWeekScores[projectID]; exists { + project.TotalScore = currentWeekScore + } + + // Добавляем сегодняшний прирост + if todayScore, exists := todayScores[projectID]; exists && todayScore != 0 { + project.TodayChange = &todayScore + } + + if minGoalScore.Valid { + project.MinGoalScore = minGoalScore.Float64 + } else { + project.MinGoalScore = 0 + } + + if maxGoalScore.Valid { + maxGoalVal := maxGoalScore.Float64 + project.MaxGoalScore = &maxGoalVal + } + + var priorityVal int + if priority.Valid { + priorityVal = int(priority.Int64) + project.Priority 
= &priorityVal + } + + // Расчет calculated_score по формуле из n8n + totalScore := project.TotalScore + minGoalScoreVal := project.MinGoalScore + var maxGoalScoreVal float64 + if project.MaxGoalScore != nil { + maxGoalScoreVal = *project.MaxGoalScore + } + + // Параметры бонуса в зависимости от priority + var extraBonusLimit float64 = 20 + if priorityVal == 1 { + extraBonusLimit = 50 + } else if priorityVal == 2 { + extraBonusLimit = 35 + } + + // Расчет calculated_score по логике фронтенда + // min_goal -> 100%, max_goal -> 150%/135%/120% в зависимости от приоритета + var resultScore float64 + if minGoalScoreVal <= 0 { + // Если нет minGoal, возвращаем 0 (или можно относительно maxGoal, но обычно 0) + resultScore = 0 + } else if totalScore < minGoalScoreVal { + // До достижения minGoal растем линейно от 0 до 100% + resultScore = (totalScore / minGoalScoreVal) * 100.0 + } else { + // Достигнут minGoal - базовый прогресс = 100% + baseProgress := 100.0 + + // Если maxGoal задан корректно и больше minGoal, добавляем экстра прогресс + if maxGoalScoreVal > minGoalScoreVal { + extraRange := maxGoalScoreVal - minGoalScoreVal + excess := min(totalScore, maxGoalScoreVal) - minGoalScoreVal + extraRatio := min(1.0, max(0.0, excess/extraRange)) + extraProgress := extraRatio * extraBonusLimit + resultScore = min(100.0+extraBonusLimit, baseProgress+extraProgress) + } else { + // Если maxGoal не задан или некорректен, просто 100% + resultScore = baseProgress + } + } + + project.CalculatedScore = roundToTwoDecimals(resultScore) + + // Группировка для итогового расчета + // Проекты с minGoal = 0 или null не учитываются в общем проценте выполнения + if minGoalScoreVal > 0 { + if _, exists := groups[priorityVal]; !exists { + groups[priorityVal] = make([]float64, 0) + } + groups[priorityVal] = append(groups[priorityVal], project.CalculatedScore) + } + + projects = append(projects, project) + } + + // Вычисляем проценты для каждой группы + groupsProgress := 
calculateGroupsProgress(groups) + + // Вычисляем общий процент выполнения + total := calculateOverallProgress(groupsProgress, groups) + + response := WeeklyStatsResponse{ + Total: total, + GroupProgress1: groupsProgress.Group1, + GroupProgress2: groupsProgress.Group2, + GroupProgress0: groupsProgress.Group0, + Projects: projects, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// runMigrations applies database migrations using golang-migrate +func (a *App) runMigrations() error { + migrationsPath := "migrations" + if _, err := os.Stat(migrationsPath); os.IsNotExist(err) { + // Try alternative path for Docker + migrationsPath = "/migrations" + if _, err := os.Stat(migrationsPath); os.IsNotExist(err) { + return fmt.Errorf("migrations directory not found") + } + } + + // Get database connection string from environment + dbHost := getEnv("DB_HOST", "localhost") + dbPort := getEnv("DB_PORT", "5432") + dbUser := getEnv("DB_USER", "playeng") + dbPassword := getEnv("DB_PASSWORD", "playeng") + dbName := getEnv("DB_NAME", "playeng") + + // Build database URL with proper encoding for special characters in password + // url.UserPassword properly encodes special characters like ^, @, etc. 
+ userInfo := url.UserPassword(dbUser, dbPassword) + databaseURL := fmt.Sprintf("postgres://%s@%s:%s/%s?sslmode=disable", + userInfo.String(), dbHost, dbPort, dbName) + + // Create migrate instance + m, err := migrate.New( + fmt.Sprintf("file://%s", migrationsPath), + databaseURL, + ) + if err != nil { + return fmt.Errorf("failed to initialize migrations: %w", err) + } + defer m.Close() + + // Check if schema_migrations table exists and its state + var schemaExists bool + var currentVersion int64 + var isDirty bool + err = a.DB.QueryRow(` + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name = 'schema_migrations' + ) + `).Scan(&schemaExists) + if err != nil { + log.Printf("Warning: Could not check schema_migrations table: %v", err) + } + + // If schema_migrations exists, check its state + if schemaExists { + err = a.DB.QueryRow(` + SELECT version, dirty FROM schema_migrations LIMIT 1 + `).Scan(¤tVersion, &isDirty) + if err != nil { + log.Printf("Warning: Could not read schema_migrations: %v", err) + schemaExists = false // Treat as if it doesn't exist + } else if isDirty { + // Database is in dirty state - fix it + log.Println("Detected dirty migration state, fixing...") + _, err = a.DB.Exec(` + UPDATE schema_migrations SET dirty = false WHERE version = $1 + `, currentVersion) + if err != nil { + return fmt.Errorf("failed to fix dirty migration state: %w", err) + } + log.Printf("Fixed dirty migration state for version %d", currentVersion) + // Continue to apply migrations normally + } + } + + // If schema_migrations doesn't exist, check if database has existing tables + // This handles the case when an old dump was restored + if !schemaExists { + var tableCount int + err = a.DB.QueryRow(` + SELECT COUNT(*) FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name NOT IN ('schema_migrations') + `).Scan(&tableCount) + if err == nil && tableCount > 0 { + // Database has existing tables but no 
schema_migrations + // This means an old dump was restored - set version to 1 without applying migration + log.Println("Detected existing database schema without schema_migrations table") + log.Println("Setting migration version to 1 (baseline) without applying migration") + + // Create schema_migrations table and set version to 1 + _, err = a.DB.Exec(` + CREATE TABLE IF NOT EXISTS schema_migrations ( + version bigint NOT NULL PRIMARY KEY, + dirty boolean NOT NULL + ) + `) + if err != nil { + return fmt.Errorf("failed to create schema_migrations table: %w", err) + } + + _, err = a.DB.Exec(` + INSERT INTO schema_migrations (version, dirty) + VALUES (1, false) + ON CONFLICT (version) DO UPDATE SET dirty = false + `) + if err != nil { + return fmt.Errorf("failed to set migration version: %w", err) + } + + log.Println("Migration version set to 1 (baseline) for existing database") + return nil + } + } + + // Apply migrations normally + if err := m.Up(); err != nil { + if err == migrate.ErrNoChange { + log.Println("Database is up to date, no migrations to apply") + return nil + } + return fmt.Errorf("failed to apply migrations: %w", err) + } + + log.Println("Database migrations applied successfully") + return nil +} + +func (a *App) initDB() error { + // This function is kept for backward compatibility but does nothing + // Database schema is now managed by golang-migrate + return nil +} + +func (a *App) initAuthDB() error { + // Clean up expired refresh tokens (only those with expiration date set) + // This is business logic that should run on startup + a.DB.Exec("DELETE FROM refresh_tokens WHERE expires_at IS NOT NULL AND expires_at < NOW()") + return nil +} + +func (a *App) initPlayLifeDB() error { + // This function is kept for backward compatibility but does nothing + // Database schema is now managed by golang-migrate + return nil +} + +// DEPRECATED: All migration functions below are no longer used +// Database migrations are now handled by golang-migrate +// 
These functions are kept for reference only and will be removed in future versions +// +// NOTE: Functions applyMigration012-029 have been removed as they are no longer needed. +// All database schema is now managed by golang-migrate baseline migration. + +// DEPRECATED: initPlayLifeDBOld is no longer used - schema is managed by golang-migrate +func (a *App) initPlayLifeDBOld() error { + // This function is kept for backward compatibility but does nothing + // Database schema is now managed by golang-migrate + return nil +} + +// startWeeklyGoalsScheduler запускает планировщик для автоматической фиксации целей на неделю +// каждый понедельник в 6:00 утра в указанном часовом поясе +func (a *App) startWeeklyGoalsScheduler() { + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + log.Printf("Loading timezone for weekly goals scheduler: '%s'", timezoneStr) + + // Загружаем часовой пояс + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. 
Using UTC instead.", timezoneStr, err) + log.Printf("Note: Timezone must be in IANA format (e.g., 'Europe/Moscow', 'America/New_York'), not 'UTC+3'") + loc = time.UTC + timezoneStr = "UTC" + } else { + log.Printf("Weekly goals scheduler timezone set to: %s", timezoneStr) + } + + // Логируем текущее время в указанном часовом поясе для проверки + now := time.Now().In(loc) + log.Printf("Current time in scheduler timezone (%s): %s", timezoneStr, now.Format("2006-01-02 15:04:05 MST")) + log.Printf("Next weekly goals setup will be on Monday at: 06:00 %s (cron: '0 6 * * 1')", timezoneStr) + + // Создаем планировщик с указанным часовым поясом + c := cron.New(cron.WithLocation(loc)) + + // Добавляем задачу: каждый понедельник в 6:00 утра + // Cron выражение: "0 6 * * 1" означает: минута=0, час=6, любой день месяца, любой месяц, понедельник (1) + _, err = c.AddFunc("0 6 * * 1", func() { + now := time.Now().In(loc) + log.Printf("Scheduled task: Refreshing materialized views and setting up weekly goals (timezone: %s, local time: %s)", timezoneStr, now.Format("2006-01-02 15:04:05 MST")) + + // Сначала обновляем MV (чтобы в ней были данные прошлой недели) + _, err := a.DB.Exec("REFRESH MATERIALIZED VIEW weekly_report_mv") + if err != nil { + log.Printf("Error refreshing materialized view: %v", err) + } else { + log.Printf("Materialized view refreshed successfully") + } + + // Обновляем projects_median_mv после обновления weekly_report_mv + _, err = a.DB.Exec("REFRESH MATERIALIZED VIEW projects_median_mv") + if err != nil { + log.Printf("Error refreshing projects_median_mv: %v", err) + } else { + log.Printf("Projects median materialized view refreshed successfully") + } + + // Обновляем project_score_sample_mv + _, err = a.DB.Exec("REFRESH MATERIALIZED VIEW project_score_sample_mv") + if err != nil { + log.Printf("Error refreshing project_score_sample_mv: %v", err) + } else { + log.Printf("Project score sample materialized view refreshed successfully") + } + + // Затем 
настраиваем цели на новую неделю + if err := a.setupWeeklyGoals(); err != nil { + log.Printf("Error in scheduled weekly goals setup: %v", err) + } + }) + if err != nil { + log.Printf("Warning: Failed to add weekly goals scheduler: %v", err) + return + } + + // Запускаем планировщик + c.Start() + log.Println("Weekly goals scheduler started") +} + +// getCurrentWeekScores получает данные текущей недели напрямую из таблицы nodes для конкретного пользователя +// Возвращает map[project_id]total_score для текущей недели +func (a *App) getCurrentWeekScores(userID int) (map[int]float64, error) { + query := ` + SELECT + n.project_id, + COALESCE(SUM(n.score), 0) AS total_score + FROM nodes n + JOIN projects p ON n.project_id = p.id + WHERE + p.deleted = FALSE + AND p.user_id = $1 + AND n.user_id = $1 + AND EXTRACT(ISOYEAR FROM n.created_date)::INTEGER = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND EXTRACT(WEEK FROM n.created_date)::INTEGER = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + GROUP BY n.project_id + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + log.Printf("Error querying current week scores: %v", err) + return nil, fmt.Errorf("error querying current week scores: %w", err) + } + defer rows.Close() + + scores := make(map[int]float64) + for rows.Next() { + var projectID int + var totalScore float64 + if err := rows.Scan(&projectID, &totalScore); err != nil { + log.Printf("Error scanning current week scores row: %v", err) + return nil, fmt.Errorf("error scanning current week scores row: %w", err) + } + scores[projectID] = totalScore + } + + return scores, nil +} + +// getTodayScores получает сумму score всех нод, созданных сегодня для конкретного пользователя +// Возвращает map[project_id]today_score для сегодняшнего дня +func (a *App) getTodayScores(userID int) (map[int]float64, error) { + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + loc, err := time.LoadLocation(timezoneStr) + 
if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. Using UTC instead.", timezoneStr, err) + loc = time.UTC + timezoneStr = "UTC" + } + + // Вычисляем текущую дату в нужном часовом поясе + now := time.Now().In(loc) + todayStart := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, loc) + todayEnd := todayStart.Add(24 * time.Hour) + + query := ` + SELECT + n.project_id, + COALESCE(SUM(n.score), 0) AS today_score + FROM nodes n + JOIN projects p ON n.project_id = p.id + WHERE + p.deleted = FALSE + AND p.user_id = $1 + AND n.user_id = $1 + AND n.created_date >= $2 + AND n.created_date < $3 + GROUP BY n.project_id + ` + + rows, err := a.DB.Query(query, userID, todayStart, todayEnd) + if err != nil { + log.Printf("Error querying today scores: %v", err) + return nil, fmt.Errorf("error querying today scores: %w", err) + } + defer rows.Close() + + scores := make(map[int]float64) + for rows.Next() { + var projectID int + var todayScore float64 + if err := rows.Scan(&projectID, &todayScore); err != nil { + log.Printf("Error scanning today scores row: %v", err) + return nil, fmt.Errorf("error scanning today scores row: %w", err) + } + scores[projectID] = todayScore + } + + return scores, nil +} + +// getTodayScoresAllUsers получает сумму score всех нод, созданных сегодня для всех пользователей +// Возвращает map[project_id]today_score для сегодняшнего дня +func (a *App) getTodayScoresAllUsers() (map[int]float64, error) { + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. 
Using UTC instead.", timezoneStr, err) + loc = time.UTC + timezoneStr = "UTC" + } + + // Вычисляем текущую дату в нужном часовом поясе + now := time.Now().In(loc) + todayStart := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, loc) + todayEnd := todayStart.Add(24 * time.Hour) + + query := ` + SELECT + n.project_id, + COALESCE(SUM(n.score), 0) AS today_score + FROM nodes n + JOIN projects p ON n.project_id = p.id + WHERE + p.deleted = FALSE + AND n.created_date >= $1 + AND n.created_date < $2 + GROUP BY n.project_id + ` + + rows, err := a.DB.Query(query, todayStart, todayEnd) + if err != nil { + log.Printf("Error querying today scores for all users: %v", err) + return nil, fmt.Errorf("error querying today scores for all users: %w", err) + } + defer rows.Close() + + scores := make(map[int]float64) + for rows.Next() { + var projectID int + var todayScore float64 + if err := rows.Scan(&projectID, &todayScore); err != nil { + log.Printf("Error scanning today scores row: %v", err) + return nil, fmt.Errorf("error scanning today scores row: %w", err) + } + scores[projectID] = todayScore + } + + return scores, nil +} + +// getCurrentWeekScoresAllUsers получает данные текущей недели для всех пользователей +// Возвращает map[project_id]total_score для текущей недели +func (a *App) getCurrentWeekScoresAllUsers() (map[int]float64, error) { + query := ` + SELECT + n.project_id, + COALESCE(SUM(n.score), 0) AS total_score + FROM nodes n + JOIN projects p ON n.project_id = p.id + WHERE + p.deleted = FALSE + AND EXTRACT(ISOYEAR FROM n.created_date)::INTEGER = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND EXTRACT(WEEK FROM n.created_date)::INTEGER = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + GROUP BY n.project_id + ` + + rows, err := a.DB.Query(query) + if err != nil { + log.Printf("Error querying current week scores for all users: %v", err) + return nil, fmt.Errorf("error querying current week scores for all users: %w", err) + } + defer rows.Close() + + scores := 
make(map[int]float64) + for rows.Next() { + var projectID int + var totalScore float64 + if err := rows.Scan(&projectID, &totalScore); err != nil { + log.Printf("Error scanning current week scores row: %v", err) + return nil, fmt.Errorf("error scanning current week scores row: %w", err) + } + scores[projectID] = totalScore + } + + return scores, nil +} + +// getWeeklyStatsData получает данные о проектах и их целях (без HTTP обработки) +func (a *App) getWeeklyStatsData() (*WeeklyStatsResponse, error) { + // Получаем данные текущей недели для всех пользователей + currentWeekScores, err := a.getCurrentWeekScoresAllUsers() + if err != nil { + log.Printf("Error getting current week scores: %v", err) + return nil, fmt.Errorf("error getting current week scores: %w", err) + } + + // Получаем сегодняшние приросты для всех пользователей + todayScores, err := a.getTodayScoresAllUsers() + if err != nil { + log.Printf("Error getting today scores: %v", err) + return nil, fmt.Errorf("error getting today scores: %w", err) + } + + query := ` + SELECT + p.id AS project_id, + p.name AS project_name, + -- Используем COALESCE для установки total_score в 0.0000, если нет данных в weekly_report_mv + COALESCE(wr.total_score, 0.0000) AS total_score, + wg.min_goal_score, + wg.max_goal_score, + wg.priority AS priority, + p.color + FROM + projects p + LEFT JOIN + weekly_goals wg ON wg.project_id = p.id + AND wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + LEFT JOIN + weekly_report_mv wr + ON p.id = wr.project_id + AND EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER = wr.report_year + AND EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER = wr.report_week + WHERE + p.deleted = FALSE + ORDER BY + total_score DESC + ` + + rows, err := a.DB.Query(query) + if err != nil { + log.Printf("Error querying weekly stats: %v", err) + return nil, fmt.Errorf("error querying weekly stats: %w", err) + } + defer rows.Close() + + projects := 
make([]WeeklyProjectStats, 0) + // Группы для расчета среднего по priority + groups := make(map[int][]float64) + + for rows.Next() { + var project WeeklyProjectStats + var projectID int + var minGoalScore sql.NullFloat64 + var maxGoalScore sql.NullFloat64 + var priority sql.NullInt64 + + err := rows.Scan( + &projectID, + &project.ProjectName, + &project.TotalScore, + &minGoalScore, + &maxGoalScore, + &priority, + ) + if err != nil { + log.Printf("Error scanning weekly stats row: %v", err) + return nil, fmt.Errorf("error scanning weekly stats row: %w", err) + } + + // Объединяем данные: если есть данные текущей недели, используем их вместо MV + if currentWeekScore, exists := currentWeekScores[projectID]; exists { + project.TotalScore = currentWeekScore + } + + // Добавляем сегодняшний прирост + if todayScore, exists := todayScores[projectID]; exists && todayScore != 0 { + project.TodayChange = &todayScore + } + + if minGoalScore.Valid { + project.MinGoalScore = minGoalScore.Float64 + } else { + project.MinGoalScore = 0 + } + + if maxGoalScore.Valid { + maxGoalVal := maxGoalScore.Float64 + project.MaxGoalScore = &maxGoalVal + } + + var priorityVal int + if priority.Valid { + priorityVal = int(priority.Int64) + project.Priority = &priorityVal + } + + // Расчет calculated_score по формуле из n8n + totalScore := project.TotalScore + minGoalScoreVal := project.MinGoalScore + var maxGoalScoreVal float64 + if project.MaxGoalScore != nil { + maxGoalScoreVal = *project.MaxGoalScore + } + + // Параметры бонуса в зависимости от priority + var extraBonusLimit float64 = 20 + if priorityVal == 1 { + extraBonusLimit = 50 + } else if priorityVal == 2 { + extraBonusLimit = 35 + } + + // Расчет calculated_score по логике фронтенда + // min_goal -> 100%, max_goal -> 150%/135%/120% в зависимости от приоритета + var resultScore float64 + if minGoalScoreVal <= 0 { + // Если нет minGoal, возвращаем 0 (или можно относительно maxGoal, но обычно 0) + resultScore = 0 + } else if totalScore < 
minGoalScoreVal { + // До достижения minGoal растем линейно от 0 до 100% + resultScore = (totalScore / minGoalScoreVal) * 100.0 + } else { + // Достигнут minGoal - базовый прогресс = 100% + baseProgress := 100.0 + + // Если maxGoal задан корректно и больше minGoal, добавляем экстра прогресс + if maxGoalScoreVal > minGoalScoreVal { + extraRange := maxGoalScoreVal - minGoalScoreVal + excess := min(totalScore, maxGoalScoreVal) - minGoalScoreVal + extraRatio := min(1.0, max(0.0, excess/extraRange)) + extraProgress := extraRatio * extraBonusLimit + resultScore = min(100.0+extraBonusLimit, baseProgress+extraProgress) + } else { + // Если maxGoal не задан или некорректен, просто 100% + resultScore = baseProgress + } + } + + project.CalculatedScore = roundToTwoDecimals(resultScore) + + // Группировка для итогового расчета + // Проекты с minGoal = 0 или null не учитываются в общем проценте выполнения + if minGoalScoreVal > 0 { + if _, exists := groups[priorityVal]; !exists { + groups[priorityVal] = make([]float64, 0) + } + groups[priorityVal] = append(groups[priorityVal], project.CalculatedScore) + } + + projects = append(projects, project) + } + + // Вычисляем проценты для каждой группы + groupsProgress := calculateGroupsProgress(groups) + + // Вычисляем общий процент выполнения + total := calculateOverallProgress(groupsProgress, groups) + + response := WeeklyStatsResponse{ + Total: total, + GroupProgress1: groupsProgress.Group1, + GroupProgress2: groupsProgress.Group2, + GroupProgress0: groupsProgress.Group0, + Projects: projects, + } + + return &response, nil +} + +// getWeeklyStatsDataForUser получает данные о проектах для конкретного пользователя +func (a *App) getWeeklyStatsDataForUser(userID int) (*WeeklyStatsResponse, error) { + // Получаем данные текущей недели напрямую из nodes + currentWeekScores, err := a.getCurrentWeekScores(userID) + if err != nil { + log.Printf("Error getting current week scores: %v", err) + return nil, fmt.Errorf("error getting current week 
scores: %w", err) + } + + // Получаем сегодняшние приросты + todayScores, err := a.getTodayScores(userID) + if err != nil { + log.Printf("Error getting today scores: %v", err) + return nil, fmt.Errorf("error getting today scores: %w", err) + } + + query := ` + SELECT + p.id AS project_id, + p.name AS project_name, + COALESCE(wr.total_score, 0.0000) AS total_score, + wg.min_goal_score, + wg.max_goal_score, + wg.priority AS priority, + p.color + FROM + projects p + LEFT JOIN + weekly_goals wg ON wg.project_id = p.id + AND wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + LEFT JOIN + weekly_report_mv wr + ON p.id = wr.project_id + AND EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER = wr.report_year + AND EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER = wr.report_week + WHERE + p.deleted = FALSE AND p.user_id = $1 + ORDER BY + total_score DESC + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + return nil, fmt.Errorf("error querying weekly stats: %w", err) + } + defer rows.Close() + + projects := make([]WeeklyProjectStats, 0) + groups := make(map[int][]float64) + + for rows.Next() { + var project WeeklyProjectStats + var projectID int + var minGoalScore sql.NullFloat64 + var maxGoalScore sql.NullFloat64 + var priority sql.NullInt64 + + err := rows.Scan( + &projectID, + &project.ProjectName, + &project.TotalScore, + &minGoalScore, + &maxGoalScore, + &priority, + &project.Color, + ) + if err != nil { + return nil, fmt.Errorf("error scanning weekly stats row: %w", err) + } + + // Объединяем данные: если есть данные текущей недели, используем их вместо MV + if currentWeekScore, exists := currentWeekScores[projectID]; exists { + project.TotalScore = currentWeekScore + } + + // Добавляем сегодняшний прирост + if todayScore, exists := todayScores[projectID]; exists && todayScore != 0 { + project.TodayChange = &todayScore + } + + if minGoalScore.Valid { + project.MinGoalScore = minGoalScore.Float64 + 
} else { + project.MinGoalScore = 0 + } + + if maxGoalScore.Valid { + maxGoalVal := maxGoalScore.Float64 + project.MaxGoalScore = &maxGoalVal + } + + var priorityVal int + if priority.Valid { + priorityVal = int(priority.Int64) + project.Priority = &priorityVal + } + + // Расчет calculated_score + totalScore := project.TotalScore + minGoalScoreVal := project.MinGoalScore + var maxGoalScoreVal float64 + if project.MaxGoalScore != nil { + maxGoalScoreVal = *project.MaxGoalScore + } + + // Параметры бонуса в зависимости от priority + var extraBonusLimit float64 = 20 + if priorityVal == 1 { + extraBonusLimit = 50 + } else if priorityVal == 2 { + extraBonusLimit = 35 + } + + // Расчет calculated_score по логике фронтенда + // min_goal -> 100%, max_goal -> 150%/135%/120% в зависимости от приоритета + var resultScore float64 + if minGoalScoreVal <= 0 { + // Если нет minGoal, возвращаем 0 (или можно относительно maxGoal, но обычно 0) + resultScore = 0 + } else if totalScore < minGoalScoreVal { + // До достижения minGoal растем линейно от 0 до 100% + resultScore = (totalScore / minGoalScoreVal) * 100.0 + } else { + // Достигнут minGoal - базовый прогресс = 100% + baseProgress := 100.0 + + // Если maxGoal задан корректно и больше minGoal, добавляем экстра прогресс + if maxGoalScoreVal > minGoalScoreVal { + extraRange := maxGoalScoreVal - minGoalScoreVal + excess := min(totalScore, maxGoalScoreVal) - minGoalScoreVal + extraRatio := min(1.0, max(0.0, excess/extraRange)) + extraProgress := extraRatio * extraBonusLimit + resultScore = min(100.0+extraBonusLimit, baseProgress+extraProgress) + } else { + // Если maxGoal не задан или некорректен, просто 100% + resultScore = baseProgress + } + } + + project.CalculatedScore = roundToTwoDecimals(resultScore) + projects = append(projects, project) + + // Группировка для итогового расчета + // Проекты с minGoal = 0 или null не учитываются в общем проценте выполнения + if minGoalScoreVal > 0 { + if _, exists := groups[priorityVal]; 
!exists { + groups[priorityVal] = make([]float64, 0) + } + groups[priorityVal] = append(groups[priorityVal], project.CalculatedScore) + } + } + + // Вычисляем проценты для каждой группы + groupsProgress := calculateGroupsProgress(groups) + + // Вычисляем общий процент выполнения + total := calculateOverallProgress(groupsProgress, groups) + + response := WeeklyStatsResponse{ + Total: total, + GroupProgress1: groupsProgress.Group1, + GroupProgress2: groupsProgress.Group2, + GroupProgress0: groupsProgress.Group0, + Projects: projects, + } + + return &response, nil +} + +// formatDailyReport форматирует данные проектов в сообщение для Telegram +// Формат аналогичен JS коду из n8n +func (a *App) formatDailyReport(data *WeeklyStatsResponse) string { + if data == nil || len(data.Projects) == 0 { + return "" + } + + // Заголовок сообщения + markdownMessage := "*📈 Отчет:*\n\n" + + // Простой вывод списка проектов + for _, item := range data.Projects { + projectName := item.ProjectName + if projectName == "" { + projectName = "Без названия" + } + + actualScore := item.TotalScore + minGoal := item.MinGoalScore + var maxGoal float64 + hasMaxGoal := false + if item.MaxGoalScore != nil { + maxGoal = *item.MaxGoalScore + hasMaxGoal = true + } + + // Форматирование Score (+/-) + scoreFormatted := "" + if actualScore >= 0 { + scoreFormatted = fmt.Sprintf("+%.2f", actualScore) + } else { + scoreFormatted = fmt.Sprintf("%.2f", actualScore) + } + + // Форматирование текста целей + // Проверяем, что minGoal валиден (не NaN, как в JS коде: !isNaN(minGoal)) + goalText := "" + if !math.IsNaN(minGoal) { + if hasMaxGoal && !math.IsNaN(maxGoal) { + goalText = fmt.Sprintf(" (Цель: %.1f–%.1f)", minGoal, maxGoal) + } else { + goalText = fmt.Sprintf(" (Цель: мин. 
%.1f)", minGoal) + } + } + + // Собираем строку: Проект: +Score (Цели) + markdownMessage += fmt.Sprintf("*%s*: %s%s\n", projectName, scoreFormatted, goalText) + } + + // Выводим итоговый total из корня JSON + if data.Total != nil { + markdownMessage += "\n---\n" + markdownMessage += fmt.Sprintf("*Общее выполнение целей*: %.1f%%", *data.Total) + } + + return markdownMessage +} + +// sendDailyReport отправляет персональные ежедневные отчеты всем пользователям +func (a *App) sendDailyReport() error { + log.Printf("Scheduled task: Sending daily reports") + + userIDs, err := a.getAllUsersWithTelegram() + if err != nil { + return fmt.Errorf("error getting users: %w", err) + } + + if len(userIDs) == 0 { + log.Printf("No users with Telegram connected, skipping daily report") + return nil + } + + for _, userID := range userIDs { + data, err := a.getWeeklyStatsDataForUser(userID) + if err != nil { + log.Printf("Error getting data for user %d: %v", userID, err) + continue + } + + message := a.formatDailyReport(data) + if message == "" { + continue + } + + if err := a.sendTelegramMessageToUser(userID, message); err != nil { + log.Printf("Error sending daily report to user %d: %v", userID, err) + } else { + log.Printf("Daily report sent to user %d", userID) + } + } + + return nil +} + +// startDailyReportScheduler запускает планировщик для ежедневного отчета +// каждый день в 23:59 в указанном часовом поясе +func (a *App) startDailyReportScheduler() { + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + log.Printf("Loading timezone for daily report scheduler: '%s'", timezoneStr) + + // Загружаем часовой пояс + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. 
Using UTC instead.", timezoneStr, err) + log.Printf("Note: Timezone must be in IANA format (e.g., 'Europe/Moscow', 'America/New_York'), not 'UTC+3'") + loc = time.UTC + timezoneStr = "UTC" + } else { + log.Printf("Daily report scheduler timezone set to: %s", timezoneStr) + } + + // Логируем текущее время в указанном часовом поясе для проверки + now := time.Now().In(loc) + log.Printf("Current time in scheduler timezone (%s): %s", timezoneStr, now.Format("2006-01-02 15:04:05 MST")) + log.Printf("Next daily report will be sent at: 23:59 %s (cron: '59 23 * * *')", timezoneStr) + + // Создаем планировщик с указанным часовым поясом + c := cron.New(cron.WithLocation(loc)) + + // Добавляем задачу: каждый день в 23:59 + // Cron выражение: "59 23 * * *" означает: минута=59, час=23, любой день месяца, любой месяц, любой день недели + _, err = c.AddFunc("59 23 * * *", func() { + now := time.Now().In(loc) + log.Printf("Scheduled task: Sending daily report (timezone: %s, local time: %s)", timezoneStr, now.Format("2006-01-02 15:04:05 MST")) + if err := a.sendDailyReport(); err != nil { + log.Printf("Error in scheduled daily report: %v", err) + } + }) + + if err != nil { + log.Printf("Error adding cron job for daily report: %v", err) + return + } + + // Запускаем планировщик + c.Start() + log.Printf("Daily report scheduler started: every day at 23:59 %s", timezoneStr) + + // Планировщик будет работать в фоновом режиме +} + +// startFitbitSyncScheduler запускает планировщик для синхронизации данных Fitbit каждые 4 часа +func (a *App) startFitbitSyncScheduler() { + // Создаем планировщик в UTC (синхронизация не зависит от часового пояса пользователя) + c := cron.New(cron.WithLocation(time.UTC)) + + // Добавляем задачу: каждые 4 часа + // Cron выражение: "0 */4 * * *" означает: минута=0, каждый 4-й час, любой день месяца, любой месяц, любой день недели + _, err := c.AddFunc("0 */4 * * *", func() { + log.Printf("Scheduled task: Syncing Fitbit data for all users") + if err := 
a.syncFitbitDataForAllUsers(); err != nil { + log.Printf("Error in scheduled Fitbit sync: %v", err) + } + }) + + if err != nil { + log.Printf("Error adding cron job for Fitbit sync: %v", err) + return + } + + // Запускаем планировщик + c.Start() + log.Printf("Fitbit sync scheduler started: every 4 hours") + + // Планировщик будет работать в фоновом режиме +} + +// syncFitbitDataForAllUsers синхронизирует данные Fitbit для всех подключенных пользователей +func (a *App) syncFitbitDataForAllUsers() error { + rows, err := a.DB.Query(` + SELECT user_id FROM fitbit_integrations + WHERE access_token IS NOT NULL + `) + if err != nil { + return fmt.Errorf("failed to get users: %w", err) + } + defer rows.Close() + + var userIDs []int + for rows.Next() { + var userID int + if err := rows.Scan(&userID); err != nil { + log.Printf("Error scanning user_id: %v", err) + continue + } + userIDs = append(userIDs, userID) + } + + if err := rows.Err(); err != nil { + return fmt.Errorf("error iterating users: %w", err) + } + + log.Printf("Syncing Fitbit data for %d users", len(userIDs)) + + // Синхронизируем данные за сегодня для каждого пользователя + today := time.Now() + for _, userID := range userIDs { + if err := a.syncFitbitData(userID, today); err != nil { + log.Printf("Failed to sync Fitbit data for user_id=%d: %v", userID, err) + // Продолжаем синхронизацию для остальных пользователей + continue + } + } + + return nil +} + +// startEndOfDayTaskScheduler запускает планировщик для автовыполнения задач в конце дня +// каждый день в 23:55 в указанном часовом поясе +func (a *App) startEndOfDayTaskScheduler() { + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + log.Printf("Loading timezone for end of day task scheduler: '%s'", timezoneStr) + + // Загружаем часовой пояс + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. 
Using UTC instead.", timezoneStr, err) + log.Printf("Note: Timezone must be in IANA format (e.g., 'Europe/Moscow', 'America/New_York'), not 'UTC+3'") + loc = time.UTC + timezoneStr = "UTC" + } else { + log.Printf("End of day task scheduler timezone set to: %s", timezoneStr) + } + + // Логируем текущее время в указанном часовом поясе для проверки + now := time.Now().In(loc) + log.Printf("Current time in scheduler timezone (%s): %s", timezoneStr, now.Format("2006-01-02 15:04:05 MST")) + log.Printf("Next end of day task execution will be at: 23:55 %s (cron: '55 23 * * *')", timezoneStr) + + // Создаем планировщик с указанным часовым поясом + c := cron.New(cron.WithLocation(loc)) + + // Добавляем задачу: каждый день в 23:55 + // Cron выражение: "55 23 * * *" означает: минута=55, час=23, любой день месяца, любой месяц, любой день недели + _, err = c.AddFunc("55 23 * * *", func() { + now := time.Now().In(loc) + log.Printf("Scheduled task: Executing end of day tasks (timezone: %s, local time: %s)", timezoneStr, now.Format("2006-01-02 15:04:05 MST")) + + // Находим все задачи с auto_complete = true + rows, err := a.DB.Query(` + SELECT task_id, user_id, progression_value + FROM task_drafts + WHERE auto_complete = TRUE + `) + if err != nil { + log.Printf("Error querying tasks for end of day execution: %v", err) + return + } + defer rows.Close() + + tasksToExecute := make([]struct { + TaskID int + UserID int + ProgressionValue *float64 + }, 0) + + for rows.Next() { + var taskID, userID int + var progressionValue sql.NullFloat64 + if err := rows.Scan(&taskID, &userID, &progressionValue); err != nil { + log.Printf("Error scanning task for end of day execution: %v", err) + continue + } + var progValue *float64 + if progressionValue.Valid { + progValue = &progressionValue.Float64 + } + tasksToExecute = append(tasksToExecute, struct { + TaskID int + UserID int + ProgressionValue *float64 + }{TaskID: taskID, UserID: userID, ProgressionValue: progValue}) + } + + // Для каждой задачи 
загружаем подзадачи из драфта и выполняем + for _, taskInfo := range tasksToExecute { + // Загружаем подзадачи из драфта + subtaskRows, err := a.DB.Query(` + SELECT subtask_id + FROM task_draft_subtasks + WHERE task_draft_id = (SELECT id FROM task_drafts WHERE task_id = $1) + `, taskInfo.TaskID) + + childrenTaskIDs := make([]int, 0) + if err == nil { + defer subtaskRows.Close() + for subtaskRows.Next() { + var subtaskID int + if err := subtaskRows.Scan(&subtaskID); err == nil { + childrenTaskIDs = append(childrenTaskIDs, subtaskID) + } + } + } else if err != sql.ErrNoRows { + log.Printf("Error loading subtasks for task %d: %v", taskInfo.TaskID, err) + } + + // Формируем CompleteTaskRequest из данных драфта + req := CompleteTaskRequest{ + Value: taskInfo.ProgressionValue, + ChildrenTaskIDs: childrenTaskIDs, + } + + // Вызываем executeTask - она сама удалит драфт перед выполнением + err = a.executeTask(taskInfo.TaskID, taskInfo.UserID, req) + if err != nil { + log.Printf("Error executing task %d at end of day: %v", taskInfo.TaskID, err) + } else { + log.Printf("Task %d executed successfully at end of day", taskInfo.TaskID) + } + } + }) + + if err != nil { + log.Printf("Error adding cron job for end of day tasks: %v", err) + return + } + + // Запускаем планировщик + c.Start() + log.Printf("End of day task scheduler started: every day at 23:55 %s", timezoneStr) + + // Планировщик будет работать в фоновом режиме +} + +// readVersion читает версию из файла VERSION +func readVersion() string { + // Пробуем разные пути к файлу VERSION + paths := []string{ + "/app/VERSION", // В Docker контейнере + "../VERSION", // При запуске из play-life-backend/ + "../../VERSION", // Альтернативный путь + "VERSION", // Текущая директория + } + + for _, path := range paths { + data, err := os.ReadFile(path) + if err == nil { + version := strings.TrimSpace(string(data)) + if version != "" { + return version + } + } + } + + return "unknown" +} + +func main() { + // Читаем версию приложения 
+ version := readVersion() + log.Printf("========================================") + log.Printf("Play Life Backend v%s", version) + log.Printf("========================================") + + // Загружаем переменные окружения из .env файла (если существует) + // Сначала пробуем загрузить из корня проекта, затем из текущей директории + // Игнорируем ошибку, если файл не найден + godotenv.Load("../.env") // Пробуем корневой .env + godotenv.Load(".env") // Пробуем локальный .env + + dbHost := getEnv("DB_HOST", "localhost") + dbPort := getEnv("DB_PORT", "5432") + dbUser := getEnv("DB_USER", "playeng") + dbPassword := getEnv("DB_PASSWORD", "playeng") + dbName := getEnv("DB_NAME", "playeng") + + // Логируем параметры подключения к БД (без пароля) + log.Printf("Database connection parameters: host=%s port=%s user=%s dbname=%s", dbHost, dbPort, dbUser, dbName) + + dsn := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable", + dbHost, dbPort, dbUser, dbPassword, dbName) + + var db *sql.DB + var err error + + // Retry connection + for i := 0; i < 10; i++ { + db, err = sql.Open("postgres", dsn) + if err == nil { + err = db.Ping() + if err == nil { + break + } + } + if i < 9 { + time.Sleep(2 * time.Second) + } + } + + if err != nil { + log.Fatal("Failed to connect to database:", err) + } + log.Printf("Successfully connected to database: %s@%s:%s/%s", dbUser, dbHost, dbPort, dbName) + defer db.Close() + + // Telegram бот теперь загружается из БД при необходимости + // Webhook будет настроен автоматически при сохранении bot token через UI + + // JWT secret from env or generate random + jwtSecret := getEnv("JWT_SECRET", "") + if jwtSecret == "" { + // Generate random secret if not provided (not recommended for production) + b := make([]byte, 32) + rand.Read(b) + jwtSecret = base64.StdEncoding.EncodeToString(b) + log.Printf("WARNING: JWT_SECRET not set, using randomly generated secret. 
Set JWT_SECRET env var for production.") + } + + app := &App{ + DB: db, + lastWebhookTime: make(map[int]time.Time), + telegramBot: nil, + telegramBotUsername: "", + jwtSecret: []byte(jwtSecret), + } + + // Инициализация Telegram бота из .env + telegramBotToken := getEnv("TELEGRAM_BOT_TOKEN", "") + if telegramBotToken != "" { + bot, err := tgbotapi.NewBotAPI(telegramBotToken) + if err != nil { + log.Printf("WARNING: Failed to initialize Telegram bot: %v", err) + } else { + app.telegramBot = bot + log.Printf("Telegram bot initialized successfully") + + // Получаем username бота через getMe + botInfo, err := bot.GetMe() + if err != nil { + log.Printf("WARNING: Failed to get bot info via getMe(): %v", err) + } else { + app.telegramBotUsername = botInfo.UserName + log.Printf("Telegram bot username: @%s", app.telegramBotUsername) + } + + // Настраиваем webhook для единого бота + webhookBaseURL := getEnv("WEBHOOK_BASE_URL", "") + if webhookBaseURL != "" { + webhookURL := strings.TrimRight(webhookBaseURL, "/") + "/webhook/telegram" + log.Printf("Setting up Telegram webhook: URL=%s", webhookURL) + if err := setupTelegramWebhook(telegramBotToken, webhookURL); err != nil { + log.Printf("WARNING: Failed to setup Telegram webhook: %v", err) + } else { + log.Printf("SUCCESS: Telegram webhook configured: %s", webhookURL) + } + } else { + log.Printf("WEBHOOK_BASE_URL not set. 
Webhook will not be configured.") + } + } + } else { + log.Printf("WARNING: TELEGRAM_BOT_TOKEN not set in environment") + } + + // Apply database migrations + if err := app.runMigrations(); err != nil { + log.Fatal("Failed to apply database migrations:", err) + } + log.Println("Database migrations applied successfully") + + // Запускаем планировщик для автоматической фиксации целей на неделю + app.startWeeklyGoalsScheduler() + + // Запускаем планировщик для ежедневного отчета в 23:59 + app.startDailyReportScheduler() + + // Запускаем планировщик для автовыполнения задач в конце дня в 23:55 + app.startEndOfDayTaskScheduler() + + // Запускаем планировщик синхронизации Fitbit каждые 4 часа + app.startFitbitSyncScheduler() + + r := mux.NewRouter() + + // Public auth routes (no authentication required) + r.HandleFunc("/api/auth/register", app.registerHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/api/auth/login", app.loginHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/api/auth/refresh", app.refreshTokenHandler).Methods("POST", "OPTIONS") + + // Webhooks - no auth (external services) + r.HandleFunc("/webhook/message/post", app.messagePostHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/webhook/todoist", app.todoistWebhookHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/webhook/telegram", app.telegramWebhookHandler).Methods("POST", "OPTIONS") + + // Admin pages (HTML is public, but API calls require auth) + // Note: We serve HTML without auth check, but JavaScript will check auth and API calls are protected + r.HandleFunc("/admin", app.adminHandler).Methods("GET") + r.HandleFunc("/admin.html", app.adminHandler).Methods("GET") + + // Admin API routes (require authentication and admin privileges) + adminAPIRoutes := r.PathPrefix("/").Subrouter() + adminAPIRoutes.Use(app.authMiddleware) + adminAPIRoutes.Use(app.adminMiddleware) + adminAPIRoutes.HandleFunc("/message/post", app.messagePostHandler).Methods("POST", "OPTIONS") + 
adminAPIRoutes.HandleFunc("/weekly_goals/setup", app.weeklyGoalsSetupHandler).Methods("POST", "OPTIONS") + adminAPIRoutes.HandleFunc("/daily-report/trigger", app.dailyReportTriggerHandler).Methods("POST", "OPTIONS") + adminAPIRoutes.HandleFunc("/project_score_sample_mv/refresh", app.projectScoreSampleMvRefreshHandler).Methods("POST", "OPTIONS") + + // Static files handler для uploads (public, no auth required) - ДО protected! + // Backend работает из /app/backend/, но uploads находится в /app/uploads/ + r.HandleFunc("/uploads/{path:.*}", func(w http.ResponseWriter, r *http.Request) { + vars := mux.Vars(r) + path := vars["path"] + filePath := filepath.Join("/app/uploads", path) + + // Проверяем, что файл существует + if _, err := os.Stat(filePath); os.IsNotExist(err) { + http.NotFound(w, r) + return + } + + // Отдаём файл + http.ServeFile(w, r, filePath) + }).Methods("GET") + + // Protected routes (require authentication) + protected := r.PathPrefix("/").Subrouter() + protected.Use(app.authMiddleware) + + // Auth routes that need authentication + protected.HandleFunc("/api/auth/logout", app.logoutHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/auth/me", app.getMeHandler).Methods("GET", "OPTIONS") + + // Words & dictionaries + protected.HandleFunc("/api/words", app.getWordsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/words", app.addWordsHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/words/{id}", app.deleteWordHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/words/{id}/reset-progress", app.resetWordProgressHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/test/words", app.getTestWordsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/test/progress", app.updateTestProgressHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/dictionaries", app.getDictionariesHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/dictionaries", 
app.addDictionaryHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/dictionaries/{id}", app.updateDictionaryHandler).Methods("PUT", "OPTIONS") + protected.HandleFunc("/api/dictionaries/{id}", app.deleteDictionaryHandler).Methods("DELETE", "OPTIONS") + + // Configs + protected.HandleFunc("/api/configs", app.getConfigsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/configs", app.addConfigHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/configs/{id}", app.updateConfigHandler).Methods("PUT", "OPTIONS") + protected.HandleFunc("/api/configs/{id}", app.deleteConfigHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/configs/{id}/dictionaries", app.getConfigDictionariesHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/test-configs-and-dictionaries", app.getTestConfigsAndDictionariesHandler).Methods("GET", "OPTIONS") + + // Projects & stats + protected.HandleFunc("/api/weekly-stats", app.getWeeklyStatsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/playlife-feed", app.getWeeklyStatsHandler).Methods("GET", "OPTIONS") + // Note: /message/post, /weekly_goals/setup, /daily-report/trigger moved to adminAPIRoutes + protected.HandleFunc("/projects", app.getProjectsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/project/priority", app.setProjectPriorityHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/project/color", app.setProjectColorHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/project/move", app.moveProjectHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/project/delete", app.deleteProjectHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/project/create", app.createProjectHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/d2dc349a-0d13-49b2-a8f0-1ab094bfba9b", app.getFullStatisticsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/today-entries", app.getTodayEntriesHandler).Methods("GET", "OPTIONS") + 
protected.HandleFunc("/api/entries/{id}", app.deleteEntryHandler).Methods("DELETE", "OPTIONS") + + // Integrations + protected.HandleFunc("/api/integrations/telegram", app.getTelegramIntegrationHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/integrations/telegram", app.updateTelegramIntegrationHandler).Methods("POST", "OPTIONS") + + // Todoist OAuth endpoints + protected.HandleFunc("/api/integrations/todoist/oauth/connect", app.todoistOAuthConnectHandler).Methods("GET") + r.HandleFunc("/api/integrations/todoist/oauth/callback", app.todoistOAuthCallbackHandler).Methods("GET") // Публичный! + protected.HandleFunc("/api/integrations/todoist/status", app.getTodoistStatusHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/integrations/todoist/disconnect", app.todoistDisconnectHandler).Methods("DELETE", "OPTIONS") + + // Fitbit OAuth endpoints + protected.HandleFunc("/api/integrations/fitbit/oauth/connect", app.fitbitOAuthConnectHandler).Methods("GET") + r.HandleFunc("/api/integrations/fitbit/oauth/callback", app.fitbitOAuthCallbackHandler).Methods("GET") // Публичный! 
+ protected.HandleFunc("/api/integrations/fitbit/status", app.getFitbitStatusHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/integrations/fitbit/disconnect", app.fitbitDisconnectHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/integrations/fitbit/goals", app.updateFitbitGoalsHandler).Methods("PUT", "OPTIONS") + protected.HandleFunc("/api/integrations/fitbit/sync", app.fitbitSyncHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/integrations/fitbit/stats", app.getFitbitStatsHandler).Methods("GET", "OPTIONS") + + // Tasks + protected.HandleFunc("/api/tasks", app.getTasksHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/tasks", app.createTaskHandler).Methods("POST", "OPTIONS") + // Специфичные роуты должны быть ПЕРЕД общим роутом /api/tasks/{id} + protected.HandleFunc("/api/tasks/{id}/complete", app.completeTaskHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/tasks/{id}/complete-and-delete", app.completeAndDeleteTaskHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/tasks/{id}/postpone", app.postponeTaskHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/tasks/{id}/draft", app.saveTaskDraftHandler).Methods("PUT", "OPTIONS") + protected.HandleFunc("/api/tasks/{id}/complete-at-end-of-day", app.completeTaskAtEndOfDayHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/tasks/{id}", app.getTaskDetailHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/tasks/{id}", app.updateTaskHandler).Methods("PUT", "OPTIONS") + protected.HandleFunc("/api/tasks/{id}", app.deleteTaskHandler).Methods("DELETE", "OPTIONS") + + // Wishlist + protected.HandleFunc("/api/wishlist", app.getWishlistHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist", app.createWishlistHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/completed", app.getWishlistCompletedHandler).Methods("GET", "OPTIONS") + 
protected.HandleFunc("/api/wishlist/metadata", app.extractLinkMetadataHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/proxy-image", app.proxyImageHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/calculate-weeks", app.calculateWeeksHandler).Methods("POST", "OPTIONS") + + // Wishlist Boards (ВАЖНО: должны быть ПЕРЕД /api/wishlist/{id} чтобы избежать конфликта роутов!) + protected.HandleFunc("/api/wishlist/boards", app.getBoardsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards", app.createBoardHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{id}", app.getBoardHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{id}", app.updateBoardHandler).Methods("PUT", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{id}", app.deleteBoardHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{id}/regenerate-invite", app.regenerateBoardInviteHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{id}/members", app.getBoardMembersHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{id}/members/{userId}", app.removeBoardMemberHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{id}/leave", app.leaveBoardHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{boardId}/items", app.getBoardItemsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{boardId}/items", app.createBoardItemHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/boards/{boardId}/completed", app.getBoardCompletedHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/invite/{token}", app.getBoardInviteInfoHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/invite/{token}/join", app.joinBoardHandler).Methods("POST", "OPTIONS") + + // Tracking + 
protected.HandleFunc("/api/tracking/stats", app.getTrackingStatsHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/tracking/invite", app.createTrackingInviteHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/tracking/invite/{token}", app.getTrackingInviteInfoHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/tracking/invite/{token}/accept", app.acceptTrackingInviteHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/tracking/access", app.getTrackingAccessHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/tracking/trackers/{id}", app.deleteTrackingTrackerHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/tracking/tracked/{id}", app.deleteTrackingTrackedHandler).Methods("DELETE", "OPTIONS") + + // Wishlist items (после boards, чтобы {id} не перехватывал "boards") + protected.HandleFunc("/api/wishlist/{id}", app.getWishlistItemHandler).Methods("GET", "OPTIONS") + protected.HandleFunc("/api/wishlist/{id}", app.updateWishlistHandler).Methods("PUT", "OPTIONS") + protected.HandleFunc("/api/wishlist/{id}", app.deleteWishlistHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/wishlist/{id}/image", app.uploadWishlistImageHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/{id}/image", app.deleteWishlistImageHandler).Methods("DELETE", "OPTIONS") + protected.HandleFunc("/api/wishlist/{id}/complete", app.completeWishlistHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/{id}/uncomplete", app.uncompleteWishlistHandler).Methods("POST", "OPTIONS") + protected.HandleFunc("/api/wishlist/{id}/copy", app.copyWishlistHandler).Methods("POST", "OPTIONS") + + // Group suggestions + protected.HandleFunc("/api/group-suggestions", app.getGroupSuggestionsHandler).Methods("GET", "OPTIONS") + + // Admin operations + protected.HandleFunc("/admin/recreate-mv", app.recreateMaterializedViewHandler).Methods("POST", "OPTIONS") + + port := getEnv("PORT", "8080") 
+ log.Printf("Server starting on port %s", port) + log.Printf("Registered public routes: /api/auth/register, /api/auth/login, /api/auth/refresh, webhooks") + log.Printf("All other routes require authentication via Bearer token") + log.Printf("Admin panel available at: http://localhost:%s/admin.html", port) + log.Fatal(http.ListenAndServe(":"+port, r)) +} + +func getEnv(key, defaultValue string) string { + if value := os.Getenv(key); value != "" { + return value + } + return defaultValue +} + +// getMapKeys возвращает список ключей из map +func getMapKeys(m map[string]interface{}) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + return keys +} + +// setupTelegramWebhook настраивает webhook для Telegram бота +func setupTelegramWebhook(botToken, webhookURL string) error { + apiURL := fmt.Sprintf("https://api.telegram.org/bot%s/setWebhook", botToken) + log.Printf("Setting up Telegram webhook: apiURL=%s, webhookURL=%s", apiURL, webhookURL) + + payload := map[string]string{ + "url": webhookURL, + } + + jsonData, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("failed to marshal webhook payload: %w", err) + } + + // Создаем HTTP клиент с таймаутом + client := &http.Client{ + Timeout: 10 * time.Second, + } + + resp, err := client.Post(apiURL, "application/json", bytes.NewBuffer(jsonData)) + if err != nil { + log.Printf("ERROR: Failed to send webhook setup request: %v", err) + return fmt.Errorf("failed to send webhook setup request: %w", err) + } + defer resp.Body.Close() + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to read response body: %w", err) + } + log.Printf("Telegram API response: status=%d, body=%s", resp.StatusCode, string(bodyBytes)) + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("telegram API returned status %d: %s", resp.StatusCode, string(bodyBytes)) + } + + // Декодируем из уже прочитанных байтов + var result map[string]interface{} 
+ if err := json.Unmarshal(bodyBytes, &result); err != nil { + return fmt.Errorf("failed to decode response: %w", err) + } + + if ok, _ := result["ok"].(bool); !ok { + description, _ := result["description"].(string) + return fmt.Errorf("telegram API returned error: %s", description) + } + + return nil +} + +// Вспомогательные функции для расчетов +func min(a, b float64) float64 { + if a < b { + return a + } + return b +} + +func max(a, b float64) float64 { + if a > b { + return a + } + return b +} + +func roundToTwoDecimals(val float64) float64 { + return float64(int(val*100+0.5)) / 100.0 +} + +func roundToFourDecimals(val float64) float64 { + return float64(int(val*10000+0.5)) / 10000.0 +} + +// calculateGroupsProgress вычисляет проценты выполнения для каждой группы приоритетов +// groups - карта приоритетов к спискам calculatedScore проектов +// Возвращает структуру GroupsProgress с процентами для каждой группы +// Если какая-то группа отсутствует, она считается как 100% +// min_goal = 100%, max_goal = 150%/135%/120% в зависимости от приоритета +func calculateGroupsProgress(groups map[int][]float64) GroupsProgress { + // Всего есть 3 группы: приоритет 1, приоритет 2, приоритет 0 + // Вычисляем среднее для каждой группы, если она есть + // Если группы нет, считаем её как 100% + + result := GroupsProgress{} + + // Обрабатываем все 3 возможных приоритета + priorities := []int{1, 2, 0} + + for _, priorityVal := range priorities { + scores, exists := groups[priorityVal] + + var avg float64 + if !exists || len(scores) == 0 { + // Если группы нет, считаем как 100% + avg = 100.0 + } else { + // Для приоритета 1 и 2 - обычное среднее + if priorityVal == 1 || priorityVal == 2 { + sum := 0.0 + for _, score := range scores { + sum += score + } + avg = sum / float64(len(scores)) + } else { + // Для проектов без приоритета (priorityVal == 0) - специальная формула + projectCount := float64(len(scores)) + multiplier := 100.0 / (projectCount * 0.8) + + sum := 0.0 + for _, score 
:= range scores { + // score уже в процентах (например, 80.0), переводим в долю (0.8) + scoreAsDecimal := score / 100.0 + sum += scoreAsDecimal * multiplier + } + + avg = math.Min(120.0, sum) + } + } + + // Сохраняем результат в соответствующее поле + avgRounded := roundToFourDecimals(avg) + switch priorityVal { + case 1: + result.Group1 = &avgRounded + case 2: + result.Group2 = &avgRounded + case 0: + result.Group0 = &avgRounded + } + } + + return result +} + +// calculateOverallProgress вычисляет общий процент выполнения на основе процентов групп +// groupsProgress - структура с процентами для каждой группы приоритетов +// groups - карта приоритетов к спискам calculatedScore проектов (используется для точного расчета) +// Возвращает указатель на float64 с общим процентом выполнения +// Вычисляет среднее между группами (min_goal = 100%, max_goal = 150%/135%/120%) +func calculateOverallProgress(groupsProgress GroupsProgress, groups map[int][]float64) *float64 { + // Собираем проценты по группам + var groupScores []float64 + + // Добавляем проценты только тех групп, которые существуют (имеют проекты) + if groupsProgress.Group1 != nil { + groupScores = append(groupScores, *groupsProgress.Group1) + } + if groupsProgress.Group2 != nil { + groupScores = append(groupScores, *groupsProgress.Group2) + } + if groupsProgress.Group0 != nil { + groupScores = append(groupScores, *groupsProgress.Group0) + } + + // Если нет групп с проектами, возвращаем 0 + if len(groupScores) == 0 { + zero := 0.0 + return &zero + } + + // Вычисляем среднее между группами + var sum float64 + for _, score := range groupScores { + sum += score + } + + overallProgress := sum / float64(len(groupScores)) + overallProgressRounded := roundToFourDecimals(overallProgress) + total := &overallProgressRounded + + return total +} + +// TelegramIntegration представляет запись из таблицы telegram_integrations +type TelegramIntegration struct { + ID int `json:"id"` + UserID int `json:"user_id"` + TelegramUserID 
// getTelegramIntegrationForUser returns the telegram_integrations row for the
// given user, lazily creating one (with a fresh start_token) on first access.
//
// Returns the populated integration, or an error if the row can neither be
// read nor created.
func (a *App) getTelegramIntegrationForUser(userID int) (*TelegramIntegration, error) {
	var integration TelegramIntegration
	var telegramUserID sql.NullInt64
	var chatID, startToken sql.NullString
	var createdAt, updatedAt sql.NullTime

	err := a.DB.QueryRow(`
		SELECT id, user_id, telegram_user_id, chat_id, start_token, created_at, updated_at
		FROM telegram_integrations
		WHERE user_id = $1
		LIMIT 1
	`, userID).Scan(
		&integration.ID,
		&integration.UserID,
		&telegramUserID,
		&chatID,
		&startToken,
		&createdAt,
		&updatedAt,
	)

	if err == sql.ErrNoRows {
		// First access for this user: create a new row with a start_token.
		// NOTE: `:=` deliberately shadows the outer err — this branch handles
		// its own errors and never leaks the shadowed value outward.
		startTokenValue, err := generateWebhookToken()
		if err != nil {
			return nil, fmt.Errorf("failed to generate start token: %w", err)
		}

		err = a.DB.QueryRow(`
			INSERT INTO telegram_integrations (user_id, start_token)
			VALUES ($1, $2)
			RETURNING id, user_id, telegram_user_id, chat_id, start_token, created_at, updated_at
		`, userID, startTokenValue).Scan(
			&integration.ID,
			&integration.UserID,
			&telegramUserID,
			&chatID,
			&startToken,
			&createdAt,
			&updatedAt,
		)
		if err != nil {
			return nil, fmt.Errorf("failed to create telegram integration: %w", err)
		}
		// Make sure the token we just generated is reflected in the result.
		startToken = sql.NullString{String: startTokenValue, Valid: true}
	} else if err != nil {
		return nil, fmt.Errorf("failed to get telegram integration: %w", err)
	}

	// Convert nullable columns into optional pointer fields.
	if telegramUserID.Valid {
		integration.TelegramUserID = &telegramUserID.Int64
	}
	if chatID.Valid {
		integration.ChatID = &chatID.String
	}
	if startToken.Valid {
		integration.StartToken = &startToken.String
	}
	if createdAt.Valid {
		integration.CreatedAt = &createdAt.Time
	}
	if updatedAt.Valid {
		integration.UpdatedAt = &updatedAt.Time
	}

	return &integration, nil
}

// sendTelegramMessageToChat sends a Markdown message to a concrete chat_id.
// **bold** spans are rewritten to Telegram's legacy *bold* before sending.
// If Telegram reports the bot as blocked / chat gone, the integration row is
// cleared (best effort) so we stop messaging that chat.
func (a *App) sendTelegramMessageToChat(chatID int64, text string) error {
	if a.telegramBot == nil {
		return fmt.Errorf("telegram bot not initialized")
	}

	// Convert **bold** (our storage format) to *bold* (Telegram legacy Markdown).
	telegramText := regexp.MustCompile(`\*\*([^*]+)\*\*`).ReplaceAllString(text, "*$1*")
	msg := tgbotapi.NewMessage(chatID, telegramText)
	msg.ParseMode = "Markdown"

	_, err := a.telegramBot.Send(msg)
	if err != nil {
		// Detect "user blocked the bot" style failures by error text.
		if strings.Contains(err.Error(), "blocked") ||
			strings.Contains(err.Error(), "chat not found") ||
			strings.Contains(err.Error(), "bot was blocked") {
			// User blocked the bot — clear the integration so we stop retrying.
			// Exec error is intentionally ignored: this cleanup is best effort.
			chatIDStr := strconv.FormatInt(chatID, 10)
			a.DB.Exec(`
				UPDATE telegram_integrations
				SET telegram_user_id = NULL, chat_id = NULL, updated_at = CURRENT_TIMESTAMP
				WHERE chat_id = $1
			`, chatIDStr)
			log.Printf("User blocked bot, cleared integration for chat_id=%d", chatID)
		}
		return err
	}

	log.Printf("Message sent to chat_id=%d", chatID)
	return nil
}
// utf16OffsetToUTF8 maps a UTF-16 code-unit offset (as used by Telegram
// entities) to the corresponding UTF-8 byte offset within text.
// Offsets at or past the end of the string map to len(text).
func utf16OffsetToUTF8(text string, utf16Offset int) int {
	units := utf16.Encode([]rune(text))
	if utf16Offset >= len(units) {
		return len(text)
	}

	// Re-decode the UTF-16 prefix and measure it in UTF-8 bytes.
	prefix := string(utf16.Decode(units[:utf16Offset]))
	return len(prefix)
}

// utf16LengthToUTF8 converts a UTF-16 (offset, length) span into the number of
// UTF-8 bytes it covers within text. Spans running past the end of the string
// are clamped; degenerate spans yield 0.
func utf16LengthToUTF8(text string, utf16Offset, utf16Length int) int {
	units := utf16.Encode([]rune(text))

	// Clamp the span so it never reads past the end of the string.
	if utf16Offset+utf16Length > len(units) {
		utf16Length = len(units) - utf16Offset
	}
	if utf16Length <= 0 {
		return 0
	}

	// Byte length of the span = bytes(prefix+span) - bytes(prefix).
	startBytes := len(string(utf16.Decode(units[:utf16Offset])))
	endBytes := len(string(utf16.Decode(units[:utf16Offset+utf16Length])))
	return endBytes - startBytes
}
// processTelegramMessage parses a Telegram message using its entity list:
// bold entities matching "project+score" / "project-score" become score nodes,
// and each matched span is replaced in the text by a ${N} placeholder.
// Unlike processMessage it relies on entities (not ** markers) and never
// echoes anything back to Telegram.
//
// userID may be nil when the sender could not be mapped to a local user.
// Returns the processed entry, or an error if persisting the nodes fails.
func (a *App) processTelegramMessage(fullText string, entities []TelegramEntity, userID *int) (*ProcessedEntry, error) {
	fullText = strings.TrimSpace(fullText)

	// Pattern: project+/-score, without ** markers (those come from entities).
	scoreRegex := regexp.MustCompile(`^([а-яА-ЯёЁ\w]+)([+-])(\d+(?:\.\d+)?)$`)

	// Extracted {project, score} items, in text order.
	scoreNodes := make([]ProcessedNode, 0)
	workingText := fullText
	placeholderIndex := 0

	// Keep only the bold entities; those are the score candidates.
	boldEntities := make([]TelegramEntity, 0)
	for _, entity := range entities {
		if entity.Type == "bold" {
			boldEntities = append(boldEntities, entity)
		}
	}

	// Sort ascending by offset so ${0} corresponds to the first span in text.
	sort.Slice(boldEntities, func(i, j int) bool {
		return boldEntities[i].Offset < boldEntities[j].Offset
	})

	// Replacement metadata; actual splicing happens later in reverse order.
	type ReplacementData struct {
		Start       int
		Length      int
		Placeholder string
	}
	replacementData := make([]ReplacementData, 0)

	for _, entity := range boldEntities {
		// Telegram offsets/lengths are UTF-16 code units; convert to UTF-8 bytes.
		start := utf16OffsetToUTF8(fullText, entity.Offset)
		length := utf16LengthToUTF8(fullText, entity.Offset, entity.Length)

		if start+length > len(fullText) {
			continue // Skip malformed entities.
		}
		boldText := strings.TrimSpace(fullText[start : start+length])

		match := scoreRegex.FindStringSubmatch(boldText)

		if match != nil && len(match) == 4 {
			project := match[1]
			sign := match[2]
			rawScore, err := strconv.ParseFloat(match[3], 64)
			if err != nil {
				log.Printf("Error parsing score: %v", err)
				continue
			}
			score := rawScore
			if sign == "-" {
				score = -rawScore
			}

			// Record the node (in text order).
			scoreNodes = append(scoreNodes, ProcessedNode{
				Project: project,
				Score:   score,
			})

			// Record the replacement for this span.
			replacementData = append(replacementData, ReplacementData{
				Start:       start,
				Length:      length,
				Placeholder: fmt.Sprintf("${%d}", placeholderIndex),
			})

			placeholderIndex++
		}
	}

	// Splice in REVERSE offset order so earlier offsets stay valid.
	sort.Slice(replacementData, func(i, j int) bool {
		return replacementData[i].Start > replacementData[j].Start
	})

	for _, item := range replacementData {
		// workingText started as fullText, so the original offsets still apply.
		if item.Start+item.Length <= len(workingText) {
			workingText = workingText[:item.Start] + item.Placeholder + workingText[item.Start+item.Length:]
		}
	}

	// Drop empty lines and surrounding whitespace.
	lines := strings.Split(workingText, "\n")
	cleanedLines := make([]string, 0)
	for _, line := range lines {
		trimmed := strings.TrimSpace(line)
		if trimmed != "" {
			cleanedLines = append(cleanedLines, trimmed)
		}
	}
	processedText := strings.Join(cleanedLines, "\n")

	// Timestamp in ISO 8601 (UTC).
	createdDate := time.Now().UTC().Format(time.RFC3339)

	// Persist only when at least one node was extracted.
	if len(scoreNodes) > 0 {
		err := a.insertMessageData(processedText, createdDate, scoreNodes, userID)
		if err != nil {
			log.Printf("Error inserting message data: %v", err)
			return nil, fmt.Errorf("error inserting data: %w", err)
		}
	} else {
		// No nodes: keep the original text and skip the database entirely.
		processedText = fullText
		log.Printf("No nodes found in Telegram message, message will not be saved to database")
	}

	response := &ProcessedEntry{
		Text:        processedText,
		CreatedDate: createdDate,
		Nodes:       scoreNodes,
		Raw:         fullText,
		Markdown:    fullText, // Telegram input needs no markdown conversion.
	}

	// Intentionally NOT echoed back to Telegram (unlike processMessage).

	return response, nil
}

// processMessage parses the text, persists extracted nodes, and echoes the
// message to the user's Telegram chat.
func (a *App) processMessage(rawText string, userID *int) (*ProcessedEntry, error) {
	return a.processMessageInternal(rawText, true, userID)
}

// processMessageWithoutTelegram parses the text and persists extracted nodes,
// but does NOT echo anything to Telegram.
func (a *App) processMessageWithoutTelegram(rawText string, userID *int) (*ProcessedEntry, error) {
	return a.processMessageInternal(rawText, false, userID)
}
// messagePostHandler handles POST /message: extracts the raw text from either
// {body:{text:...}} or {text:...}, runs processMessage (persist + Telegram
// echo), and returns the ProcessedEntry as JSON.
func (a *App) messagePostHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	// User ID from context; may be absent for webhook calls.
	var userIDPtr *int
	if userID, ok := getUserIDFromContext(r); ok {
		userIDPtr = &userID
	}

	// The request may arrive as {body:{text:...}} or as a flat {text:...}.
	var rawReq map[string]interface{}
	if err := json.NewDecoder(r.Body).Decode(&rawReq); err != nil {
		log.Printf("Error decoding message post request: %v", err)
		sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest)
		return
	}

	// Prefer the nested form first.
	var rawText string
	if body, ok := rawReq["body"].(map[string]interface{}); ok {
		if text, ok := body["text"].(string); ok {
			rawText = text
		}
	}

	// Fall back to the flat form.
	if rawText == "" {
		if text, ok := rawReq["text"].(string); ok {
			rawText = text
		}
	}

	if rawText == "" {
		sendErrorWithCORS(w, "Missing 'text' field in body", http.StatusBadRequest)
		return
	}

	response, err := a.processMessage(rawText, userIDPtr)
	if err != nil {
		log.Printf("Error processing message: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(response)
}

// insertMessageData persists an entry with its score nodes inside a single
// transaction: (1) upsert the referenced projects, (2) insert the entry,
// (3) insert one node per score, denormalizing user_id/created_date onto nodes.
// The weekly MV is NOT refreshed here — that happens on a Monday cron; the
// current week is always read straight from nodes.
//
// userID == nil selects the legacy (userless) code path for each statement.
func (a *App) insertMessageData(entryText string, createdDate string, nodes []ProcessedNode, userID *int) error {
	tx, err := a.DB.Begin()
	if err != nil {
		return fmt.Errorf("failed to begin transaction: %w", err)
	}
	// Rollback is a no-op after a successful Commit.
	defer tx.Rollback()

	// 1. Upsert projects — dedupe names first.
	projectNames := make(map[string]bool)
	for _, node := range nodes {
		projectNames[node.Project] = true
	}

	for projectName := range projectNames {
		if userID != nil {
			// Check-then-insert instead of ON CONFLICT, to work with either
			// uniqueness scheme.
			var existingID int
			err := tx.QueryRow(`
				SELECT id FROM projects
				WHERE name = $1 AND user_id = $2 AND deleted = FALSE
			`, projectName, *userID).Scan(&existingID)

			if err == sql.ErrNoRows {
				// Project does not exist — create it with a random color.
				randomColor := generateRandomProjectColor()
				_, err = tx.Exec(`
					INSERT INTO projects (name, deleted, user_id, color)
					VALUES ($1, FALSE, $2, $3)
				`, projectName, *userID, randomColor)
				if err != nil {
					// Likely a uniqueness violation — revive/claim the
					// existing row instead.
					_, err = tx.Exec(`
						UPDATE projects
						SET deleted = FALSE, user_id = COALESCE(user_id, $2)
						WHERE name = $1
					`, projectName, *userID)
					if err != nil {
						return fmt.Errorf("failed to upsert project %s: %w", projectName, err)
					}
				}
			} else if err != nil {
				return fmt.Errorf("failed to check project %s: %w", projectName, err)
			}
			// Project already exists — nothing to do.
		} else {
			// Legacy path without user_id.
			var existingID int
			err := tx.QueryRow(`
				SELECT id FROM projects
				WHERE name = $1 AND deleted = FALSE
			`, projectName).Scan(&existingID)

			if err == sql.ErrNoRows {
				randomColor := generateRandomProjectColor()
				_, err = tx.Exec(`
					INSERT INTO projects (name, deleted, color)
					VALUES ($1, FALSE, $2)
				`, projectName, randomColor)
				if err != nil {
					return fmt.Errorf("failed to insert project %s: %w", projectName, err)
				}
			} else if err != nil {
				return fmt.Errorf("failed to check project %s: %w", projectName, err)
			}
			// Project already exists — nothing to do.
		}
	}

	// 2. Insert the entry, capturing its id for the nodes below.
	var entryID int
	if userID != nil {
		err = tx.QueryRow(`
			INSERT INTO entries (text, created_date, user_id)
			VALUES ($1, $2, $3)
			RETURNING id
		`, entryText, createdDate, *userID).Scan(&entryID)
	} else {
		err = tx.QueryRow(`
			INSERT INTO entries (text, created_date)
			VALUES ($1, $2)
			RETURNING id
		`, entryText, createdDate).Scan(&entryID)
	}
	if err != nil {
		return fmt.Errorf("failed to insert entry: %w", err)
	}

	// 3. Insert nodes, resolving each project's id inside the same tx.
	for _, node := range nodes {
		var projectID int
		if userID != nil {
			err = tx.QueryRow(`
				SELECT id FROM projects
				WHERE name = $1 AND user_id = $2 AND deleted = FALSE
			`, node.Project, *userID).Scan(&projectID)
		} else {
			err = tx.QueryRow(`
				SELECT id FROM projects
				WHERE name = $1 AND deleted = FALSE
			`, node.Project).Scan(&projectID)
		}

		if err == sql.ErrNoRows {
			return fmt.Errorf("project %s not found after insert", node.Project)
		} else if err != nil {
			return fmt.Errorf("failed to find project %s: %w", node.Project, err)
		}

		// user_id and created_date are denormalized onto nodes on purpose.
		if userID != nil {
			_, err = tx.Exec(`
				INSERT INTO nodes (project_id, entry_id, score, user_id, created_date)
				VALUES ($1, $2, $3, $4, $5)
			`, projectID, entryID, node.Score, *userID, createdDate)
		} else {
			_, err = tx.Exec(`
				INSERT INTO nodes (project_id, entry_id, score, created_date)
				VALUES ($1, $2, $3, $4)
			`, projectID, entryID, node.Score, createdDate)
		}
		if err != nil {
			return fmt.Errorf("failed to insert node for project %s: %w", node.Project, err)
		}
	}

	// MV refresh is cron-only (Mondays 06:00); current-week data comes from nodes.

	if err := tx.Commit(); err != nil {
		return fmt.Errorf("failed to commit transaction: %w", err)
	}

	return nil
}
// getWeeklyGoalsForUser returns the current ISO-week goals for one user's
// non-deleted projects, ordered by project name.
//
// A NULL max_goal_score is encoded as math.NaN() — formatWeeklyGoalsMessage
// relies on this sentinel to print "min only" goals.
// NOTE(review): weeklyGoalsSetupHandler encodes the same NULL as 0.0; unify
// the sentinel if any caller ever compares the two.
func (a *App) getWeeklyGoalsForUser(userID int) ([]WeeklyGoalSetup, error) {
	selectQuery := `
		SELECT
			p.name AS project_name,
			wg.min_goal_score,
			wg.max_goal_score
		FROM
			weekly_goals wg
		JOIN
			projects p ON wg.project_id = p.id
		WHERE
			wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
			AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER
			AND p.deleted = FALSE
			AND p.user_id = $1
		ORDER BY
			p.name
	`

	rows, err := a.DB.Query(selectQuery, userID)
	if err != nil {
		return nil, fmt.Errorf("error querying weekly goals: %w", err)
	}
	defer rows.Close()

	goals := make([]WeeklyGoalSetup, 0)
	for rows.Next() {
		var goal WeeklyGoalSetup
		var maxGoalScore sql.NullFloat64

		err := rows.Scan(
			&goal.ProjectName,
			&goal.MinGoalScore,
			&maxGoalScore,
		)
		if err != nil {
			// Best effort: skip the bad row, keep the rest.
			log.Printf("Error scanning weekly goal row: %v", err)
			continue
		}

		if maxGoalScore.Valid {
			goal.MaxGoalScore = maxGoalScore.Float64
		} else {
			// NaN sentinel = "no max goal"; consumed by formatWeeklyGoalsMessage.
			goal.MaxGoalScore = math.NaN()
		}

		goals = append(goals, goal)
	}

	return goals, nil
}

// sendWeeklyGoalsTelegramMessage sends each Telegram-connected user their own
// formatted weekly goals. Per-user failures are logged and skipped so one bad
// user does not block the rest.
func (a *App) sendWeeklyGoalsTelegramMessage() error {
	userIDs, err := a.getAllUsersWithTelegram()
	if err != nil {
		return err
	}

	for _, userID := range userIDs {
		goals, err := a.getWeeklyGoalsForUser(userID)
		if err != nil {
			log.Printf("Error getting goals for user %d: %v", userID, err)
			continue
		}

		message := a.formatWeeklyGoalsMessage(goals)
		if message == "" {
			// Nothing to announce for this user.
			continue
		}

		if err := a.sendTelegramMessageToUser(userID, message); err != nil {
			log.Printf("Error sending weekly goals to user %d: %v", userID, err)
		}
	}

	return nil
}
%s", minGoalFormatted) + } + } else { + // Если minGoal не установлен (NaN), пропускаем вывод цели + continue + } + + // Форматирование строки для Markdown (Legacy): *Название*: Цель + markdownMessage += fmt.Sprintf("*%s*:%s\n", goal.ProjectName, goalText) + } + + return markdownMessage +} + +func (a *App) weeklyGoalsSetupHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + err := a.setupWeeklyGoals() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Получаем установленные цели для ответа + selectQuery := ` + SELECT + p.name AS project_name, + wg.min_goal_score, + wg.max_goal_score + FROM + weekly_goals wg + JOIN + projects p ON wg.project_id = p.id + WHERE + wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + AND p.deleted = FALSE + ORDER BY + p.name + ` + + rows, err := a.DB.Query(selectQuery) + if err != nil { + log.Printf("Error querying weekly goals: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying weekly goals: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + goals := make([]WeeklyGoalSetup, 0) + for rows.Next() { + var goal WeeklyGoalSetup + var maxGoalScore sql.NullFloat64 + + err := rows.Scan( + &goal.ProjectName, + &goal.MinGoalScore, + &maxGoalScore, + ) + if err != nil { + log.Printf("Error scanning weekly goal row: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error scanning data: %v", err), http.StatusInternalServerError) + return + } + + if maxGoalScore.Valid { + goal.MaxGoalScore = maxGoalScore.Float64 + } else { + goal.MaxGoalScore = 0.0 + } + + goals = append(goals, goal) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(goals) +} + +// dailyReportTriggerHandler обрабатывает запрос на отправку ежедневного отчёта +func (a *App) 
dailyReportTriggerHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + log.Printf("Manual trigger: Sending daily report") + err := a.sendDailyReport() + if err != nil { + log.Printf("Error in manual daily report trigger: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{ + "message": "Daily report sent successfully", + }) +} + +// projectScoreSampleMvRefreshHandler refreshes project_score_sample_mv and returns rows for the current user +func (a *App) projectScoreSampleMvRefreshHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + _, err := a.DB.Exec("REFRESH MATERIALIZED VIEW project_score_sample_mv") + if err != nil { + log.Printf("Error refreshing project_score_sample_mv: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error refreshing MV: %v", err), http.StatusInternalServerError) + return + } + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + rows, err := a.DB.Query(` + SELECT project_id, score, entry_message, user_id, created_date + FROM project_score_sample_mv + WHERE user_id = $1 + ORDER BY project_id, score + `, userID) + if err != nil { + log.Printf("Error querying project_score_sample_mv: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying MV: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + data := make([]ProjectScoreSampleMvRow, 0) + for rows.Next() { + var row ProjectScoreSampleMvRow + var userIDNull sql.NullInt64 + err := rows.Scan(&row.ProjectID, &row.Score, &row.EntryMessage, &userIDNull, &row.CreatedDate) + if err != nil { + log.Printf("Error scanning 
project_score_sample_mv row: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error scanning data: %v", err), http.StatusInternalServerError) + return + } + if userIDNull.Valid { + uid := int(userIDNull.Int64) + row.UserID = &uid + } + data = append(data, row) + } + if err = rows.Err(); err != nil { + log.Printf("Error iterating project_score_sample_mv rows: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error reading data: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(data) +} + +func (a *App) adminHandler(w http.ResponseWriter, r *http.Request) { + // Пробуем найти файл admin.html в разных местах + var adminPath string + + // 1. Пробуем в текущей рабочей директории + if _, err := os.Stat("admin.html"); err == nil { + adminPath = "admin.html" + } else { + // 2. Пробуем в директории play-life-backend относительно текущей директории + adminPath = filepath.Join("play-life-backend", "admin.html") + if _, err := os.Stat(adminPath); err != nil { + // 3. Пробуем получить путь к исполняемому файлу и искать рядом + if execPath, err := os.Executable(); err == nil { + execDir := filepath.Dir(execPath) + adminPath = filepath.Join(execDir, "admin.html") + if _, err := os.Stat(adminPath); err != nil { + // 4. 
// recreateMaterializedViewHandler drops and recreates weekly_report_mv using
// EXTRACT(ISOYEAR ...) (the "ISOYEAR fix"), then recreates its lookup index.
// The view caps each week's total_score at the week's snapshot max_score when
// one exists (normalized_total_score).
//
// NOTE(review): DROP + CREATE is not atomic here — concurrent readers of the
// view during this window will fail; acceptable for a manual admin endpoint.
func (a *App) recreateMaterializedViewHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	log.Printf("Recreating materialized view weekly_report_mv with ISOYEAR fix")

	// Drop the old view first.
	dropMaterializedView := `DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv`
	if _, err := a.DB.Exec(dropMaterializedView); err != nil {
		log.Printf("Error dropping materialized view: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error dropping materialized view: %v", err), http.StatusInternalServerError)
		return
	}

	// Recreate with ISOYEAR-based week bucketing.
	createMaterializedView := `
		CREATE MATERIALIZED VIEW weekly_report_mv AS
		SELECT
			p.id AS project_id,
			agg.report_year,
			agg.report_week,
			COALESCE(agg.total_score, 0.0000) AS total_score,
			CASE
				WHEN wg.max_score IS NULL THEN COALESCE(agg.total_score, 0.0000)
				ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_score)
			END AS normalized_total_score
		FROM
			projects p
		LEFT JOIN
			(
				SELECT
					n.project_id,
					EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year,
					EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week,
					SUM(n.score) AS total_score
				FROM
					nodes n
				JOIN
					entries e ON n.entry_id = e.id
				GROUP BY
					1, 2, 3
			) agg
			ON p.id = agg.project_id
		LEFT JOIN
			weekly_goals wg
			ON wg.project_id = p.id
			AND wg.goal_year = agg.report_year
			AND wg.goal_week = agg.report_week
		WHERE
			p.deleted = FALSE
		ORDER BY
			p.id, agg.report_year, agg.report_week
	`

	if _, err := a.DB.Exec(createMaterializedView); err != nil {
		log.Printf("Error creating materialized view: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error creating materialized view: %v", err), http.StatusInternalServerError)
		return
	}

	// Recreate the lookup index; failure here is non-fatal (view still works).
	createMVIndex := `
		CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week
		ON weekly_report_mv(project_id, report_year, report_week)
	`
	if _, err := a.DB.Exec(createMVIndex); err != nil {
		log.Printf("Warning: Failed to create materialized view index: %v", err)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]string{
		"message": "Materialized view recreated successfully with ISOYEAR fix",
	})
}
+} + +func (a *App) setProjectPriorityHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + _ = userID // Will be used in SQL queries + + // Читаем тело запроса один раз + bodyBytes, err := io.ReadAll(r.Body) + if err != nil { + log.Printf("Error reading request body: %v", err) + sendErrorWithCORS(w, "Error reading request body", http.StatusBadRequest) + return + } + defer r.Body.Close() + + // Парсим входящий запрос - может быть как {body: [...]}, так и просто массив + var projectsToUpdate []ProjectPriorityUpdate + + // Сначала пробуем декодировать как прямой массив + var directArray []interface{} + arrayErr := json.Unmarshal(bodyBytes, &directArray) + if arrayErr == nil && len(directArray) > 0 { + // Успешно декодировали как массив + log.Printf("Received direct array format with %d items", len(directArray)) + for _, item := range directArray { + if itemMap, ok := item.(map[string]interface{}); ok { + var project ProjectPriorityUpdate + + // Извлекаем id + if idVal, ok := itemMap["id"].(float64); ok { + project.ID = int(idVal) + } else if idVal, ok := itemMap["id"].(int); ok { + project.ID = idVal + } else { + log.Printf("Invalid id in request item: %v", itemMap) + continue + } + + // Извлекаем priority (может быть null, undefined, или числом) + if priorityVal, ok := itemMap["priority"]; ok && priorityVal != nil { + // Проверяем, не является ли это строкой "null" + if strVal, ok := priorityVal.(string); ok && (strVal == "null" || strVal == "NULL") { + project.Priority = nil + } else if numVal, ok := priorityVal.(float64); ok { + priorityInt := int(numVal) + project.Priority = &priorityInt + } else if numVal, ok := priorityVal.(int); ok { + project.Priority = &numVal + } else { + project.Priority = nil + } + } else { + 
project.Priority = nil + } + + projectsToUpdate = append(projectsToUpdate, project) + } + } + } + + // Если не получилось как массив (ошибка декодирования), пробуем как объект с body + // НЕ пытаемся декодировать как объект, если массив декодировался успешно (даже если пустой) + if len(projectsToUpdate) == 0 && arrayErr != nil { + log.Printf("Failed to decode as array (error: %v), trying as object", arrayErr) + var rawReq map[string]interface{} + if err := json.Unmarshal(bodyBytes, &rawReq); err != nil { + log.Printf("Error decoding project priority request as object: %v, body: %s", err, string(bodyBytes)) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Извлекаем массив проектов из body + if body, ok := rawReq["body"].([]interface{}); ok { + log.Printf("Received body format with %d items", len(body)) + for _, item := range body { + if itemMap, ok := item.(map[string]interface{}); ok { + var project ProjectPriorityUpdate + + // Извлекаем id + if idVal, ok := itemMap["id"].(float64); ok { + project.ID = int(idVal) + } else if idVal, ok := itemMap["id"].(int); ok { + project.ID = idVal + } else { + log.Printf("Invalid id in request item: %v", itemMap) + continue + } + + // Извлекаем priority (может быть null, undefined, или числом) + if priorityVal, ok := itemMap["priority"]; ok && priorityVal != nil { + // Проверяем, не является ли это строкой "null" + if strVal, ok := priorityVal.(string); ok && (strVal == "null" || strVal == "NULL") { + project.Priority = nil + } else if numVal, ok := priorityVal.(float64); ok { + priorityInt := int(numVal) + project.Priority = &priorityInt + } else if numVal, ok := priorityVal.(int); ok { + project.Priority = &numVal + } else { + project.Priority = nil + } + } else { + project.Priority = nil + } + + projectsToUpdate = append(projectsToUpdate, project) + } + } + } + } + + if len(projectsToUpdate) == 0 { + log.Printf("No projects to update after parsing. 
Body was: %s", string(bodyBytes)) + sendErrorWithCORS(w, "No projects to update", http.StatusBadRequest) + return + } + + log.Printf("Successfully parsed %d projects to update", len(projectsToUpdate)) + + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Обновляем приоритеты для каждого проекта + for _, project := range projectsToUpdate { + if project.Priority == nil { + _, err = tx.Exec(` + UPDATE projects + SET priority = NULL + WHERE id = $1 AND user_id = $2 + `, project.ID, userID) + } else { + _, err = tx.Exec(` + UPDATE projects + SET priority = $1 + WHERE id = $2 AND user_id = $3 + `, *project.Priority, project.ID, userID) + } + + if err != nil { + log.Printf("Error updating project %d priority: %v", project.ID, err) + tx.Rollback() + sendErrorWithCORS(w, fmt.Sprintf("Error updating project %d: %v", project.ID, err), http.StatusInternalServerError) + return + } + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Возвращаем успешный ответ + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": fmt.Sprintf("Updated priorities for %d projects", len(projectsToUpdate)), + "updated": len(projectsToUpdate), + }) +} + +type ProjectColorRequest struct { + ID int `json:"id"` + Color string `json:"color"` +} + +func (a *App) setProjectColorHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", 
http.StatusUnauthorized) + return + } + + var req ProjectColorRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding project color request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + if req.ID == 0 { + sendErrorWithCORS(w, "id is required", http.StatusBadRequest) + return + } + + if req.Color == "" { + sendErrorWithCORS(w, "color is required", http.StatusBadRequest) + return + } + + // Проверяем, что цвет в правильном формате HEX + if !strings.HasPrefix(req.Color, "#") || len(req.Color) != 7 { + sendErrorWithCORS(w, "color must be in HEX format (e.g., #FF5733)", http.StatusBadRequest) + return + } + + // Обновляем цвет проекта + _, err := a.DB.Exec(` + UPDATE projects + SET color = $1 + WHERE id = $2 AND user_id = $3 + `, req.Color, req.ID, userID) + + if err != nil { + log.Printf("Error updating project color: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating project color: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Project color updated successfully", + "id": req.ID, + "color": req.Color, + }) +} + +type ProjectMoveRequest struct { + ID int `json:"id"` + NewName string `json:"new_name"` +} + +type ProjectDeleteRequest struct { + ID int `json:"id"` +} + +type ProjectCreateRequest struct { + Name string `json:"name"` +} + +func (a *App) moveProjectHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + _ = userID // Will be used in SQL queries + + var req ProjectMoveRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding move project request: %v", 
err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + if req.NewName == "" { + sendErrorWithCORS(w, "new_name is required", http.StatusBadRequest) + return + } + + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Ищем проект с таким именем + var targetProjectID int + err = tx.QueryRow(` + SELECT id FROM projects WHERE name = $1 AND deleted = FALSE + `, req.NewName).Scan(&targetProjectID) + + if err == sql.ErrNoRows { + // Проект не найден - просто переименовываем текущий проект + _, err = tx.Exec(` + UPDATE projects + SET name = $1 + WHERE id = $2 + `, req.NewName, req.ID) + if err != nil { + log.Printf("Error renaming project: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error renaming project: %v", err), http.StatusInternalServerError) + return + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов (имя проекта изменилось) + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Project renamed successfully", + "project_id": req.ID, + }) + return + } else if err != nil { + log.Printf("Error querying target project: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying target project: %v", err), http.StatusInternalServerError) + return + } + + // Проект найден - переносим данные в существующий проект + finalProjectID := targetProjectID + + // Обновляем все nodes с project_id на 
целевой + _, err = tx.Exec(` + UPDATE nodes + SET project_id = $1 + WHERE project_id = $2 + `, finalProjectID, req.ID) + if err != nil { + log.Printf("Error updating nodes: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating nodes: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем weekly_goals + // Сначала удаляем записи старого проекта, которые конфликтуют с записями целевого проекта + // (если у целевого проекта уже есть запись для той же недели) + _, err = tx.Exec(` + DELETE FROM weekly_goals + WHERE project_id = $1 + AND EXISTS ( + SELECT 1 + FROM weekly_goals wg2 + WHERE wg2.project_id = $2 + AND wg2.goal_year = weekly_goals.goal_year + AND wg2.goal_week = weekly_goals.goal_week + ) + `, req.ID, finalProjectID) + if err != nil { + log.Printf("Error deleting conflicting weekly_goals: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error deleting conflicting weekly_goals: %v", err), http.StatusInternalServerError) + return + } + + // Теперь обновляем оставшиеся записи (те, которые не конфликтуют) + // Обновляем project_id и user_id из целевого проекта + _, err = tx.Exec(` + UPDATE weekly_goals wg + SET project_id = $1, user_id = p.user_id + FROM projects p + WHERE wg.project_id = $2 + AND p.id = $1 + `, finalProjectID, req.ID) + if err != nil { + log.Printf("Error updating weekly_goals: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating weekly_goals: %v", err), http.StatusInternalServerError) + return + } + + // Помечаем старый проект как удаленный + _, err = tx.Exec(` + UPDATE projects + SET deleted = TRUE + WHERE id = $1 + `, req.ID) + if err != nil { + log.Printf("Error marking project as deleted: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error marking project as deleted: %v", err), http.StatusInternalServerError) + return + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", 
err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов (проект переименован или удалён) + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Project moved successfully", + "project_id": finalProjectID, + }) +} + +func (a *App) deleteProjectHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var req ProjectDeleteRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding delete project request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Verify ownership + var ownerID int + err := a.DB.QueryRow("SELECT user_id FROM projects WHERE id = $1", req.ID).Scan(&ownerID) + if err != nil || ownerID != userID { + sendErrorWithCORS(w, "Project not found", http.StatusNotFound) + return + } + + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Удаляем все записи weekly_goals для этого проекта + _, err = tx.Exec(` + DELETE FROM weekly_goals + WHERE project_id = $1 + `, req.ID) + if err != nil { + log.Printf("Error deleting weekly_goals: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error deleting weekly_goals: %v", err), http.StatusInternalServerError) + return + } + + // Помечаем проект как удаленный + _, err = tx.Exec(` + UPDATE projects + SET deleted = TRUE + WHERE id = $1 + `, req.ID) + if 
err != nil { + log.Printf("Error marking project as deleted: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error marking project as deleted: %v", err), http.StatusInternalServerError) + return + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов (проект удалён) + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Project deleted successfully", + }) +} + +func (a *App) createProjectHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var req ProjectCreateRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding create project request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + if req.Name == "" { + sendErrorWithCORS(w, "name is required", http.StatusBadRequest) + return + } + + // Проверяем, существует ли уже проект с таким именем + var existingID int + err := a.DB.QueryRow(` + SELECT id FROM projects + WHERE name = $1 AND user_id = $2 AND deleted = FALSE + `, req.Name, userID).Scan(&existingID) + + if err == nil { + // Проект уже существует + sendErrorWithCORS(w, "Project with this name already exists", http.StatusConflict) + return + } else if err != sql.ErrNoRows { + log.Printf("Error checking project existence: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking project existence: %v", err), 
http.StatusInternalServerError) + return + } + + // Создаем новый проект + randomColor := generateRandomProjectColor() + var projectID int + err = a.DB.QueryRow(` + INSERT INTO projects (name, deleted, user_id, color) + VALUES ($1, FALSE, $2, $3) + RETURNING id + `, req.Name, userID, randomColor).Scan(&projectID) + + if err != nil { + log.Printf("Error creating project: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating project: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов (проекты попадают в саджесты) + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Project created successfully", + "project_id": projectID, + "project_name": req.Name, + }) +} + +func (a *App) todoistWebhookHandler(w http.ResponseWriter, r *http.Request) { + // Логирование входящего запроса + log.Printf("=== Todoist Webhook Request ===") + log.Printf("Method: %s", r.Method) + log.Printf("URL: %s", r.URL.String()) + log.Printf("Path: %s", r.URL.Path) + log.Printf("RemoteAddr: %s", r.RemoteAddr) + + if r.Method == "OPTIONS" { + log.Printf("OPTIONS request, returning OK") + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + // Проверка webhook secret (если настроен) + todoistWebhookSecret := getEnv("TODOIST_WEBHOOK_SECRET", "") + if todoistWebhookSecret != "" { + providedSecret := r.Header.Get("X-Todoist-Hmac-SHA256") + if providedSecret == "" { + providedSecret = r.Header.Get("X-Todoist-Webhook-Secret") + } + if providedSecret != todoistWebhookSecret { + log.Printf("Invalid Todoist webhook secret provided") + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": "Unauthorized", + "message": 
"Invalid webhook secret", + }) + return + } + log.Printf("Webhook secret validated successfully") + } + + // Читаем тело запроса + bodyBytes, err := io.ReadAll(r.Body) + if err != nil { + log.Printf("Error reading request body: %v", err) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": "Error reading request body", + "message": "Failed to read request", + }) + return + } + + log.Printf("Request body (raw): %s", string(bodyBytes)) + log.Printf("Request body length: %d bytes", len(bodyBytes)) + + // Парсим webhook от Todoist + var webhook TodoistWebhook + if err := json.Unmarshal(bodyBytes, &webhook); err != nil { + log.Printf("Error decoding Todoist webhook: %v", err) + log.Printf("Failed to parse body as JSON: %s", string(bodyBytes)) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": "Invalid request body", + "message": "Failed to parse JSON", + }) + return + } + + // Логируем структуру webhook + log.Printf("Parsed webhook structure:") + log.Printf(" EventName: %s", webhook.EventName) + log.Printf(" EventData keys: %v", getMapKeys(webhook.EventData)) + + // Проверяем, что это событие закрытия задачи + if webhook.EventName != "item:completed" { + log.Printf("Received Todoist event '%s', ignoring (only processing 'item:completed')", webhook.EventName) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": true, + "message": "Event ignored", + "event": webhook.EventName, + }) + return + } + + // Извлекаем user_id из event_data (это Todoist user_id!) 
+ var todoistUserID int64 + switch v := webhook.EventData["user_id"].(type) { + case float64: + todoistUserID = int64(v) + case string: + todoistUserID, _ = strconv.ParseInt(v, 10, 64) + default: + log.Printf("Todoist webhook: user_id not found or invalid type in event_data") + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": "Missing user_id in event_data", + "message": "Cannot identify user", + }) + return + } + + log.Printf("Todoist webhook: todoist_user_id=%d", todoistUserID) + + // Находим пользователя Play Life по todoist_user_id + var userID int + err = a.DB.QueryRow(` + SELECT user_id FROM todoist_integrations + WHERE todoist_user_id = $1 + `, todoistUserID).Scan(&userID) + + if err == sql.ErrNoRows { + // Пользователь не подключил Play Life — игнорируем + log.Printf("Todoist webhook: no user found for todoist_user_id=%d (ignoring)", todoistUserID) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": true, + "message": "User not found (not connected)", + }) + return + } + if err != nil { + log.Printf("Error finding user by todoist_user_id: %v", err) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": "Internal server error", + "message": "Database error", + }) + return + } + + log.Printf("Todoist webhook: todoist_user_id=%d -> user_id=%d", todoistUserID, userID) + + // Извлекаем content (title) и description из event_data + log.Printf("Extracting content and description from event_data...") + var title, description string + + if content, ok := webhook.EventData["content"].(string); ok { + title = strings.TrimSpace(content) + log.Printf(" Found 'content' (title): '%s' (length: %d)", title, len(title)) + } else { + log.Printf(" 'content' not found or 
not a string (type: %T, value: %v)", webhook.EventData["content"], webhook.EventData["content"]) + } + + if desc, ok := webhook.EventData["description"].(string); ok { + description = strings.TrimSpace(desc) + log.Printf(" Found 'description': '%s' (length: %d)", description, len(description)) + } else { + log.Printf(" 'description' not found or not a string (type: %T, value: %v)", webhook.EventData["description"], webhook.EventData["description"]) + } + + // Склеиваем title и description + // Логика: если есть оба - склеиваем через \n, если только один - используем его + var combinedText string + if title != "" && description != "" { + combinedText = title + "\n" + description + log.Printf(" Both title and description present, combining them") + } else if title != "" { + combinedText = title + log.Printf(" Only title present, using title only") + } else if description != "" { + combinedText = description + log.Printf(" Only description present, using description only") + } else { + combinedText = "" + log.Printf(" WARNING: Both title and description are empty!") + } + log.Printf("Combined text result: '%s' (length: %d)", combinedText, len(combinedText)) + + // Проверяем, что есть хотя бы title или description + if combinedText == "" { + log.Printf("ERROR: Todoist webhook: no content or description found in event_data") + log.Printf(" title='%s' (empty: %v), description='%s' (empty: %v)", title, title == "", description, description == "") + log.Printf("Available keys in event_data: %v", getMapKeys(webhook.EventData)) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": "Missing 'content' or 'description' in event_data", + "message": "No content to process", + }) + return + } + + log.Printf("Processing Todoist task: title='%s' (len=%d), description='%s' (len=%d), combined='%s' (len=%d)", + title, len(title), description, len(description), combinedText, 
len(combinedText)) + + // Обрабатываем сообщение через существующую логику (без отправки в Telegram) + userIDPtr := &userID + log.Printf("Calling processMessageWithoutTelegram with combined text, user_id=%d...", userID) + response, err := a.processMessageWithoutTelegram(combinedText, userIDPtr) + if err != nil { + log.Printf("ERROR processing Todoist message: %v", err) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": err.Error(), + "message": "Error processing message", + }) + return + } + + // Проверяем наличие nodes - если их нет, игнорируем сообщение + if len(response.Nodes) == 0 { + log.Printf("Todoist webhook: no nodes found in message, ignoring (not saving to database and not sending to Telegram)") + log.Printf("=== Todoist Webhook Request Ignored (No Nodes) ===") + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": true, + "message": "Message ignored (no nodes found)", + "ignored": true, + }) + return + } + + log.Printf("Successfully processed Todoist task, found %d nodes", len(response.Nodes)) + if len(response.Nodes) > 0 { + log.Printf("Nodes details:") + for i, node := range response.Nodes { + log.Printf(" Node %d: Project='%s', Score=%f", i+1, node.Project, node.Score) + } + + // Отправляем сообщение в Telegram после успешной обработки + log.Printf("Preparing to send message to Telegram...") + log.Printf("Combined text to send: '%s'", combinedText) + if err := a.sendTelegramMessageToUser(userID, combinedText); err != nil { + log.Printf("Error sending Telegram message: %v", err) + } else { + log.Printf("sendTelegramMessage call completed") + } + } else { + log.Printf("No nodes found, skipping Telegram message") + } + + log.Printf("=== Todoist Webhook Request Completed Successfully ===") + w.Header().Set("Content-Type", "application/json") + 
w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": true, + "message": "Task processed successfully", + "result": response, + }) +} + +func (a *App) telegramWebhookHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + // Парсим webhook от Telegram + var update TelegramUpdate + if err := json.NewDecoder(r.Body).Decode(&update); err != nil { + log.Printf("Error decoding Telegram webhook: %v", err) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": false, + "error": "Invalid request body", + }) + return + } + + // Определяем сообщение + var message *TelegramMessage + if update.Message != nil { + message = update.Message + } else if update.EditedMessage != nil { + message = update.EditedMessage + } else { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]bool{"ok": true}) + return + } + + if message.From == nil { + log.Printf("Telegram webhook: message without From field") + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]bool{"ok": true}) + return + } + + telegramUserID := message.From.ID + chatID := message.Chat.ID + chatIDStr := strconv.FormatInt(chatID, 10) + + log.Printf("Telegram webhook: telegram_user_id=%d, chat_id=%d, text=%s", + telegramUserID, chatID, message.Text) + + // Обработка команды /start с токеном + if strings.HasPrefix(message.Text, "/start") { + parts := strings.Fields(message.Text) + if len(parts) > 1 { + startToken := parts[1] + + var userID int + err := a.DB.QueryRow(` + SELECT user_id FROM telegram_integrations + WHERE start_token = $1 + `, startToken).Scan(&userID) + + if err == nil { + // Привязываем Telegram к пользователю + telegramUserIDStr := 
strconv.FormatInt(telegramUserID, 10) + _, err = a.DB.Exec(` + UPDATE telegram_integrations + SET telegram_user_id = $1, + chat_id = $2, + start_token = NULL, + updated_at = CURRENT_TIMESTAMP + WHERE user_id = $3 + `, telegramUserIDStr, chatIDStr, userID) + + if err != nil { + log.Printf("Error updating telegram integration: %v", err) + } else { + log.Printf("Telegram connected for user_id=%d", userID) + + // Приветственное сообщение + welcomeMsg := "✅ Telegram успешно подключен к Play Life!\n\nТеперь вы будете получать уведомления и отчеты." + if err := a.sendTelegramMessageToChat(chatID, welcomeMsg); err != nil { + log.Printf("Error sending welcome message: %v", err) + } + } + } else { + log.Printf("Invalid start_token: %s", startToken) + a.sendTelegramMessageToChat(chatID, "❌ Неверный токен. Попробуйте получить новую ссылку в приложении.") + } + } else { + // /start без токена + a.sendTelegramMessageToChat(chatID, "Привет! Для подключения используйте ссылку из приложения Play Life.") + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]bool{"ok": true}) + return + } + + // Обычное сообщение - ищем пользователя по telegram_user_id + var userID int + err := a.DB.QueryRow(` + SELECT user_id FROM telegram_integrations + WHERE telegram_user_id = $1 + `, telegramUserID).Scan(&userID) + + if err == sql.ErrNoRows { + log.Printf("User not found for telegram_user_id=%d", telegramUserID) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]bool{"ok": true}) + return + } else if err != nil { + log.Printf("Error finding user: %v", err) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + return + } + + // Обновляем chat_id (на случай переподключения) + a.DB.Exec(` + UPDATE telegram_integrations + SET chat_id = $1, updated_at = CURRENT_TIMESTAMP + WHERE user_id = $2 + `, chatIDStr, userID) + + // 
Обрабатываем сообщение + if message.Text == "" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]bool{"ok": true}) + return + } + + entities := message.Entities + if entities == nil { + entities = []TelegramEntity{} + } + + userIDPtr := &userID + response, err := a.processTelegramMessage(message.Text, entities, userIDPtr) + if err != nil { + log.Printf("Error processing message: %v", err) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "ok": true, + "result": response, + }) +} + +func (a *App) getFullStatisticsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + // Получаем данные текущей недели + currentWeekScores, err := a.getCurrentWeekScores(userID) + if err != nil { + log.Printf("Error getting current week scores: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting current week scores: %v", err), http.StatusInternalServerError) + return + } + + // Получаем ISO год и неделю для текущей даты + now := time.Now() + _, currentWeekInt := now.ISOWeek() + currentYearInt := now.Year() + + query := ` + SELECT + p.name AS project_name, + -- Определяем год и неделю, беря значение из той таблицы, где оно не NULL + COALESCE(wr.report_year, wg.goal_year) AS report_year, + COALESCE(wr.report_week, wg.goal_week) AS report_week, + + -- Фактический score: COALESCE(NULL, 0.0000) + COALESCE(wr.total_score, 0.0000) AS total_score, + + -- Normalized score из MV + COALESCE(wr.normalized_total_score, 0.0000) AS normalized_total_score, + + -- Минимальная цель: COALESCE(NULL, 0.0000) + COALESCE(wg.min_goal_score, 0.0000) AS min_goal_score, + + -- Максимальная цель: COALESCE(NULL, 0.0000) + 
COALESCE(wg.max_goal_score, 0.0000) AS max_goal_score, + p.id AS project_id, + p.color + FROM + weekly_report_mv wr + FULL OUTER JOIN + weekly_goals wg + -- Слияние по всем трем ключевым полям + ON wr.project_id = wg.project_id + AND wr.report_year = wg.goal_year + AND wr.report_week = wg.goal_week + JOIN + projects p + -- Присоединяем имя проекта, используя ID из той таблицы, где он не NULL + ON p.id = COALESCE(wr.project_id, wg.project_id) + WHERE + p.deleted = FALSE AND p.user_id = $1 + AND COALESCE(wr.report_year, wg.goal_year) IS NOT NULL + ORDER BY + report_year DESC, + report_week DESC, + project_name + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + log.Printf("Error querying full statistics: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying full statistics: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + statistics := make([]FullStatisticsItem, 0) + + for rows.Next() { + var item FullStatisticsItem + var projectID int + + err := rows.Scan( + &item.ProjectName, + &item.ReportYear, + &item.ReportWeek, + &item.TotalScore, + &item.NormalizedTotalScore, + &item.MinGoalScore, + &item.MaxGoalScore, + &projectID, + &item.Color, + ) + if err != nil { + log.Printf("Error scanning full statistics row: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error scanning data: %v", err), http.StatusInternalServerError) + return + } + + // Если это текущая неделя, заменяем данные из MV на данные из nodes + if item.ReportYear == currentYearInt && item.ReportWeek == currentWeekInt { + if score, exists := currentWeekScores[projectID]; exists { + item.TotalScore = score + // Для текущей недели normalized_total_score не отправляем + item.NormalizedTotalScore = 0 + } + } + + // Если normalized_total_score равен total_score, не отправляем его + if item.NormalizedTotalScore == item.TotalScore { + item.NormalizedTotalScore = 0 + } + + statistics = append(statistics, item) + } + + // Добавляем проекты текущей недели, которых 
нет в MV (новые проекты без исторических данных) + // Получаем goals для текущей недели + currentWeekGoalsQuery := ` + SELECT + p.id AS project_id, + p.name AS project_name, + COALESCE(wg.min_goal_score, 0.0000) AS min_goal_score, + COALESCE(wg.max_goal_score, 0.0000) AS max_goal_score, + p.color + FROM projects p + LEFT JOIN weekly_goals wg ON wg.project_id = p.id + AND wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + WHERE p.deleted = FALSE AND p.user_id = $1 + AND NOT EXISTS ( + SELECT 1 FROM weekly_report_mv wr + WHERE wr.project_id = p.id + AND wr.report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wr.report_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + ) + ` + + goalsRows, err := a.DB.Query(currentWeekGoalsQuery, userID) + if err == nil { + defer goalsRows.Close() + existingProjects := make(map[int]bool) + for _, stat := range statistics { + if stat.ReportYear == currentYearInt && stat.ReportWeek == currentWeekInt { + // Найдем project_id по имени проекта (не идеально, но работает) + var pid int + if err := a.DB.QueryRow("SELECT id FROM projects WHERE name = $1 AND user_id = $2", stat.ProjectName, userID).Scan(&pid); err == nil { + existingProjects[pid] = true + } + } + } + + for goalsRows.Next() { + var projectID int + var projectName string + var minGoalScore, maxGoalScore float64 + var projectColor string + if err := goalsRows.Scan(&projectID, &projectName, &minGoalScore, &maxGoalScore, &projectColor); err == nil { + // Добавляем только если проекта еще нет в статистике + if !existingProjects[projectID] { + totalScore := 0.0 + if score, exists := currentWeekScores[projectID]; exists { + totalScore = score + } + + // Для текущей недели normalized_total_score не отправляем + _, weekISO := time.Now().ISOWeek() + item := FullStatisticsItem{ + ProjectName: projectName, + ReportYear: time.Now().Year(), + ReportWeek: weekISO, + TotalScore: totalScore, + NormalizedTotalScore: 
0, + MinGoalScore: minGoalScore, + MaxGoalScore: maxGoalScore, + Color: projectColor, + } + statistics = append(statistics, item) + } + } + } + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(statistics) +} + +// getTodayEntriesHandler возвращает entries с nodes за сегодняшний день +func (a *App) getTodayEntriesHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + // Получаем опциональные параметры из query string + projectName := r.URL.Query().Get("project") + var projectFilter *string + if projectName != "" { + projectFilter = &projectName + } + + // Получаем дату из query string (формат: YYYY-MM-DD), если не указана - используем сегодня + dateParam := r.URL.Query().Get("date") + var targetDate time.Time + if dateParam != "" { + parsedDate, err := time.Parse("2006-01-02", dateParam) + if err != nil { + log.Printf("Error parsing date parameter: %v", err) + sendErrorWithCORS(w, "Invalid date format. 
Use YYYY-MM-DD", http.StatusBadRequest) + return + } + targetDate = parsedDate + } else { + targetDate = time.Now() + } + + // Запрос для получения entries с nodes за указанный день + // Если указан проект, показываем все записи, которые содержат хотя бы одну ноду этого проекта, + // но возвращаем все ноды этих записей, а не только ноды выбранного проекта + query := ` + WITH filtered_entries AS ( + -- Если проект указан, находим entry_id записей, содержащих хотя бы одну ноду этого проекта + SELECT DISTINCT e.id as entry_id + FROM entries e + JOIN nodes n ON n.entry_id = e.id + JOIN projects p ON n.project_id = p.id + WHERE DATE(n.created_date) = DATE($3) + AND e.user_id = $1 + AND n.user_id = $1 + AND p.user_id = $1 + AND p.deleted = FALSE + AND ($2::text IS NULL OR p.name = $2) + ), + entry_nodes AS ( + -- Получаем все ноды для найденных записей (или всех записей, если проект не указан) + SELECT + e.id as entry_id, + e.text, + e.created_date, + p.name as project_name, + n.score, + ROW_NUMBER() OVER (PARTITION BY e.id ORDER BY n.id) - 1 as node_index + FROM entries e + JOIN nodes n ON n.entry_id = e.id + JOIN projects p ON n.project_id = p.id + WHERE DATE(n.created_date) = DATE($3) + AND e.user_id = $1 + AND n.user_id = $1 + AND p.user_id = $1 + AND p.deleted = FALSE + AND ($2::text IS NULL OR e.id IN (SELECT entry_id FROM filtered_entries)) + ) + SELECT + entry_id, + text, + created_date, + json_agg( + json_build_object( + 'project_name', project_name, + 'score', score, + 'index', node_index + ) ORDER BY node_index + ) as nodes + FROM entry_nodes + GROUP BY entry_id, text, created_date + ORDER BY created_date DESC + ` + + rows, err := a.DB.Query(query, userID, projectFilter, targetDate) + if err != nil { + log.Printf("Error querying today entries: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying today entries: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + entries := make([]TodayEntry, 0) + + for rows.Next() { + var 
entry TodayEntry + var createdDate time.Time + var nodesJSON string + + err := rows.Scan( + &entry.ID, + &entry.Text, + &createdDate, + &nodesJSON, + ) + if err != nil { + log.Printf("Error scanning today entry row: %v", err) + continue + } + + // Парсим JSON с nodes + if err := json.Unmarshal([]byte(nodesJSON), &entry.Nodes); err != nil { + log.Printf("Error unmarshaling nodes JSON: %v", err) + continue + } + + // Форматируем дату в ISO 8601 + entry.CreatedDate = createdDate.Format(time.RFC3339) + + entries = append(entries, entry) + } + + if err := rows.Err(); err != nil { + log.Printf("Error iterating today entries rows: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error iterating rows: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(entries) +} + +// deleteEntryHandler удаляет entry и каскадно удаляет связанные nodes +func (a *App) deleteEntryHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + entryIDStr := vars["id"] + entryID, err := strconv.Atoi(entryIDStr) + if err != nil { + sendErrorWithCORS(w, "Invalid entry ID", http.StatusBadRequest) + return + } + + // Проверяем, что entry принадлежит пользователю + var entryUserID int + err = a.DB.QueryRow("SELECT user_id FROM entries WHERE id = $1", entryID).Scan(&entryUserID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Entry not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking entry ownership: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Проверяем права доступа + if entryUserID != userID { + sendErrorWithCORS(w, "Forbidden", http.StatusForbidden) + return + } + + 
	// Delete the entry itself; its nodes are removed automatically via the
	// ON DELETE CASCADE constraint on the nodes foreign key.
	result, err := a.DB.Exec("DELETE FROM entries WHERE id = $1 AND user_id = $2", entryID, userID)
	if err != nil {
		log.Printf("Error deleting entry: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// The ownership check above already saw the row, but the user_id predicate
	// can still make this DELETE a no-op (e.g. concurrent removal), so the
	// affected-row count is verified before reporting success.
	rowsAffected, err := result.RowsAffected()
	if err != nil {
		log.Printf("Error getting rows affected: %v", err)
		sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if rowsAffected == 0 {
		sendErrorWithCORS(w, "Entry not found", http.StatusNotFound)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"message": "Entry deleted successfully",
	})
}

// getTelegramIntegrationHandler returns the current telegram integration for
// the authenticated user together with a t.me deep link used to connect the
// bot. A start_token is generated lazily if the integration has none yet.
func (a *App) getTelegramIntegrationHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	integration, err := a.getTelegramIntegrationForUser(userID)
	if err != nil {
		sendErrorWithCORS(w, fmt.Sprintf("Failed to get telegram integration: %v", err), http.StatusInternalServerError)
		return
	}

	// Generate a start_token if there is none yet.
	// NOTE(review): both a token-generation failure and the UPDATE error are
	// deliberately ignored here, leaving deep_link empty — presumably a
	// best-effort choice; confirm this is intended.
	if integration.StartToken == nil || *integration.StartToken == "" {
		token, err := generateWebhookToken()
		if err == nil {
			_, _ = a.DB.Exec(`
				UPDATE telegram_integrations
				SET start_token = $1, updated_at = CURRENT_TIMESTAMP
				WHERE user_id = $2
			`, token, userID)
			integration.StartToken = &token
		}
	}

	// Build the deep link; stays empty when the bot username is not configured
	// or no start_token could be produced.
	var deepLink string
	if a.telegramBotUsername != "" && integration.StartToken != nil {
		deepLink = fmt.Sprintf("https://t.me/%s?start=%s", a.telegramBotUsername, *integration.StartToken)
	}

	isConnected := 
integration.ChatID != nil && integration.TelegramUserID != nil

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"id":               integration.ID,
		"telegram_user_id": integration.TelegramUserID,
		"is_connected":     isConnected,
		"deep_link":        deepLink,
	})
}

// updateTelegramIntegrationHandler is no longer used (the bot_token now comes
// from the .env file). Kept for compatibility; always returns an error.
func (a *App) updateTelegramIntegrationHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	sendErrorWithCORS(w, "Bot token is now configured via TELEGRAM_BOT_TOKEN environment variable", http.StatusBadRequest)
}

// OAuthStateClaims is the JWT claim set used as the OAuth `state` parameter:
// it binds the callback to a specific user (UserID) and flow (Type).
type OAuthStateClaims struct {
	UserID int    `json:"user_id"`
	Type   string `json:"type"`
	jwt.RegisteredClaims
}

// generateOAuthState produces a signed JWT to be used as the OAuth state for
// the Todoist flow. The token embeds the user id, is typed "todoist_oauth",
// and expires after 24 hours; it is signed with HMAC-SHA256 using jwtSecret.
func generateOAuthState(userID int, jwtSecret []byte) (string, error) {
	claims := OAuthStateClaims{
		UserID: userID,
		Type:   "todoist_oauth",
		RegisteredClaims: jwt.RegisteredClaims{
			ExpiresAt: jwt.NewNumericDate(time.Now().Add(24 * time.Hour)), // 1 day
			IssuedAt:  jwt.NewNumericDate(time.Now()),
		},
	}
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
	return token.SignedString(jwtSecret)
}

// validateOAuthState verifies the OAuth state JWT and extracts the user id.
// It rejects tokens signed with anything other than an HMAC method (guarding
// against алг-substitution, e.g. "none"/RS256 confusion), then checks validity
// and, below, that the token type matches the Todoist flow.
func validateOAuthState(stateString string, jwtSecret []byte) (int, error) {
	token, err := jwt.ParseWithClaims(stateString, &OAuthStateClaims{}, func(token *jwt.Token) (interface{}, error) {
		// Only accept HMAC signatures; the key itself is the shared secret.
		if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"])
		}
		return jwtSecret, nil
	})
	if err != nil {
		return 0, err
	}

	claims, ok := token.Claims.(*OAuthStateClaims)
	if !ok || !token.Valid {
		return 0, fmt.Errorf("invalid 
token")
	}

	// Ensure the state was minted for this flow and not another JWT we issue.
	if claims.Type != "todoist_oauth" {
		return 0, fmt.Errorf("wrong token type")
	}

	return claims.UserID, nil
}

// exchangeCodeForToken exchanges an OAuth authorization code for a Todoist
// access token via the standard form-POST token endpoint. It returns an error
// on transport failure, non-200 status, undecodable response, or when the
// response body carries an "error" field.
func exchangeCodeForToken(code, redirectURI, clientID, clientSecret string) (string, error) {
	data := url.Values{}
	data.Set("client_id", clientID)
	data.Set("client_secret", clientSecret)
	data.Set("code", code)
	data.Set("redirect_uri", redirectURI)

	resp, err := http.PostForm("https://todoist.com/oauth/access_token", data)
	if err != nil {
		return "", fmt.Errorf("failed to exchange code: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Include the raw body in the error to aid debugging; read errors are
		// deliberately ignored (best-effort diagnostics).
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("token exchange failed: %s", string(body))
	}

	var result struct {
		AccessToken string `json:"access_token"`
		Error       string `json:"error"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", fmt.Errorf("failed to decode response: %w", err)
	}

	if result.Error != "" {
		return "", fmt.Errorf("token exchange error: %s", result.Error)
	}

	return result.AccessToken, nil
}

// getTodoistUserInfo fetches the connected user's id and email via the
// Todoist Sync API (sync/v9) using the given access token. The anonymous
// struct return keeps the helper self-contained.
func getTodoistUserInfo(accessToken string) (struct {
	ID    int64
	Email string
}, error) {
	var userInfo struct {
		ID    int64
		Email string
	}

	// Build the Sync API request: full sync token, user resource only.
	data := url.Values{}
	data.Set("sync_token", "*")
	data.Set("resource_types", `["user"]`)

	req, err := http.NewRequest("POST", "https://api.todoist.com/sync/v9/sync", strings.NewReader(data.Encode()))
	if err != nil {
		log.Printf("Todoist API: failed to create request: %v", err)
		return userInfo, err
	}
	req.Header.Set("Authorization", "Bearer "+accessToken)
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("User-Agent", "PlayLife")

	log.Printf("Todoist API: requesting user info from sync/v9/sync")

	client 
:= &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + log.Printf("Todoist API: request failed: %v", err) + return userInfo, fmt.Errorf("failed to get user info: %w", err) + } + defer resp.Body.Close() + + bodyBytes, _ := io.ReadAll(resp.Body) + log.Printf("Todoist API: response status=%d, body=%s", resp.StatusCode, string(bodyBytes)) + + if resp.StatusCode != http.StatusOK { + return userInfo, fmt.Errorf("get user info failed (status %d): %s", resp.StatusCode, string(bodyBytes)) + } + + // Парсим ответ - в Sync API user может быть объектом или массивом + var result map[string]interface{} + if err := json.Unmarshal(bodyBytes, &result); err != nil { + log.Printf("Todoist API: failed to parse JSON: %v, body: %s", err, string(bodyBytes)) + return userInfo, fmt.Errorf("failed to decode user info: %w", err) + } + + log.Printf("Todoist API: parsed response keys: %v", getMapKeys(result)) + + // Функция для извлечения ID из разных типов + extractID := func(idValue interface{}) int64 { + switch v := idValue.(type) { + case float64: + return int64(v) + case int64: + return v + case int: + return int64(v) + case string: + if id, err := strconv.ParseInt(v, 10, 64); err == nil { + return id + } + } + return 0 + } + + // Проверяем разные варианты структуры ответа + if userObj, ok := result["user"].(map[string]interface{}); ok { + // Один объект user + userInfo.ID = extractID(userObj["id"]) + if email, ok := userObj["email"].(string); ok { + userInfo.Email = email + } + } else if usersArr, ok := result["user"].([]interface{}); ok && len(usersArr) > 0 { + // Массив users, берем первый + if userObj, ok := usersArr[0].(map[string]interface{}); ok { + userInfo.ID = extractID(userObj["id"]) + if email, ok := userObj["email"].(string); ok { + userInfo.Email = email + } + } + } else { + log.Printf("Todoist API: user not found in response, available keys: %v", getMapKeys(result)) + return userInfo, fmt.Errorf("user not found in response") + } + + if 
userInfo.ID == 0 || userInfo.Email == "" { + log.Printf("Todoist API: incomplete user info: ID=%d, Email=%s", userInfo.ID, userInfo.Email) + return userInfo, fmt.Errorf("incomplete user info: ID=%d, Email=%s", userInfo.ID, userInfo.Email) + } + + log.Printf("Todoist API: successfully got user info: ID=%d, Email=%s", userInfo.ID, userInfo.Email) + return userInfo, nil +} + +// todoistOAuthConnectHandler инициирует OAuth flow +func (a *App) todoistOAuthConnectHandler(w http.ResponseWriter, r *http.Request) { + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + clientID := getEnv("TODOIST_CLIENT_ID", "") + clientSecret := getEnv("TODOIST_CLIENT_SECRET", "") + baseURL := getEnv("WEBHOOK_BASE_URL", "") + + if clientID == "" || clientSecret == "" { + sendErrorWithCORS(w, "TODOIST_CLIENT_ID and TODOIST_CLIENT_SECRET must be configured", http.StatusInternalServerError) + return + } + if baseURL == "" { + sendErrorWithCORS(w, "WEBHOOK_BASE_URL must be configured", http.StatusInternalServerError) + return + } + + redirectURI := strings.TrimRight(baseURL, "/") + "/api/integrations/todoist/oauth/callback" + + state, err := generateOAuthState(userID, a.jwtSecret) + if err != nil { + log.Printf("Todoist OAuth: failed to generate state: %v", err) + sendErrorWithCORS(w, "Failed to generate OAuth state", http.StatusInternalServerError) + return + } + + authURL := fmt.Sprintf( + "https://todoist.com/oauth/authorize?client_id=%s&scope=data:read_write&state=%s&redirect_uri=%s", + url.QueryEscape(clientID), + url.QueryEscape(state), + url.QueryEscape(redirectURI), + ) + + log.Printf("Todoist OAuth: returning auth URL for user_id=%d", userID) + + // Возвращаем JSON с URL для редиректа (frontend сделает редирект) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "auth_url": authURL, + }) +} + +// todoistOAuthCallbackHandler 
обрабатывает OAuth callback +func (a *App) todoistOAuthCallbackHandler(w http.ResponseWriter, r *http.Request) { + frontendURL := getEnv("WEBHOOK_BASE_URL", "") + redirectSuccess := frontendURL + "/?integration=todoist&status=connected" + redirectError := frontendURL + "/?integration=todoist&status=error" + + clientID := getEnv("TODOIST_CLIENT_ID", "") + clientSecret := getEnv("TODOIST_CLIENT_SECRET", "") + baseURL := getEnv("WEBHOOK_BASE_URL", "") + + if clientID == "" || clientSecret == "" || baseURL == "" { + log.Printf("Todoist OAuth: missing configuration") + http.Redirect(w, r, redirectError+"&message=config_error", http.StatusTemporaryRedirect) + return + } + + redirectURI := strings.TrimRight(baseURL, "/") + "/api/integrations/todoist/oauth/callback" + + // Проверяем state + state := r.URL.Query().Get("state") + userID, err := validateOAuthState(state, a.jwtSecret) + if err != nil { + log.Printf("Todoist OAuth: invalid state: %v", err) + http.Redirect(w, r, redirectError+"&message=invalid_state", http.StatusTemporaryRedirect) + return + } + + // Получаем code + code := r.URL.Query().Get("code") + if code == "" { + log.Printf("Todoist OAuth: no code in callback") + http.Redirect(w, r, redirectError+"&message=no_code", http.StatusTemporaryRedirect) + return + } + + // Обмениваем code на access_token + accessToken, err := exchangeCodeForToken(code, redirectURI, clientID, clientSecret) + if err != nil { + log.Printf("Todoist OAuth: token exchange failed: %v", err) + http.Redirect(w, r, redirectError+"&message=token_exchange_failed", http.StatusTemporaryRedirect) + return + } + + // Получаем информацию о пользователе + todoistUser, err := getTodoistUserInfo(accessToken) + if err != nil { + log.Printf("Todoist OAuth: get user info failed: %v", err) + http.Redirect(w, r, redirectError+"&message=user_info_failed", http.StatusTemporaryRedirect) + return + } + + log.Printf("Todoist OAuth: user_id=%d connected todoist_user_id=%d email=%s", userID, todoistUser.ID, 
todoistUser.Email) + + // Сохраняем в БД + _, err = a.DB.Exec(` + INSERT INTO todoist_integrations (user_id, todoist_user_id, todoist_email, access_token) + VALUES ($1, $2, $3, $4) + ON CONFLICT (user_id) DO UPDATE SET + todoist_user_id = $2, + todoist_email = $3, + access_token = $4, + updated_at = CURRENT_TIMESTAMP + `, userID, todoistUser.ID, todoistUser.Email, accessToken) + + if err != nil { + log.Printf("Todoist OAuth: DB error: %v", err) + http.Redirect(w, r, redirectError+"&message=db_error", http.StatusTemporaryRedirect) + return + } + + // Редирект на страницу интеграций + http.Redirect(w, r, redirectSuccess, http.StatusTemporaryRedirect) +} + +// getTodoistStatusHandler возвращает статус подключения Todoist +func (a *App) getTodoistStatusHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var todoistEmail sql.NullString + err := a.DB.QueryRow(` + SELECT todoist_email FROM todoist_integrations + WHERE user_id = $1 AND access_token IS NOT NULL + `, userID).Scan(&todoistEmail) + + if err == sql.ErrNoRows || !todoistEmail.Valid { + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "connected": false, + }) + return + } + if err != nil { + sendErrorWithCORS(w, fmt.Sprintf("Failed to get status: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "connected": true, + "todoist_email": todoistEmail.String, + }) +} + +// ============================================ +// Tasks handlers +// ============================================ + +// getTasksHandler возвращает список задач пользователя +func (a *App) getTasksHandler(w http.ResponseWriter, r *http.Request) 
{ + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + // Запрос с получением всех необходимых данных для группировки и отображения + query := ` + SELECT + t.id, + t.name, + t.completed, + t.last_completed_at, + t.next_show_at, + t.repetition_period::text, + t.repetition_date, + t.progression_base, + t.wishlist_id, + t.config_id, + t.reward_policy, + t.group_name, + COALESCE(( + SELECT COUNT(*) + FROM tasks st + WHERE st.parent_task_id = t.id AND st.deleted = FALSE + ), 0) as subtasks_count, + COALESCE( + (SELECT array_agg(DISTINCT p.name) FILTER (WHERE p.name IS NOT NULL) + FROM reward_configs rc + JOIN projects p ON rc.project_id = p.id + WHERE rc.task_id = t.id), + ARRAY[]::text[] + ) as project_names, + COALESCE( + (SELECT array_agg(DISTINCT p.name) FILTER (WHERE p.name IS NOT NULL) + FROM tasks st + JOIN reward_configs rc ON rc.task_id = st.id + JOIN projects p ON rc.project_id = p.id + WHERE st.parent_task_id = t.id AND st.deleted = FALSE), + ARRAY[]::text[] + ) as subtask_project_names, + COALESCE(td.auto_complete, FALSE) as auto_complete + FROM tasks t + LEFT JOIN task_drafts td ON td.task_id = t.id AND td.user_id = $1 + WHERE t.user_id = $1 AND t.parent_task_id IS NULL AND t.deleted = FALSE + ORDER BY + -- Сначала разделяем на невыполненные (0) и выполненные (1) + CASE WHEN t.last_completed_at IS NULL OR t.last_completed_at::date < CURRENT_DATE THEN 0 ELSE 1 END, + -- Для невыполненных: сортируем по completed DESC (больше завершений выше), затем по id ASC (раньше добавленные выше) + CASE WHEN t.last_completed_at IS NULL OR t.last_completed_at::date < CURRENT_DATE THEN -t.completed ELSE 0 END, + CASE WHEN t.last_completed_at IS NULL OR t.last_completed_at::date < CURRENT_DATE THEN t.id ELSE 0 END, + -- Для выполненных: сортируем по next_show_at ASC (ранние в начале), 
NULL значения в начале через COALESCE + CASE WHEN t.last_completed_at IS NOT NULL AND t.last_completed_at::date >= CURRENT_DATE + THEN COALESCE(t.next_show_at, '1970-01-01'::timestamp with time zone) + ELSE '1970-01-01'::timestamp with time zone + END + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + log.Printf("Error querying tasks: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying tasks: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + tasks := make([]Task, 0) + for rows.Next() { + var task Task + var lastCompletedAt sql.NullString + var nextShowAt sql.NullString + var repetitionPeriod sql.NullString + var repetitionDate sql.NullString + var progressionBase sql.NullFloat64 + var wishlistID sql.NullInt64 + var configID sql.NullInt64 + var rewardPolicy sql.NullString + var groupName sql.NullString + var projectNames pq.StringArray + var subtaskProjectNames pq.StringArray + var autoComplete bool + + err := rows.Scan( + &task.ID, + &task.Name, + &task.Completed, + &lastCompletedAt, + &nextShowAt, + &repetitionPeriod, + &repetitionDate, + &progressionBase, + &wishlistID, + &configID, + &rewardPolicy, + &groupName, + &task.SubtasksCount, + &projectNames, + &subtaskProjectNames, + &autoComplete, + ) + if err != nil { + log.Printf("Error scanning task: %v", err) + continue + } + + if lastCompletedAt.Valid { + task.LastCompletedAt = &lastCompletedAt.String + } + if nextShowAt.Valid { + task.NextShowAt = &nextShowAt.String + } + if repetitionPeriod.Valid { + task.RepetitionPeriod = &repetitionPeriod.String + } + if repetitionDate.Valid { + task.RepetitionDate = &repetitionDate.String + } + if progressionBase.Valid { + task.HasProgression = true + task.ProgressionBase = &progressionBase.Float64 + } else { + task.HasProgression = false + } + if wishlistID.Valid { + wishlistIDInt := int(wishlistID.Int64) + task.WishlistID = &wishlistIDInt + } + if configID.Valid { + configIDInt := int(configID.Int64) + task.ConfigID 
= &configIDInt + } + if rewardPolicy.Valid { + task.RewardPolicy = &rewardPolicy.String + } + if groupName.Valid && groupName.String != "" { + groupNameVal := groupName.String + task.GroupName = &groupNameVal + } + task.AutoComplete = autoComplete + + // Объединяем проекты из основной задачи и подзадач + allProjects := make(map[string]bool) + for _, pn := range projectNames { + if pn != "" { + allProjects[pn] = true + } + } + for _, pn := range subtaskProjectNames { + if pn != "" { + allProjects[pn] = true + } + } + + task.ProjectNames = make([]string, 0, len(allProjects)) + for pn := range allProjects { + task.ProjectNames = append(task.ProjectNames, pn) + } + + tasks = append(tasks, task) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(tasks) +} + +// getTaskDetailHandler возвращает детальную информацию о задаче +func (a *App) getTaskDetailHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + taskID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest) + return + } + + // Получаем основную задачу + var task Task + var rewardMessage sql.NullString + var progressionBase sql.NullFloat64 + var lastCompletedAt sql.NullString + var nextShowAt sql.NullString + var repetitionPeriod sql.NullString + var repetitionDate sql.NullString + var wishlistID sql.NullInt64 + var configID sql.NullInt64 + var rewardPolicy sql.NullString + var groupName sql.NullString + + // Сначала получаем значение как строку напрямую, чтобы избежать проблем с NULL + var repetitionPeriodStr string + var repetitionDateStr string + err = a.DB.QueryRow(` + SELECT id, name, completed, last_completed_at, next_show_at, reward_message, 
progression_base, + CASE WHEN repetition_period IS NULL THEN '' ELSE repetition_period::text END as repetition_period, + COALESCE(repetition_date, '') as repetition_date, + wishlist_id, + config_id, + reward_policy, + group_name + FROM tasks + WHERE id = $1 AND user_id = $2 AND deleted = FALSE + `, taskID, userID).Scan( + &task.ID, &task.Name, &task.Completed, &lastCompletedAt, &nextShowAt, &rewardMessage, &progressionBase, &repetitionPeriodStr, &repetitionDateStr, &wishlistID, &configID, &rewardPolicy, &groupName, + ) + + log.Printf("Scanned repetition_period for task %d: String='%s', repetition_date='%s'", taskID, repetitionPeriodStr, repetitionDateStr) + + // Преобразуем в sql.NullString для совместимости + if repetitionPeriodStr != "" { + repetitionPeriod = sql.NullString{String: repetitionPeriodStr, Valid: true} + } else { + repetitionPeriod = sql.NullString{Valid: false} + } + if repetitionDateStr != "" { + repetitionDate = sql.NullString{String: repetitionDateStr, Valid: true} + } else { + repetitionDate = sql.NullString{Valid: false} + } + + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Task not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error querying task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying task: %v", err), http.StatusInternalServerError) + return + } + + if rewardMessage.Valid { + task.RewardMessage = &rewardMessage.String + } + if progressionBase.Valid { + task.ProgressionBase = &progressionBase.Float64 + } + if lastCompletedAt.Valid { + task.LastCompletedAt = &lastCompletedAt.String + } + if nextShowAt.Valid { + task.NextShowAt = &nextShowAt.String + } + if repetitionPeriod.Valid && repetitionPeriod.String != "" { + task.RepetitionPeriod = &repetitionPeriod.String + log.Printf("Task %d has repetition_period: %s", task.ID, repetitionPeriod.String) + } else { + log.Printf("Task %d has no repetition_period (Valid: %v, String: '%s')", task.ID, repetitionPeriod.Valid, repetitionPeriod.String) + } 
+ if repetitionDate.Valid && repetitionDate.String != "" { + task.RepetitionDate = &repetitionDate.String + log.Printf("Task %d has repetition_date: %s", task.ID, repetitionDate.String) + } + if wishlistID.Valid { + wishlistIDInt := int(wishlistID.Int64) + task.WishlistID = &wishlistIDInt + } + if configID.Valid { + configIDInt := int(configID.Int64) + task.ConfigID = &configIDInt + } + if rewardPolicy.Valid { + task.RewardPolicy = &rewardPolicy.String + } + if groupName.Valid && groupName.String != "" { + groupNameVal := groupName.String + task.GroupName = &groupNameVal + } + + // Получаем награды основной задачи + rewards := make([]Reward, 0) + rewardRows, err := a.DB.Query(` + SELECT rc.id, rc.position, p.name AS project_name, rc.value, rc.use_progression + FROM reward_configs rc + JOIN projects p ON rc.project_id = p.id + WHERE rc.task_id = $1 + ORDER BY rc.position + `, taskID) + + if err != nil { + log.Printf("Error querying rewards: %v", err) + } else { + defer rewardRows.Close() + for rewardRows.Next() { + var reward Reward + err := rewardRows.Scan(&reward.ID, &reward.Position, &reward.ProjectName, &reward.Value, &reward.UseProgression) + if err != nil { + log.Printf("Error scanning reward: %v", err) + continue + } + rewards = append(rewards, reward) + } + } + + // Получаем подзадачи + subtasks := make([]Subtask, 0) + subtaskRows, err := a.DB.Query(` + SELECT id, name, completed, last_completed_at, reward_message, progression_base, position + FROM tasks + WHERE parent_task_id = $1 AND deleted = FALSE + ORDER BY COALESCE(position, id) + `, taskID) + + if err != nil { + log.Printf("Error querying subtasks: %v", err) + } else { + defer subtaskRows.Close() + subtaskMap := make(map[int]*Subtask) + subtaskIDs := make([]int, 0) + + for subtaskRows.Next() { + var subtaskTask Task + var subtaskRewardMessage sql.NullString + var subtaskProgressionBase sql.NullFloat64 + var subtaskLastCompletedAt sql.NullString + var subtaskPosition sql.NullInt64 + + err := 
subtaskRows.Scan( + &subtaskTask.ID, &subtaskTask.Name, &subtaskTask.Completed, + &subtaskLastCompletedAt, &subtaskRewardMessage, &subtaskProgressionBase, + &subtaskPosition, + ) + if err != nil { + log.Printf("Error scanning subtask: %v", err) + continue + } + + if subtaskRewardMessage.Valid { + subtaskTask.RewardMessage = &subtaskRewardMessage.String + } + if subtaskProgressionBase.Valid { + subtaskTask.ProgressionBase = &subtaskProgressionBase.Float64 + } + if subtaskLastCompletedAt.Valid { + subtaskTask.LastCompletedAt = &subtaskLastCompletedAt.String + } + if subtaskPosition.Valid { + pos := int(subtaskPosition.Int64) + subtaskTask.Position = &pos + } + + subtaskIDs = append(subtaskIDs, subtaskTask.ID) + subtask := Subtask{ + Task: subtaskTask, + Rewards: make([]Reward, 0), + } + subtaskMap[subtaskTask.ID] = &subtask + } + + // Загружаем все награды всех подзадач одним запросом + if len(subtaskIDs) > 0 { + // Используем параметризованный запрос с ANY(ARRAY[...]) + query := ` + SELECT rc.task_id, rc.id, rc.position, p.name AS project_name, rc.value, rc.use_progression + FROM reward_configs rc + JOIN projects p ON rc.project_id = p.id + WHERE rc.task_id = ANY($1::int[]) + ORDER BY rc.task_id, rc.position + ` + + subtaskRewardRows, err := a.DB.Query(query, pq.Array(subtaskIDs)) + if err != nil { + log.Printf("Error querying subtask rewards: %v", err) + } else { + defer subtaskRewardRows.Close() + for subtaskRewardRows.Next() { + var taskID int + var reward Reward + err := subtaskRewardRows.Scan(&taskID, &reward.ID, &reward.Position, &reward.ProjectName, &reward.Value, &reward.UseProgression) + if err != nil { + log.Printf("Error scanning subtask reward: %v", err) + continue + } + if subtask, exists := subtaskMap[taskID]; exists { + subtask.Rewards = append(subtask.Rewards, reward) + } + } + } + } + + // Преобразуем map в slice, сохраняя порядок по ID + for _, id := range subtaskIDs { + if subtask, exists := subtaskMap[id]; exists { + subtasks = append(subtasks, 
*subtask) + } + } + } + + // Инициализируем auto_complete значением по умолчанию + task.AutoComplete = false + + // Загружаем данные из драфта, если он существует + var draftProgressionValue sql.NullFloat64 + var draftAutoComplete sql.NullBool + var draftProgressionValuePtr *float64 + var draftSubtasks []DraftSubtask + err = a.DB.QueryRow(` + SELECT progression_value, auto_complete + FROM task_drafts + WHERE task_id = $1 AND user_id = $2 + `, taskID, userID).Scan(&draftProgressionValue, &draftAutoComplete) + + if err == nil { + // Драфт существует, загружаем данные + if draftProgressionValue.Valid { + draftProgressionValuePtr = &draftProgressionValue.Float64 + } + // Устанавливаем auto_complete из драфта (если Valid, иначе остается false) + if draftAutoComplete.Valid { + task.AutoComplete = draftAutoComplete.Bool + log.Printf("Task %d: auto_complete set to %v from draft", taskID, task.AutoComplete) + } else { + log.Printf("Task %d: draft exists but auto_complete is NULL, keeping default false", taskID) + } + + // Загружаем подзадачи из драфта + draftSubtaskRows, err := a.DB.Query(` + SELECT subtask_id + FROM task_draft_subtasks + WHERE task_draft_id = (SELECT id FROM task_drafts WHERE task_id = $1 AND user_id = $2) + `, taskID, userID) + if err == nil { + defer draftSubtaskRows.Close() + draftSubtasks = make([]DraftSubtask, 0) + validSubtaskIDs := make(map[int]bool) + // Создаем map валидных подзадач для фильтрации + for _, subtask := range subtasks { + validSubtaskIDs[subtask.Task.ID] = true + } + + for draftSubtaskRows.Next() { + var subtaskID int + if err := draftSubtaskRows.Scan(&subtaskID); err == nil { + // Игнорируем подзадачи, которых больше нет в основной задаче + if validSubtaskIDs[subtaskID] { + draftSubtasks = append(draftSubtasks, DraftSubtask{ + SubtaskID: subtaskID, + }) + } + } + } + } else if err != sql.ErrNoRows { + log.Printf("Error loading draft subtasks for task %d: %v", taskID, err) + } + } else if err != sql.ErrNoRows { + log.Printf("Error 
loading draft for task %d: %v", taskID, err) + } else { + log.Printf("Task %d: no draft found, auto_complete remains false", taskID) + } + // Если драфта нет (err == sql.ErrNoRows), auto_complete остается false + log.Printf("Task %d: final auto_complete value = %v", taskID, task.AutoComplete) + + response := TaskDetail{ + Task: task, + Rewards: rewards, + Subtasks: subtasks, + } + + // Устанавливаем DraftProgressionValue если он был загружен + if draftProgressionValuePtr != nil { + response.DraftProgressionValue = draftProgressionValuePtr + } + + // Устанавливаем DraftSubtasks если они были загружены + if len(draftSubtasks) > 0 { + response.DraftSubtasks = draftSubtasks + } + + // Если задача связана с wishlist, загружаем базовую информацию о wishlist + if wishlistID.Valid { + var wishlistName string + err := a.DB.QueryRow(` + SELECT name + FROM wishlist_items + WHERE id = $1 AND deleted = FALSE + `, wishlistID.Int64).Scan(&wishlistName) + + if err == nil { + unlocked, err := a.checkWishlistUnlock(int(wishlistID.Int64), userID) + if err != nil { + log.Printf("Error checking wishlist unlock status: %v", err) + unlocked = false + } + + response.WishlistInfo = &WishlistInfo{ + ID: int(wishlistID.Int64), + Name: wishlistName, + Unlocked: unlocked, + } + } else if err != sql.ErrNoRows { + log.Printf("Error loading wishlist info for task %d: %v", taskID, err) + } + } + + // Если задача - тест (есть config_id), загружаем данные конфигурации + if configID.Valid { + var wordsCount int + var maxCards sql.NullInt64 + err := a.DB.QueryRow(` + SELECT words_count, max_cards + FROM configs + WHERE id = $1 + `, configID.Int64).Scan(&wordsCount, &maxCards) + + if err == nil { + response.WordsCount = &wordsCount + if maxCards.Valid { + maxCardsInt := int(maxCards.Int64) + response.MaxCards = &maxCardsInt + } + + // Загружаем связанные словари + dictRows, err := a.DB.Query(` + SELECT dictionary_id + FROM config_dictionaries + WHERE config_id = $1 + `, configID.Int64) + if err == nil 
{ + defer dictRows.Close() + dictionaryIDs := make([]int, 0) + for dictRows.Next() { + var dictID int + if err := dictRows.Scan(&dictID); err == nil { + dictionaryIDs = append(dictionaryIDs, dictID) + } + } + if len(dictionaryIDs) > 0 { + response.DictionaryIDs = dictionaryIDs + } + } + } else { + log.Printf("Error loading config for task %d: %v", taskID, err) + } + } + + log.Printf("Task %d: Sending response with auto_complete = %v (task.AutoComplete = %v)", taskID, response.Task.AutoComplete, task.AutoComplete) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// findProjectByName находит проект по имени (регистронезависимо) или возвращает ошибку +func (a *App) findProjectByName(projectName string, userID int) (int, error) { + var projectID int + err := a.DB.QueryRow(` + SELECT id FROM projects + WHERE LOWER(name) = LOWER($1) AND user_id = $2 AND deleted = FALSE + `, projectName, userID).Scan(&projectID) + + if err == sql.ErrNoRows { + return 0, fmt.Errorf("project not found: %s", projectName) + } + if err != nil { + return 0, fmt.Errorf("error finding project: %w", err) + } + + return projectID, nil +} + +// findProjectByNameTx находит проект по имени в транзакции +func (a *App) findProjectByNameTx(tx *sql.Tx, projectName string, userID int) (int, error) { + var projectID int + err := tx.QueryRow(` + SELECT id FROM projects + WHERE LOWER(name) = LOWER($1) AND user_id = $2 AND deleted = FALSE + `, projectName, userID).Scan(&projectID) + + if err == sql.ErrNoRows { + return 0, fmt.Errorf("project not found: %s", projectName) + } + if err != nil { + return 0, fmt.Errorf("error finding project: %w", err) + } + + return projectID, nil +} + +// createTaskHandler создает новую задачу +func (a *App) createTaskHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + 
sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var req TaskRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding task request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Валидация + if len(strings.TrimSpace(req.Name)) < 1 { + sendErrorWithCORS(w, "Task name is required and must be at least 1 character", http.StatusBadRequest) + return + } + + // Проверяем, что все rewards имеют project_name + for _, reward := range req.Rewards { + if strings.TrimSpace(reward.ProjectName) == "" { + sendErrorWithCORS(w, "Project name is required for all rewards", http.StatusBadRequest) + return + } + } + + // Валидация wishlist_id: если указан, проверяем что желание существует и пользователь имеет доступ + var wishlistName string + if req.WishlistID != nil { + var wishlistOwnerID int + var authorID sql.NullInt64 + var boardID sql.NullInt64 + err := a.DB.QueryRow(` + SELECT user_id, name, author_id, board_id FROM wishlist_items + WHERE id = $1 AND deleted = FALSE + `, *req.WishlistID).Scan(&wishlistOwnerID, &wishlistName, &authorID, &boardID) + + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusBadRequest) + return + } + if err != nil { + log.Printf("Error checking wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking wishlist item: %v", err), http.StatusInternalServerError) + return + } + + hasAccess := wishlistOwnerID == userID + // Проверяем, является ли пользователь автором желания + if !hasAccess && authorID.Valid && authorID.Int64 == int64(userID) { + hasAccess = true + } + // Проверяем доступ к доске, если желание принадлежит доске + if !hasAccess && boardID.Valid { + var boardOwnerID int + err := a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID.Int64).Scan(&boardOwnerID) + if err == nil && boardOwnerID == userID { + hasAccess = true + } 
else if err == nil { + var isMember bool + a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2)`, boardID.Int64, userID).Scan(&isMember) + if isMember { + hasAccess = true + } + } + } + + if !hasAccess { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + + // Проверяем, что нет другой активной (не удаленной и не выполненной) задачи с таким wishlist_id для этого пользователя + // Если задача была выполнена (completed > 0) или удалена, можно создать новую + var existingTaskID int + var existingTaskCompleted int + err = a.DB.QueryRow(` + SELECT id, completed FROM tasks + WHERE wishlist_id = $1 AND user_id = $2 AND deleted = FALSE + `, *req.WishlistID, userID).Scan(&existingTaskID, &existingTaskCompleted) + + if err != sql.ErrNoRows { + if err != nil { + log.Printf("Error checking existing task for wishlist: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking existing task: %v", err), http.StatusInternalServerError) + return + } + // Если задача была выполнена (completed > 0), можно создать новую + if existingTaskCompleted > 0 { + log.Printf("Existing task %d for wishlist %d was completed (%d times), marking as deleted and allowing new task creation", existingTaskID, *req.WishlistID, existingTaskCompleted) + // Помечаем старую выполненную задачу как удаленную, чтобы избежать конфликта с уникальным индексом + _, err = a.DB.Exec(` + UPDATE tasks + SET deleted = TRUE + WHERE id = $1 + `, existingTaskID) + if err != nil { + log.Printf("Error marking existing completed task as deleted: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error marking existing task as deleted: %v", err), http.StatusInternalServerError) + return + } + } else { + sendErrorWithCORS(w, "Task already exists for this wishlist item", http.StatusBadRequest) + return + } + } + + // Если название задачи не указано или пустое, используем название желания + if strings.TrimSpace(req.Name) == "" { + req.Name = 
wishlistName + } + + // Если сообщение награды не указано или пустое, устанавливаем "Выполнить желание: {TITLE}" + if req.RewardMessage == nil || strings.TrimSpace(*req.RewardMessage) == "" { + rewardMsg := fmt.Sprintf("Выполнить желание: %s", wishlistName) + req.RewardMessage = &rewardMsg + } + + // Задачи, привязанные к желанию, не могут быть периодическими + if (req.RepetitionPeriod != nil && strings.TrimSpace(*req.RepetitionPeriod) != "") || + (req.RepetitionDate != nil && strings.TrimSpace(*req.RepetitionDate) != "") { + // Проверяем, что это не бесконечная задача (оба поля = 0) + isPeriodZero := req.RepetitionPeriod != nil && (strings.TrimSpace(*req.RepetitionPeriod) == "0 day" || strings.HasPrefix(strings.TrimSpace(*req.RepetitionPeriod), "0 ")) + isDateZero := req.RepetitionDate != nil && (strings.TrimSpace(*req.RepetitionDate) == "0 week" || strings.HasPrefix(strings.TrimSpace(*req.RepetitionDate), "0 ")) + if !isPeriodZero || !isDateZero { + sendErrorWithCORS(w, "Tasks linked to wishlist items cannot be periodic", http.StatusBadRequest) + return + } + } + + // Задачи, привязанные к желанию, не могут иметь прогрессию + if req.ProgressionBase != nil { + sendErrorWithCORS(w, "Tasks linked to wishlist items cannot have progression", http.StatusBadRequest) + return + } + } + + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Создаем основную задачу + var taskID int + var rewardMessage sql.NullString + var progressionBase sql.NullFloat64 + var repetitionPeriod sql.NullString + var repetitionDate sql.NullString + if req.RewardMessage != nil { + rewardMessage = sql.NullString{String: *req.RewardMessage, Valid: true} + } + if req.ProgressionBase != nil { + progressionBase = sql.NullFloat64{Float64: *req.ProgressionBase, Valid: true} + } + if 
req.RepetitionPeriod != nil && strings.TrimSpace(*req.RepetitionPeriod) != "" { + repetitionPeriod = sql.NullString{String: strings.TrimSpace(*req.RepetitionPeriod), Valid: true} + log.Printf("Creating task with repetition_period: %s", repetitionPeriod.String) + } else { + log.Printf("Creating task without repetition_period (req.RepetitionPeriod: %v)", req.RepetitionPeriod) + } + if req.RepetitionDate != nil && strings.TrimSpace(*req.RepetitionDate) != "" { + repetitionDate = sql.NullString{String: strings.TrimSpace(*req.RepetitionDate), Valid: true} + log.Printf("Creating task with repetition_date: %s", repetitionDate.String) + } + + // Используем CAST для преобразования строки в INTERVAL + var repetitionPeriodValue interface{} + if repetitionPeriod.Valid { + repetitionPeriodValue = repetitionPeriod.String + } else { + repetitionPeriodValue = nil + } + + // Подготовка wishlist_id для INSERT + var wishlistIDValue interface{} + if req.WishlistID != nil { + wishlistIDValue = *req.WishlistID + log.Printf("Creating task with wishlist_id: %d", *req.WishlistID) + } else { + wishlistIDValue = nil + log.Printf("Creating task without wishlist_id") + } + + // Подготовка reward_policy: если задача связана с желанием и политика не указана, используем "personal" по умолчанию + var rewardPolicyValue interface{} + if req.WishlistID != nil { + if req.RewardPolicy != nil && (*req.RewardPolicy == "personal" || *req.RewardPolicy == "general") { + rewardPolicyValue = *req.RewardPolicy + } else { + rewardPolicyValue = "personal" // Значение по умолчанию для задач, связанных с желаниями + } + } else { + rewardPolicyValue = nil // NULL для задач, не связанных с желаниями + } + + // Используем условный SQL для обработки NULL значений + var insertSQL string + var insertArgs []interface{} + if repetitionPeriod.Valid { + // Для repetition_period выставляем сегодняшнюю дату + + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + 
loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. Using UTC instead.", timezoneStr, err) + loc = time.UTC + } + + now := time.Now().In(loc) + insertSQL = ` + INSERT INTO tasks (user_id, name, reward_message, progression_base, repetition_period, repetition_date, next_show_at, completed, deleted, wishlist_id, reward_policy, group_name) + VALUES ($1, $2, $3, $4, $5::INTERVAL, NULL, $6, 0, FALSE, $7, $8, $9) + RETURNING id + ` + insertArgs = []interface{}{userID, strings.TrimSpace(req.Name), rewardMessage, progressionBase, repetitionPeriodValue, now, wishlistIDValue, rewardPolicyValue, req.GroupName} + } else if repetitionDate.Valid { + // Вычисляем next_show_at для задачи с repetition_date + + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. Using UTC instead.", timezoneStr, err) + loc = time.UTC + } + + nextShowAt := calculateNextShowAtFromRepetitionDate(repetitionDate.String, time.Now().In(loc)) + if nextShowAt != nil { + insertSQL = ` + INSERT INTO tasks (user_id, name, reward_message, progression_base, repetition_period, repetition_date, next_show_at, completed, deleted, wishlist_id, reward_policy, group_name) + VALUES ($1, $2, $3, $4, NULL, $5, $6, 0, FALSE, $7, $8, $9) + RETURNING id + ` + insertArgs = []interface{}{userID, strings.TrimSpace(req.Name), rewardMessage, progressionBase, repetitionDate.String, nextShowAt, wishlistIDValue, rewardPolicyValue, req.GroupName} + } else { + insertSQL = ` + INSERT INTO tasks (user_id, name, reward_message, progression_base, repetition_period, repetition_date, completed, deleted, wishlist_id, reward_policy, group_name) + VALUES ($1, $2, $3, $4, NULL, $5, 0, FALSE, $6, $7, $8) + RETURNING id + ` + insertArgs = []interface{}{userID, strings.TrimSpace(req.Name), rewardMessage, progressionBase, 
repetitionDate.String, wishlistIDValue, rewardPolicyValue, req.GroupName} + } + } else { + insertSQL = ` + INSERT INTO tasks (user_id, name, reward_message, progression_base, repetition_period, repetition_date, completed, deleted, wishlist_id, reward_policy, group_name) + VALUES ($1, $2, $3, $4, NULL, NULL, 0, FALSE, $5, $6, $7) + RETURNING id + ` + insertArgs = []interface{}{userID, strings.TrimSpace(req.Name), rewardMessage, progressionBase, wishlistIDValue, rewardPolicyValue, req.GroupName} + } + + err = tx.QueryRow(insertSQL, insertArgs...).Scan(&taskID) + + if err != nil { + log.Printf("Error creating task: %v", err) + // Проверяем, не является ли это ошибкой уникального индекса + if strings.Contains(err.Error(), "unique") || strings.Contains(err.Error(), "duplicate") { + sendErrorWithCORS(w, "Task already exists for this wishlist item", http.StatusBadRequest) + return + } + sendErrorWithCORS(w, fmt.Sprintf("Error creating task: %v", err), http.StatusInternalServerError) + return + } + + // Создаем награды для основной задачи + for _, rewardReq := range req.Rewards { + projectID, err := a.findProjectByNameTx(tx, rewardReq.ProjectName, userID) + if err != nil { + log.Printf("Error finding project %s: %v", rewardReq.ProjectName, err) + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + _, err = tx.Exec(` + INSERT INTO reward_configs (position, task_id, project_id, value, use_progression) + VALUES ($1, $2, $3, $4, $5) + `, rewardReq.Position, taskID, projectID, rewardReq.Value, rewardReq.UseProgression) + + if err != nil { + log.Printf("Error creating reward: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating reward: %v", err), http.StatusInternalServerError) + return + } + } + + // Создаем подзадачи + for index, subtaskReq := range req.Subtasks { + var subtaskName sql.NullString + var subtaskRewardMessage sql.NullString + var subtaskProgressionBase sql.NullFloat64 + var subtaskPosition sql.NullInt64 + + if subtaskReq.Name != nil 
&& strings.TrimSpace(*subtaskReq.Name) != "" { + subtaskName = sql.NullString{String: strings.TrimSpace(*subtaskReq.Name), Valid: true} + } + if subtaskReq.RewardMessage != nil { + subtaskRewardMessage = sql.NullString{String: *subtaskReq.RewardMessage, Valid: true} + } + if req.ProgressionBase != nil { + subtaskProgressionBase = sql.NullFloat64{Float64: *req.ProgressionBase, Valid: true} + } + // Используем position из запроса, если указан, иначе используем индекс в массиве + if subtaskReq.Position != nil { + subtaskPosition = sql.NullInt64{Int64: int64(*subtaskReq.Position), Valid: true} + } else { + subtaskPosition = sql.NullInt64{Int64: int64(index), Valid: true} + } + + var subtaskID int + err = tx.QueryRow(` + INSERT INTO tasks (user_id, name, parent_task_id, reward_message, progression_base, completed, deleted, position) + VALUES ($1, $2, $3, $4, $5, 0, FALSE, $6) + RETURNING id + `, userID, subtaskName, taskID, subtaskRewardMessage, subtaskProgressionBase, subtaskPosition).Scan(&subtaskID) + + if err != nil { + log.Printf("Error creating subtask: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating subtask: %v", err), http.StatusInternalServerError) + return + } + + // Создаем награды для подзадачи + for _, rewardReq := range subtaskReq.Rewards { + if strings.TrimSpace(rewardReq.ProjectName) == "" { + sendErrorWithCORS(w, "Project name is required for all rewards", http.StatusBadRequest) + return + } + + projectID, err := a.findProjectByNameTx(tx, rewardReq.ProjectName, userID) + if err != nil { + log.Printf("Error finding project %s for subtask: %v", rewardReq.ProjectName, err) + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + _, err = tx.Exec(` + INSERT INTO reward_configs (position, task_id, project_id, value, use_progression) + VALUES ($1, $2, $3, $4, $5) + `, rewardReq.Position, subtaskID, projectID, rewardReq.Value, rewardReq.UseProgression) + + if err != nil { + log.Printf("Error creating subtask reward: %v", err) + 
sendErrorWithCORS(w, fmt.Sprintf("Error creating subtask reward: %v", err), http.StatusInternalServerError) + return + } + } + } + + // Если это тест, создаем конфигурацию + if req.IsTest { + // Валидация: для теста должны быть указаны words_count и хотя бы один словарь + if req.WordsCount == nil || *req.WordsCount < 1 { + sendErrorWithCORS(w, "Words count is required for test tasks and must be at least 1", http.StatusBadRequest) + return + } + if len(req.DictionaryIDs) == 0 { + sendErrorWithCORS(w, "At least one dictionary is required for test tasks", http.StatusBadRequest) + return + } + + // Создаем конфигурацию теста + var configID int + if req.MaxCards != nil { + err = tx.QueryRow(` + INSERT INTO configs (user_id, words_count, max_cards) + VALUES ($1, $2, $3) + RETURNING id + `, userID, *req.WordsCount, *req.MaxCards).Scan(&configID) + } else { + err = tx.QueryRow(` + INSERT INTO configs (user_id, words_count) + VALUES ($1, $2) + RETURNING id + `, userID, *req.WordsCount).Scan(&configID) + } + + if err != nil { + log.Printf("Error creating config: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating config: %v", err), http.StatusInternalServerError) + return + } + + // Связываем конфигурацию со словарями + for _, dictID := range req.DictionaryIDs { + _, err = tx.Exec(` + INSERT INTO config_dictionaries (config_id, dictionary_id) + VALUES ($1, $2) + `, configID, dictID) + if err != nil { + log.Printf("Error linking dictionary %d to config: %v", dictID, err) + sendErrorWithCORS(w, fmt.Sprintf("Error linking dictionary to config: %v", err), http.StatusInternalServerError) + return + } + } + + // Обновляем задачу, привязывая config_id + _, err = tx.Exec(` + UPDATE tasks SET config_id = $1 WHERE id = $2 + `, configID, taskID) + if err != nil { + log.Printf("Error linking config to task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error linking config to task: %v", err), http.StatusInternalServerError) + return + } + + log.Printf("Created test config %d 
for task %d", configID, taskID) + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов + if req.GroupName != nil && *req.GroupName != "" { + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + } + + // Возвращаем созданную задачу + var createdTask Task + var lastCompletedAt sql.NullString + var createdRepetitionPeriod sql.NullString + var createdRepetitionDate sql.NullString + err = a.DB.QueryRow(` + SELECT id, name, completed, last_completed_at, reward_message, progression_base, repetition_period::text, repetition_date + FROM tasks + WHERE id = $1 + `, taskID).Scan( + &createdTask.ID, &createdTask.Name, &createdTask.Completed, + &lastCompletedAt, &rewardMessage, &progressionBase, &createdRepetitionPeriod, &createdRepetitionDate, + ) + + if err != nil { + log.Printf("Error fetching created task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error fetching created task: %v", err), http.StatusInternalServerError) + return + } + + if rewardMessage.Valid { + createdTask.RewardMessage = &rewardMessage.String + } + if progressionBase.Valid { + createdTask.ProgressionBase = &progressionBase.Float64 + } + if lastCompletedAt.Valid { + createdTask.LastCompletedAt = &lastCompletedAt.String + } + if createdRepetitionPeriod.Valid { + createdTask.RepetitionPeriod = &createdRepetitionPeriod.String + } + if createdRepetitionDate.Valid { + createdTask.RepetitionDate = &createdRepetitionDate.String + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(createdTask) +} + +// updateTaskHandler обновляет существующую задачу +func (a *App) updateTaskHandler(w http.ResponseWriter, r *http.Request) { + if 
r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + taskID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest) + return + } + + // Проверяем владельца + var ownerID int + err = a.DB.QueryRow("SELECT user_id FROM tasks WHERE id = $1", taskID).Scan(&ownerID) + if err == sql.ErrNoRows || ownerID != userID { + sendErrorWithCORS(w, "Task not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking task ownership: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking task ownership: %v", err), http.StatusInternalServerError) + return + } + + var req TaskRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding task request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Валидация + if len(strings.TrimSpace(req.Name)) < 1 { + sendErrorWithCORS(w, "Task name is required and must be at least 1 character", http.StatusBadRequest) + return + } + + // Проверяем, что все rewards имеют project_name + for _, reward := range req.Rewards { + if strings.TrimSpace(reward.ProjectName) == "" { + sendErrorWithCORS(w, "Project name is required for all rewards", http.StatusBadRequest) + return + } + } + + // Обработка wishlist_id: можно только отвязать (установить в NULL), нельзя привязать + // Если req.WishlistID == nil, значит пользователь хочет отвязать (или не трогать) + // Если req.WishlistID != nil, игнорируем (нельзя привязать при редактировании) + // Получаем текущий wishlist_id задачи + var currentWishlistID sql.NullInt64 + err = a.DB.QueryRow("SELECT wishlist_id FROM tasks WHERE id = $1", taskID).Scan(¤tWishlistID) + if err != nil { + log.Printf("Error getting current 
wishlist_id: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting task: %v", err), http.StatusInternalServerError) + return + } + + // Определяем новое значение wishlist_id + // Если задача была привязана и req.WishlistID == nil, значит отвязываем + // Если req.WishlistID != nil, игнорируем (нельзя привязать) + var newWishlistID interface{} + if currentWishlistID.Valid && req.WishlistID == nil { + // Отвязываем от желания + newWishlistID = nil + } else if currentWishlistID.Valid { + // Оставляем текущее значение (нельзя привязать) + newWishlistID = currentWishlistID.Int64 + } else { + // Задача не была привязана, оставляем NULL + newWishlistID = nil + } + + // Если задача привязана к желанию, не позволяем устанавливать повторения и прогрессию + if currentWishlistID.Valid { + if (req.RepetitionPeriod != nil && strings.TrimSpace(*req.RepetitionPeriod) != "") || + (req.RepetitionDate != nil && strings.TrimSpace(*req.RepetitionDate) != "") { + // Проверяем, что это не бесконечная задача (оба поля = 0) + isPeriodZero := req.RepetitionPeriod != nil && (strings.TrimSpace(*req.RepetitionPeriod) == "0 day" || strings.HasPrefix(strings.TrimSpace(*req.RepetitionPeriod), "0 ")) + isDateZero := req.RepetitionDate != nil && (strings.TrimSpace(*req.RepetitionDate) == "0 week" || strings.HasPrefix(strings.TrimSpace(*req.RepetitionDate), "0 ")) + if !isPeriodZero || !isDateZero { + sendErrorWithCORS(w, "Tasks linked to wishlist items cannot be periodic", http.StatusBadRequest) + return + } + } + + // Задачи, привязанные к желанию, не могут иметь прогрессию + if req.ProgressionBase != nil { + sendErrorWithCORS(w, "Tasks linked to wishlist items cannot have progression", http.StatusBadRequest) + return + } + } + + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + 
// Обновляем основную задачу + var rewardMessage sql.NullString + var progressionBase sql.NullFloat64 + var repetitionPeriod sql.NullString + var repetitionDate sql.NullString + if req.RewardMessage != nil { + rewardMessage = sql.NullString{String: *req.RewardMessage, Valid: true} + } + if req.ProgressionBase != nil { + progressionBase = sql.NullFloat64{Float64: *req.ProgressionBase, Valid: true} + } + if req.RepetitionPeriod != nil && strings.TrimSpace(*req.RepetitionPeriod) != "" { + repetitionPeriod = sql.NullString{String: strings.TrimSpace(*req.RepetitionPeriod), Valid: true} + log.Printf("Updating task %d with repetition_period: %s", taskID, repetitionPeriod.String) + } else { + log.Printf("Updating task %d without repetition_period (req.RepetitionPeriod: %v)", taskID, req.RepetitionPeriod) + } + if req.RepetitionDate != nil && strings.TrimSpace(*req.RepetitionDate) != "" { + repetitionDate = sql.NullString{String: strings.TrimSpace(*req.RepetitionDate), Valid: true} + log.Printf("Updating task %d with repetition_date: %s", taskID, repetitionDate.String) + } + + // Подготовка reward_policy: если задача связана с желанием и политика не указана, используем "personal" по умолчанию + var rewardPolicyValue interface{} + if newWishlistID != nil { + // Если reward_policy явно указан в запросе, используем его + if req.RewardPolicy != nil && (*req.RewardPolicy == "personal" || *req.RewardPolicy == "general") { + rewardPolicyValue = *req.RewardPolicy + } else if req.RewardPolicy == nil { + // Если reward_policy не указан в запросе (undefined), сохраняем текущее значение из БД + // Это важно для случаев, когда обновляются другие поля, но reward_policy не должен меняться + var currentRewardPolicy sql.NullString + err = a.DB.QueryRow("SELECT reward_policy FROM tasks WHERE id = $1", taskID).Scan(¤tRewardPolicy) + if err == nil && currentRewardPolicy.Valid { + rewardPolicyValue = currentRewardPolicy.String + } else { + // Если в БД нет значения, используем "personal" по 
умолчанию + rewardPolicyValue = "personal" + } + } + } else { + rewardPolicyValue = nil // NULL для задач, не связанных с желаниями + } + + // Получаем текущие значения repetition_period, repetition_date и next_show_at из БД + // для проверки, изменились ли поля повторения + var currentRepetitionPeriod sql.NullString + var currentRepetitionDate sql.NullString + var currentNextShowAt sql.NullTime + err = a.DB.QueryRow("SELECT repetition_period, repetition_date, next_show_at FROM tasks WHERE id = $1", taskID).Scan( + ¤tRepetitionPeriod, + ¤tRepetitionDate, + ¤tNextShowAt, + ) + if err != nil { + log.Printf("Error getting current repetition values: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting task: %v", err), http.StatusInternalServerError) + return + } + + // Проверяем, изменились ли поля повторения + repetitionChanged := false + if repetitionPeriod.Valid { + // Новое значение есть - проверяем, отличается ли от текущего + if !currentRepetitionPeriod.Valid || currentRepetitionPeriod.String != repetitionPeriod.String { + repetitionChanged = true + } + // Также проверяем, что текущее repetition_date было не NULL (если было, значит изменился тип повторения) + if currentRepetitionDate.Valid { + repetitionChanged = true + } + } else if repetitionDate.Valid { + // Новое значение есть - проверяем, отличается ли от текущего + if !currentRepetitionDate.Valid || currentRepetitionDate.String != repetitionDate.String { + repetitionChanged = true + } + // Также проверяем, что текущее repetition_period было не NULL (если было, значит изменился тип повторения) + if currentRepetitionPeriod.Valid { + repetitionChanged = true + } + } else { + // Оба поля NULL - проверяем, были ли они NULL до этого + if currentRepetitionPeriod.Valid || currentRepetitionDate.Valid { + repetitionChanged = true + } + } + + // Используем условный SQL для обработки NULL значений + var updateSQL string + var updateArgs []interface{} + + // Получаем часовой пояс из переменной окружения (по 
умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. Using UTC instead.", timezoneStr, err) + loc = time.UTC + } + + if repetitionPeriod.Valid { + // Для repetition_period выставляем сегодняшнюю дату только если поле изменилось + var nextShowAtValue interface{} + if repetitionChanged { + now := time.Now().In(loc) + nextShowAtValue = now + } else { + // Поле не изменилось - сохраняем текущее значение next_show_at + if currentNextShowAt.Valid { + nextShowAtValue = currentNextShowAt.Time + } else { + nextShowAtValue = nil + } + } + updateSQL = ` + UPDATE tasks + SET name = $1, reward_message = $2, progression_base = $3, repetition_period = $4::INTERVAL, repetition_date = NULL, next_show_at = $5, wishlist_id = $6, reward_policy = $7, group_name = $8 + WHERE id = $9 + ` + updateArgs = []interface{}{strings.TrimSpace(req.Name), rewardMessage, progressionBase, repetitionPeriod.String, nextShowAtValue, newWishlistID, rewardPolicyValue, req.GroupName, taskID} + } else if repetitionDate.Valid { + // Вычисляем next_show_at для задачи с repetition_date только если поле изменилось + var nextShowAtValue interface{} + if repetitionChanged { + nextShowAt := calculateNextShowAtFromRepetitionDate(repetitionDate.String, time.Now().In(loc)) + nextShowAtValue = nextShowAt + } else { + // Поле не изменилось - сохраняем текущее значение next_show_at + if currentNextShowAt.Valid { + nextShowAtValue = currentNextShowAt.Time + } else { + nextShowAtValue = nil + } + } + if nextShowAtValue != nil { + updateSQL = ` + UPDATE tasks + SET name = $1, reward_message = $2, progression_base = $3, repetition_period = NULL, repetition_date = $4, next_show_at = $5, wishlist_id = $6, reward_policy = $7, group_name = $8 + WHERE id = $9 + ` + updateArgs = []interface{}{strings.TrimSpace(req.Name), rewardMessage, progressionBase, repetitionDate.String, nextShowAtValue, newWishlistID, 
rewardPolicyValue, req.GroupName, taskID} + } else { + updateSQL = ` + UPDATE tasks + SET name = $1, reward_message = $2, progression_base = $3, repetition_period = NULL, repetition_date = $4, wishlist_id = $5, reward_policy = $6, group_name = $7 + WHERE id = $8 + ` + updateArgs = []interface{}{strings.TrimSpace(req.Name), rewardMessage, progressionBase, repetitionDate.String, newWishlistID, rewardPolicyValue, req.GroupName, taskID} + } + } else { + // Оба поля NULL - устанавливаем next_show_at в NULL только если поля повторения изменились + var nextShowAtValue interface{} + if repetitionChanged { + nextShowAtValue = nil + } else { + // Поля не изменились - сохраняем текущее значение next_show_at + if currentNextShowAt.Valid { + nextShowAtValue = currentNextShowAt.Time + } else { + nextShowAtValue = nil + } + } + updateSQL = ` + UPDATE tasks + SET name = $1, reward_message = $2, progression_base = $3, repetition_period = NULL, repetition_date = NULL, next_show_at = $4, wishlist_id = $5, reward_policy = $6, group_name = $7 + WHERE id = $8 + ` + updateArgs = []interface{}{strings.TrimSpace(req.Name), rewardMessage, progressionBase, nextShowAtValue, newWishlistID, rewardPolicyValue, req.GroupName, taskID} + } + + _, err = tx.Exec(updateSQL, updateArgs...) 
+ + if err != nil { + log.Printf("Error updating task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating task: %v", err), http.StatusInternalServerError) + return + } + + // Удаляем старые награды основной задачи + _, err = tx.Exec("DELETE FROM reward_configs WHERE task_id = $1", taskID) + if err != nil { + log.Printf("Error deleting old rewards: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error deleting old rewards: %v", err), http.StatusInternalServerError) + return + } + + // Вставляем новые награды + for _, rewardReq := range req.Rewards { + projectID, err := a.findProjectByNameTx(tx, rewardReq.ProjectName, userID) + if err != nil { + log.Printf("Error finding project %s: %v", rewardReq.ProjectName, err) + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + _, err = tx.Exec(` + INSERT INTO reward_configs (position, task_id, project_id, value, use_progression) + VALUES ($1, $2, $3, $4, $5) + `, rewardReq.Position, taskID, projectID, rewardReq.Value, rewardReq.UseProgression) + + if err != nil { + log.Printf("Error creating reward: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating reward: %v", err), http.StatusInternalServerError) + return + } + } + + // Получаем список текущих подзадач + currentSubtaskIDs := make(map[int]bool) + rows, err := tx.Query("SELECT id FROM tasks WHERE parent_task_id = $1 AND deleted = FALSE", taskID) + if err == nil { + for rows.Next() { + var id int + if err := rows.Scan(&id); err == nil { + currentSubtaskIDs[id] = true + } + } + rows.Close() + } + + // Обрабатываем подзадачи из запроса + subtaskIDsInRequest := make(map[int]bool) + for index, subtaskReq := range req.Subtasks { + if subtaskReq.ID != nil { + subtaskIDsInRequest[*subtaskReq.ID] = true + + // Обновляем существующую подзадачу + var subtaskName sql.NullString + var subtaskRewardMessage sql.NullString + var subtaskProgressionBase sql.NullFloat64 + var subtaskPosition sql.NullInt64 + + if subtaskReq.Name != nil && 
strings.TrimSpace(*subtaskReq.Name) != "" { + subtaskName = sql.NullString{String: strings.TrimSpace(*subtaskReq.Name), Valid: true} + } + if subtaskReq.RewardMessage != nil { + subtaskRewardMessage = sql.NullString{String: *subtaskReq.RewardMessage, Valid: true} + } + if req.ProgressionBase != nil { + subtaskProgressionBase = sql.NullFloat64{Float64: *req.ProgressionBase, Valid: true} + } + // Используем position из запроса, если указан, иначе используем индекс в массиве + if subtaskReq.Position != nil { + subtaskPosition = sql.NullInt64{Int64: int64(*subtaskReq.Position), Valid: true} + } else { + subtaskPosition = sql.NullInt64{Int64: int64(index), Valid: true} + } + + _, err = tx.Exec(` + UPDATE tasks + SET name = $1, reward_message = $2, progression_base = $3, position = $4 + WHERE id = $5 AND parent_task_id = $6 + `, subtaskName, subtaskRewardMessage, subtaskProgressionBase, subtaskPosition, *subtaskReq.ID, taskID) + + if err != nil { + log.Printf("Error updating subtask: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating subtask: %v", err), http.StatusInternalServerError) + return + } + + // Удаляем старые награды подзадачи + _, err = tx.Exec("DELETE FROM reward_configs WHERE task_id = $1", *subtaskReq.ID) + if err != nil { + log.Printf("Error deleting old subtask rewards: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error deleting old subtask rewards: %v", err), http.StatusInternalServerError) + return + } + + // Вставляем новые награды подзадачи + for _, rewardReq := range subtaskReq.Rewards { + if strings.TrimSpace(rewardReq.ProjectName) == "" { + sendErrorWithCORS(w, "Project name is required for all rewards", http.StatusBadRequest) + return + } + + projectID, err := a.findProjectByNameTx(tx, rewardReq.ProjectName, userID) + if err != nil { + log.Printf("Error finding project %s for subtask: %v", rewardReq.ProjectName, err) + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + _, err = tx.Exec(` + INSERT INTO 
reward_configs (position, task_id, project_id, value, use_progression) + VALUES ($1, $2, $3, $4, $5) + `, rewardReq.Position, *subtaskReq.ID, projectID, rewardReq.Value, rewardReq.UseProgression) + + if err != nil { + log.Printf("Error creating subtask reward: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating subtask reward: %v", err), http.StatusInternalServerError) + return + } + } + } else { + // Создаем новую подзадачу + var subtaskName sql.NullString + var subtaskRewardMessage sql.NullString + var subtaskProgressionBase sql.NullFloat64 + var subtaskPosition sql.NullInt64 + + if subtaskReq.Name != nil && strings.TrimSpace(*subtaskReq.Name) != "" { + subtaskName = sql.NullString{String: strings.TrimSpace(*subtaskReq.Name), Valid: true} + } + if subtaskReq.RewardMessage != nil { + subtaskRewardMessage = sql.NullString{String: *subtaskReq.RewardMessage, Valid: true} + } + if req.ProgressionBase != nil { + subtaskProgressionBase = sql.NullFloat64{Float64: *req.ProgressionBase, Valid: true} + } + // Используем position из запроса, если указан, иначе используем индекс в массиве + if subtaskReq.Position != nil { + subtaskPosition = sql.NullInt64{Int64: int64(*subtaskReq.Position), Valid: true} + } else { + subtaskPosition = sql.NullInt64{Int64: int64(index), Valid: true} + } + + var subtaskID int + err = tx.QueryRow(` + INSERT INTO tasks (user_id, name, parent_task_id, reward_message, progression_base, completed, deleted, position) + VALUES ($1, $2, $3, $4, $5, 0, FALSE, $6) + RETURNING id + `, userID, subtaskName, taskID, subtaskRewardMessage, subtaskProgressionBase, subtaskPosition).Scan(&subtaskID) + + if err != nil { + log.Printf("Error creating subtask: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating subtask: %v", err), http.StatusInternalServerError) + return + } + + // Создаем награды для новой подзадачи + for _, rewardReq := range subtaskReq.Rewards { + if strings.TrimSpace(rewardReq.ProjectName) == "" { + sendErrorWithCORS(w, "Project 
name is required for all rewards", http.StatusBadRequest) + return + } + + projectID, err := a.findProjectByNameTx(tx, rewardReq.ProjectName, userID) + if err != nil { + log.Printf("Error finding project %s for new subtask: %v", rewardReq.ProjectName, err) + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + _, err = tx.Exec(` + INSERT INTO reward_configs (position, task_id, project_id, value, use_progression) + VALUES ($1, $2, $3, $4, $5) + `, rewardReq.Position, subtaskID, projectID, rewardReq.Value, rewardReq.UseProgression) + + if err != nil { + log.Printf("Error creating subtask reward: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating subtask reward: %v", err), http.StatusInternalServerError) + return + } + } + } + } + + // Помечаем подзадачи, которые были в БД, но не пришли в запросе, как deleted + for subtaskID := range currentSubtaskIDs { + if !subtaskIDsInRequest[subtaskID] { + _, err = tx.Exec("UPDATE tasks SET deleted = TRUE WHERE id = $1", subtaskID) + if err != nil { + log.Printf("Error marking subtask as deleted: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error marking subtask as deleted: %v", err), http.StatusInternalServerError) + return + } + } + } + + // Получаем текущий config_id задачи + var currentConfigID sql.NullInt64 + err = tx.QueryRow("SELECT config_id FROM tasks WHERE id = $1", taskID).Scan(¤tConfigID) + if err != nil { + log.Printf("Error getting current config_id: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting task config: %v", err), http.StatusInternalServerError) + return + } + + // Обработка конфигурации теста + if req.IsTest { + // Валидация: для теста должны быть указаны words_count и хотя бы один словарь + if req.WordsCount == nil || *req.WordsCount < 1 { + sendErrorWithCORS(w, "Words count is required for test tasks and must be at least 1", http.StatusBadRequest) + return + } + if len(req.DictionaryIDs) == 0 { + sendErrorWithCORS(w, "At least one dictionary is required for test 
tasks", http.StatusBadRequest) + return + } + + if currentConfigID.Valid { + // Обновляем существующую конфигурацию + if req.MaxCards != nil { + _, err = tx.Exec(` + UPDATE configs SET words_count = $1, max_cards = $2 WHERE id = $3 + `, *req.WordsCount, *req.MaxCards, currentConfigID.Int64) + } else { + _, err = tx.Exec(` + UPDATE configs SET words_count = $1, max_cards = NULL WHERE id = $2 + `, *req.WordsCount, currentConfigID.Int64) + } + if err != nil { + log.Printf("Error updating config: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating config: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем связи со словарями + _, err = tx.Exec("DELETE FROM config_dictionaries WHERE config_id = $1", currentConfigID.Int64) + if err != nil { + log.Printf("Error deleting config dictionaries: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating config dictionaries: %v", err), http.StatusInternalServerError) + return + } + + for _, dictID := range req.DictionaryIDs { + _, err = tx.Exec(` + INSERT INTO config_dictionaries (config_id, dictionary_id) VALUES ($1, $2) + `, currentConfigID.Int64, dictID) + if err != nil { + log.Printf("Error linking dictionary %d to config: %v", dictID, err) + sendErrorWithCORS(w, fmt.Sprintf("Error linking dictionary to config: %v", err), http.StatusInternalServerError) + return + } + } + } else { + // Создаем новую конфигурацию для существующей задачи + var newConfigID int + if req.MaxCards != nil { + err = tx.QueryRow(` + INSERT INTO configs (user_id, words_count, max_cards) VALUES ($1, $2, $3) RETURNING id + `, userID, *req.WordsCount, *req.MaxCards).Scan(&newConfigID) + } else { + err = tx.QueryRow(` + INSERT INTO configs (user_id, words_count) VALUES ($1, $2) RETURNING id + `, userID, *req.WordsCount).Scan(&newConfigID) + } + if err != nil { + log.Printf("Error creating config: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating config: %v", err), http.StatusInternalServerError) + return + } + + 
for _, dictID := range req.DictionaryIDs { + _, err = tx.Exec(` + INSERT INTO config_dictionaries (config_id, dictionary_id) VALUES ($1, $2) + `, newConfigID, dictID) + if err != nil { + log.Printf("Error linking dictionary %d to config: %v", dictID, err) + sendErrorWithCORS(w, fmt.Sprintf("Error linking dictionary to config: %v", err), http.StatusInternalServerError) + return + } + } + + _, err = tx.Exec("UPDATE tasks SET config_id = $1 WHERE id = $2", newConfigID, taskID) + if err != nil { + log.Printf("Error linking config to task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error linking config to task: %v", err), http.StatusInternalServerError) + return + } + } + } else if currentConfigID.Valid { + // Задача перестала быть тестом - удаляем конфигурацию + _, err = tx.Exec("DELETE FROM config_dictionaries WHERE config_id = $1", currentConfigID.Int64) + if err != nil { + log.Printf("Error deleting config dictionaries: %v", err) + } + _, err = tx.Exec("DELETE FROM configs WHERE id = $1", currentConfigID.Int64) + if err != nil { + log.Printf("Error deleting config: %v", err) + } + _, err = tx.Exec("UPDATE tasks SET config_id = NULL WHERE id = $1", taskID) + if err != nil { + log.Printf("Error unlinking config from task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error unlinking config from task: %v", err), http.StatusInternalServerError) + return + } + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов + if req.GroupName != nil && *req.GroupName != "" { + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + } + + // Возвращаем обновленную задачу + var updatedTask Task + var lastCompletedAt sql.NullString + var updatedRepetitionPeriod sql.NullString + 
	var updatedRepetitionDate sql.NullString
	// Re-read the task so the response reflects the state actually persisted by the commit above.
	err = a.DB.QueryRow(`
		SELECT id, name, completed, last_completed_at, reward_message, progression_base, repetition_period::text, repetition_date
		FROM tasks
		WHERE id = $1
	`, taskID).Scan(
		&updatedTask.ID, &updatedTask.Name, &updatedTask.Completed,
		&lastCompletedAt, &rewardMessage, &progressionBase, &updatedRepetitionPeriod, &updatedRepetitionDate,
	)

	if err != nil {
		log.Printf("Error fetching updated task: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error fetching updated task: %v", err), http.StatusInternalServerError)
		return
	}

	// Copy nullable DB columns into the response's optional (pointer) fields.
	if rewardMessage.Valid {
		updatedTask.RewardMessage = &rewardMessage.String
	}
	if progressionBase.Valid {
		updatedTask.ProgressionBase = &progressionBase.Float64
	}
	if lastCompletedAt.Valid {
		updatedTask.LastCompletedAt = &lastCompletedAt.String
	}
	if updatedRepetitionPeriod.Valid {
		updatedTask.RepetitionPeriod = &updatedRepetitionPeriod.String
	}
	if updatedRepetitionDate.Valid {
		updatedTask.RepetitionDate = &updatedRepetitionDate.String
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(updatedTask)
}

// saveTaskDraftHandler creates or updates a draft for a task.
// A draft stores an in-progress completion form (progression value,
// auto-complete flag, and the set of checked subtasks) so the user can
// resume later. Route param: {id} = task ID. Responds with a JSON
// {success, message} object on success.
func (a *App) saveTaskDraftHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	vars := mux.Vars(r)
	taskID, err := strconv.Atoi(vars["id"])
	if err != nil {
		sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest)
		return
	}

	// Verify the task exists, is not deleted, and belongs to the requesting user.
	// NOTE(review): if Scan fails with a non-ErrNoRows error, ownerID stays 0, so
	// `ownerID != userID` is true and we return 404 here — the 500 branch below is
	// effectively unreachable for real DB errors. Consider checking err first.
	var ownerID int
	err = a.DB.QueryRow("SELECT user_id FROM tasks WHERE id = $1 AND deleted = FALSE", taskID).Scan(&ownerID)
	if err == sql.ErrNoRows || ownerID != userID {
		sendErrorWithCORS(w, "Task not found", http.StatusNotFound)
		return
	}
	if err != nil {
		log.Printf("Error checking task ownership: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error checking task ownership: %v", err), http.StatusInternalServerError)
		return
	}

	var req SaveDraftRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		log.Printf("Error decoding save draft request: %v", err)
		sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest)
		return
	}

	// Start a transaction; Rollback is a no-op after a successful Commit.
	tx, err := a.DB.Begin()
	if err != nil {
		log.Printf("Error beginning transaction: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError)
		return
	}
	defer tx.Rollback()

	// Check whether a draft already exists for this task (upsert by hand).
	var draftID int
	err = tx.QueryRow("SELECT id FROM task_drafts WHERE task_id = $1", taskID).Scan(&draftID)

	var progressionValue sql.NullFloat64
	if req.ProgressionValue != nil {
		progressionValue = sql.NullFloat64{Float64: *req.ProgressionValue, Valid: true}
	}

	if err == sql.ErrNoRows {
		// No draft yet — create one.
		err = tx.QueryRow(`
			INSERT INTO task_drafts (task_id, user_id, progression_value, auto_complete, created_at, updated_at)
			VALUES ($1, $2, $3, $4, NOW(), NOW())
			RETURNING id
		`, taskID, userID, progressionValue, req.AutoComplete).Scan(&draftID)

		if err != nil {
			log.Printf("Error creating draft: %v", err)
			sendErrorWithCORS(w, fmt.Sprintf("Error creating draft: %v", err), http.StatusInternalServerError)
			return
		}
	} else if err != nil {
		log.Printf("Error checking draft existence: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error checking draft existence: %v", err), http.StatusInternalServerError)
		return
	} else {
		// Update the existing draft.
		// auto_complete is overwritten with the request value (cleared when false).
		autoComplete := req.AutoComplete
		_, err = tx.Exec(`
			UPDATE task_drafts
			SET progression_value = $1, auto_complete = $2, updated_at = NOW()
			WHERE id = $3
		`, progressionValue, autoComplete, draftID)

		if err != nil {
			log.Printf("Error updating draft: %v", err)
			sendErrorWithCORS(w, fmt.Sprintf("Error updating draft: %v", err), http.StatusInternalServerError)
			return
		}

		// Drop all previously stored subtask rows; the current set is re-inserted below.
		_, err = tx.Exec("DELETE FROM task_draft_subtasks WHERE task_draft_id = $1", draftID)
		if err != nil {
			log.Printf("Error deleting old draft subtasks: %v", err)
			sendErrorWithCORS(w, fmt.Sprintf("Error deleting old draft subtasks: %v", err), http.StatusInternalServerError)
			return
		}
	}

	// Insert the new subtask rows (the request carries only the checked subtasks).
	if len(req.ChildrenTaskIDs) > 0 {
		// Validate that every submitted subtask actually belongs to this task:
		// build an IN (...) list with positional placeholders starting at $2
		// ($1 is the parent task ID).
		placeholders := make([]string, len(req.ChildrenTaskIDs))
		args := make([]interface{}, len(req.ChildrenTaskIDs)+1)
		args[0] = taskID
		for i, id := range req.ChildrenTaskIDs {
			placeholders[i] = fmt.Sprintf("$%d", i+2)
			args[i+1] = id
		}

		query := fmt.Sprintf(`
			SELECT id FROM tasks
			WHERE parent_task_id = $1 AND id IN (%s) AND deleted = FALSE
		`, strings.Join(placeholders, ","))

		validSubtaskRows, err := tx.Query(query, args...)
		if err != nil {
			log.Printf("Error validating subtasks: %v", err)
			sendErrorWithCORS(w, fmt.Sprintf("Error validating subtasks: %v", err), http.StatusInternalServerError)
			return
		}
		defer validSubtaskRows.Close()

		validSubtaskIDs := make(map[int]bool)
		for validSubtaskRows.Next() {
			var id int
			if err := validSubtaskRows.Scan(&id); err == nil {
				validSubtaskIDs[id] = true
			}
		}

		// Insert only the subtasks that passed validation; unknown IDs are
		// silently dropped rather than rejected.
		for _, subtaskID := range req.ChildrenTaskIDs {
			if validSubtaskIDs[subtaskID] {
				_, err = tx.Exec(`
					INSERT INTO task_draft_subtasks (task_draft_id, subtask_id)
					VALUES ($1, $2)
					ON CONFLICT (task_draft_id, subtask_id) DO NOTHING
				`, draftID, subtaskID)
				if err != nil {
					log.Printf("Error inserting draft subtask: %v", err)
					sendErrorWithCORS(w, fmt.Sprintf("Error inserting draft subtask: %v", err), http.StatusInternalServerError)
					return
				}
			}
		}
	}

	// Commit the transaction.
	if err = tx.Commit(); err != nil {
		log.Printf("Error committing transaction: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success": true,
		"message": "Draft saved successfully",
	})
}

// deleteTaskHandler soft-deletes a task: the row is kept but marked
// deleted = TRUE so history and references stay intact.
// Route param: {id} = task ID. Only the task owner may delete it.
func (a *App) deleteTaskHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	vars := mux.Vars(r)
	taskID, err := strconv.Atoi(vars["id"])
	if err != nil {
		sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest)
		return
	}

	// Verify ownership.
	// NOTE(review): same pattern as saveTaskDraftHandler — a non-ErrNoRows Scan
	// error leaves ownerID at 0 and is reported as 404 here, so the 500 branch
	// below cannot trigger for real DB errors. Consider checking err first.
	var ownerID int
	err = a.DB.QueryRow("SELECT user_id FROM tasks WHERE id = $1", taskID).Scan(&ownerID)
	if err == sql.ErrNoRows || ownerID != userID {
		sendErrorWithCORS(w, "Task not found", http.StatusNotFound)
		return
	}
	if err != nil {
		log.Printf("Error checking task ownership: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error checking task ownership: %v", err), http.StatusInternalServerError)
		return
	}

	// Soft-delete: mark the row as deleted instead of removing it.
	// The user_id predicate makes the update a second ownership guard.
	_, err = a.DB.Exec("UPDATE tasks SET deleted = TRUE WHERE id = $1 AND user_id = $2", taskID, userID)
	if err != nil {
		log.Printf("Error deleting task: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Error deleting task: %v", err), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success": true,
		"message": "Task deleted successfully",
	})
}

// executeTask performs the task-completion flow (logic extracted from the
// HTTP handler). It deletes any pending draft first, then runs the full
// completion logic: ownership/unlock checks, reward computation, message
// dispatch, and task state updates.
func (a *App) executeTask(taskID int, userID int, req CompleteTaskRequest) error {
	// Remove any pending draft before executing (if one exists).
	_, err := a.DB.Exec(`DELETE FROM task_drafts WHERE task_id = $1`, taskID)
	if err != nil {
		log.Printf("Error deleting draft for task %d: %v", taskID, err)
		// Deliberately not returned: draft cleanup is best-effort.
	}

	// Load the task and check ownership.
	var task Task
	var rewardMessage sql.NullString
	var progressionBase sql.NullFloat64
	var repetitionPeriod sql.NullString
	var repetitionDate sql.NullString
	var ownerID int
	var wishlistID sql.NullInt64

	err = a.DB.QueryRow(`
		SELECT id, name, reward_message, progression_base, repetition_period::text, repetition_date, user_id, wishlist_id
		FROM tasks
		WHERE id = $1 AND deleted = FALSE
	`, taskID).Scan(&task.ID, &task.Name, &rewardMessage, &progressionBase, &repetitionPeriod, &repetitionDate, &ownerID, &wishlistID)

	if err == sql.ErrNoRows {
		return fmt.Errorf("task not found")
	}
	if err != nil {
log.Printf("Error querying task: %v", err) + return fmt.Errorf("error querying task: %v", err) + } + + if ownerID != userID { + return fmt.Errorf("task not found") + } + + // Проверяем, что желание разблокировано (если задача связана с желанием) + if wishlistID.Valid { + unlocked, err := a.checkWishlistUnlock(int(wishlistID.Int64), userID) + if err != nil { + log.Printf("Error checking wishlist unlock status: %v", err) + return fmt.Errorf("error checking wishlist unlock status: %v", err) + } + if !unlocked { + return fmt.Errorf("cannot complete task: wishlist item is not unlocked") + } + } + + // Валидация: если progression_base != null, то value обязателен + if progressionBase.Valid && req.Value == nil { + return fmt.Errorf("value is required when progression_base is set") + } + + if rewardMessage.Valid { + task.RewardMessage = &rewardMessage.String + } + if progressionBase.Valid { + task.ProgressionBase = &progressionBase.Float64 + } + + // Получаем награды основной задачи + rewardRows, err := a.DB.Query(` + SELECT rc.position, p.name AS project_name, rc.value, rc.use_progression + FROM reward_configs rc + JOIN projects p ON rc.project_id = p.id + WHERE rc.task_id = $1 + ORDER BY rc.position + `, taskID) + + if err != nil { + log.Printf("Error querying rewards: %v", err) + return fmt.Errorf("error querying rewards: %v", err) + } + defer rewardRows.Close() + + rewards := make([]Reward, 0) + for rewardRows.Next() { + var reward Reward + err := rewardRows.Scan(&reward.Position, &reward.ProjectName, &reward.Value, &reward.UseProgression) + if err != nil { + log.Printf("Error scanning reward: %v", err) + continue + } + rewards = append(rewards, reward) + } + + // Вычисляем score для каждой награды и формируем строки для подстановки + rewardStrings := make(map[int]string) + for _, reward := range rewards { + var score float64 + if reward.UseProgression && progressionBase.Valid && req.Value != nil { + score = (*req.Value / progressionBase.Float64) * reward.Value + } 
else { + score = reward.Value + } + + // Формируем строку награды + var rewardStr string + if score >= 0 { + rewardStr = fmt.Sprintf("**%s+%.4g**", reward.ProjectName, score) + } else { + // Убираем знак минуса из числа (используем абсолютное значение) + rewardStr = fmt.Sprintf("**%s-%.4g**", reward.ProjectName, math.Abs(score)) + } + rewardStrings[reward.Position] = rewardStr + } + + // Функция для замены плейсхолдеров в сообщении награды + replaceRewardPlaceholders := func(message string, rewardStrings map[int]string) string { + result := message + // Сначала сохраняем экранированные плейсхолдеры \$0, \$1 и т.д. во временные маркеры + escapedMarkers := make(map[string]string) + for i := 0; i < 100; i++ { + escaped := fmt.Sprintf(`\$%d`, i) + marker := fmt.Sprintf(`__ESCAPED_DOLLAR_%d__`, i) + if strings.Contains(result, escaped) { + escapedMarkers[marker] = escaped + result = strings.ReplaceAll(result, escaped, marker) + } + } + // Заменяем ${0}, ${1}, и т.д. + for i := 0; i < 100; i++ { // Максимум 100 плейсхолдеров + placeholder := fmt.Sprintf("${%d}", i) + if rewardStr, ok := rewardStrings[i]; ok { + result = strings.ReplaceAll(result, placeholder, rewardStr) + } + } + // Затем заменяем $0, $1, и т.д. 
(экранированные уже защищены маркерами) + // Ищем $N, где после N не идет еще одна цифра (чтобы не заменить $10 при поиске $1) + // Go regexp не поддерживает lookahead, поэтому заменяем с конца (от больших чисел к меньшим) + for i := 99; i >= 0; i-- { + if rewardStr, ok := rewardStrings[i]; ok { + searchStr := fmt.Sprintf("$%d", i) + // Ищем все вхождения с конца строки + for { + idx := strings.LastIndex(result, searchStr) + if idx == -1 { + break + } + // Проверяем, что после $N не идет еще одна цифра + afterIdx := idx + len(searchStr) + if afterIdx >= len(result) || result[afterIdx] < '0' || result[afterIdx] > '9' { + // Можно заменить + result = result[:idx] + rewardStr + result[afterIdx:] + } else { + // После $N идет еще цифра (например, $10), пропускаем + break + } + } + } + } + // Восстанавливаем экранированные доллары из временных маркеров + for marker, escaped := range escapedMarkers { + result = strings.ReplaceAll(result, marker, escaped) + } + return result + } + + // Подставляем в reward_message основной задачи + var mainTaskMessage string + if task.RewardMessage != nil && *task.RewardMessage != "" { + mainTaskMessage = replaceRewardPlaceholders(*task.RewardMessage, rewardStrings) + } else { + // Если reward_message пустой, используем имя задачи + mainTaskMessage = task.Name + } + + // Получаем выбранные подзадачи (только с непустым reward_message и deleted = FALSE) + subtaskMessages := make([]string, 0) + if len(req.ChildrenTaskIDs) > 0 { + placeholders := make([]string, len(req.ChildrenTaskIDs)) + args := make([]interface{}, len(req.ChildrenTaskIDs)+1) + args[0] = taskID + for i, id := range req.ChildrenTaskIDs { + placeholders[i] = fmt.Sprintf("$%d", i+2) + args[i+1] = id + } + + query := fmt.Sprintf(` + SELECT id, name, reward_message, progression_base + FROM tasks + WHERE parent_task_id = $1 AND id IN (%s) AND deleted = FALSE + `, strings.Join(placeholders, ",")) + + subtaskRows, err := a.DB.Query(query, args...) 
+ if err != nil { + log.Printf("Error querying subtasks: %v", err) + } else { + defer subtaskRows.Close() + for subtaskRows.Next() { + var subtaskID int + var subtaskName string + var subtaskRewardMessage sql.NullString + var subtaskProgressionBase sql.NullFloat64 + + err := subtaskRows.Scan(&subtaskID, &subtaskName, &subtaskRewardMessage, &subtaskProgressionBase) + if err != nil { + log.Printf("Error scanning subtask: %v", err) + continue + } + + // Пропускаем подзадачи с пустым reward_message + if !subtaskRewardMessage.Valid || subtaskRewardMessage.String == "" { + continue + } + + // Получаем награды подзадачи + subtaskRewardRows, err := a.DB.Query(` + SELECT rc.position, p.name AS project_name, rc.value, rc.use_progression + FROM reward_configs rc + JOIN projects p ON rc.project_id = p.id + WHERE rc.task_id = $1 + ORDER BY rc.position + `, subtaskID) + + if err != nil { + log.Printf("Error querying subtask rewards: %v", err) + continue + } + + subtaskRewards := make([]Reward, 0) + for subtaskRewardRows.Next() { + var reward Reward + err := subtaskRewardRows.Scan(&reward.Position, &reward.ProjectName, &reward.Value, &reward.UseProgression) + if err != nil { + log.Printf("Error scanning subtask reward: %v", err) + continue + } + subtaskRewards = append(subtaskRewards, reward) + } + subtaskRewardRows.Close() + + // Вычисляем score для наград подзадачи + subtaskRewardStrings := make(map[int]string) + for _, reward := range subtaskRewards { + var score float64 + if reward.UseProgression && subtaskProgressionBase.Valid && req.Value != nil { + score = (*req.Value / subtaskProgressionBase.Float64) * reward.Value + } else if reward.UseProgression && progressionBase.Valid && req.Value != nil { + // Если у подзадачи нет progression_base, используем основной + score = (*req.Value / progressionBase.Float64) * reward.Value + } else { + score = reward.Value + } + + var rewardStr string + if score >= 0 { + rewardStr = fmt.Sprintf("**%s+%.4g**", reward.ProjectName, score) + } 
else { + rewardStr = fmt.Sprintf("**%s-%.4g**", reward.ProjectName, math.Abs(score)) + } + subtaskRewardStrings[reward.Position] = rewardStr + } + + // Подставляем в reward_message подзадачи + subtaskMessage := replaceRewardPlaceholders(subtaskRewardMessage.String, subtaskRewardStrings) + + subtaskMessages = append(subtaskMessages, subtaskMessage) + } + } + } + + // Формируем итоговое сообщение + var finalMessage strings.Builder + finalMessage.WriteString(mainTaskMessage) + for _, subtaskMsg := range subtaskMessages { + finalMessage.WriteString("\n + ") + finalMessage.WriteString(subtaskMsg) + } + + // Отправляем сообщение через processMessage + userIDPtr := &userID + _, err = a.processMessage(finalMessage.String(), userIDPtr) + if err != nil { + // Логируем ошибку, но не откатываем транзакцию + log.Printf("Error sending message to Telegram: %v", err) + } + + // Обновляем completed и last_completed_at для основной задачи + // Если repetition_date установлен, вычисляем next_show_at + // Если repetition_period не установлен и repetition_date не установлен, помечаем задачу как удаленную + // Если repetition_period = "0 day" (или любое значение с 0), не обновляем last_completed_at + + // Проверяем наличие repetition_date (используем COALESCE, поэтому пустая строка означает отсутствие) + hasRepetitionDate := repetitionDate.Valid && strings.TrimSpace(repetitionDate.String) != "" + + if hasRepetitionDate { + // Есть repetition_date - вычисляем следующую дату показа + + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. 
Using UTC instead.", timezoneStr, err) + loc = time.UTC + } + + nextShowAt := calculateNextShowAtFromRepetitionDate(repetitionDate.String, time.Now().In(loc)) + if nextShowAt != nil { + _, err = a.DB.Exec(` + UPDATE tasks + SET completed = completed + 1, last_completed_at = NOW(), next_show_at = $2 + WHERE id = $1 + `, taskID, nextShowAt) + } else { + // Если не удалось вычислить дату, обновляем как обычно + _, err = a.DB.Exec(` + UPDATE tasks + SET completed = completed + 1, last_completed_at = NOW(), next_show_at = NULL + WHERE id = $1 + `, taskID) + } + } else if repetitionPeriod.Valid { + // Проверяем, является ли период нулевым (начинается с "0 ") + periodStr := strings.TrimSpace(repetitionPeriod.String) + isZeroPeriod := strings.HasPrefix(periodStr, "0 ") || periodStr == "0" + + if isZeroPeriod { + // Период = 0: обновляем только счетчик, но не last_completed_at + // Задача никогда не будет переноситься в выполненные + _, err = a.DB.Exec(` + UPDATE tasks + SET completed = completed + 1, next_show_at = NULL + WHERE id = $1 + `, taskID) + } else { + // Обычный период: обновляем счетчик и last_completed_at, вычисляем next_show_at + // next_show_at = last_completed_at + repetition_period + + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. 
Using UTC instead.", timezoneStr, err) + loc = time.UTC + } + + now := time.Now().In(loc) + log.Printf("Calculating next_show_at for task %d: repetition_period='%s', fromDate=%v (timezone: %s)", taskID, repetitionPeriod.String, now, timezoneStr) + nextShowAt := calculateNextShowAtFromRepetitionPeriod(repetitionPeriod.String, now) + if nextShowAt != nil { + log.Printf("Calculated next_show_at for task %d: %v", taskID, *nextShowAt) + _, err = a.DB.Exec(` + UPDATE tasks + SET completed = completed + 1, last_completed_at = NOW(), next_show_at = $2 + WHERE id = $1 + `, taskID, nextShowAt) + } else { + log.Printf("Failed to calculate next_show_at for task %d: repetition_period='%s' returned nil", taskID, repetitionPeriod.String) + // Если не удалось вычислить дату, обновляем как обычно + _, err = a.DB.Exec(` + UPDATE tasks + SET completed = completed + 1, last_completed_at = NOW(), next_show_at = NULL + WHERE id = $1 + `, taskID) + } + } + } else { + _, err = a.DB.Exec(` + UPDATE tasks + SET completed = completed + 1, last_completed_at = NOW(), next_show_at = NULL, deleted = TRUE + WHERE id = $1 + `, taskID) + } + + if err != nil { + log.Printf("Error updating task completion: %v", err) + return fmt.Errorf("error updating task completion: %v", err) + } + + // Обновляем выбранные подзадачи + if len(req.ChildrenTaskIDs) > 0 { + placeholders := make([]string, len(req.ChildrenTaskIDs)) + args := make([]interface{}, len(req.ChildrenTaskIDs)) + for i, id := range req.ChildrenTaskIDs { + placeholders[i] = fmt.Sprintf("$%d", i+1) + args[i] = id + } + + query := fmt.Sprintf(` + UPDATE tasks + SET completed = completed + 1, last_completed_at = NOW() + WHERE id IN (%s) AND deleted = FALSE + `, strings.Join(placeholders, ",")) + + _, err = a.DB.Exec(query, args...) 
+ if err != nil { + log.Printf("Error updating subtasks completion: %v", err) + // Не возвращаем ошибку, основная задача уже обновлена + } + } + + // Если задача связана с желанием, завершаем желание и обрабатываем политику награждения + if wishlistID.Valid { + // Завершаем желание + _, completeErr := a.DB.Exec(` + UPDATE wishlist_items + SET completed = TRUE, updated_at = NOW() + WHERE id = $1 AND completed = FALSE + `, wishlistID.Int64) + if completeErr != nil { + log.Printf("Error completing wishlist item %d: %v", wishlistID.Int64, completeErr) + // Не возвращаем ошибку, задача уже выполнена + } else { + log.Printf("Wishlist item %d completed automatically after task %d completion", wishlistID.Int64, taskID) + // Обрабатываем политику награждения для всех задач, связанных с этим желанием + // Исключаем задачу, которая была закрыта (taskID), чтобы не обрабатывать её повторно + a.processWishlistRewardPolicy(int(wishlistID.Int64), taskID) + } + } + + return nil +} + +// completeTaskAtEndOfDayHandler устанавливает автовыполнение задачи в конце дня +func (a *App) completeTaskAtEndOfDayHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + taskID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest) + return + } + + // Проверяем владельца задачи + var ownerID int + err = a.DB.QueryRow("SELECT user_id FROM tasks WHERE id = $1 AND deleted = FALSE", taskID).Scan(&ownerID) + if err == sql.ErrNoRows || ownerID != userID { + sendErrorWithCORS(w, "Task not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking task ownership: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking task ownership: %v", err), 
http.StatusInternalServerError) + return + } + + var req SaveDraftRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding save draft request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Устанавливаем auto_complete = true + req.AutoComplete = true + + // Используем ту же логику что и saveTaskDraftHandler + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Проверяем, существует ли драфт + var draftID int + err = tx.QueryRow("SELECT id FROM task_drafts WHERE task_id = $1", taskID).Scan(&draftID) + + var progressionValue sql.NullFloat64 + if req.ProgressionValue != nil { + progressionValue = sql.NullFloat64{Float64: *req.ProgressionValue, Valid: true} + } + + if err == sql.ErrNoRows { + // Создаем новый драфт + err = tx.QueryRow(` + INSERT INTO task_drafts (task_id, user_id, progression_value, auto_complete, created_at, updated_at) + VALUES ($1, $2, $3, $4, NOW(), NOW()) + RETURNING id + `, taskID, userID, progressionValue, req.AutoComplete).Scan(&draftID) + + if err != nil { + log.Printf("Error creating draft: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating draft: %v", err), http.StatusInternalServerError) + return + } + } else if err != nil { + log.Printf("Error checking draft existence: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking draft existence: %v", err), http.StatusInternalServerError) + return + } else { + // Обновляем существующий драфт с auto_complete = true + _, err = tx.Exec(` + UPDATE task_drafts + SET progression_value = $1, auto_complete = $2, updated_at = NOW() + WHERE id = $3 + `, progressionValue, req.AutoComplete, draftID) + + if err != nil { + log.Printf("Error updating draft: %v", err) + 
sendErrorWithCORS(w, fmt.Sprintf("Error updating draft: %v", err), http.StatusInternalServerError) + return + } + + // Удаляем все старые записи подзадач + _, err = tx.Exec("DELETE FROM task_draft_subtasks WHERE task_draft_id = $1", draftID) + if err != nil { + log.Printf("Error deleting old draft subtasks: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error deleting old draft subtasks: %v", err), http.StatusInternalServerError) + return + } + } + + // Вставляем новые записи подзадач (только checked подзадачи) + if len(req.ChildrenTaskIDs) > 0 { + // Проверяем, что все подзадачи принадлежат этой задаче + placeholders := make([]string, len(req.ChildrenTaskIDs)) + args := make([]interface{}, len(req.ChildrenTaskIDs)+1) + args[0] = taskID + for i, id := range req.ChildrenTaskIDs { + placeholders[i] = fmt.Sprintf("$%d", i+2) + args[i+1] = id + } + + query := fmt.Sprintf(` + SELECT id FROM tasks + WHERE parent_task_id = $1 AND id IN (%s) AND deleted = FALSE + `, strings.Join(placeholders, ",")) + + validSubtaskRows, err := tx.Query(query, args...) 
+ if err != nil { + log.Printf("Error validating subtasks: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error validating subtasks: %v", err), http.StatusInternalServerError) + return + } + defer validSubtaskRows.Close() + + validSubtaskIDs := make(map[int]bool) + for validSubtaskRows.Next() { + var id int + if err := validSubtaskRows.Scan(&id); err == nil { + validSubtaskIDs[id] = true + } + } + + // Вставляем только валидные подзадачи + for _, subtaskID := range req.ChildrenTaskIDs { + if validSubtaskIDs[subtaskID] { + _, err = tx.Exec(` + INSERT INTO task_draft_subtasks (task_draft_id, subtask_id) + VALUES ($1, $2) + ON CONFLICT (task_draft_id, subtask_id) DO NOTHING + `, draftID, subtaskID) + if err != nil { + log.Printf("Error inserting draft subtask: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error inserting draft subtask: %v", err), http.StatusInternalServerError) + return + } + } + } + } + + // Коммитим транзакцию + if err = tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Task will be completed at end of day", + }) +} + +// completeTaskHandler выполняет задачу +func (a *App) completeTaskHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + taskID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest) + return + } + + var req CompleteTaskRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding complete 
task request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Используем executeTask для выполнения задачи + err = a.executeTask(taskID, userID, req) + if err != nil { + if strings.Contains(err.Error(), "not found") { + sendErrorWithCORS(w, err.Error(), http.StatusNotFound) + } else if strings.Contains(err.Error(), "unlocked") { + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + } else if strings.Contains(err.Error(), "required") { + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + } else { + log.Printf("Error executing task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error executing task: %v", err), http.StatusInternalServerError) + } + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Task completed successfully", + }) +} + +// completeAndDeleteTaskHandler выполняет задачу и затем удаляет её +func (a *App) completeAndDeleteTaskHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + taskID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest) + return + } + + // Сначала выполняем задачу используя executeTask + var req CompleteTaskRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding complete task request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Используем executeTask для выполнения задачи + err = a.executeTask(taskID, userID, req) + if err != nil { + if strings.Contains(err.Error(), "not found") { + sendErrorWithCORS(w, err.Error(), http.StatusNotFound) + } else if 
strings.Contains(err.Error(), "unlocked") { + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + } else if strings.Contains(err.Error(), "required") { + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + } else { + log.Printf("Error executing task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error executing task: %v", err), http.StatusInternalServerError) + } + return + } + + // Помечаем задачу как удаленную + _, err = a.DB.Exec("UPDATE tasks SET deleted = TRUE WHERE id = $1 AND user_id = $2", taskID, userID) + if err != nil { + log.Printf("Error deleting task: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error deleting task: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Task completed and deleted successfully", + }) +} + +// postponeTaskHandler переносит задачу на указанную дату +func (a *App) postponeTaskHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + taskID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid task ID", http.StatusBadRequest) + return + } + + var req PostponeTaskRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding postpone task request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Проверяем владельца + var ownerID int + err = a.DB.QueryRow("SELECT user_id FROM tasks WHERE id = $1 AND deleted = FALSE", taskID).Scan(&ownerID) + if err == sql.ErrNoRows || ownerID != userID { + sendErrorWithCORS(w, "Task not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error 
checking task ownership: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking task ownership: %v", err), http.StatusInternalServerError) + return + } + + // Если NextShowAt == nil, устанавливаем next_show_at в NULL + // Иначе парсим дату и устанавливаем значение + var nextShowAtValue interface{} + if req.NextShowAt == nil || *req.NextShowAt == "" { + nextShowAtValue = nil + } else { + nextShowAt, err := time.Parse(time.RFC3339, *req.NextShowAt) + if err != nil { + log.Printf("Error parsing next_show_at: %v", err) + sendErrorWithCORS(w, "Invalid date format. Use RFC3339 format", http.StatusBadRequest) + return + } + nextShowAtValue = nextShowAt + } + + // Обновляем next_show_at + _, err = a.DB.Exec(` + UPDATE tasks + SET next_show_at = $1 + WHERE id = $2 AND user_id = $3 + `, nextShowAtValue, taskID, userID) + if err != nil { + log.Printf("Error updating next_show_at: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating next_show_at: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Task postponed successfully", + }) +} + +// todoistDisconnectHandler отключает интеграцию Todoist +func (a *App) todoistDisconnectHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + _, err := a.DB.Exec(` + DELETE FROM todoist_integrations WHERE user_id = $1 + `, userID) + + if err != nil { + log.Printf("Todoist disconnect: DB error: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Failed to disconnect: %v", err), http.StatusInternalServerError) + return + } + + log.Printf("Todoist disconnected for user_id=%d", userID) + + w.Header().Set("Content-Type", "application/json") + 
json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Todoist disconnected", + }) +} + +// ============================================ +// Fitbit OAuth handlers +// ============================================ + +// generateFitbitOAuthState генерирует JWT state для Fitbit OAuth +func generateFitbitOAuthState(userID int, jwtSecret []byte) (string, error) { + claims := OAuthStateClaims{ + UserID: userID, + Type: "fitbit_oauth", + RegisteredClaims: jwt.RegisteredClaims{ + ExpiresAt: jwt.NewNumericDate(time.Now().Add(24 * time.Hour)), // 1 день + IssuedAt: jwt.NewNumericDate(time.Now()), + }, + } + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + return token.SignedString(jwtSecret) +} + +// validateFitbitOAuthState проверяет и извлекает user_id из JWT state для Fitbit +func validateFitbitOAuthState(stateString string, jwtSecret []byte) (int, error) { + token, err := jwt.ParseWithClaims(stateString, &OAuthStateClaims{}, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + return jwtSecret, nil + }) + if err != nil { + return 0, err + } + + claims, ok := token.Claims.(*OAuthStateClaims) + if !ok || !token.Valid { + return 0, fmt.Errorf("invalid token") + } + + if claims.Type != "fitbit_oauth" { + return 0, fmt.Errorf("wrong token type") + } + + return claims.UserID, nil +} + +// exchangeFitbitCodeForToken обменивает OAuth code на access_token и refresh_token для Fitbit +func exchangeFitbitCodeForToken(code, redirectURI, clientID, clientSecret string) (accessToken, refreshToken string, expiresIn int, err error) { + data := url.Values{} + data.Set("grant_type", "authorization_code") + data.Set("code", code) + data.Set("redirect_uri", redirectURI) + + req, err := http.NewRequest("POST", "https://api.fitbit.com/oauth2/token", strings.NewReader(data.Encode())) + if err != nil { + return "", "", 0, 
fmt.Errorf("failed to create request: %w", err) + } + + // Fitbit требует Basic Auth для Server приложений + auth := base64.StdEncoding.EncodeToString([]byte(clientID + ":" + clientSecret)) + req.Header.Set("Authorization", "Basic "+auth) + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + return "", "", 0, fmt.Errorf("failed to exchange code: %w", err) + } + defer resp.Body.Close() + + bodyBytes, _ := io.ReadAll(resp.Body) + + if resp.StatusCode != http.StatusOK { + return "", "", 0, fmt.Errorf("token exchange failed (status %d): %s", resp.StatusCode, string(bodyBytes)) + } + + var result struct { + AccessToken string `json:"access_token"` + RefreshToken string `json:"refresh_token"` + ExpiresIn int `json:"expires_in"` + TokenType string `json:"token_type"` + UserID string `json:"user_id"` + Error string `json:"error"` + ErrorDesc string `json:"error_description"` + } + + if err := json.Unmarshal(bodyBytes, &result); err != nil { + return "", "", 0, fmt.Errorf("failed to decode response: %w", err) + } + + if result.Error != "" { + return "", "", 0, fmt.Errorf("token exchange error: %s - %s", result.Error, result.ErrorDesc) + } + + return result.AccessToken, result.RefreshToken, result.ExpiresIn, nil +} + +// getFitbitUserInfo получает информацию о пользователе Fitbit +func getFitbitUserInfo(accessToken string) (string, error) { + req, err := http.NewRequest("GET", "https://api.fitbit.com/1/user/-/profile.json", nil) + if err != nil { + return "", fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Authorization", "Bearer "+accessToken) + req.Header.Set("Accept", "application/json") + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + return "", fmt.Errorf("failed to get user info: %w", err) + } + defer resp.Body.Close() + + bodyBytes, _ := io.ReadAll(resp.Body) + + if 
resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("get user info failed (status %d): %s", resp.StatusCode, string(bodyBytes)) + } + + var result struct { + User struct { + EncodedID string `json:"encodedId"` + } `json:"user"` + } + + if err := json.Unmarshal(bodyBytes, &result); err != nil { + return "", fmt.Errorf("failed to decode response: %w", err) + } + + if result.User.EncodedID == "" { + return "", fmt.Errorf("user ID not found in response") + } + + return result.User.EncodedID, nil +} + +// refreshFitbitToken обновляет access_token используя refresh_token +func refreshFitbitToken(refreshToken, clientID, clientSecret string) (accessToken, newRefreshToken string, expiresIn int, err error) { + data := url.Values{} + data.Set("grant_type", "refresh_token") + data.Set("refresh_token", refreshToken) + + req, err := http.NewRequest("POST", "https://api.fitbit.com/oauth2/token", strings.NewReader(data.Encode())) + if err != nil { + return "", "", 0, fmt.Errorf("failed to create request: %w", err) + } + + auth := base64.StdEncoding.EncodeToString([]byte(clientID + ":" + clientSecret)) + req.Header.Set("Authorization", "Basic "+auth) + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + return "", "", 0, fmt.Errorf("failed to refresh token: %w", err) + } + defer resp.Body.Close() + + bodyBytes, _ := io.ReadAll(resp.Body) + + if resp.StatusCode != http.StatusOK { + return "", "", 0, fmt.Errorf("token refresh failed (status %d): %s", resp.StatusCode, string(bodyBytes)) + } + + var result struct { + AccessToken string `json:"access_token"` + RefreshToken string `json:"refresh_token"` + ExpiresIn int `json:"expires_in"` + Error string `json:"error"` + ErrorDesc string `json:"error_description"` + } + + if err := json.Unmarshal(bodyBytes, &result); err != nil { + return "", "", 0, fmt.Errorf("failed to decode response: %w", err) + } + + if 
result.Error != "" { + return "", "", 0, fmt.Errorf("token refresh error: %s - %s", result.Error, result.ErrorDesc) + } + + return result.AccessToken, result.RefreshToken, result.ExpiresIn, nil +} + +// fitbitOAuthConnectHandler инициирует OAuth flow для Fitbit +func (a *App) fitbitOAuthConnectHandler(w http.ResponseWriter, r *http.Request) { + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + clientID := getEnv("FITBIT_CLIENT_ID", "") + clientSecret := getEnv("FITBIT_CLIENT_SECRET", "") + baseURL := getEnv("WEBHOOK_BASE_URL", "") + + if clientID == "" || clientSecret == "" { + sendErrorWithCORS(w, "FITBIT_CLIENT_ID and FITBIT_CLIENT_SECRET must be configured", http.StatusInternalServerError) + return + } + if baseURL == "" { + sendErrorWithCORS(w, "WEBHOOK_BASE_URL must be configured", http.StatusInternalServerError) + return + } + + redirectURI := strings.TrimRight(baseURL, "/") + "/api/integrations/fitbit/oauth/callback" + + state, err := generateFitbitOAuthState(userID, a.jwtSecret) + if err != nil { + log.Printf("Fitbit OAuth: failed to generate state: %v", err) + sendErrorWithCORS(w, "Failed to generate OAuth state", http.StatusInternalServerError) + return + } + + // Fitbit OAuth URL с необходимыми scopes + authURL := fmt.Sprintf( + "https://www.fitbit.com/oauth2/authorize?response_type=code&client_id=%s&redirect_uri=%s&scope=activity%%20profile&state=%s", + url.QueryEscape(clientID), + url.QueryEscape(redirectURI), + url.QueryEscape(state), + ) + + log.Printf("Fitbit OAuth: returning auth URL for user_id=%d", userID) + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "auth_url": authURL, + }) +} + +// fitbitOAuthCallbackHandler обрабатывает OAuth callback от Fitbit +func (a *App) fitbitOAuthCallbackHandler(w http.ResponseWriter, r *http.Request) { + log.Printf("Fitbit OAuth callback: received 
request, URL=%s", r.URL.String()) + + frontendURL := getEnv("WEBHOOK_BASE_URL", "") + redirectSuccess := frontendURL + "/?integration=fitbit&status=connected" + redirectError := frontendURL + "/?integration=fitbit&status=error" + + clientID := getEnv("FITBIT_CLIENT_ID", "") + clientSecret := getEnv("FITBIT_CLIENT_SECRET", "") + baseURL := getEnv("WEBHOOK_BASE_URL", "") + + log.Printf("Fitbit OAuth callback: WEBHOOK_BASE_URL=%s, FITBIT_CLIENT_ID set=%v, FITBIT_CLIENT_SECRET set=%v", + baseURL, clientID != "", clientSecret != "") + + if clientID == "" || clientSecret == "" || baseURL == "" { + log.Printf("Fitbit OAuth: missing configuration (clientID=%v, clientSecret=%v, baseURL=%v)", + clientID != "", clientSecret != "", baseURL != "") + http.Redirect(w, r, redirectError+"&message=config_error", http.StatusTemporaryRedirect) + return + } + + redirectURI := strings.TrimRight(baseURL, "/") + "/api/integrations/fitbit/oauth/callback" + log.Printf("Fitbit OAuth callback: redirectURI=%s", redirectURI) + + // Проверяем state + state := r.URL.Query().Get("state") + userID, err := validateFitbitOAuthState(state, a.jwtSecret) + if err != nil { + log.Printf("Fitbit OAuth: invalid state: %v (state length=%d)", err, len(state)) + http.Redirect(w, r, redirectError+"&message=invalid_state", http.StatusTemporaryRedirect) + return + } + + log.Printf("Fitbit OAuth callback: validated state, user_id=%d", userID) + + // Получаем code + code := r.URL.Query().Get("code") + if code == "" { + // Проверяем наличие ошибки от Fitbit + fitbitError := r.URL.Query().Get("error") + fitbitErrorDesc := r.URL.Query().Get("error_description") + log.Printf("Fitbit OAuth: no code in callback, error=%s, error_description=%s", fitbitError, fitbitErrorDesc) + http.Redirect(w, r, redirectError+"&message=no_code", http.StatusTemporaryRedirect) + return + } + + log.Printf("Fitbit OAuth callback: got code, exchanging for tokens...") + + // Обмениваем code на токены + accessToken, refreshToken, expiresIn, err 
:= exchangeFitbitCodeForToken(code, redirectURI, clientID, clientSecret) + if err != nil { + log.Printf("Fitbit OAuth: token exchange failed for user_id=%d: %v", userID, err) + http.Redirect(w, r, redirectError+"&message=token_exchange_failed", http.StatusTemporaryRedirect) + return + } + + log.Printf("Fitbit OAuth callback: token exchange successful, expiresIn=%d", expiresIn) + + // Получаем информацию о пользователе + fitbitUserID, err := getFitbitUserInfo(accessToken) + if err != nil { + log.Printf("Fitbit OAuth: get user info failed for user_id=%d: %v", userID, err) + http.Redirect(w, r, redirectError+"&message=user_info_failed", http.StatusTemporaryRedirect) + return + } + + log.Printf("Fitbit OAuth: user_id=%d connected fitbit_user_id=%s", userID, fitbitUserID) + + // Вычисляем время истечения токена + tokenExpiresAt := time.Now().Add(time.Duration(expiresIn) * time.Second) + + // Сохраняем в БД + _, err = a.DB.Exec(` + INSERT INTO fitbit_integrations (user_id, fitbit_user_id, access_token, refresh_token, token_expires_at) + VALUES ($1, $2, $3, $4, $5) + ON CONFLICT (user_id) DO UPDATE SET + fitbit_user_id = $2, + access_token = $3, + refresh_token = $4, + token_expires_at = $5, + updated_at = CURRENT_TIMESTAMP + `, userID, fitbitUserID, accessToken, refreshToken, tokenExpiresAt) + + if err != nil { + log.Printf("Fitbit OAuth: DB error for user_id=%d: %v", userID, err) + http.Redirect(w, r, redirectError+"&message=db_error", http.StatusTemporaryRedirect) + return + } + + log.Printf("Fitbit OAuth: successfully saved integration for user_id=%d, redirecting to %s", userID, redirectSuccess) + + // Редирект на страницу интеграций + http.Redirect(w, r, redirectSuccess, http.StatusTemporaryRedirect) +} + +// getFitbitStatusHandler возвращает статус подключения Fitbit +func (a *App) getFitbitStatusHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + 
userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var fitbitUserID sql.NullString + var goalStepsMin, goalStepsMax, goalFloorsMin, goalFloorsMax, goalAzmMin, goalAzmMax sql.NullInt64 + err := a.DB.QueryRow(` + SELECT fitbit_user_id, goal_steps_min, goal_steps_max, goal_floors_min, goal_floors_max, goal_azm_min, goal_azm_max + FROM fitbit_integrations + WHERE user_id = $1 AND access_token IS NOT NULL + `, userID).Scan(&fitbitUserID, &goalStepsMin, &goalStepsMax, &goalFloorsMin, &goalFloorsMax, &goalAzmMin, &goalAzmMax) + + if err == sql.ErrNoRows || !fitbitUserID.Valid { + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "connected": false, + }) + return + } + if err != nil { + sendErrorWithCORS(w, fmt.Sprintf("Failed to get status: %v", err), http.StatusInternalServerError) + return + } + + response := map[string]interface{}{ + "connected": true, + "goals": map[string]interface{}{ + "steps": map[string]interface{}{ + "min": goalStepsMin.Int64, + "max": goalStepsMax.Int64, + }, + "floors": map[string]interface{}{ + "min": goalFloorsMin.Int64, + "max": goalFloorsMax.Int64, + }, + "azm": map[string]interface{}{ + "min": goalAzmMin.Int64, + "max": goalAzmMax.Int64, + }, + }, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// fitbitDisconnectHandler отключает интеграцию Fitbit +func (a *App) fitbitDisconnectHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + _, err := a.DB.Exec(` + DELETE FROM fitbit_integrations WHERE user_id = $1 + `, userID) + + if err != nil { + log.Printf("Fitbit disconnect: DB error: %v", err) + sendErrorWithCORS(w, 
fmt.Sprintf("Failed to disconnect: %v", err), http.StatusInternalServerError)
		return
	}

	log.Printf("Fitbit disconnected for user_id=%d", userID)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success": true,
		"message": "Fitbit disconnected",
	})
}

// updateFitbitGoalsHandler updates the user's daily goal ranges
// (steps/floors/AZM, each with a min and max).
func (a *App) updateFitbitGoalsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Request body shape: {"steps": {"min": n, "max": n}, "floors": ..., "azm": ...}.
	var req struct {
		Steps  map[string]int64 `json:"steps"`
		Floors map[string]int64 `json:"floors"`
		Azm    map[string]int64 `json:"azm"`
	}

	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest)
		return
	}

	_, err := a.DB.Exec(`
		UPDATE fitbit_integrations
		SET goal_steps_min = $1, goal_steps_max = $2,
			goal_floors_min = $3, goal_floors_max = $4,
			goal_azm_min = $5, goal_azm_max = $6,
			updated_at = CURRENT_TIMESTAMP
		WHERE user_id = $7
	`, req.Steps["min"], req.Steps["max"],
		req.Floors["min"], req.Floors["max"],
		req.Azm["min"], req.Azm["max"],
		userID)

	if err != nil {
		log.Printf("Fitbit update goals: DB error: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Failed to update goals: %v", err), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success": true,
		"message": "Goals updated",
	})
}

// getFitbitAccessToken returns a valid access token for the user,
// transparently refreshing (and persisting) it when it is expired or
// about to expire.
func (a *App) getFitbitAccessToken(userID int) (string, error) {
	var accessToken, refreshToken sql.NullString
	var tokenExpiresAt sql.NullTime

	err := a.DB.QueryRow(`
		SELECT access_token, refresh_token, token_expires_at
		FROM fitbit_integrations
		WHERE user_id = $1
	`, userID).Scan(&accessToken, &refreshToken, &tokenExpiresAt)

	if err == sql.ErrNoRows {
		return "", fmt.Errorf("fitbit integration not found")
	}
	if err != nil {
		return "", fmt.Errorf("failed to get tokens: %w", err)
	}

	if !accessToken.Valid {
		return "", fmt.Errorf("access token not found")
	}

	// Refresh proactively when the token expires within the next 5 minutes.
	if tokenExpiresAt.Valid && time.Now().Add(5*time.Minute).After(tokenExpiresAt.Time) {
		// Token expired or about to expire — refresh it.
		if !refreshToken.Valid {
			return "", fmt.Errorf("refresh token not found")
		}

		clientID := getEnv("FITBIT_CLIENT_ID", "")
		clientSecret := getEnv("FITBIT_CLIENT_SECRET", "")
		if clientID == "" || clientSecret == "" {
			return "", fmt.Errorf("FITBIT_CLIENT_ID and FITBIT_CLIENT_SECRET must be configured")
		}

		newAccessToken, newRefreshToken, expiresIn, err := refreshFitbitToken(refreshToken.String, clientID, clientSecret)
		if err != nil {
			return "", fmt.Errorf("failed to refresh token: %w", err)
		}

		// Persist the rotated token pair in the DB.
		tokenExpiresAtNew := time.Now().Add(time.Duration(expiresIn) * time.Second)
		_, err = a.DB.Exec(`
			UPDATE fitbit_integrations
			SET access_token = $1, refresh_token = $2, token_expires_at = $3, updated_at = CURRENT_TIMESTAMP
			WHERE user_id = $4
		`, newAccessToken, newRefreshToken, tokenExpiresAtNew, userID)

		if err != nil {
			return "", fmt.Errorf("failed to update tokens: %w", err)
		}

		log.Printf("Fitbit token refreshed for user_id=%d", userID)
		return newAccessToken, nil
	}

	return accessToken.String, nil
}

// syncFitbitData pulls activity data from the Fitbit API for the given date
// and upserts it into fitbit_daily_stats.
func (a *App) syncFitbitData(userID int, date time.Time) error {
	accessToken, err := a.getFitbitAccessToken(userID)
	if err != nil {
		return fmt.Errorf("failed to get access token: %w", err)
	}

	dateStr := date.Format("2006-01-02")

	// Fetch the daily activity summary for the day.
	activityURL := fmt.Sprintf("https://api.fitbit.com/1/user/-/activities/date/%s.json", dateStr)
	req, err := http.NewRequest("GET", activityURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request: %w", err)
	}

	req.Header.Set("Authorization", "Bearer "+accessToken)
	req.Header.Set("Accept", "application/json")

	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to get activity data: %w", err)
	}
	defer resp.Body.Close()

	bodyBytes, _ := io.ReadAll(resp.Body)

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("get activity data failed (status %d): %s", resp.StatusCode, string(bodyBytes))
	}

	// Only the summary counters are needed from the activity payload.
	var activityData struct {
		Summary struct {
			Steps  int `json:"steps"`
			Floors int `json:"floors"`
		} `json:"summary"`
	}

	if err := json.Unmarshal(bodyBytes, &activityData); err != nil {
		return fmt.Errorf("failed to decode activity data: %w", err)
	}

	// Fetch Active Zone Minutes for the same day.
	azmURL := fmt.Sprintf("https://api.fitbit.com/1/user/-/activities/active-zone-minutes/date/%s/1d.json", dateStr)
	reqAZM, err := http.NewRequest("GET", azmURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create AZM request: %w", err)
	}

	reqAZM.Header.Set("Authorization", "Bearer "+accessToken)
	reqAZM.Header.Set("Accept", "application/json")

	respAZM, err := client.Do(reqAZM)
	if err != nil {
		return fmt.Errorf("failed to get AZM data: %w", err)
	}
	defer respAZM.Body.Close()

	bodyBytesAZM, _ := io.ReadAll(respAZM.Body)

	// AZM is best-effort: a non-200 response or decode failure leaves azmValue at 0.
	var azmValue int
	if respAZM.StatusCode == http.StatusOK {
		var azmData struct {
			ActivitiesActiveZoneMinutes []struct {
				Value struct {
					ActiveZoneMinutes int `json:"activeZoneMinutes"`
				} `json:"value"`
			} `json:"activities-active-zone-minutes"`
		}

		if err := json.Unmarshal(bodyBytesAZM, &azmData); err == nil {
			if len(azmData.ActivitiesActiveZoneMinutes) > 0 {
				azmValue = azmData.ActivitiesActiveZoneMinutes[0].Value.ActiveZoneMinutes
			}
		}
	}

	// Upsert the day's stats.
	_, err = a.DB.Exec(`
		INSERT INTO fitbit_daily_stats (user_id, date, steps, floors, active_zone_minutes, updated_at)
		VALUES ($1, $2, $3, $4, $5, CURRENT_TIMESTAMP)
		ON CONFLICT (user_id, date) DO UPDATE SET
			steps = $3,
			floors = $4,
			active_zone_minutes = $5,
			updated_at = CURRENT_TIMESTAMP
	`, userID, dateStr, activityData.Summary.Steps, activityData.Summary.Floors, azmValue)

	if err != nil {
		return fmt.Errorf("failed to save stats: %w", err)
	}

	log.Printf("Fitbit data synced for user_id=%d, date=%s: steps=%d, floors=%d, azm=%d",
		userID, dateStr, activityData.Summary.Steps, activityData.Summary.Floors, azmValue)

	return nil
}

// fitbitSyncHandler triggers a manual synchronization of today's Fitbit data.
func (a *App) fitbitSyncHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Sync today's data only.
	err := a.syncFitbitData(userID, time.Now())
	if err != nil {
		log.Printf("Fitbit sync error: %v", err)
		sendErrorWithCORS(w, fmt.Sprintf("Sync failed: %v", err), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success": true,
		"message": "Data synced successfully",
	})
}

// getFitbitStatsHandler returns the user's Fitbit stats for a given date.
func (a *App) getFitbitStatsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Read the requested date from the query string (defaults to today).
	dateStr := r.URL.Query().Get("date")
	if dateStr == "" {
		dateStr = time.Now().Format("2006-01-02")
	}

	var steps, floors, azm sql.NullInt64
	err := a.DB.QueryRow(`
		SELECT steps, floors, active_zone_minutes
		FROM fitbit_daily_stats
		WHERE user_id = $1 AND date = $2
	`, userID, dateStr).Scan(&steps, &floors, &azm)

	if err == sql.ErrNoRows {
		// No data for that date — report zeroes rather than an error.
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"date":   dateStr,
			"steps":  0,
			"floors": 0,
			"azm":    0,
		})
		return
	}

	if err != nil {
		sendErrorWithCORS(w, fmt.Sprintf("Failed to get stats: %v", err), http.StatusInternalServerError)
		return
	}

	// Load the user's goal ranges.
	var goalStepsMin, goalStepsMax, goalFloorsMin, goalFloorsMax, goalAzmMin, goalAzmMax sql.NullInt64
	err = a.DB.QueryRow(`
		SELECT goal_steps_min, goal_steps_max, goal_floors_min, goal_floors_max, goal_azm_min, goal_azm_max
		FROM fitbit_integrations
		WHERE user_id = $1
	`, userID).Scan(&goalStepsMin, &goalStepsMax, &goalFloorsMin, &goalFloorsMax, &goalAzmMin, &goalAzmMax)

	if err != nil {
		// No goals configured (or query failed) — fall back to defaults.
		goalStepsMin = sql.NullInt64{Int64: 8000, Valid: true}
		goalStepsMax = sql.NullInt64{Int64: 10000, Valid: true}
		goalFloorsMin = sql.NullInt64{Int64: 8, Valid: true}
		goalFloorsMax = sql.NullInt64{Int64: 10, Valid: true}
		goalAzmMin = sql.NullInt64{Int64: 22, Valid: true}
		goalAzmMax = sql.NullInt64{Int64: 44, Valid: true}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"date": dateStr,
		"steps": map[string]interface{}{
			"value": steps.Int64,
			"goal": map[string]interface{}{
				"min": goalStepsMin.Int64,
				"max": goalStepsMax.Int64,
			},
		},
		"floors": map[string]interface{}{
			"value": floors.Int64,
			"goal": map[string]interface{}{
				"min": goalFloorsMin.Int64,
				"max": goalFloorsMax.Int64,
			},
		},
		"azm": map[string]interface{}{
			"value": azm.Int64,
			"goal": map[string]interface{}{
				"min": goalAzmMin.Int64,
				"max": goalAzmMax.Int64,
			},
		},
	})
}

// ============================================
// Wishlist handlers
// ============================================

// calculateProjectPointsFromDate sums a project's node scores for the given
// user from startDate (inclusive, compared by calendar day) to now.
// An invalid (NULL) startDate means "all time".
// Reads directly from nodes via the denormalized created_date column
// (no JOIN with entries needed).
func (a *App) calculateProjectPointsFromDate(
	projectID int,
	startDate sql.NullTime,
	userID int,
) (float64, error) {
	var totalScore float64
	var err error

	if !startDate.Valid {
		// All time: sum every node of this user for the given project.
		err = a.DB.QueryRow(`
			SELECT COALESCE(SUM(n.score), 0)
			FROM nodes n
			JOIN projects p ON n.project_id = p.id
			WHERE n.project_id = $1 AND n.user_id = $2 AND p.user_id = $2
		`, projectID, userID).Scan(&totalScore)
	} else {
		// From startDate to now; DATE() compares by calendar day only
		// (time-of-day is ignored on both sides).
		err = a.DB.QueryRow(`
			SELECT COALESCE(SUM(n.score), 0)
			FROM nodes n
			JOIN projects p ON n.project_id = p.id
			WHERE n.project_id = $1
				AND n.user_id = $2
				AND p.user_id = $2
				AND DATE(n.created_date) >= DATE($3)
		`, projectID, userID, startDate.Time).Scan(&totalScore)
	}

	if err != nil {
		log.Printf("Error calculating project points from date: %v", err)
		return 0, err
	}

	return totalScore, nil
}

// getProjectMedian reads a project's median score from the materialized view
// projects_median_mv; returns an error when no median row exists.
func (a *App) getProjectMedian(projectID int) (float64, error) {
	var median float64
	err := a.DB.QueryRow(`
		SELECT median_score
		FROM projects_median_mv
		WHERE project_id = $1
	`, projectID).Scan(&median)
	if err != nil {
		if err == sql.ErrNoRows {
			return 0, fmt.Errorf("median not found for project %d", projectID)
		}
		return 0, err
	}
	return median, nil
}

// calculateProjectUnlockWeeks estimates how many weeks remain until a score
// condition unlocks.
//
//	projectID      - project the condition is bound to
//	requiredPoints - points needed to satisfy the condition
//	startDate      - counting start (invalid/NULL = all time)
//	userID         - owner of the condition
//
// Returns (float64):
//   - > 0:   condition not yet met, estimated weeks remaining
//   - 0:     condition already met (remaining <= 0)
//   - 99999: sentinel — median missing/zero or the calculation failed
func (a *App) calculateProjectUnlockWeeks(projectID int, requiredPoints float64, startDate sql.NullTime, userID int) float64 {
	// 1. Points accumulated since startDate.
	currentPoints, err := a.calculateProjectPointsFromDate(projectID, startDate, userID)
	if err != nil {
		log.Printf("Error calculating project points for project %d, user %d: %v", projectID, userID, err)
		return 99999 // calculation failed — return the sentinel
	}

	// 2. Remaining points.
	remaining := requiredPoints - currentPoints
	if remaining <= 0 {
		// Condition already satisfied.
		return 0
	}

	// 3. Weekly pace = project median.
	median, err := a.getProjectMedian(projectID)
	if err != nil || median <= 0 {
		// Median missing or zero — cannot estimate. This is an expected
		// situation, so it is not logged.
		return 99999
	}

	// 4. Weeks = remaining points / weekly median.
	weeks := remaining / median
	return weeks
}

// formatWeeksText renders a week count as Russian text with correct
// declension: "2 недели", "<1 недели", "5 недель", "∞ недель", etc.
// Returns "" when the condition is already met or the value is negative.
func formatWeeksText(weeks float64) string {
	// 0 means the condition is already satisfied — show no estimate.
	if weeks == 0 {
		return ""
	}

	// >= 99999 is the sentinel for "cannot be estimated" (no median).
	if weeks >= 99999 {
		return "∞ недель"
	}

	if weeks < 0 {
		return ""
	}

	if weeks < 1 {
		return "<1 недели"
	}

	weeksRounded := math.Round(weeks)
	weeksInt := int(weeksRounded)

	// Russian plural declension for "неделя" ("week").
	var weekWord string
	lastDigit := weeksInt % 10
	lastTwoDigits := weeksInt % 100

	if lastTwoDigits >= 11 && lastTwoDigits <= 14 {
		weekWord = "недель"
	} else if lastDigit == 1 {
		weekWord = "неделя"
	} else if lastDigit >= 2 && lastDigit <= 4 {
		weekWord = "недели"
	} else {
		weekWord = "недель"
	}

	return fmt.Sprintf("%d %s", weeksInt, weekWord)
}

// checkWishlistUnlock evaluates ALL unlock conditions attached to a wishlist
// item. Every condition must hold (AND semantics). An item with no
// conditions is unlocked by default.
func (a *App) checkWishlistUnlock(itemID int, userID int) (bool, error) {
	// Load all unlock conditions for the item.
	rows, err := a.DB.Query(`
		SELECT
			wc.id,
			wc.display_order,
			wc.task_condition_id,
			wc.score_condition_id,
			wc.user_id AS condition_user_id,
			tc.task_id,
			sc.project_id,
			sc.required_points,
			sc.start_date
		FROM wishlist_conditions wc
		LEFT JOIN task_conditions tc ON wc.task_condition_id = tc.id
		LEFT JOIN score_conditions sc ON wc.score_condition_id = sc.id
		WHERE wc.wishlist_item_id = $1
		ORDER BY wc.display_order, wc.id
	`, itemID)

	if err != nil {
		return false, err
	}
	defer rows.Close()

	var hasConditions bool
	var allConditionsMet = true

	for rows.Next() {
		hasConditions = true

		var wcID, displayOrder int
		var taskConditionID, scoreConditionID sql.NullInt64
		var conditionUserID sql.NullInt64
		var taskID sql.NullInt64
		var projectID sql.NullInt64
		var requiredPoints sql.NullFloat64
		var startDate sql.NullTime

		err := rows.Scan(
			&wcID, &displayOrder,
			&taskConditionID, &scoreConditionID, &conditionUserID,
			&taskID, &projectID, &requiredPoints, &startDate,
		)
		if err != nil {
			return false, err
		}

		// Use the condition's own user_id when present, otherwise fall back
		// to the current user.
		conditionOwnerID := userID
		if conditionUserID.Valid {
			conditionOwnerID = int(conditionUserID.Int64)
		}

		var conditionMet bool

		if taskConditionID.Valid {
			// Task-completion condition.
			if !taskID.Valid {
				return false, fmt.Errorf("task_id is missing for task_condition_id=%d", taskConditionID.Int64)
			}

			var completed int
			err := a.DB.QueryRow(`
				SELECT completed
				FROM tasks
				WHERE id = $1 AND user_id = $2 AND deleted = FALSE
			`, taskID.Int64, conditionOwnerID).Scan(&completed)

			if err == sql.ErrNoRows {
				// Task deleted or missing — do not let it block the wish.
				conditionMet = true
			} else if err != nil {
				return false, err
			} else {
				conditionMet = completed > 0
			}

		} else if scoreConditionID.Valid {
			// Score (project points) condition.
			if !projectID.Valid || !requiredPoints.Valid {
				return false, fmt.Errorf("project_id or required_points missing for score_condition_id=%d", scoreConditionID.Int64)
			}

			totalScore, err := a.calculateProjectPointsFromDate(
				int(projectID.Int64),
				startDate,
				conditionOwnerID,
			)
			if err != nil {
				return false, err
			}

			conditionMet = totalScore >= requiredPoints.Float64
		} else {
			return false, fmt.Errorf("invalid condition: neither task nor score condition")
		}

		if !conditionMet {
			allConditionsMet = false
		}
	}

	// No conditions at all — unlocked by default.
	if !hasConditions {
		return true, nil
	}

	return allConditionsMet, nil
}

// isConditionLocked reports whether a single display condition is still
// locked (task not completed, or current points below required points).
func isConditionLocked(cond UnlockConditionDisplay) bool {
	if cond.Type == "task_completion" {
		return cond.TaskCompleted == nil || !*cond.TaskCompleted
	} else if cond.Type == "project_points" {
		return cond.CurrentPoints == nil || cond.RequiredPoints == nil || *cond.CurrentPoints < *cond.RequiredPoints
	}
	return false
}

// getConditionUnlockWeeks returns the estimated unlock horizon in weeks for
// a condition. Used to order locked score conditions; task conditions get 0,
// underspecified score conditions get the 99999 sentinel.
func (a *App) getConditionUnlockWeeks(cond UnlockConditionDisplay, userID int) float64 {
	if cond.Type != "project_points" {
		return 0
	}
	if cond.ProjectID == nil || cond.RequiredPoints == nil {
		return 99999.0
	}

	// Parse the optional YYYY-MM-DD start date; an unparsable value is
	// silently treated as "all time" (startDate stays invalid).
	var startDate sql.NullTime
	if cond.StartDate != nil {
		date, err := time.Parse("2006-01-02", *cond.StartDate)
		if err == nil {
			startDate = sql.NullTime{Time: date, Valid: true}
		}
	}

	conditionOwnerID := userID
	if cond.UserID != nil {
		conditionOwnerID = *cond.UserID
	}

	return a.calculateProjectUnlockWeeks(*cond.ProjectID, *cond.RequiredPoints, startDate, conditionOwnerID)
}

// sortUnlockConditions sorts the conditions in this order:
//  1. locked task conditions (alphabetically)
//  2. locked score conditions (by unlock horizon, ascending)
//  3. unlocked task conditions (alphabetically)
//  4. unlocked score conditions (alphabetically)
func (a *App) sortUnlockConditions(conditions []UnlockConditionDisplay, userID int) {
	sort.Slice(conditions, func(i, j int) bool {
		condI := conditions[i]
		condJ := conditions[j]

		lockedI := isConditionLocked(condI)
		lockedJ := isConditionLocked(condJ)

		// 1. Locked conditions sort before unlocked ones.
		if lockedI != lockedJ {
			return lockedI // lockedI == true sorts first
		}

		// Both locked or both unlocked: order by type next.
		if lockedI {
			// Locked: task conditions before score conditions.
			if condI.Type == "task_completion" && condJ.Type == "project_points" {
				return true
			}
			if condI.Type == "project_points" && condJ.Type == "task_completion" {
				return false
			}

			// Both of the same type.
			if condI.Type == "task_completion" {
				// Locked tasks: alphabetical by task name, ID as tiebreaker.
				taskNameI := ""
				taskNameJ := ""
				if condI.TaskName != nil {
					taskNameI = *condI.TaskName
				}
				if condJ.TaskName != nil {
					taskNameJ = *condJ.TaskName
				}
				if taskNameI != taskNameJ {
					return taskNameI < taskNameJ
				}
				return condI.ID < condJ.ID
			} else {
				// Locked score conditions: by unlock horizon, ascending.
				weeksI := a.getConditionUnlockWeeks(condI, userID)
				weeksJ := a.getConditionUnlockWeeks(condJ, userID)
				if weeksI != weeksJ {
					return weeksI < weeksJ
				}
				// Equal estimates: alphabetical by project name.
				projectNameI := ""
				projectNameJ := ""
				if condI.ProjectName != nil {
					projectNameI = *condI.ProjectName
				}
				if condJ.ProjectName != nil {
					projectNameJ = *condJ.ProjectName
				}
				if projectNameI != projectNameJ {
					return projectNameI < projectNameJ
				}
				return condI.ID < condJ.ID
			}
		} else {
			// Unlocked: task conditions before score conditions.
			if condI.Type == "task_completion" && condJ.Type == "project_points" {
				return true
			}
			if condI.Type == "project_points" && condJ.Type == "task_completion" {
				return false
			}

			// Both of the same type: alphabetical.
			if condI.Type == "task_completion" {
				// Unlocked tasks: alphabetical by task name, ID as tiebreaker.
				taskNameI := ""
				taskNameJ := ""
				if condI.TaskName != nil {
					taskNameI = *condI.TaskName
				}
				if condJ.TaskName != nil {
					taskNameJ = *condJ.TaskName
				}
				if taskNameI != taskNameJ {
					return taskNameI < taskNameJ
				}
				return condI.ID < condJ.ID
			} else {
				// Unlocked score conditions: alphabetical by project name.
				projectNameI := ""
				projectNameJ := ""
				if condI.ProjectName != nil {
					projectNameI = *condI.ProjectName
				}
				if condJ.ProjectName != nil {
					projectNameJ = *condJ.ProjectName
				}
				if projectNameI != projectNameJ {
					return projectNameI < projectNameJ
				}
				return condI.ID < condJ.ID
			}
		}
	})
}

// getWishlistItemsWithConditions loads wishlist items together with their
// unlock conditions, evaluates unlock state and per-condition progress, and
// attaches linked-task info. includeCompleted controls whether completed
// items are returned.
func (a *App) getWishlistItemsWithConditions(userID int, includeCompleted bool) ([]WishlistItem, error) {
	query := `
		SELECT
			wi.id,
			wi.name,
			wi.price,
			wi.image_path,
			wi.link,
			wi.completed,
			wi.group_name,
			wc.id AS condition_id,
			wc.display_order,
			wc.task_condition_id,
			wc.score_condition_id,
			wc.user_id AS condition_user_id,
			tc.task_id,
			t.name AS task_name,
			sc.project_id,
			p.name AS project_name,
			sc.required_points,
			sc.start_date
		FROM wishlist_items wi
		LEFT JOIN wishlist_conditions wc ON wi.id = wc.wishlist_item_id
		LEFT JOIN task_conditions tc ON wc.task_condition_id = tc.id
		LEFT JOIN tasks t ON tc.task_id = t.id AND t.deleted = FALSE
		LEFT JOIN score_conditions sc ON wc.score_condition_id = sc.id
		LEFT JOIN projects p ON sc.project_id = p.id AND p.deleted = FALSE
		WHERE wi.user_id = $1
			AND wi.deleted = FALSE
			AND ($2 = TRUE OR wi.completed = FALSE)
		ORDER BY wi.completed, wi.id, wc.display_order, wc.id
	`

	rows, err := a.DB.Query(query, userID, includeCompleted)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	// One row per (item, condition) pair — group rows by wishlist_item_id.
	itemsMap := make(map[int]*WishlistItem)

	for rows.Next() {
		var itemID int
		var name string
		var price sql.NullFloat64
		var imagePath, link sql.NullString
		var completed bool
		var groupName sql.NullString

		var conditionID, displayOrder sql.NullInt64
		var taskConditionID, scoreConditionID sql.NullInt64
		var conditionUserID sql.NullInt64
		var taskID sql.NullInt64
		var taskName sql.NullString
		var projectID sql.NullInt64
		var projectName sql.NullString
		var requiredPoints sql.NullFloat64
		var startDate sql.NullTime

		err := rows.Scan(
			&itemID, &name, &price, &imagePath, &link, &completed,
			&groupName,
			&conditionID, &displayOrder,
			&taskConditionID, &scoreConditionID, &conditionUserID,
			&taskID, &taskName,
			&projectID, &projectName, &requiredPoints, &startDate,
		)
		if err != nil {
			return nil, err
		}

		// Get or create the item entry.
		item, exists := itemsMap[itemID]
		if !exists {
			item = &WishlistItem{
				ID:               itemID,
				Name:             name,
				Completed:        completed,
				UnlockConditions: []UnlockConditionDisplay{},
			}
			if price.Valid {
				p := price.Float64
				item.Price = &p
			}
			if imagePath.Valid {
				url := imagePath.String
				item.ImageURL = &url
			}
			if link.Valid {
				l := link.String
				item.Link = &l
			}
			if groupName.Valid && groupName.String != "" {
				groupNameVal := groupName.String
				item.GroupName = &groupNameVal
			}
			itemsMap[itemID] = item
		}

		// Attach the condition, if the row carries one.
		if conditionID.Valid {
			// Resolve the condition owner.
			conditionOwnerID := userID
			if conditionUserID.Valid {
				conditionOwnerID = int(conditionUserID.Int64)
			}

			// For task conditions, verify the task still exists (not deleted).
			if taskConditionID.Valid && taskID.Valid {
				var taskExists bool
				err := a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE)`, taskID.Int64, conditionOwnerID).Scan(&taskExists)
				if err != nil || !taskExists {
					// Task was deleted — omit the condition from the display
					// list; the unlock check treats it as satisfied.
					continue
				}
			}

			condition := UnlockConditionDisplay{
				ID:           int(conditionID.Int64),
				DisplayOrder: int(displayOrder.Int64),
			}

			// Record the owning user on the condition.
			if conditionUserID.Valid {
				conditionOwnerID := int(conditionUserID.Int64)
				condition.UserID = &conditionOwnerID
			} else {
				condition.UserID = &userID
			}

			if taskConditionID.Valid {
				condition.Type = "task_completion"
				if taskName.Valid {
					condition.TaskName = &taskName.String
				}
				if taskID.Valid {
					taskIDVal := int(taskID.Int64)
					condition.TaskID = &taskIDVal
				}
			} else if scoreConditionID.Valid {
				condition.Type = "project_points"
				if projectName.Valid {
					condition.ProjectName = &projectName.String
				}
				if projectID.Valid {
					projectIDVal := int(projectID.Int64)
					condition.ProjectID = &projectIDVal
				}
				if requiredPoints.Valid {
					condition.RequiredPoints = &requiredPoints.Float64
				}
				if startDate.Valid {
					// Format as YYYY-MM-DD.
					dateStr := startDate.Time.Format("2006-01-02")
					condition.StartDate = &dateStr
				}
			}

			item.UnlockConditions = append(item.UnlockConditions, condition)
		}
	}

	// Convert the map to a slice and evaluate unlock state per item.
	items := make([]WishlistItem, 0, len(itemsMap))
	for _, item := range itemsMap {
		unlocked, err := a.checkWishlistUnlock(item.ID, userID)
		if err != nil {
			log.Printf("Error checking unlock for wishlist %d: %v", item.ID, err)
			unlocked = false
		}
		item.Unlocked = unlocked

		// Order the conditions for display.
		a.sortUnlockConditions(item.UnlockConditions, userID)

		// For locked, uncompleted items: find the first locked condition,
		// count how many are locked, and compute per-condition progress.
		if !unlocked && !item.Completed {
			lockedCount := 0
			var firstLocked *UnlockConditionDisplay
			for i := range item.UnlockConditions {
				// Re-check each condition individually.
				condition := &item.UnlockConditions[i]
				var conditionMet bool
				var err error

				if condition.Type == "task_completion" {
					// Resolve task_id and owner for this condition.
					var taskID int
					var conditionOwnerID int
					err = a.DB.QueryRow(`
						SELECT tc.task_id, COALESCE(wc.user_id, $2)
						FROM wishlist_conditions wc
						JOIN task_conditions tc ON wc.task_condition_id = tc.id
						WHERE wc.id = $1
					`, condition.ID, userID).Scan(&taskID, &conditionOwnerID)
					if err == nil {
						var completed int
						err = a.DB.QueryRow(`
							SELECT completed FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE
						`, taskID, conditionOwnerID).Scan(&completed)
						if err == sql.ErrNoRows {
							// Task deleted or missing — does not block the wish.
							conditionMet = true
							completedBool := true
							condition.TaskCompleted = &completedBool
						} else if err == nil {
							conditionMet = completed > 0
							completedBool := conditionMet
							condition.TaskCompleted = &completedBool
						}
					}
				} else if condition.Type == "project_points" {
					// Resolve project_id, required_points and owner.
					var projectID int
					var requiredPoints float64
					var startDate sql.NullTime
					var conditionOwnerID int
					err = a.DB.QueryRow(`
						SELECT sc.project_id, sc.required_points, sc.start_date, COALESCE(wc.user_id, $2)
						FROM wishlist_conditions wc
						JOIN score_conditions sc ON wc.score_condition_id = sc.id
						WHERE wc.id = $1
					`, condition.ID, userID).Scan(&projectID, &requiredPoints, &startDate, &conditionOwnerID)
					if err == nil {
						totalScore, err := a.calculateProjectPointsFromDate(projectID, startDate, conditionOwnerID)
						if err != nil {
							// Calculation failed: report zero progress.
							zeroScore := 0.0
							condition.CurrentPoints = &zeroScore
							conditionMet = false
						} else {
							condition.CurrentPoints = &totalScore
							conditionMet = totalScore >= requiredPoints
						}
					}
				}

				if !conditionMet {
					lockedCount++
					if firstLocked == nil {
						firstLocked = condition
					}
				}
			}
			if firstLocked != nil {
				item.FirstLockedCondition = firstLocked
				item.MoreLockedConditions = lockedCount - 1
				item.LockedConditionsCount = lockedCount
			}
		} else {
			// Even for unlocked items, compute progress for every condition.
			for i := range item.UnlockConditions {
				condition := &item.UnlockConditions[i]
				if condition.Type == "task_completion" {
					var taskID int
					var conditionOwnerID int
					err := a.DB.QueryRow(`
						SELECT tc.task_id, COALESCE(wc.user_id, $2)
						FROM wishlist_conditions wc
						JOIN task_conditions tc ON wc.task_condition_id = tc.id
						WHERE wc.id = $1
					`, condition.ID, userID).Scan(&taskID, &conditionOwnerID)
					if err == nil {
						var completed int
						err = a.DB.QueryRow(`
							SELECT completed FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE
						`, taskID, conditionOwnerID).Scan(&completed)
						if err == sql.ErrNoRows {
							// Task deleted or missing — treated as completed.
							completedBool := true
							condition.TaskCompleted = &completedBool
						} else if err == nil {
							completedBool := completed > 0
							condition.TaskCompleted = &completedBool
						}
					}
				} else if condition.Type == "project_points" {
					var projectID int
					var requiredPoints float64
					var startDate sql.NullTime
					var conditionOwnerID int
					err := a.DB.QueryRow(`
						SELECT sc.project_id, sc.required_points, sc.start_date, COALESCE(wc.user_id, $2)
						FROM wishlist_conditions wc
						JOIN score_conditions sc ON wc.score_condition_id = sc.id
						WHERE wc.id = $1
					`, condition.ID, userID).Scan(&projectID, &requiredPoints, &startDate, &conditionOwnerID)
					if err == nil {
						totalScore, err := a.calculateProjectPointsFromDate(projectID, startDate, conditionOwnerID)
						if err != nil {
							// Calculation failed: report zero progress.
							zeroScore := 0.0
							condition.CurrentPoints = &zeroScore
						} else {
							condition.CurrentPoints = &totalScore
						}
						// Estimate and format the unlock horizon for display.
						if condition.ProjectID != nil && condition.RequiredPoints != nil {
							weeks := a.calculateProjectUnlockWeeks(
								projectID,
								requiredPoints,
								startDate,
								conditionOwnerID,
							)
							weeksText := formatWeeksText(weeks)
							condition.WeeksText = &weeksText
						}
					}
				}
			}
		}

		// Load the current user's linked task for this wish, if any.
		var linkedTaskID, linkedTaskCompleted, linkedTaskUserID sql.NullInt64
		var linkedTaskName sql.NullString
		var linkedTaskNextShowAt sql.NullTime
		linkedTaskErr := a.DB.QueryRow(`
			SELECT t.id, t.name, t.completed, t.next_show_at, t.user_id
			FROM tasks t
			WHERE t.wishlist_id = $1 AND t.user_id = $2 AND t.deleted = FALSE
			LIMIT 1
		`, item.ID, userID).Scan(&linkedTaskID, &linkedTaskName, &linkedTaskCompleted, &linkedTaskNextShowAt, &linkedTaskUserID)

		if linkedTaskErr == nil && linkedTaskID.Valid {
			linkedTask := &LinkedTask{
				ID:        int(linkedTaskID.Int64),
				Name:      linkedTaskName.String,
				Completed: int(linkedTaskCompleted.Int64),
			}
			if linkedTaskNextShowAt.Valid {
				nextShowAtStr := linkedTaskNextShowAt.Time.Format(time.RFC3339)
				linkedTask.NextShowAt = &nextShowAtStr
			}
			if linkedTaskUserID.Valid {
				userIDVal := int(linkedTaskUserID.Int64)
				linkedTask.UserID = &userIDVal
			}
			item.LinkedTask = linkedTask
		} else if linkedTaskErr != sql.ErrNoRows {
			log.Printf("Error loading linked task for wishlist %d: %v", item.ID, linkedTaskErr)
			// Not fatal: simply leave linked_task unset.
		}

		// Count open tasks attached to this wish (across all users),
		// excluding the linked task when present; only open tasks
		// (completed = 0) are counted.
		var tasksCount int
		if linkedTaskID.Valid {
			// A linked task exists — exclude it from the count.
			err = a.DB.QueryRow(`
				SELECT COUNT(*)
				FROM tasks t
				WHERE t.wishlist_id = $1 AND t.deleted = FALSE AND t.completed = 0 AND t.id != $2
			`, item.ID, linkedTaskID.Int64).Scan(&tasksCount)
		} else {
			// No linked task — count every open task.
			err = a.DB.QueryRow(`
				SELECT COUNT(*)
				FROM tasks t
				WHERE t.wishlist_id = $1 AND t.deleted = FALSE AND t.completed = 0
			`, item.ID).Scan(&tasksCount)
		}
		if err != nil {
			log.Printf("Error counting tasks for wishlist %d: %v", item.ID, err)
			tasksCount = 0
		}
		item.TasksCount = tasksCount

		items = append(items, *item)
	}

	return items, nil
}

// saveWishlistConditions stores the unlock conditions for a wishlist item.
// userID is the author of the conditions (the user creating/updating them).
func (a *App)
saveWishlistConditions( + tx *sql.Tx, + wishlistItemID int, + userID int, + conditions []UnlockConditionRequest, +) error { + // Получаем все существующие условия с их user_id перед удалением + existingConditions := make(map[int]int) // map[conditionID]userID + rows, err := tx.Query(` + SELECT id, user_id + FROM wishlist_conditions + WHERE wishlist_item_id = $1 + `, wishlistItemID) + if err != nil { + return fmt.Errorf("error getting existing conditions: %w", err) + } + defer rows.Close() + + for rows.Next() { + var condID int + var condUserID sql.NullInt64 + if err := rows.Scan(&condID, &condUserID); err != nil { + return fmt.Errorf("error scanning existing condition: %w", err) + } + if condUserID.Valid { + existingConditions[condID] = int(condUserID.Int64) + } + } + + // Удаляем только условия текущего пользователя + _, err = tx.Exec(` + DELETE FROM wishlist_conditions + WHERE wishlist_item_id = $1 AND user_id = $2 + `, wishlistItemID, userID) + if err != nil { + return fmt.Errorf("error deleting user conditions: %w", err) + } + + if len(conditions) == 0 { + return nil + } + + // Подготавливаем statement для вставки условий + stmt, err := tx.Prepare(` + INSERT INTO wishlist_conditions + (wishlist_item_id, user_id, task_condition_id, score_condition_id, display_order) + VALUES ($1, $2, $3, $4, $5) + `) + if err != nil { + return err + } + defer stmt.Close() + + for i, condition := range conditions { + displayOrder := i + if condition.DisplayOrder != nil { + displayOrder = *condition.DisplayOrder + } + + var taskConditionID interface{} + var scoreConditionID interface{} + + if condition.Type == "task_completion" { + if condition.TaskID == nil { + return fmt.Errorf("task_id is required for task_completion") + } + + // Получаем или создаём task_condition + var tcID int + err := tx.QueryRow(` + SELECT id FROM task_conditions WHERE task_id = $1 + `, *condition.TaskID).Scan(&tcID) + + if err == sql.ErrNoRows { + // Создаём новое условие + err = tx.QueryRow(` + INSERT 
INTO task_conditions (task_id) + VALUES ($1) + ON CONFLICT (task_id) DO UPDATE SET task_id = EXCLUDED.task_id + RETURNING id + `, *condition.TaskID).Scan(&tcID) + if err != nil { + return err + } + } else if err != nil { + return err + } + + taskConditionID = tcID + + } else if condition.Type == "project_points" { + if condition.ProjectID == nil || condition.RequiredPoints == nil { + return fmt.Errorf("project_id and required_points are required for project_points") + } + + startDateStr := condition.StartDate + + // Получаем или создаём score_condition + var scID int + var startDateVal interface{} + if startDateStr != nil && *startDateStr != "" { + // Парсим дату из строки YYYY-MM-DD + startDateVal = *startDateStr + } else { + // Пустая строка или nil = NULL для "за всё время" + startDateVal = nil + } + + err := tx.QueryRow(` + SELECT id FROM score_conditions + WHERE project_id = $1 + AND required_points = $2 + AND (start_date = $3::DATE OR (start_date IS NULL AND $3 IS NULL)) + `, *condition.ProjectID, *condition.RequiredPoints, startDateVal).Scan(&scID) + + if err == sql.ErrNoRows { + // Создаём новое условие + err = tx.QueryRow(` + INSERT INTO score_conditions (project_id, required_points, start_date) + VALUES ($1, $2, $3::DATE) + ON CONFLICT (project_id, required_points, start_date) + DO UPDATE SET project_id = EXCLUDED.project_id + RETURNING id + `, *condition.ProjectID, *condition.RequiredPoints, startDateVal).Scan(&scID) + if err != nil { + return err + } + } else if err != nil { + return err + } + + scoreConditionID = scID + } + + // Определяем user_id для условия: + // - Если условие имеет id и это условие существовало - проверяем, принадлежит ли оно текущему пользователю + // - Если условие принадлежит другому пользователю - пропускаем (не сохраняем, так как чужие условия не редактируются) + // - Если условие имеет id, но не существовало (например, было только что добавлено) - это новое условие, используем userID текущего пользователя + // - Если условие 
без id - это новое условие, используем userID текущего пользователя + conditionUserID := userID + if condition.ID != nil { + if originalUserID, exists := existingConditions[*condition.ID]; exists { + // Если условие принадлежит другому пользователю - пропускаем (не сохраняем, так как чужие условия не редактируются) + if originalUserID != userID { + continue + } + // Условие принадлежит текущему пользователю - обновляем его + conditionUserID = originalUserID + } else { + // Условие имеет id, но не существует в базе - это новое условие, используем userID текущего пользователя + conditionUserID = userID + } + } + + // Создаём связь + _, err = stmt.Exec( + wishlistItemID, + conditionUserID, + taskConditionID, + scoreConditionID, + displayOrder, + ) + if err != nil { + return err + } + } + + return nil +} + +// getWishlistHandler возвращает список незавершённых желаний и счётчик завершённых +func (a *App) getWishlistHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + // Загружаем только незавершённые + items, err := a.getWishlistItemsWithConditions(userID, false) + if err != nil { + log.Printf("Error getting wishlist items: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting wishlist items: %v", err), http.StatusInternalServerError) + return + } + + // Получаем количество завершённых + var completedCount int + err = a.DB.QueryRow(` + SELECT COUNT(*) FROM wishlist_items + WHERE user_id = $1 AND deleted = FALSE AND completed = TRUE + `, userID).Scan(&completedCount) + if err != nil { + log.Printf("Error counting completed wishlist items: %v", err) + completedCount = 0 + } + + // Группируем и сортируем + unlocked := make([]WishlistItem, 0) + locked := make([]WishlistItem, 0) + + for _, item := range items { + if 
item.Unlocked { + unlocked = append(unlocked, item) + } else { + locked = append(locked, item) + } + } + + // Сортируем разблокированные по цене от меньшего к большему + sort.Slice(unlocked, func(i, j int) bool { + priceI := 0.0 + priceJ := 0.0 + if unlocked[i].Price != nil { + priceI = *unlocked[i].Price + } + if unlocked[j].Price != nil { + priceJ = *unlocked[j].Price + } + if priceI == priceJ { + return unlocked[i].ID < unlocked[j].ID + } + return priceI < priceJ // Сортировка по цене от меньшего к большему (заменяет calculateUnlockedSortValue) + }) + + // Разделяем заблокированные на группы + lockedWithoutTasks := []WishlistItem{} + lockedWithTasks := []WishlistItem{} + + for _, item := range locked { + hasUncompletedTasks := false + for _, cond := range item.UnlockConditions { + if cond.Type == "task_completion" && (cond.TaskCompleted == nil || !*cond.TaskCompleted) { + hasUncompletedTasks = true + break + } + } + if hasUncompletedTasks { + lockedWithTasks = append(lockedWithTasks, item) + } else { + lockedWithoutTasks = append(lockedWithoutTasks, item) + } + } + + // Сортируем каждую группу по времени разблокировки (от меньшего срока к большему) + sort.Slice(lockedWithoutTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithoutTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithoutTasks[j], userID) + if valueI == valueJ { + return lockedWithoutTasks[i].ID < lockedWithoutTasks[j].ID + } + return valueI < valueJ + }) + + sort.Slice(lockedWithTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithTasks[j], userID) + if valueI == valueJ { + return lockedWithTasks[i].ID < lockedWithTasks[j].ID + } + return valueI < valueJ + }) + + // Объединяем: сначала без задач, потом с задачами + locked = append(lockedWithoutTasks, lockedWithTasks...) 
+ + response := WishlistResponse{ + Unlocked: unlocked, + Locked: locked, + CompletedCount: completedCount, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// getWishlistCompletedHandler возвращает список завершённых желаний +func (a *App) getWishlistCompletedHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + // Загружаем все желания включая завершённые + items, err := a.getWishlistItemsWithConditions(userID, true) + if err != nil { + log.Printf("Error getting completed wishlist items: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting completed wishlist items: %v", err), http.StatusInternalServerError) + return + } + + // Фильтруем только завершённые + completed := make([]WishlistItem, 0) + for _, item := range items { + if item.Completed { + completed = append(completed, item) + } + } + + // Сортируем по цене (дорогие → дешёвые) + sort.Slice(completed, func(i, j int) bool { + priceI := 0.0 + priceJ := 0.0 + if completed[i].Price != nil { + priceI = *completed[i].Price + } + if completed[j].Price != nil { + priceJ = *completed[j].Price + } + return priceI > priceJ + }) + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(completed) +} + +// createWishlistHandler создаёт новое желание +func (a *App) createWishlistHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + log.Printf("createWishlistHandler: Unauthorized") + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + log.Printf("createWishlistHandler: userID=%d", userID) + + var req 
WishlistRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("createWishlistHandler: Error decoding wishlist request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + log.Printf("createWishlistHandler: decoded request - name='%s', price=%v, link='%s', conditions=%d", + req.Name, req.Price, req.Link, len(req.UnlockConditions)) + + if req.UnlockConditions == nil { + log.Printf("createWishlistHandler: WARNING - UnlockConditions is nil, initializing empty slice") + req.UnlockConditions = []UnlockConditionRequest{} + } + + for i, cond := range req.UnlockConditions { + log.Printf("createWishlistHandler: condition %d - type='%s', task_id=%v, project_id=%v, required_points=%v, start_date='%v'", + i, cond.Type, cond.TaskID, cond.ProjectID, cond.RequiredPoints, cond.StartDate) + } + + if strings.TrimSpace(req.Name) == "" { + log.Printf("createWishlistHandler: Name is required") + sendErrorWithCORS(w, "Name is required", http.StatusBadRequest) + return + } + + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + var wishlistID int + err = tx.QueryRow(` + INSERT INTO wishlist_items (user_id, author_id, name, price, link, group_name, completed, deleted) + VALUES ($1, $1, $2, $3, $4, $5, FALSE, FALSE) + RETURNING id + `, userID, strings.TrimSpace(req.Name), req.Price, req.Link, req.GroupName).Scan(&wishlistID) + + if err != nil { + log.Printf("Error creating wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error creating wishlist item: %v", err), http.StatusInternalServerError) + return + } + + // Сохраняем условия + if len(req.UnlockConditions) > 0 { + log.Printf("createWishlistHandler: saving %d conditions", len(req.UnlockConditions)) + err = a.saveWishlistConditionsWithUserID(tx, wishlistID, userID, 
req.UnlockConditions) + if err != nil { + log.Printf("createWishlistHandler: Error saving wishlist conditions: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error saving wishlist conditions: %v", err), http.StatusInternalServerError) + return + } + log.Printf("createWishlistHandler: conditions saved successfully") + } else { + log.Printf("createWishlistHandler: no conditions to save") + } + + log.Printf("createWishlistHandler: committing transaction") + if err := tx.Commit(); err != nil { + log.Printf("createWishlistHandler: Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + log.Printf("createWishlistHandler: transaction committed successfully") + + // Обновляем MV для групповых саджестов + if req.GroupName != nil && *req.GroupName != "" { + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + } + + // Получаем созданное желание с условиями + items, err := a.getWishlistItemsWithConditions(userID, false) + if err != nil { + log.Printf("Error getting created wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting created wishlist item: %v", err), http.StatusInternalServerError) + return + } + + var createdItem *WishlistItem + for i := range items { + if items[i].ID == wishlistID { + createdItem = &items[i] + break + } + } + + if createdItem == nil { + log.Printf("createWishlistHandler: Created item not found") + sendErrorWithCORS(w, "Created item not found", http.StatusInternalServerError) + return + } + + log.Printf("createWishlistHandler: Successfully created wishlist item id=%d, name='%s'", + createdItem.ID, createdItem.Name) + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(createdItem) +} + +// checkWishlistAccess проверяет доступ пользователя к желанию +// Возвращает (hasAccess, itemUserID, boardID, error) +func (a *App) 
checkWishlistAccess(itemID int, userID int) (bool, int, sql.NullInt64, error) { + var itemUserID int + var boardID sql.NullInt64 + err := a.DB.QueryRow(` + SELECT user_id, board_id + FROM wishlist_items + WHERE id = $1 AND deleted = FALSE + `, itemID).Scan(&itemUserID, &boardID) + + if err == sql.ErrNoRows { + return false, 0, sql.NullInt64{}, err + } + if err != nil { + return false, 0, sql.NullInt64{}, err + } + + // Проверяем доступ: владелец ИЛИ участник доски + hasAccess := itemUserID == userID + if !hasAccess && boardID.Valid { + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID.Int64).Scan(&ownerID) + if err == nil { + hasAccess = ownerID == userID + if !hasAccess { + var isMember bool + err = a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2)`, + int(boardID.Int64), userID).Scan(&isMember) + if err == nil { + hasAccess = isMember + } + } + } + } + + return hasAccess, itemUserID, boardID, nil +} + +// CalculateWeeksRequest структура запроса для расчета недель +type CalculateWeeksRequest struct { + ProjectID int `json:"project_id"` + RequiredPoints float64 `json:"required_points"` + StartDate string `json:"start_date,omitempty"` + ConditionUserID *int `json:"condition_user_id,omitempty"` // Владелец условия (если условие существует) +} + +// calculateWeeksHandler обрабатывает запрос на расчет недель для разблокировки условия +func (a *App) calculateWeeksHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var req CalculateWeeksRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Определяем 
владельца условия: + // 1. Если передан condition_user_id в запросе - используем его (для существующего условия) + // 2. Иначе используем текущего пользователя (для нового условия) + conditionOwnerID := userID // userID из контекста (текущий пользователь) + if req.ConditionUserID != nil && *req.ConditionUserID > 0 { + conditionOwnerID = *req.ConditionUserID + } + + var startDate sql.NullTime + if req.StartDate != "" { + date, err := time.Parse("2006-01-02", req.StartDate) + if err == nil { + startDate = sql.NullTime{Time: date, Valid: true} + } + } + + // Используем владельца условия, а не текущего пользователя + weeks := a.calculateProjectUnlockWeeks(req.ProjectID, req.RequiredPoints, startDate, conditionOwnerID) + + response := map[string]interface{}{ + "weeks_text": formatWeeksText(weeks), // Отформатированная строка для отображения + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// getWishlistItemHandler возвращает одно желание +func (a *App) getWishlistItemHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + itemID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к желанию + hasAccess, itemUserID, boardID, err := a.checkWishlistAccess(itemID, userID) + if err == sql.ErrNoRows { + log.Printf("Wishlist item not found: id=%d, userID=%d", itemID, userID) + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error getting wishlist item (id=%d, userID=%d): %v", itemID, userID, err) + sendErrorWithCORS(w, "Error getting wishlist item", http.StatusInternalServerError) + return + } + + 
log.Printf("Wishlist item found: id=%d, itemUserID=%d, boardID=%v, currentUserID=%d", itemID, itemUserID, boardID, userID) + + if !hasAccess { + log.Printf("Access denied for wishlist item: id=%d, itemUserID=%d, boardID=%v, currentUserID=%d", itemID, itemUserID, boardID, userID) + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + log.Printf("Access granted for wishlist item: id=%d, itemUserID=%d, boardID=%v, currentUserID=%d", itemID, itemUserID, boardID, userID) + + // Сохраняем itemUserID для использования в качестве fallback, если conditionUserID NULL + itemOwnerID := itemUserID + + // Загружаем полную информацию о желании + query := ` + SELECT + wi.id, + wi.name, + wi.price, + wi.image_path, + wi.link, + wi.completed, + wi.group_name, + wc.id AS condition_id, + wc.display_order, + wc.task_condition_id, + wc.score_condition_id, + wc.user_id AS condition_user_id, + tc.task_id, + t.name AS task_name, + t.next_show_at AS task_next_show_at, + sc.project_id, + p.name AS project_name, + sc.required_points, + sc.start_date + FROM wishlist_items wi + LEFT JOIN wishlist_conditions wc ON wi.id = wc.wishlist_item_id + LEFT JOIN task_conditions tc ON wc.task_condition_id = tc.id + LEFT JOIN tasks t ON tc.task_id = t.id AND t.deleted = FALSE + LEFT JOIN score_conditions sc ON wc.score_condition_id = sc.id + LEFT JOIN projects p ON sc.project_id = p.id AND p.deleted = FALSE + WHERE wi.id = $1 + AND wi.deleted = FALSE + ORDER BY wc.display_order, wc.id + ` + + rows, err := a.DB.Query(query, itemID) + if err != nil { + log.Printf("Error querying wishlist item: %v", err) + sendErrorWithCORS(w, "Error getting wishlist item", http.StatusInternalServerError) + return + } + defer rows.Close() + + itemsMap := make(map[int]*WishlistItem) + for rows.Next() { + var itemID int + var name string + var price sql.NullFloat64 + var imagePath sql.NullString + var link sql.NullString + var completed bool + var groupName sql.NullString + var conditionID sql.NullInt64 
+ var displayOrder sql.NullInt64 + var taskConditionID sql.NullInt64 + var scoreConditionID sql.NullInt64 + var conditionUserID sql.NullInt64 + var taskID sql.NullInt64 + var taskName sql.NullString + var taskNextShowAt sql.NullTime + var projectID sql.NullInt64 + var projectName sql.NullString + var requiredPoints sql.NullFloat64 + var startDate sql.NullTime + + err := rows.Scan( + &itemID, &name, &price, &imagePath, &link, &completed, &groupName, + &conditionID, &displayOrder, &taskConditionID, &scoreConditionID, &conditionUserID, + &taskID, &taskName, &taskNextShowAt, &projectID, &projectName, &requiredPoints, &startDate, + ) + if err != nil { + log.Printf("Error scanning wishlist item: %v", err) + continue + } + + item, exists := itemsMap[itemID] + if !exists { + item = &WishlistItem{ + ID: itemID, + Name: name, + Completed: completed, + UnlockConditions: []UnlockConditionDisplay{}, + } + if price.Valid { + item.Price = &price.Float64 + } + if imagePath.Valid && imagePath.String != "" { + url := imagePath.String + if !strings.HasPrefix(url, "http") { + url = url + "?t=" + strconv.FormatInt(time.Now().Unix(), 10) + } + item.ImageURL = &url + } + if link.Valid { + item.Link = &link.String + } + if groupName.Valid && groupName.String != "" { + groupNameVal := groupName.String + item.GroupName = &groupNameVal + } + itemsMap[itemID] = item + } + + if conditionID.Valid { + // Используем user_id из условия, если он есть, иначе используем владельца желания + // Это важно для старых условий, созданных до добавления user_id в wishlist_conditions + conditionOwnerID := itemOwnerID + if conditionUserID.Valid { + conditionOwnerID = int(conditionUserID.Int64) + } + + // Если это условие по задаче, проверяем существует ли задача + if taskConditionID.Valid && taskID.Valid { + // Проверяем, существует ли задача (не удалена) + var taskExists bool + err := a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE)`, taskID.Int64, 
conditionOwnerID).Scan(&taskExists) + if err != nil || !taskExists { + // Задача удалена - не добавляем условие в список, но при проверке блокировки оно считается выполненным + continue + } + } + + condition := UnlockConditionDisplay{ + ID: int(conditionID.Int64), + DisplayOrder: int(displayOrder.Int64), + } + + if conditionUserID.Valid { + conditionOwnerID := int(conditionUserID.Int64) + condition.UserID = &conditionOwnerID + } else { + condition.UserID = &itemOwnerID + } + + if taskConditionID.Valid { + condition.Type = "task_completion" + if taskName.Valid { + condition.TaskName = &taskName.String + } + if taskID.Valid { + taskIDVal := int(taskID.Int64) + condition.TaskID = &taskIDVal + var taskCompleted int + err := a.DB.QueryRow(`SELECT completed FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE`, taskID.Int64, conditionOwnerID).Scan(&taskCompleted) + if err == nil { + isCompleted := taskCompleted > 0 + condition.TaskCompleted = &isCompleted + } + } + if taskNextShowAt.Valid { + nextShowAtStr := taskNextShowAt.Time.Format(time.RFC3339) + condition.TaskNextShowAt = &nextShowAtStr + } + } else if scoreConditionID.Valid { + condition.Type = "project_points" + if projectName.Valid { + condition.ProjectName = &projectName.String + } + if projectID.Valid { + projectIDVal := int(projectID.Int64) + condition.ProjectID = &projectIDVal + points, _ := a.calculateProjectPointsFromDate(int(projectID.Int64), startDate, conditionOwnerID) + condition.CurrentPoints = &points + } + if requiredPoints.Valid { + condition.RequiredPoints = &requiredPoints.Float64 + } + if startDate.Valid { + dateStr := startDate.Time.Format("2006-01-02") + condition.StartDate = &dateStr + } + // Рассчитываем и форматируем срок разблокировки + if condition.ProjectID != nil && condition.RequiredPoints != nil { + weeks := a.calculateProjectUnlockWeeks( + *condition.ProjectID, + *condition.RequiredPoints, + startDate, + conditionOwnerID, + ) + weeksText := formatWeeksText(weeks) + 
condition.WeeksText = &weeksText + } + } + + item.UnlockConditions = append(item.UnlockConditions, condition) + } + } + + // Получаем желание из map + var item *WishlistItem + for _, it := range itemsMap { + if it.ID == itemID { + item = it + break + } + } + + if item == nil { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + + // Проверяем разблокировку + item.Unlocked = true + if len(item.UnlockConditions) > 0 { + for _, cond := range item.UnlockConditions { + if cond.Type == "task_completion" { + if cond.TaskCompleted == nil || !*cond.TaskCompleted { + item.Unlocked = false + break + } + } else if cond.Type == "project_points" { + if cond.CurrentPoints == nil || cond.RequiredPoints == nil || *cond.CurrentPoints < *cond.RequiredPoints { + item.Unlocked = false + break + } + } + } + } + + // Также проверяем через checkWishlistUnlock для совместимости + unlocked, err := a.checkWishlistUnlock(itemID, userID) + if err == nil { + item.Unlocked = unlocked + } + + // Сортируем условия в нужном порядке + a.sortUnlockConditions(item.UnlockConditions, userID) + + // Загружаем связанную задачу текущего пользователя, если есть + var linkedTaskID, linkedTaskCompleted, linkedTaskUserID sql.NullInt64 + var linkedTaskName sql.NullString + var linkedTaskNextShowAt sql.NullTime + err = a.DB.QueryRow(` + SELECT t.id, t.name, t.completed, t.next_show_at, t.user_id + FROM tasks t + WHERE t.wishlist_id = $1 AND t.user_id = $2 AND t.deleted = FALSE + LIMIT 1 + `, itemID, userID).Scan(&linkedTaskID, &linkedTaskName, &linkedTaskCompleted, &linkedTaskNextShowAt, &linkedTaskUserID) + + if err == nil && linkedTaskID.Valid { + linkedTask := &LinkedTask{ + ID: int(linkedTaskID.Int64), + Name: linkedTaskName.String, + Completed: int(linkedTaskCompleted.Int64), + } + if linkedTaskNextShowAt.Valid { + nextShowAtStr := linkedTaskNextShowAt.Time.Format(time.RFC3339) + linkedTask.NextShowAt = &nextShowAtStr + } + if linkedTaskUserID.Valid { + userIDVal := 
int(linkedTaskUserID.Int64) + linkedTask.UserID = &userIDVal + } + item.LinkedTask = linkedTask + } else if err != sql.ErrNoRows { + log.Printf("Error loading linked task for wishlist %d: %v", itemID, err) + // Не возвращаем ошибку, просто не устанавливаем linked_task + } + + // Подсчитываем общее количество не закрытых задач для этого желания (всех пользователей) + // Исключаем linked_task из подсчета, если она есть + // Учитываем только не закрытые задачи (completed = 0) + var tasksCount int + if linkedTaskID.Valid { + // Если есть linked_task, исключаем её из подсчета + err = a.DB.QueryRow(` + SELECT COUNT(*) + FROM tasks t + WHERE t.wishlist_id = $1 AND t.deleted = FALSE AND t.completed = 0 AND t.id != $2 + `, itemID, linkedTaskID.Int64).Scan(&tasksCount) + } else { + // Если нет linked_task, считаем все не закрытые задачи + err = a.DB.QueryRow(` + SELECT COUNT(*) + FROM tasks t + WHERE t.wishlist_id = $1 AND t.deleted = FALSE AND t.completed = 0 + `, itemID).Scan(&tasksCount) + } + if err != nil { + log.Printf("Error counting tasks for wishlist %d: %v", itemID, err) + tasksCount = 0 + } + item.TasksCount = tasksCount + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(item) +} + +// updateWishlistHandler обновляет желание +func (a *App) updateWishlistHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + log.Printf("updateWishlistHandler called: method=%s, path=%s", r.Method, r.URL.Path) + + userID, ok := getUserIDFromContext(r) + if !ok { + log.Printf("updateWishlistHandler: Unauthorized") + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + itemID, err := strconv.Atoi(vars["id"]) + if err != nil { + log.Printf("updateWishlistHandler: Invalid wishlist ID: %v", err) + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + 
log.Printf("updateWishlistHandler: itemID=%d, userID=%d", itemID, userID) + + // Проверяем доступ к желанию + hasAccess, _, _, err := a.checkWishlistAccess(itemID, userID) + if err == sql.ErrNoRows { + log.Printf("updateWishlistHandler: Wishlist item not found: id=%d, userID=%d", itemID, userID) + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("updateWishlistHandler: Error getting wishlist item (id=%d, userID=%d): %v", itemID, userID, err) + sendErrorWithCORS(w, "Error getting wishlist item", http.StatusInternalServerError) + return + } + + if !hasAccess { + log.Printf("updateWishlistHandler: Access denied: id=%d, userID=%d", itemID, userID) + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + log.Printf("updateWishlistHandler: Access granted: id=%d, userID=%d", itemID, userID) + + var req WishlistRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding wishlist request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + if strings.TrimSpace(req.Name) == "" { + sendErrorWithCORS(w, "Name is required", http.StatusBadRequest) + return + } + + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Обновляем желание (не проверяем user_id в WHERE, так как доступ уже проверен выше) + _, err = tx.Exec(` + UPDATE wishlist_items + SET name = $1, price = $2, link = $3, group_name = $4, updated_at = NOW() + WHERE id = $5 + `, strings.TrimSpace(req.Name), req.Price, req.Link, req.GroupName, itemID) + + if err != nil { + log.Printf("Error updating wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error updating wishlist item: %v", err), http.StatusInternalServerError) + return + } + + // Сохраняем 
условия + err = a.saveWishlistConditions(tx, itemID, userID, req.UnlockConditions) + if err != nil { + log.Printf("Error saving wishlist conditions: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error saving wishlist conditions: %v", err), http.StatusInternalServerError) + return + } + + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов + if req.GroupName != nil && *req.GroupName != "" { + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + } + + // Получаем обновлённое желание через getWishlistItemHandler логику + // Используем тот же запрос, что и в getWishlistItemHandler + query := ` + SELECT + wi.id, + wi.name, + wi.price, + wi.image_path, + wi.link, + wi.completed, + wi.group_name, + wc.id AS condition_id, + wc.display_order, + wc.task_condition_id, + wc.score_condition_id, + wc.user_id AS condition_user_id, + tc.task_id, + t.name AS task_name, + sc.project_id, + p.name AS project_name, + sc.required_points, + sc.start_date + FROM wishlist_items wi + LEFT JOIN wishlist_conditions wc ON wi.id = wc.wishlist_item_id + LEFT JOIN task_conditions tc ON wc.task_condition_id = tc.id + LEFT JOIN tasks t ON tc.task_id = t.id AND t.deleted = FALSE + LEFT JOIN score_conditions sc ON wc.score_condition_id = sc.id + LEFT JOIN projects p ON sc.project_id = p.id AND p.deleted = FALSE + WHERE wi.id = $1 + AND wi.deleted = FALSE + ORDER BY wc.display_order, wc.id + ` + + rows, err := a.DB.Query(query, itemID) + if err != nil { + log.Printf("Error querying updated wishlist item: %v", err) + sendErrorWithCORS(w, "Error getting updated wishlist item", http.StatusInternalServerError) + return + } + defer rows.Close() + + itemsMap := make(map[int]*WishlistItem) + var itemOwnerID int + for rows.Next() 
{ + var itemID int + var name string + var price sql.NullFloat64 + var imagePath sql.NullString + var link sql.NullString + var completed bool + var groupName sql.NullString + var conditionID sql.NullInt64 + var displayOrder sql.NullInt64 + var taskConditionID sql.NullInt64 + var scoreConditionID sql.NullInt64 + var conditionUserID sql.NullInt64 + var taskID sql.NullInt64 + var taskName sql.NullString + var projectID sql.NullInt64 + var projectName sql.NullString + var requiredPoints sql.NullFloat64 + var startDate sql.NullTime + + err := rows.Scan( + &itemID, &name, &price, &imagePath, &link, &completed, &groupName, + &conditionID, &displayOrder, &taskConditionID, &scoreConditionID, &conditionUserID, + &taskID, &taskName, &projectID, &projectName, &requiredPoints, &startDate, + ) + if err != nil { + log.Printf("Error scanning updated wishlist item: %v", err) + continue + } + + item, exists := itemsMap[itemID] + if !exists { + // Получаем user_id для этого желания + err = a.DB.QueryRow(`SELECT user_id FROM wishlist_items WHERE id = $1`, itemID).Scan(&itemOwnerID) + if err != nil { + log.Printf("Error getting item owner: %v", err) + continue + } + + item = &WishlistItem{ + ID: itemID, + Name: name, + Completed: completed, + UnlockConditions: []UnlockConditionDisplay{}, + } + if price.Valid { + item.Price = &price.Float64 + } + if imagePath.Valid && imagePath.String != "" { + url := imagePath.String + if !strings.HasPrefix(url, "http") { + url = url + "?t=" + strconv.FormatInt(time.Now().Unix(), 10) + } + item.ImageURL = &url + } + if link.Valid { + item.Link = &link.String + } + if groupName.Valid && groupName.String != "" { + groupNameVal := groupName.String + item.GroupName = &groupNameVal + } + itemsMap[itemID] = item + } + + if conditionID.Valid { + // Определяем владельца условия + conditionOwnerID := itemOwnerID + if conditionUserID.Valid { + conditionOwnerID = int(conditionUserID.Int64) + } + + // Если это условие по задаче, проверяем существует ли задача + 
if taskConditionID.Valid && taskID.Valid { + // Проверяем, существует ли задача (не удалена) + var taskExists bool + err := a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE)`, taskID.Int64, conditionOwnerID).Scan(&taskExists) + if err != nil || !taskExists { + // Задача удалена - не добавляем условие в список, но при проверке блокировки оно считается выполненным + continue + } + } + + condition := UnlockConditionDisplay{ + ID: int(conditionID.Int64), + DisplayOrder: int(displayOrder.Int64), + } + + if conditionUserID.Valid { + conditionOwnerID := int(conditionUserID.Int64) + condition.UserID = &conditionOwnerID + } else { + condition.UserID = &itemOwnerID + } + + if taskConditionID.Valid { + condition.Type = "task_completion" + if taskName.Valid { + condition.TaskName = &taskName.String + } + if taskID.Valid { + taskIDVal := int(taskID.Int64) + condition.TaskID = &taskIDVal + var taskCompleted int + err := a.DB.QueryRow(`SELECT completed FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE`, taskID.Int64, conditionOwnerID).Scan(&taskCompleted) + if err == nil { + isCompleted := taskCompleted > 0 + condition.TaskCompleted = &isCompleted + } + } + } else if scoreConditionID.Valid { + condition.Type = "project_points" + if projectName.Valid { + condition.ProjectName = &projectName.String + } + if projectID.Valid { + projectIDVal := int(projectID.Int64) + condition.ProjectID = &projectIDVal + points, _ := a.calculateProjectPointsFromDate(int(projectID.Int64), startDate, conditionOwnerID) + condition.CurrentPoints = &points + } + if requiredPoints.Valid { + condition.RequiredPoints = &requiredPoints.Float64 + } + if startDate.Valid { + dateStr := startDate.Time.Format("2006-01-02") + condition.StartDate = &dateStr + } + // Рассчитываем и форматируем срок разблокировки + if condition.ProjectID != nil && condition.RequiredPoints != nil { + weeks := a.calculateProjectUnlockWeeks( + *condition.ProjectID, + 
*condition.RequiredPoints, + startDate, + conditionOwnerID, + ) + weeksText := formatWeeksText(weeks) + condition.WeeksText = &weeksText + } + } + + item.UnlockConditions = append(item.UnlockConditions, condition) + } + } + + var updatedItem *WishlistItem + for _, it := range itemsMap { + if it.ID == itemID { + updatedItem = it + break + } + } + + if updatedItem == nil { + log.Printf("Updated item not found: id=%d", itemID) + sendErrorWithCORS(w, "Updated item not found", http.StatusInternalServerError) + return + } + + // Проверяем разблокировку + updatedItem.Unlocked = true + if len(updatedItem.UnlockConditions) > 0 { + for _, cond := range updatedItem.UnlockConditions { + if cond.Type == "task_completion" { + if cond.TaskCompleted == nil || !*cond.TaskCompleted { + updatedItem.Unlocked = false + break + } + } else if cond.Type == "project_points" { + if cond.CurrentPoints == nil || cond.RequiredPoints == nil || *cond.CurrentPoints < *cond.RequiredPoints { + updatedItem.Unlocked = false + break + } + } + } + } + + unlocked, err := a.checkWishlistUnlock(itemID, userID) + if err == nil { + updatedItem.Unlocked = unlocked + } + + // Сортируем условия в нужном порядке + a.sortUnlockConditions(updatedItem.UnlockConditions, userID) + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(updatedItem) +} + +// deleteWishlistHandler удаляет желание (soft delete) +func (a *App) deleteWishlistHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + itemID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к желанию + hasAccess, _, _, err := a.checkWishlistAccess(itemID, userID) + if err 
== sql.ErrNoRows { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking wishlist access: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking wishlist access: %v", err), http.StatusInternalServerError) + return + } + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + _, err = a.DB.Exec(` + UPDATE wishlist_items + SET deleted = TRUE, updated_at = NOW() + WHERE id = $1 + `, itemID) + + if err != nil { + log.Printf("Error deleting wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error deleting wishlist item: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Wishlist item deleted successfully", + }) +} + +// uploadWishlistImageHandler загружает картинку для желания +func (a *App) uploadWishlistImageHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + wishlistID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к желанию + hasAccess, _, _, err := a.checkWishlistAccess(wishlistID, userID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking wishlist access: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking wishlist access: %v", err), http.StatusInternalServerError) + return + } + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + // Парсим multipart form (макс 5MB) 
+ err = r.ParseMultipartForm(5 << 20) + if err != nil { + sendErrorWithCORS(w, "File too large (max 5MB)", http.StatusBadRequest) + return + } + + file, _, err := r.FormFile("image") + if err != nil { + sendErrorWithCORS(w, "Error retrieving file", http.StatusBadRequest) + return + } + defer file.Close() + + // Декодируем изображение + img, err := imaging.Decode(file) + if err != nil { + sendErrorWithCORS(w, "Invalid image format", http.StatusBadRequest) + return + } + + // Сжимаем до максимальной ширины 1200px (сохраняя пропорции) + if img.Bounds().Dx() > 1200 { + img = imaging.Resize(img, 1200, 0, imaging.Lanczos) + } + + // Создаём директорию + uploadDir := fmt.Sprintf("/app/uploads/wishlist/%d", userID) + err = os.MkdirAll(uploadDir, 0755) + if err != nil { + log.Printf("Error creating directory: %v", err) + sendErrorWithCORS(w, "Error creating directory", http.StatusInternalServerError) + return + } + + // Генерируем уникальное имя файла + randomBytes := make([]byte, 8) + rand.Read(randomBytes) + filename := fmt.Sprintf("%d_%x.jpg", wishlistID, randomBytes) + filepath := filepath.Join(uploadDir, filename) + + dst, err := os.Create(filepath) + if err != nil { + log.Printf("Error creating file: %v", err) + sendErrorWithCORS(w, "Error saving file", http.StatusInternalServerError) + return + } + defer dst.Close() + + // Кодируем в JPEG с качеством 85% + err = jpeg.Encode(dst, img, &jpeg.Options{Quality: 85}) + if err != nil { + log.Printf("Error encoding image: %v", err) + sendErrorWithCORS(w, "Error encoding image", http.StatusInternalServerError) + return + } + + // Обновляем путь в БД (уникальное имя файла уже обеспечивает сброс кэша) + imagePath := fmt.Sprintf("/uploads/wishlist/%d/%s", userID, filename) + _, err = a.DB.Exec(` + UPDATE wishlist_items + SET image_path = $1, updated_at = NOW() + WHERE id = $2 + `, imagePath, wishlistID) + if err != nil { + log.Printf("Error updating database: %v", err) + sendErrorWithCORS(w, "Error updating database", 
http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{ + "image_url": imagePath, + }) +} + +// deleteWishlistImageHandler удаляет картинку желания +func (a *App) deleteWishlistImageHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + wishlistID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к желанию + hasAccess, _, _, err := a.checkWishlistAccess(wishlistID, userID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking wishlist access: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking wishlist access: %v", err), http.StatusInternalServerError) + return + } + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + // Получаем текущий путь к изображению из БД + var currentImagePath sql.NullString + err = a.DB.QueryRow(` + SELECT image_path + FROM wishlist_items + WHERE id = $1 + `, wishlistID).Scan(¤tImagePath) + + if err != nil { + log.Printf("Error getting image path: %v", err) + sendErrorWithCORS(w, "Error getting image path", http.StatusInternalServerError) + return + } + + // Удаляем файл, если он существует + if currentImagePath.Valid && currentImagePath.String != "" { + filePath := filepath.Join("/app", currentImagePath.String) + err = os.Remove(filePath) + if err != nil && !os.IsNotExist(err) { + log.Printf("Error deleting image file: %v", err) + // Продолжаем выполнение даже если файл не найден + } + } + + // Обновляем БД, устанавливая image_path 
в NULL + _, err = a.DB.Exec(` + UPDATE wishlist_items + SET image_path = NULL, updated_at = NOW() + WHERE id = $1 + `, wishlistID) + + if err != nil { + log.Printf("Error updating database: %v", err) + sendErrorWithCORS(w, "Error updating database", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Image deleted successfully", + }) +} + +// completeWishlistHandler помечает желание как завершённое +func (a *App) completeWishlistHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + itemID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к желанию + hasAccess, _, _, err := a.checkWishlistAccess(itemID, userID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking wishlist access: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking wishlist access: %v", err), http.StatusInternalServerError) + return + } + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + _, err = a.DB.Exec(` + UPDATE wishlist_items + SET completed = TRUE, updated_at = NOW() + WHERE id = $1 + `, itemID) + + if err != nil { + log.Printf("Error completing wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error completing wishlist item: %v", err), http.StatusInternalServerError) + return + } + + // Находим задачу пользователя для этого желания, чтобы исключить её из обработки + // (так же, как при закрытии через задачу) + var userTaskID int 
+ err = a.DB.QueryRow(` + SELECT id FROM tasks + WHERE wishlist_id = $1 AND user_id = $2 AND deleted = FALSE + LIMIT 1 + `, itemID, userID).Scan(&userTaskID) + + // Если задача не найдена, используем 0 (не будет исключена, но это нормально, если задачи нет) + if err == sql.ErrNoRows { + userTaskID = 0 + } else if err != nil { + log.Printf("Error finding user task for wishlist item %d: %v", itemID, err) + userTaskID = 0 + } + + // Обрабатываем политику награждения для всех задач, связанных с этим желанием + // Исключаем задачу пользователя, который закрыл желание (если она есть) + a.processWishlistRewardPolicy(itemID, userTaskID) + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Wishlist item completed successfully", + }) +} + +// processWishlistRewardPolicy обрабатывает политику награждения для всех задач, связанных с желанием +// completedTaskID - ID задачи, которая была закрыта (исключается из обработки). 
Если 0, задача не найдена, но это нормально +func (a *App) processWishlistRewardPolicy(wishlistItemID int, completedTaskID int) { + var rows *sql.Rows + var err error + if completedTaskID == 0 { + // Если задача не найдена (желание закрывается напрямую, но у пользователя нет задачи), + // обрабатываем все задачи + rows, err = a.DB.Query(` + SELECT id, user_id, reward_policy + FROM tasks + WHERE wishlist_id = $1 AND deleted = FALSE + `, wishlistItemID) + } else { + // Исключаем задачу, которая была закрыта (через задачу или найдена при прямом закрытии желания) + rows, err = a.DB.Query(` + SELECT id, user_id, reward_policy + FROM tasks + WHERE wishlist_id = $1 AND deleted = FALSE AND id != $2 + `, wishlistItemID, completedTaskID) + } + if err != nil { + log.Printf("Error querying tasks for wishlist item %d: %v", wishlistItemID, err) + return + } + defer rows.Close() + + for rows.Next() { + var taskID, taskUserID int + var rewardPolicy sql.NullString + err := rows.Scan(&taskID, &taskUserID, &rewardPolicy) + if err != nil { + log.Printf("Error scanning task: %v", err) + continue + } + + policy := "personal" // Значение по умолчанию + if rewardPolicy.Valid { + policy = rewardPolicy.String + } + + if policy == "personal" { + // Личная политика: при закрытии задачи-желания другим пользователем, личная задача удаляется + _, err = a.DB.Exec(` + UPDATE tasks + SET deleted = TRUE + WHERE id = $1 + `, taskID) + if err != nil { + log.Printf("Error deleting task %d: %v", taskID, err) + } else { + log.Printf("Task %d deleted because wishlist item %d was completed by another user (personal policy)", taskID, wishlistItemID) + } + } else if policy == "general" { + // Общая политика: при закрытии задачи-желания другим пользователем, общая задача закрывается + _, err = a.DB.Exec(` + UPDATE tasks + SET completed = completed + 1, last_completed_at = NOW() + WHERE id = $1 + `, taskID) + if err != nil { + log.Printf("Error completing task %d: %v", taskID, err) + } else { + 
log.Printf("Task %d completed automatically after wishlist item %d completion (general policy)", taskID, wishlistItemID) + } + } + } +} + +// uncompleteWishlistHandler снимает отметку завершения +func (a *App) uncompleteWishlistHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + itemID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к желанию + hasAccess, _, _, err := a.checkWishlistAccess(itemID, userID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error checking wishlist access: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error checking wishlist access: %v", err), http.StatusInternalServerError) + return + } + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + _, err = a.DB.Exec(` + UPDATE wishlist_items + SET completed = FALSE, updated_at = NOW() + WHERE id = $1 + `, itemID) + + if err != nil { + log.Printf("Error uncompleting wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error uncompleting wishlist item: %v", err), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "success": true, + "message": "Wishlist item uncompleted successfully", + }) +} + +// copyWishlistHandler копирует желание +func (a *App) copyWishlistHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + 
sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + itemID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid wishlist ID", http.StatusBadRequest) + return + } + + // Получаем оригинальное желание + var name string + var price sql.NullFloat64 + var link sql.NullString + var imagePath sql.NullString + var ownerID int + var boardID sql.NullInt64 + var authorID sql.NullInt64 + var groupName sql.NullString + err = a.DB.QueryRow(` + SELECT user_id, name, price, link, image_path, board_id, author_id, group_name + FROM wishlist_items + WHERE id = $1 AND deleted = FALSE + `, itemID).Scan(&ownerID, &name, &price, &link, &imagePath, &boardID, &authorID, &groupName) + + if err == sql.ErrNoRows || ownerID != userID { + sendErrorWithCORS(w, "Wishlist item not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error getting wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting wishlist item: %v", err), http.StatusInternalServerError) + return + } + + // Получаем условия оригинального желания + rows, err := a.DB.Query(` + SELECT + wc.display_order, + wc.task_condition_id, + wc.score_condition_id, + tc.task_id, + sc.project_id, + sc.required_points, + sc.start_date + FROM wishlist_conditions wc + LEFT JOIN task_conditions tc ON wc.task_condition_id = tc.id + LEFT JOIN score_conditions sc ON wc.score_condition_id = sc.id + WHERE wc.wishlist_item_id = $1 + ORDER BY wc.display_order + `, itemID) + if err != nil { + log.Printf("Error getting wishlist conditions: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting wishlist conditions: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + var conditions []UnlockConditionRequest + for rows.Next() { + var displayOrder int + var taskConditionID, scoreConditionID sql.NullInt64 + var taskID, projectID sql.NullInt64 + var requiredPoints sql.NullFloat64 + var startDate sql.NullString + 
+ err := rows.Scan(&displayOrder, &taskConditionID, &scoreConditionID, &taskID, &projectID, &requiredPoints, &startDate) + if err != nil { + log.Printf("Error scanning condition row: %v", err) + continue + } + + cond := UnlockConditionRequest{ + DisplayOrder: &displayOrder, + } + + if taskConditionID.Valid && taskID.Valid { + cond.Type = "task_completion" + tid := int(taskID.Int64) + cond.TaskID = &tid + } else if scoreConditionID.Valid && projectID.Valid { + cond.Type = "project_points" + pid := int(projectID.Int64) + cond.ProjectID = &pid + if requiredPoints.Valid { + cond.RequiredPoints = &requiredPoints.Float64 + } + if startDate.Valid { + cond.StartDate = &startDate.String + } + } + + conditions = append(conditions, cond) + } + + // Создаём копию в транзакции + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Создаём копию желания + var newWishlistID int + var priceVal, linkVal interface{} + if price.Valid { + priceVal = price.Float64 + } + if link.Valid { + linkVal = link.String + } + + // Определяем значения для board_id и author_id + var boardIDVal, authorIDVal, groupNameVal interface{} + if boardID.Valid { + boardIDVal = int(boardID.Int64) + } + if authorID.Valid { + authorIDVal = int(authorID.Int64) + } else { + // Если author_id не был установлен, используем текущего пользователя + authorIDVal = userID + } + if groupName.Valid { + groupNameVal = groupName.String + } + + err = tx.QueryRow(` + INSERT INTO wishlist_items (user_id, board_id, author_id, name, price, link, group_name, completed, deleted) + VALUES ($1, $2, $3, $4, $5, $6, $7, FALSE, FALSE) + RETURNING id + `, ownerID, boardIDVal, authorIDVal, name+" (копия)", priceVal, linkVal, groupNameVal).Scan(&newWishlistID) + if err != nil { + log.Printf("Error creating wishlist copy: %v", err) + 
sendErrorWithCORS(w, fmt.Sprintf("Error creating wishlist copy: %v", err), http.StatusInternalServerError) + return + } + + // Сохраняем условия + if len(conditions) > 0 { + err = a.saveWishlistConditions(tx, newWishlistID, userID, conditions) + if err != nil { + log.Printf("Error saving wishlist conditions: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error saving wishlist conditions: %v", err), http.StatusInternalServerError) + return + } + } + + // Копируем изображение, если есть + if imagePath.Valid && imagePath.String != "" { + // Получаем путь к оригинальному файлу + uploadsDir := getEnv("UPLOADS_DIR", "/app/uploads") + + // Очищаем путь от /uploads/ в начале и query параметров + cleanPath := imagePath.String + cleanPath = strings.TrimPrefix(cleanPath, "/uploads/") + if idx := strings.Index(cleanPath, "?"); idx != -1 { + cleanPath = cleanPath[:idx] + } + + originalPath := filepath.Join(uploadsDir, cleanPath) + + log.Printf("Copying image: imagePath=%s, cleanPath=%s, originalPath=%s", imagePath.String, cleanPath, originalPath) + + // Проверяем, существует ли файл + if _, statErr := os.Stat(originalPath); statErr == nil { + // Создаём директорию для нового желания + newImageDir := filepath.Join(uploadsDir, "wishlist", strconv.Itoa(userID)) + if mkdirErr := os.MkdirAll(newImageDir, 0755); mkdirErr != nil { + log.Printf("Error creating image dir: %v", mkdirErr) + } + + // Генерируем уникальное имя файла + ext := filepath.Ext(cleanPath) + randomBytes := make([]byte, 8) + rand.Read(randomBytes) + newFileName := fmt.Sprintf("%d_%s%s", newWishlistID, hex.EncodeToString(randomBytes), ext) + newImagePath := filepath.Join(newImageDir, newFileName) + + log.Printf("New image path: %s", newImagePath) + + // Копируем файл + srcFile, openErr := os.Open(originalPath) + if openErr != nil { + log.Printf("Error opening source file: %v", openErr) + } else { + defer srcFile.Close() + dstFile, createErr := os.Create(newImagePath) + if createErr != nil { + log.Printf("Error 
creating dest file: %v", createErr) + } else { + defer dstFile.Close() + _, copyErr := io.Copy(dstFile, srcFile) + if copyErr != nil { + log.Printf("Error copying file: %v", copyErr) + } else { + // Обновляем путь к изображению в БД (с /uploads/ в начале для совместимости) + relativePath := "/uploads/" + filepath.Join("wishlist", strconv.Itoa(userID), newFileName) + log.Printf("Updating image_path in DB to: %s", relativePath) + _, updateErr := tx.Exec(`UPDATE wishlist_items SET image_path = $1 WHERE id = $2`, relativePath, newWishlistID) + if updateErr != nil { + log.Printf("Error updating image_path in DB: %v", updateErr) + } + } + } + } + } else { + log.Printf("Original image file not found: %s, error: %v", originalPath, statErr) + } + } else { + log.Printf("No image to copy: imagePath.Valid=%v, imagePath.String=%s", imagePath.Valid, imagePath.String) + } + + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов + if groupName.Valid && groupName.String != "" { + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + } + + // Получаем созданное желание с условиями + items, err := a.getWishlistItemsWithConditions(userID, false) + if err != nil { + log.Printf("Error getting created wishlist item: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting created wishlist item: %v", err), http.StatusInternalServerError) + return + } + + var createdItem *WishlistItem + for i := range items { + if items[i].ID == newWishlistID { + createdItem = &items[i] + break + } + } + + if createdItem == nil { + sendErrorWithCORS(w, "Created item not found", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(createdItem) +} + +// 
============================================ +// Wishlist Boards handlers +// ============================================ + +// generateInviteToken генерирует уникальный токен для приглашения +func generateInviteToken() string { + b := make([]byte, 32) + rand.Read(b) + return hex.EncodeToString(b) +} + +// getBoardsHandler возвращает список досок пользователя (свои + присоединённые) +func (a *App) getBoardsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + boards := []WishlistBoard{} + + // Получаем свои доски + доски где пользователь участник + rows, err := a.DB.Query(` + SELECT DISTINCT + wb.id, + wb.owner_id, + COALESCE(u.name, u.email) as owner_name, + wb.name, + wb.invite_enabled, + wb.invite_token, + wb.created_at, + (SELECT COUNT(*) FROM wishlist_board_members wbm WHERE wbm.board_id = wb.id) as member_count, + (wb.owner_id = $1) as is_owner + FROM wishlist_boards wb + JOIN users u ON wb.owner_id = u.id + LEFT JOIN wishlist_board_members wbm ON wb.id = wbm.board_id + WHERE wb.deleted = FALSE + AND (wb.owner_id = $1 OR wbm.user_id = $1) + ORDER BY is_owner DESC, wb.created_at DESC + `, userID) + if err != nil { + log.Printf("Error getting boards: %v", err) + sendErrorWithCORS(w, "Error getting boards", http.StatusInternalServerError) + return + } + defer rows.Close() + + baseURL := getEnv("WEBHOOK_BASE_URL", "") + + for rows.Next() { + var board WishlistBoard + var inviteToken sql.NullString + err := rows.Scan( + &board.ID, + &board.OwnerID, + &board.OwnerName, + &board.Name, + &board.InviteEnabled, + &inviteToken, + &board.CreatedAt, + &board.MemberCount, + &board.IsOwner, + ) + if err != nil { + log.Printf("Error scanning board: %v", err) + continue + } + + // Invite token и URL только для владельца + if 
board.IsOwner && inviteToken.Valid { + board.InviteToken = &inviteToken.String + if baseURL != "" { + url := baseURL + "/invite/" + inviteToken.String + board.InviteURL = &url + } + } + + boards = append(boards, board) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(boards) +} + +// createBoardHandler создаёт новую доску +func (a *App) createBoardHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + var req BoardRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + if strings.TrimSpace(req.Name) == "" { + sendErrorWithCORS(w, "Name is required", http.StatusBadRequest) + return + } + + var boardID int + err := a.DB.QueryRow(` + INSERT INTO wishlist_boards (owner_id, name) + VALUES ($1, $2) + RETURNING id + `, userID, strings.TrimSpace(req.Name)).Scan(&boardID) + + if err != nil { + log.Printf("Error creating board: %v", err) + sendErrorWithCORS(w, "Error creating board", http.StatusInternalServerError) + return + } + + // Возвращаем созданную доску + board := WishlistBoard{ + ID: boardID, + OwnerID: userID, + Name: strings.TrimSpace(req.Name), + InviteEnabled: false, + MemberCount: 0, + IsOwner: true, + CreatedAt: time.Now(), + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(board) +} + +// getBoardHandler возвращает детали доски +func (a *App) getBoardHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", 
http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + var board WishlistBoard + var inviteToken sql.NullString + + err = a.DB.QueryRow(` + SELECT + wb.id, + wb.owner_id, + COALESCE(u.name, u.email) as owner_name, + wb.name, + wb.invite_enabled, + wb.invite_token, + wb.created_at, + (SELECT COUNT(*) FROM wishlist_board_members wbm WHERE wbm.board_id = wb.id) as member_count + FROM wishlist_boards wb + JOIN users u ON wb.owner_id = u.id + WHERE wb.id = $1 AND wb.deleted = FALSE + `, boardID).Scan( + &board.ID, + &board.OwnerID, + &board.OwnerName, + &board.Name, + &board.InviteEnabled, + &inviteToken, + &board.CreatedAt, + &board.MemberCount, + ) + + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error getting board: %v", err) + sendErrorWithCORS(w, "Error getting board", http.StatusInternalServerError) + return + } + + board.IsOwner = board.OwnerID == userID + + // Проверяем доступ (владелец или участник) + if !board.IsOwner { + var isMember bool + a.DB.QueryRow(` + SELECT EXISTS(SELECT 1 FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2) + `, boardID, userID).Scan(&isMember) + + if !isMember { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + } + + // Invite token и URL только для владельца + if board.IsOwner && inviteToken.Valid { + board.InviteToken = &inviteToken.String + baseURL := getEnv("WEBHOOK_BASE_URL", "") + if baseURL != "" { + url := baseURL + "/invite/" + inviteToken.String + board.InviteURL = &url + } + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(board) +} + +// updateBoardHandler обновляет доску (только владелец) +func (a *App) updateBoardHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + 
setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем что пользователь - владелец + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + if ownerID != userID { + sendErrorWithCORS(w, "Only owner can update board", http.StatusForbidden) + return + } + + var req BoardRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Обновляем поля + if strings.TrimSpace(req.Name) != "" { + _, err = a.DB.Exec(`UPDATE wishlist_boards SET name = $1, updated_at = NOW() WHERE id = $2`, + strings.TrimSpace(req.Name), boardID) + if err != nil { + log.Printf("Error updating board name: %v", err) + } + } + + if req.InviteEnabled != nil { + // Если включаем приглашения и нет токена - генерируем + if *req.InviteEnabled { + var currentToken sql.NullString + a.DB.QueryRow(`SELECT invite_token FROM wishlist_boards WHERE id = $1`, boardID).Scan(¤tToken) + + if !currentToken.Valid || currentToken.String == "" { + token := generateInviteToken() + _, err = a.DB.Exec(`UPDATE wishlist_boards SET invite_enabled = TRUE, invite_token = $1, updated_at = NOW() WHERE id = $2`, + token, boardID) + } else { + _, err = a.DB.Exec(`UPDATE wishlist_boards SET invite_enabled = TRUE, updated_at = NOW() WHERE id = $1`, boardID) + } + } else { + _, err = a.DB.Exec(`UPDATE wishlist_boards SET invite_enabled = FALSE, updated_at = NOW() WHERE id = $1`, boardID) + } + if err != nil { + 
log.Printf("Error updating board invite_enabled: %v", err) + } + } + + // Возвращаем обновлённую доску + var board WishlistBoard + var inviteToken sql.NullString + + a.DB.QueryRow(` + SELECT + wb.id, wb.owner_id, COALESCE(u.name, u.email), wb.name, wb.invite_enabled, wb.invite_token, wb.created_at, + (SELECT COUNT(*) FROM wishlist_board_members wbm WHERE wbm.board_id = wb.id) + FROM wishlist_boards wb + JOIN users u ON wb.owner_id = u.id + WHERE wb.id = $1 + `, boardID).Scan(&board.ID, &board.OwnerID, &board.OwnerName, &board.Name, &board.InviteEnabled, &inviteToken, &board.CreatedAt, &board.MemberCount) + + board.IsOwner = true + if inviteToken.Valid { + board.InviteToken = &inviteToken.String + baseURL := getEnv("WEBHOOK_BASE_URL", "") + if baseURL != "" { + url := baseURL + "/invite/" + inviteToken.String + board.InviteURL = &url + } + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(board) +} + +// deleteBoardHandler удаляет доску (только владелец) +func (a *App) deleteBoardHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем что пользователь - владелец + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + if ownerID != userID { + sendErrorWithCORS(w, "Only owner can delete board", http.StatusForbidden) + return + } + + // Soft delete доски и всех её желаний + _, err = a.DB.Exec(`UPDATE wishlist_boards SET deleted = TRUE, 
updated_at = NOW() WHERE id = $1`, boardID) + if err != nil { + log.Printf("Error deleting board: %v", err) + sendErrorWithCORS(w, "Error deleting board", http.StatusInternalServerError) + return + } + + // Soft delete всех желаний на доске + _, err = a.DB.Exec(`UPDATE wishlist_items SET deleted = TRUE, updated_at = NOW() WHERE board_id = $1`, boardID) + if err != nil { + log.Printf("Error deleting board items: %v", err) + } + + w.WriteHeader(http.StatusNoContent) +} + +// regenerateBoardInviteHandler перегенерирует invite token +func (a *App) regenerateBoardInviteHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем что пользователь - владелец + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + if ownerID != userID { + sendErrorWithCORS(w, "Only owner can regenerate invite", http.StatusForbidden) + return + } + + token := generateInviteToken() + _, err = a.DB.Exec(`UPDATE wishlist_boards SET invite_token = $1, invite_enabled = TRUE, updated_at = NOW() WHERE id = $2`, + token, boardID) + if err != nil { + log.Printf("Error regenerating invite token: %v", err) + sendErrorWithCORS(w, "Error regenerating invite", http.StatusInternalServerError) + return + } + + baseURL := getEnv("WEBHOOK_BASE_URL", "") + inviteURL := "" + if baseURL != "" { + inviteURL = baseURL + "/invite/" + token + } + + w.Header().Set("Content-Type", "application/json") + 
json.NewEncoder(w).Encode(map[string]string{ + "invite_token": token, + "invite_url": inviteURL, + }) +} + +// getBoardMembersHandler возвращает список участников доски +func (a *App) getBoardMembersHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем что пользователь - владелец + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + if ownerID != userID { + sendErrorWithCORS(w, "Only owner can view members", http.StatusForbidden) + return + } + + members := []BoardMember{} + rows, err := a.DB.Query(` + SELECT wbm.id, wbm.user_id, COALESCE(u.name, '') as name, u.email, wbm.joined_at + FROM wishlist_board_members wbm + JOIN users u ON wbm.user_id = u.id + WHERE wbm.board_id = $1 + ORDER BY wbm.joined_at DESC + `, boardID) + if err != nil { + log.Printf("Error getting members: %v", err) + sendErrorWithCORS(w, "Error getting members", http.StatusInternalServerError) + return + } + defer rows.Close() + + for rows.Next() { + var member BoardMember + err := rows.Scan(&member.ID, &member.UserID, &member.Name, &member.Email, &member.JoinedAt) + if err != nil { + log.Printf("Error scanning member: %v", err) + continue + } + members = append(members, member) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(members) +} + +// removeBoardMemberHandler удаляет участника из доски +func (a *App) removeBoardMemberHandler(w http.ResponseWriter, r 
*http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + memberUserID, err := strconv.Atoi(vars["userId"]) + if err != nil { + sendErrorWithCORS(w, "Invalid user ID", http.StatusBadRequest) + return + } + + // Проверяем что пользователь - владелец + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + if ownerID != userID { + sendErrorWithCORS(w, "Only owner can remove members", http.StatusForbidden) + return + } + + _, err = a.DB.Exec(`DELETE FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2`, boardID, memberUserID) + if err != nil { + log.Printf("Error removing member: %v", err) + sendErrorWithCORS(w, "Error removing member", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusNoContent) +} + +// leaveBoardHandler позволяет участнику выйти из доски +func (a *App) leaveBoardHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["id"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем что пользователь НЕ владелец + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, 
boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + if ownerID == userID { + sendErrorWithCORS(w, "Owner cannot leave board, delete it instead", http.StatusBadRequest) + return + } + + _, err = a.DB.Exec(`DELETE FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2`, boardID, userID) + if err != nil { + log.Printf("Error leaving board: %v", err) + sendErrorWithCORS(w, "Error leaving board", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusNoContent) +} + +// getBoardInviteInfoHandler возвращает информацию о доске по invite token +func (a *App) getBoardInviteInfoHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + vars := mux.Vars(r) + token := vars["token"] + + var info BoardInviteInfo + var ownerName string + err := a.DB.QueryRow(` + SELECT + wb.id, + wb.name, + COALESCE(u.name, u.email) as owner_name, + (SELECT COUNT(*) FROM wishlist_board_members wbm WHERE wbm.board_id = wb.id) as member_count + FROM wishlist_boards wb + JOIN users u ON wb.owner_id = u.id + WHERE wb.invite_token = $1 AND wb.invite_enabled = TRUE AND wb.deleted = FALSE + `, token).Scan(&info.BoardID, &info.Name, &ownerName, &info.MemberCount) + + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Invalid or expired invite link", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error getting invite info: %v", err) + sendErrorWithCORS(w, "Error getting invite info", http.StatusInternalServerError) + return + } + + info.OwnerName = ownerName + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(info) +} + +// joinBoardHandler присоединяет пользователя к доске по invite token +func (a *App) joinBoardHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + 
return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + token := vars["token"] + + // Получаем доску по токену + var boardID, ownerID int + var boardName, ownerName string + err := a.DB.QueryRow(` + SELECT wb.id, wb.owner_id, wb.name, COALESCE(u.name, u.email) + FROM wishlist_boards wb + JOIN users u ON wb.owner_id = u.id + WHERE wb.invite_token = $1 AND wb.invite_enabled = TRUE AND wb.deleted = FALSE + `, token).Scan(&boardID, &ownerID, &boardName, &ownerName) + + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Invalid or expired invite link", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error getting board by token: %v", err) + sendErrorWithCORS(w, "Error joining board", http.StatusInternalServerError) + return + } + + // Проверяем что пользователь не владелец + if ownerID == userID { + sendErrorWithCORS(w, "You are the owner of this board", http.StatusBadRequest) + return + } + + // Проверяем что пользователь ещё не участник + var exists bool + a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2)`, + boardID, userID).Scan(&exists) + if exists { + sendErrorWithCORS(w, "You are already a member of this board", http.StatusBadRequest) + return + } + + // Добавляем пользователя как участника + _, err = a.DB.Exec(`INSERT INTO wishlist_board_members (board_id, user_id) VALUES ($1, $2)`, boardID, userID) + if err != nil { + log.Printf("Error joining board: %v", err) + sendErrorWithCORS(w, "Error joining board", http.StatusInternalServerError) + return + } + + // Получаем количество участников + var memberCount int + a.DB.QueryRow(`SELECT COUNT(*) FROM wishlist_board_members WHERE board_id = $1`, boardID).Scan(&memberCount) + + board := WishlistBoard{ + ID: boardID, + OwnerID: ownerID, + OwnerName: ownerName, + Name: boardName, + InviteEnabled: true, + MemberCount: 
memberCount, + IsOwner: false, + } + + response := JoinBoardResponse{ + Board: board, + Message: "Вы успешно присоединились к доске!", + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(response) +} + +// getBoardItemsHandler возвращает желания на доске +func (a *App) getBoardItemsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["boardId"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к доске (владелец или участник) + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + + hasAccess := ownerID == userID + if !hasAccess { + var isMember bool + a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2)`, + boardID, userID).Scan(&isMember) + hasAccess = isMember + } + + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + // Получаем желания на доске (используем существующую логику, но фильтруем по board_id) + items, err := a.getWishlistItemsByBoard(boardID, userID) + if err != nil { + log.Printf("Error getting board items: %v", err) + sendErrorWithCORS(w, "Error getting items", http.StatusInternalServerError) + return + } + + // Разделяем на unlocked/locked + unlocked := []WishlistItem{} + locked := []WishlistItem{} + for _, item := range items { + if item.Unlocked { + unlocked = append(unlocked, item) + } else { + locked = append(locked, item) + } + } + 
+ // Сортируем разблокированные по цене от меньшего к большему + sort.Slice(unlocked, func(i, j int) bool { + priceI := 0.0 + priceJ := 0.0 + if unlocked[i].Price != nil { + priceI = *unlocked[i].Price + } + if unlocked[j].Price != nil { + priceJ = *unlocked[j].Price + } + if priceI == priceJ { + return unlocked[i].ID < unlocked[j].ID + } + return priceI < priceJ + }) + + // Разделяем заблокированные на группы (с задачами и без задач) + lockedWithoutTasks := []WishlistItem{} + lockedWithTasks := []WishlistItem{} + + for _, item := range locked { + hasUncompletedTasks := false + for _, cond := range item.UnlockConditions { + if cond.Type == "task_completion" && (cond.TaskCompleted == nil || !*cond.TaskCompleted) { + hasUncompletedTasks = true + break + } + } + if hasUncompletedTasks { + lockedWithTasks = append(lockedWithTasks, item) + } else { + lockedWithoutTasks = append(lockedWithoutTasks, item) + } + } + + // Сортируем каждую группу по времени разблокировки (от меньшего срока к большему) + sort.Slice(lockedWithoutTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithoutTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithoutTasks[j], userID) + if valueI == valueJ { + return lockedWithoutTasks[i].ID < lockedWithoutTasks[j].ID + } + return valueI < valueJ + }) + + sort.Slice(lockedWithTasks, func(i, j int) bool { + valueI := a.calculateLockedSortValue(lockedWithTasks[i], userID) + valueJ := a.calculateLockedSortValue(lockedWithTasks[j], userID) + if valueI == valueJ { + return lockedWithTasks[i].ID < lockedWithTasks[j].ID + } + return valueI < valueJ + }) + + // Объединяем: сначала без задач, потом с задачами + locked = append(lockedWithoutTasks, lockedWithTasks...) 
+ + // Считаем завершённые + var completedCount int + a.DB.QueryRow(`SELECT COUNT(*) FROM wishlist_items WHERE board_id = $1 AND completed = TRUE AND deleted = FALSE`, + boardID).Scan(&completedCount) + + response := WishlistResponse{ + Unlocked: unlocked, + Locked: locked, + CompletedCount: completedCount, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// getBoardCompletedHandler возвращает завершённые желания на доске +func (a *App) getBoardCompletedHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["boardId"]) + if err != nil { + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к доске (владелец или участник) + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + + hasAccess := ownerID == userID + if !hasAccess { + var isMember bool + a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2)`, + boardID, userID).Scan(&isMember) + hasAccess = isMember + } + + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + // Получаем завершённые желания на доске (отдельный запрос, так как getWishlistItemsByBoard исключает завершённые) + query := ` + SELECT + wi.id, + wi.name, + wi.price, + wi.image_path, + wi.link, + wi.completed, + wi.project_id AS item_project_id, + wp.name AS item_project_name, + wc.id AS condition_id, + wc.display_order, + wc.task_condition_id, + wc.score_condition_id, + 
wc.user_id, + tc.task_id, + t.name AS task_name, + sc.project_id, + p.name AS project_name, + sc.required_points, + sc.start_date, + COALESCE(u.name, u.email) AS user_name + FROM wishlist_items wi + LEFT JOIN projects wp ON wi.project_id = wp.id AND wp.deleted = FALSE + LEFT JOIN wishlist_conditions wc ON wi.id = wc.wishlist_item_id + LEFT JOIN task_conditions tc ON wc.task_condition_id = tc.id + LEFT JOIN tasks t ON tc.task_id = t.id AND t.deleted = FALSE + LEFT JOIN score_conditions sc ON wc.score_condition_id = sc.id + LEFT JOIN projects p ON sc.project_id = p.id AND p.deleted = FALSE + LEFT JOIN users u ON wc.user_id = u.id + WHERE wi.board_id = $1 + AND wi.deleted = FALSE + AND wi.completed = TRUE + ORDER BY wi.id, wc.display_order, wc.id + ` + + rows, err := a.DB.Query(query, boardID) + if err != nil { + log.Printf("Error executing query for board completed items (boardID=%d): %v", boardID, err) + sendErrorWithCORS(w, fmt.Sprintf("Error getting completed items: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + itemsMap := make(map[int]*WishlistItem) + + for rows.Next() { + var itemID int + var name string + var price sql.NullFloat64 + var imagePath sql.NullString + var link sql.NullString + var completed bool + var itemProjectID sql.NullInt64 + var itemProjectName sql.NullString + var conditionID sql.NullInt64 + var displayOrder sql.NullInt64 + var taskConditionID sql.NullInt64 + var scoreConditionID sql.NullInt64 + var userIDCond sql.NullInt64 + var taskID sql.NullInt64 + var taskName sql.NullString + var projectID sql.NullInt64 + var projectName sql.NullString + var requiredPoints sql.NullFloat64 + var startDate sql.NullTime + var userName sql.NullString + + err := rows.Scan( + &itemID, &name, &price, &imagePath, &link, &completed, &itemProjectID, &itemProjectName, + &conditionID, &displayOrder, &taskConditionID, &scoreConditionID, &userIDCond, + &taskID, &taskName, &projectID, &projectName, &requiredPoints, &startDate, 
&userName, + ) + if err != nil { + log.Printf("Error scanning completed wishlist item: %v", err) + continue + } + + item, exists := itemsMap[itemID] + if !exists { + item = &WishlistItem{ + ID: itemID, + Name: name, + Completed: completed, + UnlockConditions: []UnlockConditionDisplay{}, + } + if price.Valid { + item.Price = &price.Float64 + } + if imagePath.Valid && imagePath.String != "" { + url := imagePath.String + if !strings.HasPrefix(url, "http") { + url = url + "?t=" + strconv.FormatInt(time.Now().Unix(), 10) + } + item.ImageURL = &url + } + if link.Valid { + item.Link = &link.String + } + // Для завершённых желаний не устанавливаем project_id и project_name + // Они отображаются отдельно без группировки по проектам + itemsMap[itemID] = item + } + + if conditionID.Valid { + // Определяем владельца условия + conditionOwnerID := userID + if userIDCond.Valid { + conditionOwnerID = int(userIDCond.Int64) + } + + // Если это условие по задаче, проверяем существует ли задача + if taskConditionID.Valid && taskID.Valid { + // Проверяем, существует ли задача (не удалена) + var taskExists bool + err := a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE)`, taskID.Int64, conditionOwnerID).Scan(&taskExists) + if err != nil || !taskExists { + // Задача удалена - не добавляем условие в список, но при проверке блокировки оно считается выполненным + continue + } + } + + condition := UnlockConditionDisplay{ + ID: int(conditionID.Int64), + DisplayOrder: int(displayOrder.Int64), + } + + if taskConditionID.Valid { + condition.Type = "task_completion" + if taskID.Valid { + taskIDVal := int(taskID.Int64) + condition.TaskID = &taskIDVal + if taskName.Valid { + condition.TaskName = &taskName.String + } + } + } else if scoreConditionID.Valid { + condition.Type = "project_points" + if projectID.Valid { + projectIDVal := int(projectID.Int64) + condition.ProjectID = &projectIDVal + if projectName.Valid { + condition.ProjectName = 
&projectName.String
					}
					if requiredPoints.Valid {
						condition.RequiredPoints = &requiredPoints.Float64
					}
					if startDate.Valid {
						dateStr := startDate.Time.Format("2006-01-02")
						condition.StartDate = &dateStr
					}
				}
			}

			if userIDCond.Valid {
				userIDVal := int(userIDCond.Int64)
				condition.UserID = &userIDVal
				if userName.Valid {
					condition.UserName = &userName.String
				}
			}

			item.UnlockConditions = append(item.UnlockConditions, condition)
		}
	}

	if err := rows.Err(); err != nil {
		log.Printf("Error iterating rows for board completed items (boardID=%d): %v", boardID, err)
		sendErrorWithCORS(w, fmt.Sprintf("Error getting completed items: %v", err), http.StatusInternalServerError)
		return
	}

	// Convert the accumulated map into a slice for the response.
	completed := make([]WishlistItem, 0, len(itemsMap))
	for _, item := range itemsMap {
		completed = append(completed, *item)
	}

	// Sort by price, most expensive first (nil price sorts as 0).
	sort.Slice(completed, func(i, j int) bool {
		priceI := 0.0
		priceJ := 0.0
		if completed[i].Price != nil {
			priceI = *completed[i].Price
		}
		if completed[j].Price != nil {
			priceJ = *completed[j].Price
		}
		return priceI > priceJ
	})

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(completed)
}

// calculateUnlockedSortValue returns the total number of points that were
// needed to unlock an item: each task-completion condition counts as one
// point and each project-points condition contributes its required_points.
func calculateUnlockedSortValue(item WishlistItem) float64 {
	total := 0.0
	for _, cond := range item.UnlockConditions {
		switch cond.Type {
		case "task_completion":
			total++
		case "project_points":
			if cond.RequiredPoints != nil {
				total += *cond.RequiredPoints
			}
		}
	}
	return total
}

// calculateLockedSortValue estimates how many weeks remain until the item
// unlocks (the maximum across its project-points conditions). NOTE(review):
// the previous comment claimed it summed remaining points — the implementation
// computes weeks via calculateProjectUnlockWeeks; the doc is corrected here.
func (a
*App) calculateLockedSortValue(item WishlistItem, userID int) float64 { + // Если нет условий, возвращаем большое значение (отсутствие условий = все выполнены) + if len(item.UnlockConditions) == 0 { + return 999999.0 + } + + maxWeeks := 0.0 + hasProjectConditions := false + allCompleted := true + + for _, condition := range item.UnlockConditions { + if condition.Type == "project_points" { + hasProjectConditions = true + if condition.RequiredPoints != nil { + var startDate sql.NullTime + if condition.StartDate != nil { + date, err := time.Parse("2006-01-02", *condition.StartDate) + if err == nil { + startDate = sql.NullTime{Time: date, Valid: true} + } + } + + // ВАЖНО: Используем владельца условия из condition.UserID + // Если condition.UserID есть - это владелец условия + // Если нет - получаем владельца желания из БД (для старых условий) + // НЕ используем текущего пользователя (userID), так как условие может принадлежать другому пользователю + conditionOwnerID := 0 + if condition.UserID != nil { + conditionOwnerID = *condition.UserID + } else { + // Если нет владельца условия, получаем владельца желания из БД + var itemOwnerID int + err := a.DB.QueryRow(`SELECT user_id FROM wishlist_items WHERE id = $1`, item.ID).Scan(&itemOwnerID) + if err != nil { + log.Printf("Error getting wishlist item owner for item %d: %v", item.ID, err) + continue // Пропускаем условие, если не можем получить владельца + } + conditionOwnerID = itemOwnerID + } + + // Получаем projectID из условия + if condition.ProjectID != nil { + weeks := a.calculateProjectUnlockWeeks( + *condition.ProjectID, + *condition.RequiredPoints, + startDate, + conditionOwnerID, // Владелец условия, а не текущий пользователь + ) + // weeks > 0 && < 99999 означает, что условие еще не выполнено и расчет успешен + // weeks == 0 означает условие выполнено + // weeks == 99999 означает медиана отсутствует (нельзя рассчитать) или ошибка расчета + if weeks == 0 { + // Условие выполнено - считаем как 0 недель + // Не 
обновляем maxWeeks, так как 0 < любого положительного значения + } else if weeks > 0 && weeks < 99999 { + // Условие не выполнено - учитываем в maxWeeks + allCompleted = false + if weeks > maxWeeks { + maxWeeks = weeks + } + } else { + // weeks == 99999 - нельзя рассчитать, считаем как невыполненное + allCompleted = false + } + } + } + } + } + + // Если были условия по проектам и все выполнены, возвращаем 0 (закрытые испытания = 0 недель) + if hasProjectConditions && allCompleted { + return 0.0 + } + + // Если не было условий по проектам (только задачи или нет условий) + if !hasProjectConditions { + return 999999.0 + } + + return maxWeeks +} + +// getWishlistItemsByBoard загружает желания конкретной доски + +func (a *App) getWishlistItemsByBoard(boardID int, userID int) ([]WishlistItem, error) { + query := ` + SELECT + wi.id, + wi.name, + wi.price, + wi.image_path, + wi.link, + wi.completed, + wi.group_name, + COALESCE(wi.author_id, wi.user_id) AS item_owner_id, + wc.id AS condition_id, + wc.display_order, + wc.task_condition_id, + wc.score_condition_id, + wc.user_id AS condition_user_id, + tc.task_id, + t.name AS task_name, + sc.project_id, + p.name AS project_name, + sc.required_points, + sc.start_date + FROM wishlist_items wi + LEFT JOIN wishlist_conditions wc ON wi.id = wc.wishlist_item_id + LEFT JOIN task_conditions tc ON wc.task_condition_id = tc.id + LEFT JOIN tasks t ON tc.task_id = t.id AND t.deleted = FALSE + LEFT JOIN score_conditions sc ON wc.score_condition_id = sc.id + LEFT JOIN projects p ON sc.project_id = p.id AND p.deleted = FALSE + WHERE wi.board_id = $1 + AND wi.deleted = FALSE + AND wi.completed = FALSE + ORDER BY wi.id, wc.display_order, wc.id + ` + + rows, err := a.DB.Query(query, boardID) + if err != nil { + return nil, err + } + defer rows.Close() + + itemsMap := make(map[int]*WishlistItem) + + for rows.Next() { + var itemID int + var name string + var price sql.NullFloat64 + var imagePath sql.NullString + var link sql.NullString + var 
completed bool + var groupName sql.NullString + var itemOwnerID sql.NullInt64 + var conditionID sql.NullInt64 + var displayOrder sql.NullInt64 + var taskConditionID sql.NullInt64 + var scoreConditionID sql.NullInt64 + var conditionUserID sql.NullInt64 + var taskID sql.NullInt64 + var taskName sql.NullString + var projectID sql.NullInt64 + var projectName sql.NullString + var requiredPoints sql.NullFloat64 + var startDate sql.NullTime + + err := rows.Scan( + &itemID, &name, &price, &imagePath, &link, &completed, &groupName, &itemOwnerID, + &conditionID, &displayOrder, &taskConditionID, &scoreConditionID, &conditionUserID, + &taskID, &taskName, &projectID, &projectName, &requiredPoints, &startDate, + ) + if err != nil { + log.Printf("Error scanning wishlist item: %v", err) + continue + } + + item, exists := itemsMap[itemID] + if !exists { + item = &WishlistItem{ + ID: itemID, + Name: name, + Completed: completed, + UnlockConditions: []UnlockConditionDisplay{}, + } + if price.Valid { + item.Price = &price.Float64 + } + if imagePath.Valid && imagePath.String != "" { + url := imagePath.String + if !strings.HasPrefix(url, "http") { + url = url + "?t=" + strconv.FormatInt(time.Now().Unix(), 10) + } + item.ImageURL = &url + } + if link.Valid { + item.Link = &link.String + } + if groupName.Valid && groupName.String != "" { + groupNameVal := groupName.String + item.GroupName = &groupNameVal + } + itemsMap[itemID] = item + } + + if conditionID.Valid { + // Используем user_id из условия, если он есть, иначе используем владельца желания + if !itemOwnerID.Valid { + log.Printf("Warning: item_owner_id is NULL for wishlist item %d, skipping condition", itemID) + continue + } + conditionOwnerID := int(itemOwnerID.Int64) + if conditionUserID.Valid { + conditionOwnerID = int(conditionUserID.Int64) + } + + // Если это условие по задаче, проверяем существует ли задача + if taskConditionID.Valid && taskID.Valid { + // Проверяем, существует ли задача (не удалена) + var taskExists bool + 
err := a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE)`, taskID.Int64, conditionOwnerID).Scan(&taskExists) + if err != nil || !taskExists { + // Задача удалена - не добавляем условие в список, но при проверке блокировки оно считается выполненным + continue + } + } + + condition := UnlockConditionDisplay{ + ID: int(conditionID.Int64), + DisplayOrder: int(displayOrder.Int64), + } + + if conditionUserID.Valid { + conditionOwnerIDVal := int(conditionUserID.Int64) + condition.UserID = &conditionOwnerIDVal + } else { + itemOwnerIDVal := int(itemOwnerID.Int64) + condition.UserID = &itemOwnerIDVal + } + + if taskConditionID.Valid { + condition.Type = "task_completion" + if taskName.Valid { + condition.TaskName = &taskName.String + } + // Проверяем выполнена ли задача для владельца условия + if taskID.Valid { + taskIDVal := int(taskID.Int64) + condition.TaskID = &taskIDVal + var taskCompleted int + err := a.DB.QueryRow(`SELECT completed FROM tasks WHERE id = $1 AND user_id = $2 AND deleted = FALSE`, taskID.Int64, conditionOwnerID).Scan(&taskCompleted) + if err == nil { + isCompleted := taskCompleted > 0 + condition.TaskCompleted = &isCompleted + } + } + } else if scoreConditionID.Valid { + condition.Type = "project_points" + if projectName.Valid { + condition.ProjectName = &projectName.String + } + if projectID.Valid { + projectIDVal := int(projectID.Int64) + condition.ProjectID = &projectIDVal + // Считаем текущие баллы для владельца условия + points, _ := a.calculateProjectPointsFromDate(int(projectID.Int64), startDate, conditionOwnerID) + condition.CurrentPoints = &points + } + if requiredPoints.Valid { + condition.RequiredPoints = &requiredPoints.Float64 + } + if startDate.Valid { + dateStr := startDate.Time.Format("2006-01-02") + condition.StartDate = &dateStr + } + // Рассчитываем и форматируем срок разблокировки + if condition.ProjectID != nil && condition.RequiredPoints != nil { + weeks := 
a.calculateProjectUnlockWeeks( + *condition.ProjectID, + *condition.RequiredPoints, + startDate, + conditionOwnerID, + ) + weeksText := formatWeeksText(weeks) + condition.WeeksText = &weeksText + } + } + + item.UnlockConditions = append(item.UnlockConditions, condition) + } + } + + // Преобразуем map в slice и определяем unlocked + items := make([]WishlistItem, 0, len(itemsMap)) + for _, item := range itemsMap { + // Сортируем условия в нужном порядке + a.sortUnlockConditions(item.UnlockConditions, userID) + + // Проверяем все условия + item.Unlocked = true + if len(item.UnlockConditions) > 0 { + for _, cond := range item.UnlockConditions { + if cond.Type == "task_completion" { + if cond.TaskCompleted == nil || !*cond.TaskCompleted { + item.Unlocked = false + break + } + } else if cond.Type == "project_points" { + if cond.CurrentPoints == nil || cond.RequiredPoints == nil || *cond.CurrentPoints < *cond.RequiredPoints { + item.Unlocked = false + break + } + } + } + } + + // Определяем первое заблокированное условие и количество остальных + if !item.Unlocked && !item.Completed { + lockedCount := 0 + var firstLocked *UnlockConditionDisplay + for i := range item.UnlockConditions { + condition := &item.UnlockConditions[i] + if isConditionLocked(*condition) { + lockedCount++ + if firstLocked == nil { + firstLocked = condition + } + } + } + if firstLocked != nil { + item.FirstLockedCondition = firstLocked + item.MoreLockedConditions = lockedCount - 1 + item.LockedConditionsCount = lockedCount + } + } + + // Загружаем связанную задачу текущего пользователя, если есть + var linkedTaskID, linkedTaskCompleted, linkedTaskUserID sql.NullInt64 + var linkedTaskName sql.NullString + var linkedTaskNextShowAt sql.NullTime + linkedTaskErr := a.DB.QueryRow(` + SELECT t.id, t.name, t.completed, t.next_show_at, t.user_id + FROM tasks t + WHERE t.wishlist_id = $1 AND t.user_id = $2 AND t.deleted = FALSE + LIMIT 1 + `, item.ID, userID).Scan(&linkedTaskID, &linkedTaskName, 
&linkedTaskCompleted, &linkedTaskNextShowAt, &linkedTaskUserID) + + if linkedTaskErr == nil && linkedTaskID.Valid { + linkedTask := &LinkedTask{ + ID: int(linkedTaskID.Int64), + Name: linkedTaskName.String, + Completed: int(linkedTaskCompleted.Int64), + } + if linkedTaskNextShowAt.Valid { + nextShowAtStr := linkedTaskNextShowAt.Time.Format(time.RFC3339) + linkedTask.NextShowAt = &nextShowAtStr + } + if linkedTaskUserID.Valid { + userIDVal := int(linkedTaskUserID.Int64) + linkedTask.UserID = &userIDVal + } + item.LinkedTask = linkedTask + } else if linkedTaskErr != sql.ErrNoRows { + log.Printf("Error loading linked task for wishlist %d: %v", item.ID, linkedTaskErr) + // Не возвращаем ошибку, просто не устанавливаем linked_task + } + + // Подсчитываем общее количество не закрытых задач для этого желания (всех пользователей) + // Исключаем linked_task из подсчета, если она есть + // Учитываем только не закрытые задачи (completed = 0) + var tasksCount int + if linkedTaskID.Valid { + // Если есть linked_task, исключаем её из подсчета + err = a.DB.QueryRow(` + SELECT COUNT(*) + FROM tasks t + WHERE t.wishlist_id = $1 AND t.deleted = FALSE AND t.completed = 0 AND t.id != $2 + `, item.ID, linkedTaskID.Int64).Scan(&tasksCount) + } else { + // Если нет linked_task, считаем все не закрытые задачи + err = a.DB.QueryRow(` + SELECT COUNT(*) + FROM tasks t + WHERE t.wishlist_id = $1 AND t.deleted = FALSE AND t.completed = 0 + `, item.ID).Scan(&tasksCount) + } + if err != nil { + log.Printf("Error counting tasks for wishlist %d: %v", item.ID, err) + tasksCount = 0 + } + item.TasksCount = tasksCount + + items = append(items, *item) + } + + return items, nil +} + +// createBoardItemHandler создаёт желание на доске +func (a *App) createBoardItemHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, 
"Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + boardID, err := strconv.Atoi(vars["boardId"]) + if err != nil { + log.Printf("createBoardItemHandler: Error parsing boardId from URL: %v, vars['boardId']='%s'", err, vars["boardId"]) + sendErrorWithCORS(w, "Invalid board ID", http.StatusBadRequest) + return + } + + // Проверяем доступ к доске + var ownerID int + err = a.DB.QueryRow(`SELECT owner_id FROM wishlist_boards WHERE id = $1 AND deleted = FALSE`, boardID).Scan(&ownerID) + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Board not found", http.StatusNotFound) + return + } + + hasAccess := ownerID == userID + if !hasAccess { + var isMember bool + a.DB.QueryRow(`SELECT EXISTS(SELECT 1 FROM wishlist_board_members WHERE board_id = $1 AND user_id = $2)`, + boardID, userID).Scan(&isMember) + hasAccess = isMember + } + + if !hasAccess { + sendErrorWithCORS(w, "Access denied", http.StatusForbidden) + return + } + + var req WishlistRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("createBoardItemHandler: Error decoding wishlist request: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + log.Printf("createBoardItemHandler: decoded request - name='%s', price=%v, link='%s', conditions=%d", + req.Name, req.Price, req.Link, len(req.UnlockConditions)) + + if req.UnlockConditions == nil { + log.Printf("createBoardItemHandler: WARNING - UnlockConditions is nil, initializing empty slice") + req.UnlockConditions = []UnlockConditionRequest{} + } + + if strings.TrimSpace(req.Name) == "" { + log.Printf("createBoardItemHandler: Name is required") + sendErrorWithCORS(w, "Name is required", http.StatusBadRequest) + return + } + + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error starting transaction: %v", err) + sendErrorWithCORS(w, "Error creating item", http.StatusInternalServerError) + return + } + defer tx.Rollback() + + var itemID int + err = tx.QueryRow(` 
+ INSERT INTO wishlist_items (user_id, board_id, author_id, name, price, link, group_name, completed, deleted) + VALUES ($1, $2, $3, $4, $5, $6, $7, FALSE, FALSE) + RETURNING id + `, ownerID, boardID, userID, strings.TrimSpace(req.Name), req.Price, req.Link, req.GroupName).Scan(&itemID) + + if err != nil { + log.Printf("createBoardItemHandler: Error creating board item: %v", err) + sendErrorWithCORS(w, "Error creating item", http.StatusInternalServerError) + return + } + + log.Printf("createBoardItemHandler: created wishlist item id=%d", itemID) + + // Сохраняем условия с user_id текущего пользователя + if len(req.UnlockConditions) > 0 { + log.Printf("createBoardItemHandler: saving %d conditions", len(req.UnlockConditions)) + err = a.saveWishlistConditionsWithUserID(tx, itemID, userID, req.UnlockConditions) + if err != nil { + log.Printf("createBoardItemHandler: Error saving wishlist conditions: %v", err) + sendErrorWithCORS(w, "Error saving conditions", http.StatusInternalServerError) + return + } + log.Printf("createBoardItemHandler: conditions saved successfully") + } else { + log.Printf("createBoardItemHandler: no conditions to save") + } + + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, "Error creating item", http.StatusInternalServerError) + return + } + + // Обновляем MV для групповых саджестов + if req.GroupName != nil && *req.GroupName != "" { + if err := a.refreshGroupSuggestionsMV(); err != nil { + log.Printf("Warning: Failed to refresh group suggestions MV: %v", err) + } + } + + // Возвращаем созданное желание + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + json.NewEncoder(w).Encode(map[string]int{"id": itemID}) +} + +// saveWishlistConditionsWithUserID сохраняет условия с указанием user_id +func (a *App) saveWishlistConditionsWithUserID(tx *sql.Tx, wishlistItemID int, userID int, conditions []UnlockConditionRequest) error { + 
log.Printf("saveWishlistConditionsWithUserID: wishlistItemID=%d, userID=%d, conditions=%d", + wishlistItemID, userID, len(conditions)) + + for i, cond := range conditions { + displayOrder := i + if cond.DisplayOrder != nil { + displayOrder = *cond.DisplayOrder + } + + log.Printf("saveWishlistConditionsWithUserID: processing condition %d - type='%s', taskID=%v, projectID=%v", + i, cond.Type, cond.TaskID, cond.ProjectID) + + switch cond.Type { + case "task_completion": + if cond.TaskID == nil { + continue + } + // Создаём task_condition + var taskConditionID int + err := tx.QueryRow(` + INSERT INTO task_conditions (task_id) + VALUES ($1) + ON CONFLICT (task_id) DO UPDATE SET task_id = EXCLUDED.task_id + RETURNING id + `, *cond.TaskID).Scan(&taskConditionID) + if err != nil { + log.Printf("saveWishlistConditionsWithUserID: error creating task condition: %v", err) + return fmt.Errorf("error creating task condition: %w", err) + } + // Связываем с wishlist_item + _, err = tx.Exec(` + INSERT INTO wishlist_conditions (wishlist_item_id, user_id, task_condition_id, display_order) + VALUES ($1, $2, $3, $4) + `, wishlistItemID, userID, taskConditionID, displayOrder) + if err != nil { + log.Printf("saveWishlistConditionsWithUserID: error linking task condition: %v", err) + return fmt.Errorf("error linking task condition: %w", err) + } + + case "project_points": + if cond.ProjectID == nil || cond.RequiredPoints == nil { + continue + } + // Создаём score_condition + var scoreConditionID int + var startDateVal interface{} = nil + if cond.StartDate != nil && *cond.StartDate != "" { + startDateVal = *cond.StartDate + } + err := tx.QueryRow(` + INSERT INTO score_conditions (project_id, required_points, start_date) + VALUES ($1, $2, $3) + ON CONFLICT (project_id, required_points, start_date) DO UPDATE SET required_points = EXCLUDED.required_points + RETURNING id + `, *cond.ProjectID, *cond.RequiredPoints, startDateVal).Scan(&scoreConditionID) + if err != nil { + 
log.Printf("saveWishlistConditionsWithUserID: error creating score condition: %v", err) + return fmt.Errorf("error creating score condition: %w", err) + } + // Связываем с wishlist_item + _, err = tx.Exec(` + INSERT INTO wishlist_conditions (wishlist_item_id, user_id, score_condition_id, display_order) + VALUES ($1, $2, $3, $4) + `, wishlistItemID, userID, scoreConditionID, displayOrder) + if err != nil { + log.Printf("saveWishlistConditionsWithUserID: error linking score condition: %v", err) + return fmt.Errorf("error linking score condition: %w", err) + } + } + } + return nil +} + +// LinkMetadataResponse структура ответа с метаданными ссылки +type LinkMetadataResponse struct { + Title string `json:"title,omitempty"` + Image string `json:"image,omitempty"` + Price *float64 `json:"price,omitempty"` + Description string `json:"description,omitempty"` +} + +// extractMetadataViaHTTP извлекает метаданные через HTTP-запрос и парсинг HTML +// Это стандартный метод, используемый Telegram, Facebook и другими сервисами +func extractMetadataViaHTTP(targetURL string) (*LinkMetadataResponse, error) { + // Валидация URL + parsedURL, err := url.Parse(targetURL) + if err != nil || parsedURL.Scheme == "" || parsedURL.Host == "" { + return nil, fmt.Errorf("invalid URL format: %s", targetURL) + } + + // HTTP клиент с увеличенным таймаутом и поддержкой редиректов + transport := &http.Transport{ + DisableKeepAlives: false, + MaxIdleConns: 10, + IdleConnTimeout: 90 * time.Second, + } + + client := &http.Client{ + Timeout: 30 * time.Second, + Transport: transport, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + if len(via) >= 10 { + return fmt.Errorf("stopped after 10 redirects") + } + return nil + }, + } + + httpReq, err := http.NewRequest("GET", targetURL, nil) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + + // Устанавливаем заголовки, максимально имитирующие реальный браузер Chrome + httpReq.Header.Set("User-Agent", 
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36") + httpReq.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7") + httpReq.Header.Set("Accept-Language", "ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7") + httpReq.Header.Set("Accept-Encoding", "gzip, deflate, br, zstd") + httpReq.Header.Set("Connection", "keep-alive") + httpReq.Header.Set("Upgrade-Insecure-Requests", "1") + httpReq.Header.Set("Sec-Fetch-Dest", "document") + httpReq.Header.Set("Sec-Fetch-Mode", "navigate") + httpReq.Header.Set("Sec-Fetch-Site", "none") + httpReq.Header.Set("Sec-Fetch-User", "?1") + httpReq.Header.Set("Sec-Ch-Ua", `"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"`) + httpReq.Header.Set("Sec-Ch-Ua-Mobile", "?0") + httpReq.Header.Set("Sec-Ch-Ua-Platform", `"macOS"`) + httpReq.Header.Set("Cache-Control", "max-age=0") + httpReq.Header.Set("DNT", "1") + + if parsedURL.Host != "" { + referer := fmt.Sprintf("%s://%s/", parsedURL.Scheme, parsedURL.Host) + httpReq.Header.Set("Referer", referer) + } + + time.Sleep(100 * time.Millisecond) + + resp, err := client.Do(httpReq) + if err != nil { + return nil, fmt.Errorf("error fetching URL: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, fmt.Errorf("HTTP %d: %s", resp.StatusCode, resp.Status) + } + + limitedReader := io.LimitReader(resp.Body, 512*1024) + bodyBytes, err := io.ReadAll(limitedReader) + if err != nil { + return nil, fmt.Errorf("error reading response: %w", err) + } + + if len(bodyBytes) >= 2 && bodyBytes[0] == 0x1f && bodyBytes[1] == 0x8b { + gzipReader, err := gzip.NewReader(bytes.NewReader(bodyBytes)) + if err == nil { + defer gzipReader.Close() + decompressed, err := io.ReadAll(gzipReader) + if err == nil { + bodyBytes = decompressed + } + } + } + + body := string(bodyBytes) + metadata := 
&LinkMetadataResponse{}

	// Extract Open Graph tags. Two patterns per tag because attribute
	// order varies: property-before-content and content-before-property.
	// FIX: the regex literals had their "<meta[^>" / "<title[^>" prefixes
	// stripped (tag-like text eaten by an HTML sanitizer), leaving patterns
	// like `(?i)]*(?:property|name)…` that could not anchor on the tag —
	// restored here.
	ogTitleRe := regexp.MustCompile(`(?i)<meta[^>]*(?:property|name)\s*=\s*["']og:title["'][^>]*content\s*=\s*["']([^"']+)["']`)
	ogTitleRe2 := regexp.MustCompile(`(?i)<meta[^>]*content\s*=\s*["']([^"']+)["'][^>]*(?:property|name)\s*=\s*["']og:title["']`)
	ogImageRe := regexp.MustCompile(`(?i)<meta[^>]*(?:property|name)\s*=\s*["']og:image["'][^>]*content\s*=\s*["']([^"']+)["']`)
	ogImageRe2 := regexp.MustCompile(`(?i)<meta[^>]*content\s*=\s*["']([^"']+)["'][^>]*(?:property|name)\s*=\s*["']og:image["']`)
	ogDescRe := regexp.MustCompile(`(?i)<meta[^>]*(?:property|name)\s*=\s*["']og:description["'][^>]*content\s*=\s*["']([^"']+)["']`)
	ogDescRe2 := regexp.MustCompile(`(?i)<meta[^>]*content\s*=\s*["']([^"']+)["'][^>]*(?:property|name)\s*=\s*["']og:description["']`)

	if matches := ogTitleRe.FindStringSubmatch(body); len(matches) > 1 {
		metadata.Title = strings.TrimSpace(matches[1])
	} else if matches := ogTitleRe2.FindStringSubmatch(body); len(matches) > 1 {
		metadata.Title = strings.TrimSpace(matches[1])
	}

	if matches := ogImageRe.FindStringSubmatch(body); len(matches) > 1 {
		metadata.Image = strings.TrimSpace(matches[1])
	} else if matches := ogImageRe2.FindStringSubmatch(body); len(matches) > 1 {
		metadata.Image = strings.TrimSpace(matches[1])
	}

	if matches := ogDescRe.FindStringSubmatch(body); len(matches) > 1 {
		metadata.Description = strings.TrimSpace(matches[1])
	} else if matches := ogDescRe2.FindStringSubmatch(body); len(matches) > 1 {
		metadata.Description = strings.TrimSpace(matches[1])
	}

	// Fallback: plain <title>. If it looks like a bot-check/captcha page,
	// discard everything scraped so far rather than cache junk metadata.
	if metadata.Title == "" {
		titleRe := regexp.MustCompile(`(?i)<title[^>]*>([^<]+)</title>`)
		if matches := titleRe.FindStringSubmatch(body); len(matches) > 1 {
			metadata.Title = strings.TrimSpace(matches[1])
			if strings.Contains(strings.ToLower(metadata.Title), "робот") ||
				strings.Contains(strings.ToLower(metadata.Title), "captcha") ||
				strings.Contains(strings.ToLower(metadata.Title), "вы не робот") {
				metadata.Title = ""
				metadata.Image = ""
+ metadata.Description = "" + } + } + } + + if metadata.Image == "" { + twitterImageRe := regexp.MustCompile(`(?i)]*(?:property|name)\s*=\s*["']twitter:image["'][^>]*content\s*=\s*["']([^"']+)["']`) + if matches := twitterImageRe.FindStringSubmatch(body); len(matches) > 1 { + metadata.Image = strings.TrimSpace(matches[1]) + } else { + twitterImageRe2 := regexp.MustCompile(`(?i)]*content\s*=\s*["']([^"']+)["'][^>]*(?:property|name)\s*=\s*["']twitter:image["']`) + if matches := twitterImageRe2.FindStringSubmatch(body); len(matches) > 1 { + metadata.Image = strings.TrimSpace(matches[1]) + } + } + } + + // Поиск цены + jsonLdRe := regexp.MustCompile(`(?i)]*type\s*=\s*["']application/ld\+json["'][^>]*>([^<]+)`) + jsonLdMatches := jsonLdRe.FindAllStringSubmatch(body, -1) + for _, match := range jsonLdMatches { + if len(match) > 1 { + jsonStr := match[1] + priceRe := regexp.MustCompile(`(?i)"price"\s*:\s*"?(\d+(?:[.,]\d+)?)"?`) + if priceMatches := priceRe.FindStringSubmatch(jsonStr); len(priceMatches) > 1 { + priceStr := strings.ReplaceAll(priceMatches[1], ",", ".") + if price, err := strconv.ParseFloat(priceStr, 64); err == nil && price > 0 && price < 100000000 { + metadata.Price = &price + break + } + } + } + } + + if metadata.Price == nil { + priceRe := regexp.MustCompile(`(?i)"price"\s*:\s*"?(\d+(?:[.,]\d+)?)"?`) + if matches := priceRe.FindStringSubmatch(body); len(matches) > 1 { + priceStr := strings.ReplaceAll(matches[1], ",", ".") + if price, err := strconv.ParseFloat(priceStr, 64); err == nil && price > 0 && price < 100000000 { + metadata.Price = &price + } + } + } + + if metadata.Price == nil { + metaPriceRe := regexp.MustCompile(`(?i)]*(?:property|name)\s*=\s*["'](?:price|product:price)["'][^>]*content\s*=\s*["']([^"']+)["']`) + if matches := metaPriceRe.FindStringSubmatch(body); len(matches) > 1 { + priceStr := strings.ReplaceAll(strings.TrimSpace(matches[1]), ",", ".") + priceStr = regexp.MustCompile(`[^\d.]`).ReplaceAllString(priceStr, "") + if price, err 
:= strconv.ParseFloat(priceStr, 64); err == nil && price > 0 && price < 100000000 { + metadata.Price = &price + } + } + } + + // Нормализуем URL изображения + if metadata.Image != "" && !strings.HasPrefix(metadata.Image, "http") { + baseURL := fmt.Sprintf("%s://%s", parsedURL.Scheme, parsedURL.Host) + if strings.HasPrefix(metadata.Image, "//") { + metadata.Image = parsedURL.Scheme + ":" + metadata.Image + } else if strings.HasPrefix(metadata.Image, "/") { + metadata.Image = baseURL + metadata.Image + } else { + metadata.Image = baseURL + "/" + metadata.Image + } + } + + metadata.Title = html.UnescapeString(metadata.Title) + metadata.Description = html.UnescapeString(metadata.Description) + + return metadata, nil +} + +// extractMetadataViaChrome извлекает метаданные через headless Chrome +// Используется как fallback для JavaScript-рендеринга страниц +func extractMetadataViaChrome(targetURL string) (*LinkMetadataResponse, error) { + opts := append(chromedp.DefaultExecAllocatorOptions[:], + chromedp.Flag("headless", true), + chromedp.Flag("disable-gpu", true), + chromedp.Flag("no-sandbox", true), + chromedp.Flag("disable-dev-shm-usage", true), + ) + + allocCtx, cancel := chromedp.NewExecAllocator(context.Background(), opts...) 
+ defer cancel() + + ctx, cancel := chromedp.NewContext(allocCtx, chromedp.WithLogf(log.Printf)) + defer cancel() + + ctx, cancel = context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + metadata := &LinkMetadataResponse{} + + // Используем map для получения данных из JavaScript + var result map[string]interface{} + + err := chromedp.Run(ctx, + chromedp.Navigate(targetURL), + chromedp.WaitVisible("body", chromedp.ByQuery), + chromedp.Sleep(2*time.Second), // Даем время на выполнение JavaScript + chromedp.Evaluate(` + (function() { + const result = { + title: '', + image: '', + description: '', + price: null + }; + + // Извлекаем og:title + const ogTitle = document.querySelector('meta[property="og:title"]'); + if (ogTitle) { + result.title = ogTitle.getAttribute('content') || ''; + } else { + // Fallback на обычный title + const titleEl = document.querySelector('title'); + if (titleEl) { + result.title = titleEl.textContent || ''; + } + } + + // Извлекаем og:image + const ogImage = document.querySelector('meta[property="og:image"]'); + if (ogImage) { + result.image = ogImage.getAttribute('content') || ''; + } else { + // Fallback на twitter:image + const twitterImage = document.querySelector('meta[name="twitter:image"]'); + if (twitterImage) { + result.image = twitterImage.getAttribute('content') || ''; + } + } + + // Извлекаем og:description + const ogDesc = document.querySelector('meta[property="og:description"]'); + if (ogDesc) { + result.description = ogDesc.getAttribute('content') || ''; + } + + // Извлекаем цену из JSON-LD + const jsonLdScripts = document.querySelectorAll('script[type="application/ld+json"]'); + for (const script of jsonLdScripts) { + try { + const data = JSON.parse(script.textContent); + if (data.offers && data.offers.price) { + result.price = parseFloat(data.offers.price); + break; + } + if (data.price) { + result.price = parseFloat(data.price); + break; + } + } catch (e) {} + } + + // Если не нашли в JSON-LD, ищем в meta тегах + if 
(!result.price) { + const priceMeta = document.querySelector('meta[property="product:price:amount"]'); + if (priceMeta) { + result.price = parseFloat(priceMeta.getAttribute('content')); + } + } + + // Нормализуем URL изображения + if (result.image && !result.image.startsWith('http')) { + const baseURL = window.location.origin; + if (result.image.startsWith('//')) { + result.image = window.location.protocol + result.image; + } else if (result.image.startsWith('/')) { + result.image = baseURL + result.image; + } else { + result.image = baseURL + '/' + result.image; + } + } + + return result; + })(); + `, &result), + ) + + if err != nil { + return nil, fmt.Errorf("error extracting metadata via Chrome: %w", err) + } + + // Преобразуем map в структуру + if title, ok := result["title"].(string); ok { + metadata.Title = strings.TrimSpace(title) + } + if image, ok := result["image"].(string); ok { + metadata.Image = strings.TrimSpace(image) + } + if desc, ok := result["description"].(string); ok { + metadata.Description = strings.TrimSpace(desc) + } + if priceVal := result["price"]; priceVal != nil { + if priceFloat, ok := priceVal.(float64); ok { + if priceFloat > 0 && priceFloat < 100000000 { + metadata.Price = &priceFloat + } + } + } + + // Валидация и очистка данных + if metadata.Title != "" { + metadata.Title = strings.TrimSpace(metadata.Title) + if strings.Contains(strings.ToLower(metadata.Title), "робот") || + strings.Contains(strings.ToLower(metadata.Title), "captcha") || + strings.Contains(strings.ToLower(metadata.Title), "вы не робот") { + metadata.Title = "" + metadata.Image = "" + metadata.Description = "" + } + } + + if metadata.Price != nil && (*metadata.Price <= 0 || *metadata.Price >= 100000000) { + metadata.Price = nil + } + + return metadata, nil +} + +// extractLinkMetadataHandler извлекает метаданные (Open Graph, Title, Image) из URL +// Использует HTTP-метод как основной (стандартный подход), chromedp как fallback +func (a *App) 
extractLinkMetadataHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + var req struct { + URL string `json:"url"` + } + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding metadata request body: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + if req.URL == "" { + log.Printf("Empty URL in metadata request") + sendErrorWithCORS(w, "URL is required", http.StatusBadRequest) + return + } + + // Валидация URL + _, err := url.Parse(req.URL) + if err != nil { + log.Printf("Invalid URL format: %s, error: %v", req.URL, err) + sendErrorWithCORS(w, "Invalid URL", http.StatusBadRequest) + return + } + + log.Printf("Extracting metadata for URL: %s", req.URL) + + // Шаг 1: Пытаемся получить метаданные через HTTP-метод (основной, быстрый метод) + metadata, err := extractMetadataViaHTTP(req.URL) + if err != nil { + log.Printf("HTTP method failed for URL %s: %v", req.URL, err) + metadata = &LinkMetadataResponse{} // Инициализируем пустую структуру для fallback + } + + // Шаг 2: Проверяем, достаточно ли данных из HTTP-метода + // Если нет title и image, используем chromedp fallback + needsFallback := (metadata.Title == "" && metadata.Image == "") + + if needsFallback { + log.Printf("HTTP method didn't return enough data for URL %s, trying Chrome fallback", req.URL) + chromeMetadata, chromeErr := extractMetadataViaChrome(req.URL) + if chromeErr != nil { + log.Printf("Chrome fallback failed for URL %s: %v", req.URL, chromeErr) + // Возвращаем результаты HTTP-метода, даже если они пустые + } else { + // Объединяем результаты: приоритет у HTTP, дополняем из Chrome + if metadata.Title == "" && chromeMetadata.Title != "" { + metadata.Title = chromeMetadata.Title + log.Printf("Chrome fallback provided title: %s", chromeMetadata.Title) + } + if metadata.Image == "" && chromeMetadata.Image != 
"" { + metadata.Image = chromeMetadata.Image + log.Printf("Chrome fallback provided image: %s", chromeMetadata.Image) + } + if metadata.Description == "" && chromeMetadata.Description != "" { + metadata.Description = chromeMetadata.Description + } + if metadata.Price == nil && chromeMetadata.Price != nil { + metadata.Price = chromeMetadata.Price + } + } + } else { + log.Printf("HTTP method successfully extracted metadata for URL %s", req.URL) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(metadata) +} + +// proxyImageHandler проксирует изображение через бэкенд для обхода CORS +func (a *App) proxyImageHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + imageURL := r.URL.Query().Get("url") + if imageURL == "" { + sendErrorWithCORS(w, "URL parameter is required", http.StatusBadRequest) + return + } + + // Валидация URL + parsedURL, err := url.Parse(imageURL) + if err != nil || parsedURL.Scheme == "" || parsedURL.Host == "" { + log.Printf("Invalid image URL: %s", imageURL) + sendErrorWithCORS(w, "Invalid URL", http.StatusBadRequest) + return + } + + log.Printf("Proxying image for user %d: %s", userID, imageURL) + + // Создаем HTTP запрос к изображению + client := &http.Client{ + Timeout: 30 * time.Second, + } + + req, err := http.NewRequest("GET", imageURL, nil) + if err != nil { + log.Printf("Error creating image request: %v", err) + sendErrorWithCORS(w, "Error creating request", http.StatusInternalServerError) + return + } + + // Устанавливаем User-Agent для имитации браузера + req.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36") + req.Header.Set("Referer", 
parsedURL.Scheme+"://"+parsedURL.Host+"/") + + resp, err := client.Do(req) + if err != nil { + log.Printf("Error fetching image: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error fetching image: %v", err), http.StatusBadGateway) + return + } + defer resp.Body.Close() + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + log.Printf("Image fetch returned status %d", resp.StatusCode) + sendErrorWithCORS(w, fmt.Sprintf("HTTP %d", resp.StatusCode), http.StatusBadGateway) + return + } + + // Ограничиваем размер (максимум 5MB) + limitedReader := io.LimitReader(resp.Body, 5*1024*1024) + bodyBytes, err := io.ReadAll(limitedReader) + if err != nil { + log.Printf("Error reading image: %v", err) + sendErrorWithCORS(w, "Error reading image", http.StatusInternalServerError) + return + } + + // Определяем Content-Type + contentType := resp.Header.Get("Content-Type") + if contentType == "" { + // Пытаемся определить по содержимому + if len(bodyBytes) >= 2 { + if bodyBytes[0] == 0xFF && bodyBytes[1] == 0xD8 { + contentType = "image/jpeg" + } else if len(bodyBytes) >= 8 && string(bodyBytes[0:8]) == "\x89PNG\r\n\x1a\n" { + contentType = "image/png" + } else if len(bodyBytes) >= 4 && string(bodyBytes[0:4]) == "RIFF" { + contentType = "image/webp" + } else { + contentType = "application/octet-stream" + } + } + } + + // Проверяем, что это изображение + if !strings.HasPrefix(contentType, "image/") { + log.Printf("Invalid content type: %s", contentType) + sendErrorWithCORS(w, "Not an image", http.StatusBadRequest) + return + } + + // Отправляем изображение + w.Header().Set("Content-Type", contentType) + w.Header().Set("Content-Length", strconv.Itoa(len(bodyBytes))) + w.Header().Set("Cache-Control", "public, max-age=3600") + w.WriteHeader(http.StatusOK) + w.Write(bodyBytes) +} + +// ============================================ +// Tracking handlers +// ============================================ + +// getWeeklyStatsDataForUserAndWeek получает данные о проектах для конкретного 
пользователя и недели +func (a *App) getWeeklyStatsDataForUserAndWeek(userID int, year int, week int) (*WeeklyStatsResponse, error) { + // Определяем, является ли это текущей неделей + now := time.Now() + currentYear, currentWeek := now.ISOWeek() + isCurrentWeek := year == currentYear && week == currentWeek + + var currentWeekScores map[int]float64 + var err error + + if isCurrentWeek { + // Для текущей недели используем realtime данные + currentWeekScores, err = a.getCurrentWeekScores(userID) + if err != nil { + log.Printf("Error getting current week scores: %v", err) + return nil, fmt.Errorf("error getting current week scores: %w", err) + } + } else { + // Для исторических недель используем пустой map (данные из MV) + currentWeekScores = make(map[int]float64) + } + + query := ` + SELECT + p.id AS project_id, + p.name AS project_name, + COALESCE(wr.total_score, 0.0000) AS total_score, + wg.min_goal_score, + wg.max_goal_score, + wg.priority AS priority, + p.color + FROM + projects p + INNER JOIN + weekly_goals wg ON wg.project_id = p.id + AND wg.goal_year = $2 + AND wg.goal_week = $3 + LEFT JOIN + weekly_report_mv wr + ON p.id = wr.project_id + AND $2 = wr.report_year + AND $3 = wr.report_week + WHERE + p.deleted = FALSE AND p.user_id = $1 + AND wg.min_goal_score IS NOT NULL + ORDER BY + total_score DESC + ` + + rows, err := a.DB.Query(query, userID, year, week) + if err != nil { + return nil, fmt.Errorf("error querying weekly stats: %w", err) + } + defer rows.Close() + + projects := make([]WeeklyProjectStats, 0) + groups := make(map[int][]float64) + + for rows.Next() { + var project WeeklyProjectStats + var projectID int + var minGoalScore sql.NullFloat64 + var maxGoalScore sql.NullFloat64 + var priority sql.NullInt64 + + err := rows.Scan( + &projectID, + &project.ProjectName, + &project.TotalScore, + &minGoalScore, + &maxGoalScore, + &priority, + &project.Color, + ) + if err != nil { + return nil, fmt.Errorf("error scanning weekly stats row: %w", err) + } + + // 
Merge data sources: if this is the current week and live scores exist, use them instead of the MV snapshot
		if isCurrentWeek {
			if currentWeekScore, exists := currentWeekScores[projectID]; exists {
				project.TotalScore = currentWeekScore
			}
		}

		if minGoalScore.Valid {
			project.MinGoalScore = minGoalScore.Float64
		} else {
			project.MinGoalScore = 0
		}

		if maxGoalScore.Valid {
			maxGoalVal := maxGoalScore.Float64
			project.MaxGoalScore = &maxGoalVal
		}

		var priorityVal int
		if priority.Valid {
			priorityVal = int(priority.Int64)
			project.Priority = &priorityVal
		}

		// Compute calculated_score (progress percentage) from the goals.
		totalScore := project.TotalScore
		minGoalScoreVal := project.MinGoalScore
		var maxGoalScoreVal float64
		if project.MaxGoalScore != nil {
			maxGoalScoreVal = *project.MaxGoalScore
		}

		// Bonus ceiling depends on priority: priority 1 caps at +50,
		// priority 2 at +35, everything else at +20 percentage points.
		var extraBonusLimit float64 = 20
		if priorityVal == 1 {
			extraBonusLimit = 50
		} else if priorityVal == 2 {
			extraBonusLimit = 35
		}

		// Mirror of the frontend progress formula:
		// min_goal -> 100%, max_goal -> 150%/135%/120% depending on priority.
		var resultScore float64
		if minGoalScoreVal <= 0 {
			// No minGoal configured: progress is undefined, report 0.
			resultScore = 0
		} else if totalScore < minGoalScoreVal {
			// Below minGoal: grow linearly from 0 to 100%.
			resultScore = (totalScore / minGoalScoreVal) * 100.0
		} else {
			// minGoal reached: base progress is 100%.
			baseProgress := 100.0

			// If maxGoal is set and above minGoal, add extra progress
			// proportionally within the [minGoal, maxGoal] range, clamped
			// to the priority-dependent bonus ceiling.
			if maxGoalScoreVal > minGoalScoreVal {
				extraRange := maxGoalScoreVal - minGoalScoreVal
				excess := min(totalScore, maxGoalScoreVal) - minGoalScoreVal
				extraRatio := min(1.0, max(0.0, excess/extraRange))
				extraProgress := extraRatio * extraBonusLimit
				resultScore = min(100.0+extraBonusLimit, baseProgress+extraProgress)
			} else {
				// maxGoal missing or invalid: cap at exactly 100%.
				resultScore = baseProgress
}
		}

		project.CalculatedScore = roundToTwoDecimals(resultScore)
		projects = append(projects, project)

		// Bucket scores by priority for the overall-progress calculation;
		// projects without a positive minGoal are excluded from the total.
		if minGoalScoreVal > 0 {
			if _, exists := groups[priorityVal]; !exists {
				groups[priorityVal] = make([]float64, 0)
			}
			groups[priorityVal] = append(groups[priorityVal], project.CalculatedScore)
		}
	}

	// Per-priority-group percentages.
	groupsProgress := calculateGroupsProgress(groups)

	// Overall completion percentage across all groups.
	total := calculateOverallProgress(groupsProgress, groups)

	response := WeeklyStatsResponse{
		Total:          total,
		GroupProgress1: groupsProgress.Group1,
		GroupProgress2: groupsProgress.Group2,
		GroupProgress0: groupsProgress.Group0,
		Projects:       projects,
	}

	return &response, nil
}

// getISOWeek returns the ISO 8601 week number for the given date.
func getISOWeek(t time.Time) int {
	_, week := t.ISOWeek()
	return week
}

// getTrackingStatsHandler returns weekly stats for the current user and the
// users they track. Optional "year"/"week" query params select a historical
// week; when either is absent, the current ISO week is used.
func (a *App) getTrackingStatsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "OPTIONS" {
		setCORSHeaders(w)
		w.WriteHeader(http.StatusOK)
		return
	}
	setCORSHeaders(w)

	userID, ok := getUserIDFromContext(r)
	if !ok {
		sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
		return
	}

	// Read year and week from the query params.
	yearStr := r.URL.Query().Get("year")
	weekStr := r.URL.Query().Get("week")

	var year, week int
	now := time.Now()

	if yearStr == "" || weekStr == "" {
		// Not specified — default to the current ISO week.
		year, week = now.ISOWeek()
	} else {
		var err error
		year, err = strconv.Atoi(yearStr)
		if err != nil {
			sendErrorWithCORS(w, "Invalid year parameter", http.StatusBadRequest)
			return
		}
		week, err = strconv.Atoi(weekStr)
		if err != nil {
			sendErrorWithCORS(w, "Invalid week parameter", http.StatusBadRequest)
			return
		}
	}

	// Fetch the list of users this user tracks.
	rows, err := 
a.DB.Query(` + SELECT tracked_id + FROM user_tracking + WHERE tracker_id = $1 + `, userID) + if err != nil { + log.Printf("Error getting tracked users: %v", err) + sendErrorWithCORS(w, "Error getting tracked users", http.StatusInternalServerError) + return + } + defer rows.Close() + + trackedUserIDs := []int{userID} // Начинаем с текущего пользователя + for rows.Next() { + var trackedID int + if err := rows.Scan(&trackedID); err != nil { + log.Printf("Error scanning tracked user: %v", err) + continue + } + trackedUserIDs = append(trackedUserIDs, trackedID) + } + + // Получаем данные для каждого пользователя + users := make([]TrackingUserStats, 0) + for _, uid := range trackedUserIDs { + stats, err := a.getWeeklyStatsDataForUserAndWeek(uid, year, week) + if err != nil { + log.Printf("Error getting stats for user %d: %v", uid, err) + continue + } + + // Получаем имя пользователя + var userName string + err = a.DB.QueryRow(`SELECT COALESCE(name, email) FROM users WHERE id = $1`, uid).Scan(&userName) + if err != nil { + log.Printf("Error getting user name: %v", err) + userName = "Unknown" + } + + // Преобразуем проекты в TrackingProjectStats + projects := make([]TrackingProjectStats, 0) + for _, p := range stats.Projects { + projects = append(projects, TrackingProjectStats{ + ProjectName: p.ProjectName, + CalculatedScore: p.CalculatedScore, + Priority: p.Priority, + }) + } + + // Сортируем проекты по priority (1, 2, остальные) + sort.Slice(projects, func(i, j int) bool { + pi, pj := 99, 99 + if projects[i].Priority != nil { + pi = *projects[i].Priority + } + if projects[j].Priority != nil { + pj = *projects[j].Priority + } + return pi < pj + }) + + users = append(users, TrackingUserStats{ + UserID: uid, + UserName: userName, + IsCurrentUser: uid == userID, + Total: stats.Total, + Projects: projects, + }) + } + + // Сортируем: текущий пользователь всегда первый + sortedUsers := make([]TrackingUserStats, 0) + for _, u := range users { + if u.IsCurrentUser { + sortedUsers 
= append([]TrackingUserStats{u}, sortedUsers...) + } else { + sortedUsers = append(sortedUsers, u) + } + } + + response := TrackingStatsResponse{ + WeekNumber: week, + Year: year, + Users: sortedUsers, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// createTrackingInviteHandler создает токен приглашения +func (a *App) createTrackingInviteHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + // Генерируем токен + token := generateInviteToken() + + // Сохраняем в базу с истечением через 1 час + _, err := a.DB.Exec(` + INSERT INTO tracking_invite_tokens (user_id, token, expires_at) + VALUES ($1, $2, NOW() + INTERVAL '1 hour') + `, userID, token) + if err != nil { + log.Printf("Error creating tracking invite token: %v", err) + sendErrorWithCORS(w, "Error creating invite token", http.StatusInternalServerError) + return + } + + // Формируем URL + baseURL := getEnv("WEBHOOK_BASE_URL", "") + inviteURL := baseURL + "/tracking/invite/" + token + + response := TrackingInviteResponse{ + InviteURL: inviteURL, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// getTrackingInviteInfoHandler возвращает информацию о приглашении +func (a *App) getTrackingInviteInfoHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + vars := mux.Vars(r) + token := vars["token"] + + var userID int + var expiresAt time.Time + err := a.DB.QueryRow(` + SELECT user_id, expires_at + FROM tracking_invite_tokens + WHERE token = $1 + `, token).Scan(&userID, &expiresAt) + + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Ссылка недействительна или 
устарела", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error getting invite token: %v", err) + sendErrorWithCORS(w, "Error getting invite info", http.StatusInternalServerError) + return + } + + // Проверяем срок действия + if time.Now().After(expiresAt) { + sendErrorWithCORS(w, "Ссылка недействительна или устарела", http.StatusNotFound) + return + } + + // Получаем имя пользователя + var userName string + err = a.DB.QueryRow(`SELECT COALESCE(name, email) FROM users WHERE id = $1`, userID).Scan(&userName) + if err != nil { + log.Printf("Error getting user name: %v", err) + sendErrorWithCORS(w, "Error getting user info", http.StatusInternalServerError) + return + } + + response := TrackingInviteInfo{ + UserID: userID, + UserName: userName, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// acceptTrackingInviteHandler принимает приглашение на отслеживание +func (a *App) acceptTrackingInviteHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + currentUserID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + token := vars["token"] + + // Получаем информацию о токене + var trackedUserID int + var expiresAt time.Time + err := a.DB.QueryRow(` + SELECT user_id, expires_at + FROM tracking_invite_tokens + WHERE token = $1 AND expires_at > NOW() + `, token).Scan(&trackedUserID, &expiresAt) + + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "Ссылка недействительна или устарела", http.StatusNotFound) + return + } + if err != nil { + log.Printf("Error getting invite token: %v", err) + sendErrorWithCORS(w, "Error getting invite info", http.StatusInternalServerError) + return + } + + // Проверяем, что пользователь не пытается отслеживать себя + if trackedUserID == currentUserID { + 
sendErrorWithCORS(w, "Нельзя отслеживать себя", http.StatusBadRequest) + return + } + + // Создаем запись отслеживания + _, err = a.DB.Exec(` + INSERT INTO user_tracking (tracker_id, tracked_id) + VALUES ($1, $2) + ON CONFLICT (tracker_id, tracked_id) DO NOTHING + `, currentUserID, trackedUserID) + if err != nil { + log.Printf("Error creating tracking relation: %v", err) + sendErrorWithCORS(w, "Error accepting invite", http.StatusInternalServerError) + return + } + + // Удаляем использованный токен (одноразовый) + _, err = a.DB.Exec(`DELETE FROM tracking_invite_tokens WHERE token = $1`, token) + if err != nil { + log.Printf("Error deleting used token: %v", err) + // Не критично, продолжаем + } + + // Получаем имя пользователя для ответа + var userName string + a.DB.QueryRow(`SELECT COALESCE(name, email) FROM users WHERE id = $1`, trackedUserID).Scan(&userName) + + response := map[string]interface{}{ + "success": true, + "tracked_user_name": userName, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// getTrackingAccessHandler возвращает списки трекеров и отслеживаемых +func (a *App) getTrackingAccessHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + // Trackers (кто меня отслеживает) + trackers := make([]TrackingUser, 0) + rows, err := a.DB.Query(` + SELECT ut.id as relation_id, u.id, COALESCE(u.name, u.email) as name, u.email, ut.created_at + FROM user_tracking ut + JOIN users u ON ut.tracker_id = u.id + WHERE ut.tracked_id = $1 + ORDER BY ut.created_at DESC + `, userID) + if err != nil { + log.Printf("Error getting trackers: %v", err) + sendErrorWithCORS(w, "Error getting trackers", http.StatusInternalServerError) + return + } + defer rows.Close() + + for rows.Next() { 
+ var t TrackingUser + if err := rows.Scan(&t.RelationID, &t.ID, &t.Name, &t.Email, &t.CreatedAt); err != nil { + log.Printf("Error scanning tracker: %v", err) + continue + } + trackers = append(trackers, t) + } + + // Tracked (кого я отслеживаю) + tracked := make([]TrackingUser, 0) + rows, err = a.DB.Query(` + SELECT ut.id as relation_id, u.id, COALESCE(u.name, u.email) as name, u.email, ut.created_at + FROM user_tracking ut + JOIN users u ON ut.tracked_id = u.id + WHERE ut.tracker_id = $1 + ORDER BY ut.created_at DESC + `, userID) + if err != nil { + log.Printf("Error getting tracked: %v", err) + sendErrorWithCORS(w, "Error getting tracked", http.StatusInternalServerError) + return + } + defer rows.Close() + + for rows.Next() { + var t TrackingUser + if err := rows.Scan(&t.RelationID, &t.ID, &t.Name, &t.Email, &t.CreatedAt); err != nil { + log.Printf("Error scanning tracked: %v", err) + continue + } + tracked = append(tracked, t) + } + + response := TrackingAccessResponse{ + Trackers: trackers, + Tracked: tracked, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +// deleteTrackingTrackerHandler удаляет того, кто меня отслеживает +func (a *App) deleteTrackingTrackerHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + relationIDStr := vars["id"] + relationID, err := strconv.Atoi(relationIDStr) + if err != nil { + sendErrorWithCORS(w, "Invalid relation ID", http.StatusBadRequest) + return + } + + // Удаляем только если это действительно тот, кто отслеживает меня + result, err := a.DB.Exec(` + DELETE FROM user_tracking + WHERE id = $1 AND tracked_id = $2 + `, relationID, userID) + if err != nil { + log.Printf("Error deleting tracker relation: %v", err) + 
sendErrorWithCORS(w, "Error deleting relation", http.StatusInternalServerError) + return + } + + rowsAffected, _ := result.RowsAffected() + if rowsAffected == 0 { + sendErrorWithCORS(w, "Relation not found or access denied", http.StatusNotFound) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{"success": true}) +} + +// deleteTrackingTrackedHandler прекращает отслеживать пользователя +func (a *App) deleteTrackingTrackedHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + vars := mux.Vars(r) + relationIDStr := vars["id"] + relationID, err := strconv.Atoi(relationIDStr) + if err != nil { + sendErrorWithCORS(w, "Invalid relation ID", http.StatusBadRequest) + return + } + + // Удаляем только если это действительно тот, кого я отслеживаю + result, err := a.DB.Exec(` + DELETE FROM user_tracking + WHERE id = $1 AND tracker_id = $2 + `, relationID, userID) + if err != nil { + log.Printf("Error deleting tracked relation: %v", err) + sendErrorWithCORS(w, "Error deleting relation", http.StatusInternalServerError) + return + } + + rowsAffected, _ := result.RowsAffected() + if rowsAffected == 0 { + sendErrorWithCORS(w, "Relation not found or access denied", http.StatusNotFound) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{"success": true}) +} + +// decodeHTMLEntities декодирует базовые HTML entities +func decodeHTMLEntities(s string) string { + replacements := map[string]string{ + "&": "&", + "<": "<", + ">": ">", + """: "\"", + "'": "'", + "'": "'", + " ": " ", + "—": "—", + "–": "–", + "«": "«", + "»": "»", + } + for entity, char := range replacements { + s = strings.ReplaceAll(s, entity, char) 
+ } + return s +} + +// refreshGroupSuggestionsMV обновляет materialized view для групповых саджестов +func (a *App) refreshGroupSuggestionsMV() error { + _, err := a.DB.Exec("REFRESH MATERIALIZED VIEW CONCURRENTLY user_group_suggestions_mv") + if err != nil { + log.Printf("Error refreshing user_group_suggestions_mv: %v", err) + return err + } + return nil +} + +// getGroupSuggestionsHandler возвращает список уникальных имён групп для текущего пользователя +func (a *App) getGroupSuggestionsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + userID, ok := getUserIDFromContext(r) + if !ok { + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + + query := ` + SELECT DISTINCT group_name + FROM user_group_suggestions_mv + WHERE user_id = $1 + ORDER BY group_name + ` + + rows, err := a.DB.Query(query, userID) + if err != nil { + log.Printf("Error querying group suggestions: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying group suggestions: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + groups := make([]string, 0) + for rows.Next() { + var groupName string + if err := rows.Scan(&groupName); err != nil { + log.Printf("Error scanning group name: %v", err) + continue + } + groups = append(groups, groupName) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(groups) +} diff --git a/play-life-backend/migrations/000001_baseline.down.sql b/play-life-backend/migrations/000001_baseline.down.sql new file mode 100644 index 0000000..1e67506 --- /dev/null +++ b/play-life-backend/migrations/000001_baseline.down.sql @@ -0,0 +1,3 @@ +-- Baseline migration cannot be rolled back +-- This is the initial state of the database schema +-- If you need to revert, you must manually drop all tables and recreate from scratch diff --git 
a/play-life-backend/migrations/000001_baseline.up.sql b/play-life-backend/migrations/000001_baseline.up.sql new file mode 100644 index 0000000..b5300cd --- /dev/null +++ b/play-life-backend/migrations/000001_baseline.up.sql @@ -0,0 +1,497 @@ +-- Baseline Migration: Complete database schema +-- This migration represents the current state of the database schema +-- For existing databases, use: migrate force 1 (do not run this migration) +-- For new databases, this will create the complete schema + +-- ============================================ +-- Core Tables (no dependencies) +-- ============================================ + +-- Users table (base for multi-tenancy) +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL UNIQUE, + password_hash VARCHAR(255) NOT NULL, + name VARCHAR(255), + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + is_active BOOLEAN DEFAULT TRUE, + last_login_at TIMESTAMP WITH TIME ZONE +); + +CREATE INDEX idx_users_email ON users(email); + +-- Dictionaries table +CREATE TABLE dictionaries ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE +); + +CREATE INDEX idx_dictionaries_user_id ON dictionaries(user_id); + +-- Insert default dictionary with id = 0 +DO $$ +BEGIN + -- Set sequence to -1 so next value will be 0 + PERFORM setval('dictionaries_id_seq', -1, false); + + -- Insert the default dictionary with id = 0 + INSERT INTO dictionaries (id, name) + VALUES (0, 'Все слова') + ON CONFLICT (id) DO NOTHING; + + -- Set the sequence to start from 1 (so next auto-increment will be 1) + PERFORM setval('dictionaries_id_seq', 1, false); +EXCEPTION + WHEN others THEN + -- If sequence doesn't exist or other error, try without sequence manipulation + INSERT INTO dictionaries (id, name) + VALUES (0, 'Все слова') + ON CONFLICT (id) DO NOTHING; +END $$; + +-- Projects table +CREATE TABLE 
projects ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + priority SMALLINT, + deleted BOOLEAN NOT NULL DEFAULT FALSE, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE +); + +CREATE INDEX idx_projects_deleted ON projects(deleted); +CREATE INDEX idx_projects_user_id ON projects(user_id); + +-- Entries table +CREATE TABLE entries ( + id SERIAL PRIMARY KEY, + text TEXT NOT NULL, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE +); + +CREATE INDEX idx_entries_user_id ON entries(user_id); + +-- ============================================ +-- Dependent Tables +-- ============================================ + +-- Words table (depends on dictionaries, users) +CREATE TABLE words ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + translation TEXT NOT NULL, + description TEXT, + dictionary_id INTEGER NOT NULL DEFAULT 0 REFERENCES dictionaries(id), + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE +); + +CREATE INDEX idx_words_dictionary_id ON words(dictionary_id); +CREATE INDEX idx_words_user_id ON words(user_id); + +-- Progress table (depends on words, users) +CREATE TABLE progress ( + id SERIAL PRIMARY KEY, + word_id INTEGER NOT NULL REFERENCES words(id) ON DELETE CASCADE, + success INTEGER DEFAULT 0, + failure INTEGER DEFAULT 0, + last_success_at TIMESTAMP, + last_failure_at TIMESTAMP, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, + CONSTRAINT progress_word_user_unique UNIQUE (word_id, user_id) +); + +CREATE INDEX idx_progress_user_id ON progress(user_id); +CREATE UNIQUE INDEX idx_progress_word_user_unique ON progress(word_id, user_id); + +-- Configs table (depends on users) +CREATE TABLE configs ( + id SERIAL PRIMARY KEY, + words_count INTEGER NOT NULL, + max_cards INTEGER, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE +); + +CREATE INDEX idx_configs_user_id ON configs(user_id); + +-- Config dictionaries table (depends on configs, 
dictionaries) +CREATE TABLE config_dictionaries ( + config_id INTEGER NOT NULL REFERENCES configs(id) ON DELETE CASCADE, + dictionary_id INTEGER NOT NULL REFERENCES dictionaries(id) ON DELETE CASCADE, + PRIMARY KEY (config_id, dictionary_id) +); + +CREATE INDEX idx_config_dictionaries_config_id ON config_dictionaries(config_id); +CREATE INDEX idx_config_dictionaries_dictionary_id ON config_dictionaries(dictionary_id); + +-- Nodes table (depends on projects, entries, users) +CREATE TABLE nodes ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + entry_id INTEGER NOT NULL REFERENCES entries(id) ON DELETE CASCADE, + score NUMERIC(8,4), + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE +); + +CREATE INDEX idx_nodes_project_id ON nodes(project_id); +CREATE INDEX idx_nodes_entry_id ON nodes(entry_id); +CREATE INDEX idx_nodes_user_id ON nodes(user_id); + +-- Weekly goals table (depends on projects, users) +CREATE TABLE weekly_goals ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + goal_year INTEGER NOT NULL, + goal_week INTEGER NOT NULL, + min_goal_score NUMERIC(10,4) NOT NULL DEFAULT 0, + max_goal_score NUMERIC(10,4), + max_score NUMERIC(10,4), + priority SMALLINT, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, + CONSTRAINT weekly_goals_project_id_goal_year_goal_week_key UNIQUE (project_id, goal_year, goal_week) +); + +CREATE INDEX idx_weekly_goals_project_id ON weekly_goals(project_id); +CREATE INDEX idx_weekly_goals_user_id ON weekly_goals(user_id); + +-- Tasks table (depends on users) +CREATE TABLE tasks ( + id SERIAL PRIMARY KEY, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + completed INTEGER DEFAULT 0, + last_completed_at TIMESTAMP WITH TIME ZONE, + parent_task_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE, + reward_message TEXT, + progression_base NUMERIC(10,4), + deleted BOOLEAN DEFAULT FALSE, + 
repetition_period INTERVAL, + next_show_at TIMESTAMP WITH TIME ZONE, + repetition_date TEXT, + config_id INTEGER REFERENCES configs(id) ON DELETE SET NULL, + wishlist_id INTEGER, + reward_policy VARCHAR(20) DEFAULT 'personal' +); + +CREATE INDEX idx_tasks_user_id ON tasks(user_id); +CREATE INDEX idx_tasks_parent_task_id ON tasks(parent_task_id); +CREATE INDEX idx_tasks_deleted ON tasks(deleted); +CREATE INDEX idx_tasks_last_completed_at ON tasks(last_completed_at); +CREATE INDEX idx_tasks_config_id ON tasks(config_id); +CREATE UNIQUE INDEX idx_tasks_config_id_unique ON tasks(config_id) WHERE config_id IS NOT NULL AND deleted = FALSE; +CREATE INDEX idx_tasks_wishlist_id ON tasks(wishlist_id); +CREATE UNIQUE INDEX idx_tasks_wishlist_id_unique ON tasks(wishlist_id) WHERE wishlist_id IS NOT NULL AND deleted = FALSE; +CREATE INDEX idx_tasks_id_user_deleted ON tasks(id, user_id, deleted) WHERE deleted = FALSE; +CREATE INDEX idx_tasks_parent_deleted_covering ON tasks(parent_task_id, deleted, id) + INCLUDE (name, completed, last_completed_at, reward_message, progression_base) + WHERE deleted = FALSE; + +COMMENT ON COLUMN tasks.config_id IS 'Link to test config. 
NULL if task is not a test.'; +COMMENT ON COLUMN tasks.reward_policy IS 'For wishlist tasks: personal = only if user completes, shared = anyone completes'; + +-- Reward configs table (depends on tasks, projects) +CREATE TABLE reward_configs ( + id SERIAL PRIMARY KEY, + position INTEGER NOT NULL, + task_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE, + project_id INTEGER REFERENCES projects(id) ON DELETE CASCADE, + value NUMERIC(10,4) NOT NULL, + use_progression BOOLEAN DEFAULT FALSE +); + +CREATE INDEX idx_reward_configs_task_id ON reward_configs(task_id); +CREATE INDEX idx_reward_configs_project_id ON reward_configs(project_id); +CREATE UNIQUE INDEX idx_reward_configs_task_position ON reward_configs(task_id, position); + +-- Telegram integrations table (depends on users) +CREATE TABLE telegram_integrations ( + id SERIAL PRIMARY KEY, + chat_id VARCHAR(255), + telegram_user_id BIGINT, + start_token VARCHAR(255), + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +CREATE UNIQUE INDEX idx_telegram_integrations_user_id_unique ON telegram_integrations(user_id) WHERE user_id IS NOT NULL; +CREATE INDEX idx_telegram_integrations_user_id ON telegram_integrations(user_id); +CREATE UNIQUE INDEX idx_telegram_integrations_start_token ON telegram_integrations(start_token) WHERE start_token IS NOT NULL; +CREATE UNIQUE INDEX idx_telegram_integrations_telegram_user_id ON telegram_integrations(telegram_user_id) WHERE telegram_user_id IS NOT NULL; +CREATE INDEX idx_telegram_integrations_chat_id ON telegram_integrations(chat_id) WHERE chat_id IS NOT NULL; + +-- Todoist integrations table (depends on users) +CREATE TABLE todoist_integrations ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + todoist_user_id BIGINT, + todoist_email VARCHAR(255), + access_token TEXT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at 
TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT todoist_integrations_user_id_unique UNIQUE (user_id) +); + +CREATE INDEX idx_todoist_integrations_user_id ON todoist_integrations(user_id); +CREATE UNIQUE INDEX idx_todoist_integrations_todoist_user_id ON todoist_integrations(todoist_user_id) WHERE todoist_user_id IS NOT NULL; +CREATE UNIQUE INDEX idx_todoist_integrations_todoist_email ON todoist_integrations(todoist_email) WHERE todoist_email IS NOT NULL; + +-- Wishlist boards table (depends on users) +CREATE TABLE wishlist_boards ( + id SERIAL PRIMARY KEY, + owner_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + invite_token VARCHAR(64) UNIQUE, + invite_enabled BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + deleted BOOLEAN DEFAULT FALSE +); + +CREATE INDEX idx_wishlist_boards_owner_id ON wishlist_boards(owner_id); +CREATE INDEX idx_wishlist_boards_invite_token ON wishlist_boards(invite_token) WHERE invite_token IS NOT NULL; +CREATE INDEX idx_wishlist_boards_owner_deleted ON wishlist_boards(owner_id, deleted); + +-- Wishlist board members table (depends on wishlist_boards, users) +CREATE TABLE wishlist_board_members ( + id SERIAL PRIMARY KEY, + board_id INTEGER NOT NULL REFERENCES wishlist_boards(id) ON DELETE CASCADE, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + joined_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT unique_board_member UNIQUE (board_id, user_id) +); + +CREATE INDEX idx_board_members_board_id ON wishlist_board_members(board_id); +CREATE INDEX idx_board_members_user_id ON wishlist_board_members(user_id); + +-- Wishlist items table (depends on users, wishlist_boards) +CREATE TABLE wishlist_items ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + price NUMERIC(10,2), + image_path 
VARCHAR(500), + link TEXT, + completed BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + deleted BOOLEAN DEFAULT FALSE, + board_id INTEGER REFERENCES wishlist_boards(id) ON DELETE CASCADE, + author_id INTEGER REFERENCES users(id) ON DELETE SET NULL +); + +CREATE INDEX idx_wishlist_items_user_id ON wishlist_items(user_id); +CREATE INDEX idx_wishlist_items_user_deleted ON wishlist_items(user_id, deleted); +CREATE INDEX idx_wishlist_items_user_completed ON wishlist_items(user_id, completed, deleted); +CREATE INDEX idx_wishlist_items_board_id ON wishlist_items(board_id); +CREATE INDEX idx_wishlist_items_author_id ON wishlist_items(author_id); +CREATE INDEX idx_wishlist_items_id_deleted_covering ON wishlist_items(id, deleted) + INCLUDE (name) + WHERE deleted = FALSE; + +-- Add foreign key for tasks.wishlist_id after wishlist_items is created +ALTER TABLE tasks ADD CONSTRAINT tasks_wishlist_id_fkey + FOREIGN KEY (wishlist_id) REFERENCES wishlist_items(id) ON DELETE SET NULL; + +COMMENT ON TABLE wishlist_items IS 'Wishlist items for users'; +COMMENT ON COLUMN wishlist_items.completed IS 'Flag indicating item was purchased/received'; +COMMENT ON COLUMN wishlist_items.image_path IS 'Path to image file relative to uploads root'; + +-- Task conditions table (depends on tasks) +CREATE TABLE task_conditions ( + id SERIAL PRIMARY KEY, + task_id INTEGER NOT NULL REFERENCES tasks(id) ON DELETE CASCADE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT unique_task_condition UNIQUE (task_id) +); + +CREATE INDEX idx_task_conditions_task_id ON task_conditions(task_id); + +COMMENT ON TABLE task_conditions IS 'Reusable unlock conditions based on task completion'; + +-- Score conditions table (depends on projects, users) +CREATE TABLE score_conditions ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + 
required_points NUMERIC(10,4) NOT NULL, + start_date DATE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, + CONSTRAINT unique_score_condition UNIQUE (project_id, required_points, start_date) +); + +CREATE INDEX idx_score_conditions_project_id ON score_conditions(project_id); +CREATE INDEX idx_score_conditions_user_id ON score_conditions(user_id); + +COMMENT ON TABLE score_conditions IS 'Reusable unlock conditions based on project points'; +COMMENT ON COLUMN score_conditions.start_date IS 'Date from which to start counting points. NULL means count all time.'; + +-- Wishlist conditions table (depends on wishlist_items, task_conditions, score_conditions, users) +CREATE TABLE wishlist_conditions ( + id SERIAL PRIMARY KEY, + wishlist_item_id INTEGER NOT NULL REFERENCES wishlist_items(id) ON DELETE CASCADE, + task_condition_id INTEGER REFERENCES task_conditions(id) ON DELETE CASCADE, + score_condition_id INTEGER REFERENCES score_conditions(id) ON DELETE CASCADE, + display_order INTEGER DEFAULT 0, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, + CONSTRAINT check_exactly_one_condition CHECK ( + (task_condition_id IS NOT NULL AND score_condition_id IS NULL) OR + (task_condition_id IS NULL AND score_condition_id IS NOT NULL) + ) +); + +CREATE INDEX idx_wishlist_conditions_item_id ON wishlist_conditions(wishlist_item_id); +CREATE INDEX idx_wishlist_conditions_item_order ON wishlist_conditions(wishlist_item_id, display_order); +CREATE INDEX idx_wishlist_conditions_task_condition_id ON wishlist_conditions(task_condition_id); +CREATE INDEX idx_wishlist_conditions_score_condition_id ON wishlist_conditions(score_condition_id); +CREATE INDEX idx_wishlist_conditions_user_id ON wishlist_conditions(user_id); + +COMMENT ON TABLE wishlist_conditions IS 'Links between wishlist items and unlock conditions. 
Multiple conditions per item use AND logic.'; +COMMENT ON COLUMN wishlist_conditions.display_order IS 'Order for displaying conditions in UI'; +COMMENT ON COLUMN wishlist_conditions.user_id IS 'Owner of this condition. Each user has their own goals on shared boards'; + +-- Refresh tokens table (depends on users) +CREATE TABLE refresh_tokens ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + token_hash VARCHAR(255) NOT NULL, + expires_at TIMESTAMP WITH TIME ZONE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX idx_refresh_tokens_user_id ON refresh_tokens(user_id); +CREATE INDEX idx_refresh_tokens_token_hash ON refresh_tokens(token_hash); + +-- ============================================ +-- Materialized Views +-- ============================================ + +-- Weekly report materialized view +CREATE MATERIALIZED VIEW weekly_report_mv AS +SELECT + p.id AS project_id, + agg.report_year, + agg.report_week, + COALESCE(agg.total_score, 0.0000) AS total_score, + CASE + WHEN wg.max_score IS NULL THEN COALESCE(agg.total_score, 0.0000) + ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_score) + END AS normalized_total_score +FROM + projects p +LEFT JOIN + ( + SELECT + n.project_id, + EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year, + EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week, + SUM(n.score) AS total_score + FROM + nodes n + JOIN + entries e ON n.entry_id = e.id + GROUP BY + 1, 2, 3 + ) agg + ON p.id = agg.project_id +LEFT JOIN + weekly_goals wg + ON wg.project_id = p.id + AND wg.goal_year = agg.report_year + AND wg.goal_week = agg.report_week +WHERE + p.deleted = FALSE +ORDER BY + p.id, agg.report_year, agg.report_week +WITH DATA; + +CREATE INDEX idx_weekly_report_mv_project_year_week ON weekly_report_mv(project_id, report_year, report_week); + +COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using 
ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_score snapshot.';

-- ============================================
-- Comments
-- ============================================

COMMENT ON TABLE configs IS 'Test configurations (words_count, max_cards, dictionary associations). Linked to tasks via tasks.config_id.';
COMMENT ON TABLE wishlist_boards IS 'Wishlist boards for organizing and sharing wishes';
COMMENT ON COLUMN wishlist_boards.invite_token IS 'Token for invite link, NULL = disabled';
COMMENT ON COLUMN wishlist_boards.invite_enabled IS 'Whether invite link is active';
COMMENT ON TABLE wishlist_board_members IS 'Users who joined boards via invite link (not owners)';
COMMENT ON COLUMN wishlist_items.author_id IS 'User who created this item (may differ from board owner on shared boards)';
COMMENT ON COLUMN wishlist_items.board_id IS 'Board this item belongs to';

-- ============================================
-- Additional Tables
-- ============================================

-- Eateries: food places with a precomputed distance.
CREATE TABLE eateries (
    id SERIAL PRIMARY KEY,
    name VARCHAR(255),
    address VARCHAR(255),
    type VARCHAR(50),
    distance DOUBLE PRECISION
);

-- Interesting places. id is plain INTEGER (no SERIAL) — presumably supplied
-- by an external importer; TODO confirm against the writer of this table.
CREATE TABLE interesting_places (
    id INTEGER PRIMARY KEY,
    name TEXT,
    city TEXT,
    description TEXT,
    added_at TIMESTAMP WITH TIME ZONE,
    is_visited BOOLEAN,
    phone_number TEXT,
    address TEXT,
    updated_at TIMESTAMP WITH TIME ZONE
);

-- Music groups. id is plain INTEGER (externally supplied, same note as above).
CREATE TABLE music_groups (
    id INTEGER PRIMARY KEY,
    name TEXT,
    possible_locations TEXT
);

-- n8n chat histories: raw chat messages keyed by session.
CREATE TABLE n8n_chat_histories (
    id SERIAL PRIMARY KEY,
    session_id VARCHAR(255) NOT NULL,
    message JSONB NOT NULL
);

-- Places to visit: locally generated ids and defaults.
CREATE TABLE places_to_visit (
    id SERIAL PRIMARY KEY,
    name TEXT NOT NULL,
    city TEXT,
    description TEXT,
    added_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    is_visited BOOLEAN DEFAULT FALSE,
    phone_number TEXT,
    address TEXT,
    updated_at TIMESTAMP WITH TIME ZONE
);

-- Restaurants directory.
CREATE TABLE restaurants (
    id SERIAL PRIMARY KEY,
    name VARCHAR(255),
    address VARCHAR(255),
    contact_info VARCHAR(255)
);

-- Upcoming concerts table (depends on music_groups).
CREATE TABLE upcoming_concerts (
    id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    group_id INTEGER NOT NULL REFERENCES music_groups(id),
    scheduled_at TIMESTAMP WITH TIME ZONE NOT NULL,
    venue TEXT,
    city TEXT,
    tickets_url TEXT
);

-- At most one concert per (time, city, group).
CREATE UNIQUE INDEX idx_unique_concert ON upcoming_concerts(scheduled_at, city, group_id);

-- ===== file: play-life-backend/migrations/000002_fix_weekly_goals_user_id.down.sql =====
-- Rollback migration: This migration cannot be automatically rolled back.
-- The user_id values were corrected from projects.user_id, so reverting would
-- require knowing the original incorrect values, which is not possible.
-- If rollback is needed, you would need to manually restore from a backup.
diff --git a/play-life-backend/migrations/000002_fix_weekly_goals_user_id.up.sql b/play-life-backend/migrations/000002_fix_weekly_goals_user_id.up.sql new file mode 100644 index 0000000..5c450aa --- /dev/null +++ b/play-life-backend/migrations/000002_fix_weekly_goals_user_id.up.sql @@ -0,0 +1,9 @@ +-- Migration: Fix weekly_goals.user_id by updating it from projects.user_id +-- This migration fixes the issue where weekly_goals.user_id was incorrectly set to NULL or wrong user_id +-- It updates all weekly_goals records to have the correct user_id from their associated project + +UPDATE weekly_goals wg +SET user_id = p.user_id +FROM projects p +WHERE wg.project_id = p.id + AND (wg.user_id IS NULL OR wg.user_id != p.user_id); diff --git a/play-life-backend/migrations/000003_add_reward_configs_covering_index.down.sql b/play-life-backend/migrations/000003_add_reward_configs_covering_index.down.sql new file mode 100644 index 0000000..291e61b --- /dev/null +++ b/play-life-backend/migrations/000003_add_reward_configs_covering_index.down.sql @@ -0,0 +1,3 @@ +-- Rollback migration: Remove covering index for reward_configs + +DROP INDEX IF EXISTS idx_reward_configs_task_id_covering; diff --git a/play-life-backend/migrations/000003_add_reward_configs_covering_index.up.sql b/play-life-backend/migrations/000003_add_reward_configs_covering_index.up.sql new file mode 100644 index 0000000..a22f5dc --- /dev/null +++ b/play-life-backend/migrations/000003_add_reward_configs_covering_index.up.sql @@ -0,0 +1,14 @@ +-- Migration: Add covering index for reward_configs to optimize subtask rewards queries +-- Date: 2026-01-26 +-- +-- This migration adds a covering index to optimize queries that load rewards for multiple subtasks. +-- The index includes all columns needed for the query, allowing PostgreSQL to perform +-- index-only scans without accessing the main table. 
-- Covering index for reward_configs query.
-- Includes all columns needed for rewards selection to avoid table lookups.
CREATE INDEX IF NOT EXISTS idx_reward_configs_task_id_covering
ON reward_configs(task_id, position)
INCLUDE (id, project_id, value, use_progression);

COMMENT ON INDEX idx_reward_configs_task_id_covering IS 'Covering index for rewards query - includes all selected columns to avoid table lookups. Enables index-only scans for better performance when loading rewards for multiple tasks.';

-- ===== file: play-life-backend/migrations/000004_optimize_weekly_report_mv.down.sql =====
-- Migration: Revert optimization of weekly_report_mv
-- Date: 2026-01-26
--
-- Reverts:
-- 1. created_date column on nodes
-- 2. the two supporting indexes
-- 3. the MV back to its original structure (includes the current week and
--    reads entries.created_date via a JOIN)

-- Step 1: Recreate MV with original structure
DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv;

CREATE MATERIALIZED VIEW weekly_report_mv AS
SELECT
    p.id AS project_id,
    agg.report_year,
    agg.report_week,
    COALESCE(agg.total_score, 0.0000) AS total_score,
    -- Cap the weekly score at the goal's max_score snapshot when one exists.
    CASE
        WHEN wg.max_score IS NULL THEN COALESCE(agg.total_score, 0.0000)
        ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_score)
    END AS normalized_total_score
FROM projects p
LEFT JOIN (
    SELECT
        n.project_id,
        EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year,
        EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week,
        SUM(n.score) AS total_score
    FROM nodes n
    JOIN entries e ON n.entry_id = e.id
    GROUP BY 1, 2, 3
) agg ON p.id = agg.project_id
LEFT JOIN weekly_goals wg
    ON wg.project_id = p.id
    AND wg.goal_year = agg.report_year
    AND wg.goal_week = agg.report_week
WHERE p.deleted = FALSE
ORDER BY p.id, agg.report_year, agg.report_week
WITH DATA;

CREATE INDEX idx_weekly_report_mv_project_year_week
ON weekly_report_mv(project_id, report_year, report_week);

-- Step 2: Drop indexes
DROP INDEX IF EXISTS idx_nodes_project_user_created_date;
DROP INDEX IF EXISTS idx_nodes_created_date_user;

-- Step 3: Remove created_date column from nodes
ALTER TABLE nodes
DROP COLUMN IF EXISTS created_date;

COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_score snapshot.';

-- ===== file: play-life-backend/migrations/000004_optimize_weekly_report_mv.up.sql =====
-- Migration: Optimize weekly_report_mv by denormalizing created_date into nodes
-- and excluding the current week from the MV
-- Date: 2026-01-26
--
-- 1. Adds created_date to nodes (denormalization to avoid JOIN with entries)
-- 2. Populates existing data from entries
-- 3. Creates indexes for optimized queries
-- 4. Rebuilds the MV to exclude the current week and read nodes.created_date

-- Step 1: Add created_date column to nodes
ALTER TABLE nodes
ADD COLUMN created_date TIMESTAMP WITH TIME ZONE;

-- Step 2: Populate existing data from entries
UPDATE nodes n
SET created_date = e.created_date
FROM entries e
WHERE n.entry_id = e.id;

-- Step 3: Set NOT NULL constraint (every node must belong to a dated entry)
ALTER TABLE nodes
ALTER COLUMN created_date SET NOT NULL;

-- Step 4: Indexes for current-week queries
CREATE INDEX IF NOT EXISTS idx_nodes_created_date_user
ON nodes(created_date, user_id);

CREATE INDEX IF NOT EXISTS idx_nodes_project_user_created_date
ON nodes(project_id, user_id, created_date);

COMMENT ON INDEX idx_nodes_created_date_user IS 'Index for filtering nodes by created_date and user_id - optimized for current week queries';
COMMENT ON INDEX idx_nodes_project_user_created_date IS 'Index for grouping nodes by project, user and created_date - optimized for current week aggregation queries';

-- Step 5: Recreate MV excluding the current week, using nodes.created_date
DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv;

CREATE MATERIALIZED VIEW weekly_report_mv AS
SELECT
    p.id AS project_id,
    agg.report_year,
    agg.report_week,
    COALESCE(agg.total_score, 0.0000) AS total_score,
    -- Cap the weekly score at the goal's max_score snapshot when one exists.
    CASE
        WHEN wg.max_score IS NULL THEN COALESCE(agg.total_score, 0.0000)
        ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_score)
    END AS normalized_total_score
FROM projects p
LEFT JOIN (
    SELECT
        n.project_id,
        EXTRACT(ISOYEAR FROM n.created_date)::INTEGER AS report_year,
        EXTRACT(WEEK FROM n.created_date)::INTEGER AS report_week,
        SUM(n.score) AS total_score
    FROM nodes n
    WHERE
        -- Exclude current week: only include data from previous weeks
        (EXTRACT(ISOYEAR FROM n.created_date)::INTEGER < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
        OR (EXTRACT(ISOYEAR FROM n.created_date)::INTEGER = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
            AND EXTRACT(WEEK FROM n.created_date)::INTEGER < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
    GROUP BY 1, 2, 3
) agg ON p.id = agg.project_id
LEFT JOIN weekly_goals wg
    ON wg.project_id = p.id
    AND wg.goal_year = agg.report_year
    AND wg.goal_week = agg.report_week
WHERE p.deleted = FALSE
ORDER BY p.id, agg.report_year, agg.report_week
WITH DATA;

-- Recreate index on MV
CREATE INDEX idx_weekly_report_mv_project_year_week
ON weekly_report_mv(project_id, report_year, report_week);

COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_score snapshot. Contains only historical data (excludes current week). Uses nodes.created_date (denormalized) instead of entries.created_date.';

-- ===== file: play-life-backend/migrations/000005_add_task_drafts.down.sql =====
-- Migration: Remove task drafts tables
-- Date: 2026-01-26
DROP TABLE IF EXISTS task_draft_subtasks;
DROP TABLE IF EXISTS task_drafts;

-- ===== file: play-life-backend/migrations/000005_add_task_drafts.up.sql =====
-- Migration: Add task drafts tables
-- Date: 2026-01-26
--
-- 1. task_drafts - main table for task drafts with progression value and
--    auto_complete flag
-- 2. task_draft_subtasks - stores only checked subtask IDs for each draft

-- Table: task_drafts
CREATE TABLE task_drafts (
    id SERIAL PRIMARY KEY,
    task_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE,
    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
    progression_value NUMERIC(10,4),
    auto_complete BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(task_id)
);

-- NOTE(review): idx_task_drafts_task_id is redundant — UNIQUE(task_id) above
-- already creates an index on task_id; kept to match the deployed migration.
CREATE INDEX idx_task_drafts_task_id ON task_drafts(task_id);
CREATE INDEX idx_task_drafts_user_id ON task_drafts(user_id);
-- Partial index so the end-of-day auto-complete job scans only flagged rows.
CREATE INDEX idx_task_drafts_auto_complete ON task_drafts(auto_complete) WHERE auto_complete = TRUE;

COMMENT ON TABLE task_drafts IS 'Stores draft states for tasks with progression value and auto-complete flag';
COMMENT ON COLUMN task_drafts.progression_value IS 'Saved progression value from user input';
COMMENT ON COLUMN task_drafts.auto_complete IS 'Flag indicating task should be auto-completed at end of day (23:55)';
COMMENT ON COLUMN task_drafts.task_id IS 'Reference to task. UNIQUE constraint ensures one draft per task';

-- Table: task_draft_subtasks
CREATE TABLE task_draft_subtasks (
    id SERIAL PRIMARY KEY,
    task_draft_id INTEGER REFERENCES task_drafts(id) ON DELETE CASCADE,
    subtask_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE,
    UNIQUE(task_draft_id, subtask_id)
);

CREATE INDEX idx_task_draft_subtasks_task_draft_id ON task_draft_subtasks(task_draft_id);
CREATE INDEX idx_task_draft_subtasks_subtask_id ON task_draft_subtasks(subtask_id);

COMMENT ON TABLE task_draft_subtasks IS 'Stores only checked subtask IDs for each draft. If subtask is not in this table, it means it is unchecked';
COMMENT ON COLUMN task_draft_subtasks.subtask_id IS 'Reference to subtask task. Only checked subtasks are stored here';

-- ===== file: play-life-backend/migrations/000006_fix_wishlist_id_unique_index.down.sql =====
-- Migration: Revert wishlist_id unique index fix
-- Date: 2026-01-30
--
-- Restores the original unique index that only checked wishlist_id.
DROP INDEX IF EXISTS idx_tasks_wishlist_id_user_id_unique;

CREATE UNIQUE INDEX idx_tasks_wishlist_id_unique
ON tasks(wishlist_id)
WHERE wishlist_id IS NOT NULL AND deleted = FALSE;

-- ===== file: play-life-backend/migrations/000006_fix_wishlist_id_unique_index.up.sql =====
-- Migration: Fix wishlist_id unique index to allow multiple users
-- Date: 2026-01-30
--
-- The old unique index covered only wishlist_id, so only one user could have
-- a task for a given wishlist item; replace it with a composite unique index
-- on (wishlist_id, user_id).
-- Drop the old unique index that only checked wishlist_id
DROP INDEX IF EXISTS idx_tasks_wishlist_id_unique;

-- Composite unique index: multiple users may have tasks for the same wishlist
-- item, but one user may not have two live tasks for the same item.
CREATE UNIQUE INDEX idx_tasks_wishlist_id_user_id_unique
ON tasks(wishlist_id, user_id)
WHERE wishlist_id IS NOT NULL AND deleted = FALSE;

-- ===== file: play-life-backend/migrations/000007_add_projects_median_mv.down.sql =====
-- Migration: Drop projects_median_mv materialized view
-- Date: 2026-01-30
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;

-- ===== file: play-life-backend/migrations/000007_add_projects_median_mv.up.sql =====
-- Migration: Add projects_median_mv materialized view
-- Date: 2026-01-30
--
-- Calculates the median score for each project from the last 12 weeks of
-- historical data in weekly_report_mv. Includes user_id to support
-- multi-tenant queries.
CREATE MATERIALIZED VIEW projects_median_mv AS
SELECT
    p.id AS project_id,
    p.user_id,
    PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
FROM (
    -- Rank each project's weeks newest-first; the current week is excluded
    -- so the median covers finished weeks only.
    SELECT
        project_id,
        normalized_total_score,
        report_year,
        report_week,
        ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
    FROM weekly_report_mv
    WHERE
        (report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
        OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
            AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
) sub
JOIN projects p ON p.id = sub.project_id
WHERE rn <= 12 AND p.deleted = FALSE
GROUP BY p.id, p.user_id
WITH DATA;

CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);

COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 12 weeks of historical data. Includes user_id for multi-tenant support.';

-- ===== file: play-life-backend/migrations/000008_change_median_to_4_weeks.down.sql =====
-- Migration: Revert median calculation back to 12 weeks
-- Date: 2026-02-02
--
-- Reverts projects_median_mv back to using 12 weeks.
+ +DROP MATERIALIZED VIEW IF EXISTS projects_median_mv; + +CREATE MATERIALIZED VIEW projects_median_mv AS +SELECT + p.id AS project_id, + p.user_id, + PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score +FROM ( + SELECT + project_id, + normalized_total_score, + report_year, + report_week, + ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn + FROM weekly_report_mv + WHERE + (report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER) + OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER) +) sub +JOIN projects p ON p.id = sub.project_id +WHERE rn <= 12 AND p.deleted = FALSE +GROUP BY p.id, p.user_id +WITH DATA; + +CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id); +CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id); + +COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 12 weeks of historical data. Includes user_id for multi-tenant support.'; diff --git a/play-life-backend/migrations/000008_change_median_to_4_weeks.up.sql b/play-life-backend/migrations/000008_change_median_to_4_weeks.up.sql new file mode 100644 index 0000000..90dd770 --- /dev/null +++ b/play-life-backend/migrations/000008_change_median_to_4_weeks.up.sql @@ -0,0 +1,35 @@ +-- Migration: Change median calculation from 12 weeks to 4 weeks +-- Date: 2026-02-02 +-- +-- This migration updates projects_median_mv to calculate median based on +-- the last 4 weeks instead of 12 weeks. 
+ +DROP MATERIALIZED VIEW IF EXISTS projects_median_mv; + +CREATE MATERIALIZED VIEW projects_median_mv AS +SELECT + p.id AS project_id, + p.user_id, + PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score +FROM ( + SELECT + project_id, + normalized_total_score, + report_year, + report_week, + ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn + FROM weekly_report_mv + WHERE + (report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER) + OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER) +) sub +JOIN projects p ON p.id = sub.project_id +WHERE rn <= 4 AND p.deleted = FALSE +GROUP BY p.id, p.user_id +WITH DATA; + +CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id); +CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id); + +COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 4 weeks of historical data. Includes user_id for multi-tenant support.'; diff --git a/play-life-backend/migrations/000009_add_is_admin_to_users.down.sql b/play-life-backend/migrations/000009_add_is_admin_to_users.down.sql new file mode 100644 index 0000000..413e897 --- /dev/null +++ b/play-life-backend/migrations/000009_add_is_admin_to_users.down.sql @@ -0,0 +1,9 @@ +-- Migration: Remove is_admin field from users table +-- Date: 2026-02-02 +-- +-- This migration reverts the addition of is_admin field. 
DROP INDEX IF EXISTS idx_users_is_admin;

ALTER TABLE users
DROP COLUMN IF EXISTS is_admin;

-- ===== file: play-life-backend/migrations/000009_add_is_admin_to_users.up.sql =====
-- Migration: Add is_admin field to users table
-- Date: 2026-02-02
--
-- Adds an is_admin boolean to identify admin users. Default is FALSE, so
-- existing users do not become admins automatically.
ALTER TABLE users
ADD COLUMN is_admin BOOLEAN NOT NULL DEFAULT FALSE;

CREATE INDEX idx_users_is_admin ON users(is_admin);

COMMENT ON COLUMN users.is_admin IS 'Indicates if the user has admin privileges';

-- ===== file: play-life-backend/migrations/000010_add_project_id_to_wishlist_items.down.sql =====
-- Migration: Remove project_id field from wishlist_items table
-- Date: 2026-02-02
--
-- Reverts the addition of the project_id field.
DROP INDEX IF EXISTS idx_wishlist_items_project_id;

ALTER TABLE wishlist_items
DROP COLUMN IF EXISTS project_id;

-- ===== file: play-life-backend/migrations/000010_add_project_id_to_wishlist_items.up.sql =====
-- Migration: Add project_id field to wishlist_items table
-- Date: 2026-02-02
--
-- Allows grouping wishlist items by project. The column is nullable, so
-- existing items without a project remain valid.
ALTER TABLE wishlist_items
ADD COLUMN project_id INTEGER REFERENCES projects(id) ON DELETE SET NULL;

CREATE INDEX idx_wishlist_items_project_id ON wishlist_items(project_id);

COMMENT ON COLUMN wishlist_items.project_id IS 'Project this wishlist item belongs to (optional)';

-- ===== file: play-life-backend/migrations/000011_add_color_to_projects.down.sql =====
-- Migration: Remove color field from projects table
-- Date: 2026-02-02
DROP INDEX IF EXISTS idx_projects_color;

ALTER TABLE projects
DROP COLUMN IF EXISTS color;

-- ===== file: play-life-backend/migrations/000011_add_color_to_projects.up.sql =====
-- Migration: Add color field to projects table
-- Date: 2026-02-02
--
-- Adds a mandatory color to projects for custom color selection. Existing
-- projects are assigned colors from a predefined palette.
+ +-- Добавляем поле color +ALTER TABLE projects +ADD COLUMN color VARCHAR(7) NOT NULL DEFAULT '#3B82F6'; + +-- Палитра из 30 контрастных цветов (синхронизирована с backend и frontend) +-- Заполняем существующие проекты цветами из палитры +DO $$ +DECLARE + colors TEXT[] := ARRAY[ + '#EF4444', '#F97316', '#F59E0B', '#EAB308', '#84CC16', + '#22C55E', '#10B981', '#14B8A6', '#06B6D4', '#0EA5E9', + '#3B82F6', '#6366F1', '#8B5CF6', '#A855F7', '#D946EF', + '#EC4899', '#F43F5E', '#DC2626', '#EA580C', '#CA8A04', + '#65A30D', '#16A34A', '#059669', '#0D9488', '#0891B2', + '#0284C7', '#2563EB', '#4F46E5', '#7C3AED', '#9333EA' + ]; + project_record RECORD; + color_index INTEGER := 0; +BEGIN + -- Обновляем существующие проекты, присваивая им цвета из палитры + FOR project_record IN + SELECT id FROM projects ORDER BY id + LOOP + UPDATE projects + SET color = colors[1 + (color_index % array_length(colors, 1))] + WHERE id = project_record.id; + + color_index := color_index + 1; + END LOOP; +END $$; + +-- Убираем DEFAULT, так как теперь все проекты имеют цвет +ALTER TABLE projects +ALTER COLUMN color DROP DEFAULT; + +CREATE INDEX IF NOT EXISTS idx_projects_color ON projects(color); + +COMMENT ON COLUMN projects.color IS 'Project color in HEX format (e.g., #FF5733)'; diff --git a/play-life-backend/migrations/000012_add_subtask_position.down.sql b/play-life-backend/migrations/000012_add_subtask_position.down.sql new file mode 100644 index 0000000..751fc6d --- /dev/null +++ b/play-life-backend/migrations/000012_add_subtask_position.down.sql @@ -0,0 +1,9 @@ +-- Migration: Remove position field from tasks table +-- Date: 2026-02-02 +-- +-- This migration removes the position field from tasks table. 
DROP INDEX IF EXISTS idx_tasks_parent_position;

ALTER TABLE tasks
DROP COLUMN IF EXISTS position;

-- ===== file: play-life-backend/migrations/000012_add_subtask_position.up.sql =====
-- Migration: Add position field to tasks table for subtasks ordering
-- Date: 2026-02-02
--
-- Adds a position column used to order subtasks. The column stays NULL for
-- regular tasks and holds a 0-based position for subtasks (rows with
-- parent_task_id set).
ALTER TABLE tasks
ADD COLUMN position INTEGER;

-- Backfill positions for all existing non-deleted subtasks: number them
-- 0,1,2,... by id within each parent task. A single windowed UPDATE replaces
-- the original nested PL/pgSQL loops — identical numbering, one pass; deleted
-- subtasks keep position NULL, exactly as before.
UPDATE tasks t
SET position = sub.rn
FROM (
    SELECT id,
           ROW_NUMBER() OVER (PARTITION BY parent_task_id ORDER BY id) - 1 AS rn
    FROM tasks
    WHERE parent_task_id IS NOT NULL
      AND deleted = FALSE
) sub
WHERE t.id = sub.id;

-- Partial index for fast ordered lookup of a parent's live subtasks.
CREATE INDEX idx_tasks_parent_position ON tasks(parent_task_id, position)
WHERE parent_task_id IS NOT NULL AND deleted = FALSE;

COMMENT ON COLUMN tasks.position IS 'Position of subtask within parent task. NULL for regular tasks.';

-- ===== file: play-life-backend/migrations/000013_add_user_tracking.down.sql =====
DROP TABLE IF EXISTS tracking_invite_tokens;
DROP TABLE IF EXISTS user_tracking;

-- ===== file: play-life-backend/migrations/000013_add_user_tracking.up.sql =====
-- User-to-user tracking relationships.
CREATE TABLE user_tracking (
    id SERIAL PRIMARY KEY,
    tracker_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    tracked_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT unique_tracking_pair UNIQUE (tracker_id, tracked_id),
    CONSTRAINT no_self_tracking CHECK (tracker_id != tracked_id)
);

CREATE INDEX idx_user_tracking_tracker ON user_tracking(tracker_id);
CREATE INDEX idx_user_tracking_tracked ON user_tracking(tracked_id);

-- Invite tokens (valid for 1 hour).
CREATE TABLE tracking_invite_tokens (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    token VARCHAR(64) NOT NULL UNIQUE,
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- NOTE(review): idx_tracking_invite_tokens_token is redundant — the UNIQUE
-- constraint on token already creates an index; kept to match the deployed
-- migration.
CREATE INDEX idx_tracking_invite_tokens_token ON tracking_invite_tokens(token);
CREATE INDEX idx_tracking_invite_tokens_user ON tracking_invite_tokens(user_id);

-- ===== file: play-life-backend/migrations/000014_add_group_name.down.sql =====
-- Migration: Remove group_name field from wishlist_items and tasks tables
-- Date: 2026-02-XX
--
-- Reverses the changes made in 000014_add_group_name.up.sql.

-- Step 1: Drop materialized view
DROP MATERIALIZED VIEW IF EXISTS user_group_suggestions_mv;

-- Step 2: Drop indexes on group_name
DROP INDEX IF EXISTS idx_tasks_group_name;
DROP INDEX IF EXISTS idx_wishlist_items_group_name;

-- Step 3: Remove group_name from tasks
ALTER TABLE tasks
DROP COLUMN group_name;

-- Step 4: Add back project_id to wishlist_items
ALTER TABLE wishlist_items
ADD COLUMN project_id INTEGER REFERENCES projects(id) ON DELETE SET NULL;

-- Step 5: Best-effort restore of project_id from group_name
-- (group_name might not match project names exactly).
UPDATE wishlist_items wi
SET project_id = p.id
FROM projects p
WHERE wi.group_name = p.name
  AND wi.group_name IS NOT NULL
  AND wi.group_name != ''
  AND p.deleted = FALSE;

-- Step 6: Create index on project_id
CREATE INDEX idx_wishlist_items_project_id ON wishlist_items(project_id);

-- Step 7: Remove group_name from wishlist_items
ALTER TABLE wishlist_items
DROP COLUMN group_name;

-- ===== file: play-life-backend/migrations/000014_add_group_name.up.sql =====
-- Migration: Add group_name field to wishlist_items and tasks tables
-- Date: 2026-02-XX
--
-- 1. Adds group_name to wishlist_items (replacing project_id)
-- 2. Migrates existing data from project_id to group_name
-- 3. Removes project_id from wishlist_items
-- 4. Adds group_name to tasks
-- 5. Creates materialized view for group suggestions

-- Step 1: Add group_name to wishlist_items
ALTER TABLE wishlist_items
ADD COLUMN group_name VARCHAR(255);

-- Step 2: Migrate existing data from project_id to group_name
UPDATE wishlist_items wi
SET group_name = p.name
FROM projects p
WHERE wi.project_id = p.id AND wi.project_id IS NOT NULL;

-- Step 3: Remove project_id column and its index
DROP INDEX IF EXISTS idx_wishlist_items_project_id;
ALTER TABLE wishlist_items
DROP COLUMN project_id;

-- Step 4: Add group_name to tasks
ALTER TABLE tasks
ADD COLUMN group_name VARCHAR(255);

-- Step 5: Partial indexes on group_name
CREATE INDEX idx_wishlist_items_group_name ON wishlist_items(group_name) WHERE group_name IS NOT NULL;
CREATE INDEX idx_tasks_group_name ON tasks(group_name) WHERE group_name IS NOT NULL;

-- Step 6: Materialized view of per-user group-name suggestions, unioned from
-- the user's own wishlist items, items on boards the user joined, the user's
-- tasks, and the user's project names.
CREATE MATERIALIZED VIEW user_group_suggestions_mv AS
SELECT DISTINCT user_id, group_name FROM (
    -- User's own wishlist items
    SELECT wi.user_id, wi.group_name FROM wishlist_items wi
    WHERE wi.deleted = FALSE AND wi.group_name IS NOT NULL AND wi.group_name != ''
    UNION
    -- Items on boards the user joined as a member
    SELECT wbm.user_id, wi.group_name FROM wishlist_items wi
    JOIN wishlist_board_members wbm ON wi.board_id = wbm.board_id
    WHERE wi.deleted = FALSE AND wi.group_name IS NOT NULL AND wi.group_name != ''
    UNION
    -- User's tasks
    SELECT t.user_id, t.group_name FROM tasks t
    WHERE t.deleted = FALSE AND t.group_name IS NOT NULL AND t.group_name != ''
    UNION
    -- User's project names
    SELECT p.user_id, p.name FROM projects p
    WHERE p.deleted = FALSE
) sub;

-- Step 7: Unique index required for REFRESH ... CONCURRENTLY
CREATE UNIQUE INDEX idx_user_group_suggestions_mv_user_group ON user_group_suggestions_mv(user_id, group_name);

COMMENT ON COLUMN wishlist_items.group_name IS 'Group name for wishlist item (free text, replaces project_id)';
COMMENT ON COLUMN tasks.group_name IS 'Group name for task (free text)';
COMMENT ON MATERIALIZED VIEW user_group_suggestions_mv IS 'Materialized view for group name suggestions per user';

-- ===== file: play-life-backend/migrations/000015_add_fitbit_integration.down.sql =====
DROP TABLE IF EXISTS fitbit_daily_stats;
DROP TABLE IF EXISTS fitbit_integrations;

-- ===== file: play-life-backend/migrations/000015_add_fitbit_integration.up.sql =====
-- Fitbit integrations table (depends on users)
CREATE TABLE fitbit_integrations (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    fitbit_user_id VARCHAR(255),
    access_token TEXT,
    refresh_token TEXT,
    token_expires_at TIMESTAMP WITH TIME ZONE,
    goal_steps_min INTEGER DEFAULT 8000,
    goal_steps_max INTEGER DEFAULT 10000,
    goal_floors_min INTEGER DEFAULT 8,
    goal_floors_max INTEGER DEFAULT 10,
    goal_azm_min INTEGER DEFAULT 22,
    goal_azm_max INTEGER DEFAULT 44,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT fitbit_integrations_user_id_unique UNIQUE (user_id)
);

-- NOTE(review): idx_fitbit_integrations_user_id is redundant — the UNIQUE
-- constraint on user_id already creates an index; kept to match the deployed
-- migration.
CREATE INDEX idx_fitbit_integrations_user_id ON fitbit_integrations(user_id);
CREATE UNIQUE INDEX idx_fitbit_integrations_fitbit_user_id ON fitbit_integrations(fitbit_user_id) WHERE fitbit_user_id IS NOT NULL;

-- Fitbit daily stats table (depends on users and fitbit_integrations)
CREATE TABLE fitbit_daily_stats (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON
DELETE CASCADE, + date DATE NOT NULL, + steps INTEGER DEFAULT 0, + floors INTEGER DEFAULT 0, + active_zone_minutes INTEGER DEFAULT 0, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT fitbit_daily_stats_user_date_unique UNIQUE (user_id, date) +); + +CREATE INDEX idx_fitbit_daily_stats_user_id ON fitbit_daily_stats(user_id); +CREATE INDEX idx_fitbit_daily_stats_date ON fitbit_daily_stats(date); +CREATE INDEX idx_fitbit_daily_stats_user_date ON fitbit_daily_stats(user_id, date); diff --git a/play-life-backend/migrations/000016_add_project_score_sample_mv.down.sql b/play-life-backend/migrations/000016_add_project_score_sample_mv.down.sql new file mode 100644 index 0000000..29b3458 --- /dev/null +++ b/play-life-backend/migrations/000016_add_project_score_sample_mv.down.sql @@ -0,0 +1,3 @@ +-- Migration: Drop project_score_sample_mv materialized view + +DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv; diff --git a/play-life-backend/migrations/000016_add_project_score_sample_mv.up.sql b/play-life-backend/migrations/000016_add_project_score_sample_mv.up.sql new file mode 100644 index 0000000..7b874c4 --- /dev/null +++ b/play-life-backend/migrations/000016_add_project_score_sample_mv.up.sql @@ -0,0 +1,31 @@ +-- Migration: Add project_score_sample_mv materialized view +-- +-- One row per (project_id, score, user_id): sum of nodes.score per entry, +-- representative entry_message (latest by date). Used for admin display and reporting. 
+ +CREATE MATERIALIZED VIEW project_score_sample_mv AS +WITH entry_scores AS ( + SELECT + n.project_id, + n.entry_id, + n.user_id, + SUM(n.score) AS score, + MAX(n.created_date) AS created_date + FROM nodes n + GROUP BY n.project_id, n.entry_id, n.user_id +) +SELECT DISTINCT ON (es.project_id, es.score, es.user_id) + es.project_id, + es.score, + e.text AS entry_message, + es.user_id, + es.created_date +FROM entry_scores es +JOIN entries e ON e.id = es.entry_id +ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC +WITH DATA; + +CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id); +CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id); + +COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): sum of nodes per entry, representative entry_message (latest by date).'; diff --git a/play-life-backend/migrations/000017_project_score_sample_mv_unique_entry_message.down.sql b/play-life-backend/migrations/000017_project_score_sample_mv_unique_entry_message.down.sql new file mode 100644 index 0000000..998c067 --- /dev/null +++ b/play-life-backend/migrations/000017_project_score_sample_mv_unique_entry_message.down.sql @@ -0,0 +1,30 @@ +-- Revert to previous MV definition (one row per project_id, score, user_id) + +DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv; + +CREATE MATERIALIZED VIEW project_score_sample_mv AS +WITH entry_scores AS ( + SELECT + n.project_id, + n.entry_id, + n.user_id, + SUM(n.score) AS score, + MAX(n.created_date) AS created_date + FROM nodes n + GROUP BY n.project_id, n.entry_id, n.user_id +) +SELECT DISTINCT ON (es.project_id, es.score, es.user_id) + es.project_id, + es.score, + e.text AS entry_message, + es.user_id, + es.created_date +FROM entry_scores es +JOIN entries e ON e.id = es.entry_id +ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC +WITH DATA; + +CREATE INDEX 
idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id); +CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id); + +COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): sum of nodes per entry, representative entry_message (latest by date).'; diff --git a/play-life-backend/migrations/000017_project_score_sample_mv_unique_entry_message.up.sql b/play-life-backend/migrations/000017_project_score_sample_mv_unique_entry_message.up.sql new file mode 100644 index 0000000..7b9923b --- /dev/null +++ b/play-life-backend/migrations/000017_project_score_sample_mv_unique_entry_message.up.sql @@ -0,0 +1,42 @@ +-- Migration: Make entry_message unique per (project_id, user_id) in project_score_sample_mv +-- +-- One row per (project_id, user_id, entry_message): choose the row with latest created_date. +-- Ensures the same entry_message does not repeat for different score values. + +DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv; + +CREATE MATERIALIZED VIEW project_score_sample_mv AS +WITH entry_scores AS ( + SELECT + n.project_id, + n.entry_id, + n.user_id, + SUM(n.score) AS score, + MAX(n.created_date) AS created_date + FROM nodes n + GROUP BY n.project_id, n.entry_id, n.user_id +), +with_message AS ( + SELECT + es.project_id, + es.score, + e.text AS entry_message, + es.user_id, + es.created_date + FROM entry_scores es + JOIN entries e ON e.id = es.entry_id +) +SELECT DISTINCT ON (project_id, user_id, entry_message) + project_id, + score, + entry_message, + user_id, + created_date +FROM with_message +ORDER BY project_id, user_id, entry_message, created_date DESC +WITH DATA; + +CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id); +CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id); + +COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per 
(project_id, user_id, entry_message): representative row (latest by date). entry_message is unique per project and user.'; diff --git a/play-life-backend/migrations/000018_project_score_sample_mv_one_per_score.down.sql b/play-life-backend/migrations/000018_project_score_sample_mv_one_per_score.down.sql new file mode 100644 index 0000000..8587384 --- /dev/null +++ b/play-life-backend/migrations/000018_project_score_sample_mv_one_per_score.down.sql @@ -0,0 +1,39 @@ +-- Revert to one row per (project_id, user_id, entry_message) + +DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv; + +CREATE MATERIALIZED VIEW project_score_sample_mv AS +WITH entry_scores AS ( + SELECT + n.project_id, + n.entry_id, + n.user_id, + SUM(n.score) AS score, + MAX(n.created_date) AS created_date + FROM nodes n + GROUP BY n.project_id, n.entry_id, n.user_id +), +with_message AS ( + SELECT + es.project_id, + es.score, + e.text AS entry_message, + es.user_id, + es.created_date + FROM entry_scores es + JOIN entries e ON e.id = es.entry_id +) +SELECT DISTINCT ON (project_id, user_id, entry_message) + project_id, + score, + entry_message, + user_id, + created_date +FROM with_message +ORDER BY project_id, user_id, entry_message, created_date DESC +WITH DATA; + +CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id); +CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id); + +COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): representative row (latest by date).'; diff --git a/play-life-backend/migrations/000018_project_score_sample_mv_one_per_score.up.sql b/play-life-backend/migrations/000018_project_score_sample_mv_one_per_score.up.sql new file mode 100644 index 0000000..aac8f4b --- /dev/null +++ b/play-life-backend/migrations/000018_project_score_sample_mv_one_per_score.up.sql @@ -0,0 +1,32 @@ +-- Migration: One row per (project_id, user_id, 
score) in project_score_sample_mv +-- +-- For each score value (per project and user) exactly one record; representative entry_message (latest by date). + +DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv; + +CREATE MATERIALIZED VIEW project_score_sample_mv AS +WITH entry_scores AS ( + SELECT + n.project_id, + n.entry_id, + n.user_id, + SUM(n.score) AS score, + MAX(n.created_date) AS created_date + FROM nodes n + GROUP BY n.project_id, n.entry_id, n.user_id +) +SELECT DISTINCT ON (es.project_id, es.score, es.user_id) + es.project_id, + es.score, + e.text AS entry_message, + es.user_id, + es.created_date +FROM entry_scores es +JOIN entries e ON e.id = es.entry_id +ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC +WITH DATA; + +CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id); +CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id); + +COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): one record per score, representative entry_message (latest by date).'; diff --git a/play-life-backend/migrations/000019_project_score_sample_mv_one_entry_message_per_project.down.sql b/play-life-backend/migrations/000019_project_score_sample_mv_one_entry_message_per_project.down.sql new file mode 100644 index 0000000..cdb9a5c --- /dev/null +++ b/play-life-backend/migrations/000019_project_score_sample_mv_one_entry_message_per_project.down.sql @@ -0,0 +1,30 @@ +-- Revert to one row per (project_id, score, user_id) + +DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv; + +CREATE MATERIALIZED VIEW project_score_sample_mv AS +WITH entry_scores AS ( + SELECT + n.project_id, + n.entry_id, + n.user_id, + SUM(n.score) AS score, + MAX(n.created_date) AS created_date + FROM nodes n + GROUP BY n.project_id, n.entry_id, n.user_id +) +SELECT DISTINCT ON (es.project_id, es.score, es.user_id) + es.project_id, + es.score, + 
e.text AS entry_message, + es.user_id, + es.created_date +FROM entry_scores es +JOIN entries e ON e.id = es.entry_id +ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC +WITH DATA; + +CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id); +CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id); + +COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): one record per score, representative entry_message (latest by date).'; diff --git a/play-life-backend/migrations/000019_project_score_sample_mv_one_entry_message_per_project.up.sql b/play-life-backend/migrations/000019_project_score_sample_mv_one_entry_message_per_project.up.sql new file mode 100644 index 0000000..70d6138 --- /dev/null +++ b/play-life-backend/migrations/000019_project_score_sample_mv_one_entry_message_per_project.up.sql @@ -0,0 +1,42 @@ +-- Migration: One entry_message per (project_id, user_id) in project_score_sample_mv +-- +-- One record per score (per project, user) and one record per entry_message per project. +-- DISTINCT ON (project_id, user_id, entry_message): same message with different scores → one row (latest by date). 
+ +DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv; + +CREATE MATERIALIZED VIEW project_score_sample_mv AS +WITH entry_scores AS ( + SELECT + n.project_id, + n.entry_id, + n.user_id, + SUM(n.score) AS score, + MAX(n.created_date) AS created_date + FROM nodes n + GROUP BY n.project_id, n.entry_id, n.user_id +), +with_message AS ( + SELECT + es.project_id, + es.score, + e.text AS entry_message, + es.user_id, + es.created_date + FROM entry_scores es + JOIN entries e ON e.id = es.entry_id +) +SELECT DISTINCT ON (project_id, user_id, entry_message) + project_id, + score, + entry_message, + user_id, + created_date +FROM with_message +ORDER BY project_id, user_id, entry_message, created_date DESC +WITH DATA; + +CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id); +CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id); + +COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): one record per score (chosen row), one entry_message per project; representative = latest by date.'; diff --git a/play-life-backend/migrations/README.md b/play-life-backend/migrations/README.md new file mode 100644 index 0000000..ccad621 --- /dev/null +++ b/play-life-backend/migrations/README.md @@ -0,0 +1,115 @@ +# Database Migrations + +Этот каталог содержит SQL миграции для создания структуры базы данных проекта play-life. + +## Использование + +### Создание базы данных с нуля + +Выполните миграцию для создания всех таблиц и представлений: + +```bash +psql -U your_user -d your_database -f 001_create_schema.sql +``` + +Или через docker-compose: + +```bash +docker-compose exec db psql -U playeng -d playeng -f /migrations/001_create_schema.sql +``` + +## Структура базы данных + +### Таблицы + +1. **projects** - Проекты + - `id` (SERIAL PRIMARY KEY) + - `name` (VARCHAR(255) NOT NULL, UNIQUE) + - `priority` (SMALLINT) + +2. 
**entries** - Записи с текстом и датами создания + - `id` (SERIAL PRIMARY KEY) + - `text` (TEXT NOT NULL) + - `created_date` (TIMESTAMP WITH TIME ZONE NOT NULL, DEFAULT CURRENT_TIMESTAMP) + +3. **nodes** - Узлы, связывающие проекты и записи + - `id` (SERIAL PRIMARY KEY) + - `project_id` (INTEGER NOT NULL, FK -> projects.id ON DELETE CASCADE) + - `entry_id` (INTEGER NOT NULL, FK -> entries.id ON DELETE CASCADE) + - `score` (NUMERIC(8,4)) + +4. **weekly_goals** - Недельные цели для проектов + - `id` (SERIAL PRIMARY KEY) + - `project_id` (INTEGER NOT NULL, FK -> projects.id ON DELETE CASCADE) + - `goal_year` (INTEGER NOT NULL) + - `goal_week` (INTEGER NOT NULL) + - `min_goal_score` (NUMERIC(10,4) NOT NULL, DEFAULT 0) + - `max_goal_score` (NUMERIC(10,4)) + - `max_score` (NUMERIC(10,4), NULL) — snapshot max на неделю (заполняется только для новых недель) + - `priority` (SMALLINT) + - UNIQUE CONSTRAINT: `(project_id, goal_year, goal_week)` + +### Materialized View + +- **weekly_report_mv** - Агрегированные данные по неделям для каждого проекта + - `project_id` (INTEGER) + - `report_year` (INTEGER) + - `report_week` (INTEGER) + - `total_score` (NUMERIC) + - `normalized_total_score` (NUMERIC) + +## Миграции + +### Порядок применения миграций + +1. **001_create_schema.sql** - Создание базовой структуры (таблицы, индексы, materialized view) +2. **002_add_dictionaries.sql** - Добавление таблиц для словарей +3. **003_remove_words_unique_constraint.sql** - Удаление уникального ограничения на words.name +4. **004_add_config_dictionaries.sql** - Добавление связи между конфигурациями и словарями +5. **005_fix_weekly_report_mv.sql** - Исправление использования ISOYEAR вместо YEAR для корректной работы на границе года +6. **006_fix_weekly_report_mv_structure.sql** - Исправление структуры view (добавление LEFT JOIN для включения всех проектов) +7. **026_weekly_goals_max_score.sql** - Добавление snapshot поля weekly_goals.max_score и удаление неиспользуемого actual_score +8. 
**027_add_normalized_total_score_to_weekly_report_mv.sql** - Добавление normalized_total_score в weekly_report_mv (ограничение total_score по max_score) + +### Применение миграций + +Для существующей базы данных применяйте миграции последовательно: + +```bash +psql -U playeng -d playeng -f migrations/005_fix_weekly_report_mv.sql +psql -U playeng -d playeng -f migrations/006_fix_weekly_report_mv_structure.sql +psql -U playeng -d playeng -f migrations/026_weekly_goals_max_score.sql +psql -U playeng -d playeng -f migrations/027_add_normalized_total_score_to_weekly_report_mv.sql +``` + +Или через docker-compose: + +```bash +docker-compose exec db psql -U playeng -d playeng -f /migrations/005_fix_weekly_report_mv.sql +docker-compose exec db psql -U playeng -d playeng -f /migrations/006_fix_weekly_report_mv_structure.sql +docker-compose exec db psql -U playeng -d playeng -f /migrations/026_weekly_goals_max_score.sql +docker-compose exec db psql -U playeng -d playeng -f /migrations/027_add_normalized_total_score_to_weekly_report_mv.sql +``` + +## Обновление Materialized View + +После изменения данных в таблицах `nodes` или `entries`, необходимо обновить materialized view: + +```sql +REFRESH MATERIALIZED VIEW weekly_report_mv; +``` + +## Связи между таблицами + +- `nodes.project_id` → `projects.id` (ON DELETE CASCADE) +- `nodes.entry_id` → `entries.id` (ON DELETE CASCADE) +- `weekly_goals.project_id` → `projects.id` (ON DELETE CASCADE) + +## Индексы + +Созданы индексы для оптимизации запросов: +- `idx_nodes_project_id` на `nodes(project_id)` +- `idx_nodes_entry_id` на `nodes(entry_id)` +- `idx_weekly_goals_project_id` на `weekly_goals(project_id)` +- `idx_weekly_report_mv_project_year_week` на `weekly_report_mv(project_id, report_year, report_week)` + diff --git a/play-life-backend/migrations_old/001_create_schema.sql b/play-life-backend/migrations_old/001_create_schema.sql new file mode 100644 index 0000000..90c8d26 --- /dev/null +++ 
b/play-life-backend/migrations_old/001_create_schema.sql @@ -0,0 +1,106 @@ +-- Migration: Create database schema for play-life project +-- This script creates all tables and materialized views needed for the project + +-- ============================================ +-- Table: projects +-- ============================================ +CREATE TABLE IF NOT EXISTS projects ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + priority SMALLINT, + CONSTRAINT unique_project_name UNIQUE (name) +); + +-- ============================================ +-- Table: entries +-- ============================================ +-- This table stores entries with creation dates +-- Used in weekly_report_mv for grouping by week +CREATE TABLE IF NOT EXISTS entries ( + id SERIAL PRIMARY KEY, + text TEXT NOT NULL, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- ============================================ +-- Table: nodes +-- ============================================ +-- This table stores nodes linked to projects and entries +-- Contains score information used in weekly reports +CREATE TABLE IF NOT EXISTS nodes ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + entry_id INTEGER NOT NULL REFERENCES entries(id) ON DELETE CASCADE, + score NUMERIC(8,4) +); + +-- Create index on project_id for better join performance +CREATE INDEX IF NOT EXISTS idx_nodes_project_id ON nodes(project_id); +-- Create index on entry_id for better join performance +CREATE INDEX IF NOT EXISTS idx_nodes_entry_id ON nodes(entry_id); + +-- ============================================ +-- Table: weekly_goals +-- ============================================ +-- This table stores weekly goals for projects +CREATE TABLE IF NOT EXISTS weekly_goals ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + goal_year INTEGER NOT NULL, + goal_week INTEGER NOT NULL, + min_goal_score 
NUMERIC(10,4) NOT NULL DEFAULT 0, + max_goal_score NUMERIC(10,4), + actual_score NUMERIC(10,4) DEFAULT 0, + priority SMALLINT, + CONSTRAINT weekly_goals_project_id_goal_year_goal_week_key UNIQUE (project_id, goal_year, goal_week) +); + +-- Create index on project_id for better join performance +CREATE INDEX IF NOT EXISTS idx_weekly_goals_project_id ON weekly_goals(project_id); + +-- ============================================ +-- Materialized View: weekly_report_mv +-- ============================================ +CREATE MATERIALIZED VIEW IF NOT EXISTS weekly_report_mv AS +SELECT + p.id AS project_id, + agg.report_year, + agg.report_week, + -- Используем COALESCE для установки total_score в 0.0000, если нет данных (NULL) + COALESCE(agg.total_score, 0.0000) AS total_score +FROM + projects p +LEFT JOIN + ( + -- 1. Предварительная агрегация: суммируем score по неделям + -- Используем ISOYEAR для корректной работы на границе года + SELECT + n.project_id, + EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year, + EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week, + SUM(n.score) AS total_score + FROM + nodes n + JOIN + entries e ON n.entry_id = e.id + GROUP BY + 1, 2, 3 + ) agg + -- 2. 
Присоединяем агрегированные данные ко ВСЕМ проектам + ON p.id = agg.project_id +ORDER BY + p.id, agg.report_year, agg.report_week; + +-- Create index on materialized view for better query performance +CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week + ON weekly_report_mv(project_id, report_year, report_week); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE projects IS 'Projects table storing project information with priority'; +COMMENT ON TABLE entries IS 'Entries table storing entry creation timestamps'; +COMMENT ON TABLE nodes IS 'Nodes table linking projects, entries and storing scores'; +COMMENT ON TABLE weekly_goals IS 'Weekly goals for projects'; +COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries'; + diff --git a/play-life-backend/migrations_old/002_add_dictionaries.sql b/play-life-backend/migrations_old/002_add_dictionaries.sql new file mode 100644 index 0000000..8b140b7 --- /dev/null +++ b/play-life-backend/migrations_old/002_add_dictionaries.sql @@ -0,0 +1,53 @@ +-- Migration: Add dictionaries table and dictionary_id to words +-- This script creates the dictionaries table and adds dictionary_id field to words table + +-- ============================================ +-- Table: dictionaries +-- ============================================ +CREATE TABLE IF NOT EXISTS dictionaries ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL +); + +-- Insert default dictionary "Все слова" with id = 0 +-- Note: PostgreSQL SERIAL starts from 1, so we need to use a workaround +-- First, set the sequence to allow inserting 0, then insert, then reset sequence +DO $$ +BEGIN + -- Set sequence to -1 so next value will be 0 + PERFORM setval('dictionaries_id_seq', -1, false); + + -- Insert the default dictionary with id = 0 + INSERT INTO 
dictionaries (id, name) + VALUES (0, 'Все слова') + ON CONFLICT (id) DO NOTHING; + + -- Set the sequence to start from 1 (so next auto-increment will be 1) + PERFORM setval('dictionaries_id_seq', 1, false); +END $$; + +-- ============================================ +-- Alter words table: Add dictionary_id column +-- ============================================ +ALTER TABLE words +ADD COLUMN IF NOT EXISTS dictionary_id INTEGER DEFAULT 0 REFERENCES dictionaries(id); + +-- Update all existing words to have dictionary_id = 0 +UPDATE words +SET dictionary_id = 0 +WHERE dictionary_id IS NULL; + +-- Make dictionary_id NOT NULL after setting default values +ALTER TABLE words +ALTER COLUMN dictionary_id SET NOT NULL, +ALTER COLUMN dictionary_id SET DEFAULT 0; + +-- Create index on dictionary_id for better join performance +CREATE INDEX IF NOT EXISTS idx_words_dictionary_id ON words(dictionary_id); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE dictionaries IS 'Dictionaries table storing dictionary information'; +COMMENT ON COLUMN words.dictionary_id IS 'Reference to dictionary. 
Default is 0 (Все слова)'; + diff --git a/play-life-backend/migrations_old/003_remove_words_unique_constraint.sql b/play-life-backend/migrations_old/003_remove_words_unique_constraint.sql new file mode 100644 index 0000000..deed27e --- /dev/null +++ b/play-life-backend/migrations_old/003_remove_words_unique_constraint.sql @@ -0,0 +1,11 @@ +-- Migration: Remove UNIQUE constraint from words.name +-- This script removes the unique constraint on the name column in the words table + +-- Drop the unique constraint on words.name if it exists +ALTER TABLE words +DROP CONSTRAINT IF EXISTS words_name_key; + +-- Also try to drop constraint if it was created with different name +ALTER TABLE words +DROP CONSTRAINT IF EXISTS words_name_unique; + diff --git a/play-life-backend/migrations_old/004_add_config_dictionaries.sql b/play-life-backend/migrations_old/004_add_config_dictionaries.sql new file mode 100644 index 0000000..7d2bc69 --- /dev/null +++ b/play-life-backend/migrations_old/004_add_config_dictionaries.sql @@ -0,0 +1,21 @@ +-- Migration: Add config_dictionaries table (many-to-many relationship) +-- This script creates the config_dictionaries table linking configs and dictionaries + +-- ============================================ +-- Table: config_dictionaries +-- ============================================ +CREATE TABLE IF NOT EXISTS config_dictionaries ( + config_id INTEGER NOT NULL REFERENCES configs(id) ON DELETE CASCADE, + dictionary_id INTEGER NOT NULL REFERENCES dictionaries(id) ON DELETE CASCADE, + PRIMARY KEY (config_id, dictionary_id) +); + +-- Create indexes for better query performance +CREATE INDEX IF NOT EXISTS idx_config_dictionaries_config_id ON config_dictionaries(config_id); +CREATE INDEX IF NOT EXISTS idx_config_dictionaries_dictionary_id ON config_dictionaries(dictionary_id); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE config_dictionaries IS 
'Many-to-many relationship table linking configs and dictionaries. If no dictionaries are selected for a config, all dictionaries will be used.'; + diff --git a/play-life-backend/migrations_old/005_fix_weekly_report_mv.sql b/play-life-backend/migrations_old/005_fix_weekly_report_mv.sql new file mode 100644 index 0000000..45ecd4f --- /dev/null +++ b/play-life-backend/migrations_old/005_fix_weekly_report_mv.sql @@ -0,0 +1,29 @@ +-- Migration: Fix weekly_report_mv to use ISOYEAR instead of YEAR +-- This fixes incorrect week calculations at year boundaries +-- Date: 2024 + +-- Drop existing materialized view +DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv; + +-- Recreate materialized view with ISOYEAR +CREATE MATERIALIZED VIEW weekly_report_mv AS +SELECT + n.project_id, + -- 🔑 ГЛАВНОЕ ИСПРАВЛЕНИЕ: Используем ISOYEAR + EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year, + EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week, + SUM(n.score) AS total_score +FROM + nodes n +JOIN + entries e ON n.entry_id = e.id +GROUP BY + 1, 2, 3 +WITH DATA; + +-- Recreate index +CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week + ON weekly_report_mv(project_id, report_year, report_week); + +COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations'; + diff --git a/play-life-backend/migrations_old/006_fix_weekly_report_mv_structure.sql b/play-life-backend/migrations_old/006_fix_weekly_report_mv_structure.sql new file mode 100644 index 0000000..90ddb78 --- /dev/null +++ b/play-life-backend/migrations_old/006_fix_weekly_report_mv_structure.sql @@ -0,0 +1,48 @@ +-- Migration: Fix weekly_report_mv structure to include all projects via LEFT JOIN +-- This ensures the view structure matches the code in main.go +-- Date: 2024-12-29 +-- +-- Issue: Migration 005 created the view without LEFT JOIN to projects table, +-- which means projects without data were not included in 
the view. +-- This migration fixes the structure to match main.go implementation. + +-- Drop existing materialized view +DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv; + +-- Recreate materialized view with correct structure (LEFT JOIN with projects) +-- This ensures all projects are included, even if they have no data for a given week +CREATE MATERIALIZED VIEW weekly_report_mv AS +SELECT + p.id AS project_id, + agg.report_year, + agg.report_week, + COALESCE(agg.total_score, 0.0000) AS total_score +FROM + projects p +LEFT JOIN + ( + SELECT + n.project_id, + EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year, + EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week, + SUM(n.score) AS total_score + FROM + nodes n + JOIN + entries e ON n.entry_id = e.id + GROUP BY + 1, 2, 3 + ) agg + ON p.id = agg.project_id +WHERE + p.deleted = FALSE +ORDER BY + p.id, agg.report_year, agg.report_week +WITH DATA; + +-- Recreate index +CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week + ON weekly_report_mv(project_id, report_year, report_week); + +COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. 
Includes all projects via LEFT JOIN.'; + diff --git a/play-life-backend/migrations_old/007_add_deleted_to_projects.sql b/play-life-backend/migrations_old/007_add_deleted_to_projects.sql new file mode 100644 index 0000000..fd9f824 --- /dev/null +++ b/play-life-backend/migrations_old/007_add_deleted_to_projects.sql @@ -0,0 +1,13 @@ +-- Migration: Add deleted field to projects table +-- This script adds a deleted boolean field to mark projects as deleted (soft delete) + +-- Add deleted column to projects table +ALTER TABLE projects +ADD COLUMN IF NOT EXISTS deleted BOOLEAN NOT NULL DEFAULT FALSE; + +-- Create index on deleted column for better query performance +CREATE INDEX IF NOT EXISTS idx_projects_deleted ON projects(deleted); + +-- Add comment for documentation +COMMENT ON COLUMN projects.deleted IS 'Soft delete flag: TRUE if project is deleted, FALSE otherwise'; + diff --git a/play-life-backend/migrations_old/008_add_telegram_integrations.sql b/play-life-backend/migrations_old/008_add_telegram_integrations.sql new file mode 100644 index 0000000..ac807a2 --- /dev/null +++ b/play-life-backend/migrations_old/008_add_telegram_integrations.sql @@ -0,0 +1,16 @@ +-- Migration: Add telegram_integrations table +-- This script creates a table to store Telegram bot tokens and chat IDs + +-- Create telegram_integrations table +CREATE TABLE IF NOT EXISTS telegram_integrations ( + id SERIAL PRIMARY KEY, + chat_id VARCHAR(255), + bot_token VARCHAR(255) +); + +-- Add comment for documentation +COMMENT ON TABLE telegram_integrations IS 'Stores Telegram bot tokens and chat IDs for integrations'; +COMMENT ON COLUMN telegram_integrations.id IS 'Auto-increment primary key'; +COMMENT ON COLUMN telegram_integrations.chat_id IS 'Telegram chat ID (nullable, set automatically after first message)'; +COMMENT ON COLUMN telegram_integrations.bot_token IS 'Telegram bot token (nullable, set by user)'; + diff --git a/play-life-backend/migrations_old/009_add_users_and_multitenancy.sql 
b/play-life-backend/migrations_old/009_add_users_and_multitenancy.sql new file mode 100644 index 0000000..dc34ef0 --- /dev/null +++ b/play-life-backend/migrations_old/009_add_users_and_multitenancy.sql @@ -0,0 +1,128 @@ +-- Migration: Add users table and user_id to all tables for multi-tenancy +-- This script adds user authentication and makes all data user-specific +-- All statements use IF NOT EXISTS / IF EXISTS for idempotency + +-- ============================================ +-- Table: users +-- ============================================ +CREATE TABLE IF NOT EXISTS users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) NOT NULL UNIQUE, + password_hash VARCHAR(255) NOT NULL, + name VARCHAR(255), + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + is_active BOOLEAN DEFAULT TRUE, + last_login_at TIMESTAMP WITH TIME ZONE +); + +CREATE INDEX IF NOT EXISTS idx_users_email ON users(email); + +-- ============================================ +-- Table: refresh_tokens +-- ============================================ +CREATE TABLE IF NOT EXISTS refresh_tokens ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + token_hash VARCHAR(255) NOT NULL, + expires_at TIMESTAMP WITH TIME ZONE NOT NULL, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_refresh_tokens_user_id ON refresh_tokens(user_id); +CREATE INDEX IF NOT EXISTS idx_refresh_tokens_token_hash ON refresh_tokens(token_hash); + +-- ============================================ +-- Add user_id to projects +-- ============================================ +ALTER TABLE projects +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_projects_user_id ON projects(user_id); + +-- Drop old unique constraint (name now unique per user, handled in app) +ALTER TABLE projects DROP CONSTRAINT IF EXISTS 
unique_project_name; + +-- ============================================ +-- Add user_id to entries +-- ============================================ +ALTER TABLE entries +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_entries_user_id ON entries(user_id); + +-- ============================================ +-- Add user_id to dictionaries +-- ============================================ +ALTER TABLE dictionaries +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_dictionaries_user_id ON dictionaries(user_id); + +-- ============================================ +-- Add user_id to words +-- ============================================ +ALTER TABLE words +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_words_user_id ON words(user_id); + +-- ============================================ +-- Add user_id to progress +-- ============================================ +ALTER TABLE progress +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_progress_user_id ON progress(user_id); + +-- Drop old unique constraint (word_id now unique per user) +ALTER TABLE progress DROP CONSTRAINT IF EXISTS progress_word_id_key; + +-- Create new unique constraint per user +CREATE UNIQUE INDEX IF NOT EXISTS idx_progress_word_user_unique ON progress(word_id, user_id); + +-- ============================================ +-- Add user_id to configs +-- ============================================ +ALTER TABLE configs +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_configs_user_id ON configs(user_id); + +-- ============================================ +-- Add user_id to telegram_integrations +-- ============================================ +ALTER TABLE telegram_integrations +ADD 
COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_telegram_integrations_user_id ON telegram_integrations(user_id); + +-- ============================================ +-- Add user_id to weekly_goals +-- ============================================ +ALTER TABLE weekly_goals +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_weekly_goals_user_id ON weekly_goals(user_id); + +-- ============================================ +-- Add user_id to nodes (score data) +-- ============================================ +ALTER TABLE nodes +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_nodes_user_id ON nodes(user_id); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE users IS 'Users table for authentication and multi-tenancy'; +COMMENT ON COLUMN users.email IS 'User email address (unique, used for login)'; +COMMENT ON COLUMN users.password_hash IS 'Bcrypt hashed password'; +COMMENT ON COLUMN users.name IS 'User display name'; +COMMENT ON COLUMN users.is_active IS 'Whether the user account is active'; +COMMENT ON TABLE refresh_tokens IS 'JWT refresh tokens for persistent login'; + +-- Note: The first user who logs in will automatically become the owner of all +-- existing data (projects, entries, dictionaries, words, etc.) that have NULL user_id. +-- This is handled in the application code (claimOrphanedData function). 
diff --git a/play-life-backend/migrations_old/011_add_webhook_tokens.sql b/play-life-backend/migrations_old/011_add_webhook_tokens.sql new file mode 100644 index 0000000..f871e9f --- /dev/null +++ b/play-life-backend/migrations_old/011_add_webhook_tokens.sql @@ -0,0 +1,17 @@ +-- Migration: Add webhook_token to telegram_integrations +-- This allows identifying user by webhook URL token + +-- Add webhook_token column to telegram_integrations +ALTER TABLE telegram_integrations +ADD COLUMN IF NOT EXISTS webhook_token VARCHAR(255); + +-- Create unique index on webhook_token for fast lookups +CREATE UNIQUE INDEX IF NOT EXISTS idx_telegram_integrations_webhook_token +ON telegram_integrations(webhook_token) +WHERE webhook_token IS NOT NULL; + +-- Generate webhook tokens for existing integrations +-- This will be handled by application code, but we ensure the column exists + +COMMENT ON COLUMN telegram_integrations.webhook_token IS 'Unique token for webhook URL identification (e.g., /webhook/telegram/{token})'; + diff --git a/play-life-backend/migrations_old/012_refactor_telegram_single_bot.sql b/play-life-backend/migrations_old/012_refactor_telegram_single_bot.sql new file mode 100644 index 0000000..5e9d528 --- /dev/null +++ b/play-life-backend/migrations_old/012_refactor_telegram_single_bot.sql @@ -0,0 +1,103 @@ +-- Migration: Refactor telegram_integrations for single shared bot +-- and move Todoist webhook_token to separate table + +-- ============================================ +-- 1. 
Создаем таблицу todoist_integrations +-- ============================================ +CREATE TABLE IF NOT EXISTS todoist_integrations ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + webhook_token VARCHAR(255) NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT todoist_integrations_user_id_unique UNIQUE (user_id) +); + +CREATE UNIQUE INDEX IF NOT EXISTS idx_todoist_integrations_webhook_token +ON todoist_integrations(webhook_token); + +CREATE INDEX IF NOT EXISTS idx_todoist_integrations_user_id +ON todoist_integrations(user_id); + +COMMENT ON TABLE todoist_integrations IS 'Todoist webhook integration settings per user'; +COMMENT ON COLUMN todoist_integrations.webhook_token IS 'Unique token for Todoist webhook URL'; + +-- ============================================ +-- 2. Мигрируем webhook_token из telegram_integrations в todoist_integrations +-- ============================================ +INSERT INTO todoist_integrations (user_id, webhook_token, created_at, updated_at) +SELECT user_id, webhook_token, COALESCE(created_at, CURRENT_TIMESTAMP), CURRENT_TIMESTAMP +FROM telegram_integrations +WHERE webhook_token IS NOT NULL + AND webhook_token != '' + AND user_id IS NOT NULL +ON CONFLICT (user_id) DO NOTHING; + +-- ============================================ +-- 3. 
Модифицируем telegram_integrations +-- ============================================ + +-- Удаляем bot_token (будет в .env) +ALTER TABLE telegram_integrations +DROP COLUMN IF EXISTS bot_token; + +-- Удаляем webhook_token (перенесли в todoist_integrations) +ALTER TABLE telegram_integrations +DROP COLUMN IF EXISTS webhook_token; + +-- Добавляем telegram_user_id +ALTER TABLE telegram_integrations +ADD COLUMN IF NOT EXISTS telegram_user_id BIGINT; + +-- Добавляем start_token для deep links +ALTER TABLE telegram_integrations +ADD COLUMN IF NOT EXISTS start_token VARCHAR(255); + +-- Добавляем timestamps если их нет +ALTER TABLE telegram_integrations +ADD COLUMN IF NOT EXISTS created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP; + +ALTER TABLE telegram_integrations +ADD COLUMN IF NOT EXISTS updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP; + +-- ============================================ +-- 4. Создаем индексы +-- ============================================ +CREATE UNIQUE INDEX IF NOT EXISTS idx_telegram_integrations_start_token +ON telegram_integrations(start_token) +WHERE start_token IS NOT NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS idx_telegram_integrations_telegram_user_id +ON telegram_integrations(telegram_user_id) +WHERE telegram_user_id IS NOT NULL; + +-- Уникальность user_id +DROP INDEX IF EXISTS idx_telegram_integrations_user_id; +CREATE UNIQUE INDEX IF NOT EXISTS idx_telegram_integrations_user_id_unique +ON telegram_integrations(user_id) +WHERE user_id IS NOT NULL; + +-- Индекс для поиска по chat_id +CREATE INDEX IF NOT EXISTS idx_telegram_integrations_chat_id +ON telegram_integrations(chat_id) +WHERE chat_id IS NOT NULL; + +-- Удаляем старый индекс webhook_token +DROP INDEX IF EXISTS idx_telegram_integrations_webhook_token; + +-- ============================================ +-- 5. 
Очищаем данные Telegram для переподключения +-- ============================================ +UPDATE telegram_integrations +SET chat_id = NULL, + telegram_user_id = NULL, + start_token = NULL, + updated_at = CURRENT_TIMESTAMP; + +-- ============================================ +-- Комментарии +-- ============================================ +COMMENT ON COLUMN telegram_integrations.telegram_user_id IS 'Telegram user ID (message.from.id)'; +COMMENT ON COLUMN telegram_integrations.chat_id IS 'Telegram chat ID для отправки сообщений'; +COMMENT ON COLUMN telegram_integrations.start_token IS 'Временный токен для deep link при первом подключении'; + diff --git a/play-life-backend/migrations_old/013_refactor_todoist_single_app.sql b/play-life-backend/migrations_old/013_refactor_todoist_single_app.sql new file mode 100644 index 0000000..6103250 --- /dev/null +++ b/play-life-backend/migrations_old/013_refactor_todoist_single_app.sql @@ -0,0 +1,45 @@ +-- Migration: Refactor todoist_integrations for single Todoist app +-- Webhook теперь единый для всего приложения, токены в URL больше не нужны +-- Все пользователи используют одно Todoist приложение + +-- ============================================ +-- 1. Добавляем новые поля +-- ============================================ +ALTER TABLE todoist_integrations +ADD COLUMN IF NOT EXISTS todoist_user_id BIGINT; + +ALTER TABLE todoist_integrations +ADD COLUMN IF NOT EXISTS todoist_email VARCHAR(255); + +ALTER TABLE todoist_integrations +ADD COLUMN IF NOT EXISTS access_token TEXT; + +-- ============================================ +-- 2. Удаляем webhook_token (больше не нужен!) +-- ============================================ +ALTER TABLE todoist_integrations +DROP COLUMN IF EXISTS webhook_token; + +-- ============================================ +-- 3. 
Удаляем старый индекс на webhook_token +-- ============================================ +DROP INDEX IF EXISTS idx_todoist_integrations_webhook_token; + +-- ============================================ +-- 4. Создаем новые индексы +-- ============================================ +CREATE UNIQUE INDEX IF NOT EXISTS idx_todoist_integrations_todoist_user_id +ON todoist_integrations(todoist_user_id) +WHERE todoist_user_id IS NOT NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS idx_todoist_integrations_todoist_email +ON todoist_integrations(todoist_email) +WHERE todoist_email IS NOT NULL; + +-- ============================================ +-- 5. Комментарии +-- ============================================ +COMMENT ON COLUMN todoist_integrations.todoist_user_id IS 'Todoist user ID (from OAuth) - used to identify user in webhooks'; +COMMENT ON COLUMN todoist_integrations.todoist_email IS 'Todoist user email (from OAuth)'; +COMMENT ON COLUMN todoist_integrations.access_token IS 'Todoist OAuth access token (permanent)'; + diff --git a/play-life-backend/migrations_old/014_make_refresh_tokens_permanent.sql b/play-life-backend/migrations_old/014_make_refresh_tokens_permanent.sql new file mode 100644 index 0000000..f4e2530 --- /dev/null +++ b/play-life-backend/migrations_old/014_make_refresh_tokens_permanent.sql @@ -0,0 +1,21 @@ +-- Migration: Make refresh tokens permanent (no expiration) +-- Refresh tokens теперь не имеют срока действия (expires_at может быть NULL) +-- Access tokens живут 24 часа вместо 15 минут + +-- ============================================ +-- 1. Изменяем expires_at на NULLABLE +-- ============================================ +ALTER TABLE refresh_tokens +ALTER COLUMN expires_at DROP NOT NULL; + +-- ============================================ +-- 2. 
Устанавливаем NULL для всех существующих токенов +-- (или можно оставить их как есть, если они еще не истекли) +-- ============================================ +-- UPDATE refresh_tokens SET expires_at = NULL WHERE expires_at > NOW(); + +-- ============================================ +-- 3. Комментарий +-- ============================================ +COMMENT ON COLUMN refresh_tokens.expires_at IS 'Expiration date for refresh token. NULL means token never expires.'; + diff --git a/play-life-backend/migrations_old/015_add_tasks.sql b/play-life-backend/migrations_old/015_add_tasks.sql new file mode 100644 index 0000000..fc4aec4 --- /dev/null +++ b/play-life-backend/migrations_old/015_add_tasks.sql @@ -0,0 +1,58 @@ +-- Migration: Add tasks and reward_configs tables +-- This script creates tables for task management system + +-- ============================================ +-- Table: tasks +-- ============================================ +CREATE TABLE IF NOT EXISTS tasks ( + id SERIAL PRIMARY KEY, + user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + completed INTEGER DEFAULT 0, + last_completed_at TIMESTAMP WITH TIME ZONE, + parent_task_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE, + reward_message TEXT, + progression_base NUMERIC(10,4), + deleted BOOLEAN DEFAULT FALSE +); + +CREATE INDEX IF NOT EXISTS idx_tasks_user_id ON tasks(user_id); +CREATE INDEX IF NOT EXISTS idx_tasks_parent_task_id ON tasks(parent_task_id); +CREATE INDEX IF NOT EXISTS idx_tasks_deleted ON tasks(deleted); +CREATE INDEX IF NOT EXISTS idx_tasks_last_completed_at ON tasks(last_completed_at); + +-- ============================================ +-- Table: reward_configs +-- ============================================ +CREATE TABLE IF NOT EXISTS reward_configs ( + id SERIAL PRIMARY KEY, + position INTEGER NOT NULL, + task_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE, + project_id INTEGER REFERENCES projects(id) ON DELETE CASCADE, + value NUMERIC(10,4) 
NOT NULL, + use_progression BOOLEAN DEFAULT FALSE +); + +CREATE INDEX IF NOT EXISTS idx_reward_configs_task_id ON reward_configs(task_id); +CREATE INDEX IF NOT EXISTS idx_reward_configs_project_id ON reward_configs(project_id); +CREATE UNIQUE INDEX IF NOT EXISTS idx_reward_configs_task_position ON reward_configs(task_id, position); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE tasks IS 'Tasks table for task management system'; +COMMENT ON COLUMN tasks.name IS 'Task name (required for main tasks, optional for subtasks)'; +COMMENT ON COLUMN tasks.completed IS 'Number of times task was completed'; +COMMENT ON COLUMN tasks.last_completed_at IS 'Date and time of last task completion'; +COMMENT ON COLUMN tasks.parent_task_id IS 'Parent task ID for subtasks (NULL for main tasks)'; +COMMENT ON COLUMN tasks.reward_message IS 'Reward message template with placeholders ${0}, ${1}, etc.'; +COMMENT ON COLUMN tasks.progression_base IS 'Base value for progression calculation (NULL means no progression)'; +COMMENT ON COLUMN tasks.deleted IS 'Soft delete flag'; + +COMMENT ON TABLE reward_configs IS 'Reward configurations for tasks'; +COMMENT ON COLUMN reward_configs.position IS 'Position in reward_message template (0, 1, 2, etc.)'; +COMMENT ON COLUMN reward_configs.task_id IS 'Task this reward belongs to'; +COMMENT ON COLUMN reward_configs.project_id IS 'Project to add reward to'; +COMMENT ON COLUMN reward_configs.value IS 'Default score value (can be negative)'; +COMMENT ON COLUMN reward_configs.use_progression IS 'Whether to use progression multiplier for this reward'; + diff --git a/play-life-backend/migrations_old/016_add_repetition_period.sql b/play-life-backend/migrations_old/016_add_repetition_period.sql new file mode 100644 index 0000000..4af049d --- /dev/null +++ b/play-life-backend/migrations_old/016_add_repetition_period.sql @@ -0,0 +1,14 @@ +-- Migration: Add 
repetition_period field to tasks table +-- This script adds the repetition_period field for recurring tasks + +-- ============================================ +-- Add repetition_period column +-- ============================================ +ALTER TABLE tasks +ADD COLUMN IF NOT EXISTS repetition_period INTERVAL; + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON COLUMN tasks.repetition_period IS 'Period after which task should be repeated (NULL means task is not recurring)'; + diff --git a/play-life-backend/migrations_old/017_add_next_show_at.sql b/play-life-backend/migrations_old/017_add_next_show_at.sql new file mode 100644 index 0000000..636dc23 --- /dev/null +++ b/play-life-backend/migrations_old/017_add_next_show_at.sql @@ -0,0 +1,14 @@ +-- Migration: Add next_show_at field to tasks table +-- This script adds the next_show_at field for postponing tasks + +-- ============================================ +-- Add next_show_at column +-- ============================================ +ALTER TABLE tasks +ADD COLUMN IF NOT EXISTS next_show_at TIMESTAMP WITH TIME ZONE; + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON COLUMN tasks.next_show_at IS 'Date when task should be shown again (NULL means use last_completed_at + period)'; + diff --git a/play-life-backend/migrations_old/018_add_repetition_date.sql b/play-life-backend/migrations_old/018_add_repetition_date.sql new file mode 100644 index 0000000..ba502e0 --- /dev/null +++ b/play-life-backend/migrations_old/018_add_repetition_date.sql @@ -0,0 +1,16 @@ +-- Migration: Add repetition_date field to tasks table +-- This script adds the repetition_date field for pattern-based recurring tasks +-- Format examples: "2 week" (2nd day of week), "15 month" (15th day of month), "02-01 year" (Feb 1st) + +-- 
============================================ +-- Add repetition_date column +-- ============================================ +ALTER TABLE tasks +ADD COLUMN IF NOT EXISTS repetition_date TEXT; + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON COLUMN tasks.repetition_date IS 'Pattern-based repetition: "N week" (day of week 1-7), "N month" (day of month 1-31), "MM-DD year" (specific date). Mutually exclusive with repetition_period.'; + + diff --git a/play-life-backend/migrations_old/019_add_wishlist.sql b/play-life-backend/migrations_old/019_add_wishlist.sql new file mode 100644 index 0000000..59a1f02 --- /dev/null +++ b/play-life-backend/migrations_old/019_add_wishlist.sql @@ -0,0 +1,86 @@ +-- Migration: Add wishlist tables +-- This script creates tables for wishlist management system +-- Supports multiple unlock conditions per wishlist item (AND logic) + +-- ============================================ +-- Table: wishlist_items +-- ============================================ +CREATE TABLE IF NOT EXISTS wishlist_items ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + price NUMERIC(10,2), + image_path VARCHAR(500), + link TEXT, + completed BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + deleted BOOLEAN DEFAULT FALSE +); + +CREATE INDEX IF NOT EXISTS idx_wishlist_items_user_id ON wishlist_items(user_id); +CREATE INDEX IF NOT EXISTS idx_wishlist_items_user_deleted ON wishlist_items(user_id, deleted); +CREATE INDEX IF NOT EXISTS idx_wishlist_items_user_completed ON wishlist_items(user_id, completed, deleted); + +-- ============================================ +-- Table: task_conditions +-- ============================================ +-- Reusable conditions for task completion +CREATE TABLE IF NOT 
EXISTS task_conditions ( + id SERIAL PRIMARY KEY, + task_id INTEGER NOT NULL REFERENCES tasks(id) ON DELETE CASCADE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT unique_task_condition UNIQUE (task_id) +); + +CREATE INDEX IF NOT EXISTS idx_task_conditions_task_id ON task_conditions(task_id); + +-- ============================================ +-- Table: score_conditions +-- ============================================ +-- Reusable conditions for project points +CREATE TABLE IF NOT EXISTS score_conditions ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + required_points NUMERIC(10,4) NOT NULL, + period_type VARCHAR(20), -- 'week', 'month', 'year', NULL (all time) + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT unique_score_condition UNIQUE (project_id, required_points, period_type) +); + +CREATE INDEX IF NOT EXISTS idx_score_conditions_project_id ON score_conditions(project_id); + +-- ============================================ +-- Table: wishlist_conditions +-- ============================================ +-- Links wishlist items to unlock conditions +CREATE TABLE IF NOT EXISTS wishlist_conditions ( + id SERIAL PRIMARY KEY, + wishlist_item_id INTEGER NOT NULL REFERENCES wishlist_items(id) ON DELETE CASCADE, + task_condition_id INTEGER REFERENCES task_conditions(id) ON DELETE CASCADE, + score_condition_id INTEGER REFERENCES score_conditions(id) ON DELETE CASCADE, + display_order INTEGER DEFAULT 0, + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT check_exactly_one_condition CHECK ( + (task_condition_id IS NOT NULL AND score_condition_id IS NULL) OR + (task_condition_id IS NULL AND score_condition_id IS NOT NULL) + ) +); + +CREATE INDEX IF NOT EXISTS idx_wishlist_conditions_item_id ON wishlist_conditions(wishlist_item_id); +CREATE INDEX IF NOT EXISTS idx_wishlist_conditions_item_order ON wishlist_conditions(wishlist_item_id, 
display_order); +CREATE INDEX IF NOT EXISTS idx_wishlist_conditions_task_condition_id ON wishlist_conditions(task_condition_id); +CREATE INDEX IF NOT EXISTS idx_wishlist_conditions_score_condition_id ON wishlist_conditions(score_condition_id); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE wishlist_items IS 'Wishlist items for users'; +COMMENT ON COLUMN wishlist_items.completed IS 'Flag indicating item was purchased/received'; +COMMENT ON COLUMN wishlist_items.image_path IS 'Path to image file relative to uploads root'; + +COMMENT ON TABLE task_conditions IS 'Reusable unlock conditions based on task completion'; +COMMENT ON TABLE score_conditions IS 'Reusable unlock conditions based on project points'; +COMMENT ON TABLE wishlist_conditions IS 'Links between wishlist items and unlock conditions. Multiple conditions per item use AND logic.'; +COMMENT ON COLUMN wishlist_conditions.display_order IS 'Order for displaying conditions in UI'; + diff --git a/play-life-backend/migrations_old/020_change_period_to_start_date.sql b/play-life-backend/migrations_old/020_change_period_to_start_date.sql new file mode 100644 index 0000000..2727b15 --- /dev/null +++ b/play-life-backend/migrations_old/020_change_period_to_start_date.sql @@ -0,0 +1,37 @@ +-- Migration: Change period_type to start_date in score_conditions +-- This allows specifying a start date for counting points instead of period type +-- Date can be in the past or future, NULL means count all time + +-- Добавляем новое поле start_date +ALTER TABLE score_conditions +ADD COLUMN IF NOT EXISTS start_date DATE; + +-- Миграция данных: для существующих записей с period_type устанавливаем start_date +-- Если period_type = 'week', то start_date = начало текущей недели +-- Если period_type = 'month', то start_date = начало текущего месяца +-- Если period_type = 'year', то start_date = начало текущего года +-- Если period_type 
IS NULL, то start_date = NULL (за всё время) +UPDATE score_conditions +SET start_date = CASE + WHEN period_type = 'week' THEN DATE_TRUNC('week', CURRENT_DATE)::DATE + WHEN period_type = 'month' THEN DATE_TRUNC('month', CURRENT_DATE)::DATE + WHEN period_type = 'year' THEN DATE_TRUNC('year', CURRENT_DATE)::DATE + ELSE NULL +END +WHERE start_date IS NULL; + +-- Обновляем уникальное ограничение (удаляем старое, добавляем новое) +ALTER TABLE score_conditions +DROP CONSTRAINT IF EXISTS unique_score_condition; + +ALTER TABLE score_conditions +ADD CONSTRAINT unique_score_condition +UNIQUE (project_id, required_points, start_date); + +-- Обновляем комментарии +COMMENT ON COLUMN score_conditions.start_date IS 'Date from which to start counting points. NULL means count all time.'; + +-- Примечание: поле period_type оставляем пока для обратной совместимости +-- Его можно будет удалить позже после проверки, что всё работает: +-- ALTER TABLE score_conditions DROP COLUMN period_type; + diff --git a/play-life-backend/migrations_old/021_add_wishlist_id_to_tasks.sql b/play-life-backend/migrations_old/021_add_wishlist_id_to_tasks.sql new file mode 100644 index 0000000..f0a9422 --- /dev/null +++ b/play-life-backend/migrations_old/021_add_wishlist_id_to_tasks.sql @@ -0,0 +1,18 @@ +-- Migration: Add wishlist_id to tasks table for linking tasks to wishlist items +-- This allows creating tasks directly from wishlist items and tracking the relationship + +-- Добавляем поле wishlist_id в таблицу tasks +ALTER TABLE tasks +ADD COLUMN IF NOT EXISTS wishlist_id INTEGER REFERENCES wishlist_items(id) ON DELETE SET NULL; + +-- Создаём индекс для быстрого поиска задач по wishlist_id +CREATE INDEX IF NOT EXISTS idx_tasks_wishlist_id ON tasks(wishlist_id); + +-- Уникальный индекс: только одна незавершённая задача на желание +-- Это предотвращает создание нескольких задач для одного желания +CREATE UNIQUE INDEX IF NOT EXISTS idx_tasks_wishlist_id_unique +ON tasks(wishlist_id) WHERE wishlist_id IS NOT 
NULL AND deleted = FALSE; + +-- Добавляем комментарий для документации +COMMENT ON COLUMN tasks.wishlist_id IS 'Link to wishlist item that this task fulfills. NULL if task is not linked to any wishlist item.'; + diff --git a/play-life-backend/migrations_old/022_refactor_configs_to_tasks.sql b/play-life-backend/migrations_old/022_refactor_configs_to_tasks.sql new file mode 100644 index 0000000..6187187 --- /dev/null +++ b/play-life-backend/migrations_old/022_refactor_configs_to_tasks.sql @@ -0,0 +1,49 @@ +-- Migration: Refactor configs to link via tasks.config_id +-- This migration adds config_id to tasks table and migrates existing configs to tasks +-- After migration: configs only contain words_count, max_cards (name and try_message removed) + +-- ============================================ +-- Step 1: Add config_id to tasks +-- ============================================ +ALTER TABLE tasks +ADD COLUMN IF NOT EXISTS config_id INTEGER REFERENCES configs(id) ON DELETE SET NULL; + +CREATE INDEX IF NOT EXISTS idx_tasks_config_id ON tasks(config_id); + +-- Unique index: only one task per config +CREATE UNIQUE INDEX IF NOT EXISTS idx_tasks_config_id_unique +ON tasks(config_id) WHERE config_id IS NOT NULL AND deleted = FALSE; + +COMMENT ON COLUMN tasks.config_id IS 'Link to test config. 
NULL if task is not a test.'; + +-- ============================================ +-- Step 2: Migrate existing configs to tasks +-- Create a task for each config that doesn't have one yet +-- ============================================ +INSERT INTO tasks (user_id, name, reward_message, repetition_period, repetition_date, config_id) +SELECT + c.user_id, + c.name, -- Config name -> Task name + c.try_message, -- try_message -> reward_message + '0 day'::INTERVAL, -- repetition_period = 0 (infinite task) + '0 week', -- repetition_date = 0 (infinite task) + c.id -- Link to config +FROM configs c +WHERE c.name IS NOT NULL -- Only configs with names +AND NOT EXISTS ( + SELECT 1 FROM tasks t WHERE t.config_id = c.id AND t.deleted = FALSE +); + +-- ============================================ +-- Step 3: Remove name and try_message from configs +-- These are now stored in the linked task +-- ============================================ +ALTER TABLE configs DROP COLUMN IF EXISTS name; +ALTER TABLE configs DROP COLUMN IF EXISTS try_message; + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE configs IS 'Test configurations (words_count, max_cards, dictionary associations). 
Linked to tasks via tasks.config_id.'; + + diff --git a/play-life-backend/migrations_old/023_add_wishlist_boards.sql b/play-life-backend/migrations_old/023_add_wishlist_boards.sql new file mode 100644 index 0000000..b690cdc --- /dev/null +++ b/play-life-backend/migrations_old/023_add_wishlist_boards.sql @@ -0,0 +1,116 @@ +-- Migration: Add wishlist boards for multi-user collaboration +-- Each user can have multiple boards, share them via invite links, +-- and collaborate with other users on shared wishes + +-- ============================================ +-- Table: wishlist_boards (доски желаний) +-- ============================================ +CREATE TABLE IF NOT EXISTS wishlist_boards ( + id SERIAL PRIMARY KEY, + owner_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + name VARCHAR(255) NOT NULL, + + -- Настройки доступа по ссылке + invite_token VARCHAR(64) UNIQUE, + invite_enabled BOOLEAN DEFAULT FALSE, + + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + deleted BOOLEAN DEFAULT FALSE +); + +CREATE INDEX IF NOT EXISTS idx_wishlist_boards_owner_id ON wishlist_boards(owner_id); +CREATE INDEX IF NOT EXISTS idx_wishlist_boards_invite_token ON wishlist_boards(invite_token) + WHERE invite_token IS NOT NULL; +CREATE INDEX IF NOT EXISTS idx_wishlist_boards_owner_deleted ON wishlist_boards(owner_id, deleted); + +-- ============================================ +-- Table: wishlist_board_members (участники доски) +-- ============================================ +CREATE TABLE IF NOT EXISTS wishlist_board_members ( + id SERIAL PRIMARY KEY, + board_id INTEGER NOT NULL REFERENCES wishlist_boards(id) ON DELETE CASCADE, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + joined_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT unique_board_member UNIQUE (board_id, user_id) +); + +CREATE INDEX IF NOT EXISTS idx_board_members_board_id ON 
wishlist_board_members(board_id); +CREATE INDEX IF NOT EXISTS idx_board_members_user_id ON wishlist_board_members(user_id); + +-- ============================================ +-- Modify: wishlist_items - добавляем board_id и author_id +-- ============================================ +ALTER TABLE wishlist_items +ADD COLUMN IF NOT EXISTS board_id INTEGER REFERENCES wishlist_boards(id) ON DELETE CASCADE; + +ALTER TABLE wishlist_items +ADD COLUMN IF NOT EXISTS author_id INTEGER REFERENCES users(id) ON DELETE SET NULL; + +CREATE INDEX IF NOT EXISTS idx_wishlist_items_board_id ON wishlist_items(board_id); +CREATE INDEX IF NOT EXISTS idx_wishlist_items_author_id ON wishlist_items(author_id); + +-- ============================================ +-- Modify: wishlist_conditions - добавляем user_id для персональных целей +-- ============================================ +ALTER TABLE wishlist_conditions +ADD COLUMN IF NOT EXISTS user_id INTEGER REFERENCES users(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_wishlist_conditions_user_id ON wishlist_conditions(user_id); + +-- ============================================ +-- Modify: tasks - добавляем политику награждения для wishlist задач +-- ============================================ +ALTER TABLE tasks +ADD COLUMN IF NOT EXISTS reward_policy VARCHAR(20) DEFAULT 'personal'; + +COMMENT ON COLUMN tasks.reward_policy IS + 'For wishlist tasks: personal = only if user completes, shared = anyone completes'; + +-- ============================================ +-- Миграция данных: Этап 1 - создаём персональные доски +-- ============================================ +-- Создаём доску "Мои желания" для каждого пользователя с желаниями +INSERT INTO wishlist_boards (owner_id, name) +SELECT DISTINCT user_id, 'Мои желания' +FROM wishlist_items +WHERE user_id IS NOT NULL + AND deleted = FALSE + AND NOT EXISTS ( + SELECT 1 FROM wishlist_boards wb + WHERE wb.owner_id = wishlist_items.user_id AND wb.name = 'Мои желания' + ); + +-- 
============================================ +-- Миграция данных: Этап 2 - привязываем желания к доскам +-- ============================================ +UPDATE wishlist_items wi +SET + board_id = wb.id, + author_id = COALESCE(wi.author_id, wi.user_id) +FROM wishlist_boards wb +WHERE wi.board_id IS NULL + AND wi.user_id = wb.owner_id + AND wb.name = 'Мои желания'; + +-- ============================================ +-- Миграция данных: Этап 3 - заполняем user_id в условиях +-- ============================================ +UPDATE wishlist_conditions wc +SET user_id = wi.user_id +FROM wishlist_items wi +WHERE wc.wishlist_item_id = wi.id + AND wc.user_id IS NULL; + +-- ============================================ +-- Comments +-- ============================================ +COMMENT ON TABLE wishlist_boards IS 'Wishlist boards for organizing and sharing wishes'; +COMMENT ON COLUMN wishlist_boards.invite_token IS 'Token for invite link, NULL = disabled'; +COMMENT ON COLUMN wishlist_boards.invite_enabled IS 'Whether invite link is active'; +COMMENT ON TABLE wishlist_board_members IS 'Users who joined boards via invite link (not owners)'; +COMMENT ON COLUMN wishlist_conditions.user_id IS 'Owner of this condition. 
Each user has their own goals on shared boards.'; +COMMENT ON COLUMN wishlist_items.author_id IS 'User who created this item (may differ from board owner on shared boards)'; +COMMENT ON COLUMN wishlist_items.board_id IS 'Board this item belongs to'; + diff --git a/play-life-backend/migrations_old/024_add_reward_policy.sql b/play-life-backend/migrations_old/024_add_reward_policy.sql new file mode 100644 index 0000000..0db06c8 --- /dev/null +++ b/play-life-backend/migrations_old/024_add_reward_policy.sql @@ -0,0 +1,13 @@ +-- Migration: Add reward_policy to tasks table +-- This migration adds reward_policy column for wishlist tasks +-- If the column already exists (from migration 023), this will be a no-op + +-- ============================================ +-- Modify: tasks - добавляем политику награждения для wishlist задач +-- ============================================ +ALTER TABLE tasks +ADD COLUMN IF NOT EXISTS reward_policy VARCHAR(20) DEFAULT 'personal'; + +COMMENT ON COLUMN tasks.reward_policy IS + 'For wishlist tasks: personal = only if user completes, shared = anyone completes'; + diff --git a/play-life-backend/migrations_old/025_remove_conditions_without_user_id.sql b/play-life-backend/migrations_old/025_remove_conditions_without_user_id.sql new file mode 100644 index 0000000..e19b420 --- /dev/null +++ b/play-life-backend/migrations_old/025_remove_conditions_without_user_id.sql @@ -0,0 +1,13 @@ +-- Migration: Remove wishlist conditions without user_id +-- These conditions should not exist as every condition must have an owner +-- This migration removes orphaned conditions that were created before the fix + +-- ============================================ +-- Remove conditions without user_id +-- ============================================ +DELETE FROM wishlist_conditions WHERE user_id IS NULL; + +-- ============================================ +-- Comments +-- ============================================ +COMMENT ON COLUMN wishlist_conditions.user_id IS 
'Owner of this condition. Each user has their own goals on shared boards. Required field.'; diff --git a/play-life-backend/migrations_old/026_weekly_goals_max_score.sql b/play-life-backend/migrations_old/026_weekly_goals_max_score.sql new file mode 100644 index 0000000..2c3b630 --- /dev/null +++ b/play-life-backend/migrations_old/026_weekly_goals_max_score.sql @@ -0,0 +1,10 @@ +-- Migration: Add weekly_goals.max_score snapshot column and drop unused actual_score +-- Date: 2026-01-24 + +ALTER TABLE weekly_goals + DROP COLUMN IF EXISTS actual_score; + +-- max_score is a snapshot of max_goal_score for a week, filled only for new weeks by cron +ALTER TABLE weekly_goals + ADD COLUMN IF NOT EXISTS max_score NUMERIC(10,4); + diff --git a/play-life-backend/migrations_old/027_add_normalized_total_score_to_weekly_report_mv.sql b/play-life-backend/migrations_old/027_add_normalized_total_score_to_weekly_report_mv.sql new file mode 100644 index 0000000..b374848 --- /dev/null +++ b/play-life-backend/migrations_old/027_add_normalized_total_score_to_weekly_report_mv.sql @@ -0,0 +1,51 @@ +-- Migration: Add normalized_total_score to weekly_report_mv using weekly_goals.max_score +-- Date: 2026-01-24 +-- +-- normalized_total_score = LEAST(total_score, max_score) if max_score is set, else total_score. +-- Note: max_score is a snapshot field (filled only for new weeks by cron). 
+ +DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv; + +CREATE MATERIALIZED VIEW weekly_report_mv AS +SELECT + p.id AS project_id, + agg.report_year, + agg.report_week, + COALESCE(agg.total_score, 0.0000) AS total_score, + CASE + WHEN wg.max_score IS NULL THEN COALESCE(agg.total_score, 0.0000) + ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_score) + END AS normalized_total_score +FROM + projects p +LEFT JOIN + ( + SELECT + n.project_id, + EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year, + EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week, + SUM(n.score) AS total_score + FROM + nodes n + JOIN + entries e ON n.entry_id = e.id + GROUP BY + 1, 2, 3 + ) agg + ON p.id = agg.project_id +LEFT JOIN + weekly_goals wg + ON wg.project_id = p.id + AND wg.goal_year = agg.report_year + AND wg.goal_week = agg.report_week +WHERE + p.deleted = FALSE +ORDER BY + p.id, agg.report_year, agg.report_week +WITH DATA; + +CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week + ON weekly_report_mv(project_id, report_year, report_week); + +COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_score snapshot.'; + diff --git a/play-life-backend/migrations_old/028_optimize_task_queries.sql b/play-life-backend/migrations_old/028_optimize_task_queries.sql new file mode 100644 index 0000000..e39f962 --- /dev/null +++ b/play-life-backend/migrations_old/028_optimize_task_queries.sql @@ -0,0 +1,14 @@ +-- Migration: Optimize task queries with composite index +-- Date: 2026-01-24 +-- +-- This migration adds a composite index to optimize the task detail query: +-- WHERE id = $1 AND user_id = $2 AND deleted = FALSE +-- +-- The index uses a partial index with WHERE deleted = FALSE to reduce index size +-- and improve query performance for active (non-deleted) tasks. 
+ +CREATE INDEX IF NOT EXISTS idx_tasks_id_user_deleted +ON tasks(id, user_id, deleted) +WHERE deleted = FALSE; + +COMMENT ON INDEX idx_tasks_id_user_deleted IS 'Composite index for optimizing task detail queries with id, user_id, and deleted filter. Partial index for non-deleted tasks only.'; diff --git a/play-life-backend/migrations_old/029_add_covering_indexes.sql b/play-life-backend/migrations_old/029_add_covering_indexes.sql new file mode 100644 index 0000000..057050e --- /dev/null +++ b/play-life-backend/migrations_old/029_add_covering_indexes.sql @@ -0,0 +1,25 @@ +-- Migration: Add covering indexes for task detail queries +-- Date: 2026-01-25 +-- +-- This migration adds covering indexes to optimize queries by including +-- all needed columns in the index, avoiding table lookups. +-- +-- Covering indexes allow PostgreSQL to perform index-only scans, +-- getting all data directly from the index without accessing the table. + +-- Covering index for subtasks query +-- Includes all columns needed for subtasks selection to avoid table lookups +CREATE INDEX IF NOT EXISTS idx_tasks_parent_deleted_covering +ON tasks(parent_task_id, deleted, id) +INCLUDE (name, completed, last_completed_at, reward_message, progression_base) +WHERE deleted = FALSE; + +-- Covering index for wishlist name lookup +-- Includes name and deleted flag for quick lookup without table access +CREATE INDEX IF NOT EXISTS idx_wishlist_items_id_deleted_covering +ON wishlist_items(id, deleted) +INCLUDE (name) +WHERE deleted = FALSE; + +COMMENT ON INDEX idx_tasks_parent_deleted_covering IS 'Covering index for subtasks query - includes all selected columns to avoid table lookups. Enables index-only scans for better performance.'; +COMMENT ON INDEX idx_wishlist_items_id_deleted_covering IS 'Covering index for wishlist name lookup - includes name to avoid table lookup. 
Enables index-only scans for better performance.'; diff --git a/play-life-backend/migrations_old/README.md b/play-life-backend/migrations_old/README.md new file mode 100644 index 0000000..d9d7f11 --- /dev/null +++ b/play-life-backend/migrations_old/README.md @@ -0,0 +1,15 @@ +# Архив старых миграций + +Эта директория содержит старые SQL миграции (001-029), которые были заменены baseline миграцией `000001_baseline.up.sql`. + +## Примечание + +Эти миграции сохранены только для справки и истории. Они **не должны применяться** в новых установках или после перехода на golang-migrate. + +## Новые миграции + +Все новые миграции должны создаваться в формате golang-migrate: +- `000002_*.up.sql` - миграция вверх +- `000002_*.down.sql` - миграция вниз (откат) + +Используйте команду `migrate create -ext sql -dir migrations -seq ` для создания новых миграций. diff --git a/play-life-backend/play-eng-backend b/play-life-backend/play-eng-backend new file mode 100755 index 0000000..fbfc1ea Binary files /dev/null and b/play-life-backend/play-eng-backend differ diff --git a/play-life-backend/start_backend.sh b/play-life-backend/start_backend.sh new file mode 100755 index 0000000..0ee7445 --- /dev/null +++ b/play-life-backend/start_backend.sh @@ -0,0 +1,20 @@ +#!/bin/bash +cd "$(dirname "$0")" + +# Настройки подключения к БД (можно изменить через переменные окружения) +export DB_HOST=${DB_HOST:-localhost} +export DB_PORT=${DB_PORT:-5432} +export DB_USER=${DB_USER:-postgres} +export DB_PASSWORD=${DB_PASSWORD:-postgres} +export DB_NAME=${DB_NAME:-playlife} +export PORT=${PORT:-8080} + +echo "Starting backend server..." 
+echo "DB_HOST: $DB_HOST" +echo "DB_PORT: $DB_PORT" +echo "DB_USER: $DB_USER" +echo "DB_NAME: $DB_NAME" +echo "PORT: $PORT" +echo "" + +go run main.go diff --git a/play-life-backend/test_baseline.sh b/play-life-backend/test_baseline.sh new file mode 100755 index 0000000..ffdb285 --- /dev/null +++ b/play-life-backend/test_baseline.sh @@ -0,0 +1,347 @@ +#!/bin/bash + +# Скрипт для тестирования baseline миграции на чистой БД +# Создает тестовую БД, применяет baseline, и сравнивает схему с production + +set -e + +# Цвета для вывода +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# Получаем переменные окружения +DB_HOST=${DB_HOST:-localhost} +DB_PORT=${DB_PORT:-5432} +DB_USER=${DB_USER:-playeng} +DB_PASSWORD=${DB_PASSWORD:-playeng} +DB_NAME=${DB_NAME:-playeng} + +TEST_DB_NAME="playeng_baseline_test_$$" +MIGRATIONS_PATH="migrations" +TMP_DIR=$(mktemp -d) + +echo "=== Тестирование baseline миграции на чистой БД ===" +echo "" + +# Добавляем ~/go/bin в PATH если migrate не найден +if ! command -v migrate &> /dev/null; then + export PATH="$HOME/go/bin:$PATH" +fi + +# Проверяем наличие необходимых инструментов +if ! command -v migrate &> /dev/null; then + echo -e "${RED}Ошибка: migrate не найден. 
Установите golang-migrate:${NC}" + echo " brew install golang-migrate" + echo " или" + echo " go install -tags 'postgres' github.com/golang-migrate/migrate/v4/cmd/migrate@latest" + exit 1 +fi + +# Определяем способ выполнения PostgreSQL команд +PG_DUMP_CMD="" +PG_PSQL_CMD="" +POSTGRES_CONTAINER="" +if command -v pg_dump &> /dev/null; then + PG_DUMP_CMD="pg_dump" + PG_PSQL_CMD="psql" +else + # Пытаемся найти PostgreSQL контейнер + if command -v docker &> /dev/null; then + POSTGRES_CONTAINER=$(docker ps --format "{{.Names}}" 2>/dev/null | grep -iE "(postgres|db)" | head -1) + if [ -n "$POSTGRES_CONTAINER" ]; then + PG_DUMP_CMD="docker exec $POSTGRES_CONTAINER pg_dump" + PG_PSQL_CMD="docker exec -i $POSTGRES_CONTAINER psql" + echo -e "${BLUE}Используется PostgreSQL из Docker контейнера: $POSTGRES_CONTAINER${NC}" + fi + fi +fi + +HAS_PG_DUMP=false +if [ -n "$PG_DUMP_CMD" ]; then + HAS_PG_DUMP=true +else + echo -e "${YELLOW}Предупреждение: pg_dump не найден. Сравнение схем будет пропущено.${NC}" + echo " Для полного тестирования установите PostgreSQL client tools" +fi + +# Проверяем наличие директории миграций +if [ ! -d "$MIGRATIONS_PATH" ]; then + echo -e "${RED}Ошибка: Директория миграций не найдена: $MIGRATIONS_PATH${NC}" + exit 1 +fi + +# Проверяем наличие baseline миграции +if [ ! -f "$MIGRATIONS_PATH/000001_baseline.up.sql" ]; then + echo -e "${RED}Ошибка: Baseline миграция не найдена: $MIGRATIONS_PATH/000001_baseline.up.sql${NC}" + exit 1 +fi + +echo "Параметры подключения:" +echo " Host: $DB_HOST" +echo " Port: $DB_PORT" +echo " User: $DB_USER" +echo " Test DB: $TEST_DB_NAME" +echo "" + +# Проверяем подключение к БД +echo "1. Проверка подключения к БД..." 
+if [ -n "$POSTGRES_CONTAINER" ]; then + # Используем Docker + echo "SELECT 1;" | $PG_PSQL_CMD -U $DB_USER -d postgres > /dev/null 2>&1 +elif [ -n "$PG_PSQL_CMD" ]; then + # Используем локальный psql + PGPASSWORD=$DB_PASSWORD $PG_PSQL_CMD \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d postgres \ + -c "SELECT 1;" > /dev/null 2>&1 +else + # Пытаемся через стандартный psql + PGPASSWORD=$DB_PASSWORD psql \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d postgres \ + -c "SELECT 1;" > /dev/null 2>&1 +fi + +if [ $? -ne 0 ]; then + echo -e "${RED}Ошибка: Не удалось подключиться к БД${NC}" + exit 1 +fi + +echo -e "${GREEN}✓ Подключение успешно${NC}" +echo "" + +# Создаем тестовую БД +echo "2. Создание тестовой БД..." +if [ -n "$POSTGRES_CONTAINER" ]; then + echo "CREATE DATABASE $TEST_DB_NAME;" | $PG_PSQL_CMD -U $DB_USER -d postgres > /dev/null 2>&1 +elif [ -n "$PG_PSQL_CMD" ]; then + PGPASSWORD=$DB_PASSWORD $PG_PSQL_CMD \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d postgres \ + -c "CREATE DATABASE $TEST_DB_NAME;" > /dev/null 2>&1 +else + PGPASSWORD=$DB_PASSWORD psql \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d postgres \ + -c "CREATE DATABASE $TEST_DB_NAME;" > /dev/null 2>&1 +fi + +if [ $? -ne 0 ]; then + echo -e "${RED}Ошибка: Не удалось создать тестовую БД${NC}" + exit 1 +fi + +echo -e "${GREEN}✓ Тестовая БД создана: $TEST_DB_NAME${NC}" +echo "" + +# Ждем немного, чтобы БД точно создалась +sleep 1 + +# Проверяем, что БД создана +echo "3. Проверка существования тестовой БД..." +if [ -n "$POSTGRES_CONTAINER" ]; then + if echo "SELECT 1 FROM pg_database WHERE datname='$TEST_DB_NAME';" | $PG_PSQL_CMD -U $DB_USER -d postgres -t | grep -q 1; then + echo -e "${GREEN}✓ БД подтверждена${NC}" + else + echo -e "${RED}Ошибка: БД не найдена после создания${NC}" + exit 1 + fi +fi +echo "" + +# Применяем baseline миграцию +echo "4. Применение baseline миграции..." 
+cd "$(dirname "$0")" || exit 1 + +if [ -n "$POSTGRES_CONTAINER" ]; then + # Для Docker контейнеров используем psql напрямую, так как migrate может иметь проблемы с подключением + echo -e "${BLUE}Применение миграции через psql (Docker)...${NC}" + if [ -f "$MIGRATIONS_PATH/000001_baseline.up.sql" ]; then + if cat "$MIGRATIONS_PATH/000001_baseline.up.sql" | $PG_PSQL_CMD -U $DB_USER -d $TEST_DB_NAME > /dev/null 2>&1; then + echo -e "${GREEN}✓ Миграция применена через psql${NC}" + # Создаем таблицу schema_migrations вручную для migrate + echo "CREATE TABLE IF NOT EXISTS schema_migrations (version bigint NOT NULL PRIMARY KEY, dirty boolean NOT NULL);" | $PG_PSQL_CMD -U $DB_USER -d $TEST_DB_NAME > /dev/null 2>&1 + echo "INSERT INTO schema_migrations (version, dirty) VALUES (1, false) ON CONFLICT (version) DO UPDATE SET dirty = false;" | $PG_PSQL_CMD -U $DB_USER -d $TEST_DB_NAME > /dev/null 2>&1 + MIGRATE_SUCCESS=false # Устанавливаем в false, чтобы использовать psql для проверки версии + else + echo -e "${RED}Ошибка: Не удалось применить миграцию через psql${NC}" + exit 1 + fi + else + echo -e "${RED}Ошибка: Файл миграции не найден${NC}" + exit 1 + fi + DATABASE_URL="postgres://$DB_USER:$DB_PASSWORD@localhost:$DB_PORT/$TEST_DB_NAME?sslmode=disable" +else + DATABASE_URL="postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$TEST_DB_NAME?sslmode=disable" + if ! migrate -path "$MIGRATIONS_PATH" -database "$DATABASE_URL" up; then + echo -e "${RED}Ошибка: Не удалось применить baseline миграцию${NC}" + exit 1 + fi +fi + +echo -e "${GREEN}✓ Baseline миграция применена${NC}" +echo "" + +# Проверяем версию миграции +echo "5. Проверка версии миграции..." 
+if [ -n "$POSTGRES_CONTAINER" ] && [ "${MIGRATE_SUCCESS:-false}" = "false" ]; then + # Проверяем версию через psql + VERSION=$(echo "SELECT version FROM schema_migrations;" | $PG_PSQL_CMD -U $DB_USER -d $TEST_DB_NAME -t 2>/dev/null | tr -d ' ' | head -1) + if [ -n "$VERSION" ] && [ "$VERSION" != "" ]; then + echo " Версия: $VERSION" + if [ "$VERSION" = "1" ]; then + echo -e "${GREEN}✓ Версия миграции корректна${NC}" + else + echo -e "${YELLOW}⚠ Неожиданная версия миграции: $VERSION${NC}" + fi + else + echo -e "${YELLOW}⚠ Не удалось определить версию миграции${NC}" + fi +else + # Используем migrate для проверки версии + VERSION=$(migrate -path "$MIGRATIONS_PATH" -database "$DATABASE_URL" version 2>&1) + echo " Версия: $VERSION" + + if echo "$VERSION" | grep -qE "^1"; then + echo -e "${GREEN}✓ Версия миграции корректна${NC}" + else + echo -e "${YELLOW}⚠ Неожиданная версия миграции${NC}" + fi +fi +echo "" + +# Экспортируем схему из тестовой БД (если pg_dump доступен) +if [ "$HAS_PG_DUMP" = true ]; then + echo "6. Экспорт схемы из тестовой БД..." + if [ -n "$POSTGRES_CONTAINER" ]; then + $PG_DUMP_CMD -U $DB_USER -d $TEST_DB_NAME --schema-only --no-owner --no-privileges > "$TMP_DIR/baseline_schema.sql" + else + PGPASSWORD=$DB_PASSWORD $PG_DUMP_CMD \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d $TEST_DB_NAME \ + --schema-only \ + --no-owner \ + --no-privileges \ + -f "$TMP_DIR/baseline_schema.sql" + fi + + if [ $? -ne 0 ]; then + echo -e "${RED}Ошибка: Не удалось экспортировать схему${NC}" + exit 1 + fi + + echo -e "${GREEN}✓ Схема экспортирована${NC}" + echo "" + + # Пытаемся экспортировать схему из production БД для сравнения + echo "7. Экспорт схемы из production БД для сравнения..." 
+ if [ -n "$POSTGRES_CONTAINER" ]; then + if $PG_DUMP_CMD -U $DB_USER -d $DB_NAME --schema-only --no-owner --no-privileges > "$TMP_DIR/production_schema.sql" 2>/dev/null; then + PROD_EXPORT_SUCCESS=true + else + PROD_EXPORT_SUCCESS=false + fi + else + if PGPASSWORD=$DB_PASSWORD $PG_DUMP_CMD \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d $DB_NAME \ + --schema-only \ + --no-owner \ + --no-privileges \ + -f "$TMP_DIR/production_schema.sql" 2>/dev/null; then + PROD_EXPORT_SUCCESS=true + else + PROD_EXPORT_SUCCESS=false + fi + fi + + if [ "$PROD_EXPORT_SUCCESS" = true ]; then + + echo -e "${GREEN}✓ Схема production экспортирована${NC}" + echo "" + + # Сравниваем схемы + echo "8. Сравнение схем..." + + # Подсчитываем объекты + echo -e "${BLUE}Таблицы:${NC}" + BASELINE_TABLES=$(grep -c "CREATE TABLE" "$TMP_DIR/baseline_schema.sql" || echo "0") + PROD_TABLES=$(grep -c "CREATE TABLE" "$TMP_DIR/production_schema.sql" || echo "0") + echo " Baseline: $BASELINE_TABLES" + echo " Production: $PROD_TABLES" + + if [ "$BASELINE_TABLES" -eq "$PROD_TABLES" ]; then + echo -e " ${GREEN}✓ Количество таблиц совпадает${NC}" + else + echo -e " ${YELLOW}⚠ Количество таблиц не совпадает${NC}" + fi + + echo "" + echo -e "${BLUE}Индексы:${NC}" + BASELINE_INDEXES=$(grep -c "CREATE.*INDEX" "$TMP_DIR/baseline_schema.sql" || echo "0") + PROD_INDEXES=$(grep -c "CREATE.*INDEX" "$TMP_DIR/production_schema.sql" || echo "0") + echo " Baseline: $BASELINE_INDEXES" + echo " Production: $PROD_INDEXES" + + if [ "$BASELINE_INDEXES" -eq "$PROD_INDEXES" ]; then + echo -e " ${GREEN}✓ Количество индексов совпадает${NC}" + else + echo -e " ${YELLOW}⚠ Количество индексов не совпадает${NC}" + fi + + echo "" + echo -e "${BLUE}Materialized Views:${NC}" + BASELINE_MV=$(grep -c "CREATE MATERIALIZED VIEW" "$TMP_DIR/baseline_schema.sql" || echo "0") + PROD_MV=$(grep -c "CREATE MATERIALIZED VIEW" "$TMP_DIR/production_schema.sql" || echo "0") + echo " Baseline: $BASELINE_MV" + echo " Production: $PROD_MV" + + if [ 
"$BASELINE_MV" -eq "$PROD_MV" ]; then + echo -e " ${GREEN}✓ Количество materialized views совпадает${NC}" + else + echo -e " ${YELLOW}⚠ Количество materialized views не совпадает${NC}" + fi + + echo "" + echo "Для детального сравнения выполните:" + echo " diff $TMP_DIR/baseline_schema.sql $TMP_DIR/production_schema.sql" + echo "" + echo "Или используйте:" + echo " diff -u $TMP_DIR/baseline_schema.sql $TMP_DIR/production_schema.sql | less" + + else + echo -e "${YELLOW}⚠ Не удалось экспортировать схему production БД${NC}" + echo " Продолжаем без сравнения" + echo "" + echo "Схема baseline сохранена в: $TMP_DIR/baseline_schema.sql" + fi +else + echo "6. Пропуск экспорта схемы (pg_dump недоступен)" + echo "" + echo -e "${YELLOW}Для полного тестирования установите PostgreSQL client tools:${NC}" + echo " macOS: brew install postgresql" + echo " или используйте Docker контейнер с PostgreSQL" + echo "" +fi + +echo "" +echo "=== Тестирование завершено ===" +echo "" +echo -e "${GREEN}✓ Baseline миграция успешно применена к чистой БД${NC}" +echo "" diff --git a/play-life-backend/validate_baseline.sh b/play-life-backend/validate_baseline.sh new file mode 100755 index 0000000..dad376f --- /dev/null +++ b/play-life-backend/validate_baseline.sh @@ -0,0 +1,144 @@ +#!/bin/bash + +# Скрипт для проверки полноты baseline миграции +# Сравнивает текущую схему БД с baseline миграцией + +set -e + +# Цвета для вывода +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +# Получаем переменные окружения +DB_HOST=${DB_HOST:-localhost} +DB_PORT=${DB_PORT:-5432} +DB_USER=${DB_USER:-playeng} +DB_PASSWORD=${DB_PASSWORD:-playeng} +DB_NAME=${DB_NAME:-playeng} + +echo "=== Проверка полноты baseline миграции ===" +echo "" + +# Проверяем наличие pg_dump +if ! command -v pg_dump &> /dev/null; then + echo -e "${RED}Ошибка: pg_dump не найден. 
Установите PostgreSQL client tools.${NC}" + exit 1 +fi + +# Создаем временную директорию +TMP_DIR=$(mktemp -d) +trap "rm -rf $TMP_DIR" EXIT + +echo "1. Экспортируем текущую схему БД..." +PGPASSWORD=$DB_PASSWORD pg_dump \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d $DB_NAME \ + --schema-only \ + --no-owner \ + --no-privileges \ + -f "$TMP_DIR/current_schema.sql" + +if [ $? -ne 0 ]; then + echo -e "${RED}Ошибка: Не удалось экспортировать схему БД${NC}" + exit 1 +fi + +echo -e "${GREEN}✓ Схема экспортирована${NC}" +echo "" + +# Применяем baseline миграцию к временной БД для сравнения +echo "2. Создаем временную БД для проверки baseline..." +TEMP_DB_NAME="playeng_baseline_test_$$" +PGPASSWORD=$DB_PASSWORD psql \ + -h $DB_HOST \ + -p $DB_PORT \ + -U $DB_USER \ + -d postgres \ + -c "CREATE DATABASE $TEMP_DB_NAME;" > /dev/null 2>&1 + +if [ $? -ne 0 ]; then + echo -e "${YELLOW}Предупреждение: Не удалось создать временную БД. Продолжаем без неё.${NC}" + TEMP_DB_NAME="" +else + echo -e "${GREEN}✓ Временная БД создана${NC}" +fi + +# Очистка временной БД при выходе +if [ -n "$TEMP_DB_NAME" ]; then + trap "PGPASSWORD=$DB_PASSWORD psql -h $DB_HOST -p $DB_PORT -U $DB_USER -d postgres -c 'DROP DATABASE IF EXISTS $TEMP_DB_NAME;' > /dev/null 2>&1; rm -rf $TMP_DIR" EXIT +fi + +echo "" +echo "3. Анализ схемы..." + +# Извлекаем только CREATE TABLE, CREATE INDEX, CREATE VIEW и т.д. из текущей схемы +grep -E "^(CREATE|ALTER|COMMENT)" "$TMP_DIR/current_schema.sql" | \ + sed 's/--.*$//' | \ + tr -d '\n' | \ + sed 's/;/;\n/g' | \ + sort > "$TMP_DIR/current_clean.sql" + +# Извлекаем из baseline миграции +BASELINE_FILE="play-life-backend/migrations/000001_baseline.up.sql" +if [ ! -f "$BASELINE_FILE" ]; then + echo -e "${RED}Ошибка: Baseline файл не найден: $BASELINE_FILE${NC}" + exit 1 +fi + +grep -E "^(CREATE|ALTER|COMMENT)" "$BASELINE_FILE" | \ + sed 's/--.*$//' | \ + tr -d '\n' | \ + sed 's/;/;\n/g' | \ + sort > "$TMP_DIR/baseline_clean.sql" + +echo "" +echo "4. Сравнение..." 
+ +# Сравниваем количество таблиц +CURRENT_TABLES=$(grep -c "CREATE TABLE" "$TMP_DIR/current_schema.sql" || echo "0") +BASELINE_TABLES=$(grep -c "CREATE TABLE" "$BASELINE_FILE" || echo "0") + +echo " Текущая БД: $CURRENT_TABLES таблиц" +echo " Baseline: $BASELINE_TABLES таблиц" + +if [ "$CURRENT_TABLES" -ne "$BASELINE_TABLES" ]; then + echo -e "${YELLOW}⚠ Количество таблиц не совпадает${NC}" +else + echo -e "${GREEN}✓ Количество таблиц совпадает${NC}" +fi + +# Сравниваем количество индексов +CURRENT_INDEXES=$(grep -c "CREATE.*INDEX" "$TMP_DIR/current_schema.sql" || echo "0") +BASELINE_INDEXES=$(grep -c "CREATE.*INDEX" "$BASELINE_FILE" || echo "0") + +echo " Текущая БД: $CURRENT_INDEXES индексов" +echo " Baseline: $BASELINE_INDEXES индексов" + +if [ "$CURRENT_INDEXES" -ne "$BASELINE_INDEXES" ]; then + echo -e "${YELLOW}⚠ Количество индексов не совпадает${NC}" +else + echo -e "${GREEN}✓ Количество индексов совпадает${NC}" +fi + +# Проверяем наличие materialized view +CURRENT_MV=$(grep -c "CREATE MATERIALIZED VIEW" "$TMP_DIR/current_schema.sql" || echo "0") +BASELINE_MV=$(grep -c "CREATE MATERIALIZED VIEW" "$BASELINE_FILE" || echo "0") + +echo " Текущая БД: $CURRENT_MV materialized views" +echo " Baseline: $BASELINE_MV materialized views" + +if [ "$CURRENT_MV" -ne "$BASELINE_MV" ]; then + echo -e "${YELLOW}⚠ Количество materialized views не совпадает${NC}" +else + echo -e "${GREEN}✓ Количество materialized views совпадает${NC}" +fi + +echo "" +echo "=== Проверка завершена ===" +echo "" +echo "Для детального сравнения выполните:" +echo " diff $TMP_DIR/current_schema.sql $BASELINE_FILE" diff --git a/play-life-llm/.gitignore b/play-life-llm/.gitignore new file mode 100644 index 0000000..d04f3e8 --- /dev/null +++ b/play-life-llm/.gitignore @@ -0,0 +1,12 @@ +# Env with secrets (Tavily API key, etc.) 
+.env + +# Binary +play-life-llm +*.exe + +# IDE / OS +.idea/ +.vscode/ +*.swp +.DS_Store diff --git a/play-life-llm/Dockerfile b/play-life-llm/Dockerfile new file mode 100644 index 0000000..e180e34 --- /dev/null +++ b/play-life-llm/Dockerfile @@ -0,0 +1,19 @@ +# Build stage +FROM golang:1.24-alpine AS builder +WORKDIR /app +ENV GOPROXY=https://proxy.golang.org,direct +ENV GOSUMDB=sum.golang.org +COPY go.mod go.sum ./ +RUN go mod download +COPY . . +RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o play-life-llm . + +# Runtime stage +FROM alpine:latest +RUN apk --no-cache add ca-certificates wget +WORKDIR /app +COPY --from=builder /app/play-life-llm . +EXPOSE 8090 +HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \ + CMD wget -q -O- http://localhost:8090/health || exit 1 +CMD ["./play-life-llm"] diff --git a/play-life-llm/env.example b/play-life-llm/env.example new file mode 100644 index 0000000..b2289d3 --- /dev/null +++ b/play-life-llm/env.example @@ -0,0 +1,12 @@ +# Ollama API base URL (default: http://localhost:11434) +# For Docker on Mac/Windows use: http://host.docker.internal:11434 +OLLAMA_HOST=http://localhost:11434 + +# Tavily API key for web search (required when model uses web_search tool) +TAVILY_API_KEY= + +# HTTP server port (default: 8090) +PORT=8090 + +# Default Ollama model (default: llama3.1:70b) +OLLAMA_MODEL=llama3.1:70b diff --git a/play-life-llm/go.mod b/play-life-llm/go.mod new file mode 100644 index 0000000..1e9667d --- /dev/null +++ b/play-life-llm/go.mod @@ -0,0 +1,5 @@ +module play-life-llm + +go 1.24.0 + +require github.com/gorilla/mux v1.8.1 diff --git a/play-life-llm/go.sum b/play-life-llm/go.sum new file mode 100644 index 0000000..7128337 --- /dev/null +++ b/play-life-llm/go.sum @@ -0,0 +1,2 @@ +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= diff --git 
a/play-life-llm/internal/handler/ask.go b/play-life-llm/internal/handler/ask.go new file mode 100644 index 0000000..d562815 --- /dev/null +++ b/play-life-llm/internal/handler/ask.go @@ -0,0 +1,177 @@ +package handler + +import ( + "encoding/json" + "log" + "net/http" + + "play-life-llm/internal/ollama" + "play-life-llm/internal/tavily" +) + +// AskRequest is the POST /ask body. +type AskRequest struct { + Prompt string `json:"prompt"` + ResponseSchema interface{} `json:"response_schema"` + Model string `json:"model,omitempty"` + // AllowWebSearch: если true, в запрос к Ollama добавляются tools (web_search), и при вызове модели выполняется поиск через Tavily. Если false (по умолчанию), tools не передаются — модель просто возвращает JSON по схеме (подходит для простых запросов без интернета). + AllowWebSearch bool `json:"allow_web_search,omitempty"` +} + +// AskResponse is the successful response (result is JSON by schema). +type AskResponse struct { + Result json.RawMessage `json:"result"` +} + +// AskHandler handles POST /ask: prompt + response_schema -> LLM with optional web search, returns JSON. +type AskHandler struct { + Ollama *ollama.Client + Tavily *tavily.Client + DefaultModel string +} + +// ServeHTTP implements http.Handler. 
+func (h *AskHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "method not allowed", http.StatusMethodNotAllowed) + return + } + + var req AskRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + sendError(w, "invalid JSON body", http.StatusBadRequest) + return + } + if req.Prompt == "" { + sendError(w, "prompt is required", http.StatusBadRequest) + return + } + if req.ResponseSchema == nil { + sendError(w, "response_schema is required", http.StatusBadRequest) + return + } + + model := req.Model + if model == "" { + model = h.DefaultModel + } + if model == "" { + model = "llama3.1:70b" + } + + var tools []ollama.Tool + if req.AllowWebSearch { + tools = []ollama.Tool{ollama.WebSearchTool()} + } + messages := []ollama.ChatMessage{} + if req.AllowWebSearch { + messages = append(messages, ollama.ChatMessage{ + Role: "system", + Content: "When the user asks for current, recent, or real-time information (weather, prices, news, etc.), you MUST call the web_search tool with a suitable query. Do not answer from memory — use the tool and then summarize the results in your response.", + }) + // Гарантированный запрос в Tavily: предпоиск по промпту пользователя, результат подмешивается в контекст. 
+ searchQuery := req.Prompt + if len(searchQuery) > 200 { + searchQuery = searchQuery[:200] + } + log.Printf("tavily pre-search: query=%q", searchQuery) + preSearchResult, err := h.Tavily.Search(searchQuery) + if err != nil { + log.Printf("tavily pre-search error: %v", err) + preSearchResult = "search failed: " + err.Error() + } else { + log.Printf("tavily pre-search ok: %d bytes", len(preSearchResult)) + } + messages = append(messages, ollama.ChatMessage{ + Role: "system", + Content: "Relevant web search result for the user's question (use this to answer; if not enough, you may call web_search again):\n\n" + preSearchResult, + }) + } + messages = append(messages, ollama.ChatMessage{ + Role: "user", Content: req.Prompt, + }) + + const maxToolRounds = 20 + for round := 0; round < maxToolRounds; round++ { + chatReq := &ollama.ChatRequest{ + Model: model, + Messages: messages, + Stream: false, + Format: req.ResponseSchema, + Tools: tools, + } + resp, err := h.Ollama.Chat(chatReq) + if err != nil { + log.Printf("ollama chat error: %v", err) + sendError(w, "ollama request failed: "+err.Error(), http.StatusBadGateway) + return + } + + messages = append(messages, resp.Message) + + if n := len(resp.Message.ToolCalls); n > 0 { + log.Printf("ollama returned %d tool_calls", n) + } + if len(resp.Message.ToolCalls) == 0 { + // Final answer: message.content is JSON by schema + content := resp.Message.Content + if content == "" { + sendError(w, "empty response from model", http.StatusBadGateway) + return + } + // Return as { "result": } so client gets valid JSON + var raw json.RawMessage + if err := json.Unmarshal([]byte(content), &raw); err != nil { + // If not valid JSON, return as string inside result + raw = json.RawMessage(`"` + escapeJSONString(content) + `"`) + } + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + _ = json.NewEncoder(w).Encode(AskResponse{Result: raw}) + return + } + + // Execute tool calls (web_search via Tavily) + for _, 
tc := range resp.Message.ToolCalls { + if tc.Function.Name != "web_search" { + messages = append(messages, ollama.ChatMessage{ + Role: "tool", ToolName: tc.Function.Name, Content: "unknown tool", + }) + continue + } + query := ollama.QueryFromToolCall(tc) + if query == "" { + // Некоторые модели подставляют в arguments не "query", а другие поля — используем промпт пользователя как поисковый запрос + query = req.Prompt + if len(query) > 200 { + query = query[:200] + } + log.Printf("web_search: query empty in tool_call, using user prompt (first 200 chars)") + } + log.Printf("tavily search: query=%q", query) + searchResult, err := h.Tavily.Search(query) + if err != nil { + log.Printf("tavily search error: %v", err) + searchResult = "search failed: " + err.Error() + } else { + log.Printf("tavily search ok: %d bytes", len(searchResult)) + } + messages = append(messages, ollama.ChatMessage{ + Role: "tool", ToolName: "web_search", Content: searchResult, + }) + } + } + // Too many tool rounds + sendError(w, "too many tool-call rounds", http.StatusBadGateway) +} + +func sendError(w http.ResponseWriter, msg string, code int) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(code) + _ = json.NewEncoder(w).Encode(map[string]string{"error": msg}) +} + +func escapeJSONString(s string) string { + b, _ := json.Marshal(s) + return string(b[1 : len(b)-1]) +} diff --git a/play-life-llm/internal/handler/health.go b/play-life-llm/internal/handler/health.go new file mode 100644 index 0000000..3acdfed --- /dev/null +++ b/play-life-llm/internal/handler/health.go @@ -0,0 +1,17 @@ +package handler + +import ( + "encoding/json" + "net/http" +) + +// Health returns 200 with {"status": "ok"} for Docker healthcheck. 
func Health(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodGet {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusOK)
	_ = json.NewEncoder(w).Encode(map[string]string{"status": "ok"})
}
diff --git a/play-life-llm/internal/ollama/client.go b/play-life-llm/internal/ollama/client.go new file mode 100644 index 0000000..64e326a --- /dev/null +++ b/play-life-llm/internal/ollama/client.go @@ -0,0 +1,148 @@
package ollama

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)

// defaultTimeout bounds one whole /api/chat round trip; generous because
// large local models can take minutes to respond.
const defaultTimeout = 10 * time.Minute

// Client calls Ollama /api/chat.
type Client struct {
	BaseURL    string
	HTTPClient *http.Client
}

// NewClient creates an Ollama client. baseURL is e.g. "http://localhost:11434".
func NewClient(baseURL string) *Client {
	return &Client{
		BaseURL: baseURL,
		HTTPClient: &http.Client{
			Timeout: defaultTimeout,
		},
	}
}

// ChatRequest matches Ollama POST /api/chat body.
type ChatRequest struct {
	Model    string        `json:"model"`
	Messages []ChatMessage `json:"messages"`
	Stream   bool          `json:"stream"`
	Format   interface{}   `json:"format,omitempty"` // "json" or JSON schema object
	Tools    []Tool        `json:"tools,omitempty"`
}

// ChatMessage is one message in the conversation.
type ChatMessage struct {
	Role      string     `json:"role"` // "user", "assistant", "system", "tool"
	Content   string     `json:"content,omitempty"`
	ToolCalls []ToolCall `json:"tool_calls,omitempty"`
	ToolName  string     `json:"tool_name,omitempty"` // for role "tool"
}

// Tool defines a function the model may call.
type Tool struct {
	Type     string   `json:"type"`
	Function ToolFunc `json:"function"`
}

// ToolFunc describes the function.
type ToolFunc struct {
	Name        string      `json:"name"`
	Description string      `json:"description"`
	Parameters  interface{} `json:"parameters"`
}

// ToolCall is a model request to call a tool.
type ToolCall struct {
	Type     string     `json:"type"`
	Function ToolCallFn `json:"function"`
}

// ToolCallFn holds name and arguments.
// Arguments may come from Ollama as a JSON object or as a JSON string.
type ToolCallFn struct {
	Name      string      `json:"name"`
	Arguments interface{} `json:"arguments"` // object or string
}

// QueryFromToolCall returns the "query" argument from a web_search tool call.
// Ollama may send arguments as a map or as a JSON string.
// Returns "" when no usable "query" string is found (caller falls back).
func QueryFromToolCall(tc ToolCall) string {
	switch v := tc.Function.Arguments.(type) {
	case map[string]interface{}:
		if q, _ := v["query"].(string); q != "" {
			return q
		}
	case string:
		// Some models serialize arguments as a JSON-encoded string; decode first.
		var m map[string]interface{}
		if json.Unmarshal([]byte(v), &m) == nil {
			if q, _ := m["query"].(string); q != "" {
				return q
			}
		}
	}
	return ""
}

// ChatResponse is the Ollama /api/chat response.
type ChatResponse struct {
	Message ChatMessage `json:"message"`
	Done    bool        `json:"done"`
}

// Chat sends a chat request and returns the response.
// Non-200 responses are surfaced as errors that include the response body.
func (c *Client) Chat(req *ChatRequest) (*ChatResponse, error) {
	body, err := json.Marshal(req)
	if err != nil {
		return nil, fmt.Errorf("marshal request: %w", err)
	}
	url := c.BaseURL + "/api/chat"
	httpReq, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("new request: %w", err)
	}
	httpReq.Header.Set("Content-Type", "application/json")

	resp, err := c.HTTPClient.Do(httpReq)
	if err != nil {
		return nil, fmt.Errorf("do request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Include the body in the error — Ollama puts the failure reason there.
		b, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("ollama returned %d: %s", resp.StatusCode, string(b))
	}

	var out ChatResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return nil, fmt.Errorf("decode response: %w", err)
	}
	return &out, nil
}

// WebSearchTool returns the tool definition for web_search (Tavily).
func WebSearchTool() Tool {
	return Tool{
		Type: "function",
		Function: ToolFunc{
			Name:        "web_search",
			Description: "Search the web for current information. Use when you need up-to-date or factual information from the internet.",
			Parameters: map[string]interface{}{
				"type": "object",
				"properties": map[string]interface{}{
					"query": map[string]interface{}{
						"type":        "string",
						"description": "Search query",
					},
				},
				"required": []string{"query"},
			},
		},
	}
}
diff --git a/play-life-llm/internal/server/server.go b/play-life-llm/internal/server/server.go new file mode 100644 index 0000000..e0d035f --- /dev/null +++ b/play-life-llm/internal/server/server.go @@ -0,0 +1,35 @@
package server

import (
	"net/http"

	"play-life-llm/internal/handler"
	"play-life-llm/internal/ollama"
	"play-life-llm/internal/tavily"

	"github.com/gorilla/mux"
)

// Config holds server and client configuration.
+type Config struct { + OllamaHost string + TavilyAPIKey string + DefaultModel string +} + +// NewRouter returns an HTTP router with /health and /ask registered. +func NewRouter(cfg Config) http.Handler { + ollamaClient := ollama.NewClient(cfg.OllamaHost) + tavilyClient := tavily.NewClient(cfg.TavilyAPIKey) + + askHandler := &handler.AskHandler{ + Ollama: ollamaClient, + Tavily: tavilyClient, + DefaultModel: cfg.DefaultModel, + } + + r := mux.NewRouter() + r.HandleFunc("/health", handler.Health).Methods(http.MethodGet) + r.Handle("/ask", askHandler).Methods(http.MethodPost) + return r +} diff --git a/play-life-llm/internal/tavily/client.go b/play-life-llm/internal/tavily/client.go new file mode 100644 index 0000000..45348f0 --- /dev/null +++ b/play-life-llm/internal/tavily/client.go @@ -0,0 +1,104 @@ +package tavily + +import ( + "bytes" + "encoding/json" + "fmt" + "net/http" + "time" +) + +const ( + baseURL = "https://api.tavily.com" + searchPath = "/search" + timeout = 30 * time.Second +) + +// Client calls Tavily Search API. +type Client struct { + APIKey string + HTTPClient *http.Client +} + +// NewClient creates a Tavily client. apiKey is required for search. +func NewClient(apiKey string) *Client { + return &Client{ + APIKey: apiKey, + HTTPClient: &http.Client{ + Timeout: timeout, + }, + } +} + +// SearchRequest is the POST body for /search. +type SearchRequest struct { + Query string `json:"query"` + SearchDepth string `json:"search_depth,omitempty"` // basic, advanced, etc. + MaxResults int `json:"max_results,omitempty"` +} + +// SearchResult is one result item. +type SearchResult struct { + Title string `json:"title"` + URL string `json:"url"` + Content string `json:"content"` +} + +// SearchResponse is the Tavily search response. 
+type SearchResponse struct { + Query string `json:"query"` + Answer string `json:"answer,omitempty"` + Results []SearchResult `json:"results"` +} + +// Search runs a web search and returns a single text suitable for passing to Ollama as tool result. +func (c *Client) Search(query string) (string, error) { + if c.APIKey == "" { + return "", fmt.Errorf("tavily: API key not set") + } + body, err := json.Marshal(SearchRequest{ + Query: query, + MaxResults: 5, + }) + if err != nil { + return "", fmt.Errorf("marshal request: %w", err) + } + + url := baseURL + searchPath + req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body)) + if err != nil { + return "", fmt.Errorf("new request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer "+c.APIKey) + + resp, err := c.HTTPClient.Do(req) + if err != nil { + return "", fmt.Errorf("do request: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("tavily returned %d", resp.StatusCode) + } + + var out SearchResponse + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + return "", fmt.Errorf("decode response: %w", err) + } + + // Build a single text for the model: prefer answer if present, else concatenate results. 
+ if out.Answer != "" { + return out.Answer, nil + } + var b bytes.Buffer + for i, r := range out.Results { + if i > 0 { + b.WriteString("\n\n") + } + b.WriteString(r.Title) + b.WriteString(": ") + b.WriteString(r.Content) + } + return b.String(), nil +} diff --git a/play-life-llm/main.go b/play-life-llm/main.go new file mode 100644 index 0000000..24891ea --- /dev/null +++ b/play-life-llm/main.go @@ -0,0 +1,36 @@ +package main + +import ( + "log" + "net/http" + "os" + + "play-life-llm/internal/server" +) + +func main() { + ollamaHost := getEnv("OLLAMA_HOST", "http://localhost:11434") + tavilyAPIKey := getEnv("TAVILY_API_KEY", "") + port := getEnv("PORT", "8090") + defaultModel := getEnv("OLLAMA_MODEL", "llama3.1:70b") + + cfg := server.Config{ + OllamaHost: ollamaHost, + TavilyAPIKey: tavilyAPIKey, + DefaultModel: defaultModel, + } + router := server.NewRouter(cfg) + + addr := ":" + port + log.Printf("play-life-llm listening on %s", addr) + if err := http.ListenAndServe(addr, router); err != nil { + log.Fatal(err) + } +} + +func getEnv(key, defaultVal string) string { + if v := os.Getenv(key); v != "" { + return v + } + return defaultVal +} diff --git a/play-life-web/.dockerignore b/play-life-web/.dockerignore new file mode 100644 index 0000000..7547f40 --- /dev/null +++ b/play-life-web/.dockerignore @@ -0,0 +1,12 @@ +node_modules +dist +.git +.gitignore +README.md +.env +.env.local +.DS_Store +*.log +.vscode +.idea + diff --git a/play-life-web/.gitignore b/play-life-web/.gitignore new file mode 100644 index 0000000..29a4216 --- /dev/null +++ b/play-life-web/.gitignore @@ -0,0 +1,30 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Environment variables +.env +.env.local +.env.*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? 

diff --git a/play-life-web/Dockerfile b/play-life-web/Dockerfile new file mode 100644 index 0000000..37459ad --- /dev/null +++ b/play-life-web/Dockerfile @@ -0,0 +1,30 @@
# Build stage
FROM node:20-alpine AS builder

WORKDIR /app

# Copy package files
COPY package*.json ./

# Install dependencies
RUN npm ci

# Copy source code
COPY . .

# Build the application
RUN npm run build

# Production stage
FROM nginx:alpine

# Copy built files from builder
COPY --from=builder /app/dist /usr/share/nginx/html

# Copy nginx configuration
COPY nginx.conf /etc/nginx/conf.d/default.conf

EXPOSE 80

CMD ["nginx", "-g", "daemon off;"]

diff --git a/play-life-web/README.md b/play-life-web/README.md new file mode 100644 index 0000000..d352a44 --- /dev/null +++ b/play-life-web/README.md @@ -0,0 +1,105 @@
# PlayLifeWeb

Веб-приложение для отображения статистики проектов.

## Возможности

- **Текущая неделя**: Отображение статистики на текущий момент с ProgressBar для каждого проекта
- **Полная статистика**: График нарастающей статистики по всем проектам

## Технологии

- React 18
- Vite
- Chart.js (react-chartjs-2)
- Tailwind CSS
- Docker

## Установка и запуск

### Локальная разработка

1. Установите зависимости:
```bash
npm install
```

2. Запустите dev-сервер:
```bash
npm run dev
```

Приложение будет доступно по адресу `http://localhost:3000`

### Сборка для production

```bash
npm run build
```

### Запуск через Docker

1. Создайте файл `.env` в корне проекта (можно скопировать из `env.example`):
```bash
cp env.example .env
```

2. Соберите образ:
```bash
docker-compose build
```

3. Запустите контейнер:
```bash
docker-compose up -d
```

Приложение будет доступно по адресу `http://localhost:3000`

**Примечание:** API запросы автоматически проксируются к бэкенду через nginx. Не требуется настройка URL API.

### Остановка Docker контейнера

```bash
docker-compose down
```

## Структура проекта

```
play-life-web/
├── src/
│   ├── components/
│   │   ├── CurrentWeek.jsx         # Компонент текущей недели
│   │   ├── FullStatistics.jsx      # Компонент полной статистики
│   │   └── ProjectProgressBar.jsx  # Компонент ProgressBar
│   ├── App.jsx                     # Главный компонент приложения
│   ├── main.jsx                    # Точка входа
│   └── index.css                   # Глобальные стили
├── Dockerfile                      # Docker образ
├── docker-compose.yml              # Docker Compose конфигурация
├── nginx.conf                      # Nginx конфигурация
└── package.json                    # Зависимости проекта
```

## API Endpoints

Приложение использует относительные пути для API запросов. Проксирование настроено автоматически:

- **Development**: Vite dev server проксирует запросы к `http://localhost:8080`
- **Production**: Nginx проксирует запросы к бэкенд контейнеру

Endpoints, которые используются:
- `/playlife-feed` - данные текущей недели
- `/d2dc349a-0d13-49b2-a8f0-1ab094bfba9b` - полная статистика
- `/projects` - список проектов
- `/project/priority` - обновление приоритетов проектов
- `/api/*` - остальные API endpoints (слова, конфигурации, тесты)

## Особенности реализации

- ProgressBar отображает текущее значение (`total_score`) и выделяет диапазон целей (`min_goal_score` - `max_goal_score`)
- График полной статистики показывает нарастающую сумму баллов по неделям
- Все проекты отображаются на одном графике с разными цветами
- Адаптивный дизайн для различных размеров экранов

diff --git a/play-life-web/build-and-save.sh b/play-life-web/build-and-save.sh new file mode 100644 index 0000000..d2a7fff --- /dev/null +++ b/play-life-web/build-and-save.sh @@ -0,0 +1,42 @@
#!/bin/bash

# Add the Docker Desktop bundled CLI to PATH (macOS install location).
export PATH="/Applications/Docker.app/Contents/Resources/bin:$PATH"

echo "Ожидание запуска Docker daemon..."
# Wait up to 60 seconds for the Docker daemon to become responsive.
for i in {1..60}; do
    if docker ps >/dev/null 2>&1; then
        echo "Docker daemon запущен!"
        break
    fi
    if [ $i -eq 60 ]; then
        echo "Ошибка: Docker daemon не запустился. Пожалуйста, запустите Docker Desktop вручную."
        exit 1
    fi
    sleep 1
done

echo "Сборка Docker образа..."
docker build \
    -t play-life-web:latest .

# NOTE(review): checking $? after the command works but is fragile — any
# statement inserted between the command and the check would clobber it.
# Consider `if docker build ...; then` directly. TODO confirm before changing.
if [ $? -eq 0 ]; then
    echo "Образ успешно собран!"
    echo "Сохранение образа в play-life-web.tar..."
    docker save play-life-web:latest -o play-life-web.tar

    if [ $? -eq 0 ]; then
        echo "Образ успешно сохранен в play-life-web.tar"
        ls -lh play-life-web.tar
    else
        echo "Ошибка при сохранении образа"
        exit 1
    fi
else
    echo "Ошибка при сборке образа"
    exit 1
fi


diff --git a/play-life-web/build-docker-image.sh b/play-life-web/build-docker-image.sh new file mode 100644 index 0000000..2e4834a --- /dev/null +++ b/play-life-web/build-docker-image.sh @@ -0,0 +1,29 @@
#!/bin/bash

# Script that builds the Docker image and saves it to a .tar file.

IMAGE_NAME="play-life-web"
IMAGE_TAG="latest"
TAR_FILE="play-life-web.tar"

echo "Сборка Docker образа..."
docker build \
    -t "$IMAGE_NAME:$IMAGE_TAG" .

if [ $? -eq 0 ]; then
    echo "Образ успешно собран!"
    echo "Сохранение образа в $TAR_FILE..."
    docker save "$IMAGE_NAME:$IMAGE_TAG" -o "$TAR_FILE"

    if [ $? -eq 0 ]; then
        echo "Образ успешно сохранен в $TAR_FILE"
        ls -lh "$TAR_FILE"
    else
        echo "Ошибка при сохранении образа"
        exit 1
    fi
else
    echo "Ошибка при сборке образа"
    exit 1
fi

diff --git a/play-life-web/docker-compose.yml b/play-life-web/docker-compose.yml new file mode 100644 index 0000000..e99d136 --- /dev/null +++ b/play-life-web/docker-compose.yml @@ -0,0 +1,21 @@
version: '3.8'

services:
  play-life-web:
    build:
      context: .
+ dockerfile: Dockerfile + container_name: play-life-web + ports: + - "${WEB_PORT:-3000}:80" + restart: unless-stopped + networks: + - play-life-network + env_file: + - ../.env + - .env # Локальный .env имеет приоритет + +networks: + play-life-network: + driver: bridge + diff --git a/play-life-web/env.example b/play-life-web/env.example new file mode 100644 index 0000000..1e7add8 --- /dev/null +++ b/play-life-web/env.example @@ -0,0 +1,6 @@ +# API URLs для PlayLifeWeb +# Скопируйте этот файл в .env и укажите свои значения + +# Play Life Web Port (по умолчанию: 3000) +WEB_PORT=3000 + diff --git a/play-life-web/generate-icons.cjs b/play-life-web/generate-icons.cjs new file mode 100644 index 0000000..df8a33b --- /dev/null +++ b/play-life-web/generate-icons.cjs @@ -0,0 +1,84 @@ +// Скрипт для генерации базовых PWA иконок +// Требует: npm install sharp + +const sharp = require('sharp'); +const fs = require('fs'); +const path = require('path'); + +const publicDir = path.join(__dirname, 'public'); + +// Создаем SVG шаблон для обычной иконки (со скругленными углами) +const createIconSVG = (size) => ` + + + + + + + + + + + + +`; + +// Создаем SVG шаблон для maskable иконки (без скругления, контент в безопасной зоне 80%) +const createMaskableIconSVG = (size) => ` + + + + + + + + + + + + +`; + +async function generateIcons() { + // Создаем базовые SVG + const baseSVG = createIconSVG(512); + const svgBuffer = Buffer.from(baseSVG); + + const maskableSVG = createMaskableIconSVG(512); + const maskableSvgBuffer = Buffer.from(maskableSVG); + + // Генерируем иконки разных размеров + const sizes = [ + { name: 'favicon-new.ico', size: 32 }, + { name: 'apple-touch-icon.png', size: 180 }, + { name: 'pwa-192x192.png', size: 192 }, + { name: 'pwa-512x512.png', size: 512 }, + { name: 'pwa-maskable-192x192.png', size: 192, maskable: true }, + { name: 'pwa-maskable-512x512.png', size: 512, maskable: true } + ]; + + for (const icon of sizes) { + // Для maskable иконок используем специальный 
SVG с контентом в безопасной зоне + const sourceBuffer = icon.maskable ? maskableSvgBuffer : svgBuffer; + const image = sharp(sourceBuffer).resize(icon.size, icon.size); + + const outputPath = path.join(publicDir, icon.name); + await image.png().toFile(outputPath); + console.log(`✓ Создана иконка: ${icon.name} (${icon.size}x${icon.size})`); + } + + console.log('\n✓ Все иконки успешно созданы!'); +} + +// Проверяем наличие sharp +try { + require('sharp'); + generateIcons().catch(console.error); +} catch (e) { + console.log('Для генерации иконок необходимо установить sharp:'); + console.log('npm install sharp --save-dev'); + console.log('\nИли создайте иконки вручную используя онлайн генераторы:'); + console.log('- https://realfavicongenerator.net/'); + console.log('- https://www.pwabuilder.com/imageGenerator'); +} + diff --git a/play-life-web/index.html b/play-life-web/index.html new file mode 100644 index 0000000..96b57da --- /dev/null +++ b/play-life-web/index.html @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + PlayLife + + +




diff --git a/play-life-web/nginx.conf b/play-life-web/nginx.conf new file mode 100644 index 0000000..6ce1a5d --- /dev/null +++ b/play-life-web/nginx.conf @@ -0,0 +1,111 @@
server {
    listen 80;
    server_name localhost;
    root /usr/share/nginx/html;
    index index.html;

    # Gzip compression
    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json;

    # Proxy API requests to backend
    location /api/ {
        proxy_pass http://backend:8080;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_cache_bypass $http_upgrade;
    }

    # Proxy admin panel to backend (must be before location /)
    location ^~ /admin {
        proxy_pass http://backend:8080;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_cache_bypass $http_upgrade;
    }

    # Proxy project endpoints to backend (must be before location /)
    location ^~ /project/ {
        proxy_pass http://backend:8080;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_cache_bypass $http_upgrade;
    }

    # Proxy other API endpoints to backend
    location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|webhook/|weekly_goals/setup|project_score_sample_mv/refresh)$ {
        proxy_pass http://backend:8080;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_cache_bypass $http_upgrade;
    }

    # Service worker must be served without caching so clients pick up updates immediately
    location /sw.js {
        add_header Cache-Control "no-cache";
        expires 0;
    }

    # Manifest is also served without long-term caching
    location /manifest.webmanifest {
        add_header Cache-Control "no-cache";
        expires 0;
    }

    # Serve uploaded files (wishlist images) — proxied through the backend
    location ^~ /uploads/ {
        proxy_pass http://backend:8080;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        expires 30d;
        add_header Cache-Control "public, immutable";
    }

    # Static HTML pages (Terms and Privacy)
    location = /terms {
        try_files /terms.html =404;
        add_header Cache-Control "public, max-age=3600";
    }

    location = /privacy {
        try_files /privacy.html =404;
        add_header Cache-Control "public, max-age=3600";
    }

    # Handle React Router (SPA)
    location / {
        try_files $uri $uri/ /index.html;
    }

    # Cache static assets
    location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
        expires 1y;
        add_header Cache-Control "public, immutable";
    }
}

diff --git a/play-life-web/package-lock.json b/play-life-web/package-lock.json new file mode 100644 index 0000000..29486a9 --- /dev/null +++ b/play-life-web/package-lock.json @@ -0,0 +1,7948 @@
{
  "name": "play-life-web",
  "version": "4.17.1",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
"": { + "name": "play-life-web", + "version": "4.17.1", + "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "chart.js": "^4.4.0", + "react": "^18.2.0", + "react-chartjs-2": "^5.2.0", + "react-circular-progressbar": "^2.2.0", + "react-day-picker": "^9.13.0", + "react-dom": "^18.2.0", + "react-easy-crop": "^5.5.6" + }, + "devDependencies": { + "@types/react": "^18.2.43", + "@types/react-dom": "^18.2.17", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.16", + "postcss": "^8.4.32", + "sharp": "^0.34.5", + "tailwindcss": "^3.3.6", + "vite": "^5.0.8", + "vite-plugin-pwa": "^1.2.0" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@apideck/better-ajv-errors": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.6.tgz", + "integrity": "sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-schema": "^0.4.0", + "jsonpointer": "^5.0.0", + "leven": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "ajv": ">=8" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, 
+ "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", + "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", + "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.28.3", + "@babel/helpers": "^7.28.4", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": 
"https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.5.tgz", + "integrity": "sha512-q3WC4JfdODypvxArsJQROfupPBq9+lMwjKq7C33GhbFYJsufD0yd/ziwD+hJucLeWsnFPWZjsU2DNFqBPE7jwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.28.5", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.5", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-create-regexp-features-plugin": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.28.5.tgz", + "integrity": 
"sha512-N1EhvLtHzOvj7QQOUCCS3NrPJP8c5W6ZXCHDn7Yialuy1iu4r5EmIYkXlKNqT99Ciw+W0mDqWoR6HWMZlFP3hw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "regexpu-core": "^6.3.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", + "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-plugin-utils": "^7.27.1", + "debug": "^4.4.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.22.10" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz", + "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", + "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", + "integrity": "sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.27.1.tgz", + 
"integrity": "sha512-7fiA521aVw8lSPeI4ZOD3vRFkoqkJcS+z4hFo82bFSH/2tNd6eJ5qCVMS5OzDmZh/kaHQeBaeyxK6wljcPtveA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.1", + "@babel/helper-wrap-function": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz", + "integrity": "sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", + "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.28.3.tgz", + "integrity": "sha512-zdf983tNfLZFletc0RRXYrHrucBEg95NIFMkn6K9dbeMYnsgHaSBGcQqdsCSStG2PYwRre0Qc2NNSCXbG+xc6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.3", + "@babel/types": "^7.28.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.5" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": 
">=6.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.28.5.tgz", + "integrity": "sha512-87GDMS3tsmMSi/3bWOte1UblL+YUTFMV8SZPZ2eSEL17s74Cw/l63rR6NmGVKMYW2GYi85nE+/d6Hw5N0bEk2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.27.1.tgz", + "integrity": "sha512-qNeq3bCKnGgLkEXUuFry6dPlGfCdQNZbn7yUAPCInwAJHMU7THJfrBSozkcWq5sNM6RcF3S8XyQL2A52KNR9IA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.27.1.tgz", + "integrity": "sha512-g4L7OYun04N1WyqMNjldFwlfPCLVkgB54A/YCXICZYBsvJJE3kByKv9c9+R/nAfmIfjl2rKYLNyMHboYbZaWaA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.27.1.tgz", + "integrity": "sha512-oO02gcONcD5O1iTLi/6frMJBIwWEHceWGSGqrpCmEL8nogiS6J9PBlE48CaK20/Jx1LuRml9aDftLgdjXT8+Cw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/plugin-transform-optional-chaining": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.13.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.28.3.tgz", + "integrity": "sha512-b6YTX108evsvE4YgWyQ921ZAFFQm3Bn+CA3+ZXlNVnPhx+UfsVURoPjfGAPCjBgrqo30yX/C2nZGX96DxvR9Iw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-proposal-private-property-in-object": { + "version": "7.21.0-placeholder-for-preset-env.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", + "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.27.1.tgz", + 
"integrity": "sha512-UT/Jrhw57xg4ILHLFnzFpPDlMbcdEicaAtjPQpbj9wa8T4r5KVWCimHcL/460g8Ht0DMxDyjsLgiWSkVjnwPFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", + "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-unicode-sets-regex": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", + "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.27.1.tgz", + "integrity": "sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-transform-async-generator-functions": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.28.0.tgz", + "integrity": "sha512-BEOdvX4+M765icNPZeidyADIvQ1m1gmunXufXxvRESy/jNNyfovIqUyE7MVgGBjWktCoJlzvFA1To2O4ymIO3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-remap-async-to-generator": "^7.27.1", + "@babel/traverse": "^7.28.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-to-generator": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.27.1.tgz", + "integrity": "sha512-NREkZsZVJS4xmTr8qzE5y8AfIPqsdQfRuUiLRTEzb7Qii8iFWCyDKaUV2c0rCuh4ljDZ98ALHP/PetiBV2nddA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-remap-async-to-generator": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.27.1.tgz", + "integrity": "sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.28.5", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.5.tgz", + "integrity": "sha512-45DmULpySVvmq9Pj3X9B+62Xe+DJGov27QravQJU1LLcapR6/10i+gYVAucGGJpHBp5mYxIMK4nDAT/QDLr47g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-properties": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.27.1.tgz", + "integrity": "sha512-D0VcalChDMtuRvJIu3U/fwWjf8ZMykz5iZsg77Nuj821vCKI3zCyRLwRdWbsuJ/uRwZhZ002QtCqIkwC/ZkvbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.28.3.tgz", + "integrity": "sha512-LtPXlBbRoc4Njl/oh1CeD/3jC+atytbnf/UqLoqTDcEYGUPj022+rvfkbDYieUrSj3CaV4yHDByPE+T2HwfsJg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.28.3", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.4.tgz", + "integrity": "sha512-cFOlhIYPBv/iBoc+KS3M6et2XPtbT2HiCRfBXWtfpc9OAyostldxIf9YAYB6ypURBBbx+Qv6nyrLzASfJe+hBA==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-globals": "^7.28.0", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/traverse": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.27.1.tgz", + "integrity": "sha512-lj9PGWvMTVksbWiDT2tW68zGS/cyo4AkZ/QTp0sQT0mjPopCmrSkzxeXkznjqBxzDI6TclZhOJbBmbBLjuOZUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/template": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.5.tgz", + "integrity": "sha512-Kl9Bc6D0zTUcFUvkNuQh4eGXPKKNDOJQXVyyM4ZAQPMveniJdxi8XMJwLo+xSoW3MIq81bD33lcUe9kZpl0MCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-dotall-regex": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.27.1.tgz", + "integrity": "sha512-gEbkDVGRvjj7+T1ivxrfgygpT7GUd4vmODtYpbs0gZATdkX8/iSnOtZSxiZnsgm1YjTgjI6VKBGSJJevkrclzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": 
">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-keys": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.27.1.tgz", + "integrity": "sha512-MTyJk98sHvSs+cvZ4nOauwTTG1JeonDjSGvGGUNHreGQns+Mpt6WX/dVzWBHgg+dYZhkC4X+zTDfkTU+Vy9y7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.27.1.tgz", + "integrity": "sha512-hkGcueTEzuhB30B3eJCbCYeCaaEQOmQR0AdvzpD4LoN0GXMWzzGSuRrxR2xTnCrvNbVwK9N6/jQ92GSLfiZWoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-dynamic-import": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.27.1.tgz", + "integrity": "sha512-MHzkWQcEmjzzVW9j2q8LGjwGWpG2mjwaaB0BNQwst3FIjqsg8Ct/mIZlvSPJvfi9y2AC8mi/ktxbFVL9pZ1I4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-explicit-resource-management": { + "version": "7.28.0", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-explicit-resource-management/-/plugin-transform-explicit-resource-management-7.28.0.tgz", + "integrity": "sha512-K8nhUcn3f6iB+P3gwCv/no7OdzOZQcKchW6N389V6PD8NUWKZHzndOd9sPDVbMoBsbmjMqlB4L9fm+fEFNVlwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/plugin-transform-destructuring": "^7.28.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-exponentiation-operator": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.28.5.tgz", + "integrity": "sha512-D4WIMaFtwa2NizOp+dnoFjRez/ClKiC2BqqImwKd1X28nqBtZEyCYJ2ozQrrzlxAFrcrjxo39S6khe9RNDlGzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-export-namespace-from": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.27.1.tgz", + "integrity": "sha512-tQvHWSZ3/jH2xuq/vZDy0jNn+ZdXJeM8gHvX4lnJmsc3+50yPlWdZXIc5ay+umX+2/tJIqHqiEqcJvxlmIvRvQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.27.1.tgz", + "integrity": "sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.27.1.tgz", + "integrity": "sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-json-strings": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.27.1.tgz", + "integrity": "sha512-6WVLVJiTjqcQauBhn1LkICsR2H+zm62I3h9faTDKt1qP4jn2o72tSvqMwtGFKGTpojce0gJs+76eZ2uCHRZh0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.27.1.tgz", + "integrity": "sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-logical-assignment-operators": { + "version": "7.28.5", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.28.5.tgz", + "integrity": "sha512-axUuqnUTBuXyHGcJEVVh9pORaN6wC5bYfE7FGzPiaWa3syib9m7g+/IT/4VgCOe2Upef43PHzeAvcrVek6QuuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.27.1.tgz", + "integrity": "sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-amd": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.27.1.tgz", + "integrity": "sha512-iCsytMg/N9/oFq6n+gFTvUYDZQOMK5kEdeYxmxt91fcJGycfxVP9CnrxoliM0oumFERba2i8ZtwRUCMhvP1LnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz", + "integrity": "sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.28.5.tgz", + "integrity": "sha512-vn5Jma98LCOeBy/KpeQhXcV2WZgaRUtjwQmjoBuLNlOmkg0fB5pdvYVeWRYI69wWKwK2cD1QbMiUQnoujWvrew==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.28.3", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-umd": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.27.1.tgz", + "integrity": "sha512-iQBE/xC5BV1OxJbp6WG7jq9IWiD+xxlZhLrdwpPkTX3ydmXdvoCpyfJN7acaIBZaOqTfr76pgzqBJflNbeRK+w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.27.1.tgz", + "integrity": "sha512-SstR5JYy8ddZvD6MhV0tM/j16Qds4mIpJTOd1Yu9J9pJjH93bxHECF7pgtc28XvkzTD6Pxcm/0Z73Hvk7kb3Ng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + 
"peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-new-target": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.27.1.tgz", + "integrity": "sha512-f6PiYeqXQ05lYq3TIfIDu/MtliKUbNwkGApPUvyo6+tc7uaR4cPjPe7DFPr15Uyycg2lZU6btZ575CuQoYh7MQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.27.1.tgz", + "integrity": "sha512-aGZh6xMo6q9vq1JGcw58lZ1Z0+i0xB2x0XaauNIUXd6O1xXc3RwoWEBlsTQrY4KQ9Jf0s5rgD6SiNkaUdJegTA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-numeric-separator": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.27.1.tgz", + "integrity": "sha512-fdPKAcujuvEChxDBJ5c+0BTaS6revLV7CJL08e4m3de8qJfNIuCc2nc7XJYOjBoTMJeqSmwXJ0ypE14RCjLwaw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.28.4.tgz", + "integrity": 
"sha512-373KA2HQzKhQCYiRVIRr+3MjpCObqzDlyrM6u4I201wL8Mp2wHf7uB8GhDwis03k2ti8Zr65Zyyqs1xOxUF/Ew==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/plugin-transform-destructuring": "^7.28.0", + "@babel/plugin-transform-parameters": "^7.27.7", + "@babel/traverse": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.27.1.tgz", + "integrity": "sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-catch-binding": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.27.1.tgz", + "integrity": "sha512-txEAEKzYrHEX4xSZN4kJ+OfKXFVSWKB2ZxM9dpcE3wT7smwkNmXo5ORRlVzMVdJbD+Q8ILTgSD7959uj+3Dm3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-chaining": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.28.5.tgz", + "integrity": "sha512-N6fut9IZlPnjPwgiQkXNhb+cT8wQKFlJNqcZkWlcTqkcqx6/kU4ynGmLFoa4LViBSirn05YAwk+sQBbPfxtYzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.27.7.tgz", + "integrity": "sha512-qBkYTYCb76RRxUM6CcZA5KRu8K4SM8ajzVeUgVdMVO9NN9uI/GaVmBg/WKJJGnNokV9SY8FxNOVWGXzqzUidBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-methods": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.27.1.tgz", + "integrity": "sha512-10FVt+X55AjRAYI9BrdISN9/AQWHqldOeZDUoLyif1Kn05a56xVBXb8ZouL8pZ9jem8QpXaOt8TS7RHUIS+GPA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.27.1.tgz", + "integrity": "sha512-5J+IhqTi1XPa0DXF83jYOaARrX+41gOewWbkPyjMNRDqgOCqdffGh8L3f/Ek5utaEBZExjSAzcyjmV9SSAWObQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.1", + "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-transform-property-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.27.1.tgz", + "integrity": "sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-regenerator": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.28.4.tgz", + "integrity": "sha512-+ZEdQlBoRg9m2NnzvEeLgtvBMO4tkFBw5SQIUgLICgTrumLoU7lr+Oghi6km2PFj+dbUt2u1oby2w3BDO9YQnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": 
"^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-regexp-modifiers": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.27.1.tgz", + "integrity": "sha512-TtEciroaiODtXvLZv4rmfMhkCv8jx3wgKpL68PuiPh2M4fvz5jhsA7697N1gMvkvr/JTF13DrFYyEbY9U7cVPA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-reserved-words": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.27.1.tgz", + "integrity": "sha512-V2ABPHIJX4kC7HegLkYoDpfg9PVmuWy/i6vUM5eGK22bx4YVFD3M5F0QQnWQoDs6AGsUWTVOopBiMFQgHaSkVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-shorthand-properties": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.27.1.tgz", + "integrity": "sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-spread": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.27.1.tgz", + "integrity": 
"sha512-kpb3HUqaILBJcRFVhFUs6Trdd4mkrzcGXss+6/mxUd273PfbWqSDHRzMT2234gIg2QYfAjvXLSquP1xECSg09Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-sticky-regex": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.27.1.tgz", + "integrity": "sha512-lhInBO5bi/Kowe2/aLdBAawijx+q1pQzicSgnkB6dUPc1+RC8QmJHKf2OjvU+NZWitguJHEaEmbV6VWEouT58g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-template-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.27.1.tgz", + "integrity": "sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typeof-symbol": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.27.1.tgz", + "integrity": "sha512-RiSILC+nRJM7FY5srIyc4/fGIwUhyDuuBSdWn4y6yT6gm652DpCHZjIipgn6B7MQ1ITOUnAKWixEUjQRIBIcLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-transform-unicode-escapes": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.27.1.tgz", + "integrity": "sha512-Ysg4v6AmF26k9vpfFuTZg8HRfVWzsh1kVfowA23y9j/Gu6dOuahdUVhkLqpObp3JIv27MLSii6noRnuKN8H0Mg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-property-regex": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.27.1.tgz", + "integrity": "sha512-uW20S39PnaTImxp39O5qFlHLS9LJEmANjMG7SxIhap8rCHqu0Ik+tLEPX5DKmHn6CsWQ7j3lix2tFOa5YtL12Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-regex": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.27.1.tgz", + "integrity": "sha512-xvINq24TRojDuyt6JGtHmkVkrfVV3FPT16uytxImLeBZqW3/H52yN+kM1MGuyPkIQxrzKwPHs5U/MP3qKyzkGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-sets-regex": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.27.1.tgz", + "integrity": 
"sha512-EtkOujbc4cgvb0mlpQefi4NTPBzhSIevblFevACNLUspmrALgmEBdL/XfnyyITfd8fKBZrZys92zOWcik7j9Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/preset-env": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.28.5.tgz", + "integrity": "sha512-S36mOoi1Sb6Fz98fBfE+UZSpYw5mJm0NUHtIKrOuNcqeFauy1J6dIvXm2KRVKobOSaGq4t/hBXdN4HGU3wL9Wg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.5", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-validator-option": "^7.27.1", + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.28.5", + "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.27.1", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.27.1", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.27.1", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.28.3", + "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", + "@babel/plugin-syntax-import-assertions": "^7.27.1", + "@babel/plugin-syntax-import-attributes": "^7.27.1", + "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", + "@babel/plugin-transform-arrow-functions": "^7.27.1", + "@babel/plugin-transform-async-generator-functions": "^7.28.0", + "@babel/plugin-transform-async-to-generator": "^7.27.1", + "@babel/plugin-transform-block-scoped-functions": "^7.27.1", + "@babel/plugin-transform-block-scoping": "^7.28.5", + "@babel/plugin-transform-class-properties": "^7.27.1", + "@babel/plugin-transform-class-static-block": "^7.28.3", + "@babel/plugin-transform-classes": "^7.28.4", + 
"@babel/plugin-transform-computed-properties": "^7.27.1", + "@babel/plugin-transform-destructuring": "^7.28.5", + "@babel/plugin-transform-dotall-regex": "^7.27.1", + "@babel/plugin-transform-duplicate-keys": "^7.27.1", + "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.27.1", + "@babel/plugin-transform-dynamic-import": "^7.27.1", + "@babel/plugin-transform-explicit-resource-management": "^7.28.0", + "@babel/plugin-transform-exponentiation-operator": "^7.28.5", + "@babel/plugin-transform-export-namespace-from": "^7.27.1", + "@babel/plugin-transform-for-of": "^7.27.1", + "@babel/plugin-transform-function-name": "^7.27.1", + "@babel/plugin-transform-json-strings": "^7.27.1", + "@babel/plugin-transform-literals": "^7.27.1", + "@babel/plugin-transform-logical-assignment-operators": "^7.28.5", + "@babel/plugin-transform-member-expression-literals": "^7.27.1", + "@babel/plugin-transform-modules-amd": "^7.27.1", + "@babel/plugin-transform-modules-commonjs": "^7.27.1", + "@babel/plugin-transform-modules-systemjs": "^7.28.5", + "@babel/plugin-transform-modules-umd": "^7.27.1", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.27.1", + "@babel/plugin-transform-new-target": "^7.27.1", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.27.1", + "@babel/plugin-transform-numeric-separator": "^7.27.1", + "@babel/plugin-transform-object-rest-spread": "^7.28.4", + "@babel/plugin-transform-object-super": "^7.27.1", + "@babel/plugin-transform-optional-catch-binding": "^7.27.1", + "@babel/plugin-transform-optional-chaining": "^7.28.5", + "@babel/plugin-transform-parameters": "^7.27.7", + "@babel/plugin-transform-private-methods": "^7.27.1", + "@babel/plugin-transform-private-property-in-object": "^7.27.1", + "@babel/plugin-transform-property-literals": "^7.27.1", + "@babel/plugin-transform-regenerator": "^7.28.4", + "@babel/plugin-transform-regexp-modifiers": "^7.27.1", + "@babel/plugin-transform-reserved-words": "^7.27.1", + 
"@babel/plugin-transform-shorthand-properties": "^7.27.1", + "@babel/plugin-transform-spread": "^7.27.1", + "@babel/plugin-transform-sticky-regex": "^7.27.1", + "@babel/plugin-transform-template-literals": "^7.27.1", + "@babel/plugin-transform-typeof-symbol": "^7.27.1", + "@babel/plugin-transform-unicode-escapes": "^7.27.1", + "@babel/plugin-transform-unicode-property-regex": "^7.27.1", + "@babel/plugin-transform-unicode-regex": "^7.27.1", + "@babel/plugin-transform-unicode-sets-regex": "^7.27.1", + "@babel/preset-modules": "0.1.6-no-external-plugins", + "babel-plugin-polyfill-corejs2": "^0.4.14", + "babel-plugin-polyfill-corejs3": "^0.13.0", + "babel-plugin-polyfill-regenerator": "^0.6.5", + "core-js-compat": "^3.43.0", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-modules": { + "version": "0.1.6-no-external-plugins", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", + "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.5", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@date-fns/tz": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@date-fns/tz/-/tz-1.4.1.tgz", + "integrity": "sha512-P5LUNhtbj6YfI3iJjw5EL9eUAG6OitD0W3fWQcpQjDRc/QIsL0tRNuO1PcDvPccWL1fSTXXdE1ds+l95DV/OFA==", + "license": "MIT" + }, + "node_modules/@dnd-kit/accessibility": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz", + "integrity": "sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/core": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/core/-/core-6.3.1.tgz", + 
"integrity": "sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "@dnd-kit/accessibility": "^3.1.1", + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/sortable": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/sortable/-/sortable-10.0.0.tgz", + "integrity": "sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==", + "license": "MIT", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.3.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/utilities": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@dnd-kit/utilities/-/utilities-3.2.2.tgz", + "integrity": "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + 
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + 
"node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": 
"sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@img/colour": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.0.0.tgz", + "integrity": "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@img/sharp-darwin-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", + "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, 
+ "optionalDependencies": { + "@img/sharp-libvips-darwin-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-darwin-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", + "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-libvips-darwin-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", + "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-darwin-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", + "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", + "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", + "cpu": [ + "arm" + ], + "dev": true, + 
"license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", + "integrity": "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-ppc64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.2.4.tgz", + "integrity": "sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-riscv64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-riscv64/-/sharp-libvips-linux-riscv64-1.2.4.tgz", + "integrity": "sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-s390x": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.4.tgz", + "integrity": "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "LGPL-3.0-or-later", 
+ "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", + "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", + "integrity": "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", + "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-linux-arm": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", + "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + 
"node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", + "integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-ppc64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-ppc64/-/sharp-linux-ppc64-0.34.5.tgz", + "integrity": "sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-ppc64": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-riscv64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-riscv64/-/sharp-linux-riscv64-0.34.5.tgz", + "integrity": "sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-riscv64": "1.2.4" 
+ } + }, + "node_modules/@img/sharp-linux-s390x": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.5.tgz", + "integrity": "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-s390x": "1.2.4" + } + }, + "node_modules/@img/sharp-linux-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", + "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-linuxmusl-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", + "integrity": "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" + } + }, + "node_modules/@img/sharp-linuxmusl-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", + 
"integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-wasm32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.5.tgz", + "integrity": "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", + "optional": true, + "dependencies": { + "@emnapi/runtime": "^1.7.0" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.5.tgz", + "integrity": "sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-ia32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.5.tgz", + "integrity": "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 
|| ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", + "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@isaacs/balanced-match": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", + "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.11", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.11.tgz", + "integrity": "sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + 
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@kurkle/color": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz", + "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==", + "license": "MIT" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": 
"sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/plugin-node-resolve": { + "version": "15.3.1", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.3.1.tgz", + "integrity": "sha512-tgg6b91pAybXHJQMAAwW9VuWBO6Thi+q7BCNARLwSqlmsHz0XYURtGvh/AuwSADXSI4h/2uHbs7s4FzlZDGSGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "@types/resolve": "1.20.2", + "deepmerge": "^4.2.2", + "is-module": "^1.0.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.78.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-terser": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@rollup/plugin-terser/-/plugin-terser-0.4.4.tgz", + "integrity": "sha512-XHeJC5Bgvs8LfukDwWZp7yeqin6ns8RTl2B9avbejt6tZqsqvVoWI7ZTQrcNsfKEDWBTnTxM8nMDkO2IFFbd0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "serialize-javascript": "^6.0.1", + "smob": "^1.0.0", + "terser": "^5.17.4" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", + "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + 
"node_modules/@rollup/pluginutils/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.54.0.tgz", + "integrity": "sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.54.0.tgz", + "integrity": "sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.54.0.tgz", + "integrity": "sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.54.0.tgz", + "integrity": "sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": 
true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.54.0.tgz", + "integrity": "sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.54.0.tgz", + "integrity": "sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.54.0.tgz", + "integrity": "sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.54.0.tgz", + "integrity": "sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.54.0.tgz", + "integrity": "sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==", + "cpu": [ + "arm64" 
+ ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.54.0.tgz", + "integrity": "sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.54.0.tgz", + "integrity": "sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.54.0.tgz", + "integrity": "sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.54.0.tgz", + "integrity": "sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.54.0.tgz", + "integrity": 
"sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.54.0.tgz", + "integrity": "sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.54.0.tgz", + "integrity": "sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.54.0.tgz", + "integrity": "sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.54.0.tgz", + "integrity": "sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.54.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.54.0.tgz", + "integrity": "sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.54.0.tgz", + "integrity": "sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.54.0.tgz", + "integrity": "sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.54.0.tgz", + "integrity": "sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@surma/rollup-plugin-off-main-thread": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz", + "integrity": "sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "ejs": "^3.1.6", + "json5": "^2.2.0", + "magic-string": "^0.25.0", + 
"string.prototype.matchall": "^4.0.6" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/prop-types": { + 
"version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.27", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.27.tgz", + "integrity": "sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@types/resolve": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", + "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + 
"@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + 
"node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true, + "license": "MIT" + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + 
"is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/at-least-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.23", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.23.tgz", + "integrity": "sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001760", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + 
"node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/babel-plugin-polyfill-corejs2": { + "version": "0.4.14", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.14.tgz", + "integrity": "sha512-Co2Y9wX854ts6U8gAAPXfn0GmAyctHuK8n0Yhfjd6t30g7yvKjspvvOo9yG+z52PZRgFErt7Ka2pYnXCjLKEpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.7", + "@babel/helper-define-polyfill-provider": "^0.6.5", + "semver": "^6.3.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.13.0.tgz", + "integrity": "sha512-U+GNwMdSFgzVmfhNm8GJUX88AadB3uo9KpJqS3FaqNIPKgySuvMb+bHPsOmmuWyIcuqZj/pzt1RUIUZns4y2+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.5", + "core-js-compat": "^3.43.0" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.5.tgz", + "integrity": "sha512-ISqQ2frbiNU9vIJkzg7dlPpznPZ4jOiUQ1uSmB0fEHeowtN3COYRsXr/xexn64NpU13P06jc/L5TgiJXOgrbEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.5" + }, + 
"peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.11", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.11.tgz", + "integrity": "sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": 
"https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001761", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001761.tgz", + "integrity": "sha512-JF9ptu1vP2coz98+5051jZ4PwQgd2ni8A+gYSN7EA7dPKIMf0pDlSUxhdmVOaV3/fYK5uWBkgSXJaRLr4+3A6g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chart.js": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz", + "integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/common-tags": { + "version": 
"1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/core-js-compat": { + "version": "3.47.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.47.0.tgz", + "integrity": "sha512-IGfuznZ/n7Kp9+nypamBhvwdwLsW6KC8IOaURw2doAK5e98AG3acVLdh0woOnEqCfUtS+Vu882JE4k/DAm3ItQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": 
"sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/inspect-js" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { 
+ "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/date-fns": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" + } + }, + "node_modules/date-fns-jalali": { + "version": "4.1.0-0", + "resolved": "https://registry.npmjs.org/date-fns-jalali/-/date-fns-jalali-4.1.0-0.tgz", + "integrity": "sha512-hTIP/z+t+qKwBDcmmsnmjWTduxCg+5KfdqWQvb2X/8C9+knYY6epN/pfxdDuyVlSVeFz0sM5eEfwIUQ70U4ckg==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", 
+ "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true, + "license": "MIT" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.267", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "dev": true, + "license": "ISC" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-abstract": { + "version": "1.24.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.1.tgz", + "integrity": "sha512-zHXBLhP+QehSSbsS9Pt23Gg964240DPd6QCf8WpkqEXxQ7fhdZzYsocOr5u7apWonsS5EjZDmTF+/slGMyasvw==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + 
"get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", + "object-keys": "^1.1.1", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + 
"version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-to-primitive": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", 
+ "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "dev": true, + "license": "MIT" + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": 
"sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": 
"sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": 
"https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + 
"functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/generator-function": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz", + "integrity": "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/get-own-enumerable-property-symbols": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", + "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==", + "dev": true, + "license": "ISC" + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-symbol-description": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-11.1.0.tgz", + "integrity": "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "foreground-child": "^3.3.1", + "jackspeak": "^4.1.1", + "minimatch": "^10.1.1", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^2.0.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/has-bigints": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", 
+ "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/idb": { + "version": "7.1.1", + "resolved": 
"https://registry.npmjs.org/idb/-/idb-7.1.1.tgz", + "integrity": "sha512-gchesWBzyvGHRO9W8tzUWFDycow5gwjvFKfyV9FF32Y7F50yZMp7mP+T2mJIWFx49zicqyC4uefHM17o6xKIVQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "has-bigints": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-boolean-object": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-function": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.2.tgz", + "integrity": "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.4", + "generator-function": "^2.0.0", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", + "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": 
"sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", + "integrity": "sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-regexp": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", 
+ "integrity": "sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-string": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true, + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/jackspeak": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz", + "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jake": { + "version": "10.9.4", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.4.tgz", + "integrity": "sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "async": "^3.2.6", + "filelist": "^1.0.4", + "picocolors": "^1.1.1" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-tokens": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true, + "license": "(AFL-2.1 OR BSD-3-Clause)" + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonpointer": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz", + "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.sortby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", + "integrity": 
"sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", + "dev": true, + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/magic-string": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz", + "integrity": "sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "sourcemap-codec": "^1.4.8" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": 
"sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", + "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/brace-expansion": "^5.0.0" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" 
+ } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-wheel": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/normalize-wheel/-/normalize-wheel-1.0.1.tgz", + "integrity": "sha512-1OnlAPZ3zgrk8B91HyRj+eVv+kS5u+Z0SCsak6Xil/kmgEia50ga7zfkumayonZrImffAxPU/5WcyGhzetHNPA==", + "license": "BSD-3-Clause" + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": 
true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/own-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz", + "integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "11.2.4", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", + "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": 
"https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": 
"sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.1.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": 
"sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/pretty-bytes": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.1.tgz", + "integrity": "sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": 
"1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-chartjs-2": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.3.1.tgz", + "integrity": "sha512-h5IPXKg9EXpjoBzUfyWJvllMjG2mQ4EiuHQFhms/AjUm0XSZHhyRy2xVmLXHKrtcdrPO4mnGqRtYoD0vp95A0A==", + "license": "MIT", + "peerDependencies": { + "chart.js": "^4.1.1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/react-circular-progressbar": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/react-circular-progressbar/-/react-circular-progressbar-2.2.0.tgz", + "integrity": "sha512-cgyqEHOzB0nWMZjKfWN3MfSa1LV3OatcDjPz68lchXQUEiBD5O1WsAtoVK4/DSL0B4USR//cTdok4zCBkq8X5g==", + "license": "MIT", + "peerDependencies": { + "react": ">=0.14.0" + } + }, + "node_modules/react-day-picker": { + "version": 
"9.13.0", + "resolved": "https://registry.npmjs.org/react-day-picker/-/react-day-picker-9.13.0.tgz", + "integrity": "sha512-euzj5Hlq+lOHqI53NiuNhCP8HWgsPf/bBAVijR50hNaY1XwjKjShAnIe8jm8RD2W9IJUvihDIZ+KrmqfFzNhFQ==", + "license": "MIT", + "dependencies": { + "@date-fns/tz": "^1.4.1", + "date-fns": "^4.1.0", + "date-fns-jalali": "^4.1.0-0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "individual", + "url": "https://github.com/sponsors/gpbl" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-easy-crop": { + "version": "5.5.6", + "resolved": "https://registry.npmjs.org/react-easy-crop/-/react-easy-crop-5.5.6.tgz", + "integrity": "sha512-Jw3/ozs8uXj3NpL511Suc4AHY+mLRO23rUgipXvNYKqezcFSYHxe4QXibBymkOoY6oOtLVMPO2HNPRHYvMPyTw==", + "license": "MIT", + "dependencies": { + "normalize-wheel": "^1.0.1", + "tslib": "^2.0.1" + }, + "peerDependencies": { + "react": ">=16.4.0", + "react-dom": ">=16.4.0" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regenerate": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", + "dev": true, + "license": "MIT" + }, + "node_modules/regenerate-unicode-properties": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.2.tgz", + "integrity": "sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "regenerate": "^1.4.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": 
"sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regexpu-core": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-6.4.0.tgz", + "integrity": "sha512-0ghuzq67LI9bLXpOX/ISfve/Mq33a4aFRzoQYhnnok1JOFpmE/A2TBGkNVenOGEeSBCjIiWcc6MVOG5HEQv0sA==", + "dev": true, + "license": "MIT", + "dependencies": { + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^10.2.2", + "regjsgen": "^0.8.0", + "regjsparser": "^0.13.0", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.2.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regjsgen": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.8.0.tgz", + "integrity": "sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/regjsparser": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.13.0.tgz", + "integrity": "sha512-NZQZdC5wOE/H3UT28fVGL+ikOZcEzfMGk/c3iN9UGxzWHMa1op7274oyiUVrAG4B2EuFhus8SvkaYnhvW92p9Q==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "jsesc": "~3.1.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.54.0.tgz", + "integrity": "sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.54.0", + "@rollup/rollup-android-arm64": "4.54.0", + "@rollup/rollup-darwin-arm64": "4.54.0", + "@rollup/rollup-darwin-x64": "4.54.0", + "@rollup/rollup-freebsd-arm64": "4.54.0", + "@rollup/rollup-freebsd-x64": "4.54.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.54.0", + "@rollup/rollup-linux-arm-musleabihf": "4.54.0", + "@rollup/rollup-linux-arm64-gnu": "4.54.0", + "@rollup/rollup-linux-arm64-musl": "4.54.0", + "@rollup/rollup-linux-loong64-gnu": "4.54.0", + "@rollup/rollup-linux-ppc64-gnu": "4.54.0", + "@rollup/rollup-linux-riscv64-gnu": "4.54.0", + 
"@rollup/rollup-linux-riscv64-musl": "4.54.0", + "@rollup/rollup-linux-s390x-gnu": "4.54.0", + "@rollup/rollup-linux-x64-gnu": "4.54.0", + "@rollup/rollup-linux-x64-musl": "4.54.0", + "@rollup/rollup-openharmony-arm64": "4.54.0", + "@rollup/rollup-win32-arm64-msvc": "4.54.0", + "@rollup/rollup-win32-ia32-msvc": "4.54.0", + "@rollup/rollup-win32-x64-gnu": "4.54.0", + "@rollup/rollup-win32-x64-msvc": "4.54.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": 
"https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": 
"sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/sharp": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.5.tgz", + "integrity": "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==", + "dev": true, + "hasInstallScript": true, + "license": "Apache-2.0", + 
"dependencies": { + "@img/colour": "^1.0.0", + "detect-libc": "^2.1.2", + "semver": "^7.7.3" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-darwin-arm64": "0.34.5", + "@img/sharp-darwin-x64": "0.34.5", + "@img/sharp-libvips-darwin-arm64": "1.2.4", + "@img/sharp-libvips-darwin-x64": "1.2.4", + "@img/sharp-libvips-linux-arm": "1.2.4", + "@img/sharp-libvips-linux-arm64": "1.2.4", + "@img/sharp-libvips-linux-ppc64": "1.2.4", + "@img/sharp-libvips-linux-riscv64": "1.2.4", + "@img/sharp-libvips-linux-s390x": "1.2.4", + "@img/sharp-libvips-linux-x64": "1.2.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", + "@img/sharp-libvips-linuxmusl-x64": "1.2.4", + "@img/sharp-linux-arm": "0.34.5", + "@img/sharp-linux-arm64": "0.34.5", + "@img/sharp-linux-ppc64": "0.34.5", + "@img/sharp-linux-riscv64": "0.34.5", + "@img/sharp-linux-s390x": "0.34.5", + "@img/sharp-linux-x64": "0.34.5", + "@img/sharp-linuxmusl-arm64": "0.34.5", + "@img/sharp-linuxmusl-x64": "0.34.5", + "@img/sharp-wasm32": "0.34.5", + "@img/sharp-win32-arm64": "0.34.5", + "@img/sharp-win32-ia32": "0.34.5", + "@img/sharp-win32-x64": "0.34.5" + } + }, + "node_modules/sharp/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + 
} + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": 
"1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/smob": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/smob/-/smob-1.5.0.tgz", + "integrity": "sha512-g6T+p7QO8npa+/hNx9ohv1E5pVCmWrVCUzUXJyLdMmftX6ER0oiWY/w9knEonLpnOp6b6FenKnMfR8gqwWdwig==", + "dev": true, + "license": "MIT" + }, + "node_modules/source-map": { + "version": "0.8.0-beta.0", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz", + "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==", + "deprecated": "The work that was done in this beta branch won't be included in future versions", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "whatwg-url": "^7.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + 
"node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sourcemap-codec": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz", + "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==", + "deprecated": "Please use @jridgewell/sourcemap-codec instead", + "dev": true, + "license": "MIT" + }, + "node_modules/stop-iteration-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + 
"node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string.prototype.matchall": { + "version": "4.0.12", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.12.tgz", + "integrity": "sha512-6CC9uyBL+/48dYizRf7H7VAYCMCNTBeM78x/VTUe9bFEaxBepPJDa1Ow99LqI/1yF7kuy7Q3cQsYMrcjGUcskA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": 
"^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "regexp.prototype.flags": "^1.5.3", + "set-function-name": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", 
+ "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/stringify-object": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", + "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "get-own-enumerable-property-symbols": "^3.0.0", + "is-obj": "^1.0.1", + "is-regexp": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-comments": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/strip-comments/-/strip-comments-2.0.1.tgz", + "integrity": "sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/sucrase": { + "version": "3.35.1", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.19", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz", + "integrity": "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": 
"^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/temp-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz", + "integrity": "sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/tempy": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tempy/-/tempy-0.6.0.tgz", + "integrity": "sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-stream": "^2.0.0", + "temp-dir": "^2.0.0", + "type-fest": "^0.16.0", + "unique-string": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/terser": { + "version": "5.44.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.44.1.tgz", + "integrity": "sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==", + "dev": true, + "license": "BSD-2-Clause", + "peer": true, + "dependencies": { + "@jridgewell/source-map": "^0.3.3", + "acorn": "^8.15.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": 
true, + "license": "MIT" + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + 
"node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tr46": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", + "integrity": "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-fest": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.16.0.tgz", + "integrity": "sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": 
"sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0", + 
"reflect.getprototypeof": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/unbox-primitive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-bigints": "^1.0.2", + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/unicode-canonical-property-names-ecmascript": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.1.tgz", + "integrity": "sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-value-ecmascript": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.1.tgz", + "integrity": "sha512-JQ84qTuMg4nVkx8ga4A16a1epI9H6uTXAknqxkGF/aFfRLw1xC/Bp24HNLaZhHSkWd3+84t8iXnp1J0kYcZHhg==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-property-aliases-ecmascript": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.2.0.tgz", + "integrity": "sha512-hpbDzxUY9BFwX+UeBnxv3Sh1q7HFxj48DTmXchNgRa46lO8uj3/1iEn3MiNUYTg1g9ctIqXCCERn8gYZhHC5lQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "crypto-random-string": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/upath": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4", + "yarn": "*" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": 
"github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite-plugin-pwa": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/vite-plugin-pwa/-/vite-plugin-pwa-1.2.0.tgz", + "integrity": "sha512-a2xld+SJshT9Lgcv8Ji4+srFJL4k/1bVbd1x06JIkvecpQkwkvCncD1+gSzcdm3s+owWLpMJerG3aN5jupJEVw==", + "dev": true, 
+ "license": "MIT", + "dependencies": { + "debug": "^4.3.6", + "pretty-bytes": "^6.1.1", + "tinyglobby": "^0.2.10", + "workbox-build": "^7.4.0", + "workbox-window": "^7.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vite-pwa/assets-generator": "^1.0.0", + "vite": "^3.1.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0", + "workbox-build": "^7.4.0", + "workbox-window": "^7.4.0" + }, + "peerDependenciesMeta": { + "@vite-pwa/assets-generator": { + "optional": true + } + } + }, + "node_modules/webidl-conversions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", + "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", + "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "lodash.sortby": "^4.7.0", + "tr46": "^1.0.1", + "webidl-conversions": "^4.0.2" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": 
"^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/workbox-background-sync": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-background-sync/-/workbox-background-sync-7.4.0.tgz", + "integrity": "sha512-8CB9OxKAgKZKyNMwfGZ1XESx89GryWTfI+V5yEj8sHjFH8MFelUwYXEyldEK6M6oKMmn807GoJFUEA1sC4XS9w==", + "dev": true, + "license": "MIT", + "dependencies": { + "idb": "^7.0.1", + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-broadcast-update": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-broadcast-update/-/workbox-broadcast-update-7.4.0.tgz", + "integrity": "sha512-+eZQwoktlvo62cI0b+QBr40v5XjighxPq3Fzo9AWMiAosmpG5gxRHgTbGGhaJv/q/MFVxwFNGh/UwHZ/8K88lA==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-build": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-build/-/workbox-build-7.4.0.tgz", + "integrity": "sha512-Ntk1pWb0caOFIvwz/hfgrov/OJ45wPEhI5PbTywQcYjyZiVhT3UrwwUPl6TRYbTm4moaFYithYnl1lvZ8UjxcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@apideck/better-ajv-errors": "^0.3.1", + "@babel/core": "^7.24.4", + "@babel/preset-env": "^7.11.0", + "@babel/runtime": "^7.11.2", + "@rollup/plugin-babel": "^5.2.0", + "@rollup/plugin-node-resolve": "^15.2.3", + "@rollup/plugin-replace": "^2.4.1", + "@rollup/plugin-terser": "^0.4.3", + "@surma/rollup-plugin-off-main-thread": "^2.2.3", + "ajv": "^8.6.0", + "common-tags": "^1.8.0", + "fast-json-stable-stringify": "^2.1.0", + "fs-extra": "^9.0.1", + "glob": "^11.0.1", + "lodash": "^4.17.20", + "pretty-bytes": "^5.3.0", + "rollup": "^2.79.2", + "source-map": "^0.8.0-beta.0", + "stringify-object": "^3.3.0", + "strip-comments": "^2.0.1", + 
"tempy": "^0.6.0", + "upath": "^1.2.0", + "workbox-background-sync": "7.4.0", + "workbox-broadcast-update": "7.4.0", + "workbox-cacheable-response": "7.4.0", + "workbox-core": "7.4.0", + "workbox-expiration": "7.4.0", + "workbox-google-analytics": "7.4.0", + "workbox-navigation-preload": "7.4.0", + "workbox-precaching": "7.4.0", + "workbox-range-requests": "7.4.0", + "workbox-recipes": "7.4.0", + "workbox-routing": "7.4.0", + "workbox-strategies": "7.4.0", + "workbox-streams": "7.4.0", + "workbox-sw": "7.4.0", + "workbox-window": "7.4.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/workbox-build/node_modules/@rollup/plugin-babel": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz", + "integrity": "sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.10.4", + "@rollup/pluginutils": "^3.1.0" + }, + "engines": { + "node": ">= 10.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0", + "@types/babel__core": "^7.1.9", + "rollup": "^1.20.0||^2.0.0" + }, + "peerDependenciesMeta": { + "@types/babel__core": { + "optional": true + } + } + }, + "node_modules/workbox-build/node_modules/@rollup/plugin-replace": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz", + "integrity": "sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rollup/pluginutils": "^3.1.0", + "magic-string": "^0.25.7" + }, + "peerDependencies": { + "rollup": "^1.20.0 || ^2.0.0" + } + }, + "node_modules/workbox-build/node_modules/@rollup/pluginutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.1.0.tgz", + "integrity": 
"sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "0.0.39", + "estree-walker": "^1.0.1", + "picomatch": "^2.2.2" + }, + "engines": { + "node": ">= 8.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0" + } + }, + "node_modules/workbox-build/node_modules/@types/estree": { + "version": "0.0.39", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz", + "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==", + "dev": true, + "license": "MIT" + }, + "node_modules/workbox-build/node_modules/estree-walker": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-1.0.1.tgz", + "integrity": "sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==", + "dev": true, + "license": "MIT" + }, + "node_modules/workbox-build/node_modules/pretty-bytes": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", + "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/workbox-build/node_modules/rollup": { + "version": "2.79.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.2.tgz", + "integrity": "sha512-fS6iqSPZDs3dr/y7Od6y5nha8dW1YnbgtsyotCVvoFGKbERG++CVRFv1meyGDE1SNItQA8BrnCw7ScdAhRJ3XQ==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=10.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/workbox-cacheable-response": { + "version": "7.4.0", + "resolved": 
"https://registry.npmjs.org/workbox-cacheable-response/-/workbox-cacheable-response-7.4.0.tgz", + "integrity": "sha512-0Fb8795zg/x23ISFkAc7lbWes6vbw34DGFIMw31cwuHPgDEC/5EYm6m/ZkylLX0EnEbbOyOCLjKgFS/Z5g0HeQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-core": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-core/-/workbox-core-7.4.0.tgz", + "integrity": "sha512-6BMfd8tYEnN4baG4emG9U0hdXM4gGuDU3ectXuVHnj71vwxTFI7WOpQJC4siTOlVtGqCUtj0ZQNsrvi6kZZTAQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/workbox-expiration": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-expiration/-/workbox-expiration-7.4.0.tgz", + "integrity": "sha512-V50p4BxYhtA80eOvulu8xVfPBgZbkxJ1Jr8UUn0rvqjGhLDqKNtfrDfjJKnLz2U8fO2xGQJTx/SKXNTzHOjnHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "idb": "^7.0.1", + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-google-analytics": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-google-analytics/-/workbox-google-analytics-7.4.0.tgz", + "integrity": "sha512-MVPXQslRF6YHkzGoFw1A4GIB8GrKym/A5+jYDUSL+AeJw4ytQGrozYdiZqUW1TPQHW8isBCBtyFJergUXyNoWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-background-sync": "7.4.0", + "workbox-core": "7.4.0", + "workbox-routing": "7.4.0", + "workbox-strategies": "7.4.0" + } + }, + "node_modules/workbox-navigation-preload": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-navigation-preload/-/workbox-navigation-preload-7.4.0.tgz", + "integrity": "sha512-etzftSgdQfjMcfPgbfaZCfM2QuR1P+4o8uCA2s4rf3chtKTq/Om7g/qvEOcZkG6v7JZOSOxVYQiOu6PbAZgU6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-precaching": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-precaching/-/workbox-precaching-7.4.0.tgz", + "integrity": 
"sha512-VQs37T6jDqf1rTxUJZXRl3yjZMf5JX/vDPhmx2CPgDDKXATzEoqyRqhYnRoxl6Kr0rqaQlp32i9rtG5zTzIlNg==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0", + "workbox-routing": "7.4.0", + "workbox-strategies": "7.4.0" + } + }, + "node_modules/workbox-range-requests": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-range-requests/-/workbox-range-requests-7.4.0.tgz", + "integrity": "sha512-3Vq854ZNuP6Y0KZOQWLaLC9FfM7ZaE+iuQl4VhADXybwzr4z/sMmnLgTeUZLq5PaDlcJBxYXQ3U91V7dwAIfvw==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-recipes": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-recipes/-/workbox-recipes-7.4.0.tgz", + "integrity": "sha512-kOkWvsAn4H8GvAkwfJTbwINdv4voFoiE9hbezgB1sb/0NLyTG4rE7l6LvS8lLk5QIRIto+DjXLuAuG3Vmt3cxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-cacheable-response": "7.4.0", + "workbox-core": "7.4.0", + "workbox-expiration": "7.4.0", + "workbox-precaching": "7.4.0", + "workbox-routing": "7.4.0", + "workbox-strategies": "7.4.0" + } + }, + "node_modules/workbox-routing": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-routing/-/workbox-routing-7.4.0.tgz", + "integrity": "sha512-C/ooj5uBWYAhAqwmU8HYQJdOjjDKBp9MzTQ+otpMmd+q0eF59K+NuXUek34wbL0RFrIXe/KKT+tUWcZcBqxbHQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-strategies": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-strategies/-/workbox-strategies-7.4.0.tgz", + "integrity": "sha512-T4hVqIi5A4mHi92+5EppMX3cLaVywDp8nsyUgJhOZxcfSV/eQofcOA6/EMo5rnTNmNTpw0rUgjAI6LaVullPpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0" + } + }, + "node_modules/workbox-streams": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-streams/-/workbox-streams-7.4.0.tgz", + 
"integrity": "sha512-QHPBQrey7hQbnTs5GrEVoWz7RhHJXnPT+12qqWM378orDMo5VMJLCkCM1cnCk+8Eq92lccx/VgRZ7WAzZWbSLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "workbox-core": "7.4.0", + "workbox-routing": "7.4.0" + } + }, + "node_modules/workbox-sw": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-sw/-/workbox-sw-7.4.0.tgz", + "integrity": "sha512-ltU+Kr3qWR6BtbdlMnCjobZKzeV1hN+S6UvDywBrwM19TTyqA03X66dzw1tEIdJvQ4lYKkBFox6IAEhoSEZ8Xw==", + "dev": true, + "license": "MIT" + }, + "node_modules/workbox-window": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/workbox-window/-/workbox-window-7.4.0.tgz", + "integrity": "sha512-/bIYdBLAVsNR3v7gYGaV4pQW3M3kEPx5E8vDxGvxo6khTrGtSSCS7QiFKv9ogzBgZiy0OXLP9zO28U/1nF1mfw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/trusted-types": "^2.0.2", + "workbox-core": "7.4.0" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": 
"5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + 
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + } + } +} diff --git a/play-life-web/package.json b/play-life-web/package.json new file mode 100644 index 0000000..cd68a04 --- /dev/null +++ b/play-life-web/package.json @@ -0,0 +1,33 @@ +{ + "name": "play-life-web", + "version": "5.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "chart.js": "^4.4.0", + "react": "^18.2.0", + "react-chartjs-2": "^5.2.0", + "react-circular-progressbar": "^2.2.0", + "react-day-picker": "^9.13.0", + "react-dom": "^18.2.0", + "react-easy-crop": "^5.5.6" + }, + "devDependencies": { + "@types/react": "^18.2.43", + "@types/react-dom": "^18.2.17", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.16", + "postcss": "^8.4.32", + "sharp": "^0.34.5", + "tailwindcss": "^3.3.6", + "vite": "^5.0.8", + "vite-plugin-pwa": "^1.2.0" + } +} diff --git a/play-life-web/postcss.config.js b/play-life-web/postcss.config.js new file mode 100644 index 0000000..b4a6220 --- /dev/null +++ b/play-life-web/postcss.config.js @@ -0,0 +1,7 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} + diff --git a/play-life-web/public/apple-touch-icon.png b/play-life-web/public/apple-touch-icon.png new file mode 100644 index 0000000..f7bf8a1 Binary files /dev/null and b/play-life-web/public/apple-touch-icon.png differ diff --git a/play-life-web/public/favicon-new.ico b/play-life-web/public/favicon-new.ico new file mode 100644 index 0000000..6f8bc47 Binary files /dev/null and b/play-life-web/public/favicon-new.ico differ diff --git a/play-life-web/public/favicon.svg b/play-life-web/public/favicon.svg new file mode 100644 index 
0000000..f2db8ea --- /dev/null +++ b/play-life-web/public/favicon.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/play-life-web/public/privacy.html b/play-life-web/public/privacy.html new file mode 100644 index 0000000..ecbd20d --- /dev/null +++ b/play-life-web/public/privacy.html @@ -0,0 +1,159 @@ + + + + + + Политика конфиденциальности - Play Life + + + +
+

Политика конфиденциальности

+ +

Дата вступления в силу: 1 января 2024 года

+ +

1. Введение

+

Play Life ("мы", "наш", "нас") уважает вашу конфиденциальность и обязуется защищать ваши личные данные. Настоящая Политика конфиденциальности объясняет, как мы собираем, используем, храним и защищаем вашу информацию при использовании нашего приложения.

+ +

2. Собираемая информация

+

Мы собираем следующие типы информации:

+ +

2.1. Информация, предоставляемая вами

+
    +
  • Имя и адрес электронной почты при регистрации
  • +
  • Данные о ваших проектах, задачах и целях
  • +
  • Списки желаний и связанная информация
  • +
  • Словари и слова для изучения
  • +
+ +

2.2. Информация из интеграций

+
    +
  • Todoist: Информация о ваших задачах (только при подключении интеграции)
  • +
  • Telegram: ID пользователя Telegram (только при подключении бота)
  • +
  • Fitbit: Данные о физической активности, включая шаги, этажи и активные зоны минут (только при подключении интеграции)
  • +
+ +

2.3. Автоматически собираемая информация

+
    +
  • Данные об использовании приложения (логи доступа, ошибки)
  • +
  • Техническая информация (версия браузера, тип устройства)
  • +
+ +

3. Использование информации

+

Мы используем собранную информацию для:

+
    +
  • Предоставления и улучшения функциональности приложения
  • +
  • Обработки ваших запросов и транзакций
  • +
  • Отправки уведомлений и обновлений (если вы подписаны)
  • +
  • Обеспечения безопасности и предотвращения мошенничества
  • +
  • Соблюдения юридических обязательств
  • +
+ +

4. Хранение данных

+

Ваши данные хранятся на защищенных серверах. Мы применяем соответствующие технические и организационные меры для защиты ваших данных от несанкционированного доступа, изменения, раскрытия или уничтожения.

+ +

5. Обмен данными

+

Мы не продаем и не передаем ваши личные данные третьим лицам, за исключением:

+
    +
  • Когда это необходимо для предоставления услуг (например, интеграции с Fitbit, Todoist, Telegram)
  • +
  • Когда это требуется по закону или по запросу государственных органов
  • +
  • С вашего явного согласия
  • +
+ +

6. Интеграции с третьими сторонами

+

При использовании интеграций с Fitbit, Todoist или Telegram, ваши данные могут передаваться этим сервисам в соответствии с их политиками конфиденциальности:

+
    +
  • Fitbit: Мы получаем доступ только к данным о физической активности (шаги, этажи, активные зоны минут) с вашего явного разрешения через OAuth.
  • +
  • Todoist: Мы получаем доступ только к информации о завершенных задачах для синхронизации с вашими проектами.
  • +
  • Telegram: Мы получаем только ваш Telegram ID для связи с ботом.
  • +
+ +

7. Ваши права

+

Вы имеете право:

+
    +
  • Получить доступ к вашим личным данным
  • +
  • Исправить неточные данные
  • +
  • Удалить ваши данные
  • +
  • Отозвать согласие на обработку данных
  • +
  • Ограничить обработку ваших данных
  • +
  • Получить копию ваших данных в структурированном формате
  • +
+

Для осуществления этих прав свяжитесь с нами через приложение.

+ +

8. Cookies и аналогичные технологии

+

Мы используем cookies и аналогичные технологии для улучшения работы приложения, анализа использования и персонализации контента. Вы можете управлять настройками cookies в вашем браузере.

+ +

9. Безопасность

+

Мы применяем различные меры безопасности для защиты ваших данных, включая шифрование, контроль доступа и регулярные проверки безопасности. Однако ни один метод передачи через Интернет или электронного хранения не является на 100% безопасным.

+ +

10. Сроки хранения данных

+

Мы храним ваши данные до тех пор, пока это необходимо для предоставления услуг, либо пока вы не попросите нас удалить их. Некоторые данные могут храниться дольше в соответствии с требованиями законодательства.

+ +

11. Дети

+

Наше приложение не предназначено для лиц младше 13 лет. Мы сознательно не собираем личную информацию от детей младше 13 лет.

+ +

12. Изменения в политике

+

Мы можем периодически обновлять настоящую Политику конфиденциальности. Мы уведомим вас о любых существенных изменениях, разместив новую политику на этой странице и обновив дату "Последнее обновление".

+ +

13. Контактная информация

+

Если у вас есть вопросы или запросы относительно настоящей Политики конфиденциальности или обработки ваших данных, пожалуйста, свяжитесь с нами через приложение.

+ +

14. Применимое законодательство

+

Настоящая Политика конфиденциальности регулируется законодательством Российской Федерации, включая Федеральный закон "О персональных данных" № 152-ФЗ.

+ +
+

Последнее обновление: 1 января 2024 года

+
+
+ + diff --git a/play-life-web/public/pwa-192x192.png b/play-life-web/public/pwa-192x192.png new file mode 100644 index 0000000..81839e3 Binary files /dev/null and b/play-life-web/public/pwa-192x192.png differ diff --git a/play-life-web/public/pwa-512x512.png b/play-life-web/public/pwa-512x512.png new file mode 100644 index 0000000..a61fee5 Binary files /dev/null and b/play-life-web/public/pwa-512x512.png differ diff --git a/play-life-web/public/pwa-maskable-192x192.png b/play-life-web/public/pwa-maskable-192x192.png new file mode 100644 index 0000000..5c6b3a3 Binary files /dev/null and b/play-life-web/public/pwa-maskable-192x192.png differ diff --git a/play-life-web/public/pwa-maskable-512x512.png b/play-life-web/public/pwa-maskable-512x512.png new file mode 100644 index 0000000..a6343da Binary files /dev/null and b/play-life-web/public/pwa-maskable-512x512.png differ diff --git a/play-life-web/public/terms.html b/play-life-web/public/terms.html new file mode 100644 index 0000000..252f4ff --- /dev/null +++ b/play-life-web/public/terms.html @@ -0,0 +1,128 @@ + + + + + + Условия использования - Play Life + + + +
+

Условия использования

+ +

Дата вступления в силу: 1 января 2024 года

+ +

1. Принятие условий

+

Используя приложение Play Life, вы соглашаетесь с настоящими Условиями использования. Если вы не согласны с какими-либо условиями, пожалуйста, не используйте наше приложение.

+ +

2. Описание сервиса

+

Play Life — это приложение для отслеживания продуктивности и личных целей, которое позволяет пользователям:

+
    +
  • Отслеживать прогресс по проектам и задачам
  • +
  • Управлять списками желаний
  • +
  • Изучать слова и создавать словари
  • +
  • Интегрироваться с внешними сервисами (Todoist, Telegram, Fitbit)
  • +
+ +

3. Регистрация и учетные записи

+

Для использования некоторых функций приложения требуется создание учетной записи. Вы обязуетесь:

+
    +
  • Предоставлять точную и актуальную информацию
  • +
  • Поддерживать безопасность вашей учетной записи
  • +
  • Нести ответственность за все действия, совершенные под вашей учетной записью
  • +
  • Немедленно уведомлять нас о любом несанкционированном использовании
  • +
+ +

4. Использование сервиса

+

Вы соглашаетесь использовать Play Life только в законных целях и не будете:

+
    +
  • Нарушать какие-либо применимые законы или нормативные акты
  • +
  • Передавать вредоносное программное обеспечение или код
  • +
  • Пытаться получить несанкционированный доступ к сервису
  • +
  • Использовать сервис для спама или рассылки нежелательных сообщений
  • +
  • Нарушать права интеллектуальной собственности других лиц
  • +
+ +

5. Интеграции с третьими сторонами

+

Play Life может интегрироваться с внешними сервисами (Todoist, Telegram, Fitbit). Использование этих интеграций регулируется условиями использования соответствующих сервисов. Мы не несем ответственности за действия или политики этих третьих сторон.

+ +

6. Интеллектуальная собственность

+

Все материалы, содержащиеся в Play Life, включая, но не ограничиваясь текстом, графикой, логотипами, иконками, изображениями, являются собственностью Play Life или их соответствующих владельцев и защищены законами об авторском праве.

+ +

7. Конфиденциальность

+

Использование ваших личных данных регулируется нашей Политикой конфиденциальности. Используя Play Life, вы соглашаетесь с обработкой ваших данных в соответствии с этой политикой.

+ +

8. Отказ от ответственности

+

Play Life предоставляется "как есть" без каких-либо гарантий, явных или подразумеваемых. Мы не гарантируем, что сервис будет бесперебойным, безопасным или безошибочным.

+ +

9. Ограничение ответственности

+

В максимальной степени, разрешенной законом, Play Life не несет ответственности за любые прямые, косвенные, случайные, особые или последующие убытки, возникающие в результате использования или невозможности использования сервиса.

+ +

10. Изменения в условиях

+

Мы оставляем за собой право изменять настоящие Условия использования в любое время. Изменения вступают в силу с момента их публикации. Продолжение использования сервиса после внесения изменений означает ваше согласие с новыми условиями.

+ +

11. Прекращение использования

+

Мы можем приостановить или прекратить ваш доступ к сервису в любое время, с уведомлением или без него, по любой причине, включая нарушение настоящих Условий использования.

+ +

12. Применимое право

+

Настоящие Условия использования регулируются и толкуются в соответствии с законодательством Российской Федерации.

+ +

13. Контактная информация

+

Если у вас есть вопросы относительно настоящих Условий использования, пожалуйста, свяжитесь с нами через приложение.

+ +
+

Последнее обновление: 1 января 2024 года

+
+
+ + diff --git a/play-life-web/src/App.jsx b/play-life-web/src/App.jsx new file mode 100644 index 0000000..cfe1aac --- /dev/null +++ b/play-life-web/src/App.jsx @@ -0,0 +1,1574 @@ +import React, { useState, useEffect, useCallback, useRef } from 'react' +import CurrentWeek from './components/CurrentWeek' +import FullStatistics from './components/FullStatistics' +import ProjectPriorityManager from './components/ProjectPriorityManager' +import WordList from './components/WordList' +import AddWords from './components/AddWords' +import DictionaryList from './components/DictionaryList' +import TestWords from './components/TestWords' +import Profile from './components/Profile' +import TaskList from './components/TaskList' +import TaskForm from './components/TaskForm.jsx' +import Wishlist from './components/Wishlist' +import WishlistForm from './components/WishlistForm' +import WishlistDetail from './components/WishlistDetail' +import BoardForm from './components/BoardForm' +import BoardJoinPreview from './components/BoardJoinPreview' +import TodoistIntegration from './components/TodoistIntegration' +import TelegramIntegration from './components/TelegramIntegration' +import FitbitIntegration from './components/FitbitIntegration' +import Tracking from './components/Tracking' +import TrackingAccess from './components/TrackingAccess' +import TrackingInviteAccept from './components/TrackingInviteAccept' +import { AuthProvider, useAuth } from './components/auth/AuthContext' +import AuthScreen from './components/auth/AuthScreen' +import PWAUpdatePrompt from './components/PWAUpdatePrompt' + +// API endpoints (используем относительные пути, проксирование настроено в nginx/vite) +const CURRENT_WEEK_API_URL = '/playlife-feed' +const FULL_STATISTICS_API_URL = '/d2dc349a-0d13-49b2-a8f0-1ab094bfba9b' + +// Определяем основные табы (без крестика) и глубокие табы (с крестиком) +const mainTabs = ['current', 'tasks', 'wishlist', 'profile'] +const deepTabs = ['add-words', 'test', 
'task-form', 'wishlist-form', 'wishlist-detail', 'board-form', 'board-join', 'words', 'dictionaries', 'todoist-integration', 'telegram-integration', 'fitbit-integration', 'full', 'priorities', 'tracking', 'tracking-access', 'tracking-invite'] + +function AppContent() { + const { authFetch, isAuthenticated, loading: authLoading } = useAuth() + const prevIsAuthenticatedRef = useRef(null) + + // Все хуки должны быть объявлены до условных возвратов + const [activeTab, setActiveTab] = useState('current') + const [selectedProject, setSelectedProject] = useState(null) + const [loadedTabs, setLoadedTabs] = useState({ + current: false, + priorities: false, + full: false, + words: false, + 'add-words': false, + dictionaries: false, + test: false, + tasks: false, + 'task-form': false, + wishlist: false, + 'wishlist-form': false, + 'wishlist-detail': false, + 'board-form': false, + 'board-join': false, + profile: false, + 'todoist-integration': false, + 'telegram-integration': false, + 'fitbit-integration': false, + tracking: false, + 'tracking-access': false, + 'tracking-invite': false, + }) + + // Отслеживаем, какие табы уже были загружены (для предотвращения повторных загрузок) + const [tabsInitialized, setTabsInitialized] = useState({ + current: false, + priorities: false, + full: false, + words: false, + 'add-words': false, + dictionaries: false, + test: false, + tasks: false, + 'task-form': false, + wishlist: false, + 'wishlist-form': false, + 'wishlist-detail': false, + 'board-form': false, + 'board-join': false, + profile: false, + 'todoist-integration': false, + 'telegram-integration': false, + 'fitbit-integration': false, + tracking: false, + 'tracking-access': false, + 'tracking-invite': false, + }) + + // Параметры для навигации между вкладками + const [tabParams, setTabParams] = useState({}) + + // Предыдущий таб для возврата из модальных окон + const [previousTab, setPreviousTab] = useState(null) + + // Модальное окно выбора типа задачи + const [showAddModal, 
setShowAddModal] = useState(false) + + // Ref для функции открытия модала добавления записи в CurrentWeek + const currentWeekAddModalRef = useRef(null) + + // Кеширование данных + const [currentWeekData, setCurrentWeekData] = useState(null) + const [fullStatisticsData, setFullStatisticsData] = useState(null) + const [tasksData, setTasksData] = useState(null) + const [todayEntriesData, setTodayEntriesData] = useState(null) + + // Состояния загрузки для каждого таба (показываются только при первой загрузке) + const [currentWeekLoading, setCurrentWeekLoading] = useState(false) + const [fullStatisticsLoading, setFullStatisticsLoading] = useState(false) + const [prioritiesLoading, setPrioritiesLoading] = useState(false) + const [tasksLoading, setTasksLoading] = useState(false) + const [todayEntriesLoading, setTodayEntriesLoading] = useState(false) + + // Состояния фоновой загрузки (не показываются визуально) + const [currentWeekBackgroundLoading, setCurrentWeekBackgroundLoading] = useState(false) + const [fullStatisticsBackgroundLoading, setFullStatisticsBackgroundLoading] = useState(false) + const [prioritiesBackgroundLoading, setPrioritiesBackgroundLoading] = useState(false) + const [tasksBackgroundLoading, setTasksBackgroundLoading] = useState(false) + const [todayEntriesBackgroundLoading, setTodayEntriesBackgroundLoading] = useState(false) + + // Ошибки + const [currentWeekError, setCurrentWeekError] = useState(null) + const [fullStatisticsError, setFullStatisticsError] = useState(null) + const [prioritiesError, setPrioritiesError] = useState(null) + const [tasksError, setTasksError] = useState(null) + const [todayEntriesError, setTodayEntriesError] = useState(null) + + // Состояние для кнопки Refresh (если она есть) + const [isRefreshing, setIsRefreshing] = useState(false) + const [prioritiesRefreshTrigger, setPrioritiesRefreshTrigger] = useState(0) + const [dictionariesRefreshTrigger, setDictionariesRefreshTrigger] = useState(0) + const [wordsRefreshTrigger, 
setWordsRefreshTrigger] = useState(0) + const [wishlistRefreshTrigger, setWishlistRefreshTrigger] = useState(0) + + + + // Восстанавливаем последний выбранный таб после перезагрузки + const [isInitialized, setIsInitialized] = useState(false) + + // Переключение на экран прогрессии после успешной авторизации + useEffect(() => { + // Обновляем ref только после того, как authLoading стал false + if (!authLoading) { + const wasNotAuthenticated = prevIsAuthenticatedRef.current === false + prevIsAuthenticatedRef.current = isAuthenticated + + // Проверяем, что это новая авторизация (переход с false на true) + // и что инициализация уже завершена (чтобы не конфликтовать с восстановлением из URL/localStorage) + if (wasNotAuthenticated && isAuthenticated && isInitialized) { + // Переключаемся на экран прогресса только если нет таба в URL + const urlParams = new URLSearchParams(window.location.search) + const tabFromUrl = urlParams.get('tab') + + // Если в URL нет таба, переключаемся на current (экран прогресса) + if (!tabFromUrl) { + setActiveTab('current') + setLoadedTabs(prev => ({ ...prev, current: true })) + // Очищаем URL, так как current - это основной таб + const url = new URL(window.location) + url.searchParams.delete('tab') + url.searchParams.forEach((value, key) => { + url.searchParams.delete(key) + }) + window.history.replaceState({ tab: 'current' }, '', url) + } + } + } + }, [isAuthenticated, isInitialized, authLoading]) + + // Инициализация из URL (только для глубоких табов) или localStorage + useEffect(() => { + if (isInitialized) return + + try { + // Проверяем путь /invite/:token для присоединения к доске + const path = window.location.pathname + if (path.startsWith('/invite/')) { + const token = path.replace('/invite/', '') + if (token) { + setActiveTab('board-join') + setLoadedTabs(prev => ({ ...prev, 'board-join': true })) + setTabParams({ inviteToken: token }) + setIsInitialized(true) + // Очищаем путь, оставляем только параметры + 
window.history.replaceState({}, '', '/?tab=board-join&inviteToken=' + token) + return + } + } + + // Проверяем путь /tracking/invite/:token + if (path.startsWith('/tracking/invite/')) { + const token = path.replace('/tracking/invite/', '') + if (token) { + setActiveTab('tracking-invite') + setLoadedTabs(prev => ({ ...prev, 'tracking-invite': true })) + setTabParams({ inviteToken: token }) + setIsInitialized(true) + window.history.replaceState({}, '', '/?tab=tracking-invite&inviteToken=' + token) + return + } + } + + // Проверяем параметры OAuth callback от Fitbit + const urlParams = new URLSearchParams(window.location.search) + const integration = urlParams.get('integration') + if (integration === 'fitbit') { + setActiveTab('fitbit-integration') + setLoadedTabs(prev => ({ ...prev, 'fitbit-integration': true })) + setIsInitialized(true) + // Перезаписываем URL с tab параметром и сохраняем integration/status для компонента + const status = urlParams.get('status') + const message = urlParams.get('message') + let newUrl = '/?tab=fitbit-integration&integration=fitbit' + if (status) newUrl += `&status=${status}` + if (message) newUrl += `&message=${message}` + window.history.replaceState({}, '', newUrl) + return + } + + // Проверяем URL только для глубоких табов + const tabFromUrl = urlParams.get('tab') + const validTabs = ['current', 'priorities', 'full', 'words', 'add-words', 'dictionaries', 'test', 'tasks', 'task-form', 'wishlist', 'wishlist-form', 'wishlist-detail', 'board-form', 'board-join', 'profile', 'todoist-integration', 'telegram-integration', 'fitbit-integration', 'tracking', 'tracking-access', 'tracking-invite'] + + if (tabFromUrl && validTabs.includes(tabFromUrl) && deepTabs.includes(tabFromUrl)) { + // Если в URL есть глубокий таб, восстанавливаем его + setActiveTab(tabFromUrl) + setLoadedTabs(prev => ({ ...prev, [tabFromUrl]: true })) + + // Восстанавливаем параметры из URL + const params = {} + urlParams.forEach((value, key) => { + if (key !== 'tab') { + 
try { + params[key] = JSON.parse(value) + } catch { + params[key] = value + } + } + }) + if (Object.keys(params).length > 0) { + setTabParams(params) + // Если это экран full с selectedProject, восстанавливаем его + if (tabFromUrl === 'full' && params.selectedProject) { + setSelectedProject(params.selectedProject) + } + } + } else { + // Если в URL нет глубокого таба, проверяем localStorage для основного таба + const savedTab = window.localStorage?.getItem('activeTab') + if (savedTab && validTabs.includes(savedTab)) { + setActiveTab(savedTab) + setLoadedTabs(prev => ({ ...prev, [savedTab]: true })) + } + // Очищаем URL от параметров таба, если это основной таб + if (tabFromUrl && mainTabs.includes(tabFromUrl)) { + const url = new URL(window.location) + url.searchParams.delete('tab') + url.searchParams.forEach((value, key) => { + url.searchParams.delete(key) + }) + window.history.replaceState({}, '', url) + } + } + setIsInitialized(true) + } catch (err) { + console.warn('Не удалось прочитать активный таб', err) + setIsInitialized(true) + } + }, [isInitialized]) + + const markTabAsLoaded = useCallback((tab) => { + setLoadedTabs(prev => (prev[tab] ? prev : { ...prev, [tab]: true })) + }, []) + + // Функция для обновления URL (только для глубоких табов) + const updateUrl = useCallback((tab, params = {}, previousTab = null) => { + if (!deepTabs.includes(tab)) { + // Для основных табов не обновляем URL + return + } + + const url = new URL(window.location) + url.searchParams.set('tab', tab) + + // Удаляем старые параметры таба + const keysToRemove = [] + url.searchParams.forEach((value, key) => { + if (key !== 'tab') { + keysToRemove.push(key) + } + }) + keysToRemove.forEach(key => url.searchParams.delete(key)) + + // Добавляем новые параметры + Object.entries(params).forEach(([key, value]) => { + if (value !== undefined && value !== null) { + url.searchParams.set(key, typeof value === 'object' ? 
JSON.stringify(value) : value) + } + }) + + // Сохраняем предыдущий таб в state для восстановления при "Назад" + window.history.pushState({ tab, params, previousTab }, '', url) + }, []) // deepTabs - константа, не нужно в зависимостях + + // Функция для очистки URL (при возврате к основному табу) + const clearUrl = useCallback((tab = null, usePushState = false) => { + const url = new URL(window.location) + const hasTabParam = url.searchParams.has('tab') + if (hasTabParam) { + url.searchParams.delete('tab') + url.searchParams.forEach((value, key) => { + url.searchParams.delete(key) + }) + // Сохраняем текущий таб в state для восстановления при "Назад" + if (usePushState && tab) { + window.history.pushState({ tab }, '', url) + } else { + window.history.replaceState(tab ? { tab } : {}, '', url) + } + } else if (tab) { + // Если URL уже чистый, но нужно сохранить state таба + if (usePushState) { + window.history.pushState({ tab }, '', url) + } else { + window.history.replaceState({ tab }, '', url) + } + } + }, []) + + // Функция для обновления URL без создания новой записи в истории (для обновления параметров того же таба) + const replaceUrl = useCallback((tab, params = {}) => { + if (!deepTabs.includes(tab)) { + return + } + + const url = new URL(window.location) + url.searchParams.set('tab', tab) + + // Удаляем старые параметры таба + const keysToRemove = [] + url.searchParams.forEach((value, key) => { + if (key !== 'tab') { + keysToRemove.push(key) + } + }) + keysToRemove.forEach(key => url.searchParams.delete(key)) + + // Добавляем новые параметры + Object.entries(params).forEach(([key, value]) => { + if (value !== undefined && value !== null) { + url.searchParams.set(key, typeof value === 'object' ? 
JSON.stringify(value) : value) + } + }) + + // Сохраняем текущий state, чтобы не потерять previousTab + const currentState = window.history.state || {} + window.history.replaceState({ ...currentState, tab, params }, '', url) + }, []) + + const fetchCurrentWeekData = useCallback(async (isBackground = false) => { + try { + if (isBackground) { + setCurrentWeekBackgroundLoading(true) + } else { + setCurrentWeekLoading(true) + } + setCurrentWeekError(null) + const response = await authFetch(CURRENT_WEEK_API_URL) + if (!response.ok) { + throw new Error('Ошибка загрузки данных') + } + const jsonData = await response.json() + // Обрабатываем ответ: приходит массив с одним объектом [{total: ..., projects: [...]}] + let projects = [] + let total = null + let groupProgress1 = null + let groupProgress2 = null + let groupProgress0 = null + + if (Array.isArray(jsonData) && jsonData.length > 0) { + // Если ответ - массив, проверяем первый элемент + const firstItem = jsonData[0] + if (firstItem && typeof firstItem === 'object') { + // Если первый элемент - объект с полями total и projects + if (firstItem.projects && Array.isArray(firstItem.projects)) { + projects = firstItem.projects + total = firstItem.total !== undefined ? firstItem.total : null + groupProgress1 = firstItem.group_progress_1 !== undefined ? firstItem.group_progress_1 : null + groupProgress2 = firstItem.group_progress_2 !== undefined ? firstItem.group_progress_2 : null + groupProgress0 = firstItem.group_progress_0 !== undefined ? firstItem.group_progress_0 : null + } else { + // Если это просто массив проектов + projects = jsonData + } + } else { + // Если это массив проектов напрямую + projects = jsonData + } + } else if (jsonData && typeof jsonData === 'object' && !Array.isArray(jsonData)) { + // Если ответ - объект напрямую + projects = jsonData.projects || jsonData.data || [] + total = jsonData.total !== undefined ? jsonData.total : null + groupProgress1 = jsonData.group_progress_1 !== undefined ? 
jsonData.group_progress_1 : null + groupProgress2 = jsonData.group_progress_2 !== undefined ? jsonData.group_progress_2 : null + groupProgress0 = jsonData.group_progress_0 !== undefined ? jsonData.group_progress_0 : null + } + + setCurrentWeekData({ + projects: Array.isArray(projects) ? projects : [], + total: total, + group_progress_1: groupProgress1, + group_progress_2: groupProgress2, + group_progress_0: groupProgress0 + }) + } catch (err) { + setCurrentWeekError(err.message) + console.error('Ошибка загрузки данных текущей недели:', err) + } finally { + if (isBackground) { + setCurrentWeekBackgroundLoading(false) + } else { + setCurrentWeekLoading(false) + } + } + }, [authFetch]) + + const fetchFullStatisticsData = useCallback(async (isBackground = false) => { + try { + if (isBackground) { + setFullStatisticsBackgroundLoading(true) + } else { + setFullStatisticsLoading(true) + } + setFullStatisticsError(null) + const response = await authFetch(FULL_STATISTICS_API_URL) + if (!response.ok) { + throw new Error('Ошибка загрузки данных') + } + const jsonData = await response.json() + setFullStatisticsData(jsonData) + } catch (err) { + setFullStatisticsError(err.message) + console.error('Ошибка загрузки данных полной статистики:', err) + } finally { + if (isBackground) { + setFullStatisticsBackgroundLoading(false) + } else { + setFullStatisticsLoading(false) + } + } + }, [authFetch]) + + const fetchTasksData = useCallback(async (isBackground = false) => { + try { + if (isBackground) { + setTasksBackgroundLoading(true) + } else { + setTasksLoading(true) + } + setTasksError(null) + const response = await authFetch('/api/tasks') + if (!response.ok) { + throw new Error('Ошибка загрузки данных') + } + const jsonData = await response.json() + setTasksData(jsonData) + } catch (err) { + console.error('Ошибка загрузки списка задач:', err) + setTasksError(err.message || 'Ошибка загрузки данных') + } finally { + if (isBackground) { + setTasksBackgroundLoading(false) + } else { + 
setTasksLoading(false) + } + } + }, [authFetch]) + + const fetchTodayEntries = useCallback(async (isBackground = false, projectName = null, date = null) => { + try { + if (isBackground) { + setTodayEntriesBackgroundLoading(true) + } else { + setTodayEntriesLoading(true) + } + setTodayEntriesError(null) + + // Формируем URL с опциональными параметрами project и date + let url = '/api/today-entries' + const params = [] + if (projectName) { + params.push(`project=${encodeURIComponent(projectName)}`) + } + if (date) { + params.push(`date=${encodeURIComponent(date)}`) + } + if (params.length > 0) { + url += `?${params.join('&')}` + } + + const response = await authFetch(url) + if (!response.ok) { + throw new Error('Ошибка загрузки данных') + } + const jsonData = await response.json() + setTodayEntriesData(Array.isArray(jsonData) ? jsonData : []) + } catch (err) { + setTodayEntriesError(err.message || 'Ошибка загрузки данных') + console.error('Ошибка загрузки today entries:', err) + } finally { + if (isBackground) { + setTodayEntriesBackgroundLoading(false) + } else { + setTodayEntriesLoading(false) + } + } + }, [authFetch]) + + // Используем ref для отслеживания инициализации табов (чтобы избежать лишних пересозданий функции) + const tabsInitializedRef = useRef({ + current: false, + priorities: false, + full: false, + words: false, + 'add-words': false, + dictionaries: false, + test: false, + tasks: false, + 'task-form': false, + profile: false, + 'todoist-integration': false, + 'telegram-integration': false, + 'fitbit-integration': false, + tracking: false, + 'tracking-access': false, + 'tracking-invite': false, + }) + + // Используем ref для отслеживания кеша (чтобы не зависеть от состояния в useCallback) + const cacheRef = useRef({ + current: null, + full: null, + tasks: null, + todayEntries: null, + }) + + // Refs для отслеживания активного таба + const prevActiveTabRef = useRef(null) + const lastLoadedTabRef = useRef(null) // Отслеживаем последний загруженный таб, 
чтобы избежать двойной загрузки + + // Обновляем ref при изменении данных + useEffect(() => { + cacheRef.current.current = currentWeekData + }, [currentWeekData]) + + useEffect(() => { + cacheRef.current.full = fullStatisticsData + }, [fullStatisticsData]) + + useEffect(() => { + cacheRef.current.tasks = tasksData + }, [tasksData]) + + useEffect(() => { + cacheRef.current.todayEntries = todayEntriesData + }, [todayEntriesData]) + + // Функция для загрузки данных таба + const loadTabData = useCallback((tab, isBackground = false, projectName = null) => { + if (tab === 'current') { + const hasCache = cacheRef.current.current !== null + const isInitialized = tabsInitializedRef.current.current + + if (!isInitialized) { + // Первая загрузка таба - загружаем с индикатором + fetchCurrentWeekData(false) + tabsInitializedRef.current.current = true + setTabsInitialized(prev => ({ ...prev, current: true })) + } else if (hasCache && isBackground) { + // Возврат на таб с кешем - фоновая загрузка + fetchCurrentWeekData(true) + } + // Если нет кеша и это не первая загрузка - ничего не делаем (данные уже загружаются) + } else if (tab === 'full') { + const hasCache = cacheRef.current.full !== null + const hasCurrentWeekCache = cacheRef.current.current !== null + const isInitialized = tabsInitializedRef.current.full + + if (!isInitialized) { + // Первая загрузка таба + if (hasCache) { + // Если есть кеш - используем фоновую загрузку, показываем старые данные + fetchFullStatisticsData(true) + } else { + // Если кеша нет - загружаем с индикатором + fetchFullStatisticsData(false) + } + // Также запускаем фоновую загрузку currentWeekData, если его нет + if (!hasCurrentWeekCache) { + fetchCurrentWeekData(true) + } + // todayEntries будет загружен в FullStatistics компоненте при выборе дня + tabsInitializedRef.current.full = true + setTabsInitialized(prev => ({ ...prev, full: true })) + } else if (hasCache && isBackground) { + // Возврат на таб с кешем - фоновая загрузка + 
fetchFullStatisticsData(true) + // Также запускаем фоновую загрузку currentWeekData, если его нет + if (!hasCurrentWeekCache) { + fetchCurrentWeekData(true) + } + // todayEntries будет загружен в FullStatistics компоненте при выборе дня + } + } else if (tab === 'priorities') { + const isInitialized = tabsInitializedRef.current.priorities + + if (!isInitialized) { + // Первая загрузка таба + setPrioritiesRefreshTrigger(prev => prev + 1) + tabsInitializedRef.current.priorities = true + setTabsInitialized(prev => ({ ...prev, priorities: true })) + } else if (isBackground) { + // Возврат на таб - фоновая загрузка + setPrioritiesRefreshTrigger(prev => prev + 1) + } + } else if (tab === 'dictionaries') { + const isInitialized = tabsInitializedRef.current['dictionaries'] + + if (!isInitialized) { + // Первая загрузка таба + setDictionariesRefreshTrigger(prev => prev + 1) + tabsInitializedRef.current['dictionaries'] = true + setTabsInitialized(prev => ({ ...prev, 'dictionaries': true })) + } else if (isBackground) { + // Возврат на таб - фоновая загрузка + setDictionariesRefreshTrigger(prev => prev + 1) + } + } else if (tab === 'tasks') { + const hasCache = cacheRef.current.tasks !== null + const isInitialized = tabsInitializedRef.current.tasks + + if (!isInitialized) { + // Первая загрузка таба - загружаем с индикатором + fetchTasksData(false) + tabsInitializedRef.current.tasks = true + setTabsInitialized(prev => ({ ...prev, tasks: true })) + } else if (hasCache && isBackground) { + // Возврат на таб с кешем - фоновая загрузка + fetchTasksData(true) + } + } + }, [fetchCurrentWeekData, fetchFullStatisticsData, fetchTasksData, fetchTodayEntries]) + + // Функция для обновления всех данных (для кнопки Refresh, если она есть) + const refreshAllData = useCallback(async () => { + setIsRefreshing(true) + setPrioritiesError(null) + setCurrentWeekError(null) + setFullStatisticsError(null) + + // Триггерим обновление приоритетов + setPrioritiesRefreshTrigger(prev => prev + 1) + + // 
Загружаем все данные параллельно (не фоново) + await Promise.all([ + fetchCurrentWeekData(false), + fetchFullStatisticsData(false), + ]) + + setIsRefreshing(false) + }, [fetchCurrentWeekData, fetchFullStatisticsData]) + + // Обработчик кнопки "назад" в браузере (только для глубоких табов) + useEffect(() => { + const handlePopState = (event) => { + // Проверяем, есть ли открытые модальные окна в DOM + const taskDetailModal = document.querySelector('.task-detail-modal-overlay') + const wishlistDetailModal = document.querySelector('.wishlist-detail-modal-overlay') + + // Если есть открытые модальные окна, не обрабатываем здесь - компоненты сами закроют их + if (taskDetailModal || wishlistDetailModal) { + return + } + + // Если это модальное окно, не обрабатываем здесь - компоненты сами закроют его + if (event.state && event.state.modalOpen) { + // Если модальных окон нет в DOM, это устаревшая запись — пропускаем её + if (!taskDetailModal && !wishlistDetailModal) { + window.history.back() + } + return + } + + const validTabs = ['current', 'priorities', 'full', 'words', 'add-words', 'dictionaries', 'test', 'tasks', 'task-form', 'wishlist', 'wishlist-form', 'wishlist-detail', 'profile', 'todoist-integration', 'telegram-integration', 'tracking', 'tracking-access', 'tracking-invite'] + + // Проверяем state текущей записи истории (куда мы вернулись) + if (event.state && event.state.tab) { + const { tab, params = {} } = event.state + + if (validTabs.includes(tab)) { + setActiveTab(tab) + setTabParams(params) + markTabAsLoaded(tab) + // Если это экран full, устанавливаем selectedProject только если он есть в params + if (tab === 'full') { + setSelectedProject(params.selectedProject || null) + } + return + } + } + + // Если state пустой или не содержит таб, пытаемся восстановить из URL + const urlParams = new URLSearchParams(window.location.search) + const tabFromUrl = urlParams.get('tab') + + if (tabFromUrl && validTabs.includes(tabFromUrl) && deepTabs.includes(tabFromUrl)) { 
+ // Если в URL есть глубокий таб, восстанавливаем его + setActiveTab(tabFromUrl) + markTabAsLoaded(tabFromUrl) + + const params = {} + urlParams.forEach((value, key) => { + if (key !== 'tab') { + try { + params[key] = JSON.parse(value) + } catch { + params[key] = value + } + } + }) + setTabParams(params) + // Если это экран full, устанавливаем selectedProject только если он есть в params + if (tabFromUrl === 'full') { + setSelectedProject(params.selectedProject || null) + } + } else { + // Если в URL нет глубокого таба, значит мы вернулись на основной таб + // Проверяем state - если там есть tab, используем его + if (event.state && event.state.tab && validTabs.includes(event.state.tab)) { + setActiveTab(event.state.tab) + setTabParams({}) + markTabAsLoaded(event.state.tab) + setSelectedProject(null) + clearUrl(event.state.tab) + } else { + // Если state пустой, используем сохраненный таб из localStorage + const savedTab = window.localStorage?.getItem('activeTab') + const validMainTab = savedTab && validTabs.includes(savedTab) ? savedTab : 'current' + setActiveTab(validMainTab) + setTabParams({}) + markTabAsLoaded(validMainTab) + setSelectedProject(null) + clearUrl(validMainTab) + } + } + } + + window.addEventListener('popstate', handlePopState) + + return () => { + window.removeEventListener('popstate', handlePopState) + } + }, [markTabAsLoaded, clearUrl]) // mainTabs и deepTabs - константы, не нужно в зависимостях + + // Обновляем данные при возвращении экрана в фокус (фоново) + useEffect(() => { + const handleFocus = () => { + if (document.visibilityState === 'visible') { + // Загружаем данные активного таба фоново + const projectName = activeTab === 'full' ? 
selectedProject : null + loadTabData(activeTab, true, projectName) + } + } + + window.addEventListener('focus', handleFocus) + document.addEventListener('visibilitychange', handleFocus) + + return () => { + window.removeEventListener('focus', handleFocus) + document.removeEventListener('visibilitychange', handleFocus) + } + }, [activeTab, loadTabData]) + + const handleProjectClick = (projectName) => { + setSelectedProject(projectName) + markTabAsLoaded('full') + setTabParams({ selectedProject: projectName }) + updateUrl('full', { selectedProject: projectName }, activeTab) + setActiveTab('full') + } + + const handleTabChange = (tab, params = {}) => { + if (tab === 'full' && activeTab === 'full') { + // При повторном клике на "Полная статистика" сбрасываем выбранный проект + setSelectedProject(null) + setTabParams({}) + updateUrl('full', {}, activeTab) + } else if (tab !== activeTab || tab === 'task-form' || tab === 'wishlist-form' || (tab === 'words' && Object.keys(params).length > 0)) { + // Для task-form и wishlist-form всегда обновляем параметры, даже если это тот же таб + markTabAsLoaded(tab) + + // Определяем, является ли текущий таб глубоким + const isCurrentTabDeep = deepTabs.includes(activeTab) + const isNewTabDeep = deepTabs.includes(tab) + const isCurrentTabMain = mainTabs.includes(activeTab) + const isNewTabMain = mainTabs.includes(tab) + + { + // Для task-form и wishlist-form явно удаляем параметры, только если нет никаких параметров + // task-form может иметь taskId (редактирование), wishlistId (создание из желания), returnTo (возврат после создания), или isTest (создание теста) + const isTaskFormWithNoParams = tab === 'task-form' && params.taskId === undefined && params.wishlistId === undefined && params.returnTo === undefined && params.isTest === undefined + // Проверяем, что boardId не null и не undefined (null означает "нет доски", но это валидное значение) + const hasBoardId = params.boardId !== null && params.boardId !== undefined + const 
isWishlistFormWithNoParams = tab === 'wishlist-form' && params.wishlistId === undefined && params.newTaskId === undefined && !hasBoardId + if (isTaskFormWithNoParams || isWishlistFormWithNoParams) { + setTabParams({}) + if (isNewTabMain) { + clearUrl() + } else if (isNewTabDeep) { + updateUrl(tab, {}, activeTab) + } + } else { + setTabParams(params) + // Обновляем URL только для глубоких табов + if (isNewTabDeep) { + // Проверяем, была ли последняя запись в истории от модального окна + const currentState = window.history.state || {} + const isFromModal = currentState.modalOpen === true + const isNavigatingToForm = tab === 'task-form' || tab === 'wishlist-form' + + if (isFromModal && isNavigatingToForm) { + // Заменяем запись модального окна на запись формы редактирования + // Используем replaceState вместо pushState, сохраняя activeTab как previousTab + const url = new URL(window.location) + url.searchParams.set('tab', tab) + // Удаляем старые параметры + const keysToRemove = [] + url.searchParams.forEach((value, key) => { + if (key !== 'tab') keysToRemove.push(key) + }) + keysToRemove.forEach(key => url.searchParams.delete(key)) + // Добавляем новые параметры + Object.entries(params).forEach(([key, value]) => { + if (value !== undefined && value !== null) { + url.searchParams.set(key, typeof value === 'object' ? 
JSON.stringify(value) : value) + } + }) + window.history.replaceState({ tab, params, previousTab: activeTab }, '', url) + } else { + // Сохраняем текущий таб как предыдущий при переходе на глубокий таб + updateUrl(tab, params, activeTab) + } + } else if (isNewTabMain && isCurrentTabDeep) { + // При переходе с глубокого таба на основной - очищаем URL и сохраняем таб в state + clearUrl(tab) + } else if (isNewTabMain && isCurrentTabMain) { + // При переходе между основными табами - сохраняем таб в state без изменения URL, НЕ создаем новую запись в истории + clearUrl(tab, false) + } + } + } + + setActiveTab(tab) + if (tab === 'current') { + setSelectedProject(null) + } else if (tab === 'full') { + // Если переходим на full без selectedProject в params, очищаем выбранный проект + if (!params.selectedProject) { + setSelectedProject(null) + } + } + // Обновляем список слов при возврате из экрана добавления слов + if (activeTab === 'add-words' && tab === 'words') { + setWordsRefreshTrigger(prev => prev + 1) + } + // Обновляем список задач при возврате из экрана редактирования или теста + // Используем фоновую загрузку, чтобы не показывать индикатор загрузки + if ((activeTab === 'task-form' || activeTab === 'test') && tab === 'tasks') { + fetchTasksData(true) + } + // Сохраняем предыдущий таб при открытии wishlist-form или wishlist-detail + if ((tab === 'wishlist-form' || tab === 'wishlist-detail') && activeTab !== tab) { + setPreviousTab(activeTab) + } + + // Обновляем список желаний при возврате из экрана редактирования + if (activeTab === 'wishlist-form' && tab !== 'wishlist-form') { + // Сохраняем boardId из параметров или текущих tabParams + const savedBoardId = params.boardId || tabParams.boardId + // Параметры уже установлены в строке 649, но мы можем их обновить, чтобы сохранить boardId + if (savedBoardId && tab === 'wishlist') { + setTabParams(prev => ({ ...prev, boardId: savedBoardId })) + } + if (tab === 'wishlist') { + setWishlistRefreshTrigger(prev => prev + 1) 
+ } + } + + // Обновляем список желаний при возврате из экрана детализации + if (activeTab === 'wishlist-detail' && tab !== 'wishlist-detail') { + if (tab === 'wishlist') { + setWishlistRefreshTrigger(prev => prev + 1) + } + } + // Загрузка данных произойдет в useEffect при изменении activeTab + } + } + + // Обработчики для кнопки добавления задачи + const handleAddClick = () => { + setShowAddModal(true) + } + + const handleAddTask = () => { + setShowAddModal(false) + handleNavigate('task-form', { taskId: undefined, isTest: false }) + } + + const handleAddTest = () => { + setShowAddModal(false) + handleNavigate('task-form', { taskId: undefined, isTest: true }) + } + + // Обработчик навигации для компонентов + const handleNavigate = (tab, params = {}) => { + handleTabChange(tab, params) + } + + // Загружаем данные при открытии таба (когда таб становится активным) + useEffect(() => { + if (!activeTab || !loadedTabs[activeTab]) return + + const isFirstLoad = !tabsInitializedRef.current[activeTab] + const isReturningToTab = prevActiveTabRef.current !== null && prevActiveTabRef.current !== activeTab + + // Проверяем, не загружали ли мы уже этот таб в этом рендере + const tabKey = `${activeTab}-${isFirstLoad ? 'first' : 'return'}` + if (lastLoadedTabRef.current === tabKey) { + return // Уже загружали + } + + // Обновляем список слов при возврате из экрана добавления слов + if (prevActiveTabRef.current === 'add-words' && activeTab === 'words') { + setWordsRefreshTrigger(prev => prev + 1) + } + + if (isFirstLoad) { + // Первая загрузка таба + lastLoadedTabRef.current = tabKey + const projectName = activeTab === 'full' ? selectedProject : null + loadTabData(activeTab, false, projectName) + } else if (isReturningToTab) { + // Возврат на таб - фоновая загрузка + lastLoadedTabRef.current = tabKey + const projectName = activeTab === 'full' ? 
selectedProject : null + loadTabData(activeTab, true, projectName) + } + + prevActiveTabRef.current = activeTab + }, [activeTab, loadedTabs, loadTabData, selectedProject]) + + // Обновляем todayEntries при изменении selectedProject для таба 'full' + // НЕ загружаем данные при открытии таба - это делает компонент FullStatistics + // Загружаем только при изменении selectedProject, если таб уже открыт + useEffect(() => { + if (activeTab === 'full' && prevActiveTabRef.current === 'full') { + // Таб уже был открыт, просто изменился selectedProject + // Данные будут загружены компонентом FullStatistics с правильной датой + } + }, [selectedProject, activeTab]) + + + + // Определяем общее состояние загрузки и ошибок для кнопки Refresh + const isAnyLoading = currentWeekLoading || fullStatisticsLoading || prioritiesLoading || isRefreshing + const hasAnyError = currentWeekError || fullStatisticsError || prioritiesError + + // Сохраняем выбранный таб, чтобы восстановить его после перезагрузки + useEffect(() => { + try { + window.localStorage?.setItem('activeTab', activeTab) + } catch (err) { + console.warn('Не удалось сохранить активный таб в localStorage', err) + } + }, [activeTab]) + + // Show loading while checking auth + if (authLoading) { + return ( +
+
Загрузка...
+
+ ) + } + + // Show auth screen if not authenticated + if (!isAuthenticated) { + prevIsAuthenticatedRef.current = false + return + } + + // Определяем, нужно ли скрывать нижнюю панель (для fullscreen экранов) + const isFullscreenTab = activeTab === 'test' || activeTab === 'add-words' || activeTab === 'task-form' || activeTab === 'wishlist-form' || activeTab === 'wishlist-detail' || activeTab === 'todoist-integration' || activeTab === 'telegram-integration' || activeTab === 'fitbit-integration' || activeTab === 'full' || activeTab === 'priorities' || activeTab === 'words' || activeTab === 'dictionaries' || activeTab === 'tracking' || activeTab === 'tracking-access' || activeTab === 'tracking-invite' + + // Функция для получения классов скролл-контейнера для каждого таба + // Каждый таб имеет свой изолированный скролл-контейнер для автоматического сохранения позиции скролла + const getTabContainerClasses = (tabName) => { + const isActive = activeTab === tabName + const baseClasses = 'absolute inset-0 overflow-y-auto' + // Активный таб: z-10 (сверху), неактивные: z-0 + invisible + opacity-0 (мгновенное скрытие) + const visibilityClasses = isActive ? 
'z-10' : 'z-0 invisible opacity-0 pointer-events-none' + + // Определяем padding для каждого таба + let paddingClasses = '' + if (tabName === 'current' || tabName === 'tasks' || tabName === 'wishlist' || tabName === 'profile') { + paddingClasses = 'pb-20' + } else if (tabName === 'words' || tabName === 'dictionaries') { + paddingClasses = 'pb-16' + } + + return `${baseClasses} ${paddingClasses} ${visibilityClasses}`.trim() + } + + // Функция для определения отступов внутреннего контейнера + const getInnerContainerClasses = (tabName) => { + if (tabName === 'tasks' || tabName === 'wishlist' || tabName === 'profile') { + return 'max-w-7xl mx-auto p-4 md:p-8' + } + if (tabName === 'current') { + return 'max-w-7xl mx-auto p-4 md:p-6' + } + if (tabName === 'full' || tabName === 'priorities' || tabName === 'dictionaries' || tabName === 'words') { + return 'max-w-7xl mx-auto px-4 md:px-8 py-0' + } + // Fullscreen табы без отступов + return 'max-w-7xl mx-auto p-0' + } + + return ( +
+ {/* Контейнер табов - каждый таб имеет свой изолированный скролл */} +
+ {loadedTabs.current && ( +
+
+ { + currentWeekAddModalRef.current = setOpenFn + }} + /> +
+
+ )} + + {loadedTabs.priorities && ( +
+
+ +
+
+ )} + + {loadedTabs.full && ( +
+
+ { + setSelectedProject(null) + setTabParams({}) + replaceUrl('full', {}) + }} + data={fullStatisticsData} + loading={fullStatisticsLoading} + error={fullStatisticsError} + todayEntries={todayEntriesData} + todayEntriesLoading={todayEntriesLoading || todayEntriesBackgroundLoading} + todayEntriesError={todayEntriesError} + onRetryTodayEntries={() => fetchTodayEntries(false, selectedProject, null)} + fetchTodayEntries={fetchTodayEntries} + onRetry={fetchFullStatisticsData} + currentWeekData={currentWeekData} + onNavigate={handleNavigate} + activeTab={activeTab} + /> +
+
+ )} + + {loadedTabs.words && ( +
+
+ +
+
+ )} + + {loadedTabs['add-words'] && ( +
+
+ +
+
+ )} + + {loadedTabs.dictionaries && ( +
+
+ +
+
+ )} + + {loadedTabs.test && ( +
+
+ +
+
+ )} + + {loadedTabs.tasks && ( +
+
+ fetchTasksData(false)} + onRefresh={(isBackground = false) => fetchTasksData(isBackground)} + /> +
+
+ )} + + {loadedTabs['task-form'] && ( +
+
+ +
+
+ )} + + {loadedTabs.wishlist && ( +
+
+ +
+
+ )} + + {loadedTabs['wishlist-form'] && ( +
+
+ +
+
+ )} + + {loadedTabs['board-form'] && ( +
+
+ setWishlistRefreshTrigger(prev => prev + 1)} + /> +
+
+ )} + + {loadedTabs['board-join'] && ( +
+
+ +
+
+ )} + + {loadedTabs.profile && ( +
+
+ +
+
+ )} + + {loadedTabs['todoist-integration'] && ( +
+
+ +
+
+ )} + + {loadedTabs['telegram-integration'] && ( +
+
+ +
+
+ )} + + {loadedTabs['fitbit-integration'] && ( +
+
+ +
+
+ )} + + {loadedTabs.tracking && ( +
+
+ +
+
+ )} + + {loadedTabs['tracking-access'] && ( +
+
+ +
+
+ )} + + {loadedTabs['tracking-invite'] && ( +
+
+ +
+
+ )} +
+ + {/* Кнопка добавления задачи (только для таба задач) */} + {!isFullscreenTab && activeTab === 'tasks' && ( + + )} + + {/* Кнопка добавления желания (только для таба wishlist) */} + {!isFullscreenTab && activeTab === 'wishlist' && ( + + )} + + {/* Кнопка добавления записи (только для таба current - экран прогресса) */} + {!isFullscreenTab && activeTab === 'current' && ( + + )} + + {/* Кнопка добавления словаря (только для таба dictionaries) */} + {activeTab === 'dictionaries' && ( + + )} + + {/* Кнопка добавления слов (только для таба words) */} + {activeTab === 'words' && ( + + )} + + {!isFullscreenTab && ( +
+
+ + + + +
+
+ )} + + {/* Модальное окно выбора типа задачи */} + {showAddModal && ( +
setShowAddModal(false)}> +
e.stopPropagation()}> +
+

Что добавить?

+
+
+ + +
+
+
+ )} +
+ ) +} + +function App() { + return ( + + + + + ) +} + +export default App + + diff --git a/play-life-web/src/components/AddWords.css b/play-life-web/src/components/AddWords.css new file mode 100644 index 0000000..86f9c21 --- /dev/null +++ b/play-life-web/src/components/AddWords.css @@ -0,0 +1,231 @@ +.add-words { + padding-left: 1rem; + padding-right: 1rem; +} + +@media (min-width: 768px) { + .add-words { + padding-left: 1.5rem; + padding-right: 1.5rem; + } +} + +.add-words h2 { + margin-top: 2rem; + margin-bottom: 1rem; + color: #2c3e50; + font-size: 2rem; +} + +.description { + margin-bottom: 1.5rem; + color: #666; + font-size: 0.95rem; +} + +.markdown-input { + width: 100%; + padding: 1rem; + border: 2px solid #ddd; + border-radius: 4px; + font-family: 'Courier New', monospace; + font-size: 0.9rem; + resize: vertical; + margin-bottom: 1rem; + transition: border-color 0.2s; +} + +.markdown-input:focus { + outline: none; + border-color: #3498db; +} + +.tabs-container { + display: flex; + gap: 0.5rem; + margin-bottom: 1.5rem; + border-bottom: 2px solid #e0e0e0; +} + +.tab-button { + background: none; + border: none; + padding: 0.75rem 1.5rem; + font-size: 1rem; + color: #666; + cursor: pointer; + border-bottom: 3px solid transparent; + margin-bottom: -2px; + transition: color 0.2s, border-color 0.2s; +} + +.tab-button:hover { + color: #3498db; +} + +.tab-button.active { + color: #3498db; + border-bottom-color: #3498db; + font-weight: 600; +} + +.tab-content { + margin-bottom: 1rem; +} + +.word-pairs-list { + display: flex; + flex-direction: column; + gap: 0.75rem; +} + +.word-pair-item { + display: flex; + flex-direction: row; + gap: 0.75rem; + width: 100%; + box-sizing: border-box; +} + +.word-input, +.translation-input { + flex: 1; + min-width: 0; + padding: 0.75rem; + border: 2px solid #ddd; + border-radius: 4px; + font-size: 1rem; + transition: border-color 0.2s; + box-sizing: border-box; +} + +.word-input:focus, +.translation-input:focus { + outline: none; + 
border-color: #3498db; +} + +.remove-pair-button { + background: transparent; + border: none; + border-radius: 6px; + color: #9ca3af; + cursor: pointer; + width: 2rem; + min-width: 2rem; + padding: 0; + display: flex; + align-items: center; + justify-content: center; + transition: all 0.2s ease; + box-sizing: border-box; + flex-shrink: 0; + align-self: stretch; +} + +.remove-pair-button svg { + display: block; + margin: 0; + vertical-align: middle; +} + +.remove-pair-button:hover { + background-color: rgba(239, 68, 68, 0.1); + color: #ef4444; +} + +.remove-pair-button:active { + background-color: rgba(239, 68, 68, 0.2); + transform: scale(0.95); +} + +.add-pair-button { + width: 100%; + padding: 0.75rem; + background-color: #f8f9fa; + border: 2px dashed #ddd; + border-radius: 4px; + font-size: 1.5rem; + line-height: 1; + color: #666; + cursor: pointer; + transition: background-color 0.2s, border-color 0.2s, color 0.2s; + display: flex; + align-items: center; + justify-content: center; + box-sizing: border-box; + min-height: calc(0.75rem * 2 + 1rem + 4px); +} + +.add-pair-button:hover { + background-color: #e9ecef; + border-color: #3498db; + color: #3498db; +} + +.submit-button { + width: 100%; + background-color: #3498db; + color: white; + border: none; + padding: 0.75rem 2rem; + border-radius: 8px; + font-size: 1rem; + cursor: pointer; + transition: background-color 0.2s; + margin-bottom: 1rem; +} + +.submit-button:hover:not(:disabled) { + background-color: #2980b9; +} + +.submit-button:disabled { + background-color: #bdc3c7; + cursor: not-allowed; +} + +.message { + margin-top: 1rem; + padding: 1rem; + border-radius: 4px; + font-weight: 500; +} + +.message.success { + background-color: #d4edda; + color: #155724; + border: 1px solid #c3e6cb; +} + +.message.error { + background-color: #f8d7da; + color: #721c24; + border: 1px solid #f5c6cb; +} + +.close-x-button { + position: fixed; + top: 1rem; + right: 1rem; + background: rgba(255, 255, 255, 0.9); + border: none; 
+ font-size: 1.5rem; + color: #7f8c8d; + cursor: pointer; + width: 40px; + height: 40px; + display: flex; + align-items: center; + justify-content: center; + border-radius: 50%; + transition: background-color 0.2s, color 0.2s; + z-index: 1600; + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15); +} + +.close-x-button:hover { + background-color: #ffffff; + color: #2c3e50; +} + diff --git a/play-life-web/src/components/AddWords.jsx b/play-life-web/src/components/AddWords.jsx new file mode 100644 index 0000000..791e8fe --- /dev/null +++ b/play-life-web/src/components/AddWords.jsx @@ -0,0 +1,325 @@ +import React, { useState, useEffect } from 'react' +import { useAuth } from './auth/AuthContext' +import './AddWords.css' + +const API_URL = '/api' + +function AddWords({ onNavigate, dictionaryId, dictionaryName }) { + const { authFetch } = useAuth() + const [activeTab, setActiveTab] = useState('words') + const [markdownText, setMarkdownText] = useState('') + const [wordPairs, setWordPairs] = useState([{ word: '', translation: '' }]) + const [message, setMessage] = useState('') + const [loading, setLoading] = useState(false) + const [currentDictionaryName, setCurrentDictionaryName] = useState(dictionaryName || '') + const [dictionaryLoading, setDictionaryLoading] = useState(false) + + // Fetch dictionary name if not provided and dictionaryId exists + useEffect(() => { + if (dictionaryName) { + setCurrentDictionaryName(dictionaryName) + } else if (dictionaryId) { + fetchDictionaryName(dictionaryId) + } + }, [dictionaryId, dictionaryName]) + + const fetchDictionaryName = async (dictId) => { + if (!dictId) return + + setDictionaryLoading(true) + try { + const response = await authFetch(`${API_URL}/dictionaries`) + if (!response.ok) { + throw new Error('Ошибка при загрузке словарей') + } + const dictionaries = await response.json() + const dict = dictionaries.find(d => d.id === dictId) + if (dict) { + setCurrentDictionaryName(dict.name) + } + } catch (err) { + console.error('Error 
fetching dictionary name:', err) + } finally { + setDictionaryLoading(false) + } + } + + // Hide add button if dictionary name is not set + const canAddWords = currentDictionaryName && currentDictionaryName.trim() !== '' + + const handleAddPair = () => { + setWordPairs([...wordPairs, { word: '', translation: '' }]) + } + + const handleRemovePair = (index) => { + const newPairs = wordPairs.filter((_, i) => i !== index) + setWordPairs(newPairs) + } + + const handlePairChange = (index, field, value) => { + const newPairs = [...wordPairs] + newPairs[index][field] = value + setWordPairs(newPairs) + } + + const parseMarkdownTable = (text) => { + const lines = text.split('\n') + const words = [] + let foundTable = false + let headerFound = false + + for (let i = 0; i < lines.length; i++) { + const line = lines[i].trim() + + // Skip empty lines + if (!line) continue + + // Look for table start (markdown table with |) + if (line.includes('|') && line.includes('Слово')) { + foundTable = true + headerFound = true + continue + } + + // Skip separator line (|---|---|) + if (foundTable && line.match(/^\|[\s\-|:]+\|$/)) { + continue + } + + // Parse table rows + if (foundTable && headerFound && line.includes('|')) { + const cells = line + .split('|') + .map(cell => (cell || '').trim()) + .filter(cell => cell && cell.length > 0) + + if (cells.length >= 2) { + // Remove markdown formatting (**bold**, etc.) + const word = cells[0].replace(/\*\*/g, '').trim() + const translation = cells[1].replace(/\*\*/g, '').trim() + + if (word && translation) { + words.push({ + name: word, + translation: translation, + description: '' + }) + } + } + } + } + + return words + } + + const handleSubmit = async (e) => { + e.preventDefault() + setMessage('') + setLoading(true) + + try { + let words = [] + + if (activeTab === 'table') { + words = parseMarkdownTable(markdownText) + if (words.length === 0) { + setMessage('Не удалось найти слова в таблице. 
Убедитесь, что таблица содержит колонки "Слово" и "Перевод".') + setLoading(false) + return + } + } else if (activeTab === 'words') { + // Filter out empty pairs and convert to words format + words = wordPairs + .filter(pair => pair.word.trim() && pair.translation.trim()) + .map(pair => ({ + name: pair.word.trim(), + translation: pair.translation.trim(), + description: '' + })) + + if (words.length === 0) { + setMessage('Добавьте хотя бы одно слово с переводом.') + setLoading(false) + return + } + } + + // Add dictionary_id to each word if dictionaryId is provided + const wordsWithDictionary = words.map(word => ({ + ...word, + dictionary_id: dictionaryId !== undefined && dictionaryId !== null ? dictionaryId : undefined + })) + + const response = await authFetch(`${API_URL}/words`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ words: wordsWithDictionary }), + }) + + if (!response.ok) { + throw new Error('Ошибка при добавлении слов') + } + + const data = await response.json() + const addedCount = data?.added || 0 + setMessage(`Успешно добавлено ${addedCount} слов(а)!`) + + // Reset form based on active tab + if (activeTab === 'table') { + setMarkdownText('') + } else if (activeTab === 'words') { + setWordPairs([{ word: '', translation: '' }]) + } + } catch (error) { + setMessage(`Ошибка: ${error.message}`) + } finally { + setLoading(false) + } + } + + // Check if form can be submitted + const canSubmit = () => { + if (!canAddWords) return false + if (activeTab === 'table') { + return markdownText.trim().length > 0 + } else if (activeTab === 'words') { + return wordPairs.some(pair => pair.word.trim() && pair.translation.trim()) + } + return false + } + + const handleClose = () => { + window.history.back() + } + + // Show loading state while fetching dictionary name + if (dictionaryLoading) { + return ( +
+
+
+
+
Загрузка...
+
+
+
+ ) + } + + return ( +
+ +

Добавить слова

+ +
+ + +
+ +
+
+ {activeTab === 'table' && ( + <> +

+ Вставьте текст в формате Markdown с таблицей, содержащей колонки "Слово" и "Перевод" +

+