Compare commits
259 Commits
a611f05959
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f9bb4b2d68 | ||
|
|
d1024e0570 | ||
|
|
9a0db31679 | ||
|
|
571ec4a8bc | ||
|
|
a22c559d66 | ||
|
|
60a0efafad | ||
|
|
48fa192cdc | ||
|
|
06cc1a4b3b | ||
|
|
e2966aedd1 | ||
|
|
d561683e12 | ||
|
|
fdc3e01443 | ||
|
|
d4f0064aa7 | ||
|
|
87126a480a | ||
|
|
837ddbe4ed | ||
|
|
44bbb46a1a | ||
|
|
1795a66ee1 | ||
|
|
84b5aa9390 | ||
|
|
e8a766205f | ||
|
|
6e7ebb9aa3 | ||
|
|
101f4e27ed | ||
|
|
f1c12fd81a | ||
|
|
664adcfaa5 | ||
|
|
8acfaf19ac | ||
|
|
2fde471076 | ||
|
|
5f05b77d36 | ||
|
|
eb68eca63f | ||
|
|
b82db8d80f | ||
|
|
b3403ff23a | ||
|
|
b8373eb986 | ||
|
|
df17ecf943 | ||
|
|
42ea241b7c | ||
|
|
3a06d9148c | ||
|
|
b1f4fdd449 | ||
|
|
01cd0e9003 | ||
|
|
6dc3ec828f | ||
|
|
0a8ff4dfab | ||
|
|
dff929c52c | ||
|
|
2104fea5e2 | ||
|
|
caa8ac6ebb | ||
|
|
49f67ec36d | ||
|
|
171befdf05 | ||
|
|
0c6ba5c8fb | ||
|
|
1b7e2cd887 | ||
|
|
53593fdc3d | ||
|
|
b888d056e4 | ||
|
|
cbf20ce679 | ||
|
|
91bba98b65 | ||
|
|
c927f55fd6 | ||
|
|
ae13b2bcac | ||
|
|
85e9a6f48b | ||
|
|
7309deb98f | ||
|
|
10f370b0da | ||
|
|
f59453783a | ||
|
|
5ea58476cb | ||
|
|
1876595005 | ||
|
|
a4dcc62a37 | ||
|
|
8749f21ac8 | ||
|
|
912ae7a857 | ||
|
|
7ec76ea59b | ||
|
|
4f69481efe | ||
|
|
b85b85a27f | ||
|
|
710adff385 | ||
|
|
9c915d4675 | ||
|
|
7e0f979ae3 | ||
|
|
d42535f36e | ||
|
|
95985f97f2 | ||
|
|
7889922d9b | ||
|
|
97753f4465 | ||
|
|
b1ffb7ba7d | ||
|
|
c232bb40a3 | ||
|
|
3bd864d41a | ||
|
|
193b4138d9 | ||
|
|
06b7c614ed | ||
|
|
b51b9421be | ||
|
|
0dca57964d | ||
|
|
95ed1b48fe | ||
|
|
6f76c4a25c | ||
|
|
c8a47ff408 | ||
|
|
4ce8ba66cc | ||
|
|
c42cdfe35b | ||
|
|
4971b2a305 | ||
|
|
64493b9c1f | ||
|
|
1df00bbefd | ||
|
|
0b5106458a | ||
|
|
02c8b7537a | ||
|
|
2a61b17187 | ||
|
|
3624cfffbd | ||
|
|
a35797a1f9 | ||
|
|
20778d6d39 | ||
|
|
ac1f6c3a47 | ||
|
|
25f193a061 | ||
|
|
ea1720506a | ||
|
|
dc50433eb1 | ||
|
|
4b28d90d68 | ||
|
|
fb1ccd7831 | ||
|
|
636f53eb04 | ||
|
|
786a03bf86 | ||
|
|
eb5e5a5476 | ||
|
|
ebd1398a81 | ||
|
|
3cac8d0452 | ||
|
|
e962f49407 | ||
|
|
79fa0538f9 | ||
|
|
99b0eba701 | ||
|
|
23f16a8bef | ||
|
|
a441a3d1e7 | ||
|
|
7957776f53 | ||
|
|
a693d3fa4b | ||
|
|
3a1b836ece | ||
|
|
17e6bbf9f1 | ||
|
|
c9a8b994eb | ||
|
|
b47d50f51c | ||
|
|
37d5c87a55 | ||
|
|
c911950cc1 | ||
|
|
2ec5860d78 | ||
|
|
5d257cd0f8 | ||
|
|
e7ce7b2092 | ||
|
|
81de26e586 | ||
|
|
4169285394 | ||
|
|
c8fead4034 | ||
|
|
01631ff13b | ||
|
|
60fca2d93c | ||
|
|
cd51b097c8 | ||
|
|
fd416b4bd5 | ||
|
|
537ad9e06e | ||
|
|
b0155e6cbe | ||
|
|
999fa15267 | ||
|
|
67334dde3c | ||
|
|
7e51b0cb9f | ||
|
|
3a6f223aac | ||
|
|
e1b6fcb918 | ||
|
|
f54a0fff14 | ||
|
|
80800da839 | ||
|
|
c7b684491c | ||
|
|
7f51411175 | ||
|
|
25317997e5 | ||
|
|
7c5b80b314 | ||
|
|
368f10bcdd | ||
|
|
ff16f98736 | ||
|
|
027063dfb9 | ||
|
|
7fdcbb75da | ||
|
|
2b7b056562 | ||
|
|
20773a29b7 | ||
|
|
6caed05c9f | ||
|
|
92453def91 | ||
|
|
9f3637113d | ||
|
|
98427f5d0e | ||
|
|
81dc23b501 | ||
|
|
91d4a7337c | ||
|
|
0f1f5e3943 | ||
|
|
8ea71ef95f | ||
|
|
e457113fc9 | ||
|
|
c04422ed69 | ||
|
|
7bbd732d72 | ||
|
|
ad52cf93ea | ||
|
|
ba6d823354 | ||
|
|
e41abb2bff | ||
|
|
2236f95ffa | ||
|
|
07c4deaf70 | ||
|
|
cea2c341a2 | ||
|
|
bff62c0b8f | ||
|
|
a5e3396017 | ||
|
|
41f8df36a9 | ||
|
|
76049b3da5 | ||
|
|
9cfb988960 | ||
|
|
242183a422 | ||
|
|
29bd50acab | ||
|
|
72da547b80 | ||
|
|
1fe3819be6 | ||
|
|
1f5f3299f8 | ||
|
|
7012f1c8ed | ||
|
|
2128e1b69c | ||
|
|
5b53615d1a | ||
|
|
b05bd51b5b | ||
|
|
9345b5ab5c | ||
|
|
bad198ce29 | ||
|
|
405d30bead | ||
|
|
d355928aa9 | ||
|
|
af2aaa4168 | ||
|
|
826996c5cd | ||
|
|
dfccba4e55 | ||
|
|
f1c590de43 | ||
|
|
9f37d8b518 | ||
|
|
0275d9aecf | ||
|
|
3ce408a6b1 | ||
|
|
e89f0879c6 | ||
|
|
73ce74bc7c | ||
|
|
867e8803bd | ||
|
|
49eff37399 | ||
|
|
8a036df1b4 | ||
|
|
65f21cd025 | ||
|
|
a76d1d40cb | ||
|
|
6e9e2db23e | ||
|
|
d6d40f4f86 | ||
|
|
9c814d62b2 | ||
|
|
9a066c88ac | ||
|
|
22f6807eb2 | ||
|
|
59d376b999 | ||
|
|
0463c237c0 | ||
|
|
0ee689151e | ||
|
|
126f9ec919 | ||
|
|
736f08887a | ||
|
|
106defc3af | ||
|
|
42cf825de1 | ||
|
|
a60bfe97dc | ||
|
|
09ab87b6dd | ||
|
|
f5e10c143f | ||
|
|
8965e43341 | ||
|
|
62d36dca17 | ||
|
|
e3e9084792 | ||
|
|
f1ee6082dd | ||
|
|
479ffb2483 | ||
|
|
c22e56e68a | ||
|
|
b9482dc86d | ||
|
|
e66a3cecce | ||
|
|
8023319ee4 | ||
|
|
794947ea89 | ||
|
|
cdd10d50c0 | ||
|
|
43df4d76ce | ||
|
|
a169da9387 | ||
|
|
e0ffefc904 | ||
|
|
df3cced995 | ||
|
|
36dd96976f | ||
|
|
c3d366b9c2 | ||
|
|
0c5f7fa9d9 | ||
|
|
23184f4b66 | ||
|
|
b65dc30a9b | ||
|
|
0162db46b3 | ||
|
|
6b95326a86 | ||
|
|
56da114210 | ||
|
|
d90df473a2 | ||
|
|
78ef1e78dc | ||
|
|
c3d2c0d6a6 | ||
|
|
ff9fec7d7a | ||
|
|
56e29230ff | ||
|
|
ebe71f073c | ||
|
|
8ffbfc6afd | ||
|
|
f34d35febf | ||
|
|
fc7464021e | ||
|
|
f7d340fc70 | ||
|
|
763b13358e | ||
|
|
de29e3f602 | ||
|
|
a780b46175 | ||
|
|
3278eef2c5 | ||
|
|
5ac3c931b9 | ||
|
|
89e66d6093 | ||
|
|
b15e1dd615 | ||
|
|
dfe9f5b9a0 | ||
|
|
2428ca5fd0 | ||
|
|
e955494dc8 | ||
|
|
25f0c2697a | ||
|
|
56d413f761 | ||
|
|
f266508d04 | ||
|
|
5c5fc07481 | ||
|
|
4e270cb322 | ||
|
|
ba0f34c91b | ||
|
|
a886cf13e8 | ||
|
|
8e29acd25e | ||
|
|
6dbb0f8d90 | ||
|
|
6174475509 |
8
.claude/settings.json
Normal file
8
.claude/settings.json
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"permissions": {
|
||||||
|
"allow": [
|
||||||
|
"Bash(curl -s -o /dev/null -w \"%{http_code}\" http://localhost:8080/priorities/confirm)",
|
||||||
|
"Bash(curl -s -o /dev/null -w \"%{http_code}\" -X POST http://localhost:8080/priorities/confirm -H \"Content-Type: application/json\" -d '[]')"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -14,6 +14,13 @@
|
|||||||
"type": "shell",
|
"type": "shell",
|
||||||
"cwd": "${workspaceFolder}"
|
"cwd": "${workspaceFolder}"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "runLLM",
|
||||||
|
"description": "Запуск/перезапуск play-life-llm (обычно на отдельной машине)",
|
||||||
|
"command": "./runLLM.sh",
|
||||||
|
"type": "shell",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "backupFromProd",
|
"name": "backupFromProd",
|
||||||
"description": "Создание дампа базы данных с продакшена",
|
"description": "Создание дампа базы данных с продакшена",
|
||||||
|
|||||||
37
.cursor/plans/normalized_total_score_fix_migration.plan.md
Normal file
37
.cursor/plans/normalized_total_score_fix_migration.plan.md
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
---
|
||||||
|
name: normalized_total_score fix migration
|
||||||
|
overview: "Новая миграция 000023: пересоздать weekly_report_mv с max_goal_score и удалить max_score из weekly_goals."
|
||||||
|
todos:
|
||||||
|
- id: migration-up
|
||||||
|
content: "Добавить 000023 up: DROP MV, CREATE MV с max_goal_score (из 000020), DROP COLUMN IF EXISTS max_score"
|
||||||
|
status: pending
|
||||||
|
- id: migration-down
|
||||||
|
content: "Добавить 000023 down: восстановить MV со старой формулой (max_score) и колонку max_score"
|
||||||
|
status: pending
|
||||||
|
- id: verify-local
|
||||||
|
content: Применить миграцию локально и проверить «Релокация 2026-08» (normalized 32.74 и 21.55)
|
||||||
|
status: pending
|
||||||
|
isProject: false
|
||||||
|
---
|
||||||
|
|
||||||
|
# План: Исправить normalized_total_score через новую миграцию
|
||||||
|
|
||||||
|
## Проблема
|
||||||
|
|
||||||
|
На проде (и в локальной копии продовой БД) `normalized_total_score` не учитывает `max_goal_score`: в определении материализованного представления `weekly_report_mv` до сих пор используется колонка `wg.max_score`, которая не заполняется (всегда NULL) → формула всегда даёт `normalized_total_score = total_score`.
|
||||||
|
|
||||||
|
## Решение
|
||||||
|
|
||||||
|
Новая миграция (не менять 000020/000022):
|
||||||
|
|
||||||
|
1. **Пересоздать `weekly_report_mv`** с определением из 000020: в формуле использовать `max_goal_score`, тот же подзапрос по `n.created_date` и фильтр «только прошлые недели».
|
||||||
|
2. **Удалить колонку `max_score` из `weekly_goals`**, если есть: `ALTER TABLE weekly_goals DROP COLUMN IF EXISTS max_score;`
|
||||||
|
|
||||||
|
После применения и `REFRESH` (или при следующем кроне) для прошлых недель normalized будет ограничиваться целями (например, Релокация 2026-08: 39.14 → 32.74).
|
||||||
|
|
||||||
|
## Todos
|
||||||
|
|
||||||
|
- **migration-up** — Добавить миграцию `000023_fix_weekly_report_mv_use_max_goal_score.up.sql`: DROP MV, CREATE MV с max_goal_score (копия определения из 000020), DROP COLUMN IF EXISTS max_score в weekly_goals
|
||||||
|
- **migration-down** — Добавить `000023_fix_weekly_report_mv_use_max_goal_score.down.sql`: восстановить MV со старой формулой (max_score) и колонку max_score в weekly_goals
|
||||||
|
- **verify-local** — Применить миграцию локально и проверить по Релокации за 2026-08: normalized_total_score = 32.74 (project_id 27) и 21.55 (project_id 592)
|
||||||
|
|
||||||
@@ -0,0 +1,260 @@
|
|||||||
|
# План: Изменить сортировку заблокированных желаний по времени разблокировки
|
||||||
|
|
||||||
|
## Цель
|
||||||
|
Изменить сортировку желаний:
|
||||||
|
1. Разблокированные - по цене от меньшего к большему
|
||||||
|
2. Заблокированные без целей-задач - по сроку разблокировки (максимальное время среди проектов)
|
||||||
|
3. Заблокированные с целями-задачами - по сроку разблокировки (максимальное время среди проектов)
|
||||||
|
|
||||||
|
## Статус реализации
|
||||||
|
|
||||||
|
**Уже реализовано:**
|
||||||
|
- ✅ `calculateProjectUnlockWeeks` - функция расчета недель разблокировки
|
||||||
|
- ✅ `calculateLockedSortValue` - использует `calculateProjectUnlockWeeks` и возвращает недели
|
||||||
|
- ✅ `getProjectMedian` - упрощенная версия без fallback (используется как есть)
|
||||||
|
|
||||||
|
**Требуется реализовать:**
|
||||||
|
- ⏳ Создать миграцию для `projects_median_mv` (миграции нет, но используется в коде)
|
||||||
|
- ⏳ В `getWishlistHandler`: заменить `calculateUnlockedSortValue` на прямую сортировку по цене для разблокированных
|
||||||
|
- ⏳ В `getWishlistHandler`: разделить заблокированные на группы (с задачами/без задач) и сортировать каждую группу
|
||||||
|
- ⏳ В `getBoardItemsHandler`: заменить `calculateUnlockedSortValue` на прямую сортировку по цене для разблокированных
|
||||||
|
- ⏳ В `getBoardItemsHandler`: разделить заблокированные на группы (с задачами/без задач) и сортировать каждую группу
|
||||||
|
|
||||||
|
## Изменения
|
||||||
|
|
||||||
|
### 1. Создать миграцию для projects_median_mv
|
||||||
|
|
||||||
|
**Статус:** `getProjectMedian` уже использует `projects_median_mv`, но миграции для неё нет в списке миграций. Нужно создать миграцию.
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/migrations/000007_add_projects_median_mv.up.sql`
|
||||||
|
|
||||||
|
Убедиться, что materialized view включает `user_id`:
|
||||||
|
```sql
|
||||||
|
CREATE MATERIALIZED VIEW projects_median_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
p.user_id,
|
||||||
|
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
project_id,
|
||||||
|
normalized_total_score,
|
||||||
|
report_year,
|
||||||
|
report_week,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
|
||||||
|
FROM weekly_report_mv
|
||||||
|
WHERE
|
||||||
|
(report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
) sub
|
||||||
|
JOIN projects p ON p.id = sub.project_id
|
||||||
|
WHERE rn <= 12 AND p.deleted = FALSE
|
||||||
|
GROUP BY p.id, p.user_id
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
|
||||||
|
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/migrations/000007_add_projects_median_mv.down.sql`
|
||||||
|
|
||||||
|
```sql
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Изменить calculateLockedSortValue для расчета времени
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go` (строки 12488-12561)
|
||||||
|
|
||||||
|
**Статус:** Функция уже реализована и использует `calculateProjectUnlockWeeks`. Проверить, что логика соответствует требованиям:
|
||||||
|
- Учитывает только условия типа `project_points`
|
||||||
|
- Использует правильного владельца условия (`conditionOwnerID`)
|
||||||
|
- Возвращает максимальное количество недель среди всех условий проектов
|
||||||
|
- Возвращает 999999.0 если нет условий по проектам или все выполнены
|
||||||
|
|
||||||
|
**Текущая реализация уже корректна**, изменения не требуются.
|
||||||
|
|
||||||
|
**Важно:**
|
||||||
|
- Функция уже использует `calculateProjectUnlockWeeks` для расчета (уже реализовано)
|
||||||
|
- Функция НЕ должна учитывать задачи, только проекты. Разделение на группы с задачами и без задач будет в сортировке.
|
||||||
|
- Функция уже правильно обрабатывает владельца условия через `conditionOwnerID` (не использует `userID` напрямую)
|
||||||
|
|
||||||
|
### 3. Обновить сортировку в getWishlistHandler
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go` (строки 9933-9951)
|
||||||
|
|
||||||
|
**Текущее состояние:**
|
||||||
|
- Разблокированные: используют `calculateUnlockedSortValue` (сумма баллов) - **нужно заменить на цену**
|
||||||
|
- Заблокированные: сортируются по `calculateLockedSortValue` (недели) - **нужно разделить на группы**
|
||||||
|
|
||||||
|
**Изменить:**
|
||||||
|
1. Разблокированные: сортировка по цене от меньшего к большему (заменить `calculateUnlockedSortValue`)
|
||||||
|
2. Заблокированные: разделить на группы (с задачами/без задач) и сортировать каждую группу по времени
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Сортируем разблокированные по цене от меньшего к большему
|
||||||
|
// ЗАМЕНА: было calculateUnlockedSortValue, стало прямая сортировка по цене
|
||||||
|
sort.Slice(unlocked, func(i, j int) bool {
|
||||||
|
priceI := 0.0
|
||||||
|
priceJ := 0.0
|
||||||
|
if unlocked[i].Price != nil {
|
||||||
|
priceI = *unlocked[i].Price
|
||||||
|
}
|
||||||
|
if unlocked[j].Price != nil {
|
||||||
|
priceJ = *unlocked[j].Price
|
||||||
|
}
|
||||||
|
if priceI == priceJ {
|
||||||
|
return unlocked[i].ID < unlocked[j].ID
|
||||||
|
}
|
||||||
|
return priceI < priceJ // Сортировка по цене от меньшего к большему (заменяет calculateUnlockedSortValue)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Разделяем заблокированные на группы
|
||||||
|
lockedWithoutTasks := []WishlistItem{}
|
||||||
|
lockedWithTasks := []WishlistItem{}
|
||||||
|
|
||||||
|
for _, item := range locked {
|
||||||
|
hasUncompletedTasks := false
|
||||||
|
for _, cond := range item.UnlockConditions {
|
||||||
|
if cond.Type == "task_completion" && (cond.TaskCompleted == nil || !*cond.TaskCompleted) {
|
||||||
|
hasUncompletedTasks = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if hasUncompletedTasks {
|
||||||
|
lockedWithTasks = append(lockedWithTasks, item)
|
||||||
|
} else {
|
||||||
|
lockedWithoutTasks = append(lockedWithoutTasks, item)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Сортируем каждую группу по времени разблокировки
|
||||||
|
sort.Slice(lockedWithoutTasks, func(i, j int) bool {
|
||||||
|
valueI := a.calculateLockedSortValue(lockedWithoutTasks[i], userID)
|
||||||
|
valueJ := a.calculateLockedSortValue(lockedWithoutTasks[j], userID)
|
||||||
|
if valueI == valueJ {
|
||||||
|
return lockedWithoutTasks[i].ID < lockedWithoutTasks[j].ID
|
||||||
|
}
|
||||||
|
return valueI < valueJ
|
||||||
|
})
|
||||||
|
|
||||||
|
sort.Slice(lockedWithTasks, func(i, j int) bool {
|
||||||
|
valueI := a.calculateLockedSortValue(lockedWithTasks[i], userID)
|
||||||
|
valueJ := a.calculateLockedSortValue(lockedWithTasks[j], userID)
|
||||||
|
if valueI == valueJ {
|
||||||
|
return lockedWithTasks[i].ID < lockedWithTasks[j].ID
|
||||||
|
}
|
||||||
|
return valueI < valueJ
|
||||||
|
})
|
||||||
|
|
||||||
|
// Объединяем: сначала без задач, потом с задачами
|
||||||
|
locked = append(lockedWithoutTasks, lockedWithTasks...)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Обновить сортировку в getBoardItemsHandler
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go` (строки 12222-12240)
|
||||||
|
|
||||||
|
**Текущее состояние:**
|
||||||
|
- Разблокированные: используют `calculateUnlockedSortValue` (сумма баллов) - **нужно заменить на цену**
|
||||||
|
- Заблокированные: сортируются по `calculateLockedSortValue` (недели) - **нужно разделить на группы**
|
||||||
|
|
||||||
|
**Изменить аналогично getWishlistHandler:**
|
||||||
|
1. Разблокированные: сортировка по цене от меньшего к большему (заменить `calculateUnlockedSortValue`)
|
||||||
|
2. Заблокированные: разделить на группы (с задачами/без задач) и сортировать каждую группу по времени
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Сортируем разблокированные по цене от меньшего к большему
|
||||||
|
// ЗАМЕНА: было calculateUnlockedSortValue, стало прямая сортировка по цене
|
||||||
|
sort.Slice(unlocked, func(i, j int) bool {
|
||||||
|
priceI := 0.0
|
||||||
|
priceJ := 0.0
|
||||||
|
if unlocked[i].Price != nil {
|
||||||
|
priceI = *unlocked[i].Price
|
||||||
|
}
|
||||||
|
if unlocked[j].Price != nil {
|
||||||
|
priceJ = *unlocked[j].Price
|
||||||
|
}
|
||||||
|
if priceI == priceJ {
|
||||||
|
return unlocked[i].ID < unlocked[j].ID
|
||||||
|
}
|
||||||
|
return priceI < priceJ
|
||||||
|
})
|
||||||
|
|
||||||
|
// РАЗДЕЛЕНИЕ НА ГРУППЫ: Заблокированные с задачами и без задач
|
||||||
|
// ЗАМЕНА: было просто sort.Slice(locked, ...), стало разделение на группы
|
||||||
|
lockedWithoutTasks := []WishlistItem{}
|
||||||
|
lockedWithTasks := []WishlistItem{}
|
||||||
|
|
||||||
|
for _, item := range locked {
|
||||||
|
hasUncompletedTasks := false
|
||||||
|
for _, cond := range item.UnlockConditions {
|
||||||
|
if cond.Type == "task_completion" && (cond.TaskCompleted == nil || !*cond.TaskCompleted) {
|
||||||
|
hasUncompletedTasks = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if hasUncompletedTasks {
|
||||||
|
lockedWithTasks = append(lockedWithTasks, item)
|
||||||
|
} else {
|
||||||
|
lockedWithoutTasks = append(lockedWithoutTasks, item)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Сортируем каждую группу по времени разблокировки
|
||||||
|
sort.Slice(lockedWithoutTasks, func(i, j int) bool {
|
||||||
|
valueI := a.calculateLockedSortValue(lockedWithoutTasks[i], userID)
|
||||||
|
valueJ := a.calculateLockedSortValue(lockedWithoutTasks[j], userID)
|
||||||
|
if valueI == valueJ {
|
||||||
|
return lockedWithoutTasks[i].ID < lockedWithoutTasks[j].ID
|
||||||
|
}
|
||||||
|
return valueI < valueJ
|
||||||
|
})
|
||||||
|
|
||||||
|
sort.Slice(lockedWithTasks, func(i, j int) bool {
|
||||||
|
valueI := a.calculateLockedSortValue(lockedWithTasks[i], userID)
|
||||||
|
valueJ := a.calculateLockedSortValue(lockedWithTasks[j], userID)
|
||||||
|
if valueI == valueJ {
|
||||||
|
return lockedWithTasks[i].ID < lockedWithTasks[j].ID
|
||||||
|
}
|
||||||
|
return valueI < valueJ
|
||||||
|
})
|
||||||
|
|
||||||
|
// Объединяем: сначала без задач, потом с задачами
|
||||||
|
locked = append(lockedWithoutTasks, lockedWithTasks...)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Итоговый порядок элементов
|
||||||
|
|
||||||
|
1. **Разблокированные** - отсортированы по цене от меньшего к большему
|
||||||
|
2. **Заблокированные без целей-задач** - отсортированы по максимальному времени разблокировки (среди всех проектов) от меньшего к большему
|
||||||
|
3. **Заблокированные с целями-задачами** - отсортированы по максимальному времени разблокировки (среди всех проектов) от меньшего к большему
|
||||||
|
|
||||||
|
## Обработка краевых случаев
|
||||||
|
|
||||||
|
- **Если медиана проекта = 0 или отсутствует**: `calculateProjectUnlockWeeks` возвращает 99999, что обрабатывается в `calculateLockedSortValue` (не учитывается в maxWeeks, если >= 99999)
|
||||||
|
- **Если нет условий**: возвращать 999999.0 (отсутствие условий = все условия выполнены)
|
||||||
|
- **Если все условия выполнены**: возвращать 999999.0
|
||||||
|
- **Если цена не указана (NULL)**: считать как 0.0
|
||||||
|
- **Если нет условий по проектам** (только задачи или нет условий): возвращать 999999.0
|
||||||
|
|
||||||
|
## Зависимости
|
||||||
|
|
||||||
|
- `projects_median_mv` должна существовать (проверить наличие миграции или создать при необходимости)
|
||||||
|
- Функция `getProjectMedian` уже реализована (упрощенная версия без fallback)
|
||||||
|
- Функция `calculateProjectUnlockWeeks` уже реализована и используется в `calculateLockedSortValue`
|
||||||
|
|
||||||
|
## Финальный шаг: Перезапуск приложения
|
||||||
|
|
||||||
|
**После выполнения всех изменений:**
|
||||||
|
|
||||||
|
Выполнить команду для перезапуска фронтенда и бэкенда:
|
||||||
|
```bash
|
||||||
|
./run.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Это пересоберет и перезапустит:
|
||||||
|
- Backend сервер (с пересборкой)
|
||||||
|
- Frontend приложение (с пересборкой)
|
||||||
|
- База данных
|
||||||
@@ -0,0 +1,392 @@
|
|||||||
|
# План: Создать общие функции расчета и форматирования срока разблокировки
|
||||||
|
|
||||||
|
## Цель
|
||||||
|
|
||||||
|
Создать универсальные функции для расчета и форматирования срока разблокировки проекта, которые будут использоваться везде где необходимо считать остаточный срок.
|
||||||
|
|
||||||
|
## Изменения
|
||||||
|
|
||||||
|
### 1. Создать функцию расчета срока разблокировки (бэкенд)
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go`
|
||||||
|
|
||||||
|
Создать функцию `calculateProjectUnlockWeeks`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// calculateProjectUnlockWeeks рассчитывает срок разблокировки проекта в неделях
|
||||||
|
// projectID - ID проекта
|
||||||
|
// requiredPoints - необходимое количество баллов
|
||||||
|
// startDate - дата начала подсчета (может быть nil - за всё время)
|
||||||
|
// userID - ID пользователя (владельца условия)
|
||||||
|
// Возвращает количество недель (float64):
|
||||||
|
// - > 0: условие не выполнено, возвращает количество недель
|
||||||
|
// - 0: условие уже выполнено (remaining <= 0)
|
||||||
|
// - 99999: медиана отсутствует или равна 0 (нельзя рассчитать)
|
||||||
|
func (a *App) calculateProjectUnlockWeeks(projectID int, requiredPoints float64, startDate sql.NullTime, userID int) float64 {
|
||||||
|
// 1. Получаем текущие баллы от startDate
|
||||||
|
currentPoints, err := a.calculateProjectPointsFromDate(projectID, startDate, userID)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error calculating project points for project %d, user %d: %v", projectID, userID, err)
|
||||||
|
return 99999 // Ошибка расчета - возвращаем 99999
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Вычисляем остаток
|
||||||
|
remaining := requiredPoints - currentPoints
|
||||||
|
if remaining <= 0 {
|
||||||
|
// Условие уже выполнено
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Получаем медиану проекта
|
||||||
|
median, err := a.getProjectMedian(projectID)
|
||||||
|
if err != nil || median <= 0 {
|
||||||
|
// Если медиана отсутствует или равна 0, возвращаем 99999 (нельзя рассчитать)
|
||||||
|
// Это нормальная ситуация, не логируем
|
||||||
|
return 99999
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Рассчитываем недели
|
||||||
|
weeks := remaining / median
|
||||||
|
return weeks
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Примечание:** Функция возвращает:
|
||||||
|
|
||||||
|
- `0`: условие уже выполнено (remaining <= 0)
|
||||||
|
- `> 0 && < 99999`: количество недель до выполнения условия
|
||||||
|
- `99999`: медиана отсутствует или равна 0 (нельзя рассчитать) или ошибка расчета
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### 2. Создать функцию форматирования срока (бэкенд)
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go`
|
||||||
|
|
||||||
|
Создать функцию `formatWeeksText`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// formatWeeksText форматирует количество недель в текстовый формат
|
||||||
|
// weeks - количество недель (float64)
|
||||||
|
// Возвращает строку: "2 недели", "<1 недели", "5 недель", "∞ недель" и т.д.
|
||||||
|
func formatWeeksText(weeks float64) string {
|
||||||
|
// Если weeks == 0, условие уже выполнено - не показываем срок
|
||||||
|
if weeks == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Если weeks >= 99999, это означает что медиана отсутствует или нельзя рассчитать
|
||||||
|
if weeks >= 99999 {
|
||||||
|
return "∞ недель"
|
||||||
|
}
|
||||||
|
|
||||||
|
if weeks < 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if weeks < 1 {
|
||||||
|
return "<1 недели"
|
||||||
|
}
|
||||||
|
|
||||||
|
weeksRounded := math.Round(weeks)
|
||||||
|
weeksInt := int(weeksRounded)
|
||||||
|
|
||||||
|
// Правильное склонение для русского языка
|
||||||
|
var weekWord string
|
||||||
|
lastDigit := weeksInt % 10
|
||||||
|
lastTwoDigits := weeksInt % 100
|
||||||
|
|
||||||
|
if lastTwoDigits >= 11 && lastTwoDigits <= 14 {
|
||||||
|
weekWord = "недель"
|
||||||
|
} else if lastDigit == 1 {
|
||||||
|
weekWord = "неделя"
|
||||||
|
} else if lastDigit >= 2 && lastDigit <= 4 {
|
||||||
|
weekWord = "недели"
|
||||||
|
} else {
|
||||||
|
weekWord = "недель"
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf("%d %s", weeksInt, weekWord)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Примечание:**
|
||||||
|
|
||||||
|
- Форматирование на бэкенде, так как сортировка происходит на бэкенде. Фронтенд получает уже отформатированную строку.
|
||||||
|
- При `weeks == 0` (условие выполнено) возвращается пустая строка (не показываем срок)
|
||||||
|
- При `weeks >= 99999` (медиана отсутствует, нельзя рассчитать или ошибка расчета) возвращается "∞ недель"
|
||||||
|
|
||||||
|
### 3. Использовать функции в calculateLockedSortValue
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go` (строки 12314-12337)
|
||||||
|
|
||||||
|
Обновить функцию для использования `calculateProjectUnlockWeeks`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
func (a *App) calculateLockedSortValue(item WishlistItem, userID int) float64 {
|
||||||
|
// Если нет условий, возвращаем большое значение (отсутствие условий = все выполнены)
|
||||||
|
if len(item.UnlockConditions) == 0 {
|
||||||
|
return 999999.0
|
||||||
|
}
|
||||||
|
|
||||||
|
maxWeeks := 0.0
|
||||||
|
hasProjectConditions := false
|
||||||
|
|
||||||
|
for _, condition := range item.UnlockConditions {
|
||||||
|
if condition.Type == "project_points" {
|
||||||
|
hasProjectConditions = true
|
||||||
|
if condition.RequiredPoints != nil {
|
||||||
|
var startDate sql.NullTime
|
||||||
|
if condition.StartDate != nil {
|
||||||
|
date, err := time.Parse("2006-01-02", *condition.StartDate)
|
||||||
|
if err == nil {
|
||||||
|
startDate = sql.NullTime{Time: date, Valid: true}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ВАЖНО: Используем владельца условия из condition.UserID
|
||||||
|
// Если condition.UserID есть - это владелец условия
|
||||||
|
// Если нет - получаем владельца желания из БД (для старых условий)
|
||||||
|
// НЕ используем текущего пользователя (userID), так как условие может принадлежать другому пользователю
|
||||||
|
conditionOwnerID := 0
|
||||||
|
if condition.UserID != nil {
|
||||||
|
conditionOwnerID = *condition.UserID
|
||||||
|
} else {
|
||||||
|
// Если нет владельца условия, получаем владельца желания из БД
|
||||||
|
var itemOwnerID int
|
||||||
|
err := a.DB.QueryRow(`SELECT user_id FROM wishlist_items WHERE id = $1`, item.ID).Scan(&itemOwnerID)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("Error getting wishlist item owner for item %d: %v", item.ID, err)
|
||||||
|
continue // Пропускаем условие, если не можем получить владельца
|
||||||
|
}
|
||||||
|
conditionOwnerID = itemOwnerID
|
||||||
|
}
|
||||||
|
|
||||||
|
// Получаем projectID из условия
|
||||||
|
if condition.ProjectID != nil {
|
||||||
|
weeks := a.calculateProjectUnlockWeeks(
|
||||||
|
*condition.ProjectID,
|
||||||
|
*condition.RequiredPoints,
|
||||||
|
startDate,
|
||||||
|
conditionOwnerID, // Владелец условия, а не текущий пользователь
|
||||||
|
)
|
||||||
|
// weeks > 0 && < 99999 означает, что условие еще не выполнено и расчет успешен
|
||||||
|
// weeks == 0 означает условие выполнено
|
||||||
|
// weeks == 99999 означает медиана отсутствует (нельзя рассчитать) или ошибка расчета
|
||||||
|
if weeks > 0 && weeks < 99999 {
|
||||||
|
if weeks > maxWeeks {
|
||||||
|
maxWeeks = weeks
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Если были условия по проектам, но все выполнены (maxWeeks = 0)
|
||||||
|
if hasProjectConditions && maxWeeks == 0.0 {
|
||||||
|
return 999999.0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Если не было условий по проектам (только задачи или нет условий)
|
||||||
|
if !hasProjectConditions {
|
||||||
|
return 999999.0
|
||||||
|
}
|
||||||
|
|
||||||
|
return maxWeeks
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Использовать функции в API endpoint для расчета недель
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go`
|
||||||
|
|
||||||
|
Обновить endpoint `/api/wishlist/calculate-weeks` (из плана "добавить расчет недель в форму"):
|
||||||
|
|
||||||
|
**Важно:** Использовать владельца условия, а не текущего пользователя!
|
||||||
|
|
||||||
|
```go
|
||||||
|
func (a *App) calculateWeeksHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// ... валидация и получение параметров ...
|
||||||
|
|
||||||
|
// Определяем владельца условия:
|
||||||
|
// 1. Если передан condition_user_id в запросе - используем его (для существующего условия)
|
||||||
|
// 2. Иначе используем текущего пользователя (для нового условия)
|
||||||
|
conditionOwnerID := userID // userID из контекста (текущий пользователь)
|
||||||
|
if req.ConditionUserID != nil && *req.ConditionUserID > 0 {
|
||||||
|
conditionOwnerID = *req.ConditionUserID
|
||||||
|
}
|
||||||
|
|
||||||
|
var startDate sql.NullTime
|
||||||
|
if req.StartDate != "" {
|
||||||
|
date, err := time.Parse("2006-01-02", req.StartDate)
|
||||||
|
if err == nil {
|
||||||
|
startDate = sql.NullTime{Time: date, Valid: true}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Используем владельца условия, а не текущего пользователя
|
||||||
|
weeks := a.calculateProjectUnlockWeeks(req.ProjectID, req.RequiredPoints, startDate, conditionOwnerID)
|
||||||
|
|
||||||
|
response := map[string]interface{}{
|
||||||
|
"weeks_text": formatWeeksText(weeks), // Отформатированная строка для отображения
|
||||||
|
}
|
||||||
|
|
||||||
|
// weeks используется только для сортировки на бэкенде, на клиент не отправляется
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(response)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Структура запроса:**
|
||||||
|
|
||||||
|
```go
|
||||||
|
type CalculateWeeksRequest struct {
|
||||||
|
ProjectID int `json:"project_id"`
|
||||||
|
RequiredPoints float64 `json:"required_points"`
|
||||||
|
StartDate string `json:"start_date,omitempty"`
|
||||||
|
ConditionUserID *int `json:"condition_user_id,omitempty"` // Владелец условия (если условие существует)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Добавить weeks_text в UnlockConditionDisplay
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go`
|
||||||
|
|
||||||
|
Добавить поле `WeeksText *string` в структуру `UnlockConditionDisplay`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type UnlockConditionDisplay struct {
|
||||||
|
// ... существующие поля ...
|
||||||
|
WeeksText *string `json:"weeks_text,omitempty"` // Отформатированный текст срока разблокировки
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
При загрузке условий типа `project_points` рассчитывать и форматировать срок:
|
||||||
|
|
||||||
|
```go
|
||||||
|
if condition.Type == "project_points" && condition.RequiredPoints != nil && condition.ProjectID != nil {
|
||||||
|
var startDate sql.NullTime
|
||||||
|
if condition.StartDate != nil {
|
||||||
|
date, err := time.Parse("2006-01-02", *condition.StartDate)
|
||||||
|
if err == nil {
|
||||||
|
startDate = sql.NullTime{Time: date, Valid: true}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ВАЖНО: Используем владельца условия из condition.UserID, а не текущего пользователя
|
||||||
|
// Если condition.UserID есть - это владелец условия
|
||||||
|
// Если нет - используем владельца желания (itemOwnerID), но НЕ текущего пользователя (userID)
|
||||||
|
conditionOwnerID := itemOwnerID // Владелец желания как fallback
|
||||||
|
if condition.UserID != nil {
|
||||||
|
conditionOwnerID = *condition.UserID // Владелец условия (приоритет)
|
||||||
|
}
|
||||||
|
|
||||||
|
weeks := a.calculateProjectUnlockWeeks(
|
||||||
|
*condition.ProjectID,
|
||||||
|
*condition.RequiredPoints,
|
||||||
|
startDate,
|
||||||
|
conditionOwnerID, // Владелец условия, а не текущий пользователь
|
||||||
|
)
|
||||||
|
|
||||||
|
// Форматируем всегда (при weeks == 0 вернет пустую строку, при weeks >= 99999 вернет "∞ недель")
|
||||||
|
weeksText := formatWeeksText(weeks)
|
||||||
|
condition.WeeksText = &weeksText
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Важно:**
|
||||||
|
|
||||||
|
- `condition.UserID` - это владелец условия (из `wishlist_conditions.user_id`)
|
||||||
|
- `itemOwnerID` - это владелец желания (fallback для старых условий)
|
||||||
|
- `userID` (текущий пользователь) НЕ используется, так как условие может принадлежать другому пользователю
|
||||||
|
|
||||||
|
### 6. Использовать weeks_text на фронтенде
|
||||||
|
|
||||||
|
**Файл:** `play-life-web/src/components/WishlistDetail.jsx`
|
||||||
|
|
||||||
|
Использовать готовый `weeks_text` из условия (приходит уже отформатированным из API):
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// В renderUnlockConditions:
|
||||||
|
{progress.remaining > 0 && condition.weeks_text && (
|
||||||
|
<span className="progress-remaining">
|
||||||
|
Осталось: {Math.round(progress.remaining)} ({condition.weeks_text})
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Файл:** `play-life-web/src/components/WishlistForm.jsx`
|
||||||
|
|
||||||
|
Использовать `weeks_text` из ответа API для отображения недель в форме редактирования условия. Форматирование уже выполнено на бэкенде.
|
||||||
|
|
||||||
|
### 7. Обновить загрузку медианы в условиях (опционально)
|
||||||
|
|
||||||
|
**Файл:** `play-life-backend/main.go`
|
||||||
|
|
||||||
|
При загрузке условий типа `project_points` медиана не нужна отдельно, так как `calculateProjectUnlockWeeks` сама получит её и вернет уже отформатированный `weeks_text`.
|
||||||
|
|
||||||
|
## Места использования функций
|
||||||
|
|
||||||
|
1. **calculateProjectUnlockWeeks** (бэкенд):
|
||||||
|
|
||||||
|
- `calculateLockedSortValue` - для сортировки заблокированных желаний (использует числовое значение)
|
||||||
|
- `calculateWeeksHandler` - API endpoint для расчета недель (использует для расчета, но на клиент отправляется только отформатированная строка)
|
||||||
|
- При загрузке условий для расчета `weeks_text` (используется внутри, на клиент не отправляется)
|
||||||
|
- Любые другие места, где нужно рассчитать срок разблокировки
|
||||||
|
|
||||||
|
2. **formatWeeksText** (бэкенд):
|
||||||
|
|
||||||
|
- При загрузке условий в `UnlockConditionDisplay.WeeksText` (отправляется на клиент для отображения)
|
||||||
|
- В API endpoint `/api/wishlist/calculate-weeks` (отправляется на клиент для отображения в форме)
|
||||||
|
- Форматирование на бэкенде, так как сортировка происходит на бэкенде по числовому значению `weeks`
|
||||||
|
|
||||||
|
## Выявленные и исправленные проблемы
|
||||||
|
|
||||||
|
1. **Проблема с userID в calculateLockedSortValue**:
|
||||||
|
|
||||||
|
- **Проблема**: Использовался текущий пользователь (`userID`), но условие может принадлежать другому пользователю
|
||||||
|
- **Исправление**: Используется `conditionOwnerID` из `condition.UserID` (владелец условия). Если `condition.UserID` отсутствует, условие пропускается (некорректное состояние)
|
||||||
|
|
||||||
|
2. **Обработка отсутствия медианы**:
|
||||||
|
|
||||||
|
- **Решение**: При отсутствии медианы возвращается `99999` (нельзя рассчитать). В `formatWeeksText` это значение преобразуется в "∞ недель". Такие условия не учитываются при сортировке по времени разблокировки (проверка `weeks > 0 && weeks < 99999`)
|
||||||
|
|
||||||
|
3. **Форматирование и передача данных**:
|
||||||
|
|
||||||
|
- **Решение**: Форматирование на бэкенде, так как сортировка происходит на бэкенде по числовому значению `weeks`
|
||||||
|
- Числовое значение `weeks` используется только на бэкенде для сортировки, на клиент не отправляется
|
||||||
|
- На клиент отправляется только отформатированная строка `weeks_text` для отображения
|
||||||
|
- Фронтенд просто отображает готовую строку без дополнительного форматирования
|
||||||
|
- Это исключает дублирование логики и обеспечивает единообразие форматирования
|
||||||
|
|
||||||
|
4. **Использование правильного userID (владельца условия)**:
|
||||||
|
|
||||||
|
- **Проблема**: В функцию `calculateProjectUnlockWeeks` может передаваться текущий пользователь вместо владельца условия
|
||||||
|
- **Решение**:
|
||||||
|
- В `calculateLockedSortValue`: используется `condition.UserID` (владелец условия)
|
||||||
|
- В `calculateWeeksHandler`: используется `condition_user_id` из запроса (если передан) или текущий пользователь (для нового условия)
|
||||||
|
- При загрузке условий: используется `condition.UserID` или `itemOwnerID` (владелец желания), но НЕ текущий пользователь
|
||||||
|
- **Важно**: Условие может принадлежать другому пользователю (на общих досках), поэтому нужно использовать именно владельца условия
|
||||||
|
|
||||||
|
## Зависимости
|
||||||
|
|
||||||
|
- Функция `getProjectMedian` должна быть создана (из плана сортировки)
|
||||||
|
- Функция `calculateProjectPointsFromDate` уже существует
|
||||||
|
|
||||||
|
## Финальный шаг: Перезапуск приложения
|
||||||
|
|
||||||
|
**После выполнения всех изменений:**
|
||||||
|
|
||||||
|
Выполнить команду для перезапуска фронтенда и бэкенда:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./run.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Это пересоберет и перезапустит:
|
||||||
|
|
||||||
|
- Backend сервер (с пересборкой)
|
||||||
|
- Frontend приложение (с пересборкой)
|
||||||
|
- База данных
|
||||||
@@ -13,4 +13,6 @@ alwaysApply: true
|
|||||||
- React компонентами и стилями в `play-life-web/src/`
|
- React компонентами и стилями в `play-life-web/src/`
|
||||||
- Docker конфигурациями (`docker-compose.yml`, `Dockerfile`)
|
- Docker конфигурациями (`docker-compose.yml`, `Dockerfile`)
|
||||||
|
|
||||||
**Команда для перезапуска:** `./run.sh` или `bash run.sh` в корне проекта.
|
При изменениях в `play-life-llm/` (если LLM запущен на этой машине) выполни `./runLLM.sh`.
|
||||||
|
|
||||||
|
**Команды для перезапуска:** `./run.sh` (web + backend + db) или `bash run.sh` в корне проекта. Для LLM на этой машине: `./runLLM.sh`.
|
||||||
|
|||||||
@@ -36,29 +36,39 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Patch DNS for Local Network
|
- name: Patch DNS for Local Network
|
||||||
if: steps.version_check.outputs.changed == 'true'
|
|
||||||
run: |
|
run: |
|
||||||
# Записываем IP Synology прямо в контейнер сборки
|
# Записываем IP Synology прямо в контейнер сборки
|
||||||
echo "192.168.50.55 dungeonsiege.synology.me" | sudo tee -a /etc/hosts
|
echo "192.168.50.55 dungeonsiege.synology.me" | sudo tee -a /etc/hosts
|
||||||
|
|
||||||
- name: Log in to Gitea Registry
|
- name: Build Docker Image
|
||||||
if: steps.version_check.outputs.changed == 'true'
|
|
||||||
run: |
|
|
||||||
echo "${{ secrets.GIT_TOKEN }}" | docker login dungeonsiege.synology.me -u ${{ secrets.GIT_USERNAME }} --password-stdin
|
|
||||||
|
|
||||||
- name: Build and Push
|
|
||||||
id: build
|
id: build
|
||||||
if: steps.version_check.outputs.changed == 'true'
|
|
||||||
run: |
|
run: |
|
||||||
REGISTRY="dungeonsiege.synology.me/poignatov/play-life"
|
REGISTRY="dungeonsiege.synology.me/poignatov/play-life"
|
||||||
VER="${{ steps.version_check.outputs.current }}"
|
VER="${{ steps.version_check.outputs.current }}"
|
||||||
|
|
||||||
echo "Building Docker image..."
|
echo "Building Docker image..."
|
||||||
echo "Registry: $REGISTRY"
|
echo "Registry: $REGISTRY"
|
||||||
echo "Tags: latest, $VER"
|
echo "Tag: latest"
|
||||||
|
|
||||||
# Собираем один раз
|
# Собираем образ
|
||||||
docker build -t $REGISTRY:latest -t $REGISTRY:$VER .
|
docker build -t $REGISTRY:latest .
|
||||||
|
|
||||||
|
echo "✅ Successfully built image: $REGISTRY:latest"
|
||||||
|
|
||||||
|
- name: Log in to Gitea Registry
|
||||||
|
if: steps.version_check.outputs.changed == 'true'
|
||||||
|
run: |
|
||||||
|
echo "${{ secrets.GIT_TOKEN }}" | docker login dungeonsiege.synology.me -u ${{ secrets.GIT_USERNAME }} --password-stdin
|
||||||
|
|
||||||
|
- name: Push Docker Image
|
||||||
|
id: push
|
||||||
|
if: steps.version_check.outputs.changed == 'true'
|
||||||
|
run: |
|
||||||
|
REGISTRY="dungeonsiege.synology.me/poignatov/play-life"
|
||||||
|
VER="${{ steps.version_check.outputs.current }}"
|
||||||
|
|
||||||
|
# Тегируем образ версией
|
||||||
|
docker tag $REGISTRY:latest $REGISTRY:$VER
|
||||||
|
|
||||||
# Пушим оба тега
|
# Пушим оба тега
|
||||||
echo "Pushing image to registry..."
|
echo "Pushing image to registry..."
|
||||||
@@ -69,35 +79,137 @@ jobs:
|
|||||||
echo " - $REGISTRY:latest"
|
echo " - $REGISTRY:latest"
|
||||||
echo " - $REGISTRY:$VER"
|
echo " - $REGISTRY:$VER"
|
||||||
|
|
||||||
- name: Send Telegram notification (success)
|
- name: Send Telegram notification (build success)
|
||||||
if: success() && steps.version_check.outputs.changed == 'true'
|
if: success() && steps.version_check.outputs.changed == 'false'
|
||||||
uses: appleboy/telegram-action@master
|
uses: appleboy/telegram-action@master
|
||||||
with:
|
with:
|
||||||
to: ${{ secrets.TELEGRAM_TO }}
|
to: ${{ secrets.TELEGRAM_TO }}
|
||||||
token: ${{ secrets.TELEGRAM_TOKEN }}
|
token: ${{ secrets.TELEGRAM_TOKEN }}
|
||||||
|
format: markdown
|
||||||
message: |
|
message: |
|
||||||
✅ Успешная публикация play-life!
|
*play-life*
|
||||||
|
`${{ steps.version_check.outputs.commit_message }}`
|
||||||
|
|
||||||
${{ steps.version_check.outputs.commit_message }}
|
Build: ✅
|
||||||
|
Registration: ⏭️
|
||||||
|
Deploy: ⏭️
|
||||||
|
|
||||||
- name: Send Telegram notification (failure)
|
- name: Deploy to Production Server
|
||||||
if: failure()
|
id: deploy
|
||||||
|
if: steps.version_check.outputs.changed == 'true'
|
||||||
|
uses: appleboy/ssh-action@master
|
||||||
|
with:
|
||||||
|
host: ${{ secrets.DEPLOY_HOST }}
|
||||||
|
username: ${{ secrets.DEPLOY_USER }}
|
||||||
|
password: ${{ secrets.DEPLOY_PASSWORD }}
|
||||||
|
script: |
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Расширяем PATH для Synology (при SSH сессии PATH минимальный)
|
||||||
|
export PATH="/usr/local/bin:/usr/syno/bin:$PATH"
|
||||||
|
|
||||||
|
REGISTRY="dungeonsiege.synology.me/poignatov/play-life"
|
||||||
|
DEPLOY_PATH="/volume1/docker/play-life"
|
||||||
|
|
||||||
|
echo "🚀 Начинаю деплой на production сервер..."
|
||||||
|
echo "PATH: $PATH"
|
||||||
|
|
||||||
|
# Проверяем наличие docker
|
||||||
|
if ! command -v docker >/dev/null 2>&1; then
|
||||||
|
echo "❌ Docker не найден в PATH!"
|
||||||
|
echo "Пробуем найти docker..."
|
||||||
|
which docker || find /usr -name "docker" -type f 2>/dev/null | head -5
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
DOCKER_CMD="docker"
|
||||||
|
|
||||||
|
# Определяем docker-compose (может быть docker compose или docker-compose)
|
||||||
|
if command -v docker-compose >/dev/null 2>&1; then
|
||||||
|
DOCKER_COMPOSE_CMD="docker-compose"
|
||||||
|
elif docker compose version >/dev/null 2>&1; then
|
||||||
|
DOCKER_COMPOSE_CMD="docker compose"
|
||||||
|
else
|
||||||
|
echo "❌ Docker Compose не найден!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Используем: $DOCKER_CMD и $DOCKER_COMPOSE_CMD"
|
||||||
|
|
||||||
|
# Переходим в директорию проекта
|
||||||
|
cd $DEPLOY_PATH
|
||||||
|
|
||||||
|
# Логинимся в registry
|
||||||
|
echo "${{ secrets.GIT_TOKEN }}" | $DOCKER_CMD login dungeonsiege.synology.me -u ${{ secrets.GIT_USERNAME }} --password-stdin
|
||||||
|
|
||||||
|
# Обновляем образ
|
||||||
|
echo "📥 Обновляю образ из registry..."
|
||||||
|
$DOCKER_CMD pull $REGISTRY:latest
|
||||||
|
|
||||||
|
# Перезапускаем контейнеры
|
||||||
|
echo "🔄 Перезапускаю контейнеры..."
|
||||||
|
$DOCKER_COMPOSE_CMD -f docker-compose.prod.yml up -d --force-recreate
|
||||||
|
|
||||||
|
# Проверяем статус
|
||||||
|
echo "✅ Деплой завершен успешно"
|
||||||
|
$DOCKER_COMPOSE_CMD -f docker-compose.prod.yml ps
|
||||||
|
|
||||||
|
- name: Send Telegram notification (publish success)
|
||||||
|
if: steps.build.outcome == 'success' && steps.version_check.outputs.changed == 'true' && steps.push.outcome == 'success' && steps.deploy.outcome == 'success'
|
||||||
uses: appleboy/telegram-action@master
|
uses: appleboy/telegram-action@master
|
||||||
with:
|
with:
|
||||||
to: ${{ secrets.TELEGRAM_TO }}
|
to: ${{ secrets.TELEGRAM_TO }}
|
||||||
token: ${{ secrets.TELEGRAM_TOKEN }}
|
token: ${{ secrets.TELEGRAM_TOKEN }}
|
||||||
|
format: markdown
|
||||||
message: |
|
message: |
|
||||||
❌ Ошибка сборки или публикации play-life!
|
*play-life*
|
||||||
|
`${{ steps.version_check.outputs.commit_message }}`
|
||||||
|
|
||||||
${{ steps.version_check.outputs.commit_message }}
|
Build: ✅
|
||||||
|
Registration: ✅
|
||||||
|
Deploy: ✅
|
||||||
|
|
||||||
- name: Send Telegram notification (skipped)
|
- name: Send Telegram notification (push failure)
|
||||||
if: steps.version_check.outputs.changed == 'false'
|
if: steps.build.outcome == 'success' && steps.version_check.outputs.changed == 'true' && steps.push.outcome == 'failure'
|
||||||
uses: appleboy/telegram-action@master
|
uses: appleboy/telegram-action@master
|
||||||
with:
|
with:
|
||||||
to: ${{ secrets.TELEGRAM_TO }}
|
to: ${{ secrets.TELEGRAM_TO }}
|
||||||
token: ${{ secrets.TELEGRAM_TOKEN }}
|
token: ${{ secrets.TELEGRAM_TOKEN }}
|
||||||
|
format: markdown
|
||||||
message: |
|
message: |
|
||||||
ℹ️ Пропущена публикация play-life
|
*play-life*
|
||||||
|
`${{ steps.version_check.outputs.commit_message }}`
|
||||||
|
|
||||||
${{ steps.version_check.outputs.commit_message }}
|
Build: ✅
|
||||||
|
Registration: ❌
|
||||||
|
Deploy: ⏭️
|
||||||
|
|
||||||
|
- name: Send Telegram notification (deploy failure)
|
||||||
|
if: steps.build.outcome == 'success' && steps.push.outcome == 'success' && steps.version_check.outputs.changed == 'true' && steps.deploy.outcome == 'failure'
|
||||||
|
uses: appleboy/telegram-action@master
|
||||||
|
with:
|
||||||
|
to: ${{ secrets.TELEGRAM_TO }}
|
||||||
|
token: ${{ secrets.TELEGRAM_TOKEN }}
|
||||||
|
format: markdown
|
||||||
|
message: |
|
||||||
|
*play-life*
|
||||||
|
`${{ steps.version_check.outputs.commit_message }}`
|
||||||
|
|
||||||
|
Build: ✅
|
||||||
|
Registration: ✅
|
||||||
|
Deploy: ❌
|
||||||
|
|
||||||
|
- name: Send Telegram notification (build failure)
|
||||||
|
if: steps.build.outcome == 'failure'
|
||||||
|
uses: appleboy/telegram-action@master
|
||||||
|
with:
|
||||||
|
to: ${{ secrets.TELEGRAM_TO }}
|
||||||
|
token: ${{ secrets.TELEGRAM_TOKEN }}
|
||||||
|
format: markdown
|
||||||
|
message: |
|
||||||
|
*play-life*
|
||||||
|
`${{ steps.version_check.outputs.commit_message }}`
|
||||||
|
|
||||||
|
Build: ❌
|
||||||
|
Registration: ⏭️
|
||||||
|
Deploy: ⏭️
|
||||||
3
.vscode/settings.json
vendored
Normal file
3
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"claudeCode.allowDangerouslySkipPermissions": true
|
||||||
|
}
|
||||||
19
.vscode/tasks.json
vendored
19
.vscode/tasks.json
vendored
@@ -39,6 +39,25 @@
|
|||||||
"problemMatcher": [],
|
"problemMatcher": [],
|
||||||
"detail": "Перезапуск Play Life: перезапуск всех контейнеров"
|
"detail": "Перезапуск Play Life: перезапуск всех контейнеров"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"label": "runLLM",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "./runLLM.sh",
|
||||||
|
"group": {
|
||||||
|
"kind": "build",
|
||||||
|
"isDefault": false
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": false,
|
||||||
|
"panel": "shared",
|
||||||
|
"showReuseMessage": true,
|
||||||
|
"clear": false
|
||||||
|
},
|
||||||
|
"problemMatcher": [],
|
||||||
|
"detail": "Запуск/перезапуск play-life-llm (обычно на отдельной машине)"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"label": "backupFromProd",
|
"label": "backupFromProd",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
|
|||||||
65
CLAUDE.md
Normal file
65
CLAUDE.md
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
# Правила проекта
|
||||||
|
|
||||||
|
## Миграции базы данных
|
||||||
|
|
||||||
|
**ВАЖНО:** Если меняется структура базы данных — пиши НОВУЮ миграцию.
|
||||||
|
НИ В КОЕМ СЛУЧАЕ не меняй старые миграции, можно добавлять только новые.
|
||||||
|
Старой миграцией считается та, что была уже ранее закоммичена.
|
||||||
|
|
||||||
|
## Перезапуск приложения после изменений
|
||||||
|
|
||||||
|
После применения всех изменений в бэкенде (`play-life-backend/`) или фронтенде (`play-life-web/`), а также после изменений в `docker-compose.yml`, **ОБЯЗАТЕЛЬНО** выполни команду `./run.sh` для перезапуска всех сервисов.
|
||||||
|
|
||||||
|
Применяется при работе с:
|
||||||
|
- Go кодом в `play-life-backend/`
|
||||||
|
- Миграциями базы данных в `play-life-backend/migrations/`
|
||||||
|
- React компонентами и стилями в `play-life-web/src/`
|
||||||
|
- Docker конфигурациями (`docker-compose.yml`, `Dockerfile`)
|
||||||
|
|
||||||
|
При изменениях в `play-life-llm/` (если LLM запущен на этой машине) выполни `./runLLM.sh`.
|
||||||
|
|
||||||
|
**Команды:** `./run.sh` (web + backend + db) или `bash run.sh` в корне проекта. Для LLM: `./runLLM.sh`.
|
||||||
|
|
||||||
|
## Поднятие версии и пуш
|
||||||
|
|
||||||
|
Когда пользователь просит **поднять версию и запушить**:
|
||||||
|
|
||||||
|
### 1. Определи тип версии
|
||||||
|
|
||||||
|
- **major** — первая цифра (1.1.1 → 2.0.0), минор и патч обнуляются
|
||||||
|
- **minor** — вторая цифра (1.0.1 → 1.1.0), патч обнуляется
|
||||||
|
- **patch** — третья цифра (1.0.0 → 1.0.1)
|
||||||
|
|
||||||
|
Любая часть версии может быть больше 9 (10, 11, 12 и т.д.).
|
||||||
|
|
||||||
|
**Если тип версии непонятен из контекста — обязательно спроси у пользователя!**
|
||||||
|
|
||||||
|
### 2. Обнови версию в файлах
|
||||||
|
|
||||||
|
- `VERSION` (в корне проекта)
|
||||||
|
- `play-life-web/package.json` (поле `"version"`)
|
||||||
|
|
||||||
|
### 3. Составь commit message
|
||||||
|
|
||||||
|
Выполни `git diff --staged` и `git diff`, проанализируй изменения. Составь **короткий commit message** (максимум 50 символов) на русском языке. Формат: `"1.2.3: Описание изменений"`.
|
||||||
|
|
||||||
|
### 4. Закоммить и запушить
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add -A
|
||||||
|
git commit -m "<commit message>"
|
||||||
|
git push
|
||||||
|
```
|
||||||
|
|
||||||
|
## Пуш без поднятия версии
|
||||||
|
|
||||||
|
Когда пользователь просит просто запушить (без поднятия версии):
|
||||||
|
|
||||||
|
1. Выполни `git diff --staged` и `git diff`, составь короткий commit message на русском (максимум 50 символов)
|
||||||
|
2. `git add -A && git commit -m "<commit message>" && git push`
|
||||||
|
|
||||||
|
**Примеры:**
|
||||||
|
- "Подними патч и запушь" → поднять patch
|
||||||
|
- "Bump minor and push" → поднять minor
|
||||||
|
- "Подними версию и запушь" → спросить какой тип
|
||||||
|
- "Запушь изменения" → пушить без изменения версии
|
||||||
75
check-repo-fs.sh
Executable file
75
check-repo-fs.sh
Executable file
@@ -0,0 +1,75 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Скрипт для проверки файловой системы репозитория Gitea
|
||||||
|
# Выполните на сервере с административным доступом
|
||||||
|
|
||||||
|
REPO_PATH="/poignatov/play-life.git"
|
||||||
|
GITEA_USER="git" # или пользователь, под которым работает Gitea
|
||||||
|
|
||||||
|
echo "=== Проверка существования репозитория ==="
|
||||||
|
if [ -d "$REPO_PATH" ]; then
|
||||||
|
echo "✓ Репозиторий существует"
|
||||||
|
else
|
||||||
|
echo "✗ Репозиторий НЕ найден: $REPO_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Проверка прав доступа ==="
|
||||||
|
ls -ld "$REPO_PATH"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Проверка владельца ==="
|
||||||
|
OWNER=$(stat -c '%U:%G' "$REPO_PATH" 2>/dev/null || stat -f '%Su:%Sg' "$REPO_PATH" 2>/dev/null)
|
||||||
|
echo "Владелец: $OWNER"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Проверка размера репозитория ==="
|
||||||
|
du -sh "$REPO_PATH"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Проверка свободного места ==="
|
||||||
|
df -h "$REPO_PATH" | tail -1
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Проверка ключевых файлов Git ==="
|
||||||
|
if [ -f "$REPO_PATH/config" ]; then
|
||||||
|
echo "✓ config существует"
|
||||||
|
else
|
||||||
|
echo "✗ config НЕ найден"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -d "$REPO_PATH/objects" ]; then
|
||||||
|
echo "✓ objects/ существует"
|
||||||
|
echo " Количество объектов: $(find "$REPO_PATH/objects" -type f | wc -l)"
|
||||||
|
else
|
||||||
|
echo "✗ objects/ НЕ найден"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -f "$REPO_PATH/HEAD" ]; then
|
||||||
|
echo "✓ HEAD существует"
|
||||||
|
echo " Текущая ветка: $(cat "$REPO_PATH/HEAD")"
|
||||||
|
else
|
||||||
|
echo "✗ HEAD НЕ найден"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -f "$REPO_PATH/refs/heads/main" ]; then
|
||||||
|
echo "✓ refs/heads/main существует"
|
||||||
|
echo " Последний коммит: $(cat "$REPO_PATH/refs/heads/main")"
|
||||||
|
else
|
||||||
|
echo "✗ refs/heads/main НЕ найден"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Проверка целостности репозитория ==="
|
||||||
|
cd "$REPO_PATH"
|
||||||
|
if git fsck --no-progress 2>&1 | head -20; then
|
||||||
|
echo "✓ Репозиторий цел"
|
||||||
|
else
|
||||||
|
echo "✗ Обнаружены проблемы с целостностью"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Проверка логов Gitea ==="
|
||||||
|
echo "Проверьте логи Gitea на наличие ошибок:"
|
||||||
|
echo " - /var/log/gitea/gitea.log"
|
||||||
|
echo " - или в директории, указанной в конфиге Gitea"
|
||||||
27
docker-compose.prod.yml
Normal file
27
docker-compose.prod.yml
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
version: '3.8'
|
||||||
|
|
||||||
|
# Production конфигурация для Synology
|
||||||
|
# Использует образ из registry вместо локальной сборки
|
||||||
|
# База данных postgres запущена отдельно (не в этом compose)
|
||||||
|
|
||||||
|
services:
|
||||||
|
play-life:
|
||||||
|
image: dungeonsiege.synology.me/poignatov/play-life:latest
|
||||||
|
container_name: play-life-prod
|
||||||
|
ports:
|
||||||
|
- "3080:80"
|
||||||
|
volumes:
|
||||||
|
- /volume1/docker/play-life/uploads:/app/uploads:rw
|
||||||
|
restart: always
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
# Подключаемся к общей сети playlife-net
|
||||||
|
# Перед первым запуском нужно создать сеть и подключить postgres:
|
||||||
|
# docker network create playlife-net
|
||||||
|
# docker network connect playlife-net postgres1
|
||||||
|
networks:
|
||||||
|
- playlife-net
|
||||||
|
|
||||||
|
networks:
|
||||||
|
playlife-net:
|
||||||
|
external: true
|
||||||
@@ -14,6 +14,8 @@ services:
|
|||||||
POSTGRES_DB: ${DB_NAME:-playeng}
|
POSTGRES_DB: ${DB_NAME:-playeng}
|
||||||
ports:
|
ports:
|
||||||
- "${DB_PORT:-5432}:5432"
|
- "${DB_PORT:-5432}:5432"
|
||||||
|
volumes:
|
||||||
|
- postgres_data:/var/lib/postgresql/data
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-playeng}"]
|
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-playeng}"]
|
||||||
interval: 10s
|
interval: 10s
|
||||||
@@ -59,6 +61,22 @@ services:
|
|||||||
env_file:
|
env_file:
|
||||||
- .env
|
- .env
|
||||||
|
|
||||||
|
# LLM сервис (Ollama + Tavily), свой Docker и свой env
|
||||||
|
llm:
|
||||||
|
build:
|
||||||
|
context: ./play-life-llm
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: play-life-llm
|
||||||
|
ports:
|
||||||
|
- "8090:8090"
|
||||||
|
restart: unless-stopped
|
||||||
|
env_file:
|
||||||
|
- ./play-life-llm/.env
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgres_data:
|
||||||
|
name: play-life_postgres_data
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
default:
|
default:
|
||||||
name: play-life-network
|
name: play-life-network
|
||||||
|
|||||||
20
env.example
20
env.example
@@ -62,6 +62,26 @@ TODOIST_CLIENT_SECRET=
|
|||||||
# Получить в Developer Console: "Client secret for webhooks"
|
# Получить в Developer Console: "Client secret for webhooks"
|
||||||
TODOIST_WEBHOOK_SECRET=
|
TODOIST_WEBHOOK_SECRET=
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# Fitbit Integration Configuration
|
||||||
|
# ============================================
|
||||||
|
# Fitbit приложение для интеграции с Play Life
|
||||||
|
# Настроить в: https://dev.fitbit.com/apps
|
||||||
|
#
|
||||||
|
# В настройках Fitbit приложения указать:
|
||||||
|
# - OAuth 2.0 Application Type: Server
|
||||||
|
# - Callback URL: <WEBHOOK_BASE_URL>/api/integrations/fitbit/oauth/callback
|
||||||
|
# - Default Access Type: Read-Only
|
||||||
|
# - Scopes: activity, profile
|
||||||
|
# - Terms of Service URL: <WEBHOOK_BASE_URL>/terms
|
||||||
|
# - Privacy Policy URL: <WEBHOOK_BASE_URL>/privacy
|
||||||
|
|
||||||
|
# Client ID Fitbit приложения
|
||||||
|
FITBIT_CLIENT_ID=
|
||||||
|
|
||||||
|
# Client Secret Fitbit приложения
|
||||||
|
FITBIT_CLIENT_SECRET=
|
||||||
|
|
||||||
# ============================================
|
# ============================================
|
||||||
# Authentication Configuration
|
# Authentication Configuration
|
||||||
# ============================================
|
# ============================================
|
||||||
|
|||||||
4
init.sh
4
init.sh
@@ -47,12 +47,12 @@ docker images | grep -E "postgres:(15|16|17|18|latest)" | awk '{print $3}' | xar
|
|||||||
echo -e "${GREEN} ✅ Старые образы postgres удалены${NC}"
|
echo -e "${GREEN} ✅ Старые образы postgres удалены${NC}"
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
# 2. Поднятие всех сервисов
|
# 2. Поднятие сервисов (без LLM — он обычно на отдельной машине, см. ./runLLM.sh)
|
||||||
echo -e "${YELLOW}2. Поднятие сервисов через Docker Compose...${NC}"
|
echo -e "${YELLOW}2. Поднятие сервисов через Docker Compose...${NC}"
|
||||||
echo " - База данных PostgreSQL 18.0 (порт: $DB_PORT)"
|
echo " - База данных PostgreSQL 18.0 (порт: $DB_PORT)"
|
||||||
echo " - Backend сервер (порт: $PORT)"
|
echo " - Backend сервер (порт: $PORT)"
|
||||||
echo " - Frontend приложение (порт: $WEB_PORT)"
|
echo " - Frontend приложение (порт: $WEB_PORT)"
|
||||||
docker-compose up -d --build
|
docker-compose up -d --build db backend play-life-web
|
||||||
echo -e "${GREEN} ✅ Сервисы запущены${NC}"
|
echo -e "${GREEN} ✅ Сервисы запущены${NC}"
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
|
|||||||
@@ -49,8 +49,34 @@ server {
|
|||||||
proxy_cache_bypass $http_upgrade;
|
proxy_cache_bypass $http_upgrade;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Proxy admin panel to backend (must be before location /)
|
||||||
|
location ^~ /admin {
|
||||||
|
proxy_pass http://localhost:8080;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection 'upgrade';
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Proxy project endpoints to backend (must be before location /)
|
||||||
|
location ^~ /project/ {
|
||||||
|
proxy_pass http://localhost:8080;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection 'upgrade';
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
|
||||||
# Proxy other API endpoints to backend
|
# Proxy other API endpoints to backend
|
||||||
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|project/priority|project/move|project/delete|project/create|message/post|weekly_goals/setup|admin|admin\.html)$ {
|
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|weekly_goals/setup|project_score_sample_mv/refresh|priorities/confirm)$ {
|
||||||
proxy_pass http://localhost:8080;
|
proxy_pass http://localhost:8080;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
|||||||
@@ -161,43 +161,55 @@
|
|||||||
color: white;
|
color: white;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.auth-error {
|
||||||
|
background: white;
|
||||||
|
padding: 30px;
|
||||||
|
border-radius: 10px;
|
||||||
|
text-align: center;
|
||||||
|
max-width: 500px;
|
||||||
|
margin: 50px auto;
|
||||||
|
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.auth-error h2 {
|
||||||
|
color: #f44336;
|
||||||
|
margin-bottom: 15px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.auth-error p {
|
||||||
|
color: #666;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.auth-error a {
|
||||||
|
display: inline-block;
|
||||||
|
padding: 10px 20px;
|
||||||
|
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||||
|
color: white;
|
||||||
|
text-decoration: none;
|
||||||
|
border-radius: 5px;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.auth-error a:hover {
|
||||||
|
opacity: 0.9;
|
||||||
|
}
|
||||||
|
|
||||||
</style>
|
</style>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div class="container">
|
<div id="authErrorContainer" style="display: none;">
|
||||||
|
<div class="auth-error">
|
||||||
|
<h2>⚠️ Требуется авторизация</h2>
|
||||||
|
<p id="authErrorMessage">Для доступа к админ-панели необходимо войти в систему как администратор.</p>
|
||||||
|
<a href="/" target="_self">Перейти на главную страницу</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="container" id="mainContainer">
|
||||||
<h1>🎯 Play Life Backend - Admin Panel</h1>
|
<h1>🎯 Play Life Backend - Admin Panel</h1>
|
||||||
|
|
||||||
<div class="grid">
|
<div class="grid">
|
||||||
<!-- Message Post Card -->
|
|
||||||
<div class="card">
|
|
||||||
<h2>
|
|
||||||
📨 Message Post
|
|
||||||
<span class="status" id="messageStatus" style="display: none;"></span>
|
|
||||||
</h2>
|
|
||||||
<textarea id="messageText" placeholder="Введите сообщение с паттернами **Project+10.5** или **Project-5.0**...
|
|
||||||
|
|
||||||
Пример:
|
|
||||||
Сегодня работал над проектами:
|
|
||||||
**Frontend+15.5**
|
|
||||||
**Backend+8.0**
|
|
||||||
**Design-2.5**"></textarea>
|
|
||||||
<button onclick="sendMessage()">Отправить сообщение</button>
|
|
||||||
<div id="messageResult"></div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Daily Report Trigger Card -->
|
|
||||||
<div class="card">
|
|
||||||
<h2>
|
|
||||||
📈 Daily Report Trigger
|
|
||||||
<span class="status" id="dailyReportStatus" style="display: none;"></span>
|
|
||||||
</h2>
|
|
||||||
<p style="margin-bottom: 15px; color: #666;">
|
|
||||||
Нажмите кнопку для отправки ежедневного отчёта по Score и Целям в Telegram (обычно отправляется автоматически в 23:59).
|
|
||||||
</p>
|
|
||||||
<button onclick="triggerDailyReport()">Отправить отчёт</button>
|
|
||||||
<div id="dailyReportResult"></div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Weekly Goals Setup Card -->
|
<!-- Weekly Goals Setup Card -->
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<h2>
|
<h2>
|
||||||
@@ -210,16 +222,80 @@
|
|||||||
<button onclick="setupWeeklyGoals()">Обновить цели</button>
|
<button onclick="setupWeeklyGoals()">Обновить цели</button>
|
||||||
<div id="goalsResult"></div>
|
<div id="goalsResult"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Project score sample MV Card -->
|
||||||
|
<div class="card">
|
||||||
|
<h2>
|
||||||
|
📊 project_score_sample_mv
|
||||||
|
<span class="status" id="mvStatus" style="display: none;"></span>
|
||||||
|
</h2>
|
||||||
|
<p style="margin-bottom: 15px; color: #666;">
|
||||||
|
Обновить материализованное представление и показать данные текущего пользователя (по одному представителю на вариант баллов проекта).
|
||||||
|
</p>
|
||||||
|
<button onclick="refreshProjectScoreSampleMv()">Обновить project_score_sample_mv</button>
|
||||||
|
<div id="mvResult"></div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
|
// Получаем токен из localStorage
|
||||||
|
function getAuthToken() {
|
||||||
|
return localStorage.getItem('access_token');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Проверяем авторизацию при загрузке страницы
|
||||||
|
function checkAuth() {
|
||||||
|
const token = getAuthToken();
|
||||||
|
if (!token) {
|
||||||
|
showAuthError('Токен авторизации не найден. Пожалуйста, войдите в систему.');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Показываем сообщение об ошибке авторизации
|
||||||
|
function showAuthError(message) {
|
||||||
|
document.getElementById('authErrorContainer').style.display = 'block';
|
||||||
|
document.getElementById('mainContainer').style.display = 'none';
|
||||||
|
document.getElementById('authErrorMessage').textContent = message;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Обрабатываем ошибки авторизации
|
||||||
|
function handleAuthError(response) {
|
||||||
|
if (response.status === 401) {
|
||||||
|
showAuthError('Сессия истекла. Пожалуйста, войдите в систему снова.');
|
||||||
|
return true;
|
||||||
|
} else if (response.status === 403) {
|
||||||
|
showAuthError('У вас нет прав доступа к админ-панели. Требуются права администратора.');
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Получаем заголовки с авторизацией
|
||||||
|
function getAuthHeaders() {
|
||||||
|
const token = getAuthToken();
|
||||||
|
const headers = {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
};
|
||||||
|
if (token) {
|
||||||
|
headers['Authorization'] = `Bearer ${token}`;
|
||||||
|
}
|
||||||
|
return headers;
|
||||||
|
}
|
||||||
|
|
||||||
function getApiUrl() {
|
function getApiUrl() {
|
||||||
// Автоматически определяем URL текущего хоста
|
// Автоматически определяем URL текущего хоста
|
||||||
// Админка обслуживается тем же бекендом, поэтому используем текущий origin
|
// Админка обслуживается тем же бекендом, поэтому используем текущий origin
|
||||||
return window.location.origin;
|
return window.location.origin;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Проверяем авторизацию при загрузке страницы
|
||||||
|
if (!checkAuth()) {
|
||||||
|
// Страница уже скрыта в checkAuth
|
||||||
|
}
|
||||||
|
|
||||||
function showStatus(elementId, status, text) {
|
function showStatus(elementId, status, text) {
|
||||||
const statusEl = document.getElementById(elementId);
|
const statusEl = document.getElementById(elementId);
|
||||||
statusEl.textContent = text;
|
statusEl.textContent = text;
|
||||||
@@ -254,44 +330,6 @@
|
|||||||
resultEl.appendChild(div);
|
resultEl.appendChild(div);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function sendMessage() {
|
|
||||||
const text = document.getElementById('messageText').value.trim();
|
|
||||||
if (!text) {
|
|
||||||
alert('Пожалуйста, введите сообщение');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
showStatus('messageStatus', 'loading', 'Отправка...');
|
|
||||||
showResult('messageResult', null, false, true);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await fetch(`${getApiUrl()}/message/post`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
body: {
|
|
||||||
text: text
|
|
||||||
}
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
const data = await response.json();
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
showStatus('messageStatus', 'success', 'Успешно');
|
|
||||||
showResult('messageResult', data, false);
|
|
||||||
} else {
|
|
||||||
showStatus('messageStatus', 'error', 'Ошибка');
|
|
||||||
showResult('messageResult', data, true);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
showStatus('messageStatus', 'error', 'Ошибка');
|
|
||||||
showResult('messageResult', { error: error.message }, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function setupWeeklyGoals() {
|
async function setupWeeklyGoals() {
|
||||||
showStatus('goalsStatus', 'loading', 'Обновление...');
|
showStatus('goalsStatus', 'loading', 'Обновление...');
|
||||||
showResult('goalsResult', null, false, true);
|
showResult('goalsResult', null, false, true);
|
||||||
@@ -299,11 +337,13 @@
|
|||||||
try {
|
try {
|
||||||
const response = await fetch(`${getApiUrl()}/weekly_goals/setup`, {
|
const response = await fetch(`${getApiUrl()}/weekly_goals/setup`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: getAuthHeaders()
|
||||||
'Content-Type': 'application/json',
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
if (handleAuthError(response)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
|
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
@@ -319,39 +359,35 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function triggerDailyReport() {
|
async function refreshProjectScoreSampleMv() {
|
||||||
showStatus('dailyReportStatus', 'loading', 'Отправка...');
|
showStatus('mvStatus', 'loading', 'Обновление...');
|
||||||
showResult('dailyReportResult', null, false, true);
|
showResult('mvResult', null, false, true);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await fetch(`${getApiUrl()}/daily-report/trigger`, {
|
const response = await fetch(`${getApiUrl()}/project_score_sample_mv/refresh`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: getAuthHeaders()
|
||||||
'Content-Type': 'application/json',
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
if (handleAuthError(response)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
|
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
showStatus('dailyReportStatus', 'success', 'Успешно');
|
showStatus('mvStatus', 'success', 'Успешно');
|
||||||
showResult('dailyReportResult', data, false);
|
showResult('mvResult', data, false);
|
||||||
} else {
|
} else {
|
||||||
showStatus('dailyReportStatus', 'error', 'Ошибка');
|
showStatus('mvStatus', 'error', 'Ошибка');
|
||||||
showResult('dailyReportResult', data, true);
|
showResult('mvResult', data, true);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
showStatus('dailyReportStatus', 'error', 'Ошибка');
|
showStatus('mvStatus', 'error', 'Ошибка');
|
||||||
showResult('dailyReportResult', { error: error.message }, true);
|
showResult('mvResult', { error: error.message }, true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Разрешаем отправку формы по Enter (Ctrl+Enter для textarea)
|
|
||||||
document.getElementById('messageText').addEventListener('keydown', function(e) {
|
|
||||||
if (e.ctrlKey && e.key === 'Enter') {
|
|
||||||
sendMessage();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</script>
|
</script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,7 @@
|
|||||||
|
-- Migration: Remove task drafts tables
|
||||||
|
-- Date: 2026-01-26
|
||||||
|
--
|
||||||
|
-- This migration removes tables created for task drafts
|
||||||
|
|
||||||
|
DROP TABLE IF EXISTS task_draft_subtasks;
|
||||||
|
DROP TABLE IF EXISTS task_drafts;
|
||||||
45
play-life-backend/migrations/000005_add_task_drafts.up.sql
Normal file
45
play-life-backend/migrations/000005_add_task_drafts.up.sql
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
-- Migration: Add task drafts tables
|
||||||
|
-- Date: 2026-01-26
|
||||||
|
--
|
||||||
|
-- This migration creates tables for storing task drafts:
|
||||||
|
-- 1. task_drafts - main table for task drafts with progression value and auto_complete flag
|
||||||
|
-- 2. task_draft_subtasks - stores only checked subtask IDs for each draft
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Table: task_drafts
|
||||||
|
-- ============================================
|
||||||
|
CREATE TABLE task_drafts (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
task_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE,
|
||||||
|
user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
progression_value NUMERIC(10,4),
|
||||||
|
auto_complete BOOLEAN DEFAULT FALSE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||||
|
UNIQUE(task_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_task_drafts_task_id ON task_drafts(task_id);
|
||||||
|
CREATE INDEX idx_task_drafts_user_id ON task_drafts(user_id);
|
||||||
|
CREATE INDEX idx_task_drafts_auto_complete ON task_drafts(auto_complete) WHERE auto_complete = TRUE;
|
||||||
|
|
||||||
|
COMMENT ON TABLE task_drafts IS 'Stores draft states for tasks with progression value and auto-complete flag';
|
||||||
|
COMMENT ON COLUMN task_drafts.progression_value IS 'Saved progression value from user input';
|
||||||
|
COMMENT ON COLUMN task_drafts.auto_complete IS 'Flag indicating task should be auto-completed at end of day (23:55)';
|
||||||
|
COMMENT ON COLUMN task_drafts.task_id IS 'Reference to task. UNIQUE constraint ensures one draft per task';
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Table: task_draft_subtasks
|
||||||
|
-- ============================================
|
||||||
|
CREATE TABLE task_draft_subtasks (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
task_draft_id INTEGER REFERENCES task_drafts(id) ON DELETE CASCADE,
|
||||||
|
subtask_id INTEGER REFERENCES tasks(id) ON DELETE CASCADE,
|
||||||
|
UNIQUE(task_draft_id, subtask_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_task_draft_subtasks_task_draft_id ON task_draft_subtasks(task_draft_id);
|
||||||
|
CREATE INDEX idx_task_draft_subtasks_subtask_id ON task_draft_subtasks(subtask_id);
|
||||||
|
|
||||||
|
COMMENT ON TABLE task_draft_subtasks IS 'Stores only checked subtask IDs for each draft. If subtask is not in this table, it means it is unchecked';
|
||||||
|
COMMENT ON COLUMN task_draft_subtasks.subtask_id IS 'Reference to subtask task. Only checked subtasks are stored here';
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
-- Migration: Revert wishlist_id unique index fix
|
||||||
|
-- Date: 2026-01-30
|
||||||
|
--
|
||||||
|
-- This migration reverts the composite unique index back to the original
|
||||||
|
-- unique index that only checked wishlist_id.
|
||||||
|
|
||||||
|
-- Drop the composite unique index
|
||||||
|
DROP INDEX IF EXISTS idx_tasks_wishlist_id_user_id_unique;
|
||||||
|
|
||||||
|
-- Restore the original unique index on wishlist_id only
|
||||||
|
CREATE UNIQUE INDEX idx_tasks_wishlist_id_unique
|
||||||
|
ON tasks(wishlist_id)
|
||||||
|
WHERE wishlist_id IS NOT NULL AND deleted = FALSE;
|
||||||
@@ -0,0 +1,16 @@
|
|||||||
|
-- Migration: Fix wishlist_id unique index to allow multiple users
|
||||||
|
-- Date: 2026-01-30
|
||||||
|
--
|
||||||
|
-- This migration fixes the unique index on wishlist_id to allow multiple users
|
||||||
|
-- to create tasks for the same wishlist item. The old index only checked wishlist_id,
|
||||||
|
-- but now we need a composite unique index on (wishlist_id, user_id).
|
||||||
|
|
||||||
|
-- Drop the old unique index that only checked wishlist_id
|
||||||
|
DROP INDEX IF EXISTS idx_tasks_wishlist_id_unique;
|
||||||
|
|
||||||
|
-- Create a new composite unique index on (wishlist_id, user_id)
|
||||||
|
-- This allows multiple users to have tasks for the same wishlist item,
|
||||||
|
-- but prevents the same user from having multiple tasks for the same wishlist item
|
||||||
|
CREATE UNIQUE INDEX idx_tasks_wishlist_id_user_id_unique
|
||||||
|
ON tasks(wishlist_id, user_id)
|
||||||
|
WHERE wishlist_id IS NOT NULL AND deleted = FALSE;
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
-- Migration: Drop projects_median_mv materialized view
|
||||||
|
-- Date: 2026-01-30
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
-- Migration: Add projects_median_mv materialized view
|
||||||
|
-- Date: 2026-01-30
|
||||||
|
--
|
||||||
|
-- This migration creates a materialized view that calculates the median score
|
||||||
|
-- for each project based on the last 12 weeks of historical data from weekly_report_mv.
|
||||||
|
-- The view includes user_id to support multi-tenant queries.
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW projects_median_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
p.user_id,
|
||||||
|
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
project_id,
|
||||||
|
normalized_total_score,
|
||||||
|
report_year,
|
||||||
|
report_week,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
|
||||||
|
FROM weekly_report_mv
|
||||||
|
WHERE
|
||||||
|
(report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
) sub
|
||||||
|
JOIN projects p ON p.id = sub.project_id
|
||||||
|
WHERE rn <= 12 AND p.deleted = FALSE
|
||||||
|
GROUP BY p.id, p.user_id
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
|
||||||
|
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 12 weeks of historical data. Includes user_id for multi-tenant support.';
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
-- Migration: Revert median calculation back to 12 weeks
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration reverts projects_median_mv back to using 12 weeks.
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW projects_median_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
p.user_id,
|
||||||
|
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
project_id,
|
||||||
|
normalized_total_score,
|
||||||
|
report_year,
|
||||||
|
report_week,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
|
||||||
|
FROM weekly_report_mv
|
||||||
|
WHERE
|
||||||
|
(report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
) sub
|
||||||
|
JOIN projects p ON p.id = sub.project_id
|
||||||
|
WHERE rn <= 12 AND p.deleted = FALSE
|
||||||
|
GROUP BY p.id, p.user_id
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
|
||||||
|
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 12 weeks of historical data. Includes user_id for multi-tenant support.';
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
-- Migration: Change median calculation from 12 weeks to 4 weeks
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration updates projects_median_mv to calculate median based on
|
||||||
|
-- the last 4 weeks instead of 12 weeks.
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW projects_median_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
p.user_id,
|
||||||
|
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
project_id,
|
||||||
|
normalized_total_score,
|
||||||
|
report_year,
|
||||||
|
report_week,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
|
||||||
|
FROM weekly_report_mv
|
||||||
|
WHERE
|
||||||
|
(report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
) sub
|
||||||
|
JOIN projects p ON p.id = sub.project_id
|
||||||
|
WHERE rn <= 4 AND p.deleted = FALSE
|
||||||
|
GROUP BY p.id, p.user_id
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
|
||||||
|
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 4 weeks of historical data. Includes user_id for multi-tenant support.';
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
-- Migration: Remove is_admin field from users table
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration reverts the addition of is_admin field.
|
||||||
|
|
||||||
|
DROP INDEX IF EXISTS idx_users_is_admin;
|
||||||
|
|
||||||
|
ALTER TABLE users
|
||||||
|
DROP COLUMN IF EXISTS is_admin;
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
-- Migration: Add is_admin field to users table
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration adds is_admin boolean field to users table to identify admin users.
|
||||||
|
-- Default value is FALSE, so existing users will not become admins automatically.
|
||||||
|
|
||||||
|
ALTER TABLE users
|
||||||
|
ADD COLUMN is_admin BOOLEAN NOT NULL DEFAULT FALSE;
|
||||||
|
|
||||||
|
CREATE INDEX idx_users_is_admin ON users(is_admin);
|
||||||
|
|
||||||
|
COMMENT ON COLUMN users.is_admin IS 'Indicates if the user has admin privileges';
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
-- Migration: Remove project_id field from wishlist_items table
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration reverts the addition of project_id field.
|
||||||
|
|
||||||
|
DROP INDEX IF EXISTS idx_wishlist_items_project_id;
|
||||||
|
|
||||||
|
ALTER TABLE wishlist_items
|
||||||
|
DROP COLUMN IF EXISTS project_id;
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
-- Migration: Add project_id field to wishlist_items table
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration adds project_id field to wishlist_items table to allow
|
||||||
|
-- grouping wishlist items by project. The field is nullable, so existing
|
||||||
|
-- items without a project will remain valid.
|
||||||
|
|
||||||
|
ALTER TABLE wishlist_items
|
||||||
|
ADD COLUMN project_id INTEGER REFERENCES projects(id) ON DELETE SET NULL;
|
||||||
|
|
||||||
|
CREATE INDEX idx_wishlist_items_project_id ON wishlist_items(project_id);
|
||||||
|
|
||||||
|
COMMENT ON COLUMN wishlist_items.project_id IS 'Project this wishlist item belongs to (optional)';
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
-- Migration: Remove color field from projects table
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration removes the color field from projects table.
|
||||||
|
|
||||||
|
DROP INDEX IF EXISTS idx_projects_color;
|
||||||
|
|
||||||
|
ALTER TABLE projects
|
||||||
|
DROP COLUMN IF EXISTS color;
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
-- Migration: Add color field to projects table
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration adds color field to projects table to allow
|
||||||
|
-- custom color selection for projects. The field is NOT NULL,
|
||||||
|
-- and existing projects will be assigned colors from a predefined palette.
|
||||||
|
|
||||||
|
-- Добавляем поле color
|
||||||
|
ALTER TABLE projects
|
||||||
|
ADD COLUMN color VARCHAR(7) NOT NULL DEFAULT '#3B82F6';
|
||||||
|
|
||||||
|
-- Палитра из 30 контрастных цветов (синхронизирована с backend и frontend)
|
||||||
|
-- Заполняем существующие проекты цветами из палитры
|
||||||
|
DO $$
|
||||||
|
DECLARE
|
||||||
|
colors TEXT[] := ARRAY[
|
||||||
|
'#EF4444', '#F97316', '#F59E0B', '#EAB308', '#84CC16',
|
||||||
|
'#22C55E', '#10B981', '#14B8A6', '#06B6D4', '#0EA5E9',
|
||||||
|
'#3B82F6', '#6366F1', '#8B5CF6', '#A855F7', '#D946EF',
|
||||||
|
'#EC4899', '#F43F5E', '#DC2626', '#EA580C', '#CA8A04',
|
||||||
|
'#65A30D', '#16A34A', '#059669', '#0D9488', '#0891B2',
|
||||||
|
'#0284C7', '#2563EB', '#4F46E5', '#7C3AED', '#9333EA'
|
||||||
|
];
|
||||||
|
project_record RECORD;
|
||||||
|
color_index INTEGER := 0;
|
||||||
|
BEGIN
|
||||||
|
-- Обновляем существующие проекты, присваивая им цвета из палитры
|
||||||
|
FOR project_record IN
|
||||||
|
SELECT id FROM projects ORDER BY id
|
||||||
|
LOOP
|
||||||
|
UPDATE projects
|
||||||
|
SET color = colors[1 + (color_index % array_length(colors, 1))]
|
||||||
|
WHERE id = project_record.id;
|
||||||
|
|
||||||
|
color_index := color_index + 1;
|
||||||
|
END LOOP;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- Убираем DEFAULT, так как теперь все проекты имеют цвет
|
||||||
|
ALTER TABLE projects
|
||||||
|
ALTER COLUMN color DROP DEFAULT;
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_projects_color ON projects(color);
|
||||||
|
|
||||||
|
COMMENT ON COLUMN projects.color IS 'Project color in HEX format (e.g., #FF5733)';
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
-- Migration: Remove position field from tasks table
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration removes the position field from tasks table.
|
||||||
|
|
||||||
|
DROP INDEX IF EXISTS idx_tasks_parent_position;
|
||||||
|
|
||||||
|
ALTER TABLE tasks
|
||||||
|
DROP COLUMN IF EXISTS position;
|
||||||
@@ -0,0 +1,49 @@
|
|||||||
|
-- Migration: Add position field to tasks table for subtasks ordering
|
||||||
|
-- Date: 2026-02-02
|
||||||
|
--
|
||||||
|
-- This migration adds position field to tasks table to allow
|
||||||
|
-- custom ordering of subtasks. The field is NULL for regular tasks
|
||||||
|
-- and contains position number for subtasks (tasks with parent_task_id).
|
||||||
|
|
||||||
|
-- Добавляем поле position
|
||||||
|
ALTER TABLE tasks
|
||||||
|
ADD COLUMN position INTEGER;
|
||||||
|
|
||||||
|
-- Заполняем позиции для всех существующих подзадач
|
||||||
|
-- Позиции присваиваются по порядку id в рамках каждой родительской задачи
|
||||||
|
DO $$
|
||||||
|
DECLARE
|
||||||
|
parent_record RECORD;
|
||||||
|
subtask_record RECORD;
|
||||||
|
pos INTEGER;
|
||||||
|
BEGIN
|
||||||
|
-- Для каждой родительской задачи
|
||||||
|
FOR parent_record IN
|
||||||
|
SELECT DISTINCT parent_task_id
|
||||||
|
FROM tasks
|
||||||
|
WHERE parent_task_id IS NOT NULL
|
||||||
|
ORDER BY parent_task_id
|
||||||
|
LOOP
|
||||||
|
pos := 0;
|
||||||
|
-- Обновляем подзадачи этой родительской задачи
|
||||||
|
FOR subtask_record IN
|
||||||
|
SELECT id
|
||||||
|
FROM tasks
|
||||||
|
WHERE parent_task_id = parent_record.parent_task_id
|
||||||
|
AND deleted = FALSE
|
||||||
|
ORDER BY id
|
||||||
|
LOOP
|
||||||
|
UPDATE tasks
|
||||||
|
SET position = pos
|
||||||
|
WHERE id = subtask_record.id;
|
||||||
|
|
||||||
|
pos := pos + 1;
|
||||||
|
END LOOP;
|
||||||
|
END LOOP;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- Создаем индекс для быстрой сортировки подзадач
|
||||||
|
CREATE INDEX idx_tasks_parent_position ON tasks(parent_task_id, position)
|
||||||
|
WHERE parent_task_id IS NOT NULL AND deleted = FALSE;
|
||||||
|
|
||||||
|
COMMENT ON COLUMN tasks.position IS 'Position of subtask within parent task. NULL for regular tasks.';
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
DROP TABLE IF EXISTS tracking_invite_tokens;
|
||||||
|
DROP TABLE IF EXISTS user_tracking;
|
||||||
24
play-life-backend/migrations/000013_add_user_tracking.up.sql
Normal file
24
play-life-backend/migrations/000013_add_user_tracking.up.sql
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
-- Таблица отслеживания между пользователями
|
||||||
|
CREATE TABLE user_tracking (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
tracker_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
tracked_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT unique_tracking_pair UNIQUE (tracker_id, tracked_id),
|
||||||
|
CONSTRAINT no_self_tracking CHECK (tracker_id != tracked_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_user_tracking_tracker ON user_tracking(tracker_id);
|
||||||
|
CREATE INDEX idx_user_tracking_tracked ON user_tracking(tracked_id);
|
||||||
|
|
||||||
|
-- Таблица токенов приглашений (живут 1 час)
|
||||||
|
CREATE TABLE tracking_invite_tokens (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
token VARCHAR(64) NOT NULL UNIQUE,
|
||||||
|
expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_tracking_invite_tokens_token ON tracking_invite_tokens(token);
|
||||||
|
CREATE INDEX idx_tracking_invite_tokens_user ON tracking_invite_tokens(user_id);
|
||||||
36
play-life-backend/migrations/000014_add_group_name.down.sql
Normal file
36
play-life-backend/migrations/000014_add_group_name.down.sql
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
-- Migration: Remove group_name field from wishlist_items and tasks tables
|
||||||
|
-- Date: 2026-02-XX
|
||||||
|
--
|
||||||
|
-- This migration reverses the changes made in 000014_add_group_name.up.sql
|
||||||
|
|
||||||
|
-- Step 1: Drop materialized view
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS user_group_suggestions_mv;
|
||||||
|
|
||||||
|
-- Step 2: Drop indexes on group_name
|
||||||
|
DROP INDEX IF EXISTS idx_tasks_group_name;
|
||||||
|
DROP INDEX IF EXISTS idx_wishlist_items_group_name;
|
||||||
|
|
||||||
|
-- Step 3: Remove group_name from tasks
|
||||||
|
ALTER TABLE tasks
|
||||||
|
DROP COLUMN group_name;
|
||||||
|
|
||||||
|
-- Step 4: Add back project_id to wishlist_items
|
||||||
|
ALTER TABLE wishlist_items
|
||||||
|
ADD COLUMN project_id INTEGER REFERENCES projects(id) ON DELETE SET NULL;
|
||||||
|
|
||||||
|
-- Step 5: Try to restore project_id from group_name (if possible)
|
||||||
|
-- Note: This is best-effort, as group_name might not match project names exactly
|
||||||
|
UPDATE wishlist_items wi
|
||||||
|
SET project_id = p.id
|
||||||
|
FROM projects p
|
||||||
|
WHERE wi.group_name = p.name
|
||||||
|
AND wi.group_name IS NOT NULL
|
||||||
|
AND wi.group_name != ''
|
||||||
|
AND p.deleted = FALSE;
|
||||||
|
|
||||||
|
-- Step 6: Create index on project_id
|
||||||
|
CREATE INDEX idx_wishlist_items_project_id ON wishlist_items(project_id);
|
||||||
|
|
||||||
|
-- Step 7: Remove group_name from wishlist_items
|
||||||
|
ALTER TABLE wishlist_items
|
||||||
|
DROP COLUMN group_name;
|
||||||
60
play-life-backend/migrations/000014_add_group_name.up.sql
Normal file
60
play-life-backend/migrations/000014_add_group_name.up.sql
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
-- Migration: Add group_name field to wishlist_items and tasks tables
|
||||||
|
-- Date: 2026-02-XX
|
||||||
|
--
|
||||||
|
-- This migration:
|
||||||
|
-- 1. Adds group_name field to wishlist_items (replacing project_id)
|
||||||
|
-- 2. Migrates existing data from project_id to group_name
|
||||||
|
-- 3. Removes project_id column from wishlist_items
|
||||||
|
-- 4. Adds group_name field to tasks
|
||||||
|
-- 5. Creates materialized view for group suggestions
|
||||||
|
|
||||||
|
-- Step 1: Add group_name to wishlist_items
|
||||||
|
ALTER TABLE wishlist_items
|
||||||
|
ADD COLUMN group_name VARCHAR(255);
|
||||||
|
|
||||||
|
-- Step 2: Migrate existing data from project_id to group_name
|
||||||
|
UPDATE wishlist_items wi
|
||||||
|
SET group_name = p.name
|
||||||
|
FROM projects p
|
||||||
|
WHERE wi.project_id = p.id AND wi.project_id IS NOT NULL;
|
||||||
|
|
||||||
|
-- Step 3: Remove project_id column and its index
|
||||||
|
DROP INDEX IF EXISTS idx_wishlist_items_project_id;
|
||||||
|
ALTER TABLE wishlist_items
|
||||||
|
DROP COLUMN project_id;
|
||||||
|
|
||||||
|
-- Step 4: Add group_name to tasks
|
||||||
|
ALTER TABLE tasks
|
||||||
|
ADD COLUMN group_name VARCHAR(255);
|
||||||
|
|
||||||
|
-- Step 5: Create indexes on group_name
|
||||||
|
CREATE INDEX idx_wishlist_items_group_name ON wishlist_items(group_name) WHERE group_name IS NOT NULL;
|
||||||
|
CREATE INDEX idx_tasks_group_name ON tasks(group_name) WHERE group_name IS NOT NULL;
|
||||||
|
|
||||||
|
-- Step 6: Create materialized view for group suggestions
|
||||||
|
CREATE MATERIALIZED VIEW user_group_suggestions_mv AS
|
||||||
|
SELECT DISTINCT user_id, group_name FROM (
|
||||||
|
-- Желания пользователя (собственные)
|
||||||
|
SELECT wi.user_id, wi.group_name FROM wishlist_items wi
|
||||||
|
WHERE wi.deleted = FALSE AND wi.group_name IS NOT NULL AND wi.group_name != ''
|
||||||
|
UNION
|
||||||
|
-- Желания с досок, на которых пользователь участник
|
||||||
|
SELECT wbm.user_id, wi.group_name FROM wishlist_items wi
|
||||||
|
JOIN wishlist_board_members wbm ON wi.board_id = wbm.board_id
|
||||||
|
WHERE wi.deleted = FALSE AND wi.group_name IS NOT NULL AND wi.group_name != ''
|
||||||
|
UNION
|
||||||
|
-- Задачи пользователя
|
||||||
|
SELECT t.user_id, t.group_name FROM tasks t
|
||||||
|
WHERE t.deleted = FALSE AND t.group_name IS NOT NULL AND t.group_name != ''
|
||||||
|
UNION
|
||||||
|
-- Имена проектов пользователя
|
||||||
|
SELECT p.user_id, p.name FROM projects p
|
||||||
|
WHERE p.deleted = FALSE
|
||||||
|
) sub;
|
||||||
|
|
||||||
|
-- Step 7: Create unique index for CONCURRENT refresh
|
||||||
|
CREATE UNIQUE INDEX idx_user_group_suggestions_mv_user_group ON user_group_suggestions_mv(user_id, group_name);
|
||||||
|
|
||||||
|
COMMENT ON COLUMN wishlist_items.group_name IS 'Group name for wishlist item (free text, replaces project_id)';
|
||||||
|
COMMENT ON COLUMN tasks.group_name IS 'Group name for task (free text)';
|
||||||
|
COMMENT ON MATERIALIZED VIEW user_group_suggestions_mv IS 'Materialized view for group name suggestions per user';
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
DROP TABLE IF EXISTS fitbit_daily_stats;
|
||||||
|
DROP TABLE IF EXISTS fitbit_integrations;
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
-- Fitbit integrations table (depends on users)
|
||||||
|
CREATE TABLE fitbit_integrations (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
fitbit_user_id VARCHAR(255),
|
||||||
|
access_token TEXT,
|
||||||
|
refresh_token TEXT,
|
||||||
|
token_expires_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
goal_steps_min INTEGER DEFAULT 8000,
|
||||||
|
goal_steps_max INTEGER DEFAULT 10000,
|
||||||
|
goal_floors_min INTEGER DEFAULT 8,
|
||||||
|
goal_floors_max INTEGER DEFAULT 10,
|
||||||
|
goal_azm_min INTEGER DEFAULT 22,
|
||||||
|
goal_azm_max INTEGER DEFAULT 44,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT fitbit_integrations_user_id_unique UNIQUE (user_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_fitbit_integrations_user_id ON fitbit_integrations(user_id);
|
||||||
|
CREATE UNIQUE INDEX idx_fitbit_integrations_fitbit_user_id ON fitbit_integrations(fitbit_user_id) WHERE fitbit_user_id IS NOT NULL;
|
||||||
|
|
||||||
|
-- Fitbit daily stats table (depends on users and fitbit_integrations)
|
||||||
|
CREATE TABLE fitbit_daily_stats (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
date DATE NOT NULL,
|
||||||
|
steps INTEGER DEFAULT 0,
|
||||||
|
floors INTEGER DEFAULT 0,
|
||||||
|
active_zone_minutes INTEGER DEFAULT 0,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT fitbit_daily_stats_user_date_unique UNIQUE (user_id, date)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_fitbit_daily_stats_user_id ON fitbit_daily_stats(user_id);
|
||||||
|
CREATE INDEX idx_fitbit_daily_stats_date ON fitbit_daily_stats(date);
|
||||||
|
CREATE INDEX idx_fitbit_daily_stats_user_date ON fitbit_daily_stats(user_id, date);
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- Migration: Drop project_score_sample_mv materialized view
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||||
@@ -0,0 +1,31 @@
|
|||||||
|
-- Migration: Add project_score_sample_mv materialized view
|
||||||
|
--
|
||||||
|
-- One row per (project_id, score, user_id): sum of nodes.score per entry,
|
||||||
|
-- representative entry_message (latest by date). Used for admin display and reporting.
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||||
|
WITH entry_scores AS (
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
n.entry_id,
|
||||||
|
n.user_id,
|
||||||
|
SUM(n.score) AS score,
|
||||||
|
MAX(n.created_date) AS created_date
|
||||||
|
FROM nodes n
|
||||||
|
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||||
|
)
|
||||||
|
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
|
||||||
|
es.project_id,
|
||||||
|
es.score,
|
||||||
|
e.text AS entry_message,
|
||||||
|
es.user_id,
|
||||||
|
es.created_date
|
||||||
|
FROM entry_scores es
|
||||||
|
JOIN entries e ON e.id = es.entry_id
|
||||||
|
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): sum of nodes per entry, representative entry_message (latest by date).';
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
-- Revert to previous MV definition (one row per project_id, score, user_id)
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||||
|
WITH entry_scores AS (
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
n.entry_id,
|
||||||
|
n.user_id,
|
||||||
|
SUM(n.score) AS score,
|
||||||
|
MAX(n.created_date) AS created_date
|
||||||
|
FROM nodes n
|
||||||
|
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||||
|
)
|
||||||
|
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
|
||||||
|
es.project_id,
|
||||||
|
es.score,
|
||||||
|
e.text AS entry_message,
|
||||||
|
es.user_id,
|
||||||
|
es.created_date
|
||||||
|
FROM entry_scores es
|
||||||
|
JOIN entries e ON e.id = es.entry_id
|
||||||
|
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): sum of nodes per entry, representative entry_message (latest by date).';
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
-- Migration: Make entry_message unique per (project_id, user_id) in project_score_sample_mv
|
||||||
|
--
|
||||||
|
-- One row per (project_id, user_id, entry_message): choose the row with latest created_date.
|
||||||
|
-- Ensures the same entry_message does not repeat for different score values.
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||||
|
WITH entry_scores AS (
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
n.entry_id,
|
||||||
|
n.user_id,
|
||||||
|
SUM(n.score) AS score,
|
||||||
|
MAX(n.created_date) AS created_date
|
||||||
|
FROM nodes n
|
||||||
|
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||||
|
),
|
||||||
|
with_message AS (
|
||||||
|
SELECT
|
||||||
|
es.project_id,
|
||||||
|
es.score,
|
||||||
|
e.text AS entry_message,
|
||||||
|
es.user_id,
|
||||||
|
es.created_date
|
||||||
|
FROM entry_scores es
|
||||||
|
JOIN entries e ON e.id = es.entry_id
|
||||||
|
)
|
||||||
|
SELECT DISTINCT ON (project_id, user_id, entry_message)
|
||||||
|
project_id,
|
||||||
|
score,
|
||||||
|
entry_message,
|
||||||
|
user_id,
|
||||||
|
created_date
|
||||||
|
FROM with_message
|
||||||
|
ORDER BY project_id, user_id, entry_message, created_date DESC
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): representative row (latest by date). entry_message is unique per project and user.';
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
-- Revert to one row per (project_id, user_id, entry_message)
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||||
|
WITH entry_scores AS (
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
n.entry_id,
|
||||||
|
n.user_id,
|
||||||
|
SUM(n.score) AS score,
|
||||||
|
MAX(n.created_date) AS created_date
|
||||||
|
FROM nodes n
|
||||||
|
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||||
|
),
|
||||||
|
with_message AS (
|
||||||
|
SELECT
|
||||||
|
es.project_id,
|
||||||
|
es.score,
|
||||||
|
e.text AS entry_message,
|
||||||
|
es.user_id,
|
||||||
|
es.created_date
|
||||||
|
FROM entry_scores es
|
||||||
|
JOIN entries e ON e.id = es.entry_id
|
||||||
|
)
|
||||||
|
SELECT DISTINCT ON (project_id, user_id, entry_message)
|
||||||
|
project_id,
|
||||||
|
score,
|
||||||
|
entry_message,
|
||||||
|
user_id,
|
||||||
|
created_date
|
||||||
|
FROM with_message
|
||||||
|
ORDER BY project_id, user_id, entry_message, created_date DESC
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): representative row (latest by date).';
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
-- Migration: One row per (project_id, user_id, score) in project_score_sample_mv
|
||||||
|
--
|
||||||
|
-- For each score value (per project and user) exactly one record; representative entry_message (latest by date).
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||||
|
WITH entry_scores AS (
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
n.entry_id,
|
||||||
|
n.user_id,
|
||||||
|
SUM(n.score) AS score,
|
||||||
|
MAX(n.created_date) AS created_date
|
||||||
|
FROM nodes n
|
||||||
|
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||||
|
)
|
||||||
|
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
|
||||||
|
es.project_id,
|
||||||
|
es.score,
|
||||||
|
e.text AS entry_message,
|
||||||
|
es.user_id,
|
||||||
|
es.created_date
|
||||||
|
FROM entry_scores es
|
||||||
|
JOIN entries e ON e.id = es.entry_id
|
||||||
|
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): one record per score, representative entry_message (latest by date).';
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
-- Revert to one row per (project_id, score, user_id)
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||||
|
WITH entry_scores AS (
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
n.entry_id,
|
||||||
|
n.user_id,
|
||||||
|
SUM(n.score) AS score,
|
||||||
|
MAX(n.created_date) AS created_date
|
||||||
|
FROM nodes n
|
||||||
|
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||||
|
)
|
||||||
|
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
|
||||||
|
es.project_id,
|
||||||
|
es.score,
|
||||||
|
e.text AS entry_message,
|
||||||
|
es.user_id,
|
||||||
|
es.created_date
|
||||||
|
FROM entry_scores es
|
||||||
|
JOIN entries e ON e.id = es.entry_id
|
||||||
|
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): one record per score, representative entry_message (latest by date).';
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
-- Migration: One entry_message per (project_id, user_id) in project_score_sample_mv
|
||||||
|
--
|
||||||
|
-- One record per score (per project, user) and one record per entry_message per project.
|
||||||
|
-- DISTINCT ON (project_id, user_id, entry_message): same message with different scores → one row (latest by date).
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||||
|
WITH entry_scores AS (
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
n.entry_id,
|
||||||
|
n.user_id,
|
||||||
|
SUM(n.score) AS score,
|
||||||
|
MAX(n.created_date) AS created_date
|
||||||
|
FROM nodes n
|
||||||
|
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||||
|
),
|
||||||
|
with_message AS (
|
||||||
|
SELECT
|
||||||
|
es.project_id,
|
||||||
|
es.score,
|
||||||
|
e.text AS entry_message,
|
||||||
|
es.user_id,
|
||||||
|
es.created_date
|
||||||
|
FROM entry_scores es
|
||||||
|
JOIN entries e ON e.id = es.entry_id
|
||||||
|
)
|
||||||
|
SELECT DISTINCT ON (project_id, user_id, entry_message)
|
||||||
|
project_id,
|
||||||
|
score,
|
||||||
|
entry_message,
|
||||||
|
user_id,
|
||||||
|
created_date
|
||||||
|
FROM with_message
|
||||||
|
ORDER BY project_id, user_id, entry_message, created_date DESC
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||||
|
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): one record per score (chosen row), one entry_message per project; representative = latest by date.';
|
||||||
@@ -0,0 +1,51 @@
|
|||||||
|
-- Migration: Restore max_score column and MV using max_score for normalized_total_score
|
||||||
|
|
||||||
|
ALTER TABLE weekly_goals ADD COLUMN max_score NUMERIC(10,4);
|
||||||
|
UPDATE weekly_goals SET max_score = max_goal_score WHERE max_score IS NULL;
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW weekly_report_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
agg.report_year,
|
||||||
|
agg.report_week,
|
||||||
|
COALESCE(agg.total_score, 0.0000) AS total_score,
|
||||||
|
CASE
|
||||||
|
WHEN wg.max_score IS NULL THEN COALESCE(agg.total_score, 0.0000)
|
||||||
|
ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_score)
|
||||||
|
END AS normalized_total_score
|
||||||
|
FROM
|
||||||
|
projects p
|
||||||
|
LEFT JOIN
|
||||||
|
(
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
EXTRACT(ISOYEAR FROM n.created_date)::INTEGER AS report_year,
|
||||||
|
EXTRACT(WEEK FROM n.created_date)::INTEGER AS report_week,
|
||||||
|
SUM(n.score) AS total_score
|
||||||
|
FROM
|
||||||
|
nodes n
|
||||||
|
WHERE
|
||||||
|
(EXTRACT(ISOYEAR FROM n.created_date)::INTEGER < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (EXTRACT(ISOYEAR FROM n.created_date)::INTEGER = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND EXTRACT(WEEK FROM n.created_date)::INTEGER < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
GROUP BY
|
||||||
|
1, 2, 3
|
||||||
|
) agg
|
||||||
|
ON p.id = agg.project_id
|
||||||
|
LEFT JOIN
|
||||||
|
weekly_goals wg
|
||||||
|
ON wg.project_id = p.id
|
||||||
|
AND wg.goal_year = agg.report_year
|
||||||
|
AND wg.goal_week = agg.report_week
|
||||||
|
WHERE
|
||||||
|
p.deleted = FALSE
|
||||||
|
ORDER BY
|
||||||
|
p.id, agg.report_year, agg.report_week
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_weekly_report_mv_project_year_week
|
||||||
|
ON weekly_report_mv(project_id, report_year, report_week);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_score snapshot. Contains only historical data (excludes current week). Uses nodes.created_date (denormalized) instead of entries.created_date.';
|
||||||
@@ -0,0 +1,51 @@
|
|||||||
|
-- Migration: Remove max_score from weekly_goals, use max_goal_score for normalized_total_score
|
||||||
|
-- normalized_total_score is now computed from max_goal_score (current goal) instead of max_score (snapshot).
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW weekly_report_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
agg.report_year,
|
||||||
|
agg.report_week,
|
||||||
|
COALESCE(agg.total_score, 0.0000) AS total_score,
|
||||||
|
CASE
|
||||||
|
WHEN wg.max_goal_score IS NULL THEN COALESCE(agg.total_score, 0.0000)
|
||||||
|
ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_goal_score)
|
||||||
|
END AS normalized_total_score
|
||||||
|
FROM
|
||||||
|
projects p
|
||||||
|
LEFT JOIN
|
||||||
|
(
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
EXTRACT(ISOYEAR FROM n.created_date)::INTEGER AS report_year,
|
||||||
|
EXTRACT(WEEK FROM n.created_date)::INTEGER AS report_week,
|
||||||
|
SUM(n.score) AS total_score
|
||||||
|
FROM
|
||||||
|
nodes n
|
||||||
|
WHERE
|
||||||
|
(EXTRACT(ISOYEAR FROM n.created_date)::INTEGER < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (EXTRACT(ISOYEAR FROM n.created_date)::INTEGER = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND EXTRACT(WEEK FROM n.created_date)::INTEGER < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
GROUP BY
|
||||||
|
1, 2, 3
|
||||||
|
) agg
|
||||||
|
ON p.id = agg.project_id
|
||||||
|
LEFT JOIN
|
||||||
|
weekly_goals wg
|
||||||
|
ON wg.project_id = p.id
|
||||||
|
AND wg.goal_year = agg.report_year
|
||||||
|
AND wg.goal_week = agg.report_week
|
||||||
|
WHERE
|
||||||
|
p.deleted = FALSE
|
||||||
|
ORDER BY
|
||||||
|
p.id, agg.report_year, agg.report_week
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_weekly_report_mv_project_year_week
|
||||||
|
ON weekly_report_mv(project_id, report_year, report_week);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_goal_score. Contains only historical data (excludes current week). Uses nodes.created_date (denormalized) instead of entries.created_date.';
|
||||||
|
|
||||||
|
ALTER TABLE weekly_goals DROP COLUMN max_score;
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
-- Откат: удаляем новые колонки
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS steps_task_id;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS floors_task_id;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS steps_goal_task_id;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS steps_goal_subtask_id;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS floors_goal_task_id;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS floors_goal_subtask_id;
|
||||||
|
|
||||||
|
ALTER TABLE fitbit_daily_stats DROP COLUMN IF EXISTS goal_steps;
|
||||||
|
ALTER TABLE fitbit_daily_stats DROP COLUMN IF EXISTS goal_floors;
|
||||||
|
|
||||||
|
-- Восстанавливаем старые колонки
|
||||||
|
ALTER TABLE fitbit_daily_stats ADD COLUMN active_zone_minutes INTEGER DEFAULT 0;
|
||||||
|
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN goal_steps_min INTEGER DEFAULT 8000;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN goal_steps_max INTEGER DEFAULT 10000;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN goal_floors_min INTEGER DEFAULT 8;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN goal_floors_max INTEGER DEFAULT 10;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN goal_azm_min INTEGER DEFAULT 22;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN goal_azm_max INTEGER DEFAULT 44;
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
-- =============================================
|
||||||
|
-- Удаляем старые колонки целей (goals) из fitbit_integrations
|
||||||
|
-- Теперь цели берутся из Fitbit API
|
||||||
|
-- =============================================
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS goal_steps_min;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS goal_steps_max;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS goal_floors_min;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS goal_floors_max;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS goal_azm_min;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS goal_azm_max;
|
||||||
|
|
||||||
|
-- =============================================
|
||||||
|
-- Удаляем AZM колонку из fitbit_daily_stats
|
||||||
|
-- =============================================
|
||||||
|
ALTER TABLE fitbit_daily_stats DROP COLUMN IF EXISTS active_zone_minutes;
|
||||||
|
|
||||||
|
-- =============================================
|
||||||
|
-- Добавляем колонки для кэширования целей из Fitbit API
|
||||||
|
-- =============================================
|
||||||
|
ALTER TABLE fitbit_daily_stats ADD COLUMN goal_steps INTEGER;
|
||||||
|
ALTER TABLE fitbit_daily_stats ADD COLUMN goal_floors INTEGER;
|
||||||
|
|
||||||
|
-- =============================================
|
||||||
|
-- Добавляем привязки к задачам для записи прогресса
|
||||||
|
-- steps_task_id - задача куда записывать шаги как progression_value
|
||||||
|
-- floors_task_id - задача куда записывать этажи как progression_value
|
||||||
|
-- =============================================
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN steps_task_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN floors_task_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
|
|
||||||
|
-- =============================================
|
||||||
|
-- Добавляем привязки для целей (goals)
|
||||||
|
-- Для каждой цели храним И задачу И подзадачу
|
||||||
|
-- steps_goal_task_id - родительская задача для цели шагов
|
||||||
|
-- steps_goal_subtask_id - подзадача внутри неё, которая будет checked/unchecked
|
||||||
|
-- floors_goal_task_id - родительская задача для цели этажей
|
||||||
|
-- floors_goal_subtask_id - подзадача внутри неё
|
||||||
|
-- =============================================
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN steps_goal_task_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN steps_goal_subtask_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN floors_goal_task_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN floors_goal_subtask_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
-- Restore max_score for rollback (snapshot of goal; can be repopulated from max_goal_score)
|
||||||
|
|
||||||
|
ALTER TABLE weekly_goals ADD COLUMN IF NOT EXISTS max_score NUMERIC(10,4);
|
||||||
|
UPDATE weekly_goals SET max_score = max_goal_score WHERE max_score IS NULL;
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
-- Migration: Drop weekly_goals.max_score if still present (e.g. prod where 000020 wasn't applied)
|
||||||
|
-- normalized_total_score in weekly_report_mv uses max_goal_score; max_score is unused.
|
||||||
|
|
||||||
|
ALTER TABLE weekly_goals DROP COLUMN IF EXISTS max_score;
|
||||||
@@ -0,0 +1,81 @@
|
|||||||
|
-- Migration: Rollback to MV using max_score and restore max_score column.
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;
|
||||||
|
ALTER TABLE weekly_goals ADD COLUMN IF NOT EXISTS max_score NUMERIC(10,4);
|
||||||
|
UPDATE weekly_goals SET max_score = max_goal_score WHERE max_score IS NULL;
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW weekly_report_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
agg.report_year,
|
||||||
|
agg.report_week,
|
||||||
|
COALESCE(agg.total_score, 0.0000) AS total_score,
|
||||||
|
CASE
|
||||||
|
WHEN wg.max_score IS NULL THEN COALESCE(agg.total_score, 0.0000)
|
||||||
|
ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_score)
|
||||||
|
END AS normalized_total_score
|
||||||
|
FROM
|
||||||
|
projects p
|
||||||
|
LEFT JOIN
|
||||||
|
(
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
EXTRACT(ISOYEAR FROM n.created_date)::INTEGER AS report_year,
|
||||||
|
EXTRACT(WEEK FROM n.created_date)::INTEGER AS report_week,
|
||||||
|
SUM(n.score) AS total_score
|
||||||
|
FROM
|
||||||
|
nodes n
|
||||||
|
WHERE
|
||||||
|
(EXTRACT(ISOYEAR FROM n.created_date)::INTEGER < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (EXTRACT(ISOYEAR FROM n.created_date)::INTEGER = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND EXTRACT(WEEK FROM n.created_date)::INTEGER < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
GROUP BY
|
||||||
|
1, 2, 3
|
||||||
|
) agg
|
||||||
|
ON p.id = agg.project_id
|
||||||
|
LEFT JOIN
|
||||||
|
weekly_goals wg
|
||||||
|
ON wg.project_id = p.id
|
||||||
|
AND wg.goal_year = agg.report_year
|
||||||
|
AND wg.goal_week = agg.report_week
|
||||||
|
WHERE
|
||||||
|
p.deleted = FALSE
|
||||||
|
ORDER BY
|
||||||
|
p.id, agg.report_year, agg.report_week
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_weekly_report_mv_project_year_week
|
||||||
|
ON weekly_report_mv(project_id, report_year, report_week);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_score snapshot. Contains only historical data (excludes current week). Uses nodes.created_date (denormalized) instead of entries.created_date.';
|
||||||
|
|
||||||
|
-- Recreate projects_median_mv (last 4 weeks per 000008)
|
||||||
|
CREATE MATERIALIZED VIEW projects_median_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
p.user_id,
|
||||||
|
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
project_id,
|
||||||
|
normalized_total_score,
|
||||||
|
report_year,
|
||||||
|
report_week,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
|
||||||
|
FROM weekly_report_mv
|
||||||
|
WHERE
|
||||||
|
(report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
) sub
|
||||||
|
JOIN projects p ON p.id = sub.project_id
|
||||||
|
WHERE rn <= 4 AND p.deleted = FALSE
|
||||||
|
GROUP BY p.id, p.user_id
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
|
||||||
|
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 4 weeks of historical data. Includes user_id for multi-tenant support.';
|
||||||
@@ -0,0 +1,82 @@
|
|||||||
|
-- Migration: Fix weekly_report_mv to use max_goal_score for normalized_total_score.
|
||||||
|
-- Safe to run on DBs where 000020 was not applied (MV still uses max_score, column exists but is NULL).
|
||||||
|
-- projects_median_mv depends on weekly_report_mv, so we drop and recreate it.
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv;
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW weekly_report_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
agg.report_year,
|
||||||
|
agg.report_week,
|
||||||
|
COALESCE(agg.total_score, 0.0000) AS total_score,
|
||||||
|
CASE
|
||||||
|
WHEN wg.max_goal_score IS NULL THEN COALESCE(agg.total_score, 0.0000)
|
||||||
|
ELSE LEAST(COALESCE(agg.total_score, 0.0000), wg.max_goal_score)
|
||||||
|
END AS normalized_total_score
|
||||||
|
FROM
|
||||||
|
projects p
|
||||||
|
LEFT JOIN
|
||||||
|
(
|
||||||
|
SELECT
|
||||||
|
n.project_id,
|
||||||
|
EXTRACT(ISOYEAR FROM n.created_date)::INTEGER AS report_year,
|
||||||
|
EXTRACT(WEEK FROM n.created_date)::INTEGER AS report_week,
|
||||||
|
SUM(n.score) AS total_score
|
||||||
|
FROM
|
||||||
|
nodes n
|
||||||
|
WHERE
|
||||||
|
(EXTRACT(ISOYEAR FROM n.created_date)::INTEGER < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (EXTRACT(ISOYEAR FROM n.created_date)::INTEGER = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND EXTRACT(WEEK FROM n.created_date)::INTEGER < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
GROUP BY
|
||||||
|
1, 2, 3
|
||||||
|
) agg
|
||||||
|
ON p.id = agg.project_id
|
||||||
|
LEFT JOIN
|
||||||
|
weekly_goals wg
|
||||||
|
ON wg.project_id = p.id
|
||||||
|
AND wg.goal_year = agg.report_year
|
||||||
|
AND wg.goal_week = agg.report_week
|
||||||
|
WHERE
|
||||||
|
p.deleted = FALSE
|
||||||
|
ORDER BY
|
||||||
|
p.id, agg.report_year, agg.report_week
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_weekly_report_mv_project_year_week
|
||||||
|
ON weekly_report_mv(project_id, report_year, report_week);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations at year boundaries. Includes all projects via LEFT JOIN. Adds normalized_total_score using weekly_goals.max_goal_score. Contains only historical data (excludes current week). Uses nodes.created_date (denormalized) instead of entries.created_date.';
|
||||||
|
|
||||||
|
-- Recreate projects_median_mv (depends on weekly_report_mv, last 4 weeks per 000008)
|
||||||
|
CREATE MATERIALIZED VIEW projects_median_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
p.user_id,
|
||||||
|
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
project_id,
|
||||||
|
normalized_total_score,
|
||||||
|
report_year,
|
||||||
|
report_week,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
|
||||||
|
FROM weekly_report_mv
|
||||||
|
WHERE
|
||||||
|
(report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
) sub
|
||||||
|
JOIN projects p ON p.id = sub.project_id
|
||||||
|
WHERE rn <= 4 AND p.deleted = FALSE
|
||||||
|
GROUP BY p.id, p.user_id
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
|
||||||
|
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 4 weeks of historical data. Includes user_id for multi-tenant support.';
|
||||||
|
|
||||||
|
ALTER TABLE weekly_goals DROP COLUMN IF EXISTS max_score;
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
-- Migration: Recreate projects_median_mv (rollback of 000024)
|
||||||
|
-- Definition: last 4 weeks per 000008/000023
|
||||||
|
|
||||||
|
CREATE MATERIALIZED VIEW projects_median_mv AS
|
||||||
|
SELECT
|
||||||
|
p.id AS project_id,
|
||||||
|
p.user_id,
|
||||||
|
PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY normalized_total_score) AS median_score
|
||||||
|
FROM (
|
||||||
|
SELECT
|
||||||
|
project_id,
|
||||||
|
normalized_total_score,
|
||||||
|
report_year,
|
||||||
|
report_week,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn
|
||||||
|
FROM weekly_report_mv
|
||||||
|
WHERE
|
||||||
|
(report_year < EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER)
|
||||||
|
OR (report_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER
|
||||||
|
AND report_week < EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER)
|
||||||
|
) sub
|
||||||
|
JOIN projects p ON p.id = sub.project_id
|
||||||
|
WHERE rn <= 4 AND p.deleted = FALSE
|
||||||
|
GROUP BY p.id, p.user_id
|
||||||
|
WITH DATA;
|
||||||
|
|
||||||
|
CREATE INDEX idx_projects_median_mv_project_id ON projects_median_mv(project_id);
|
||||||
|
CREATE INDEX idx_projects_median_mv_user_id ON projects_median_mv(user_id);
|
||||||
|
|
||||||
|
COMMENT ON MATERIALIZED VIEW projects_median_mv IS 'Materialized view calculating median score for each project based on last 4 weeks of historical data. Includes user_id for multi-tenant support.';
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
-- Migration: Drop projects_median_mv (unlock weeks now use weekly_goals.min_goal_score)
|
||||||
|
-- Date: 2026-02-24
|
||||||
|
|
||||||
|
DROP MATERIALIZED VIEW IF EXISTS projects_median_mv;
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- Remove rejected column from wishlist_items
|
||||||
|
DROP INDEX IF EXISTS idx_wishlist_items_rejected;
|
||||||
|
ALTER TABLE wishlist_items DROP COLUMN IF EXISTS rejected;
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
-- Add rejected column to wishlist_items
|
||||||
|
ALTER TABLE wishlist_items ADD COLUMN rejected BOOLEAN DEFAULT FALSE;
|
||||||
|
|
||||||
|
-- Create index for filtering by rejected status
|
||||||
|
CREATE INDEX idx_wishlist_items_rejected ON wishlist_items(rejected) WHERE rejected = TRUE;
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
DROP TABLE IF EXISTS shopping_items;
|
||||||
|
DROP TABLE IF EXISTS shopping_board_members;
|
||||||
|
DROP TABLE IF EXISTS shopping_boards;
|
||||||
50
play-life-backend/migrations/000026_shopping_list.up.sql
Normal file
50
play-life-backend/migrations/000026_shopping_list.up.sql
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
-- Shopping boards (аналог wishlist_boards)
|
||||||
|
CREATE TABLE shopping_boards (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
owner_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
invite_token VARCHAR(64) UNIQUE,
|
||||||
|
invite_enabled BOOLEAN DEFAULT FALSE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
deleted BOOLEAN DEFAULT FALSE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_shopping_boards_owner_id ON shopping_boards(owner_id);
|
||||||
|
CREATE INDEX idx_shopping_boards_invite_token ON shopping_boards(invite_token) WHERE invite_token IS NOT NULL;
|
||||||
|
CREATE INDEX idx_shopping_boards_owner_deleted ON shopping_boards(owner_id, deleted);
|
||||||
|
|
||||||
|
-- Shopping board members (аналог wishlist_board_members)
|
||||||
|
CREATE TABLE shopping_board_members (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
board_id INTEGER NOT NULL REFERENCES shopping_boards(id) ON DELETE CASCADE,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
joined_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT unique_shopping_board_member UNIQUE (board_id, user_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_shopping_board_members_board_id ON shopping_board_members(board_id);
|
||||||
|
CREATE INDEX idx_shopping_board_members_user_id ON shopping_board_members(user_id);
|
||||||
|
|
||||||
|
-- Shopping items (товары)
|
||||||
|
CREATE TABLE shopping_items (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
board_id INTEGER NOT NULL REFERENCES shopping_boards(id) ON DELETE CASCADE,
|
||||||
|
author_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
group_name VARCHAR(255),
|
||||||
|
volume_base NUMERIC(10,4) NOT NULL DEFAULT 1,
|
||||||
|
repetition_period INTERVAL,
|
||||||
|
next_show_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
completed INTEGER DEFAULT 0,
|
||||||
|
last_completed_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
deleted BOOLEAN DEFAULT FALSE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_shopping_items_board_id ON shopping_items(board_id);
|
||||||
|
CREATE INDEX idx_shopping_items_user_id ON shopping_items(user_id);
|
||||||
|
CREATE INDEX idx_shopping_items_deleted ON shopping_items(deleted);
|
||||||
|
CREATE INDEX idx_shopping_items_next_show_at ON shopping_items(next_show_at);
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE shopping_items DROP COLUMN description;
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE shopping_items ADD COLUMN description TEXT;
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE IF EXISTS shopping_item_history;
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
CREATE TABLE shopping_item_history (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
item_id INTEGER NOT NULL REFERENCES shopping_items(id) ON DELETE CASCADE,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id),
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
volume NUMERIC(10,4) NOT NULL DEFAULT 1,
|
||||||
|
completed_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_shopping_item_history_item_id ON shopping_item_history(item_id);
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE tasks DROP COLUMN IF EXISTS purchase_config_id;
|
||||||
|
DROP TABLE IF EXISTS purchase_config_boards;
|
||||||
|
DROP TABLE IF EXISTS purchase_configs;
|
||||||
24
play-life-backend/migrations/000029_purchase_tasks.up.sql
Normal file
24
play-life-backend/migrations/000029_purchase_tasks.up.sql
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
-- Purchase task configurations
|
||||||
|
CREATE TABLE purchase_configs (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_purchase_configs_user_id ON purchase_configs(user_id);
|
||||||
|
|
||||||
|
-- Purchase config board/group associations
|
||||||
|
CREATE TABLE purchase_config_boards (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
purchase_config_id INTEGER NOT NULL REFERENCES purchase_configs(id) ON DELETE CASCADE,
|
||||||
|
board_id INTEGER NOT NULL REFERENCES shopping_boards(id) ON DELETE CASCADE,
|
||||||
|
group_name VARCHAR(255),
|
||||||
|
UNIQUE (purchase_config_id, board_id, group_name)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_purchase_config_boards_config_id ON purchase_config_boards(purchase_config_id);
|
||||||
|
CREATE INDEX idx_purchase_config_boards_board_id ON purchase_config_boards(board_id);
|
||||||
|
|
||||||
|
-- Add purchase_config_id to tasks
|
||||||
|
ALTER TABLE tasks ADD COLUMN purchase_config_id INTEGER REFERENCES purchase_configs(id) ON DELETE SET NULL;
|
||||||
|
CREATE INDEX idx_tasks_purchase_config_id ON tasks(purchase_config_id);
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE users
|
||||||
|
DROP COLUMN IF EXISTS priorities_confirmed_year,
|
||||||
|
DROP COLUMN IF EXISTS priorities_confirmed_week;
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE users
|
||||||
|
ADD COLUMN priorities_confirmed_year INTEGER NOT NULL DEFAULT 0,
|
||||||
|
ADD COLUMN priorities_confirmed_week INTEGER NOT NULL DEFAULT 0;
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE IF EXISTS shopping_volume_records;
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
-- Отдельная таблица записей об остатках (создаётся при каждом выполнении и переносе)
|
||||||
|
CREATE TABLE shopping_volume_records (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
item_id INTEGER NOT NULL REFERENCES shopping_items(id) ON DELETE CASCADE,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id),
|
||||||
|
action_type VARCHAR(20) NOT NULL,
|
||||||
|
volume_remaining NUMERIC(10,4),
|
||||||
|
volume_purchased NUMERIC(10,4),
|
||||||
|
daily_consumption NUMERIC(10,4),
|
||||||
|
created_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_shopping_volume_records_item_id ON shopping_volume_records(item_id);
|
||||||
|
|
||||||
|
-- Создаём начальные записи для всех существующих товаров (остаток 0, дата = created_at)
|
||||||
|
INSERT INTO shopping_volume_records (item_id, user_id, action_type, volume_remaining, volume_purchased, created_at)
|
||||||
|
SELECT id, user_id, 'create', 0, 0, created_at
|
||||||
|
FROM shopping_items
|
||||||
|
WHERE deleted = FALSE;
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE shopping_volume_records DROP COLUMN IF EXISTS next_show_at;
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE shopping_volume_records ADD COLUMN next_show_at TIMESTAMP;
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE IF EXISTS board_archives;
|
||||||
10
play-life-backend/migrations/000033_board_archives.up.sql
Normal file
10
play-life-backend/migrations/000033_board_archives.up.sql
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
CREATE TABLE board_archives (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id),
|
||||||
|
board_type VARCHAR(20) NOT NULL, -- 'wishlist' or 'shopping'
|
||||||
|
board_id INTEGER NOT NULL,
|
||||||
|
archived_at TIMESTAMP NOT NULL DEFAULT NOW(),
|
||||||
|
UNIQUE(user_id, board_type, board_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_board_archives_user_type ON board_archives(user_id, board_type);
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
ALTER TABLE tasks DROP COLUMN default_auto_complete;
|
||||||
|
ALTER TABLE tasks DROP COLUMN default_progress;
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
ALTER TABLE tasks ADD COLUMN default_auto_complete BOOLEAN DEFAULT FALSE;
|
||||||
|
ALTER TABLE tasks ADD COLUMN default_progress NUMERIC(10,4);
|
||||||
|
-- Для существующих задач: default_progress = progression_base
|
||||||
|
UPDATE tasks SET default_progress = progression_base WHERE progression_base IS NOT NULL;
|
||||||
8
play-life-backend/migrations/000035_chores.down.sql
Normal file
8
play-life-backend/migrations/000035_chores.down.sql
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
DROP INDEX IF EXISTS idx_tasks_chore_id_user_id_unique;
|
||||||
|
DROP INDEX IF EXISTS idx_tasks_chore_id;
|
||||||
|
ALTER TABLE tasks DROP COLUMN IF EXISTS chore_id;
|
||||||
|
DROP TABLE IF EXISTS chore_item_history;
|
||||||
|
DROP TABLE IF EXISTS chore_items;
|
||||||
|
DROP TABLE IF EXISTS chore_board_members;
|
||||||
|
DROP TABLE IF EXISTS chore_boards;
|
||||||
|
DELETE FROM board_archives WHERE board_type = 'chores';
|
||||||
69
play-life-backend/migrations/000035_chores.up.sql
Normal file
69
play-life-backend/migrations/000035_chores.up.sql
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
-- Chore boards (аналог shopping_boards)
|
||||||
|
CREATE TABLE chore_boards (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
owner_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
invite_token VARCHAR(64) UNIQUE,
|
||||||
|
invite_enabled BOOLEAN DEFAULT FALSE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
deleted BOOLEAN DEFAULT FALSE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_chore_boards_owner_id ON chore_boards(owner_id);
|
||||||
|
CREATE INDEX idx_chore_boards_invite_token ON chore_boards(invite_token) WHERE invite_token IS NOT NULL;
|
||||||
|
CREATE INDEX idx_chore_boards_owner_deleted ON chore_boards(owner_id, deleted);
|
||||||
|
|
||||||
|
-- Chore board members (аналог shopping_board_members)
|
||||||
|
CREATE TABLE chore_board_members (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
board_id INTEGER NOT NULL REFERENCES chore_boards(id) ON DELETE CASCADE,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
joined_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT unique_chore_board_member UNIQUE (board_id, user_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_chore_board_members_board_id ON chore_board_members(board_id);
|
||||||
|
CREATE INDEX idx_chore_board_members_user_id ON chore_board_members(user_id);
|
||||||
|
|
||||||
|
-- Chore items (дела)
|
||||||
|
CREATE TABLE chore_items (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
board_id INTEGER NOT NULL REFERENCES chore_boards(id) ON DELETE CASCADE,
|
||||||
|
author_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT,
|
||||||
|
group_name VARCHAR(255),
|
||||||
|
repetition_period INTERVAL,
|
||||||
|
repetition_date TEXT,
|
||||||
|
next_show_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
completed INTEGER DEFAULT 0,
|
||||||
|
last_completed_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
deleted BOOLEAN DEFAULT FALSE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_chore_items_board_id ON chore_items(board_id);
|
||||||
|
CREATE INDEX idx_chore_items_user_id ON chore_items(user_id);
|
||||||
|
CREATE INDEX idx_chore_items_deleted ON chore_items(deleted);
|
||||||
|
CREATE INDEX idx_chore_items_next_show_at ON chore_items(next_show_at);
|
||||||
|
|
||||||
|
-- Chore item history (история выполнений)
|
||||||
|
CREATE TABLE chore_item_history (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
item_id INTEGER NOT NULL REFERENCES chore_items(id) ON DELETE CASCADE,
|
||||||
|
user_id INTEGER NOT NULL REFERENCES users(id),
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
completed_at TIMESTAMP NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_chore_item_history_item_id ON chore_item_history(item_id);
|
||||||
|
|
||||||
|
-- Add chore_id to tasks table
|
||||||
|
ALTER TABLE tasks ADD COLUMN chore_id INTEGER REFERENCES chore_items(id) ON DELETE SET NULL;
|
||||||
|
CREATE INDEX idx_tasks_chore_id ON tasks(chore_id);
|
||||||
|
CREATE UNIQUE INDEX idx_tasks_chore_id_user_id_unique
|
||||||
|
ON tasks(chore_id, user_id)
|
||||||
|
WHERE chore_id IS NOT NULL AND deleted = FALSE;
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
ALTER TABLE fitbit_daily_stats DROP COLUMN IF EXISTS cardio_load;
|
||||||
|
ALTER TABLE fitbit_daily_stats DROP COLUMN IF EXISTS goal_cardio_load;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS cardio_load_task_id;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS cardio_load_goal_task_id;
|
||||||
|
ALTER TABLE fitbit_integrations DROP COLUMN IF EXISTS cardio_load_goal_subtask_id;
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
-- =============================================
|
||||||
|
-- Добавляем cardio_load в fitbit_daily_stats
|
||||||
|
-- =============================================
|
||||||
|
ALTER TABLE fitbit_daily_stats ADD COLUMN cardio_load INTEGER;
|
||||||
|
ALTER TABLE fitbit_daily_stats ADD COLUMN goal_cardio_load INTEGER;
|
||||||
|
|
||||||
|
-- =============================================
|
||||||
|
-- Добавляем привязки задач для cardio_load в fitbit_integrations
|
||||||
|
-- cardio_load_task_id - задача куда записывать кардионагрузку как progression_value
|
||||||
|
-- cardio_load_goal_task_id - родительская задача для цели кардионагрузки
|
||||||
|
-- cardio_load_goal_subtask_id - подзадача внутри неё, которая будет checked/unchecked
|
||||||
|
-- =============================================
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN cardio_load_task_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN cardio_load_goal_task_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
|
ALTER TABLE fitbit_integrations ADD COLUMN cardio_load_goal_subtask_id INTEGER REFERENCES tasks(id) ON DELETE SET NULL;
|
||||||
@@ -45,7 +45,6 @@ docker-compose exec db psql -U playeng -d playeng -f /migrations/001_create_sche
|
|||||||
- `goal_week` (INTEGER NOT NULL)
|
- `goal_week` (INTEGER NOT NULL)
|
||||||
- `min_goal_score` (NUMERIC(10,4) NOT NULL, DEFAULT 0)
|
- `min_goal_score` (NUMERIC(10,4) NOT NULL, DEFAULT 0)
|
||||||
- `max_goal_score` (NUMERIC(10,4))
|
- `max_goal_score` (NUMERIC(10,4))
|
||||||
- `max_score` (NUMERIC(10,4), NULL) — snapshot max на неделю (заполняется только для новых недель)
|
|
||||||
- `priority` (SMALLINT)
|
- `priority` (SMALLINT)
|
||||||
- UNIQUE CONSTRAINT: `(project_id, goal_year, goal_week)`
|
- UNIQUE CONSTRAINT: `(project_id, goal_year, goal_week)`
|
||||||
|
|
||||||
@@ -56,7 +55,7 @@ docker-compose exec db psql -U playeng -d playeng -f /migrations/001_create_sche
|
|||||||
- `report_year` (INTEGER)
|
- `report_year` (INTEGER)
|
||||||
- `report_week` (INTEGER)
|
- `report_week` (INTEGER)
|
||||||
- `total_score` (NUMERIC)
|
- `total_score` (NUMERIC)
|
||||||
- `normalized_total_score` (NUMERIC)
|
- `normalized_total_score` (NUMERIC) — ограничение total_score по `max_goal_score` (миграция 000020 удалила колонку `max_score`, normalized считается по `max_goal_score`)
|
||||||
|
|
||||||
## Миграции
|
## Миграции
|
||||||
|
|
||||||
|
|||||||
BIN
play-life-backend/play-eng-backend
Executable file
BIN
play-life-backend/play-eng-backend
Executable file
Binary file not shown.
12
play-life-llm/.gitignore
vendored
Normal file
12
play-life-llm/.gitignore
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Env with secrets (Tavily API key, etc.)
|
||||||
|
.env
|
||||||
|
|
||||||
|
# Binary
|
||||||
|
play-life-llm
|
||||||
|
*.exe
|
||||||
|
|
||||||
|
# IDE / OS
|
||||||
|
.idea/
|
||||||
|
.vscode/
|
||||||
|
*.swp
|
||||||
|
.DS_Store
|
||||||
19
play-life-llm/Dockerfile
Normal file
19
play-life-llm/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
# Build stage
|
||||||
|
FROM golang:1.24-alpine AS builder
|
||||||
|
WORKDIR /app
|
||||||
|
ENV GOPROXY=https://proxy.golang.org,direct
|
||||||
|
ENV GOSUMDB=sum.golang.org
|
||||||
|
COPY go.mod go.sum ./
|
||||||
|
RUN go mod download
|
||||||
|
COPY . .
|
||||||
|
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o play-life-llm .
|
||||||
|
|
||||||
|
# Runtime stage
|
||||||
|
FROM alpine:latest
|
||||||
|
RUN apk --no-cache add ca-certificates wget
|
||||||
|
WORKDIR /app
|
||||||
|
COPY --from=builder /app/play-life-llm .
|
||||||
|
EXPOSE 8090
|
||||||
|
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
|
||||||
|
CMD wget -q -O- http://localhost:8090/health || exit 1
|
||||||
|
CMD ["./play-life-llm"]
|
||||||
12
play-life-llm/env.example
Normal file
12
play-life-llm/env.example
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Ollama API base URL (default: http://localhost:11434)
|
||||||
|
# For Docker on Mac/Windows use: http://host.docker.internal:11434
|
||||||
|
OLLAMA_HOST=http://localhost:11434
|
||||||
|
|
||||||
|
# Tavily API key for web search (required when model uses web_search tool)
|
||||||
|
TAVILY_API_KEY=
|
||||||
|
|
||||||
|
# HTTP server port (default: 8090)
|
||||||
|
PORT=8090
|
||||||
|
|
||||||
|
# Default Ollama model (default: llama3.1:70b)
|
||||||
|
OLLAMA_MODEL=llama3.1:70b
|
||||||
5
play-life-llm/go.mod
Normal file
5
play-life-llm/go.mod
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
module play-life-llm
|
||||||
|
|
||||||
|
go 1.24.0
|
||||||
|
|
||||||
|
require github.com/gorilla/mux v1.8.1
|
||||||
2
play-life-llm/go.sum
Normal file
2
play-life-llm/go.sum
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||||
|
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||||
177
play-life-llm/internal/handler/ask.go
Normal file
177
play-life-llm/internal/handler/ask.go
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"play-life-llm/internal/ollama"
|
||||||
|
"play-life-llm/internal/tavily"
|
||||||
|
)
|
||||||
|
|
||||||
|
// AskRequest is the POST /ask body.
|
||||||
|
type AskRequest struct {
|
||||||
|
Prompt string `json:"prompt"`
|
||||||
|
ResponseSchema interface{} `json:"response_schema"`
|
||||||
|
Model string `json:"model,omitempty"`
|
||||||
|
// AllowWebSearch: если true, в запрос к Ollama добавляются tools (web_search), и при вызове модели выполняется поиск через Tavily. Если false (по умолчанию), tools не передаются — модель просто возвращает JSON по схеме (подходит для простых запросов без интернета).
|
||||||
|
AllowWebSearch bool `json:"allow_web_search,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AskResponse is the successful response (result is JSON by schema).
|
||||||
|
type AskResponse struct {
|
||||||
|
Result json.RawMessage `json:"result"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AskHandler handles POST /ask: prompt + response_schema -> LLM with optional web search, returns JSON.
|
||||||
|
type AskHandler struct {
|
||||||
|
Ollama *ollama.Client
|
||||||
|
Tavily *tavily.Client
|
||||||
|
DefaultModel string
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServeHTTP implements http.Handler.
|
||||||
|
func (h *AskHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req AskRequest
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
sendError(w, "invalid JSON body", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.Prompt == "" {
|
||||||
|
sendError(w, "prompt is required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.ResponseSchema == nil {
|
||||||
|
sendError(w, "response_schema is required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
model := req.Model
|
||||||
|
if model == "" {
|
||||||
|
model = h.DefaultModel
|
||||||
|
}
|
||||||
|
if model == "" {
|
||||||
|
model = "llama3.1:70b"
|
||||||
|
}
|
||||||
|
|
||||||
|
var tools []ollama.Tool
|
||||||
|
if req.AllowWebSearch {
|
||||||
|
tools = []ollama.Tool{ollama.WebSearchTool()}
|
||||||
|
}
|
||||||
|
messages := []ollama.ChatMessage{}
|
||||||
|
if req.AllowWebSearch {
|
||||||
|
messages = append(messages, ollama.ChatMessage{
|
||||||
|
Role: "system",
|
||||||
|
Content: "When the user asks for current, recent, or real-time information (weather, prices, news, etc.), you MUST call the web_search tool with a suitable query. Do not answer from memory — use the tool and then summarize the results in your response.",
|
||||||
|
})
|
||||||
|
// Гарантированный запрос в Tavily: предпоиск по промпту пользователя, результат подмешивается в контекст.
|
||||||
|
searchQuery := req.Prompt
|
||||||
|
if len(searchQuery) > 200 {
|
||||||
|
searchQuery = searchQuery[:200]
|
||||||
|
}
|
||||||
|
log.Printf("tavily pre-search: query=%q", searchQuery)
|
||||||
|
preSearchResult, err := h.Tavily.Search(searchQuery)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("tavily pre-search error: %v", err)
|
||||||
|
preSearchResult = "search failed: " + err.Error()
|
||||||
|
} else {
|
||||||
|
log.Printf("tavily pre-search ok: %d bytes", len(preSearchResult))
|
||||||
|
}
|
||||||
|
messages = append(messages, ollama.ChatMessage{
|
||||||
|
Role: "system",
|
||||||
|
Content: "Relevant web search result for the user's question (use this to answer; if not enough, you may call web_search again):\n\n" + preSearchResult,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
messages = append(messages, ollama.ChatMessage{
|
||||||
|
Role: "user", Content: req.Prompt,
|
||||||
|
})
|
||||||
|
|
||||||
|
const maxToolRounds = 20
|
||||||
|
for round := 0; round < maxToolRounds; round++ {
|
||||||
|
chatReq := &ollama.ChatRequest{
|
||||||
|
Model: model,
|
||||||
|
Messages: messages,
|
||||||
|
Stream: false,
|
||||||
|
Format: req.ResponseSchema,
|
||||||
|
Tools: tools,
|
||||||
|
}
|
||||||
|
resp, err := h.Ollama.Chat(chatReq)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("ollama chat error: %v", err)
|
||||||
|
sendError(w, "ollama request failed: "+err.Error(), http.StatusBadGateway)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
messages = append(messages, resp.Message)
|
||||||
|
|
||||||
|
if n := len(resp.Message.ToolCalls); n > 0 {
|
||||||
|
log.Printf("ollama returned %d tool_calls", n)
|
||||||
|
}
|
||||||
|
if len(resp.Message.ToolCalls) == 0 {
|
||||||
|
// Final answer: message.content is JSON by schema
|
||||||
|
content := resp.Message.Content
|
||||||
|
if content == "" {
|
||||||
|
sendError(w, "empty response from model", http.StatusBadGateway)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Return as { "result": <parsed JSON> } so client gets valid JSON
|
||||||
|
var raw json.RawMessage
|
||||||
|
if err := json.Unmarshal([]byte(content), &raw); err != nil {
|
||||||
|
// If not valid JSON, return as string inside result
|
||||||
|
raw = json.RawMessage(`"` + escapeJSONString(content) + `"`)
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
_ = json.NewEncoder(w).Encode(AskResponse{Result: raw})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute tool calls (web_search via Tavily)
|
||||||
|
for _, tc := range resp.Message.ToolCalls {
|
||||||
|
if tc.Function.Name != "web_search" {
|
||||||
|
messages = append(messages, ollama.ChatMessage{
|
||||||
|
Role: "tool", ToolName: tc.Function.Name, Content: "unknown tool",
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
query := ollama.QueryFromToolCall(tc)
|
||||||
|
if query == "" {
|
||||||
|
// Некоторые модели подставляют в arguments не "query", а другие поля — используем промпт пользователя как поисковый запрос
|
||||||
|
query = req.Prompt
|
||||||
|
if len(query) > 200 {
|
||||||
|
query = query[:200]
|
||||||
|
}
|
||||||
|
log.Printf("web_search: query empty in tool_call, using user prompt (first 200 chars)")
|
||||||
|
}
|
||||||
|
log.Printf("tavily search: query=%q", query)
|
||||||
|
searchResult, err := h.Tavily.Search(query)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("tavily search error: %v", err)
|
||||||
|
searchResult = "search failed: " + err.Error()
|
||||||
|
} else {
|
||||||
|
log.Printf("tavily search ok: %d bytes", len(searchResult))
|
||||||
|
}
|
||||||
|
messages = append(messages, ollama.ChatMessage{
|
||||||
|
Role: "tool", ToolName: "web_search", Content: searchResult,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Too many tool rounds
|
||||||
|
sendError(w, "too many tool-call rounds", http.StatusBadGateway)
|
||||||
|
}
|
||||||
|
|
||||||
|
func sendError(w http.ResponseWriter, msg string, code int) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(code)
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]string{"error": msg})
|
||||||
|
}
|
||||||
|
|
||||||
|
func escapeJSONString(s string) string {
|
||||||
|
b, _ := json.Marshal(s)
|
||||||
|
return string(b[1 : len(b)-1])
|
||||||
|
}
|
||||||
17
play-life-llm/internal/handler/health.go
Normal file
17
play-life-llm/internal/handler/health.go
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Health returns 200 with {"status": "ok"} for Docker healthcheck.
|
||||||
|
func Health(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodGet {
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(http.StatusOK)
|
||||||
|
_ = json.NewEncoder(w).Encode(map[string]string{"status": "ok"})
|
||||||
|
}
|
||||||
148
play-life-llm/internal/ollama/client.go
Normal file
148
play-life-llm/internal/ollama/client.go
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
package ollama
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
const defaultTimeout = 10 * time.Minute
|
||||||
|
|
||||||
|
// Client calls Ollama /api/chat.
|
||||||
|
type Client struct {
|
||||||
|
BaseURL string
|
||||||
|
HTTPClient *http.Client
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewClient creates an Ollama client. baseURL is e.g. "http://localhost:11434".
|
||||||
|
func NewClient(baseURL string) *Client {
|
||||||
|
return &Client{
|
||||||
|
BaseURL: baseURL,
|
||||||
|
HTTPClient: &http.Client{
|
||||||
|
Timeout: defaultTimeout,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChatRequest matches Ollama POST /api/chat body.
|
||||||
|
type ChatRequest struct {
|
||||||
|
Model string `json:"model"`
|
||||||
|
Messages []ChatMessage `json:"messages"`
|
||||||
|
Stream bool `json:"stream"`
|
||||||
|
Format interface{} `json:"format,omitempty"` // "json" or JSON schema object
|
||||||
|
Tools []Tool `json:"tools,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChatMessage is one message in the conversation.
|
||||||
|
type ChatMessage struct {
|
||||||
|
Role string `json:"role"` // "user", "assistant", "system", "tool"
|
||||||
|
Content string `json:"content,omitempty"`
|
||||||
|
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
|
||||||
|
ToolName string `json:"tool_name,omitempty"` // for role "tool"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tool defines a function the model may call.
|
||||||
|
type Tool struct {
|
||||||
|
Type string `json:"type"`
|
||||||
|
Function ToolFunc `json:"function"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToolFunc describes the function.
|
||||||
|
type ToolFunc struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Description string `json:"description"`
|
||||||
|
Parameters interface{} `json:"parameters"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToolCall is a model request to call a tool.
|
||||||
|
type ToolCall struct {
|
||||||
|
Type string `json:"type"`
|
||||||
|
Function ToolCallFn `json:"function"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToolCallFn holds name and arguments.
|
||||||
|
// Arguments may come from Ollama as a JSON object or as a JSON string.
|
||||||
|
type ToolCallFn struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Arguments interface{} `json:"arguments"` // object or string
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueryFromToolCall returns the "query" argument from a web_search tool call.
|
||||||
|
// Ollama may send arguments as a map or as a JSON string.
|
||||||
|
func QueryFromToolCall(tc ToolCall) string {
|
||||||
|
switch v := tc.Function.Arguments.(type) {
|
||||||
|
case map[string]interface{}:
|
||||||
|
if q, _ := v["query"].(string); q != "" {
|
||||||
|
return q
|
||||||
|
}
|
||||||
|
case string:
|
||||||
|
var m map[string]interface{}
|
||||||
|
if json.Unmarshal([]byte(v), &m) == nil {
|
||||||
|
if q, _ := m["query"].(string); q != "" {
|
||||||
|
return q
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChatResponse is the Ollama /api/chat response.
|
||||||
|
type ChatResponse struct {
|
||||||
|
Message ChatMessage `json:"message"`
|
||||||
|
Done bool `json:"done"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Chat sends a chat request and returns the response.
|
||||||
|
func (c *Client) Chat(req *ChatRequest) (*ChatResponse, error) {
|
||||||
|
body, err := json.Marshal(req)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("marshal request: %w", err)
|
||||||
|
}
|
||||||
|
url := c.BaseURL + "/api/chat"
|
||||||
|
httpReq, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("new request: %w", err)
|
||||||
|
}
|
||||||
|
httpReq.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
resp, err := c.HTTPClient.Do(httpReq)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("do request: %w", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
b, _ := io.ReadAll(resp.Body)
|
||||||
|
return nil, fmt.Errorf("ollama returned %d: %s", resp.StatusCode, string(b))
|
||||||
|
}
|
||||||
|
|
||||||
|
var out ChatResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
|
||||||
|
return nil, fmt.Errorf("decode response: %w", err)
|
||||||
|
}
|
||||||
|
return &out, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WebSearchTool returns the tool definition for web_search (Tavily).
|
||||||
|
func WebSearchTool() Tool {
|
||||||
|
return Tool{
|
||||||
|
Type: "function",
|
||||||
|
Function: ToolFunc{
|
||||||
|
Name: "web_search",
|
||||||
|
Description: "Search the web for current information. Use when you need up-to-date or factual information from the internet.",
|
||||||
|
Parameters: map[string]interface{}{
|
||||||
|
"type": "object",
|
||||||
|
"properties": map[string]interface{}{
|
||||||
|
"query": map[string]interface{}{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Search query",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": []string{"query"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
35
play-life-llm/internal/server/server.go
Normal file
35
play-life-llm/internal/server/server.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package server
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"play-life-llm/internal/handler"
|
||||||
|
"play-life-llm/internal/ollama"
|
||||||
|
"play-life-llm/internal/tavily"
|
||||||
|
|
||||||
|
"github.com/gorilla/mux"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Config holds server and client configuration.
type Config struct {
	// OllamaHost is the base URL of the Ollama server, passed to ollama.NewClient.
	OllamaHost string
	// TavilyAPIKey authenticates requests to the Tavily search API.
	TavilyAPIKey string
	// DefaultModel is the Ollama model name handed to the ask handler.
	DefaultModel string
}
|
||||||
|
|
||||||
|
// NewRouter returns an HTTP router with /health and /ask registered.
|
||||||
|
func NewRouter(cfg Config) http.Handler {
|
||||||
|
ollamaClient := ollama.NewClient(cfg.OllamaHost)
|
||||||
|
tavilyClient := tavily.NewClient(cfg.TavilyAPIKey)
|
||||||
|
|
||||||
|
askHandler := &handler.AskHandler{
|
||||||
|
Ollama: ollamaClient,
|
||||||
|
Tavily: tavilyClient,
|
||||||
|
DefaultModel: cfg.DefaultModel,
|
||||||
|
}
|
||||||
|
|
||||||
|
r := mux.NewRouter()
|
||||||
|
r.HandleFunc("/health", handler.Health).Methods(http.MethodGet)
|
||||||
|
r.Handle("/ask", askHandler).Methods(http.MethodPost)
|
||||||
|
return r
|
||||||
|
}
|
||||||
104
play-life-llm/internal/tavily/client.go
Normal file
104
play-life-llm/internal/tavily/client.go
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
package tavily
|
||||||
|
|
||||||
|
import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
||||||
|
|
||||||
|
const (
	// baseURL is the root of the Tavily API.
	baseURL = "https://api.tavily.com"
	// searchPath is the search endpoint path, appended to baseURL.
	searchPath = "/search"
	// timeout bounds each HTTP request made by the client (see NewClient).
	timeout = 30 * time.Second
)
|
||||||
|
|
||||||
|
// Client calls Tavily Search API.
type Client struct {
	// APIKey is sent as a Bearer token; Search fails if it is empty.
	APIKey string
	// HTTPClient performs the requests; NewClient sets one with a timeout.
	HTTPClient *http.Client
}
|
||||||
|
|
||||||
|
// NewClient creates a Tavily client. apiKey is required for search.
|
||||||
|
func NewClient(apiKey string) *Client {
|
||||||
|
return &Client{
|
||||||
|
APIKey: apiKey,
|
||||||
|
HTTPClient: &http.Client{
|
||||||
|
Timeout: timeout,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchRequest is the POST body for /search.
type SearchRequest struct {
	// Query is the search query text.
	Query string `json:"query"`
	SearchDepth string `json:"search_depth,omitempty"` // basic, advanced, etc.
	// MaxResults caps how many results Tavily returns (Search sends 5).
	MaxResults int `json:"max_results,omitempty"`
}
|
||||||
|
|
||||||
|
// SearchResult is one result item.
type SearchResult struct {
	Title   string `json:"title"`   // result page title
	URL     string `json:"url"`     // result page URL
	Content string `json:"content"` // text content returned by Tavily for this result
}
|
||||||
|
|
||||||
|
// SearchResponse is the Tavily search response.
type SearchResponse struct {
	// Query echoes the request query.
	Query string `json:"query"`
	// Answer, when non-empty, is preferred over Results by Search.
	Answer string `json:"answer,omitempty"`
	// Results holds the individual search hits.
	Results []SearchResult `json:"results"`
}
|
||||||
|
|
||||||
|
// Search runs a web search and returns a single text suitable for passing to Ollama as tool result.
|
||||||
|
func (c *Client) Search(query string) (string, error) {
|
||||||
|
if c.APIKey == "" {
|
||||||
|
return "", fmt.Errorf("tavily: API key not set")
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(SearchRequest{
|
||||||
|
Query: query,
|
||||||
|
MaxResults: 5,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("marshal request: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
url := baseURL + searchPath
|
||||||
|
req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("new request: %w", err)
|
||||||
|
}
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||||
|
|
||||||
|
resp, err := c.HTTPClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("do request: %w", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return "", fmt.Errorf("tavily returned %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
var out SearchResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
|
||||||
|
return "", fmt.Errorf("decode response: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a single text for the model: prefer answer if present, else concatenate results.
|
||||||
|
if out.Answer != "" {
|
||||||
|
return out.Answer, nil
|
||||||
|
}
|
||||||
|
var b bytes.Buffer
|
||||||
|
for i, r := range out.Results {
|
||||||
|
if i > 0 {
|
||||||
|
b.WriteString("\n\n")
|
||||||
|
}
|
||||||
|
b.WriteString(r.Title)
|
||||||
|
b.WriteString(": ")
|
||||||
|
b.WriteString(r.Content)
|
||||||
|
}
|
||||||
|
return b.String(), nil
|
||||||
|
}
|
||||||
36
play-life-llm/main.go
Normal file
36
play-life-llm/main.go
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"play-life-llm/internal/server"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
ollamaHost := getEnv("OLLAMA_HOST", "http://localhost:11434")
|
||||||
|
tavilyAPIKey := getEnv("TAVILY_API_KEY", "")
|
||||||
|
port := getEnv("PORT", "8090")
|
||||||
|
defaultModel := getEnv("OLLAMA_MODEL", "llama3.1:70b")
|
||||||
|
|
||||||
|
cfg := server.Config{
|
||||||
|
OllamaHost: ollamaHost,
|
||||||
|
TavilyAPIKey: tavilyAPIKey,
|
||||||
|
DefaultModel: defaultModel,
|
||||||
|
}
|
||||||
|
router := server.NewRouter(cfg)
|
||||||
|
|
||||||
|
addr := ":" + port
|
||||||
|
log.Printf("play-life-llm listening on %s", addr)
|
||||||
|
if err := http.ListenAndServe(addr, router); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getEnv(key, defaultVal string) string {
|
||||||
|
if v := os.Getenv(key); v != "" {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
return defaultVal
|
||||||
|
}
|
||||||
@@ -12,6 +12,7 @@
|
|||||||
<meta name="apple-mobile-web-app-capable" content="yes" />
|
<meta name="apple-mobile-web-app-capable" content="yes" />
|
||||||
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent" />
|
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent" />
|
||||||
<meta name="apple-mobile-web-app-title" content="PlayLife" />
|
<meta name="apple-mobile-web-app-title" content="PlayLife" />
|
||||||
|
<meta name="screen-orientation" content="portrait" />
|
||||||
<meta name="description" content="Трекер продуктивности и изучения слов" />
|
<meta name="description" content="Трекер продуктивности и изучения слов" />
|
||||||
|
|
||||||
<title>PlayLife</title>
|
<title>PlayLife</title>
|
||||||
|
|||||||
@@ -23,8 +23,34 @@ server {
|
|||||||
proxy_cache_bypass $http_upgrade;
|
proxy_cache_bypass $http_upgrade;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Proxy admin panel to backend (must be before location /)
|
||||||
|
location ^~ /admin {
|
||||||
|
proxy_pass http://backend:8080;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection 'upgrade';
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Proxy project endpoints to backend (must be before location /)
|
||||||
|
location ^~ /project/ {
|
||||||
|
proxy_pass http://backend:8080;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection 'upgrade';
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
}
|
||||||
|
|
||||||
# Proxy other API endpoints to backend
|
# Proxy other API endpoints to backend
|
||||||
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|project/priority|project/move|project/delete|project/create|message/post|webhook/|weekly_goals/setup|admin|admin\.html)$ {
|
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|webhook/|weekly_goals/setup|project_score_sample_mv/refresh|priorities/confirm)$ {
|
||||||
proxy_pass http://backend:8080;
|
proxy_pass http://backend:8080;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
@@ -60,6 +86,17 @@ server {
|
|||||||
add_header Cache-Control "public, immutable";
|
add_header Cache-Control "public, immutable";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Static HTML pages (Terms and Privacy)
|
||||||
|
location = /terms {
|
||||||
|
try_files /terms.html =404;
|
||||||
|
add_header Cache-Control "public, max-age=3600";
|
||||||
|
}
|
||||||
|
|
||||||
|
location = /privacy {
|
||||||
|
try_files /privacy.html =404;
|
||||||
|
add_header Cache-Control "public, max-age=3600";
|
||||||
|
}
|
||||||
|
|
||||||
# Handle React Router (SPA)
|
# Handle React Router (SPA)
|
||||||
location / {
|
location / {
|
||||||
try_files $uri $uri/ /index.html;
|
try_files $uri $uri/ /index.html;
|
||||||
|
|||||||
48
play-life-web/package-lock.json
generated
48
play-life-web/package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "play-life-web",
|
"name": "play-life-web",
|
||||||
"version": "3.28.1",
|
"version": "4.17.1",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "play-life-web",
|
"name": "play-life-web",
|
||||||
"version": "3.28.1",
|
"version": "4.17.1",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@dnd-kit/core": "^6.3.1",
|
"@dnd-kit/core": "^6.3.1",
|
||||||
"@dnd-kit/sortable": "^10.0.0",
|
"@dnd-kit/sortable": "^10.0.0",
|
||||||
@@ -15,6 +15,7 @@
|
|||||||
"react": "^18.2.0",
|
"react": "^18.2.0",
|
||||||
"react-chartjs-2": "^5.2.0",
|
"react-chartjs-2": "^5.2.0",
|
||||||
"react-circular-progressbar": "^2.2.0",
|
"react-circular-progressbar": "^2.2.0",
|
||||||
|
"react-day-picker": "^9.13.0",
|
||||||
"react-dom": "^18.2.0",
|
"react-dom": "^18.2.0",
|
||||||
"react-easy-crop": "^5.5.6"
|
"react-easy-crop": "^5.5.6"
|
||||||
},
|
},
|
||||||
@@ -1618,6 +1619,12 @@
|
|||||||
"node": ">=6.9.0"
|
"node": ">=6.9.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@date-fns/tz": {
|
||||||
|
"version": "1.4.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@date-fns/tz/-/tz-1.4.1.tgz",
|
||||||
|
"integrity": "sha512-P5LUNhtbj6YfI3iJjw5EL9eUAG6OitD0W3fWQcpQjDRc/QIsL0tRNuO1PcDvPccWL1fSTXXdE1ds+l95DV/OFA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@dnd-kit/accessibility": {
|
"node_modules/@dnd-kit/accessibility": {
|
||||||
"version": "3.1.1",
|
"version": "3.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz",
|
||||||
@@ -3871,6 +3878,22 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/date-fns": {
|
||||||
|
"version": "4.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz",
|
||||||
|
"integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/kossnocorp"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/date-fns-jalali": {
|
||||||
|
"version": "4.1.0-0",
|
||||||
|
"resolved": "https://registry.npmjs.org/date-fns-jalali/-/date-fns-jalali-4.1.0-0.tgz",
|
||||||
|
"integrity": "sha512-hTIP/z+t+qKwBDcmmsnmjWTduxCg+5KfdqWQvb2X/8C9+knYY6epN/pfxdDuyVlSVeFz0sM5eEfwIUQ70U4ckg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/debug": {
|
"node_modules/debug": {
|
||||||
"version": "4.4.3",
|
"version": "4.4.3",
|
||||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
||||||
@@ -5923,6 +5946,27 @@
|
|||||||
"react": ">=0.14.0"
|
"react": ">=0.14.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/react-day-picker": {
|
||||||
|
"version": "9.13.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-day-picker/-/react-day-picker-9.13.0.tgz",
|
||||||
|
"integrity": "sha512-euzj5Hlq+lOHqI53NiuNhCP8HWgsPf/bBAVijR50hNaY1XwjKjShAnIe8jm8RD2W9IJUvihDIZ+KrmqfFzNhFQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@date-fns/tz": "^1.4.1",
|
||||||
|
"date-fns": "^4.1.0",
|
||||||
|
"date-fns-jalali": "^4.1.0-0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "individual",
|
||||||
|
"url": "https://github.com/sponsors/gpbl"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": ">=16.8.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/react-dom": {
|
"node_modules/react-dom": {
|
||||||
"version": "18.3.1",
|
"version": "18.3.1",
|
||||||
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz",
|
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz",
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user