From 4f8a7933777da39d7d56e5d8e7ead2988df7bfa1 Mon Sep 17 00:00:00 2001 From: poignatov Date: Mon, 29 Dec 2025 20:01:55 +0300 Subject: [PATCH] Initial commit --- .DS_Store | Bin 0 -> 6148 bytes .dockerignore | 9 + .gitignore | 6 + BUILD_INSTRUCTIONS.md | 81 + Dockerfile | 58 + ENV_SETUP.md | 297 ++ build-and-save.ps1 | 25 + build-and-save.sh | 26 + docker-compose.yml | 66 + env.example | 61 + nginx-unified.conf | 76 + nginx.conf | 29 + play-life-backend/.gitignore | 34 + play-life-backend/Dockerfile | 57 + play-life-backend/ENV_SETUP.md | 76 + play-life-backend/admin.html | 358 ++ play-life-backend/docker-compose.yml | 43 + play-life-backend/env.example | 17 + play-life-backend/go.mod | 14 + play-life-backend/go.sum | 8 + play-life-backend/main.go | 3672 +++++++++++++++++ .../migrations/001_create_schema.sql | 105 + .../migrations/002_add_dictionaries.sql | 53 + .../003_remove_words_unique_constraint.sql | 11 + .../004_add_config_dictionaries.sql | 21 + .../migrations/005_fix_weekly_report_mv.sql | 29 + play-life-backend/migrations/README.md | 81 + play-life-backend/start_backend.sh | 20 + play-life-web/.dockerignore | 12 + play-life-web/.gitignore | 30 + play-life-web/Dockerfile | 30 + play-life-web/README.md | 105 + play-life-web/build-and-save.sh | 42 + play-life-web/build-docker-image.sh | 29 + play-life-web/docker-compose.yml | 21 + play-life-web/env.example | 6 + play-life-web/index.html | 14 + play-life-web/nginx.conf | 50 + play-life-web/package-lock.json | 2706 ++++++++++++ play-life-web/package.json | 28 + play-life-web/postcss.config.js | 7 + play-life-web/src/App.jsx | 531 +++ play-life-web/src/components/AddConfig.css | 222 + play-life-web/src/components/AddConfig.jsx | 344 ++ play-life-web/src/components/AddWords.css | 106 + play-life-web/src/components/AddWords.jsx | 163 + play-life-web/src/components/CurrentWeek.jsx | 200 + .../src/components/FullStatistics.jsx | 289 ++ .../src/components/ProjectPriorityManager.jsx | 724 ++++ .../src/components/ProjectProgressBar.jsx | 158 + .../src/components/TestConfigSelection.css | 347 ++ .../src/components/TestConfigSelection.jsx | 278 ++ play-life-web/src/components/TestWords.css | 485 +++ play-life-web/src/components/TestWords.jsx | 490 +++ .../src/components/WeekProgressChart.jsx | 160 + play-life-web/src/components/WordList.css | 248 ++ play-life-web/src/components/WordList.jsx | 246 ++ play-life-web/src/index.css | 48 + play-life-web/src/main.jsx | 11 + play-life-web/src/utils/projectUtils.js | 104 + play-life-web/tailwind.config.js | 12 + play-life-web/vite.config.js | 51 + supervisord.conf | 25 + 63 files changed, 13655 insertions(+) create mode 100644 .DS_Store create mode 100644 .dockerignore create mode 100644 .gitignore create mode 100644 BUILD_INSTRUCTIONS.md create mode 100644 Dockerfile create mode 100644 ENV_SETUP.md create mode 100644 build-and-save.ps1 create mode 100644 build-and-save.sh create mode 100644 docker-compose.yml create mode 100644 env.example create mode 100644 nginx-unified.conf create mode 100644 nginx.conf create mode 100644 play-life-backend/.gitignore create mode 100644 play-life-backend/Dockerfile create mode 100644 play-life-backend/ENV_SETUP.md create mode 100644 play-life-backend/admin.html create mode 100644 play-life-backend/docker-compose.yml create mode 100644 play-life-backend/env.example create mode 100644 play-life-backend/go.mod create mode 100644 play-life-backend/go.sum create mode 100644 play-life-backend/main.go create mode 100644 play-life-backend/migrations/001_create_schema.sql 
create mode 100644 play-life-backend/migrations/002_add_dictionaries.sql create mode 100644 play-life-backend/migrations/003_remove_words_unique_constraint.sql create mode 100644 play-life-backend/migrations/004_add_config_dictionaries.sql create mode 100644 play-life-backend/migrations/005_fix_weekly_report_mv.sql create mode 100644 play-life-backend/migrations/README.md create mode 100644 play-life-backend/start_backend.sh create mode 100644 play-life-web/.dockerignore create mode 100644 play-life-web/.gitignore create mode 100644 play-life-web/Dockerfile create mode 100644 play-life-web/README.md create mode 100644 play-life-web/build-and-save.sh create mode 100644 play-life-web/build-docker-image.sh create mode 100644 play-life-web/docker-compose.yml create mode 100644 play-life-web/env.example create mode 100644 play-life-web/index.html create mode 100644 play-life-web/nginx.conf create mode 100644 play-life-web/package-lock.json create mode 100644 play-life-web/package.json create mode 100644 play-life-web/postcss.config.js create mode 100644 play-life-web/src/App.jsx create mode 100644 play-life-web/src/components/AddConfig.css create mode 100644 play-life-web/src/components/AddConfig.jsx create mode 100644 play-life-web/src/components/AddWords.css create mode 100644 play-life-web/src/components/AddWords.jsx create mode 100644 play-life-web/src/components/CurrentWeek.jsx create mode 100644 play-life-web/src/components/FullStatistics.jsx create mode 100644 play-life-web/src/components/ProjectPriorityManager.jsx create mode 100644 play-life-web/src/components/ProjectProgressBar.jsx create mode 100644 play-life-web/src/components/TestConfigSelection.css create mode 100644 play-life-web/src/components/TestConfigSelection.jsx create mode 100644 play-life-web/src/components/TestWords.css create mode 100644 play-life-web/src/components/TestWords.jsx create mode 100644 play-life-web/src/components/WeekProgressChart.jsx create mode 100644 play-life-web/src/components/WordList.css create mode 100644 play-life-web/src/components/WordList.jsx create mode 100644 play-life-web/src/index.css create mode 100644 play-life-web/src/main.jsx create mode 100644 play-life-web/src/utils/projectUtils.js create mode 100644 play-life-web/tailwind.config.js create mode 100644 play-life-web/vite.config.js create mode 100644 supervisord.conf diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..f91544d947a477ef74a973aebaa7d33eff8d1655 GIT binary patch literal 6148 zcmeHK&2G~`5T0!V*$`FA0i+%+_1Z#0DiGpg(s1Bf^K$?c>^ig-j@OD4tSXA+GrR)t zz_oACH{d0B1tj=(cOs|)LAf9l?MSoVcy~VA^I5y=B_h$A#vP&-5lOhl+6^?nF+Hw* z$+ozs4ix4d2b5AmZ)r#eOWCf&Dc}_N-xSbucZYJ!+mOyNYk$t)60LKkV*%hn?;|I*KePS%SV4>#A`D0vvuC+eK5{am7U@(77F}QiVHVh0siW;x$%{{0-XX*fprzo z>w^c^=rdLp&C-F!907nebQ?pge-t#wWAqs-ix`0kO9fi0;3I~xbo6_i=QCCoEu91( zJ_J8m@Cikjr{n&fNhjf3bgfgsDR5PRZF||$`~TxVpZ~9t+?7+nDX>urh+04B_fQh= ztxLt}z1D-jfos!wl|@5A!`rb5^j5qJH-<4!2SA^(vWOm-`w=iQxW*~)PZjt9qYH-F literal 0 HcmV?d00001 diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..d41e1bf --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +# Игнорируем node_modules при копировании +play-life-web/node_modules +play-life-web/dist +play-life-web/.git +play-life-backend/.git +*.md +.git +.gitignore + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b122779 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +.env +*.log +main +dist/ +node_modules/ +*.tar diff --git a/BUILD_INSTRUCTIONS.md b/BUILD_INSTRUCTIONS.md 
new file mode 100644 index 0000000..b1d3f16 --- /dev/null +++ b/BUILD_INSTRUCTIONS.md @@ -0,0 +1,81 @@ +# Инструкция по сборке единого Docker образа + +Этот проект содержит единый Dockerfile для сборки frontend и backend в один образ. + +## Структура + +- `Dockerfile` - единый Dockerfile для сборки frontend и backend +- `nginx-unified.conf` - конфигурация nginx для единого образа +- `supervisord.conf` - конфигурация supervisor для запуска nginx и backend +- `build-and-save.sh` - скрипт для сборки и сохранения в tar (Linux/Mac) +- `build-and-save.ps1` - скрипт для сборки и сохранения в tar (Windows PowerShell) + +## Сборка образа + +### Linux/Mac: +```bash +./build-and-save.sh +``` + +### Windows PowerShell: +```powershell +.\build-and-save.ps1 +``` + +### Вручную: +```bash +# Сборка образа +docker build -t play-life-unified:latest . + +# Сохранение в tar +docker save play-life-unified:latest -o play-life-unified.tar +``` + +## Загрузка образа на другой машине + +```bash +docker load -i play-life-unified.tar +``` + +## Запуск контейнера + +```bash +docker run -d \ + -p 80:80 \ + --env-file .env \ + --name play-life \ + play-life-unified:latest +``` + +## Переменные окружения + +Создайте файл `.env` на основе `env.example` с необходимыми переменными: + +- `DB_HOST` - хост базы данных (по умолчанию: localhost) +- `DB_PORT` - порт базы данных (по умолчанию: 5432) +- `DB_USER` - пользователь БД +- `DB_PASSWORD` - пароль БД +- `DB_NAME` - имя БД +- `TELEGRAM_BOT_TOKEN` - токен Telegram бота (опционально) +- `TELEGRAM_CHAT_ID` - ID чата Telegram (опционально) +- `TELEGRAM_WEBHOOK_BASE_URL` - базовый URL для webhook (опционально) +- `TODOIST_WEBHOOK_SECRET` - секрет для Todoist webhook (опционально) + +**Важно:** Backend внутри контейнера всегда работает на порту 8080. Nginx проксирует запросы с порта 80 на backend. + +## Проверка работы + +После запуска контейнера: + +- Frontend доступен по адресу: `http://localhost` +- API доступен через nginx: `http://localhost/api/...` +- Admin панель: `http://localhost/admin.html` + +## Логи + +Логи доступны через supervisor: +```bash +docker exec play-life cat /var/log/supervisor/backend.out.log +docker exec play-life cat /var/log/supervisor/nginx.out.log +``` + diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..2cdb0dd --- /dev/null +++ b/Dockerfile @@ -0,0 +1,58 @@ +# Multi-stage build для единого образа frontend + backend + +# Stage 1: Build Frontend +FROM node:20-alpine AS frontend-builder +WORKDIR /app/frontend +COPY play-life-web/package*.json ./ +RUN npm ci +# Копируем исходники (node_modules исключены через .dockerignore) +COPY play-life-web/ . +RUN npm run build + +# Stage 2: Build Backend +FROM golang:1.21-alpine AS backend-builder +WORKDIR /app/backend +COPY play-life-backend/go.mod play-life-backend/go.sum ./ +RUN go mod download +COPY play-life-backend/ . +RUN go mod tidy +RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o main . 
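+ # Note: CGO_ENABLED=0 produces a statically linked binary, so it can run in the minimal alpine-based final stage without glibc.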
+ +# Stage 3: Final image +FROM alpine:latest + +# Устанавливаем необходимые пакеты +RUN apk --no-cache add \ + ca-certificates \ + nginx \ + supervisor \ + curl + +# Создаем директории +WORKDIR /app + +# Копируем собранный frontend +COPY --from=frontend-builder /app/frontend/dist /usr/share/nginx/html + +# Копируем собранный backend +COPY --from=backend-builder /app/backend/main /app/backend/main +COPY play-life-backend/admin.html /app/backend/admin.html + +# Копируем конфигурацию nginx +COPY nginx.conf /etc/nginx/nginx.conf +COPY nginx-unified.conf /etc/nginx/conf.d/default.conf + +# Копируем конфигурацию supervisor для запуска backend +COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf + +# Создаем директории для логов +RUN mkdir -p /var/log/supervisor && \ + mkdir -p /var/log/nginx && \ + mkdir -p /var/run + +# Открываем порт 80 +EXPOSE 80 + +# Запускаем supervisor, который запустит nginx и backend +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] + diff --git a/ENV_SETUP.md b/ENV_SETUP.md new file mode 100644 index 0000000..81c17e7 --- /dev/null +++ b/ENV_SETUP.md @@ -0,0 +1,297 @@ +# Настройка единого .env файла + +Все приложения проекта используют единый файл `.env` в корне проекта. + +## Быстрый старт + +1. Скопируйте файл `.env.example` в `.env`: + ```bash + cp .env.example .env + ``` + +2. Отредактируйте `.env` и укажите свои значения: + ```bash + nano .env + # или + vim .env + ``` + +3. **ВАЖНО**: Файл `.env` уже добавлен в `.gitignore` и не будет попадать в git. + +## Структура переменных окружения + +### Database Configuration +- `DB_HOST` - хост базы данных (по умолчанию: localhost) +- `DB_PORT` - порт базы данных (по умолчанию: 5432) +- `DB_USER` - пользователь БД (по умолчанию: playeng) +- `DB_PASSWORD` - пароль БД (по умолчанию: playeng) +- `DB_NAME` - имя БД (по умолчанию: playeng) + +### Backend Server Configuration +- `PORT` - порт бэкенд сервера (по умолчанию: 8080) + - В production всегда используется порт 8080 внутри контейнера + - Nginx автоматически проксирует запросы к `http://backend:8080` + +### Frontend Configuration (play-life-web) +- `VITE_PORT` - порт для dev-сервера Vite (по умолчанию: 3000) +- `WEB_PORT` - порт для production контейнера (по умолчанию: 3001) + +**Примечание:** API запросы автоматически проксируются к бэкенду. В development режиме Vite проксирует запросы к `http://localhost:8080`. В production nginx проксирует запросы к бэкенд контейнеру. Не требуется настройка `VITE_API_BASE_URL`. + +### Telegram Bot Configuration (опционально) +- `TELEGRAM_BOT_TOKEN` - токен бота от @BotFather +- `TELEGRAM_CHAT_ID` - ID чата для отправки сообщений +- `TELEGRAM_WEBHOOK_BASE_URL` - базовый URL для автоматической настройки webhook. Webhook будет настроен автоматически при старте сервера на `/webhook/telegram`. Если не указан, webhook нужно настраивать вручную. + + **Примеры значений:** + - Production с HTTPS: `https://your-domain.com` (порт не нужен для стандартных 80/443) + - Локальная разработка с ngrok: `https://abc123.ngrok.io` (порт не нужен) + - Прямой доступ на нестандартном порту: `http://your-server:8080` (порт обязателен) + +### Todoist Webhook Configuration (опционально) +- `TODOIST_WEBHOOK_SECRET` - секрет для проверки подлинности webhook от Todoist (если задан, все запросы должны содержать заголовок `X-Todoist-Webhook-Secret` с этим значением) + +## Настройка интеграции с Todoist + +Интеграция с Todoist позволяет автоматически обрабатывать закрытые задачи и добавлять их в базу данных play-life. 
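The sections below describe the node format `**[Project][+/-][Score]**`. As an illustration only (this is not the backend's actual `processMessage` implementation), nodes of that shape could be pulled out with a small regex; the `node` struct and `parseNodes` helper here are hypothetical stand-ins:

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// node is a hypothetical stand-in for the backend's ProcessedNode: a project name plus a signed score.
type node struct {
	Project string
	Score   float64
}

// nodeRe matches the documented format, e.g. **[Work]+5.5** or **[Health]-2.0**.
var nodeRe = regexp.MustCompile(`\*\*\[([^\]]+)\]([+-])(\d+(?:\.\d+)?)\*\*`)

// parseNodes scans the combined "title\ndescription" text and returns every node it finds.
func parseNodes(text string) []node {
	var nodes []node
	for _, m := range nodeRe.FindAllStringSubmatch(text, -1) {
		score, err := strconv.ParseFloat(m[3], 64)
		if err != nil {
			continue
		}
		if m[2] == "-" {
			score = -score
		}
		nodes = append(nodes, node{Project: m[1], Score: score})
	}
	return nodes
}

func main() {
	// Example from the testing section: title "Test task", description "**[TestProject]+10.0**".
	fmt.Println(parseNodes("Test task\n**[TestProject]+10.0**"))
}
```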
+ +### Как это работает + +1. При закрытии задачи в Todoist отправляется webhook на ваш сервер +2. Сервер извлекает `title` (content) и `description` из закрытой задачи +3. Склеивает их в один текст: `title + "\n" + description` +4. Обрабатывает текст через существующую логику `processMessage`, которая: + - Парсит ноды в формате `**[Project][+/-][Score]**` + - Сохраняет данные в базу данных + - Отправляет уведомление в Telegram (если настроено) + +### Настройка webhook в Todoist + +1. Откройте настройки Todoist: https://todoist.com/app/settings/integrations +2. Перейдите в раздел "Webhooks" или "Integrations" +3. Создайте новый webhook: + - **URL**: `http://your-server:8080/webhook/todoist` + - Для локальной разработки: `http://localhost:8080/webhook/todoist` + - Для production: укажите публичный URL вашего сервера + - **Event**: выберите `item:completed` (закрытие задачи) +4. Сохраните webhook + +### Безопасность (опционально) + +Для защиты webhook от несанкционированного доступа: + +1. Установите секрет в `.env`: + ```bash + TODOIST_WEBHOOK_SECRET=your_secret_key_here + ``` + +2. Настройте Todoist для отправки секрета в заголовке: + - В настройках webhook добавьте заголовок: `X-Todoist-Webhook-Secret: your_secret_key_here` + - Или используйте встроенные механизмы безопасности Todoist, если они доступны + +**Примечание**: Если `TODOIST_WEBHOOK_SECRET` не задан, проверка секрета не выполняется. + +### Формат задач в Todoist + +Для корректной обработки задачи должны содержать ноды в формате: +``` +**[ProjectName][+/-][Score]** +``` + +Примеры: +- `**[Work]+5.5**` - добавить 5.5 баллов к проекту "Work" +- `**[Health]-2.0**` - вычесть 2.0 баллов из проекта "Health" + +Ноды можно размещать как в `title` (content), так и в `description` задачи. Они будут обработаны при закрытии задачи. + +### Тестирование + +Для тестирования интеграции: + +1. Создайте задачу в Todoist с нодами, например: + - Title: `Test task` + - Description: `**[TestProject]+10.0**` + +2. Закройте задачу в Todoist + +3. Проверьте логи сервера - должно появиться сообщение: + ``` + Processing Todoist task: title='Test task', description='**[TestProject]+10.0**' + Successfully processed Todoist task, found 1 nodes + ``` + +4. Проверьте базу данных или веб-интерфейс - данные должны быть добавлены + + +## Использование + +### Локальная разработка + +Все приложения автоматически читают переменные из корневого `.env` файла: + +- **play-life-backend**: читает из `../.env` и `.env` (локальный имеет приоритет) +- **play-life-web**: читает из `../.env` и `.env` (локальный имеет приоритет) + +### Docker Compose + +Для запуска всех приложений в одном образе используйте корневой `docker-compose.yml`: + +```bash +docker-compose up --build +``` + +Все сервисы автоматически загружают переменные из корневого `.env` файла. + +### Отдельные приложения + +Если нужно запустить отдельные приложения, они также будут использовать корневой `.env`: + +```bash +# Backend +cd play-life-backend +docker-compose up + +# Frontend +cd play-life-web +docker-compose up +``` + +## Приоритет переменных окружения + +1. Переменные окружения системы (высший приоритет) +2. Локальный `.env` в директории приложения +3. Корневой `.env` файл +4. 
Значения по умолчанию в коде + +## Примеры использования + +### Изменение порта базы данных + +```bash +# В .env +DB_PORT=5433 +``` + +### Изменение порта бэкенда + +```bash +# В .env +PORT=9090 +``` + +### Изменение порта фронтенда + +```bash +# В .env +VITE_PORT=4000 # для development +WEB_PORT=4001 # для production Docker контейнера +``` + +После изменения `.env` файла перезапустите соответствующие сервисы. + +## Настройка интеграции с Telegram (webhook для сообщений пользователя) + +Интеграция с Telegram позволяет автоматически обрабатывать сообщения, отправленные пользователем в чат бота, и добавлять их в базу данных play-life. + +### Как это работает + +1. Пользователь отправляет сообщение в чат с ботом в Telegram +2. Telegram отправляет webhook на ваш сервер с информацией о сообщении и entities (форматирование) +3. Сервер извлекает жирный текст из entities (type === 'bold') +4. Парсит жирный текст по формату `project+/-score` (без `**`) +5. Обрабатывает текст и сохраняет данные в базу данных +6. **НЕ отправляет сообщение обратно в Telegram** (в отличие от других интеграций) + +### Отличия от других интеграций + +- **Формат нод**: `project+/-score` (без `**`), например: `Work+5.5` или `Health-2.0` +- **Определение жирного текста**: через entities от Telegram, а не через markdown `**` +- **Без обратной отправки**: сообщение не отправляется обратно в Telegram + +### Настройка webhook в Telegram + +#### Автоматическая настройка (рекомендуется) + +1. Создайте бота через [@BotFather](https://t.me/botfather) в Telegram +2. Получите токен бота и добавьте его в `.env`: + ```bash + TELEGRAM_BOT_TOKEN=your_bot_token_here + TELEGRAM_CHAT_ID=123456789 + TELEGRAM_WEBHOOK_BASE_URL=https://your-domain.com + ``` + + **Важно о портах:** + - Если сервер доступен на стандартных портах (HTTP 80 или HTTPS 443), порт можно не указывать + - Если сервер работает на нестандартном порту и доступен напрямую, укажите порт: `http://your-server:8080` + - Если используется reverse proxy (nginx, etc.), указывайте внешний URL без порта: `https://your-domain.com` + +3. Запустите сервер - webhook будет настроен автоматически при старте! + + Для локальной разработки можно использовать ngrok или аналогичный сервис: + ```bash + # Установите ngrok: https://ngrok.com/ + ngrok http 8080 + # Используйте полученный URL в TELEGRAM_WEBHOOK_BASE_URL (без порта) + # Например: TELEGRAM_WEBHOOK_BASE_URL=https://abc123.ngrok.io + ``` + +4. Проверьте логи сервера - должно появиться сообщение: + ``` + Telegram webhook configured successfully: https://abc123.ngrok.io/webhook/telegram + ``` + +#### Ручная настройка (если не указан TELEGRAM_WEBHOOK_BASE_URL) + +Если вы не указали `TELEGRAM_WEBHOOK_BASE_URL`, webhook нужно настроить вручную: + +```bash +curl -X POST "https://api.telegram.org/bot/setWebhook" \ + -H "Content-Type: application/json" \ + -d '{ + "url": "http://your-server:8080/webhook/telegram" + }' +``` + +Проверьте, что webhook установлен: +```bash +curl "https://api.telegram.org/bot/getWebhookInfo" +``` + +### Формат сообщений в Telegram + +Для корректной обработки сообщения должны содержать жирный текст в формате: +``` +project+/-score +``` + +Примеры: +- `Work+5.5` (жирным) - добавить 5.5 баллов к проекту "Work" +- `Health-2.0` (жирным) - вычесть 2.0 баллов из проекта "Health" + +**Важно**: Текст должен быть выделен жирным шрифтом в Telegram (через форматирование сообщения, не через `**`). + +### Тестирование + +Для тестирования интеграции: + +1. Откройте чат с вашим ботом в Telegram +2. 
Отправьте сообщение с жирным текстом в формате `project+/-score`, например: + - Напишите: `Test message` + - Выделите `Work+10.0` жирным шрифтом (через форматирование) + - Отправьте сообщение + +3. Проверьте логи сервера - должно появиться сообщение: + ``` + Processing Telegram message: text='Test message', entities count=1 + Successfully processed Telegram message, found 1 nodes + ``` + +4. Проверьте базу данных или веб-интерфейс - данные должны быть добавлены + +### Примечания + +- Webhook должен быть доступен из интернета (для production используйте публичный URL) +- Для локальной разработки используйте ngrok или аналогичный сервис для туннелирования +- Сообщения обрабатываются только если содержат жирный текст в правильном формате +- Сообщения **не отправляются обратно** в Telegram (в отличие от других интеграций) + diff --git a/build-and-save.ps1 b/build-and-save.ps1 new file mode 100644 index 0000000..1f5d8f5 --- /dev/null +++ b/build-and-save.ps1 @@ -0,0 +1,25 @@ +# PowerShell скрипт для сборки единого Docker образа и сохранения в tar + +$ErrorActionPreference = "Stop" + +$IMAGE_NAME = "play-life-unified" +$IMAGE_TAG = if ($env:IMAGE_TAG) { $env:IMAGE_TAG } else { "latest" } +$TAR_FILE = if ($env:TAR_FILE) { $env:TAR_FILE } else { "play-life-unified.tar" } + +Write-Host "🔨 Сборка единого Docker образа..." -ForegroundColor Cyan +docker build -t "${IMAGE_NAME}:${IMAGE_TAG}" . + +Write-Host "💾 Сохранение образа в tar файл..." -ForegroundColor Cyan +docker save "${IMAGE_NAME}:${IMAGE_TAG}" -o "${TAR_FILE}" + +$fileSize = (Get-Item "${TAR_FILE}").Length / 1MB +Write-Host "✅ Образ успешно сохранен в ${TAR_FILE}" -ForegroundColor Green +Write-Host "📦 Размер файла: $([math]::Round($fileSize, 2)) MB" -ForegroundColor Green + +Write-Host "" +Write-Host "Для загрузки образа на другой машине используйте:" -ForegroundColor Yellow +Write-Host " docker load -i ${TAR_FILE}" -ForegroundColor White +Write-Host "" +Write-Host "Для запуска контейнера используйте:" -ForegroundColor Yellow +Write-Host " docker run -d -p 80:80 --env-file .env ${IMAGE_NAME}:${IMAGE_TAG}" -ForegroundColor White + diff --git a/build-and-save.sh b/build-and-save.sh new file mode 100644 index 0000000..f0ecf72 --- /dev/null +++ b/build-and-save.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +# Скрипт для сборки единого Docker образа и сохранения в tar + +set -e + +IMAGE_NAME="play-life-unified" +IMAGE_TAG="${IMAGE_TAG:-latest}" +TAR_FILE="${TAR_FILE:-play-life-unified.tar}" + +echo "🔨 Сборка единого Docker образа..." +docker build -t "${IMAGE_NAME}:${IMAGE_TAG}" . + +echo "💾 Сохранение образа в tar файл..." 
+docker save "${IMAGE_NAME}:${IMAGE_TAG}" -o "${TAR_FILE}" + +echo "✅ Образ успешно сохранен в ${TAR_FILE}" +echo "📦 Размер файла: $(du -h ${TAR_FILE} | cut -f1)" + +echo "" +echo "Для загрузки образа на другой машине используйте:" +echo " docker load -i ${TAR_FILE}" +echo "" +echo "Для запуска контейнера используйте:" +echo " docker run -d -p 80:80 --env-file .env ${IMAGE_NAME}:${IMAGE_TAG}" + diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..bc13ad9 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,66 @@ +version: '3.8' + +# Единый docker-compose для всех приложений в одном образе +# Использует корневой .env файл + +services: + # База данных PostgreSQL + db: + image: postgres:15-alpine + restart: unless-stopped + environment: + POSTGRES_USER: ${DB_USER:-playeng} + POSTGRES_PASSWORD: ${DB_PASSWORD:-playeng} + POSTGRES_DB: ${DB_NAME:-playeng} + ports: + - "${DB_PORT:-5432}:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-playeng}"] + interval: 10s + timeout: 5s + retries: 5 + env_file: + - .env + + # Backend сервер (Go) + backend: + build: + context: ./play-life-backend + dockerfile: Dockerfile + ports: + - "${PORT:-8080}:8080" + environment: + DB_HOST: db + DB_PORT: 5432 + DB_USER: ${DB_USER:-playeng} + DB_PASSWORD: ${DB_PASSWORD:-playeng} + DB_NAME: ${DB_NAME:-playeng} + PORT: ${PORT:-8080} + TELEGRAM_BOT_TOKEN: ${TELEGRAM_BOT_TOKEN:-} + TELEGRAM_CHAT_ID: ${TELEGRAM_CHAT_ID:-} + depends_on: + db: + condition: service_healthy + volumes: + - ./play-life-backend/migrations:/migrations + env_file: + - .env + + # Frontend приложение play-life-web + play-life-web: + build: + context: ./play-life-web + dockerfile: Dockerfile + container_name: play-life-web + ports: + - "${WEB_PORT:-3001}:80" + restart: unless-stopped + depends_on: + - backend + env_file: + - .env + +networks: + default: + name: play-life-network + diff --git a/env.example b/env.example new file mode 100644 index 0000000..1202b38 --- /dev/null +++ b/env.example @@ -0,0 +1,61 @@ +# ============================================ +# Единый файл конфигурации для всех проектов +# Backend и Play-Life-Web +# ============================================ + +# ============================================ +# Database Configuration +# ============================================ +DB_HOST=localhost +DB_PORT=5432 +DB_USER=playeng +DB_PASSWORD=playeng +DB_NAME=playeng + +# ============================================ +# Backend Server Configuration +# ============================================ +# Порт для backend сервера (по умолчанию: 8080) +# В production всегда используется порт 8080 внутри контейнера +PORT=8080 + +# ============================================ +# Play Life Web Configuration +# ============================================ +# Порт для frontend приложения play-life-web +WEB_PORT=3001 + +# ============================================ +# Telegram Bot Configuration (optional) +# ============================================ +# Get token from @BotFather in Telegram: https://t.me/botfather +# To get chat ID: send a message to your bot, then visit: https://api.telegram.org/bot/getUpdates +# Look for "chat":{"id":123456789} - that number is your chat ID +TELEGRAM_BOT_TOKEN=your_telegram_bot_token_here +TELEGRAM_CHAT_ID=123456789 +# Base URL для автоматической настройки webhook +# Примеры: +# - Для production с HTTPS: https://your-domain.com +# - Для локальной разработки с ngrok: https://abc123.ngrok.io +# - Для прямого доступа на нестандартном порту: http://your-server:8080 
+# Webhook будет настроен автоматически при старте сервера на: /webhook/telegram +# Если не указан, webhook нужно настраивать вручную +TELEGRAM_WEBHOOK_BASE_URL=https://your-domain.com + +# ============================================ +# Todoist Webhook Configuration (optional) +# ============================================ +# Секрет для проверки подлинности webhook от Todoist +# Если задан, все запросы должны содержать заголовок X-Todoist-Webhook-Secret с этим значением +# Оставьте пустым, если не хотите использовать проверку секрета +TODOIST_WEBHOOK_SECRET= + +# ============================================ +# Scheduler Configuration +# ============================================ +# Часовой пояс для планировщика задач (например: Europe/Moscow, America/New_York, UTC) +# Используется для автоматической фиксации целей на неделю каждый понедельник в 6:00 +# По умолчанию: UTC +# Список доступных часовых поясов: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones +TIMEZONE=UTC + diff --git a/nginx-unified.conf b/nginx-unified.conf new file mode 100644 index 0000000..1aa9403 --- /dev/null +++ b/nginx-unified.conf @@ -0,0 +1,76 @@ +server { + listen 80; + server_name localhost; + root /usr/share/nginx/html; + index index.html; + + # Gzip compression + gzip on; + gzip_vary on; + gzip_min_length 1024; + gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json; + + # Proxy API requests to backend (localhost внутри контейнера) + location /api/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy webhook endpoints to backend + location /webhook/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy daily-report endpoints to backend + location /daily-report/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy other API endpoints to backend + location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|project/priority|message/post|weekly_goals/setup|admin|admin\.html)$ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Handle React Router (SPA) + location / { + try_files $uri $uri/ /index.html; + } + + # Cache static assets + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ { + expires 1y; + add_header Cache-Control "public, 
immutable"; + } +} + diff --git a/nginx.conf b/nginx.conf new file mode 100644 index 0000000..60a79af --- /dev/null +++ b/nginx.conf @@ -0,0 +1,29 @@ +user nginx; +worker_processes auto; +error_log /var/log/nginx/error.log warn; +pid /var/run/nginx.pid; + +events { + worker_connections 1024; +} + +http { + include /etc/nginx/mime.types; + default_type application/octet-stream; + + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + access_log /var/log/nginx/access.log main; + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + types_hash_max_size 2048; + + # Include server configurations + include /etc/nginx/conf.d/*.conf; +} + diff --git a/play-life-backend/.gitignore b/play-life-backend/.gitignore new file mode 100644 index 0000000..e8f54ae --- /dev/null +++ b/play-life-backend/.gitignore @@ -0,0 +1,34 @@ +# Environment variables with secrets +.env + +# Go build artifacts +main +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool +*.out + +# Dependency directories +vendor/ + +# Go workspace file +go.work + +# IDE +.idea/ +.vscode/ +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db + diff --git a/play-life-backend/Dockerfile b/play-life-backend/Dockerfile new file mode 100644 index 0000000..efa06d9 --- /dev/null +++ b/play-life-backend/Dockerfile @@ -0,0 +1,57 @@ +# Multi-stage build для единого образа frontend + backend + +# Stage 1: Build Frontend +FROM node:20-alpine AS frontend-builder +WORKDIR /app/frontend +COPY play-life-web/package*.json ./ +RUN npm ci +# Копируем РёСЃС…РѕРґРЅРёРєРё (node_modules исключены через .dockerignore) +COPY play-life-web/ . +RUN npm run build + +# Stage 2: Build Backend +FROM golang:1.21-alpine AS backend-builder +WORKDIR /app/backend +COPY play-life-backend/go.mod play-life-backend/go.sum ./ +RUN go mod download +COPY play-life-backend/ . +RUN go mod tidy +RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o main . + +# Stage 3: Final image +FROM alpine:latest + +# Устанавливаем необходимые пакеты +RUN apk --no-cache add \ + ca-certificates \ + nginx \ + supervisor \ + curl + +# Создаем директории +WORKDIR /app + +# Копируем собранный frontend +COPY --from=frontend-builder /app/frontend/dist /usr/share/nginx/html + +# Копируем собранный backend +COPY --from=backend-builder /app/backend/main /app/backend/main +COPY play-life-backend/admin.html /app/backend/admin.html + +# Копируем конфигурацию nginx +COPY nginx.conf /etc/nginx/nginx.conf +COPY nginx-unified.conf /etc/nginx/conf.d/default.conf + +# Копируем конфигурацию supervisor для запуска backend +COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf + +# Создаем директории для логов +RUN mkdir -p /var/log/supervisor && \ + mkdir -p /var/log/nginx && \ + mkdir -p /var/run + +# Открываем РїРѕСЂС‚ 80 +EXPOSE 80 + +# Запускаем supervisor, который запустит nginx Рё backend +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] \ No newline at end of file diff --git a/play-life-backend/ENV_SETUP.md b/play-life-backend/ENV_SETUP.md new file mode 100644 index 0000000..129b945 --- /dev/null +++ b/play-life-backend/ENV_SETUP.md @@ -0,0 +1,76 @@ +# Настройка переменных окружения + +## Быстрый старт + +1. Скопируйте файл `env.example` в `.env`: + ```bash + cp env.example .env + ``` + +2. 
Откройте `.env` и заполните реальные значения: + ```bash + nano .env + # или + vim .env + ``` + +3. **ВАЖНО**: Файл `.env` уже добавлен в `.gitignore` и не будет попадать в git. + +## Переменные окружения + +### Обязательные (для работы приложения) + +- `DB_HOST` - хост базы данных (по умолчанию: localhost) +- `DB_PORT` - порт базы данных (по умолчанию: 5432) +- `DB_USER` - пользователь БД (по умолчанию: playeng) +- `DB_PASSWORD` - пароль БД (по умолчанию: playeng) +- `DB_NAME` - имя БД (по умолчанию: playeng) +- `PORT` - порт сервера (по умолчанию: 8080) + +### Опциональные (для Telegram интеграции) + +- `TELEGRAM_BOT_TOKEN` - токен бота от @BotFather +- `TELEGRAM_CHAT_ID` - ID чата для отправки сообщений + +## Использование в коде + +Приложение автоматически читает переменные окружения через `os.Getenv()`. + +Для загрузки `.env` файла в локальной разработке можно использовать: + +### Вариант 1: Установить переменные вручную +```bash +export DB_PASSWORD=your_password +export TELEGRAM_BOT_TOKEN=your_token +go run main.go +``` + +### Вариант 2: Использовать библиотеку godotenv (рекомендуется) + +1. Установить библиотеку: + ```bash + go get github.com/joho/godotenv + ``` + +2. Добавить в начало `main()`: + ```go + import "github.com/joho/godotenv" + + func main() { + // Загрузить .env файл + godotenv.Load() + // ... остальной код + } + ``` + +### Вариант 3: Использовать docker-compose + +В `docker-compose.yml` уже настроена передача переменных окружения из `.env` файла. + +## Безопасность + +- ✅ Файл `.env` добавлен в `.gitignore` +- ✅ Файл `env.example` содержит только шаблоны без реальных значений +- ✅ Никогда не коммитьте `.env` в git +- ✅ Используйте разные токены для dev/prod окружений + diff --git a/play-life-backend/admin.html b/play-life-backend/admin.html new file mode 100644 index 0000000..11c1ec5 --- /dev/null +++ b/play-life-backend/admin.html @@ -0,0 +1,358 @@ + + + + + + Play Life Backend - Admin Panel + + + +
+ [The admin.html markup (styles, form elements, and scripts) was lost in extraction; only fragments of its text content remain. The page is a small static admin panel titled "🎯 Play Life Backend - Admin Panel" with three sections: "📨 Message Post"; "📈 Daily Report Trigger" ("Click the button to send the daily Score and Goals report to Telegram; it is normally sent automatically at 11:59"); and "🎯 Weekly Goals Setup" ("Click the button to set goals for the current week from the median of the last 3 months, with posting to the chat; this normally runs automatically at the start of the week").]
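For reference, the panel's "Message Post" section maps onto the `/message/post` endpoint that nginx proxies to the Go backend, which decodes the body into `MessagePostRequest` (`{"body":{"text":"..."}}`). A minimal Go sketch of submitting the same payload shape — assuming the unified container is published on local port 80 and that the endpoint accepts POST (both assumptions, not confirmed by this patch):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Same JSON shape as MessagePostRequest in play-life-backend/main.go: {"body":{"text":"..."}}.
	payload := map[string]map[string]string{
		"body": {"text": "**[Work]+5.5** weekly report finished"},
	}
	buf, err := json.Marshal(payload)
	if err != nil {
		panic(err)
	}

	// Assumes the unified image runs locally and nginx forwards /message/post to the backend on port 8080.
	resp, err := http.Post("http://localhost/message/post", "application/json", bytes.NewReader(buf))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```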
+ + + + + diff --git a/play-life-backend/docker-compose.yml b/play-life-backend/docker-compose.yml new file mode 100644 index 0000000..bdd5ad1 --- /dev/null +++ b/play-life-backend/docker-compose.yml @@ -0,0 +1,43 @@ +version: '3.8' + +services: + db: + image: postgres:15-alpine + restart: unless-stopped + environment: + POSTGRES_USER: ${DB_USER:-playeng} + POSTGRES_PASSWORD: ${DB_PASSWORD:-playeng} + POSTGRES_DB: ${DB_NAME:-playeng} + ports: + - "${DB_PORT:-5432}:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-playeng}"] + interval: 10s + timeout: 5s + retries: 5 + env_file: + - ../.env + - .env # Локальный .env имеет приоритет + + backend: + build: . + ports: + - "${PORT:-8080}:8080" + environment: + DB_HOST: db + DB_PORT: 5432 + DB_USER: ${DB_USER:-playeng} + DB_PASSWORD: ${DB_PASSWORD:-playeng} + DB_NAME: ${DB_NAME:-playeng} + PORT: ${PORT:-8080} + TELEGRAM_BOT_TOKEN: ${TELEGRAM_BOT_TOKEN:-} + TELEGRAM_CHAT_ID: ${TELEGRAM_CHAT_ID:-} + depends_on: + db: + condition: service_healthy + volumes: + - ./migrations:/migrations + env_file: + - ../.env + - .env # Локальный .env имеет приоритет + diff --git a/play-life-backend/env.example b/play-life-backend/env.example new file mode 100644 index 0000000..84a0a07 --- /dev/null +++ b/play-life-backend/env.example @@ -0,0 +1,17 @@ +# Database Configuration +DB_HOST=localhost +DB_PORT=5432 +DB_USER=playeng +DB_PASSWORD=playeng +DB_NAME=playeng + +# Server Configuration +PORT=8080 + +# Telegram Bot Configuration (optional - for direct Telegram integration) +# Get token from @BotFather in Telegram: https://t.me/botfather +# To get chat ID: send a message to your bot, then visit: https://api.telegram.org/bot/getUpdates +# Look for "chat":{"id":123456789} - that number is your chat ID +TELEGRAM_BOT_TOKEN=your_telegram_bot_token_here +TELEGRAM_CHAT_ID=123456789 + diff --git a/play-life-backend/go.mod b/play-life-backend/go.mod new file mode 100644 index 0000000..19d3627 --- /dev/null +++ b/play-life-backend/go.mod @@ -0,0 +1,14 @@ +module play-eng-backend + +go 1.21 + +require ( + github.com/gorilla/mux v1.8.1 + github.com/lib/pq v1.10.9 + github.com/robfig/cron/v3 v3.0.1 +) + +require ( + github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1 // indirect + github.com/joho/godotenv v1.5.1 // indirect +) diff --git a/play-life-backend/go.sum b/play-life-backend/go.sum new file mode 100644 index 0000000..c9763d0 --- /dev/null +++ b/play-life-backend/go.sum @@ -0,0 +1,8 @@ +github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1 h1:wG8n/XJQ07TmjbITcGiUaOtXxdrINDz1b0J1w0SzqDc= +github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1/go.mod h1:A2S0CWkNylc2phvKXWBBdD3K0iGnDBGbzRpISP2zBl8= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= diff --git a/play-life-backend/main.go b/play-life-backend/main.go new file mode 100644 index 0000000..c69be66 --- /dev/null +++ b/play-life-backend/main.go @@ -0,0 +1,3672 @@ +package main + +import ( + "bytes" + "database/sql" + "encoding/json" + "fmt" + "io" + "log" + "math" + "net/http" + "os" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + 
"sync" + "time" + "unicode/utf16" + + "github.com/go-telegram-bot-api/telegram-bot-api/v5" + "github.com/gorilla/mux" + "github.com/joho/godotenv" + _ "github.com/lib/pq" + "github.com/robfig/cron/v3" +) + +type Word struct { + ID int `json:"id"` + Name string `json:"name"` + Translation string `json:"translation"` + Description string `json:"description"` + Success int `json:"success"` + Failure int `json:"failure"` + LastSuccess *string `json:"last_success_at,omitempty"` + LastFailure *string `json:"last_failure_at,omitempty"` +} + +type WordRequest struct { + Name string `json:"name"` + Translation string `json:"translation"` + Description string `json:"description"` + DictionaryID *int `json:"dictionary_id,omitempty"` +} + +type WordsRequest struct { + Words []WordRequest `json:"words"` +} + +type TestProgressUpdate struct { + ID int `json:"id"` + Success int `json:"success"` + Failure int `json:"failure"` + LastSuccessAt *string `json:"last_success_at,omitempty"` + LastFailureAt *string `json:"last_failure_at,omitempty"` +} + +type TestProgressRequest struct { + Words []TestProgressUpdate `json:"words"` + ConfigID *int `json:"config_id,omitempty"` +} + +type Config struct { + ID int `json:"id"` + Name string `json:"name"` + WordsCount int `json:"words_count"` + MaxCards *int `json:"max_cards,omitempty"` + TryMessage string `json:"try_message"` +} + +type ConfigRequest struct { + Name string `json:"name"` + WordsCount int `json:"words_count"` + MaxCards *int `json:"max_cards,omitempty"` + TryMessage string `json:"try_message"` + DictionaryIDs []int `json:"dictionary_ids,omitempty"` +} + +type Dictionary struct { + ID int `json:"id"` + Name string `json:"name"` + WordsCount int `json:"wordsCount"` +} + +type DictionaryRequest struct { + Name string `json:"name"` +} + +type TestConfigsAndDictionariesResponse struct { + Configs []Config `json:"configs"` + Dictionaries []Dictionary `json:"dictionaries"` +} + +type WeeklyProjectStats struct { + ProjectName string `json:"project_name"` + TotalScore float64 `json:"total_score"` + MinGoalScore float64 `json:"min_goal_score"` + MaxGoalScore *float64 `json:"max_goal_score,omitempty"` + Priority *int `json:"priority,omitempty"` + CalculatedScore float64 `json:"calculated_score"` +} + +type WeeklyStatsResponse struct { + Total *float64 `json:"total,omitempty"` + Projects []WeeklyProjectStats `json:"projects"` +} + +type MessagePostRequest struct { + Body struct { + Text string `json:"text"` + } `json:"body"` +} + +type ProcessedNode struct { + Project string `json:"project"` + Score float64 `json:"score"` +} + +type ProcessedEntry struct { + Text string `json:"text"` + CreatedDate string `json:"createdDate"` + Nodes []ProcessedNode `json:"nodes"` + Raw string `json:"raw"` + Markdown string `json:"markdown"` +} + +type WeeklyGoalSetup struct { + ProjectName string `json:"project_name"` + MinGoalScore float64 `json:"min_goal_score"` + MaxGoalScore float64 `json:"max_goal_score"` +} + +type Project struct { + ProjectID int `json:"project_id"` + ProjectName string `json:"project_name"` + Priority *int `json:"priority,omitempty"` +} + +type ProjectPriorityUpdate struct { + ID int `json:"id"` + Priority *int `json:"priority"` +} + +type ProjectPriorityRequest struct { + Body []ProjectPriorityUpdate `json:"body"` +} + +type FullStatisticsItem struct { + ProjectName string `json:"project_name"` + ReportYear int `json:"report_year"` + ReportWeek int `json:"report_week"` + TotalScore float64 `json:"total_score"` + MinGoalScore float64 
`json:"min_goal_score"` + MaxGoalScore float64 `json:"max_goal_score"` +} + +type TodoistWebhook struct { + EventName string `json:"event_name"` + EventData map[string]interface{} `json:"event_data"` +} + +type TelegramEntity struct { + Type string `json:"type"` + Offset int `json:"offset"` + Length int `json:"length"` +} + +type TelegramMessage struct { + Text string `json:"text"` + Entities []TelegramEntity `json:"entities"` +} + +type TelegramWebhook struct { + Message TelegramMessage `json:"message"` +} + +// TelegramUpdate - структура для Telegram webhook (обычно это Update объект) +type TelegramUpdate struct { + UpdateID int `json:"update_id"` + Message TelegramMessage `json:"message"` +} + +type App struct { + DB *sql.DB + webhookMutex sync.Mutex + lastWebhookTime map[int]time.Time // config_id -> last webhook time + telegramBot *tgbotapi.BotAPI + telegramChatID int64 +} + +func setCORSHeaders(w http.ResponseWriter) { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") +} + +func sendErrorWithCORS(w http.ResponseWriter, message string, statusCode int) { + setCORSHeaders(w) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(statusCode) + json.NewEncoder(w).Encode(map[string]interface{}{ + "error": message, + }) +} + +func (a *App) getWordsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + // Get dictionary_id from query parameter + dictionaryIDStr := r.URL.Query().Get("dictionary_id") + var dictionaryID *int + if dictionaryIDStr != "" { + if id, err := strconv.Atoi(dictionaryIDStr); err == nil { + dictionaryID = &id + } + } + + query := ` + SELECT + w.id, + w.name, + w.translation, + w.description, + COALESCE(p.success, 0) as success, + COALESCE(p.failure, 0) as failure, + CASE WHEN p.last_success_at IS NOT NULL THEN p.last_success_at::text ELSE NULL END as last_success_at, + CASE WHEN p.last_failure_at IS NOT NULL THEN p.last_failure_at::text ELSE NULL END as last_failure_at + FROM words w + LEFT JOIN progress p ON w.id = p.word_id + WHERE ($1::INTEGER IS NULL OR w.dictionary_id = $1) + ORDER BY w.id + ` + + var rows *sql.Rows + var err error + if dictionaryID != nil { + rows, err = a.DB.Query(query, *dictionaryID) + } else { + rows, err = a.DB.Query(query, nil) + } + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + words := make([]Word, 0) + for rows.Next() { + var word Word + var lastSuccess, lastFailure sql.NullString + + err := rows.Scan( + &word.ID, + &word.Name, + &word.Translation, + &word.Description, + &word.Success, + &word.Failure, + &lastSuccess, + &lastFailure, + ) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + if lastSuccess.Valid { + word.LastSuccess = &lastSuccess.String + } + if lastFailure.Valid { + word.LastFailure = &lastFailure.String + } + + words = append(words, word) + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(words) +} + +func (a *App) addWordsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + 
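+ // CORS preflight: reply with the allowed methods and headers and return early, before doing any database work.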
w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "POST, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + var req WordsRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + tx, err := a.DB.Begin() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + stmt, err := tx.Prepare(` + INSERT INTO words (name, translation, description, dictionary_id) + VALUES ($1, $2, $3, COALESCE($4, 0)) + RETURNING id + `) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer stmt.Close() + + var addedCount int + for _, wordReq := range req.Words { + var id int + dictionaryID := 0 + if wordReq.DictionaryID != nil { + dictionaryID = *wordReq.DictionaryID + } + err := stmt.QueryRow(wordReq.Name, wordReq.Translation, wordReq.Description, dictionaryID).Scan(&id) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + addedCount++ + } + + if err := tx.Commit(); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": fmt.Sprintf("Added %d words", addedCount), + "added": addedCount, + }) +} + +func (a *App) getTestWordsHandler(w http.ResponseWriter, r *http.Request) { + log.Printf("getTestWordsHandler called: %s %s", r.Method, r.URL.Path) + setCORSHeaders(w) + + if r.Method == "OPTIONS" { + w.WriteHeader(http.StatusOK) + return + } + + // Get config_id from query parameter (required) + configIDStr := r.URL.Query().Get("config_id") + if configIDStr == "" { + sendErrorWithCORS(w, "config_id parameter is required", http.StatusBadRequest) + return + } + + configID, err := strconv.Atoi(configIDStr) + if err != nil { + sendErrorWithCORS(w, "invalid config_id parameter", http.StatusBadRequest) + return + } + + // Get words_count from config + var wordsCount int + err = a.DB.QueryRow("SELECT words_count FROM configs WHERE id = $1", configID).Scan(&wordsCount) + if err != nil { + if err == sql.ErrNoRows { + sendErrorWithCORS(w, "config not found", http.StatusNotFound) + return + } + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Get dictionary IDs for this config + var dictionaryIDs []int + dictQuery := ` + SELECT dictionary_id + FROM config_dictionaries + WHERE config_id = $1 + ` + dictRows, err := a.DB.Query(dictQuery, configID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer dictRows.Close() + + for dictRows.Next() { + var dictID int + if err := dictRows.Scan(&dictID); err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + dictionaryIDs = append(dictionaryIDs, dictID) + } + + // If no dictionaries are selected for config, use all dictionaries (no filter) + var dictFilter string + var dictArgs []interface{} + if len(dictionaryIDs) > 0 { + placeholders := make([]string, len(dictionaryIDs)) + for i := range dictionaryIDs { + placeholders[i] = fmt.Sprintf("$%d", i+1) + } + dictFilter = fmt.Sprintf("w.dictionary_id IN (%s)", strings.Join(placeholders, ",")) + for _, dictID := range dictionaryIDs { + dictArgs = append(dictArgs, dictID) + } + } 
else { + dictFilter = "1=1" // No filter + } + + // Calculate group sizes (use ceiling to ensure we don't lose words due to rounding) + group1Count := int(float64(wordsCount) * 0.3) // 30% + group2Count := int(float64(wordsCount) * 0.4) // 40% + // group3Count is calculated dynamically based on actual words collected from groups 1 and 2 + + // Base query parts + baseSelect := ` + w.id, + w.name, + w.translation, + w.description, + COALESCE(p.success, 0) as success, + COALESCE(p.failure, 0) as failure, + CASE WHEN p.last_success_at IS NOT NULL THEN p.last_success_at::text ELSE NULL END as last_success_at, + CASE WHEN p.last_failure_at IS NOT NULL THEN p.last_failure_at::text ELSE NULL END as last_failure_at + ` + baseFrom := ` + FROM words w + LEFT JOIN progress p ON w.id = p.word_id + WHERE ` + dictFilter + + // Group 1: success <= 3, sorted by success ASC, then last_success_at ASC (NULL first) + group1Query := ` + SELECT ` + baseSelect + ` + ` + baseFrom + ` + AND COALESCE(p.success, 0) <= 3 + ORDER BY + COALESCE(p.success, 0) ASC, + CASE WHEN p.last_success_at IS NULL THEN 0 ELSE 1 END, + p.last_success_at ASC + LIMIT $` + fmt.Sprintf("%d", len(dictArgs)+1) + + group1Args := append(dictArgs, group1Count*2) // Get more to ensure uniqueness + group1Rows, err := a.DB.Query(group1Query, group1Args...) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer group1Rows.Close() + + group1Words := make([]Word, 0) + group1WordIDs := make(map[int]bool) + for group1Rows.Next() && len(group1Words) < group1Count { + var word Word + var lastSuccess, lastFailure sql.NullString + + err := group1Rows.Scan( + &word.ID, + &word.Name, + &word.Translation, + &word.Description, + &word.Success, + &word.Failure, + &lastSuccess, + &lastFailure, + ) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if lastSuccess.Valid { + word.LastSuccess = &lastSuccess.String + } + if lastFailure.Valid { + word.LastFailure = &lastFailure.String + } + + group1Words = append(group1Words, word) + group1WordIDs[word.ID] = true + } + + // Group 2: (failure - success) >= 5, sorted by (failure - success) DESC, then last_success_at ASC (NULL first) + // Exclude words already in group1 + group2Exclude := "" + group2Args := make([]interface{}, 0) + group2Args = append(group2Args, dictArgs...) + if len(group1WordIDs) > 0 { + excludePlaceholders := make([]string, 0, len(group1WordIDs)) + idx := len(dictArgs) + 1 + for wordID := range group1WordIDs { + excludePlaceholders = append(excludePlaceholders, fmt.Sprintf("$%d", idx)) + group2Args = append(group2Args, wordID) + idx++ + } + group2Exclude = " AND w.id NOT IN (" + strings.Join(excludePlaceholders, ",") + ")" + } + + group2Query := ` + SELECT ` + baseSelect + ` + ` + baseFrom + ` + AND (COALESCE(p.failure, 0) - COALESCE(p.success, 0)) >= 5 + ` + group2Exclude + ` + ORDER BY + (COALESCE(p.failure, 0) - COALESCE(p.success, 0)) DESC, + CASE WHEN p.last_success_at IS NULL THEN 0 ELSE 1 END, + p.last_success_at ASC + LIMIT $` + fmt.Sprintf("%d", len(group2Args)+1) + + group2Args = append(group2Args, group2Count*2) // Get more to ensure uniqueness + group2Rows, err := a.DB.Query(group2Query, group2Args...) 
+ if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer group2Rows.Close() + + group2Words := make([]Word, 0) + group2WordIDs := make(map[int]bool) + for group2Rows.Next() && len(group2Words) < group2Count { + var word Word + var lastSuccess, lastFailure sql.NullString + + err := group2Rows.Scan( + &word.ID, + &word.Name, + &word.Translation, + &word.Description, + &word.Success, + &word.Failure, + &lastSuccess, + &lastFailure, + ) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if lastSuccess.Valid { + word.LastSuccess = &lastSuccess.String + } + if lastFailure.Valid { + word.LastFailure = &lastFailure.String + } + + group2Words = append(group2Words, word) + group2WordIDs[word.ID] = true + } + + // Group 3: All remaining words, sorted by last_success_at ASC (NULL first) + // Exclude words already in group1 and group2 + allExcludedIDs := make(map[int]bool) + for id := range group1WordIDs { + allExcludedIDs[id] = true + } + for id := range group2WordIDs { + allExcludedIDs[id] = true + } + + group3Exclude := "" + group3Args := make([]interface{}, 0) + group3Args = append(group3Args, dictArgs...) + if len(allExcludedIDs) > 0 { + excludePlaceholders := make([]string, 0, len(allExcludedIDs)) + idx := len(dictArgs) + 1 + for wordID := range allExcludedIDs { + excludePlaceholders = append(excludePlaceholders, fmt.Sprintf("$%d", idx)) + group3Args = append(group3Args, wordID) + idx++ + } + group3Exclude = " AND w.id NOT IN (" + strings.Join(excludePlaceholders, ",") + ")" + } + + // Calculate how many words we still need from group 3 + wordsCollected := len(group1Words) + len(group2Words) + group3Needed := wordsCount - wordsCollected + + log.Printf("Word selection: wordsCount=%d, group1=%d, group2=%d, collected=%d, group3Needed=%d", + wordsCount, len(group1Words), len(group2Words), wordsCollected, group3Needed) + + group3Words := make([]Word, 0) + if group3Needed > 0 { + group3Query := ` + SELECT ` + baseSelect + ` + ` + baseFrom + ` + ` + group3Exclude + ` + ORDER BY + CASE WHEN p.last_success_at IS NULL THEN 0 ELSE 1 END, + p.last_success_at ASC + LIMIT $` + fmt.Sprintf("%d", len(group3Args)+1) + + group3Args = append(group3Args, group3Needed) + group3Rows, err := a.DB.Query(group3Query, group3Args...) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer group3Rows.Close() + + for group3Rows.Next() { + var word Word + var lastSuccess, lastFailure sql.NullString + + err := group3Rows.Scan( + &word.ID, + &word.Name, + &word.Translation, + &word.Description, + &word.Success, + &word.Failure, + &lastSuccess, + &lastFailure, + ) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if lastSuccess.Valid { + word.LastSuccess = &lastSuccess.String + } + if lastFailure.Valid { + word.LastFailure = &lastFailure.String + } + + group3Words = append(group3Words, word) + } + } + + // Combine all groups + words := make([]Word, 0) + words = append(words, group1Words...) + words = append(words, group2Words...) + words = append(words, group3Words...) 
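+ // Final test set: roughly 30% weakest words (success <= 3), 40% most error-prone words (failure - success >= 5), and the rest topped up with the least recently practiced words; overlaps between groups were excluded above.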
+ + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(words) +} + +func (a *App) updateTestProgressHandler(w http.ResponseWriter, r *http.Request) { + log.Printf("updateTestProgressHandler called: %s %s", r.Method, r.URL.Path) + setCORSHeaders(w) + + if r.Method == "OPTIONS" { + w.WriteHeader(http.StatusOK) + return + } + + var req TestProgressRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding request: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusBadRequest) + return + } + + log.Printf("Received %d word updates, config_id: %v", len(req.Words), req.ConfigID) + + tx, err := a.DB.Begin() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + stmt, err := tx.Prepare(` + INSERT INTO progress (word_id, success, failure, last_success_at, last_failure_at) + VALUES ($1, $2, $3, $4, $5) + ON CONFLICT (word_id) + DO UPDATE SET + success = EXCLUDED.success, + failure = EXCLUDED.failure, + last_success_at = COALESCE(EXCLUDED.last_success_at, progress.last_success_at), + last_failure_at = COALESCE(EXCLUDED.last_failure_at, progress.last_failure_at) + `) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer stmt.Close() + + for _, wordUpdate := range req.Words { + // Convert pointers to values for logging + lastSuccessStr := "nil" + if wordUpdate.LastSuccessAt != nil { + lastSuccessStr = *wordUpdate.LastSuccessAt + } + lastFailureStr := "nil" + if wordUpdate.LastFailureAt != nil { + lastFailureStr = *wordUpdate.LastFailureAt + } + log.Printf("Updating word %d: success=%d, failure=%d, last_success_at=%s, last_failure_at=%s", + wordUpdate.ID, wordUpdate.Success, wordUpdate.Failure, lastSuccessStr, lastFailureStr) + + // Convert pointers to sql.NullString for proper NULL handling + var lastSuccess, lastFailure interface{} + if wordUpdate.LastSuccessAt != nil && *wordUpdate.LastSuccessAt != "" { + lastSuccess = *wordUpdate.LastSuccessAt + } else { + lastSuccess = nil + } + if wordUpdate.LastFailureAt != nil && *wordUpdate.LastFailureAt != "" { + lastFailure = *wordUpdate.LastFailureAt + } else { + lastFailure = nil + } + + _, err := stmt.Exec( + wordUpdate.ID, + wordUpdate.Success, + wordUpdate.Failure, + lastSuccess, + lastFailure, + ) + if err != nil { + log.Printf("Error executing update for word %d: %v", wordUpdate.ID, err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + } + + if err := tx.Commit(); err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // If config_id is provided, send webhook with try_message + if req.ConfigID != nil { + configID := *req.ConfigID + + // Use mutex to prevent duplicate webhook sends + a.webhookMutex.Lock() + lastTime, exists := a.lastWebhookTime[configID] + now := time.Now() + + // Only send webhook if it hasn't been sent in the last 5 seconds for this config + shouldSend := !exists || now.Sub(lastTime) > 5*time.Second + + if shouldSend { + a.lastWebhookTime[configID] = now + } + a.webhookMutex.Unlock() + + if !shouldSend { + log.Printf("Webhook skipped for config_id %d (sent recently)", configID) + } else { + var tryMessage sql.NullString + err := a.DB.QueryRow("SELECT try_message FROM configs WHERE id = $1", configID).Scan(&tryMessage) + if err == nil && tryMessage.Valid && tryMessage.String != "" { + // Process message directly (backend always runs together with frontend) + _, err 
:= a.processMessage(tryMessage.String) + if err != nil { + log.Printf("Error processing message: %v", err) + // Remove from map on error so it can be retried + a.webhookMutex.Lock() + delete(a.lastWebhookTime, configID) + a.webhookMutex.Unlock() + } else { + log.Printf("Message processed successfully for config_id %d", configID) + } + } else if err != nil && err != sql.ErrNoRows { + log.Printf("Error fetching config: %v", err) + } else if err == nil && (!tryMessage.Valid || tryMessage.String == "") { + log.Printf("Webhook skipped for config_id %d (try_message is empty)", configID) + } + } + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Progress updated successfully", + }) +} + +func (a *App) getConfigsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + query := ` + SELECT id, name, words_count, max_cards, try_message + FROM configs + ORDER BY id + ` + + rows, err := a.DB.Query(query) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + configs := make([]Config, 0) + for rows.Next() { + var config Config + var maxCards sql.NullInt64 + err := rows.Scan( + &config.ID, + &config.Name, + &config.WordsCount, + &maxCards, + &config.TryMessage, + ) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + if maxCards.Valid { + maxCardsVal := int(maxCards.Int64) + config.MaxCards = &maxCardsVal + } + configs = append(configs, config) + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(configs) +} + +func (a *App) getDictionariesHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + query := ` + SELECT + d.id, + d.name, + COALESCE(COUNT(w.id), 0) as words_count + FROM dictionaries d + LEFT JOIN words w ON d.id = w.dictionary_id + GROUP BY d.id, d.name + ORDER BY d.id + ` + + rows, err := a.DB.Query(query) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + dictionaries := make([]Dictionary, 0) + for rows.Next() { + var dict Dictionary + err := rows.Scan( + &dict.ID, + &dict.Name, + &dict.WordsCount, + ) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + dictionaries = append(dictionaries, dict) + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(dictionaries) +} + +func (a *App) addDictionaryHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "POST, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + var req DictionaryRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + 
http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Name == "" { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + w.WriteHeader(http.StatusBadRequest) + json.NewEncoder(w).Encode(map[string]string{"error": "Имя словаря обязательно"}) + return + } + + var id int + err := a.DB.QueryRow(` + INSERT INTO dictionaries (name) + VALUES ($1) + RETURNING id + `, req.Name).Scan(&id) + + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "id": id, + "name": req.Name, + }) +} + +func (a *App) updateDictionaryHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + vars := mux.Vars(r) + dictionaryID := vars["id"] + + var req DictionaryRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Name == "" { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + w.WriteHeader(http.StatusBadRequest) + json.NewEncoder(w).Encode(map[string]string{"error": "Имя словаря обязательно"}) + return + } + + result, err := a.DB.Exec(` + UPDATE dictionaries + SET name = $1 + WHERE id = $2 + `, req.Name, dictionaryID) + + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + if rowsAffected == 0 { + http.Error(w, "Dictionary not found", http.StatusNotFound) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Dictionary updated successfully", + }) +} + +func (a *App) deleteDictionaryHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + vars := mux.Vars(r) + dictionaryID := vars["id"] + + // Prevent deletion of default dictionary (id = 0) + if dictionaryID == "0" { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + w.WriteHeader(http.StatusBadRequest) + json.NewEncoder(w).Encode(map[string]string{"error": "Cannot delete default dictionary"}) + return + } + + tx, err := a.DB.Begin() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Delete all words from this dictionary (progress will be deleted automatically due to CASCADE) + _, err = tx.Exec(` + DELETE FROM words + WHERE dictionary_id = $1 + `, dictionaryID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Delete all config-dictionary associations (will be deleted automatically due to CASCADE, but doing explicitly for clarity) + _, err = tx.Exec(` + DELETE FROM config_dictionaries + WHERE dictionary_id = $1 + `, dictionaryID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Delete the dictionary + result, err := tx.Exec("DELETE FROM dictionaries WHERE id = $1", dictionaryID) + if err != 
nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if rowsAffected == 0 { + sendErrorWithCORS(w, "Dictionary not found", http.StatusNotFound) + return + } + + if err := tx.Commit(); err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Dictionary deleted successfully. All words and configuration associations have been deleted.", + }) +} + +func (a *App) getConfigDictionariesHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + vars := mux.Vars(r) + configID := vars["id"] + + query := ` + SELECT dictionary_id + FROM config_dictionaries + WHERE config_id = $1 + ORDER BY dictionary_id + ` + + rows, err := a.DB.Query(query, configID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + dictionaryIDs := make([]int, 0) + for rows.Next() { + var dictID int + err := rows.Scan(&dictID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + dictionaryIDs = append(dictionaryIDs, dictID) + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "dictionary_ids": dictionaryIDs, + }) +} + +func (a *App) getTestConfigsAndDictionariesHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + // Get configs + configsQuery := ` + SELECT id, name, words_count, max_cards, try_message + FROM configs + ORDER BY id + ` + + configsRows, err := a.DB.Query(configsQuery) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer configsRows.Close() + + configs := make([]Config, 0) + for configsRows.Next() { + var config Config + var maxCards sql.NullInt64 + err := configsRows.Scan( + &config.ID, + &config.Name, + &config.WordsCount, + &maxCards, + &config.TryMessage, + ) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + if maxCards.Valid { + maxCardsVal := int(maxCards.Int64) + config.MaxCards = &maxCardsVal + } + configs = append(configs, config) + } + + // Get dictionaries + dictsQuery := ` + SELECT + d.id, + d.name, + COALESCE(COUNT(w.id), 0) as words_count + FROM dictionaries d + LEFT JOIN words w ON d.id = w.dictionary_id + GROUP BY d.id, d.name + ORDER BY d.id + ` + + dictsRows, err := a.DB.Query(dictsQuery) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer dictsRows.Close() + + dictionaries := make([]Dictionary, 0) + for dictsRows.Next() { + var dict Dictionary + err := dictsRows.Scan( + &dict.ID, + &dict.Name, + &dict.WordsCount, + ) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + 
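+		// Collect each dictionary together with its aggregated word count
+		// (words_count comes from the LEFT JOIN / COUNT in dictsQuery above).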
dictionaries = append(dictionaries, dict) + } + + response := TestConfigsAndDictionariesResponse{ + Configs: configs, + Dictionaries: dictionaries, + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(response) +} + +func (a *App) addConfigHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + w.Header().Set("Access-Control-Allow-Origin", "*") + w.Header().Set("Access-Control-Allow-Methods", "POST, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + w.WriteHeader(http.StatusOK) + return + } + + var req ConfigRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Name == "" { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + w.WriteHeader(http.StatusBadRequest) + json.NewEncoder(w).Encode(map[string]string{"message": "Имя обязательно для заполнения"}) + return + } + if req.WordsCount <= 0 { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + w.WriteHeader(http.StatusBadRequest) + json.NewEncoder(w).Encode(map[string]string{"message": "Количество слов должно быть больше 0"}) + return + } + + tx, err := a.DB.Begin() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + var id int + err = tx.QueryRow(` + INSERT INTO configs (name, words_count, max_cards, try_message) + VALUES ($1, $2, $3, $4) + RETURNING id + `, req.Name, req.WordsCount, req.MaxCards, req.TryMessage).Scan(&id) + + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Insert dictionary associations if provided + if len(req.DictionaryIDs) > 0 { + stmt, err := tx.Prepare(` + INSERT INTO config_dictionaries (config_id, dictionary_id) + VALUES ($1, $2) + `) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer stmt.Close() + + for _, dictID := range req.DictionaryIDs { + _, err := stmt.Exec(id, dictID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + } + } + + if err := tx.Commit(); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Config created successfully", + "id": id, + }) +} + +func (a *App) updateConfigHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + vars := mux.Vars(r) + configID := vars["id"] + + var req ConfigRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + if req.Name == "" { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + w.WriteHeader(http.StatusBadRequest) + json.NewEncoder(w).Encode(map[string]string{"message": "Имя обязательно для заполнения"}) + return + } + if req.WordsCount <= 0 { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + w.WriteHeader(http.StatusBadRequest) + json.NewEncoder(w).Encode(map[string]string{"message": "Количество слов должно быть 
больше 0"}) + return + } + + tx, err := a.DB.Begin() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + result, err := tx.Exec(` + UPDATE configs + SET name = $1, words_count = $2, max_cards = $3, try_message = $4 + WHERE id = $5 + `, req.Name, req.WordsCount, req.MaxCards, req.TryMessage, configID) + + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + if rowsAffected == 0 { + http.Error(w, "Config not found", http.StatusNotFound) + return + } + + // Delete existing dictionary associations + _, err = tx.Exec("DELETE FROM config_dictionaries WHERE config_id = $1", configID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Insert new dictionary associations if provided + if len(req.DictionaryIDs) > 0 { + stmt, err := tx.Prepare(` + INSERT INTO config_dictionaries (config_id, dictionary_id) + VALUES ($1, $2) + `) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + defer stmt.Close() + + for _, dictID := range req.DictionaryIDs { + _, err := stmt.Exec(configID, dictID) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + } + } + + if err := tx.Commit(); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Access-Control-Allow-Origin", "*") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Config updated successfully", + }) +} + +func (a *App) deleteConfigHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + vars := mux.Vars(r) + configID := vars["id"] + + result, err := a.DB.Exec("DELETE FROM configs WHERE id = $1", configID) + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if rowsAffected == 0 { + sendErrorWithCORS(w, "Config not found", http.StatusNotFound) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Config deleted successfully", + }) +} + +func (a *App) getWeeklyStatsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + log.Printf("getWeeklyStatsHandler called from %s, path: %s", r.RemoteAddr, r.URL.Path) + + // Опционально обновляем materialized view перед запросом + // Это можно сделать через query parameter ?refresh=true + if r.URL.Query().Get("refresh") == "true" { + _, err := a.DB.Exec("REFRESH MATERIALIZED VIEW weekly_report_mv") + if err != nil { + log.Printf("Warning: Failed to refresh materialized view: %v", err) + // Продолжаем выполнение даже если обновление не удалось + } + } + + query := ` + SELECT + p.name AS project_name, + -- Используем COALESCE для установки total_score в 0.0000, если нет данных в weekly_report_mv + COALESCE(wr.total_score, 0.0000) AS total_score, + wg.min_goal_score, + wg.max_goal_score, + wg.priority + FROM + weekly_goals wg + JOIN + projects p ON 
wg.project_id = p.id + LEFT JOIN + weekly_report_mv wr + ON wg.project_id = wr.project_id + AND wg.goal_year = wr.report_year + AND wg.goal_week = wr.report_week + WHERE + -- Фильтруем ТОЛЬКО по целям текущего года и недели + wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + ORDER BY + total_score DESC + ` + + rows, err := a.DB.Query(query) + if err != nil { + log.Printf("Error querying weekly stats: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + defer rows.Close() + + projects := make([]WeeklyProjectStats, 0) + // Группы для расчета среднего по priority + groups := make(map[int][]float64) + + for rows.Next() { + var project WeeklyProjectStats + var maxGoalScore sql.NullFloat64 + var priority sql.NullInt64 + + err := rows.Scan( + &project.ProjectName, + &project.TotalScore, + &project.MinGoalScore, + &maxGoalScore, + &priority, + ) + if err != nil { + log.Printf("Error scanning weekly stats row: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + if maxGoalScore.Valid { + maxGoalVal := maxGoalScore.Float64 + project.MaxGoalScore = &maxGoalVal + } + + var priorityVal int + if priority.Valid { + priorityVal = int(priority.Int64) + project.Priority = &priorityVal + } + + // Расчет calculated_score по формуле из n8n + totalScore := project.TotalScore + minGoalScore := project.MinGoalScore + var maxGoalScoreVal float64 + if project.MaxGoalScore != nil { + maxGoalScoreVal = *project.MaxGoalScore + } + + // Параметры бонуса в зависимости от priority + var extraBonusLimit float64 = 20 + if priorityVal == 1 { + extraBonusLimit = 50 + } else if priorityVal == 2 { + extraBonusLimit = 35 + } + + // Расчет базового прогресса + var baseProgress float64 + if minGoalScore > 0 { + baseProgress = (min(totalScore, minGoalScore) / minGoalScore) * 100.0 + } + + // Расчет экстра прогресса + var extraProgress float64 + denominator := maxGoalScoreVal - minGoalScore + if denominator > 0 && totalScore > minGoalScore { + excess := min(totalScore, maxGoalScoreVal) - minGoalScore + extraProgress = (excess / denominator) * extraBonusLimit + } + + resultScore := baseProgress + extraProgress + project.CalculatedScore = roundToTwoDecimals(resultScore) + + // Группировка для итогового расчета + if _, exists := groups[priorityVal]; !exists { + groups[priorityVal] = make([]float64, 0) + } + groups[priorityVal] = append(groups[priorityVal], project.CalculatedScore) + + projects = append(projects, project) + } + + // Находим среднее внутри каждой группы + groupAverages := make([]float64, 0) + for _, scores := range groups { + if len(scores) > 0 { + sum := 0.0 + for _, score := range scores { + sum += score + } + avg := sum / float64(len(scores)) + groupAverages = append(groupAverages, avg) + } + } + + // Находим среднее между всеми группами + var total *float64 + if len(groupAverages) > 0 { + sum := 0.0 + for _, avg := range groupAverages { + sum += avg + } + overallProgress := sum / float64(len(groupAverages)) + overallProgressRounded := roundToFourDecimals(overallProgress) + total = &overallProgressRounded + } + + response := WeeklyStatsResponse{ + Total: total, + Projects: projects, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +func (a *App) initDB() error { + createDictionariesTable := ` + CREATE TABLE IF NOT EXISTS dictionaries ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + ` + + 
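+	// Note: the words table below is created without a dictionary_id column;
+	// the column, its FOREIGN KEY to dictionaries, and the NOT NULL / DEFAULT 0
+	// constraints are added further down via ALTER TABLE so that databases
+	// created by older versions are migrated in place.
+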
createWordsTable := ` + CREATE TABLE IF NOT EXISTS words ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + translation TEXT NOT NULL, + description TEXT + ) + ` + + createProgressTable := ` + CREATE TABLE IF NOT EXISTS progress ( + id SERIAL PRIMARY KEY, + word_id INTEGER NOT NULL REFERENCES words(id) ON DELETE CASCADE, + success INTEGER DEFAULT 0, + failure INTEGER DEFAULT 0, + last_success_at TIMESTAMP, + last_failure_at TIMESTAMP, + UNIQUE(word_id) + ) + ` + + createConfigsTable := ` + CREATE TABLE IF NOT EXISTS configs ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + words_count INTEGER NOT NULL, + max_cards INTEGER, + try_message TEXT + ) + ` + + createConfigDictionariesTable := ` + CREATE TABLE IF NOT EXISTS config_dictionaries ( + config_id INTEGER NOT NULL REFERENCES configs(id) ON DELETE CASCADE, + dictionary_id INTEGER NOT NULL REFERENCES dictionaries(id) ON DELETE CASCADE, + PRIMARY KEY (config_id, dictionary_id) + ) + ` + + createConfigDictionariesIndexes := []string{ + `CREATE INDEX IF NOT EXISTS idx_config_dictionaries_config_id ON config_dictionaries(config_id)`, + `CREATE INDEX IF NOT EXISTS idx_config_dictionaries_dictionary_id ON config_dictionaries(dictionary_id)`, + } + + // Alter existing table to make try_message nullable if it's not already + alterConfigsTable := ` + ALTER TABLE configs + ALTER COLUMN try_message DROP NOT NULL + ` + + // Alter existing table to add max_cards column if it doesn't exist + alterConfigsTableMaxCards := ` + ALTER TABLE configs + ADD COLUMN IF NOT EXISTS max_cards INTEGER + ` + + // Create dictionaries table first + if _, err := a.DB.Exec(createDictionariesTable); err != nil { + return err + } + + // Insert default dictionary "Все слова" with id = 0 + // PostgreSQL SERIAL starts from 1, so we need to set sequence to -1 first + insertDefaultDictionary := ` + DO $$ + BEGIN + -- Set sequence to -1 so next value will be 0 + PERFORM setval('dictionaries_id_seq', -1, false); + + -- Insert the default dictionary with id = 0 + INSERT INTO dictionaries (id, name) + VALUES (0, 'Все слова') + ON CONFLICT (id) DO NOTHING; + + -- Set the sequence to start from 1 (so next auto-increment will be 1) + PERFORM setval('dictionaries_id_seq', 1, false); + EXCEPTION + WHEN others THEN + -- If sequence doesn't exist or other error, try without sequence manipulation + INSERT INTO dictionaries (id, name) + VALUES (0, 'Все слова') + ON CONFLICT (id) DO NOTHING; + END $$; + ` + if _, err := a.DB.Exec(insertDefaultDictionary); err != nil { + log.Printf("Warning: Failed to insert default dictionary: %v. 
Trying alternative method.", err) + // Alternative: try to insert without sequence manipulation + _, err2 := a.DB.Exec(`INSERT INTO dictionaries (id, name) VALUES (0, 'Все слова') ON CONFLICT (id) DO NOTHING`) + if err2 != nil { + log.Printf("Warning: Alternative insert also failed: %v", err2) + } + } + + if _, err := a.DB.Exec(createWordsTable); err != nil { + return err + } + + // Add dictionary_id column to words if it doesn't exist + // First check if column exists, if not add it + checkColumnExists := ` + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name='words' AND column_name='dictionary_id' + ` + var columnExists int + err := a.DB.QueryRow(checkColumnExists).Scan(&columnExists) + if err == nil && columnExists == 0 { + // Column doesn't exist, add it + alterWordsTable := ` + ALTER TABLE words + ADD COLUMN dictionary_id INTEGER DEFAULT 0 + ` + if _, err := a.DB.Exec(alterWordsTable); err != nil { + log.Printf("Warning: Failed to add dictionary_id column: %v", err) + } else { + // Add foreign key constraint + addForeignKey := ` + ALTER TABLE words + ADD CONSTRAINT words_dictionary_id_fkey + FOREIGN KEY (dictionary_id) REFERENCES dictionaries(id) + ` + a.DB.Exec(addForeignKey) + } + } + + // Update existing words to have dictionary_id = 0 + updateWordsDictionaryID := ` + UPDATE words + SET dictionary_id = 0 + WHERE dictionary_id IS NULL + ` + a.DB.Exec(updateWordsDictionaryID) + + // Make dictionary_id NOT NULL after setting default values (if column exists) + if columnExists > 0 || err == nil { + alterWordsTableNotNull := ` + DO $$ + BEGIN + ALTER TABLE words + ALTER COLUMN dictionary_id SET NOT NULL, + ALTER COLUMN dictionary_id SET DEFAULT 0; + EXCEPTION + WHEN others THEN + -- Ignore if already NOT NULL + NULL; + END $$; + ` + a.DB.Exec(alterWordsTableNotNull) + } + + // Create index on dictionary_id + createDictionaryIndex := ` + CREATE INDEX IF NOT EXISTS idx_words_dictionary_id ON words(dictionary_id) + ` + a.DB.Exec(createDictionaryIndex) + + // Remove unique constraint on words.name if it exists + removeUniqueConstraint := ` + ALTER TABLE words + DROP CONSTRAINT IF EXISTS words_name_key; + + ALTER TABLE words + DROP CONSTRAINT IF EXISTS words_name_unique; + ` + a.DB.Exec(removeUniqueConstraint) + + if _, err := a.DB.Exec(createProgressTable); err != nil { + return err + } + + if _, err := a.DB.Exec(createConfigsTable); err != nil { + return err + } + + // Try to alter existing table to make try_message nullable + // Ignore error if column is already nullable or table doesn't exist + a.DB.Exec(alterConfigsTable) + + // Try to alter existing table to add max_cards column + // Ignore error if column already exists + a.DB.Exec(alterConfigsTableMaxCards) + + // Create config_dictionaries table + if _, err := a.DB.Exec(createConfigDictionariesTable); err != nil { + return err + } + + // Create indexes for config_dictionaries + for _, indexSQL := range createConfigDictionariesIndexes { + if _, err := a.DB.Exec(indexSQL); err != nil { + log.Printf("Warning: Failed to create config_dictionaries index: %v", err) + } + } + + return nil +} + +func (a *App) initPlayLifeDB() error { + // Создаем таблицу projects + createProjectsTable := ` + CREATE TABLE IF NOT EXISTS projects ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + priority SMALLINT, + CONSTRAINT unique_project_name UNIQUE (name) + ) + ` + + // Создаем таблицу entries + createEntriesTable := ` + CREATE TABLE IF NOT EXISTS entries ( + id SERIAL PRIMARY KEY, + text TEXT NOT NULL, + created_date TIMESTAMP 
WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP + ) + ` + + // Создаем таблицу nodes + createNodesTable := ` + CREATE TABLE IF NOT EXISTS nodes ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + entry_id INTEGER NOT NULL REFERENCES entries(id) ON DELETE CASCADE, + score NUMERIC(8,4) + ) + ` + + // Создаем индексы для nodes + createNodesIndexes := []string{ + `CREATE INDEX IF NOT EXISTS idx_nodes_project_id ON nodes(project_id)`, + `CREATE INDEX IF NOT EXISTS idx_nodes_entry_id ON nodes(entry_id)`, + } + + // Создаем таблицу weekly_goals + createWeeklyGoalsTable := ` + CREATE TABLE IF NOT EXISTS weekly_goals ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + goal_year INTEGER NOT NULL, + goal_week INTEGER NOT NULL, + min_goal_score NUMERIC(10,4) NOT NULL DEFAULT 0, + max_goal_score NUMERIC(10,4), + actual_score NUMERIC(10,4) DEFAULT 0, + priority SMALLINT, + CONSTRAINT weekly_goals_project_id_goal_year_goal_week_key UNIQUE (project_id, goal_year, goal_week) + ) + ` + + // Создаем индекс для weekly_goals + createWeeklyGoalsIndex := ` + CREATE INDEX IF NOT EXISTS idx_weekly_goals_project_id ON weekly_goals(project_id) + ` + + // Выполняем создание таблиц + if _, err := a.DB.Exec(createProjectsTable); err != nil { + return fmt.Errorf("failed to create projects table: %w", err) + } + + if _, err := a.DB.Exec(createEntriesTable); err != nil { + return fmt.Errorf("failed to create entries table: %w", err) + } + + if _, err := a.DB.Exec(createNodesTable); err != nil { + return fmt.Errorf("failed to create nodes table: %w", err) + } + + for _, indexSQL := range createNodesIndexes { + if _, err := a.DB.Exec(indexSQL); err != nil { + log.Printf("Warning: Failed to create index: %v", err) + } + } + + if _, err := a.DB.Exec(createWeeklyGoalsTable); err != nil { + return fmt.Errorf("failed to create weekly_goals table: %w", err) + } + + if _, err := a.DB.Exec(createWeeklyGoalsIndex); err != nil { + log.Printf("Warning: Failed to create weekly_goals index: %v", err) + } + + // Создаем materialized view (может потребоваться удаление старого, если он существует) + dropMaterializedView := `DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv` + a.DB.Exec(dropMaterializedView) // Игнорируем ошибку, если view не существует + + createMaterializedView := ` + CREATE MATERIALIZED VIEW weekly_report_mv AS + SELECT + p.id AS project_id, + agg.report_year, + agg.report_week, + COALESCE(agg.total_score, 0.0000) AS total_score + FROM + projects p + LEFT JOIN + ( + SELECT + n.project_id, + EXTRACT(YEAR FROM e.created_date)::INTEGER AS report_year, + EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week, + SUM(n.score) AS total_score + FROM + nodes n + JOIN + entries e ON n.entry_id = e.id + GROUP BY + 1, 2, 3 + ) agg + ON p.id = agg.project_id + ORDER BY + p.id, agg.report_year, agg.report_week + ` + + if _, err := a.DB.Exec(createMaterializedView); err != nil { + return fmt.Errorf("failed to create weekly_report_mv: %w", err) + } + + // Создаем индекс для materialized view + createMVIndex := ` + CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week + ON weekly_report_mv(project_id, report_year, report_week) + ` + if _, err := a.DB.Exec(createMVIndex); err != nil { + log.Printf("Warning: Failed to create materialized view index: %v", err) + } + + return nil +} + +// startWeeklyGoalsScheduler запускает планировщик для автоматической фиксации целей на неделю +// каждый понедельник в 6:00 утра в указанном 
часовом поясе +func (a *App) startWeeklyGoalsScheduler() { + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + + // Загружаем часовой пояс + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. Using UTC instead.", timezoneStr, err) + loc = time.UTC + } else { + log.Printf("Scheduler timezone set to: %s", timezoneStr) + } + + // Создаем планировщик с указанным часовым поясом + c := cron.New(cron.WithLocation(loc)) + + // Добавляем задачу: каждый понедельник в 6:00 утра + // Cron выражение: "0 6 * * 1" означает: минута=0, час=6, любой день месяца, любой месяц, понедельник (1) + _, err = c.AddFunc("0 6 * * 1", func() { + log.Printf("Scheduled task: Setting up weekly goals (timezone: %s)", timezoneStr) + if err := a.setupWeeklyGoals(); err != nil { + log.Printf("Error in scheduled weekly goals setup: %v", err) + } + }) + + if err != nil { + log.Printf("Error adding cron job for weekly goals: %v", err) + return + } + + // Запускаем планировщик + c.Start() + log.Println("Weekly goals scheduler started: every Monday at 6:00 AM") + + // Планировщик будет работать в фоновом режиме +} + +// getWeeklyStatsData получает данные о проектах и их целях (без HTTP обработки) +func (a *App) getWeeklyStatsData() (*WeeklyStatsResponse, error) { + // Обновляем materialized view перед запросом + _, err := a.DB.Exec("REFRESH MATERIALIZED VIEW weekly_report_mv") + if err != nil { + log.Printf("Warning: Failed to refresh materialized view: %v", err) + // Продолжаем выполнение даже если обновление не удалось + } + + query := ` + SELECT + p.name AS project_name, + -- Используем COALESCE для установки total_score в 0.0000, если нет данных в weekly_report_mv + COALESCE(wr.total_score, 0.0000) AS total_score, + wg.min_goal_score, + wg.max_goal_score, + wg.priority + FROM + weekly_goals wg + JOIN + projects p ON wg.project_id = p.id + LEFT JOIN + weekly_report_mv wr + ON wg.project_id = wr.project_id + AND wg.goal_year = wr.report_year + AND wg.goal_week = wr.report_week + WHERE + -- Фильтруем ТОЛЬКО по целям текущего года и недели + wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + ORDER BY + total_score DESC + ` + + rows, err := a.DB.Query(query) + if err != nil { + log.Printf("Error querying weekly stats: %v", err) + return nil, fmt.Errorf("error querying weekly stats: %w", err) + } + defer rows.Close() + + projects := make([]WeeklyProjectStats, 0) + // Группы для расчета среднего по priority + groups := make(map[int][]float64) + + for rows.Next() { + var project WeeklyProjectStats + var maxGoalScore sql.NullFloat64 + var priority sql.NullInt64 + + err := rows.Scan( + &project.ProjectName, + &project.TotalScore, + &project.MinGoalScore, + &maxGoalScore, + &priority, + ) + if err != nil { + log.Printf("Error scanning weekly stats row: %v", err) + return nil, fmt.Errorf("error scanning weekly stats row: %w", err) + } + + if maxGoalScore.Valid { + maxGoalVal := maxGoalScore.Float64 + project.MaxGoalScore = &maxGoalVal + } + + var priorityVal int + if priority.Valid { + priorityVal = int(priority.Int64) + project.Priority = &priorityVal + } + + // Расчет calculated_score по формуле из n8n + totalScore := project.TotalScore + minGoalScore := project.MinGoalScore + var maxGoalScoreVal float64 + if project.MaxGoalScore != nil { + maxGoalScoreVal = *project.MaxGoalScore + } + + // Параметры бонуса в зависимости от priority + var 
extraBonusLimit float64 = 20 + if priorityVal == 1 { + extraBonusLimit = 50 + } else if priorityVal == 2 { + extraBonusLimit = 35 + } + + // Расчет базового прогресса + var baseProgress float64 + if minGoalScore > 0 { + baseProgress = (min(totalScore, minGoalScore) / minGoalScore) * 100.0 + } + + // Расчет экстра прогресса + var extraProgress float64 + denominator := maxGoalScoreVal - minGoalScore + if denominator > 0 && totalScore > minGoalScore { + excess := min(totalScore, maxGoalScoreVal) - minGoalScore + extraProgress = (excess / denominator) * extraBonusLimit + } + + resultScore := baseProgress + extraProgress + project.CalculatedScore = roundToTwoDecimals(resultScore) + + // Группировка для итогового расчета + if _, exists := groups[priorityVal]; !exists { + groups[priorityVal] = make([]float64, 0) + } + groups[priorityVal] = append(groups[priorityVal], project.CalculatedScore) + + projects = append(projects, project) + } + + // Находим среднее внутри каждой группы + groupAverages := make([]float64, 0) + for _, scores := range groups { + if len(scores) > 0 { + sum := 0.0 + for _, score := range scores { + sum += score + } + avg := sum / float64(len(scores)) + groupAverages = append(groupAverages, avg) + } + } + + // Находим среднее между всеми группами + var total *float64 + if len(groupAverages) > 0 { + sum := 0.0 + for _, avg := range groupAverages { + sum += avg + } + overallProgress := sum / float64(len(groupAverages)) + overallProgressRounded := roundToFourDecimals(overallProgress) + total = &overallProgressRounded + } + + response := WeeklyStatsResponse{ + Total: total, + Projects: projects, + } + + return &response, nil +} + +// formatDailyReport форматирует данные проектов в сообщение для Telegram +// Формат аналогичен JS коду из n8n +func (a *App) formatDailyReport(data *WeeklyStatsResponse) string { + if data == nil || len(data.Projects) == 0 { + return "" + } + + // Заголовок сообщения + markdownMessage := "*📈 Отчет по Score и Целям за текущую неделю:*\n\n" + + // Простой вывод списка проектов + for _, item := range data.Projects { + projectName := item.ProjectName + if projectName == "" { + projectName = "Без названия" + } + + actualScore := item.TotalScore + minGoal := item.MinGoalScore + var maxGoal float64 + hasMaxGoal := false + if item.MaxGoalScore != nil { + maxGoal = *item.MaxGoalScore + hasMaxGoal = true + } + + // Форматирование Score (+/-) + scoreFormatted := "" + if actualScore >= 0 { + scoreFormatted = fmt.Sprintf("+%.2f", actualScore) + } else { + scoreFormatted = fmt.Sprintf("%.2f", actualScore) + } + + // Форматирование текста целей + // Проверяем, что minGoal валиден (не NaN, как в JS коде: !isNaN(minGoal)) + goalText := "" + if !math.IsNaN(minGoal) { + if hasMaxGoal && !math.IsNaN(maxGoal) { + goalText = fmt.Sprintf(" (Цель: %.1f–%.1f)", minGoal, maxGoal) + } else { + goalText = fmt.Sprintf(" (Цель: мин. 
%.1f)", minGoal) + } + } + + // Собираем строку: Проект: +Score (Цели) + markdownMessage += fmt.Sprintf("*%s*: %s%s\n", projectName, scoreFormatted, goalText) + } + + // Выводим итоговый total из корня JSON + if data.Total != nil { + markdownMessage += "\n---\n" + markdownMessage += fmt.Sprintf("*Общее выполнение целей*: %.1f%%", *data.Total) + } + + return markdownMessage +} + +// sendDailyReport получает данные, форматирует и отправляет отчет в Telegram +func (a *App) sendDailyReport() error { + log.Printf("Scheduled task: Sending daily report") + + // Получаем данные + data, err := a.getWeeklyStatsData() + if err != nil { + log.Printf("Error getting weekly stats data: %v", err) + return fmt.Errorf("error getting weekly stats data: %w", err) + } + + // Форматируем сообщение + message := a.formatDailyReport(data) + if message == "" { + log.Println("No data to send in daily report") + return nil + } + + // Отправляем сообщение в Telegram (без попытки разбирать на nodes) + a.sendTelegramMessage(message) + return nil +} + +// startDailyReportScheduler запускает планировщик для ежедневного отчета +// каждый день в 11:59 в указанном часовом поясе +func (a *App) startDailyReportScheduler() { + // Получаем часовой пояс из переменной окружения (по умолчанию UTC) + timezoneStr := getEnv("TIMEZONE", "UTC") + + // Загружаем часовой пояс + loc, err := time.LoadLocation(timezoneStr) + if err != nil { + log.Printf("Warning: Invalid timezone '%s': %v. Using UTC instead.", timezoneStr, err) + loc = time.UTC + } else { + log.Printf("Daily report scheduler timezone set to: %s", timezoneStr) + } + + // Создаем планировщик с указанным часовым поясом + c := cron.New(cron.WithLocation(loc)) + + // Добавляем задачу: каждый день в 11:59 + // Cron выражение: "59 11 * * *" означает: минута=59, час=11, любой день месяца, любой месяц, любой день недели + _, err = c.AddFunc("59 11 * * *", func() { + log.Printf("Scheduled task: Sending daily report (timezone: %s)", timezoneStr) + if err := a.sendDailyReport(); err != nil { + log.Printf("Error in scheduled daily report: %v", err) + } + }) + + if err != nil { + log.Printf("Error adding cron job for daily report: %v", err) + return + } + + // Запускаем планировщик + c.Start() + log.Println("Daily report scheduler started: every day at 11:59 AM") + + // Планировщик будет работать в фоновом режиме +} + +func main() { + // Загружаем переменные окружения из .env файла (если существует) + // Сначала пробуем загрузить из корня проекта, затем из текущей директории + // Игнорируем ошибку, если файл не найден + godotenv.Load("../.env") // Пробуем корневой .env + godotenv.Load(".env") // Пробуем локальный .env + + dbHost := getEnv("DB_HOST", "localhost") + dbPort := getEnv("DB_PORT", "5432") + dbUser := getEnv("DB_USER", "playeng") + dbPassword := getEnv("DB_PASSWORD", "playeng") + dbName := getEnv("DB_NAME", "playeng") + + // Логируем параметры подключения к БД (без пароля) + log.Printf("Database connection parameters: host=%s port=%s user=%s dbname=%s", dbHost, dbPort, dbUser, dbName) + + dsn := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable", + dbHost, dbPort, dbUser, dbPassword, dbName) + + var db *sql.DB + var err error + + // Retry connection + for i := 0; i < 10; i++ { + db, err = sql.Open("postgres", dsn) + if err == nil { + err = db.Ping() + if err == nil { + break + } + } + if i < 9 { + time.Sleep(2 * time.Second) + } + } + + if err != nil { + log.Fatal("Failed to connect to database:", err) + } + log.Printf("Successfully connected to 
database: %s@%s:%s/%s", dbUser, dbHost, dbPort, dbName) + defer db.Close() + + // Инициализируем Telegram бота (если токен указан) + var telegramBot *tgbotapi.BotAPI + var telegramChatID int64 + telegramToken := getEnv("TELEGRAM_BOT_TOKEN", "") + telegramChatIDStr := getEnv("TELEGRAM_CHAT_ID", "") + telegramWebhookBaseURL := getEnv("TELEGRAM_WEBHOOK_BASE_URL", "") + + if telegramToken != "" && telegramChatIDStr != "" { + bot, err := tgbotapi.NewBotAPI(telegramToken) + if err != nil { + log.Printf("Warning: Failed to initialize Telegram bot: %v. Telegram notifications will be disabled.", err) + } else { + telegramBot = bot + chatID, err := strconv.ParseInt(telegramChatIDStr, 10, 64) + if err != nil { + log.Printf("Warning: Invalid TELEGRAM_CHAT_ID format: %v. Telegram notifications will be disabled.", err) + telegramBot = nil + } else { + telegramChatID = chatID + log.Printf("Telegram bot initialized successfully. Chat ID: %d", telegramChatID) + } + } + } else { + log.Println("Telegram bot token or chat ID not provided. Telegram notifications disabled.") + } + + // Настраиваем webhook для Telegram (если указан base URL) + if telegramToken != "" && telegramWebhookBaseURL != "" { + webhookURL := strings.TrimRight(telegramWebhookBaseURL, "/") + "/webhook/telegram" + if err := setupTelegramWebhook(telegramToken, webhookURL); err != nil { + log.Printf("Warning: Failed to setup Telegram webhook: %v. Webhook will not be configured.", err) + } else { + log.Printf("Telegram webhook configured successfully: %s", webhookURL) + } + } else if telegramToken != "" { + log.Println("TELEGRAM_WEBHOOK_BASE_URL not provided. Telegram webhook will not be configured automatically.") + } + + app := &App{ + DB: db, + lastWebhookTime: make(map[int]time.Time), + telegramBot: telegramBot, + telegramChatID: telegramChatID, + } + + // Инициализируем БД для play-life проекта + if err := app.initPlayLifeDB(); err != nil { + log.Fatal("Failed to initialize play-life database:", err) + } + log.Println("Play-life database initialized successfully") + + // Инициализируем БД для слов, словарей и конфигураций + if err := app.initDB(); err != nil { + log.Fatal("Failed to initialize words/dictionaries database:", err) + } + log.Println("Words/dictionaries database initialized successfully") + + // Запускаем планировщик для автоматической фиксации целей на неделю + app.startWeeklyGoalsScheduler() + + // Запускаем планировщик для ежедневного отчета в 11:59 + app.startDailyReportScheduler() + + r := mux.NewRouter() + r.HandleFunc("/api/words", app.getWordsHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/api/words", app.addWordsHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/api/test/words", app.getTestWordsHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/api/test/progress", app.updateTestProgressHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/api/configs", app.getConfigsHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/api/configs", app.addConfigHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/api/configs/{id}", app.updateConfigHandler).Methods("PUT", "OPTIONS") + r.HandleFunc("/api/configs/{id}", app.deleteConfigHandler).Methods("DELETE", "OPTIONS") + r.HandleFunc("/api/configs/{id}/dictionaries", app.getConfigDictionariesHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/api/dictionaries", app.getDictionariesHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/api/dictionaries", app.addDictionaryHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/api/dictionaries/{id}", 
app.updateDictionaryHandler).Methods("PUT", "OPTIONS") + r.HandleFunc("/api/dictionaries/{id}", app.deleteDictionaryHandler).Methods("DELETE", "OPTIONS") + r.HandleFunc("/api/test-configs-and-dictionaries", app.getTestConfigsAndDictionariesHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/api/weekly-stats", app.getWeeklyStatsHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/playlife-feed", app.getWeeklyStatsHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/message/post", app.messagePostHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/webhook/message/post", app.messagePostHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/webhook/todoist", app.todoistWebhookHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/webhook/telegram", app.telegramWebhookHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/weekly_goals/setup", app.weeklyGoalsSetupHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/daily-report/trigger", app.dailyReportTriggerHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/projects", app.getProjectsHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/project/priority", app.setProjectPriorityHandler).Methods("POST", "OPTIONS") + r.HandleFunc("/d2dc349a-0d13-49b2-a8f0-1ab094bfba9b", app.getFullStatisticsHandler).Methods("GET", "OPTIONS") + r.HandleFunc("/admin", app.adminHandler).Methods("GET") + r.HandleFunc("/admin.html", app.adminHandler).Methods("GET") + + port := getEnv("PORT", "8080") + log.Printf("Server starting on port %s", port) + log.Printf("Registered routes: /api/words (GET, POST), /api/test/words (GET), /api/test/progress (POST), /api/configs (GET, POST, PUT, DELETE), /api/dictionaries (GET, POST, PUT, DELETE), /api/test-configs-and-dictionaries (GET), /api/weekly-stats (GET), /playlife-feed (GET), /message/post (POST), /webhook/message/post (POST), /webhook/todoist (POST), /webhook/telegram (POST), /weekly_goals/setup (POST), /daily-report/trigger (POST), /projects (GET), /project/priority (POST), /d2dc349a-0d13-49b2-a8f0-1ab094bfba9b (GET), /admin (GET)") + log.Printf("Admin panel available at: http://localhost:%s/admin.html", port) + log.Fatal(http.ListenAndServe(":"+port, r)) +} + +func getEnv(key, defaultValue string) string { + if value := os.Getenv(key); value != "" { + return value + } + return defaultValue +} + +// getMapKeys возвращает список ключей из map +func getMapKeys(m map[string]interface{}) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + return keys +} + +// setupTelegramWebhook настраивает webhook для Telegram бота +func setupTelegramWebhook(botToken, webhookURL string) error { + apiURL := fmt.Sprintf("https://api.telegram.org/bot%s/setWebhook", botToken) + + payload := map[string]string{ + "url": webhookURL, + } + + jsonData, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("failed to marshal webhook payload: %w", err) + } + + // Создаем HTTP клиент с таймаутом + client := &http.Client{ + Timeout: 10 * time.Second, + } + + resp, err := client.Post(apiURL, "application/json", bytes.NewBuffer(jsonData)) + if err != nil { + return fmt.Errorf("failed to send webhook setup request: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + bodyBytes, _ := io.ReadAll(resp.Body) + return fmt.Errorf("telegram API returned status %d: %s", resp.StatusCode, string(bodyBytes)) + } + + var result map[string]interface{} + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return fmt.Errorf("failed to decode response: %w", err) + } + + if 
ok, _ := result["ok"].(bool); !ok { + description, _ := result["description"].(string) + return fmt.Errorf("telegram API returned error: %s", description) + } + + return nil +} + +// Вспомогательные функции для расчетов +func min(a, b float64) float64 { + if a < b { + return a + } + return b +} + +func max(a, b float64) float64 { + if a > b { + return a + } + return b +} + +func roundToTwoDecimals(val float64) float64 { + return float64(int(val*100+0.5)) / 100.0 +} + +func roundToFourDecimals(val float64) float64 { + return float64(int(val*10000+0.5)) / 10000.0 +} + +func (a *App) sendTelegramMessage(text string) { + log.Printf("sendTelegramMessage called with text length: %d", len(text)) + log.Printf("Telegram bot status: bot=%v, chatID=%d", a.telegramBot != nil, a.telegramChatID) + + if a.telegramBot == nil || a.telegramChatID == 0 { + // Telegram не настроен, пропускаем отправку + log.Printf("WARNING: Telegram bot not initialized (bot=%v, chatID=%d), skipping message send", a.telegramBot != nil, a.telegramChatID) + return + } + + // Конвертируем **текст** в *текст* для Markdown (Legacy) + // Markdown (Legacy) использует одинарную звездочку для жирного текста + // Используем регулярное выражение для замены только парных ** + telegramText := regexp.MustCompile(`\*\*([^*]+)\*\*`).ReplaceAllString(text, "*$1*") + log.Printf("Sending Telegram message (converted text length: %d): %s", len(telegramText), telegramText) + + msg := tgbotapi.NewMessage(a.telegramChatID, telegramText) + msg.ParseMode = "Markdown" // Markdown (Legacy) format + + _, err := a.telegramBot.Send(msg) + if err != nil { + log.Printf("ERROR sending Telegram message: %v", err) + } else { + log.Printf("Telegram message sent successfully to chat ID %d", a.telegramChatID) + } +} + +// utf16OffsetToUTF8 конвертирует UTF-16 offset в UTF-8 byte offset +func utf16OffsetToUTF8(text string, utf16Offset int) int { + utf16Runes := utf16.Encode([]rune(text)) + if utf16Offset >= len(utf16Runes) { + return len(text) + } + + // Конвертируем UTF-16 кодовые единицы обратно в UTF-8 байты + runes := utf16.Decode(utf16Runes[:utf16Offset]) + return len(string(runes)) +} + +// utf16LengthToUTF8 конвертирует UTF-16 length в UTF-8 byte length +func utf16LengthToUTF8(text string, utf16Offset, utf16Length int) int { + utf16Runes := utf16.Encode([]rune(text)) + if utf16Offset+utf16Length > len(utf16Runes) { + utf16Length = len(utf16Runes) - utf16Offset + } + if utf16Length <= 0 { + return 0 + } + + // Конвертируем UTF-16 кодовые единицы в UTF-8 байты + startRunes := utf16.Decode(utf16Runes[:utf16Offset]) + endRunes := utf16.Decode(utf16Runes[:utf16Offset+utf16Length]) + + startBytes := len(string(startRunes)) + endBytes := len(string(endRunes)) + + return endBytes - startBytes +} + +// processTelegramMessage обрабатывает сообщение из Telegram с использованием entities +// Логика отличается от processMessage: использует entities для определения жирного текста +// и не отправляет сообщение обратно в Telegram +func (a *App) processTelegramMessage(fullText string, entities []TelegramEntity) (*ProcessedEntry, error) { + fullText = strings.TrimSpace(fullText) + + // Регулярное выражение: project+/-score (без **) + scoreRegex := regexp.MustCompile(`^([а-яА-ЯёЁ\w]+)([+-])(\d+(?:\.\d+)?)$`) + + // Массив для хранения извлеченных элементов {project, score} + scoreNodes := make([]ProcessedNode, 0) + workingText := fullText + placeholderIndex := 0 + + // Находим все элементы, выделенные жирным шрифтом + boldEntities := make([]TelegramEntity, 0) + for _, entity := 
range entities { + if entity.Type == "bold" { + boldEntities = append(boldEntities, entity) + } + } + + // Сортируем в ПРЯМОМ порядке (по offset), чтобы гарантировать, что ${0} соответствует первому в тексте + sort.Slice(boldEntities, func(i, j int) bool { + return boldEntities[i].Offset < boldEntities[j].Offset + }) + + // Массив для хранения данных, которые будут использоваться для замены в обратном порядке + type ReplacementData struct { + Start int + Length int + Placeholder string + } + replacementData := make([]ReplacementData, 0) + + for _, entity := range boldEntities { + // Telegram использует UTF-16 для offset и length, конвертируем в UTF-8 байты + start := utf16OffsetToUTF8(fullText, entity.Offset) + length := utf16LengthToUTF8(fullText, entity.Offset, entity.Length) + + // Извлекаем чистый жирный текст + if start+length > len(fullText) { + continue // Пропускаем некорректные entities + } + boldText := strings.TrimSpace(fullText[start : start+length]) + + // Проверяем соответствие формату + match := scoreRegex.FindStringSubmatch(boldText) + + if match != nil && len(match) == 4 { + // Создаем элемент node + project := match[1] + sign := match[2] + rawScore, err := strconv.ParseFloat(match[3], 64) + if err != nil { + log.Printf("Error parsing score: %v", err) + continue + } + score := rawScore + if sign == "-" { + score = -rawScore + } + + // Добавляем в массив nodes (по порядку) + scoreNodes = append(scoreNodes, ProcessedNode{ + Project: project, + Score: score, + }) + + // Создаем данные для замены + replacementData = append(replacementData, ReplacementData{ + Start: start, + Length: length, + Placeholder: fmt.Sprintf("${%d}", placeholderIndex), + }) + + placeholderIndex++ + } + } + + // Теперь выполняем замены в ОБРАТНОМ порядке, чтобы offset не "смещались" + sort.Slice(replacementData, func(i, j int) bool { + return replacementData[i].Start > replacementData[j].Start + }) + + for _, item := range replacementData { + // Заменяем сегмент в workingText, используя оригинальные offset и length + if item.Start+item.Length <= len(workingText) { + workingText = workingText[:item.Start] + item.Placeholder + workingText[item.Start+item.Length:] + } + } + + // Удаляем пустые строки и лишние пробелы + lines := strings.Split(workingText, "\n") + cleanedLines := make([]string, 0) + for _, line := range lines { + trimmed := strings.TrimSpace(line) + if trimmed != "" { + cleanedLines = append(cleanedLines, trimmed) + } + } + processedText := strings.Join(cleanedLines, "\n") + + // Используем текущее время в формате ISO 8601 (UTC) + createdDate := time.Now().UTC().Format(time.RFC3339) + + // Вставляем данные в БД только если есть nodes + if len(scoreNodes) > 0 { + err := a.insertMessageData(processedText, createdDate, scoreNodes) + if err != nil { + log.Printf("Error inserting message data: %v", err) + return nil, fmt.Errorf("error inserting data: %w", err) + } + } else { + // Если nodes нет, используем исходный текст для processedText + processedText = fullText + log.Printf("No nodes found in Telegram message, message will not be saved to database") + } + + // Формируем ответ + response := &ProcessedEntry{ + Text: processedText, + CreatedDate: createdDate, + Nodes: scoreNodes, + Raw: fullText, + Markdown: fullText, // Для Telegram markdown не нужен + } + + // НЕ отправляем сообщение обратно в Telegram (в отличие от processMessage) + + return response, nil +} + +// processMessage обрабатывает текст сообщения: парсит ноды, сохраняет в БД и отправляет в Telegram +func (a *App) 
processMessage(rawText string) (*ProcessedEntry, error) { + return a.processMessageInternal(rawText, true) +} + +// processMessageWithoutTelegram обрабатывает текст сообщения: парсит ноды, сохраняет в БД, но НЕ отправляет в Telegram +func (a *App) processMessageWithoutTelegram(rawText string) (*ProcessedEntry, error) { + return a.processMessageInternal(rawText, false) +} + +// processMessageInternal - внутренняя функция обработки сообщения +// sendToTelegram определяет, нужно ли отправлять сообщение в Telegram +func (a *App) processMessageInternal(rawText string, sendToTelegram bool) (*ProcessedEntry, error) { + rawText = strings.TrimSpace(rawText) + + // Регулярное выражение для поиска **[Project][+| -][Score]** + regex := regexp.MustCompile(`\*\*(.+?)([+-])([\d.]+)\*\*`) + + nodes := make([]ProcessedNode, 0) + nodeCounter := 0 + + // Ищем все node и заменяем их в тексте на плейсхолдеры ${0}, ${1} и т.д. + processedText := regex.ReplaceAllStringFunc(rawText, func(fullMatch string) string { + matches := regex.FindStringSubmatch(fullMatch) + if len(matches) != 4 { + return fullMatch + } + + projectName := strings.TrimSpace(matches[1]) + sign := matches[2] + scoreString := matches[3] + + score, err := strconv.ParseFloat(scoreString, 64) + if err != nil { + log.Printf("Error parsing score: %v", err) + return fullMatch + } + + if sign == "-" { + score = -score + } + + // Добавляем данные в массив nodes + nodes = append(nodes, ProcessedNode{ + Project: projectName, + Score: score, + }) + + placeholder := fmt.Sprintf("${%d}", nodeCounter) + nodeCounter++ + return placeholder + }) + + // Удаляем пустые строки и лишние пробелы + lines := strings.Split(processedText, "\n") + cleanedLines := make([]string, 0) + for _, line := range lines { + trimmed := strings.TrimSpace(line) + if trimmed != "" { + cleanedLines = append(cleanedLines, trimmed) + } + } + processedText = strings.Join(cleanedLines, "\n") + + // Формируем Markdown (Legacy) контент: заменяем ** на * + markdownText := strings.ReplaceAll(rawText, "**", "*") + + // Используем текущее время + createdDate := time.Now().UTC().Format(time.RFC3339) + + // Вставляем данные в БД только если есть nodes + if len(nodes) > 0 { + err := a.insertMessageData(processedText, createdDate, nodes) + if err != nil { + log.Printf("Error inserting message data: %v", err) + return nil, fmt.Errorf("error inserting data: %w", err) + } + } else { + // Если nodes нет, используем исходный текст для processedText + processedText = rawText + if sendToTelegram { + log.Printf("No nodes found in text, message will be sent to Telegram but not saved to database") + } else { + log.Printf("No nodes found in text, message will be ignored (not saved to database and not sent to Telegram)") + } + } + + // Формируем ответ + response := &ProcessedEntry{ + Text: processedText, + CreatedDate: createdDate, + Nodes: nodes, + Raw: rawText, + Markdown: markdownText, + } + + // Отправляем дублирующее сообщение в Telegram только если указано + if sendToTelegram { + a.sendTelegramMessage(rawText) + } + + return response, nil +} + +func (a *App) messagePostHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + // Парсим входящий запрос - может быть как {body: {text: ...}}, так и {text: ...} + var rawReq map[string]interface{} + if err := json.NewDecoder(r.Body).Decode(&rawReq); err != nil { + log.Printf("Error decoding message post request: %v", err) + sendErrorWithCORS(w, "Invalid 
request body", http.StatusBadRequest) + return + } + + // Извлекаем text из разных возможных структур + var rawText string + if body, ok := rawReq["body"].(map[string]interface{}); ok { + if text, ok := body["text"].(string); ok { + rawText = text + } + } + + // Если не нашли в body, пробуем напрямую + if rawText == "" { + if text, ok := rawReq["text"].(string); ok { + rawText = text + } + } + + // Проверка на наличие нужного поля + if rawText == "" { + sendErrorWithCORS(w, "Missing 'text' field in body", http.StatusBadRequest) + return + } + + // Обрабатываем сообщение + response, err := a.processMessage(rawText) + if err != nil { + log.Printf("Error processing message: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) +} + +func (a *App) insertMessageData(entryText string, createdDate string, nodes []ProcessedNode) error { + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + return fmt.Errorf("failed to begin transaction: %w", err) + } + defer tx.Rollback() + + // 1. UPSERT проектов + projectNames := make(map[string]bool) + for _, node := range nodes { + projectNames[node.Project] = true + } + + // Вставляем проекты + for projectName := range projectNames { + _, err := tx.Exec(` + INSERT INTO projects (name) + VALUES ($1) + ON CONFLICT (name) DO UPDATE + SET name = EXCLUDED.name + `, projectName) + if err != nil { + return fmt.Errorf("failed to upsert project %s: %w", projectName, err) + } + } + + // 2. Вставляем entry + var entryID int + err = tx.QueryRow(` + INSERT INTO entries (text, created_date) + VALUES ($1, $2) + RETURNING id + `, entryText, createdDate).Scan(&entryID) + if err != nil { + return fmt.Errorf("failed to insert entry: %w", err) + } + + // 3. Вставляем nodes + for _, node := range nodes { + _, err := tx.Exec(` + INSERT INTO nodes (project_id, entry_id, score) + SELECT p.id, $1, $2 + FROM projects p + WHERE p.name = $3 + `, entryID, node.Score, node.Project) + if err != nil { + return fmt.Errorf("failed to insert node for project %s: %w", node.Project, err) + } + } + + // Обновляем materialized view после вставки данных + _, err = tx.Exec("REFRESH MATERIALIZED VIEW weekly_report_mv") + if err != nil { + log.Printf("Warning: Failed to refresh materialized view: %v", err) + // Не возвращаем ошибку, так как это не критично + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + return fmt.Errorf("failed to commit transaction: %w", err) + } + + return nil +} + +// setupWeeklyGoals выполняет установку целей на неделю (без HTTP обработки) +func (a *App) setupWeeklyGoals() error { + // 1. 
Выполняем SQL запрос для установки целей + setupQuery := ` + WITH current_info AS ( + -- Сегодня это будет 2026 год / 1 неделя + SELECT + EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER AS c_year, + EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER AS c_week + ), + goal_metrics AS ( + -- Считаем медиану на основе последних 12 записей из вьюхи + SELECT + project_id, + PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY total_score) AS median_score + FROM ( + SELECT + project_id, + total_score, + -- Нумеруем недели от новых к старым + ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY report_year DESC, report_week DESC) as rn + FROM weekly_report_mv + ) sub + WHERE rn <= 12 -- Берем историю за последние 12 недель активности + GROUP BY project_id + ) + INSERT INTO weekly_goals ( + project_id, + goal_year, + goal_week, + min_goal_score, + max_goal_score, + priority + ) + SELECT + p.id, + ci.c_year, + ci.c_week, + COALESCE(gm.median_score, 0) AS min_goal_score, + -- Логика max_score в зависимости от приоритета + CASE + WHEN p.priority = 1 THEN COALESCE(gm.median_score, 0) * 1.5 + WHEN p.priority = 2 THEN COALESCE(gm.median_score, 0) * 1.3 + ELSE COALESCE(gm.median_score, 0) * 1.2 + END + (CASE WHEN COALESCE(gm.median_score, 0) = 0 THEN 10 ELSE 0 END) AS max_goal_score, + p.priority + FROM projects p + CROSS JOIN current_info ci + LEFT JOIN goal_metrics gm ON p.id = gm.project_id + ON CONFLICT (project_id, goal_year, goal_week) DO UPDATE + SET + min_goal_score = EXCLUDED.min_goal_score, + max_goal_score = EXCLUDED.max_goal_score, + priority = EXCLUDED.priority + ` + + _, err := a.DB.Exec(setupQuery) + if err != nil { + log.Printf("Error setting up weekly goals: %v", err) + return fmt.Errorf("error setting up weekly goals: %w", err) + } + + log.Println("Weekly goals setup completed successfully") + + // Отправляем сообщение в Telegram с зафиксированными целями + if err := a.sendWeeklyGoalsTelegramMessage(); err != nil { + log.Printf("Error sending weekly goals Telegram message: %v", err) + // Не возвращаем ошибку, так как фиксация целей уже выполнена успешно + } + + return nil +} + +// sendWeeklyGoalsTelegramMessage получает зафиксированные цели и отправляет их в Telegram +func (a *App) sendWeeklyGoalsTelegramMessage() error { + // Получаем цели из базы данных + selectQuery := ` + SELECT + p.name AS project_name, + wg.min_goal_score, + wg.max_goal_score + FROM + weekly_goals wg + JOIN + projects p ON wg.project_id = p.id + WHERE + wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + ORDER BY + p.name + ` + + rows, err := a.DB.Query(selectQuery) + if err != nil { + return fmt.Errorf("error querying weekly goals: %w", err) + } + defer rows.Close() + + goals := make([]WeeklyGoalSetup, 0) + for rows.Next() { + var goal WeeklyGoalSetup + var maxGoalScore sql.NullFloat64 + + err := rows.Scan( + &goal.ProjectName, + &goal.MinGoalScore, + &maxGoalScore, + ) + if err != nil { + log.Printf("Error scanning weekly goal row: %v", err) + continue + } + + if maxGoalScore.Valid { + goal.MaxGoalScore = maxGoalScore.Float64 + } else { + // Если maxGoalScore не установлен (NULL), используем NaN для корректной проверки в форматировании + goal.MaxGoalScore = math.NaN() + } + + goals = append(goals, goal) + } + + // Форматируем сообщение + message := a.formatWeeklyGoalsMessage(goals) + if message == "" { + log.Println("No goals to send in Telegram message") + return nil + } + + // Отправляем сообщение в Telegram + a.sendTelegramMessage(message) + return nil +} + +// 
formatWeeklyGoalsMessage форматирует список целей в сообщение для Telegram +// Формат аналогичен JS коду из n8n +func (a *App) formatWeeklyGoalsMessage(goals []WeeklyGoalSetup) string { + if len(goals) == 0 { + return "" + } + + // Заголовок сообщения: "Цели на неделю" + markdownMessage := "*🎯 Цели на неделю:*\n\n" + + // Обработка каждого проекта + for _, goal := range goals { + // Пропускаем проекты без названия + if goal.ProjectName == "" { + continue + } + + // Получаем и форматируем цели + minGoal := goal.MinGoalScore + maxGoal := goal.MaxGoalScore + + var goalText string + + // Форматируем текст цели, если они существуют + // Проверяем, что minGoal валиден (не NaN) + // В JS коде проверяется isNaN, поэтому проверяем только на NaN + if !math.IsNaN(minGoal) { + minGoalFormatted := fmt.Sprintf("%.2f", minGoal) + + // Формируем диапазон: [MIN] или [MIN - MAX] + // maxGoal должен быть валиден (не NaN) для отображения диапазона + if !math.IsNaN(maxGoal) { + maxGoalFormatted := fmt.Sprintf("%.2f", maxGoal) + // Формат: *Проект*: от 15.00 до 20.00 + goalText = fmt.Sprintf(" от %s до %s", minGoalFormatted, maxGoalFormatted) + } else { + // Формат: *Проект*: мин. 15.00 + goalText = fmt.Sprintf(" мин. %s", minGoalFormatted) + } + } else { + // Если minGoal не установлен (NaN), пропускаем вывод цели + continue + } + + // Форматирование строки для Markdown (Legacy): *Название*: Цель + markdownMessage += fmt.Sprintf("*%s*:%s\n", goal.ProjectName, goalText) + } + + return markdownMessage +} + +func (a *App) weeklyGoalsSetupHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + err := a.setupWeeklyGoals() + if err != nil { + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Получаем установленные цели для ответа + selectQuery := ` + SELECT + p.name AS project_name, + wg.min_goal_score, + wg.max_goal_score + FROM + weekly_goals wg + JOIN + projects p ON wg.project_id = p.id + WHERE + wg.goal_year = EXTRACT(ISOYEAR FROM CURRENT_DATE)::INTEGER + AND wg.goal_week = EXTRACT(WEEK FROM CURRENT_DATE)::INTEGER + ORDER BY + p.name + ` + + rows, err := a.DB.Query(selectQuery) + if err != nil { + log.Printf("Error querying weekly goals: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying weekly goals: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + goals := make([]WeeklyGoalSetup, 0) + for rows.Next() { + var goal WeeklyGoalSetup + var maxGoalScore sql.NullFloat64 + + err := rows.Scan( + &goal.ProjectName, + &goal.MinGoalScore, + &maxGoalScore, + ) + if err != nil { + log.Printf("Error scanning weekly goal row: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error scanning data: %v", err), http.StatusInternalServerError) + return + } + + if maxGoalScore.Valid { + goal.MaxGoalScore = maxGoalScore.Float64 + } else { + goal.MaxGoalScore = 0.0 + } + + goals = append(goals, goal) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(goals) +} + +// dailyReportTriggerHandler обрабатывает запрос на отправку ежедневного отчёта +func (a *App) dailyReportTriggerHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + log.Printf("Manual trigger: Sending daily report") + err := a.sendDailyReport() + if err != nil { + log.Printf("Error in manual daily report trigger: %v", err) + sendErrorWithCORS(w, 
err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{ + "message": "Daily report sent successfully", + }) +} + +func (a *App) adminHandler(w http.ResponseWriter, r *http.Request) { + // Пробуем найти файл admin.html в разных местах + var adminPath string + + // 1. Пробуем в текущей рабочей директории + if _, err := os.Stat("admin.html"); err == nil { + adminPath = "admin.html" + } else { + // 2. Пробуем в директории play-life-backend относительно текущей директории + adminPath = filepath.Join("play-life-backend", "admin.html") + if _, err := os.Stat(adminPath); err != nil { + // 3. Пробуем получить путь к исполняемому файлу и искать рядом + if execPath, err := os.Executable(); err == nil { + execDir := filepath.Dir(execPath) + adminPath = filepath.Join(execDir, "admin.html") + if _, err := os.Stat(adminPath); err != nil { + // 4. Последняя попытка - просто "admin.html" + adminPath = "admin.html" + } + } else { + adminPath = "admin.html" + } + } + } + + http.ServeFile(w, r, adminPath) +} + +func (a *App) getProjectsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + query := ` + SELECT + id AS project_id, + name AS project_name, + priority + FROM + projects + ORDER BY + priority ASC NULLS LAST, + project_name + ` + + rows, err := a.DB.Query(query) + if err != nil { + log.Printf("Error querying projects: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying projects: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + projects := make([]Project, 0) + for rows.Next() { + var project Project + var priority sql.NullInt64 + + err := rows.Scan( + &project.ProjectID, + &project.ProjectName, + &priority, + ) + if err != nil { + log.Printf("Error scanning project row: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error scanning data: %v", err), http.StatusInternalServerError) + return + } + + if priority.Valid { + priorityVal := int(priority.Int64) + project.Priority = &priorityVal + } + + projects = append(projects, project) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(projects) +} + +func (a *App) setProjectPriorityHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + // Читаем тело запроса один раз + bodyBytes, err := io.ReadAll(r.Body) + if err != nil { + log.Printf("Error reading request body: %v", err) + sendErrorWithCORS(w, "Error reading request body", http.StatusBadRequest) + return + } + defer r.Body.Close() + + // Парсим входящий запрос - может быть как {body: [...]}, так и просто массив + var projectsToUpdate []ProjectPriorityUpdate + + // Сначала пробуем декодировать как прямой массив + var directArray []interface{} + arrayErr := json.Unmarshal(bodyBytes, &directArray) + if arrayErr == nil && len(directArray) > 0 { + // Успешно декодировали как массив + log.Printf("Received direct array format with %d items", len(directArray)) + for _, item := range directArray { + if itemMap, ok := item.(map[string]interface{}); ok { + var project ProjectPriorityUpdate + + // Извлекаем id + if idVal, ok := itemMap["id"].(float64); ok { + project.ID = int(idVal) + } else if idVal, ok := itemMap["id"].(int); ok { + project.ID = idVal + } else { + log.Printf("Invalid id in request item: %v", itemMap) 
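+                    // Items without a numeric id are skipped; the rest of the batch is still applied.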
+ continue + } + + // Извлекаем priority (может быть null, undefined, или числом) + if priorityVal, ok := itemMap["priority"]; ok && priorityVal != nil { + // Проверяем, не является ли это строкой "null" + if strVal, ok := priorityVal.(string); ok && (strVal == "null" || strVal == "NULL") { + project.Priority = nil + } else if numVal, ok := priorityVal.(float64); ok { + priorityInt := int(numVal) + project.Priority = &priorityInt + } else if numVal, ok := priorityVal.(int); ok { + project.Priority = &numVal + } else { + project.Priority = nil + } + } else { + project.Priority = nil + } + + projectsToUpdate = append(projectsToUpdate, project) + } + } + } + + // Если не получилось как массив (ошибка декодирования), пробуем как объект с body + // НЕ пытаемся декодировать как объект, если массив декодировался успешно (даже если пустой) + if len(projectsToUpdate) == 0 && arrayErr != nil { + log.Printf("Failed to decode as array (error: %v), trying as object", arrayErr) + var rawReq map[string]interface{} + if err := json.Unmarshal(bodyBytes, &rawReq); err != nil { + log.Printf("Error decoding project priority request as object: %v, body: %s", err, string(bodyBytes)) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Извлекаем массив проектов из body + if body, ok := rawReq["body"].([]interface{}); ok { + log.Printf("Received body format with %d items", len(body)) + for _, item := range body { + if itemMap, ok := item.(map[string]interface{}); ok { + var project ProjectPriorityUpdate + + // Извлекаем id + if idVal, ok := itemMap["id"].(float64); ok { + project.ID = int(idVal) + } else if idVal, ok := itemMap["id"].(int); ok { + project.ID = idVal + } else { + log.Printf("Invalid id in request item: %v", itemMap) + continue + } + + // Извлекаем priority (может быть null, undefined, или числом) + if priorityVal, ok := itemMap["priority"]; ok && priorityVal != nil { + // Проверяем, не является ли это строкой "null" + if strVal, ok := priorityVal.(string); ok && (strVal == "null" || strVal == "NULL") { + project.Priority = nil + } else if numVal, ok := priorityVal.(float64); ok { + priorityInt := int(numVal) + project.Priority = &priorityInt + } else if numVal, ok := priorityVal.(int); ok { + project.Priority = &numVal + } else { + project.Priority = nil + } + } else { + project.Priority = nil + } + + projectsToUpdate = append(projectsToUpdate, project) + } + } + } + } + + if len(projectsToUpdate) == 0 { + log.Printf("No projects to update after parsing. 
Body was: %s", string(bodyBytes)) + sendErrorWithCORS(w, "No projects to update", http.StatusBadRequest) + return + } + + log.Printf("Successfully parsed %d projects to update", len(projectsToUpdate)) + + // Начинаем транзакцию + tx, err := a.DB.Begin() + if err != nil { + log.Printf("Error beginning transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error beginning transaction: %v", err), http.StatusInternalServerError) + return + } + defer tx.Rollback() + + // Обновляем приоритеты для каждого проекта + for _, project := range projectsToUpdate { + if project.Priority == nil { + _, err = tx.Exec(` + UPDATE projects + SET priority = NULL + WHERE id = $1 + `, project.ID) + } else { + _, err = tx.Exec(` + UPDATE projects + SET priority = $1 + WHERE id = $2 + `, *project.Priority, project.ID) + } + + if err != nil { + log.Printf("Error updating project %d priority: %v", project.ID, err) + tx.Rollback() + sendErrorWithCORS(w, fmt.Sprintf("Error updating project %d: %v", project.ID, err), http.StatusInternalServerError) + return + } + } + + // Коммитим транзакцию + if err := tx.Commit(); err != nil { + log.Printf("Error committing transaction: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error committing transaction: %v", err), http.StatusInternalServerError) + return + } + + // Возвращаем успешный ответ + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": fmt.Sprintf("Updated priorities for %d projects", len(projectsToUpdate)), + "updated": len(projectsToUpdate), + }) +} + +func (a *App) todoistWebhookHandler(w http.ResponseWriter, r *http.Request) { + // Логирование входящего запроса + log.Printf("=== Todoist Webhook Request ===") + log.Printf("Method: %s", r.Method) + log.Printf("URL: %s", r.URL.String()) + log.Printf("RemoteAddr: %s", r.RemoteAddr) + log.Printf("Headers:") + for key, values := range r.Header { + for _, value := range values { + log.Printf(" %s: %s", key, value) + } + } + + if r.Method == "OPTIONS" { + log.Printf("OPTIONS request, returning OK") + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + // Читаем тело запроса для логирования + bodyBytes, err := io.ReadAll(r.Body) + if err != nil { + log.Printf("Error reading request body: %v", err) + sendErrorWithCORS(w, "Error reading request body", http.StatusBadRequest) + return + } + + // Логируем сырое тело запроса + log.Printf("Request body (raw): %s", string(bodyBytes)) + log.Printf("Request body length: %d bytes", len(bodyBytes)) + + // Создаем новый reader из прочитанных байтов для парсинга + r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) + + // Опциональная проверка секрета webhook (если задан в переменных окружения) + todoistWebhookSecret := getEnv("TODOIST_WEBHOOK_SECRET", "") + log.Printf("Webhook secret check: configured=%v", todoistWebhookSecret != "") + if todoistWebhookSecret != "" { + providedSecret := r.Header.Get("X-Todoist-Webhook-Secret") + log.Printf("Provided secret in header: %v (length: %d)", providedSecret != "", len(providedSecret)) + if providedSecret != todoistWebhookSecret { + log.Printf("Invalid Todoist webhook secret provided (expected length: %d, provided length: %d)", len(todoistWebhookSecret), len(providedSecret)) + sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized) + return + } + log.Printf("Webhook secret validated successfully") + } + + // Парсим webhook от Todoist + var webhook TodoistWebhook + if err := json.NewDecoder(r.Body).Decode(&webhook); err != nil { + 
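+        // Decode failed. For reference, the shape this handler expects (see the checks below) is:
+        //   {"event_name": "item:completed", "event_data": {"content": "...", "description": "..."}}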
log.Printf("Error decoding Todoist webhook: %v", err) + log.Printf("Failed to parse body as JSON: %s", string(bodyBytes)) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Логируем структуру webhook после парсинга + log.Printf("Parsed webhook structure:") + log.Printf(" EventName: %s", webhook.EventName) + log.Printf(" EventData keys: %v", getMapKeys(webhook.EventData)) + if eventDataJSON, err := json.MarshalIndent(webhook.EventData, " ", " "); err == nil { + log.Printf(" EventData content:\n%s", string(eventDataJSON)) + } else { + log.Printf(" EventData (marshal error): %v", err) + } + + // Проверяем, что это событие закрытия задачи + if webhook.EventName != "item:completed" { + log.Printf("Received Todoist event '%s', ignoring (only processing 'item:completed')", webhook.EventName) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{ + "message": "Event ignored", + "event": webhook.EventName, + }) + return + } + + // Извлекаем content (title) и description из event_data + log.Printf("Extracting content and description from event_data...") + var title, description string + + if content, ok := webhook.EventData["content"].(string); ok { + title = strings.TrimSpace(content) + log.Printf(" Found 'content' (title): '%s' (length: %d)", title, len(title)) + } else { + log.Printf(" 'content' not found or not a string (type: %T, value: %v)", webhook.EventData["content"], webhook.EventData["content"]) + } + + if desc, ok := webhook.EventData["description"].(string); ok { + description = strings.TrimSpace(desc) + log.Printf(" Found 'description': '%s' (length: %d)", description, len(description)) + } else { + log.Printf(" 'description' not found or not a string (type: %T, value: %v)", webhook.EventData["description"], webhook.EventData["description"]) + } + + // Склеиваем title и description + // Логика: если есть оба - склеиваем через \n, если только один - используем его + var combinedText string + if title != "" && description != "" { + combinedText = title + "\n" + description + log.Printf(" Both title and description present, combining them") + } else if title != "" { + combinedText = title + log.Printf(" Only title present, using title only") + } else if description != "" { + combinedText = description + log.Printf(" Only description present, using description only") + } else { + combinedText = "" + log.Printf(" WARNING: Both title and description are empty!") + } + log.Printf("Combined text result: '%s' (length: %d)", combinedText, len(combinedText)) + + // Проверяем, что есть хотя бы title или description + if combinedText == "" { + log.Printf("ERROR: Todoist webhook: no content or description found in event_data") + log.Printf(" title='%s' (empty: %v), description='%s' (empty: %v)", title, title == "", description, description == "") + log.Printf("Available keys in event_data: %v", getMapKeys(webhook.EventData)) + sendErrorWithCORS(w, "Missing 'content' or 'description' in event_data", http.StatusBadRequest) + return + } + + log.Printf("Processing Todoist task: title='%s' (len=%d), description='%s' (len=%d), combined='%s' (len=%d)", + title, len(title), description, len(description), combinedText, len(combinedText)) + + // Обрабатываем сообщение через существующую логику (без отправки в Telegram) + log.Printf("Calling processMessageWithoutTelegram with combined text...") + response, err := a.processMessageWithoutTelegram(combinedText) + if err != nil { + log.Printf("ERROR processing Todoist message: %v", 
err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + // Проверяем наличие nodes - если их нет, игнорируем сообщение + if len(response.Nodes) == 0 { + log.Printf("Todoist webhook: no nodes found in message, ignoring (not saving to database and not sending to Telegram)") + log.Printf("=== Todoist Webhook Request Ignored (No Nodes) ===") + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Message ignored (no nodes found)", + "ignored": true, + }) + return + } + + log.Printf("Successfully processed Todoist task, found %d nodes", len(response.Nodes)) + if len(response.Nodes) > 0 { + log.Printf("Nodes details:") + for i, node := range response.Nodes { + log.Printf(" Node %d: Project='%s', Score=%f", i+1, node.Project, node.Score) + } + + // Отправляем сообщение в Telegram после успешной обработки + log.Printf("Preparing to send message to Telegram...") + log.Printf("Combined text to send: '%s'", combinedText) + a.sendTelegramMessage(combinedText) + log.Printf("sendTelegramMessage call completed") + } else { + log.Printf("No nodes found, skipping Telegram message") + } + + log.Printf("=== Todoist Webhook Request Completed Successfully ===") + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Task processed successfully", + "result": response, + }) +} + +func (a *App) telegramWebhookHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + // Парсим webhook от Telegram + var update TelegramUpdate + if err := json.NewDecoder(r.Body).Decode(&update); err != nil { + log.Printf("Error decoding Telegram webhook: %v", err) + sendErrorWithCORS(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Проверяем, что есть message + if update.Message.Text == "" { + log.Printf("Telegram webhook: no text in message") + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{ + "message": "No text in message, ignored", + }) + return + } + + fullText := update.Message.Text + entities := update.Message.Entities + if entities == nil { + entities = []TelegramEntity{} + } + + log.Printf("Processing Telegram message: text='%s', entities count=%d", fullText, len(entities)) + + // Обрабатываем сообщение через новую логику (с entities, без отправки обратно в Telegram) + response, err := a.processTelegramMessage(fullText, entities) + if err != nil { + log.Printf("Error processing Telegram message: %v", err) + sendErrorWithCORS(w, err.Error(), http.StatusInternalServerError) + return + } + + log.Printf("Successfully processed Telegram message, found %d nodes", len(response.Nodes)) + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "message": "Message processed successfully", + "result": response, + }) +} + +func (a *App) getFullStatisticsHandler(w http.ResponseWriter, r *http.Request) { + if r.Method == "OPTIONS" { + setCORSHeaders(w) + w.WriteHeader(http.StatusOK) + return + } + setCORSHeaders(w) + + query := ` + SELECT + p.name AS project_name, + -- Определяем год и неделю, беря значение из той таблицы, где оно не NULL + COALESCE(wr.report_year, wg.goal_year) AS report_year, + COALESCE(wr.report_week, wg.goal_week) AS report_week, + + -- Фактический score: COALESCE(NULL, 0.0000) + COALESCE(wr.total_score, 0.0000) AS total_score, + + -- 
Минимальная цель: COALESCE(NULL, 0.0000) + COALESCE(wg.min_goal_score, 0.0000) AS min_goal_score, + + -- Максимальная цель: COALESCE(NULL, 0.0000) + COALESCE(wg.max_goal_score, 0.0000) AS max_goal_score + FROM + weekly_report_mv wr + FULL OUTER JOIN + weekly_goals wg + -- Слияние по всем трем ключевым полям + ON wr.project_id = wg.project_id + AND wr.report_year = wg.goal_year + AND wr.report_week = wg.goal_week + JOIN + projects p + -- Присоединяем имя проекта, используя ID из той таблицы, где он не NULL + ON p.id = COALESCE(wr.project_id, wg.project_id) + ORDER BY + report_year DESC, + report_week DESC, + project_name + ` + + rows, err := a.DB.Query(query) + if err != nil { + log.Printf("Error querying full statistics: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error querying full statistics: %v", err), http.StatusInternalServerError) + return + } + defer rows.Close() + + statistics := make([]FullStatisticsItem, 0) + for rows.Next() { + var item FullStatisticsItem + + err := rows.Scan( + &item.ProjectName, + &item.ReportYear, + &item.ReportWeek, + &item.TotalScore, + &item.MinGoalScore, + &item.MaxGoalScore, + ) + if err != nil { + log.Printf("Error scanning full statistics row: %v", err) + sendErrorWithCORS(w, fmt.Sprintf("Error scanning data: %v", err), http.StatusInternalServerError) + return + } + + statistics = append(statistics, item) + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(statistics) +} + diff --git a/play-life-backend/migrations/001_create_schema.sql b/play-life-backend/migrations/001_create_schema.sql new file mode 100644 index 0000000..6539fe0 --- /dev/null +++ b/play-life-backend/migrations/001_create_schema.sql @@ -0,0 +1,105 @@ +-- Migration: Create database schema for play-life project +-- This script creates all tables and materialized views needed for the project + +-- ============================================ +-- Table: projects +-- ============================================ +CREATE TABLE IF NOT EXISTS projects ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + priority SMALLINT, + CONSTRAINT unique_project_name UNIQUE (name) +); + +-- ============================================ +-- Table: entries +-- ============================================ +-- This table stores entries with creation dates +-- Used in weekly_report_mv for grouping by week +CREATE TABLE IF NOT EXISTS entries ( + id SERIAL PRIMARY KEY, + text TEXT NOT NULL, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- ============================================ +-- Table: nodes +-- ============================================ +-- This table stores nodes linked to projects and entries +-- Contains score information used in weekly reports +CREATE TABLE IF NOT EXISTS nodes ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + entry_id INTEGER NOT NULL REFERENCES entries(id) ON DELETE CASCADE, + score NUMERIC(8,4) +); + +-- Create index on project_id for better join performance +CREATE INDEX IF NOT EXISTS idx_nodes_project_id ON nodes(project_id); +-- Create index on entry_id for better join performance +CREATE INDEX IF NOT EXISTS idx_nodes_entry_id ON nodes(entry_id); + +-- ============================================ +-- Table: weekly_goals +-- ============================================ +-- This table stores weekly goals for projects +CREATE TABLE IF NOT EXISTS weekly_goals ( + id SERIAL PRIMARY KEY, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE 
CASCADE, + goal_year INTEGER NOT NULL, + goal_week INTEGER NOT NULL, + min_goal_score NUMERIC(10,4) NOT NULL DEFAULT 0, + max_goal_score NUMERIC(10,4), + actual_score NUMERIC(10,4) DEFAULT 0, + priority SMALLINT, + CONSTRAINT weekly_goals_project_id_goal_year_goal_week_key UNIQUE (project_id, goal_year, goal_week) +); + +-- Create index on project_id for better join performance +CREATE INDEX IF NOT EXISTS idx_weekly_goals_project_id ON weekly_goals(project_id); + +-- ============================================ +-- Materialized View: weekly_report_mv +-- ============================================ +CREATE MATERIALIZED VIEW IF NOT EXISTS weekly_report_mv AS +SELECT + p.id AS project_id, + agg.report_year, + agg.report_week, + -- Используем COALESCE для установки total_score в 0.0000, если нет данных (NULL) + COALESCE(agg.total_score, 0.0000) AS total_score +FROM + projects p +LEFT JOIN + ( + -- 1. Предварительная агрегация: суммируем score по неделям + SELECT + n.project_id, + EXTRACT(YEAR FROM e.created_date)::INTEGER AS report_year, + EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week, + SUM(n.score) AS total_score + FROM + nodes n + JOIN + entries e ON n.entry_id = e.id + GROUP BY + 1, 2, 3 + ) agg + -- 2. Присоединяем агрегированные данные ко ВСЕМ проектам + ON p.id = agg.project_id +ORDER BY + p.id, agg.report_year, agg.report_week; + +-- Create index on materialized view for better query performance +CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week + ON weekly_report_mv(project_id, report_year, report_week); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE projects IS 'Projects table storing project information with priority'; +COMMENT ON TABLE entries IS 'Entries table storing entry creation timestamps'; +COMMENT ON TABLE nodes IS 'Nodes table linking projects, entries and storing scores'; +COMMENT ON TABLE weekly_goals IS 'Weekly goals for projects'; +COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project'; + diff --git a/play-life-backend/migrations/002_add_dictionaries.sql b/play-life-backend/migrations/002_add_dictionaries.sql new file mode 100644 index 0000000..8b140b7 --- /dev/null +++ b/play-life-backend/migrations/002_add_dictionaries.sql @@ -0,0 +1,53 @@ +-- Migration: Add dictionaries table and dictionary_id to words +-- This script creates the dictionaries table and adds dictionary_id field to words table + +-- ============================================ +-- Table: dictionaries +-- ============================================ +CREATE TABLE IF NOT EXISTS dictionaries ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL +); + +-- Insert default dictionary "Все слова" with id = 0 +-- Note: PostgreSQL SERIAL starts from 1, so we need to use a workaround +-- First, set the sequence to allow inserting 0, then insert, then reset sequence +DO $$ +BEGIN + -- Set sequence to -1 so next value will be 0 + PERFORM setval('dictionaries_id_seq', -1, false); + + -- Insert the default dictionary with id = 0 + INSERT INTO dictionaries (id, name) + VALUES (0, 'Все слова') + ON CONFLICT (id) DO NOTHING; + + -- Set the sequence to start from 1 (so next auto-increment will be 1) + PERFORM setval('dictionaries_id_seq', 1, false); +END $$; + +-- ============================================ +-- Alter words table: Add dictionary_id column +-- ============================================ +ALTER TABLE words +ADD COLUMN IF 
NOT EXISTS dictionary_id INTEGER DEFAULT 0 REFERENCES dictionaries(id); + +-- Update all existing words to have dictionary_id = 0 +UPDATE words +SET dictionary_id = 0 +WHERE dictionary_id IS NULL; + +-- Make dictionary_id NOT NULL after setting default values +ALTER TABLE words +ALTER COLUMN dictionary_id SET NOT NULL, +ALTER COLUMN dictionary_id SET DEFAULT 0; + +-- Create index on dictionary_id for better join performance +CREATE INDEX IF NOT EXISTS idx_words_dictionary_id ON words(dictionary_id); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE dictionaries IS 'Dictionaries table storing dictionary information'; +COMMENT ON COLUMN words.dictionary_id IS 'Reference to dictionary. Default is 0 (Все слова)'; + diff --git a/play-life-backend/migrations/003_remove_words_unique_constraint.sql b/play-life-backend/migrations/003_remove_words_unique_constraint.sql new file mode 100644 index 0000000..deed27e --- /dev/null +++ b/play-life-backend/migrations/003_remove_words_unique_constraint.sql @@ -0,0 +1,11 @@ +-- Migration: Remove UNIQUE constraint from words.name +-- This script removes the unique constraint on the name column in the words table + +-- Drop the unique constraint on words.name if it exists +ALTER TABLE words +DROP CONSTRAINT IF EXISTS words_name_key; + +-- Also try to drop constraint if it was created with different name +ALTER TABLE words +DROP CONSTRAINT IF EXISTS words_name_unique; + diff --git a/play-life-backend/migrations/004_add_config_dictionaries.sql b/play-life-backend/migrations/004_add_config_dictionaries.sql new file mode 100644 index 0000000..7d2bc69 --- /dev/null +++ b/play-life-backend/migrations/004_add_config_dictionaries.sql @@ -0,0 +1,21 @@ +-- Migration: Add config_dictionaries table (many-to-many relationship) +-- This script creates the config_dictionaries table linking configs and dictionaries + +-- ============================================ +-- Table: config_dictionaries +-- ============================================ +CREATE TABLE IF NOT EXISTS config_dictionaries ( + config_id INTEGER NOT NULL REFERENCES configs(id) ON DELETE CASCADE, + dictionary_id INTEGER NOT NULL REFERENCES dictionaries(id) ON DELETE CASCADE, + PRIMARY KEY (config_id, dictionary_id) +); + +-- Create indexes for better query performance +CREATE INDEX IF NOT EXISTS idx_config_dictionaries_config_id ON config_dictionaries(config_id); +CREATE INDEX IF NOT EXISTS idx_config_dictionaries_dictionary_id ON config_dictionaries(dictionary_id); + +-- ============================================ +-- Comments for documentation +-- ============================================ +COMMENT ON TABLE config_dictionaries IS 'Many-to-many relationship table linking configs and dictionaries. 
If no dictionaries are selected for a config, all dictionaries will be used.';
+
diff --git a/play-life-backend/migrations/005_fix_weekly_report_mv.sql b/play-life-backend/migrations/005_fix_weekly_report_mv.sql
new file mode 100644
index 0000000..45ecd4f
--- /dev/null
+++ b/play-life-backend/migrations/005_fix_weekly_report_mv.sql
@@ -0,0 +1,29 @@
+-- Migration: Fix weekly_report_mv to use ISOYEAR instead of YEAR
+-- This fixes incorrect week calculations at year boundaries
+-- Date: 2024
+
+-- Drop existing materialized view
+DROP MATERIALIZED VIEW IF EXISTS weekly_report_mv;
+
+-- Recreate materialized view with ISOYEAR
+CREATE MATERIALIZED VIEW weekly_report_mv AS
+SELECT
+    n.project_id,
+    -- 🔑 THE KEY FIX: use ISOYEAR
+    EXTRACT(ISOYEAR FROM e.created_date)::INTEGER AS report_year,
+    EXTRACT(WEEK FROM e.created_date)::INTEGER AS report_week,
+    SUM(n.score) AS total_score
+FROM
+    nodes n
+JOIN
+    entries e ON n.entry_id = e.id
+GROUP BY
+    1, 2, 3
+WITH DATA;
+
+-- Recreate index
+CREATE INDEX IF NOT EXISTS idx_weekly_report_mv_project_year_week
+    ON weekly_report_mv(project_id, report_year, report_week);
+
+COMMENT ON MATERIALIZED VIEW weekly_report_mv IS 'Materialized view aggregating weekly scores by project using ISOYEAR for correct week calculations';
+
diff --git a/play-life-backend/migrations/README.md b/play-life-backend/migrations/README.md
new file mode 100644
index 0000000..fa6c607
--- /dev/null
+++ b/play-life-backend/migrations/README.md
@@ -0,0 +1,81 @@
+# Database Migrations
+
+This directory contains the SQL migrations that create the database structure for the play-life project.
+
+## Usage
+
+### Creating the database from scratch
+
+Run the migration that creates all tables and views:
+
+```bash
+psql -U your_user -d your_database -f 001_create_schema.sql
+```
+
+Or via docker-compose:
+
+```bash
+docker-compose exec db psql -U playeng -d playeng -f /migrations/001_create_schema.sql
+```
+
+## Database structure
+
+### Tables
+
+1. **projects** - Projects
+   - `id` (SERIAL PRIMARY KEY)
+   - `name` (VARCHAR(255) NOT NULL, UNIQUE)
+   - `priority` (SMALLINT)
+
+2. **entries** - Entries with their text and creation dates
+   - `id` (SERIAL PRIMARY KEY)
+   - `text` (TEXT NOT NULL)
+   - `created_date` (TIMESTAMP WITH TIME ZONE NOT NULL, DEFAULT CURRENT_TIMESTAMP)
+
+3. **nodes** - Nodes linking projects and entries
+   - `id` (SERIAL PRIMARY KEY)
+   - `project_id` (INTEGER NOT NULL, FK -> projects.id ON DELETE CASCADE)
+   - `entry_id` (INTEGER NOT NULL, FK -> entries.id ON DELETE CASCADE)
+   - `score` (NUMERIC(8,4))
+
+4. **weekly_goals** - Weekly goals for projects
+   - `id` (SERIAL PRIMARY KEY)
+   - `project_id` (INTEGER NOT NULL, FK -> projects.id ON DELETE CASCADE)
+   - `goal_year` (INTEGER NOT NULL)
+   - `goal_week` (INTEGER NOT NULL)
+   - `min_goal_score` (NUMERIC(10,4) NOT NULL, DEFAULT 0)
+   - `max_goal_score` (NUMERIC(10,4))
+   - `actual_score` (NUMERIC(10,4), DEFAULT 0)
+   - `priority` (SMALLINT)
+   - UNIQUE CONSTRAINT: `(project_id, goal_year, goal_week)`
+
+### Materialized View
+
+- **weekly_report_mv** - Scores aggregated per week for each project
+  - `project_id` (INTEGER)
+  - `report_year` (INTEGER)
+  - `report_week` (INTEGER)
+  - `total_score` (NUMERIC)
+
+## Refreshing the Materialized View
+
+After data in the `nodes` or `entries` tables changes, the materialized view must be refreshed:
+
+```sql
+REFRESH MATERIALIZED VIEW weekly_report_mv;
+```
+
+## Relationships between tables
+
+- `nodes.project_id` → `projects.id` (ON DELETE CASCADE)
+- `nodes.entry_id` → `entries.id` (ON DELETE CASCADE)
+- `weekly_goals.project_id` → `projects.id` (ON DELETE CASCADE)
+
+## Indexes
+
+Indexes created to speed up queries:
+- `idx_nodes_project_id` on `nodes(project_id)`
+- `idx_nodes_entry_id` on `nodes(entry_id)`
+- `idx_weekly_goals_project_id` on `weekly_goals(project_id)`
+- `idx_weekly_report_mv_project_year_week` on `weekly_report_mv(project_id, report_year, report_week)`
+
diff --git a/play-life-backend/start_backend.sh b/play-life-backend/start_backend.sh
new file mode 100644
index 0000000..0ee7445
--- /dev/null
+++ b/play-life-backend/start_backend.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+cd "$(dirname "$0")"
+
+# Database connection settings (can be overridden via environment variables)
+export DB_HOST=${DB_HOST:-localhost}
+export DB_PORT=${DB_PORT:-5432}
+export DB_USER=${DB_USER:-postgres}
+export DB_PASSWORD=${DB_PASSWORD:-postgres}
+export DB_NAME=${DB_NAME:-playlife}
+export PORT=${PORT:-8080}
+
+echo "Starting backend server..."
+echo "DB_HOST: $DB_HOST"
+echo "DB_PORT: $DB_PORT"
+echo "DB_USER: $DB_USER"
+echo "DB_NAME: $DB_NAME"
+echo "PORT: $PORT"
+echo ""
+
+go run main.go
diff --git a/play-life-web/.dockerignore b/play-life-web/.dockerignore
new file mode 100644
index 0000000..7547f40
--- /dev/null
+++ b/play-life-web/.dockerignore
@@ -0,0 +1,12 @@
+node_modules
+dist
+.git
+.gitignore
+README.md
+.env
+.env.local
+.DS_Store
+*.log
+.vscode
+.idea
+
diff --git a/play-life-web/.gitignore b/play-life-web/.gitignore
new file mode 100644
index 0000000..29a4216
--- /dev/null
+++ b/play-life-web/.gitignore
@@ -0,0 +1,30 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+node_modules
+dist
+dist-ssr
+*.local
+
+# Environment variables
+.env
+.env.local
+.env.*.local
+
+# Editor directories and files
+.vscode/*
+!.vscode/extensions.json
+.idea
+.DS_Store
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
+
diff --git a/play-life-web/Dockerfile b/play-life-web/Dockerfile
new file mode 100644
index 0000000..37459ad
--- /dev/null
+++ b/play-life-web/Dockerfile
@@ -0,0 +1,30 @@
+# Build stage
+FROM node:20-alpine AS builder
+
+WORKDIR /app
+
+# Copy package files
+COPY package*.json ./
+
+# Install dependencies
+RUN npm ci
+
+# Copy source code
+COPY . .
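The migrations README above notes that `weekly_report_mv` must be refreshed whenever `nodes` or `entries` change; the backend already does this inside `insertMessageData`, but the same statement can be run out of band. A minimal sketch using Go's `database/sql`; the `lib/pq` driver import and the DSN are placeholders and not taken from this patch:

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/lib/pq" // assumed Postgres driver; substitute whatever go.mod actually pins
)

func main() {
	// Placeholder DSN; reuse the DB_* values from start_backend.sh or the compose file.
	db, err := sql.Open("postgres", "postgres://playeng:playeng@localhost:5432/playeng?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Same statement the backend issues after inserting entries and nodes.
	if _, err := db.Exec("REFRESH MATERIALIZED VIEW weekly_report_mv"); err != nil {
		log.Fatal(err)
	}
	log.Println("weekly_report_mv refreshed")
}
```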
+
+# Build the application
+RUN npm run build
+
+# Production stage
+FROM nginx:alpine
+
+# Copy built files from builder
+COPY --from=builder /app/dist /usr/share/nginx/html
+
+# Copy nginx configuration
+COPY nginx.conf /etc/nginx/conf.d/default.conf
+
+EXPOSE 80
+
+CMD ["nginx", "-g", "daemon off;"]
+
diff --git a/play-life-web/README.md b/play-life-web/README.md
new file mode 100644
index 0000000..d352a44
--- /dev/null
+++ b/play-life-web/README.md
@@ -0,0 +1,105 @@
+# PlayLifeWeb
+
+A web application for displaying project statistics.
+
+## Features
+
+- **Current week**: shows the current statistics, with a progress bar for every project
+- **Full statistics**: a chart of cumulative statistics across all projects
+
+## Tech stack
+
+- React 18
+- Vite
+- Chart.js (react-chartjs-2)
+- Tailwind CSS
+- Docker
+
+## Installation and running
+
+### Local development
+
+1. Install the dependencies:
+```bash
+npm install
+```
+
+2. Start the dev server:
+```bash
+npm run dev
+```
+
+The application will be available at `http://localhost:3000`
+
+### Production build
+
+```bash
+npm run build
+```
+
+### Running with Docker
+
+1. Create a `.env` file in the project root (you can copy it from `env.example`):
+```bash
+cp env.example .env
+```
+
+2. Build the image:
+```bash
+docker-compose build
+```
+
+3. Start the container:
+```bash
+docker-compose up -d
+```
+
+The application will be available at `http://localhost:3000`
+
+**Note:** API requests are proxied to the backend automatically through nginx. No API URL configuration is required.
+
+### Stopping the Docker container
+
+```bash
+docker-compose down
+```
+
+## Project structure
+
+```
+play-life-web/
+├── src/
+│   ├── components/
+│   │   ├── CurrentWeek.jsx        # Current week component
+│   │   ├── FullStatistics.jsx     # Full statistics component
+│   │   └── ProjectProgressBar.jsx # Progress bar component
+│   ├── App.jsx                    # Main application component
+│   ├── main.jsx                   # Entry point
+│   └── index.css                  # Global styles
+├── Dockerfile                     # Docker image
+├── docker-compose.yml             # Docker Compose configuration
+├── nginx.conf                     # Nginx configuration
+└── package.json                   # Project dependencies
+```
+
+## API Endpoints
+
+The application uses relative paths for API requests. Proxying is configured automatically:
+
+- **Development**: the Vite dev server proxies requests to `http://localhost:8080`
+- **Production**: nginx proxies requests to the backend container
+
+Endpoints in use:
+- `/playlife-feed` - current week data
+- `/d2dc349a-0d13-49b2-a8f0-1ab094bfba9b` - full statistics
+- `/projects` - project list
+- `/project/priority` - project priority updates
+- `/api/*` - remaining API endpoints (words, configs, tests)
+
+## Implementation notes
+
+- The progress bar shows the current value (`total_score`) and highlights the goal range (`min_goal_score` - `max_goal_score`)
+- The full-statistics chart shows the cumulative sum of scores per week
+- All projects are drawn on a single chart in different colors
+- Responsive design for different screen sizes
+
diff --git a/play-life-web/build-and-save.sh b/play-life-web/build-and-save.sh
new file mode 100644
index 0000000..d2a7fff
--- /dev/null
+++ b/play-life-web/build-and-save.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+# Добавляем Docker в PATH
+export PATH="/Applications/Docker.app/Contents/Resources/bin:$PATH"
+
+echo "Ожидание запуска Docker daemon..."
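For the `/project/priority` route listed in the README above, `setProjectPriorityHandler` earlier in this patch accepts either a bare JSON array or an object wrapped as `{"body": [...]}`. A hedged client sketch; the base URL and port are assumptions, the route path is taken from the README and nginx proxy list, and the `id`/`priority` field names come from the handler:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

type priorityUpdate struct {
	ID       int  `json:"id"`
	Priority *int `json:"priority"` // nil serializes to null and clears the priority
}

func main() {
	top := 1
	payload := []priorityUpdate{
		{ID: 3, Priority: &top}, // make project 3 the top priority
		{ID: 7, Priority: nil},  // clear the priority of project 7
	}
	body, _ := json.Marshal(payload)

	// The handler also understands {"body": [...]}; a bare array is the simpler form.
	resp, err := http.Post("http://localhost:8080/project/priority", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```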
+# Ждем до 60 секунд, пока Docker daemon запустится +for i in {1..60}; do + if docker ps >/dev/null 2>&1; then + echo "Docker daemon запущен!" + break + fi + if [ $i -eq 60 ]; then + echo "Ошибка: Docker daemon не запустился. Пожалуйста, запустите Docker Desktop вручную." + exit 1 + fi + sleep 1 +done + +echo "Сборка Docker образа..." +docker build \ + -t play-life-web:latest . + +if [ $? -eq 0 ]; then + echo "Образ успешно собран!" + echo "Сохранение образа в play-life-web.tar..." + docker save play-life-web:latest -o play-life-web.tar + + if [ $? -eq 0 ]; then + echo "Образ успешно сохранен в play-life-web.tar" + ls -lh play-life-web.tar + else + echo "Ошибка при сохранении образа" + exit 1 + fi +else + echo "Ошибка при сборке образа" + exit 1 +fi + + + diff --git a/play-life-web/build-docker-image.sh b/play-life-web/build-docker-image.sh new file mode 100644 index 0000000..2e4834a --- /dev/null +++ b/play-life-web/build-docker-image.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +# Скрипт для сборки Docker образа и сохранения в .tar файл + +IMAGE_NAME="play-life-web" +IMAGE_TAG="latest" +TAR_FILE="play-life-web.tar" + +echo "Сборка Docker образа..." +docker build \ + -t "$IMAGE_NAME:$IMAGE_TAG" . + +if [ $? -eq 0 ]; then + echo "Образ успешно собран!" + echo "Сохранение образа в $TAR_FILE..." + docker save "$IMAGE_NAME:$IMAGE_TAG" -o "$TAR_FILE" + + if [ $? -eq 0 ]; then + echo "Образ успешно сохранен в $TAR_FILE" + ls -lh "$TAR_FILE" + else + echo "Ошибка при сохранении образа" + exit 1 + fi +else + echo "Ошибка при сборке образа" + exit 1 +fi + diff --git a/play-life-web/docker-compose.yml b/play-life-web/docker-compose.yml new file mode 100644 index 0000000..e99d136 --- /dev/null +++ b/play-life-web/docker-compose.yml @@ -0,0 +1,21 @@ +version: '3.8' + +services: + play-life-web: + build: + context: . + dockerfile: Dockerfile + container_name: play-life-web + ports: + - "${WEB_PORT:-3000}:80" + restart: unless-stopped + networks: + - play-life-network + env_file: + - ../.env + - .env # Локальный .env имеет приоритет + +networks: + play-life-network: + driver: bridge + diff --git a/play-life-web/env.example b/play-life-web/env.example new file mode 100644 index 0000000..1e7add8 --- /dev/null +++ b/play-life-web/env.example @@ -0,0 +1,6 @@ +# API URLs для PlayLifeWeb +# Скопируйте этот файл в .env и укажите свои значения + +# Play Life Web Port (по умолчанию: 3000) +WEB_PORT=3000 + diff --git a/play-life-web/index.html b/play-life-web/index.html new file mode 100644 index 0000000..3dd1647 --- /dev/null +++ b/play-life-web/index.html @@ -0,0 +1,14 @@ + + + + + + + PlayLife - Статистика + + +
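The nginx config in the next file proxies a fixed set of backend routes, including `message/post`, which the backend's `messagePostHandler` earlier in this patch appears to serve. An illustrative request; the URL and port are assumptions, while the `{"text": ...}` payload and the `**Project+Score**` format mirror that handler:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	payload, _ := json.Marshal(map[string]string{
		"text": "Morning run **Health+2**\nRead 20 pages **Books+0.5**",
	})

	resp, err := http.Post("http://localhost:8080/message/post", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body)) // ProcessedEntry JSON: cleaned text, created date, parsed nodes
}
```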
+ + + + diff --git a/play-life-web/nginx.conf b/play-life-web/nginx.conf new file mode 100644 index 0000000..536b528 --- /dev/null +++ b/play-life-web/nginx.conf @@ -0,0 +1,50 @@ +server { + listen 80; + server_name localhost; + root /usr/share/nginx/html; + index index.html; + + # Gzip compression + gzip on; + gzip_vary on; + gzip_min_length 1024; + gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml+rss application/json; + + # Proxy API requests to backend + location /api/ { + proxy_pass http://backend:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Proxy other API endpoints to backend + location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|project/priority|message/post|webhook/|weekly_goals/setup|admin|admin\.html)$ { + proxy_pass http://backend:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + } + + # Handle React Router (SPA) + location / { + try_files $uri $uri/ /index.html; + } + + # Cache static assets + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ { + expires 1y; + add_header Cache-Control "public, immutable"; + } +} + diff --git a/play-life-web/package-lock.json b/play-life-web/package-lock.json new file mode 100644 index 0000000..8d18d5e --- /dev/null +++ b/play-life-web/package-lock.json @@ -0,0 +1,2706 @@ +{ + "name": "play-life-web", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "play-life-web", + "version": "1.0.0", + "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "chart.js": "^4.4.0", + "react": "^18.2.0", + "react-chartjs-2": "^5.2.0", + "react-dom": "^18.2.0" + }, + "devDependencies": { + "@types/react": "^18.2.43", + "@types/react-dom": "^18.2.17", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.16", + "postcss": "^8.4.32", + "tailwindcss": "^3.3.6", + "vite": "^5.0.8" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", + "integrity": 
"sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", + "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-module-transforms": "^7.28.3", + "@babel/helpers": "^7.28.4", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", + "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", + "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", + "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + 
"peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.5" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "resolved": 
"https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.5", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.5", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.5", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@dnd-kit/accessibility": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz", + "integrity": "sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/core": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/core/-/core-6.3.1.tgz", + "integrity": "sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "@dnd-kit/accessibility": "^3.1.1", + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/sortable": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/sortable/-/sortable-10.0.0.tgz", + "integrity": "sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==", + "license": "MIT", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.3.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/utilities": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@dnd-kit/utilities/-/utilities-3.2.2.tgz", + "integrity": "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": 
"0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + 
"arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + 
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@kurkle/color": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz", + "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==", + "license": "MIT" + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.54.0.tgz", + "integrity": "sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.54.0.tgz", + "integrity": "sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.54.0.tgz", + "integrity": "sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.54.0.tgz", + "integrity": 
"sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.54.0.tgz", + "integrity": "sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.54.0.tgz", + "integrity": "sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.54.0.tgz", + "integrity": "sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.54.0.tgz", + "integrity": "sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.54.0.tgz", + "integrity": "sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.54.0.tgz", + "integrity": "sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.54.0.tgz", + "integrity": "sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.54.0.tgz", + "integrity": "sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + 
"version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.54.0.tgz", + "integrity": "sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.54.0.tgz", + "integrity": "sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.54.0.tgz", + "integrity": "sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.54.0.tgz", + "integrity": "sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.54.0.tgz", + "integrity": "sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.54.0.tgz", + "integrity": "sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.54.0.tgz", + "integrity": "sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.54.0.tgz", + "integrity": "sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.54.0.tgz", + "integrity": "sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==", + "cpu": [ + "x64" + ], 
+ "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.54.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.54.0.tgz", + "integrity": "sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.27", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.27.tgz", + "integrity": "sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": 
"https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true, + "license": "MIT" + }, + "node_modules/autoprefixer": { + "version": "10.4.23", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.23.tgz", + "integrity": "sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001760", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.11", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.11.tgz", + "integrity": "sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001761", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001761.tgz", + "integrity": "sha512-JF9ptu1vP2coz98+5051jZ4PwQgd2ni8A+gYSN7EA7dPKIMf0pDlSUxhdmVOaV3/fYK5uWBkgSXJaRLr4+3A6g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chart.js": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz", + "integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": 
"sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.267", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "dev": true, + "license": "ISC" + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + 
"@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/glob-parent": { + "version": 
"6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": 
"https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + 
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + 
"version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.1.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": 
"sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-chartjs-2": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.3.1.tgz", + "integrity": "sha512-h5IPXKg9EXpjoBzUfyWJvllMjG2mQ4EiuHQFhms/AjUm0XSZHhyRy2xVmLXHKrtcdrPO4mnGqRtYoD0vp95A0A==", + "license": "MIT", + "peerDependencies": { + "chart.js": "^4.1.1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.54.0", + "resolved": 
"https://registry.npmjs.org/rollup/-/rollup-4.54.0.tgz", + "integrity": "sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.54.0", + "@rollup/rollup-android-arm64": "4.54.0", + "@rollup/rollup-darwin-arm64": "4.54.0", + "@rollup/rollup-darwin-x64": "4.54.0", + "@rollup/rollup-freebsd-arm64": "4.54.0", + "@rollup/rollup-freebsd-x64": "4.54.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.54.0", + "@rollup/rollup-linux-arm-musleabihf": "4.54.0", + "@rollup/rollup-linux-arm64-gnu": "4.54.0", + "@rollup/rollup-linux-arm64-musl": "4.54.0", + "@rollup/rollup-linux-loong64-gnu": "4.54.0", + "@rollup/rollup-linux-ppc64-gnu": "4.54.0", + "@rollup/rollup-linux-riscv64-gnu": "4.54.0", + "@rollup/rollup-linux-riscv64-musl": "4.54.0", + "@rollup/rollup-linux-s390x-gnu": "4.54.0", + "@rollup/rollup-linux-x64-gnu": "4.54.0", + "@rollup/rollup-linux-x64-musl": "4.54.0", + "@rollup/rollup-openharmony-arm64": "4.54.0", + "@rollup/rollup-win32-arm64-msvc": "4.54.0", + "@rollup/rollup-win32-ia32-msvc": "4.54.0", + "@rollup/rollup-win32-x64-gnu": "4.54.0", + "@rollup/rollup-win32-x64-msvc": "4.54.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sucrase": { + "version": "3.35.1", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": 
"bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.19", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz", + "integrity": "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + 
"funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + } + } +} diff 
--git a/play-life-web/package.json b/play-life-web/package.json new file mode 100644 index 0000000..2c5c3f9 --- /dev/null +++ b/play-life-web/package.json @@ -0,0 +1,28 @@ +{ + "name": "play-life-web", + "version": "1.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "chart.js": "^4.4.0", + "react": "^18.2.0", + "react-chartjs-2": "^5.2.0", + "react-dom": "^18.2.0" + }, + "devDependencies": { + "@types/react": "^18.2.43", + "@types/react-dom": "^18.2.17", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.16", + "postcss": "^8.4.32", + "tailwindcss": "^3.3.6", + "vite": "^5.0.8" + } +} diff --git a/play-life-web/postcss.config.js b/play-life-web/postcss.config.js new file mode 100644 index 0000000..b4a6220 --- /dev/null +++ b/play-life-web/postcss.config.js @@ -0,0 +1,7 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} + diff --git a/play-life-web/src/App.jsx b/play-life-web/src/App.jsx new file mode 100644 index 0000000..3090bc3 --- /dev/null +++ b/play-life-web/src/App.jsx @@ -0,0 +1,531 @@ +import { useState, useEffect, useCallback, useRef } from 'react' +import CurrentWeek from './components/CurrentWeek' +import FullStatistics from './components/FullStatistics' +import ProjectPriorityManager from './components/ProjectPriorityManager' +import WordList from './components/WordList' +import AddWords from './components/AddWords' +import TestConfigSelection from './components/TestConfigSelection' +import AddConfig from './components/AddConfig' +import TestWords from './components/TestWords' + +// API endpoints (relative paths are used; proxying is configured in nginx/vite) +const CURRENT_WEEK_API_URL = '/playlife-feed' +const FULL_STATISTICS_API_URL = '/d2dc349a-0d13-49b2-a8f0-1ab094bfba9b' + +function App() { + const [activeTab, setActiveTab] = useState('current') + const [selectedProject, setSelectedProject] = useState(null) + const [loadedTabs, setLoadedTabs] = useState({ + current: false, + priorities: false, + full: false, + words: false, + 'add-words': false, + 'test-config': false, + 'add-config': false, + test: false, + }) + + // Track which tabs have already been loaded (to avoid repeated loads) + const [tabsInitialized, setTabsInitialized] = useState({ + current: false, + priorities: false, + full: false, + words: false, + 'add-words': false, + 'test-config': false, + 'add-config': false, + test: false, + }) + + // Parameters for navigation between tabs + const [tabParams, setTabParams] = useState({}) + + // Data caching + const [currentWeekData, setCurrentWeekData] = useState(null) + const [fullStatisticsData, setFullStatisticsData] = useState(null) + + // Loading state for each tab (shown only on the first load) + const [currentWeekLoading, setCurrentWeekLoading] = useState(false) + const [fullStatisticsLoading, setFullStatisticsLoading] = useState(false) + const [prioritiesLoading, setPrioritiesLoading] = useState(false) + + // Background loading state (not shown visually) + const [currentWeekBackgroundLoading, setCurrentWeekBackgroundLoading] = useState(false) + const [fullStatisticsBackgroundLoading, setFullStatisticsBackgroundLoading] = useState(false) + const [prioritiesBackgroundLoading, setPrioritiesBackgroundLoading] = useState(false) + + // Errors + const [currentWeekError,
setCurrentWeekError] = useState(null) + const [fullStatisticsError, setFullStatisticsError] = useState(null) + const [prioritiesError, setPrioritiesError] = useState(null) + + // State for the Refresh button (if present) + const [isRefreshing, setIsRefreshing] = useState(false) + const [prioritiesRefreshTrigger, setPrioritiesRefreshTrigger] = useState(0) + const [testConfigRefreshTrigger, setTestConfigRefreshTrigger] = useState(0) + const [wordsRefreshTrigger, setWordsRefreshTrigger] = useState(0) + + // Restore the last selected tab after a reload + const [isInitialized, setIsInitialized] = useState(false) + + useEffect(() => { + if (isInitialized) return + + try { + const savedTab = window.localStorage?.getItem('activeTab') + const validTabs = ['current', 'priorities', 'full', 'words', 'add-words', 'test-config', 'add-config', 'test'] + if (savedTab && validTabs.includes(savedTab)) { + setActiveTab(savedTab) + setLoadedTabs(prev => ({ ...prev, [savedTab]: true })) + setIsInitialized(true) + } else { + setIsInitialized(true) + } + } catch (err) { + console.warn('Не удалось прочитать активный таб из localStorage', err) + setIsInitialized(true) + } + }, [isInitialized]) + + const markTabAsLoaded = useCallback((tab) => { + setLoadedTabs(prev => (prev[tab] ? prev : { ...prev, [tab]: true })) + }, []) + + const fetchCurrentWeekData = useCallback(async (isBackground = false) => { + try { + if (isBackground) { + setCurrentWeekBackgroundLoading(true) + } else { + setCurrentWeekLoading(true) + } + setCurrentWeekError(null) + console.log('Fetching current week data from:', CURRENT_WEEK_API_URL) + const response = await fetch(CURRENT_WEEK_API_URL) + if (!response.ok) { + throw new Error('Ошибка загрузки данных') + } + const jsonData = await response.json() + // Parse the response: it arrives as an array with a single object [{total: ..., projects: [...]}] + let projects = [] + let total = null + + if (Array.isArray(jsonData) && jsonData.length > 0) { + // If the response is an array, inspect its first element + const firstItem = jsonData[0] + if (firstItem && typeof firstItem === 'object') { + // If the first element is an object with total and projects fields + if (firstItem.projects && Array.isArray(firstItem.projects)) { + projects = firstItem.projects + total = firstItem.total !== undefined ? firstItem.total : null + } else { + // Otherwise it is simply an array of projects + projects = jsonData + } + } else { + // The array contains the projects directly + projects = jsonData + } + } else if (jsonData && typeof jsonData === 'object' && !Array.isArray(jsonData)) { + // The response is a plain object + projects = jsonData.projects || jsonData.data || [] + total = jsonData.total !== undefined ? jsonData.total : null + } + + setCurrentWeekData({ + projects: Array.isArray(projects) ?
projects : [], + total: total + }) + } catch (err) { + setCurrentWeekError(err.message) + console.error('Ошибка загрузки данных текущей недели:', err) + } finally { + if (isBackground) { + setCurrentWeekBackgroundLoading(false) + } else { + setCurrentWeekLoading(false) + } + } + }, []) + + const fetchFullStatisticsData = useCallback(async (isBackground = false) => { + try { + if (isBackground) { + setFullStatisticsBackgroundLoading(true) + } else { + setFullStatisticsLoading(true) + } + setFullStatisticsError(null) + const response = await fetch(FULL_STATISTICS_API_URL) + if (!response.ok) { + throw new Error('Ошибка загрузки данных') + } + const jsonData = await response.json() + setFullStatisticsData(jsonData) + } catch (err) { + setFullStatisticsError(err.message) + console.error('Ошибка загрузки данных полной статистики:', err) + } finally { + if (isBackground) { + setFullStatisticsBackgroundLoading(false) + } else { + setFullStatisticsLoading(false) + } + } + }, []) + + // Use a ref to track tab initialization (avoids needless re-creation of the function) + const tabsInitializedRef = useRef({ + current: false, + priorities: false, + full: false, + words: false, + 'add-words': false, + 'test-config': false, + 'add-config': false, + test: false, + }) + + // Use a ref to track the cache (so useCallback does not depend on state) + const cacheRef = useRef({ + current: null, + full: null, + }) + + // Keep the ref in sync when the data changes + useEffect(() => { + cacheRef.current.current = currentWeekData + }, [currentWeekData]) + + useEffect(() => { + cacheRef.current.full = fullStatisticsData + }, [fullStatisticsData]) + + // Load data for a given tab + const loadTabData = useCallback((tab, isBackground = false) => { + if (tab === 'current') { + const hasCache = cacheRef.current.current !== null + const isInitialized = tabsInitializedRef.current.current + + if (!isInitialized) { + // First load of the tab - load with an indicator + fetchCurrentWeekData(false) + tabsInitializedRef.current.current = true + setTabsInitialized(prev => ({ ...prev, current: true })) + } else if (hasCache && isBackground) { + // Returning to a tab with cached data - refresh in the background + fetchCurrentWeekData(true) + } + // No cache and not the first load - do nothing (the data is already being loaded) + } else if (tab === 'full') { + const hasCache = cacheRef.current.full !== null + const isInitialized = tabsInitializedRef.current.full + + if (!isInitialized) { + // First load of the tab - load with an indicator + fetchFullStatisticsData(false) + tabsInitializedRef.current.full = true + setTabsInitialized(prev => ({ ...prev, full: true })) + } else if (hasCache && isBackground) { + // Returning to a tab with cached data - refresh in the background + fetchFullStatisticsData(true) + } + } else if (tab === 'priorities') { + const isInitialized = tabsInitializedRef.current.priorities + + if (!isInitialized) { + // First load of the tab + setPrioritiesRefreshTrigger(prev => prev + 1) + tabsInitializedRef.current.priorities = true + setTabsInitialized(prev => ({ ...prev, priorities: true })) + } else if (isBackground) { + // Returning to the tab - refresh in the background + setPrioritiesRefreshTrigger(prev => prev + 1) + } + } else if (tab === 'test-config') { + const isInitialized = tabsInitializedRef.current['test-config'] + + if (!isInitialized) { + // First load of the tab + setTestConfigRefreshTrigger(prev => prev + 1) + tabsInitializedRef.current['test-config'] = true + setTabsInitialized(prev => ({ ...prev, 'test-config': true })) + }
else if (isBackground) { + // Returning to the tab - refresh in the background + setTestConfigRefreshTrigger(prev => prev + 1) + } + } + }, [fetchCurrentWeekData, fetchFullStatisticsData]) + + // Refresh all data (for the Refresh button, if present) + const refreshAllData = useCallback(async () => { + setIsRefreshing(true) + setPrioritiesError(null) + setCurrentWeekError(null) + setFullStatisticsError(null) + + // Trigger a priorities refresh + setPrioritiesRefreshTrigger(prev => prev + 1) + + // Load all data in parallel (not in the background) + await Promise.all([ + fetchCurrentWeekData(false), + fetchFullStatisticsData(false), + ]) + + setIsRefreshing(false) + }, [fetchCurrentWeekData, fetchFullStatisticsData]) + + // Refresh data in the background when the screen regains focus + useEffect(() => { + const handleFocus = () => { + if (document.visibilityState === 'visible') { + // Load the active tab's data in the background + loadTabData(activeTab, true) + } + } + + window.addEventListener('focus', handleFocus) + document.addEventListener('visibilitychange', handleFocus) + + return () => { + window.removeEventListener('focus', handleFocus) + document.removeEventListener('visibilitychange', handleFocus) + } + }, [activeTab, loadTabData]) + + const handleProjectClick = (projectName) => { + setSelectedProject(projectName) + markTabAsLoaded('full') + setActiveTab('full') + } + + const handleTabChange = (tab, params = {}) => { + if (tab === 'full' && activeTab === 'full') { + // A repeated click on "Full statistics" resets the selected project + setSelectedProject(null) + } else if (tab !== activeTab) { + markTabAsLoaded(tab) + // Reset tabParams when leaving add-config for another tab + if (activeTab === 'add-config' && tab !== 'add-config') { + setTabParams({}) + } else { + setTabParams(params) + } + setActiveTab(tab) + if (tab === 'current') { + setSelectedProject(null) + } + // Refresh the word list when returning from the add-words screen + if (activeTab === 'add-words' && tab === 'words') { + setWordsRefreshTrigger(prev => prev + 1) + } + // Data loading happens in the useEffect that reacts to activeTab changes + } + } + + // Navigation handler passed to child components + const handleNavigate = (tab, params = {}) => { + handleTabChange(tab, params) + } + + // Load data when a tab is opened (becomes active) + const prevActiveTabRef = useRef(null) + const lastLoadedTabRef = useRef(null) // Track the last loaded tab to avoid double loading + + useEffect(() => { + if (!activeTab || !loadedTabs[activeTab]) return + + const isFirstLoad = !tabsInitializedRef.current[activeTab] + const isReturningToTab = prevActiveTabRef.current !== null && prevActiveTabRef.current !== activeTab + + // Check whether this tab has already been loaded during this render + const tabKey = `${activeTab}-${isFirstLoad ?
'first' : 'return'}` + if (lastLoadedTabRef.current === tabKey) { + return // Already loaded + } + + if (isFirstLoad) { + // First load of the tab + lastLoadedTabRef.current = tabKey + loadTabData(activeTab, false) + } else if (isReturningToTab) { + // Returning to the tab - refresh in the background + lastLoadedTabRef.current = tabKey + loadTabData(activeTab, true) + } + + prevActiveTabRef.current = activeTab + }, [activeTab, loadedTabs, loadTabData]) + + // Overall loading and error state for the Refresh button + const isAnyLoading = currentWeekLoading || fullStatisticsLoading || prioritiesLoading || isRefreshing + const hasAnyError = currentWeekError || fullStatisticsError || prioritiesError + + // Persist the selected tab so it can be restored after a reload + useEffect(() => { + try { + window.localStorage?.setItem('activeTab', activeTab) + } catch (err) { + console.warn('Не удалось сохранить активный таб в localStorage', err) + } + }, [activeTab]) + + // Decide whether the bottom bar should be hidden (for fullscreen screens) + const isFullscreenTab = activeTab === 'test' || activeTab === 'add-words' || activeTab === 'add-config' + + return ( +
+
+
+ {loadedTabs.current && ( +
+ +
+ )} + + {loadedTabs.priorities && ( +
+ +
+ )} + + {loadedTabs.full && ( +
+ setSelectedProject(null)} + data={fullStatisticsData} + loading={fullStatisticsLoading} + error={fullStatisticsError} + onRetry={fetchFullStatisticsData} + currentWeekData={currentWeekData} + onNavigate={handleNavigate} + /> +
+ )} + + {loadedTabs.words && ( +
+ +
+ )} + + {loadedTabs['add-words'] && ( +
+ +
+ )} + + {loadedTabs['test-config'] && ( +
+ +
+ )} + + {loadedTabs['add-config'] && ( +
+ +
+ )} + + {loadedTabs.test && ( +
+ +
+ )} +
+
+ + {!isFullscreenTab && ( +
+
+ + +
+
+ )} +
+ ) +} + +export default App + + diff --git a/play-life-web/src/components/AddConfig.css b/play-life-web/src/components/AddConfig.css new file mode 100644 index 0000000..6cbf6dc --- /dev/null +++ b/play-life-web/src/components/AddConfig.css @@ -0,0 +1,222 @@ +.add-config { + padding-left: 1rem; + padding-right: 1rem; +} + +@media (min-width: 768px) { + .add-config { + padding-left: 1.5rem; + padding-right: 1.5rem; + } +} + +.add-config h2 { + margin-top: 2rem; + margin-bottom: 1rem; + color: #2c3e50; + font-size: 2rem; +} + +.form-group { + margin-bottom: 1.5rem; +} + +.form-group label { + display: block; + margin-bottom: 0.5rem; + color: #2c3e50; + font-weight: 500; +} + +.form-input, +.form-textarea { + width: 100%; + padding: 0.75rem; + border: 2px solid #ddd; + border-radius: 4px; + font-size: 1rem; + transition: border-color 0.2s; + font-family: inherit; +} + +.form-textarea { + resize: vertical; + min-height: 100px; +} + +.form-input:focus, +.form-textarea:focus { + outline: none; + border-color: #3498db; +} + +.submit-button { + background-color: #3498db; + color: white; + border: none; + padding: 0.75rem 2rem; + border-radius: 8px; + font-size: 1rem; + cursor: pointer; + transition: background-color 0.2s; + width: 100%; +} + +.submit-button:hover:not(:disabled) { + background-color: #2980b9; +} + +.submit-button:disabled { + background-color: #bdc3c7; + cursor: not-allowed; +} + +.message { + margin-top: 1rem; + padding: 1rem; + border-radius: 4px; + font-weight: 500; +} + +.message.success { + background-color: #d4edda; + color: #155724; + border: 1px solid #c3e6cb; +} + +.message.error { + background-color: #f8d7da; + color: #721c24; + border: 1px solid #f5c6cb; +} + +.stepper-container { + display: flex; + align-items: center; + gap: 0.5rem; +} + +.stepper-button { + background-color: #3498db; + color: white; + border: none; + width: 40px; + height: 40px; + border-radius: 8px; + font-size: 1.5rem; + font-weight: bold; + cursor: pointer; + transition: background-color 0.2s, transform 0.1s; + display: flex; + align-items: center; + justify-content: center; + flex-shrink: 0; +} + +.stepper-button:hover:not(:disabled) { + background-color: #2980b9; + transform: translateY(-1px); +} + +.stepper-button:disabled { + background-color: #bdc3c7; + cursor: not-allowed; + opacity: 0.6; +} + +.stepper-input { + flex: 1; + padding: 0.75rem; + border: 2px solid #ddd; + border-radius: 4px; + font-size: 1rem; + text-align: center; + transition: border-color 0.2s; + font-family: inherit; +} + +.stepper-input:focus { + outline: none; + border-color: #3498db; +} + +.close-x-button { + position: fixed; + top: 1rem; + right: 1rem; + background: rgba(255, 255, 255, 0.9); + border: none; + font-size: 1.5rem; + color: #7f8c8d; + cursor: pointer; + width: 40px; + height: 40px; + display: flex; + align-items: center; + justify-content: center; + border-radius: 50%; + transition: background-color 0.2s, color 0.2s; + z-index: 1600; + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15); +} + +.close-x-button:hover { + background-color: #ffffff; + color: #2c3e50; +} + +.dictionaries-hint { + font-size: 0.875rem; + color: #7f8c8d; + margin-bottom: 0.75rem; + font-style: italic; +} + +.dictionaries-checkbox-list { + display: flex; + flex-direction: column; + gap: 0; + max-height: 200px; + overflow-y: auto; + padding: 0.5rem; + border: 2px solid #ddd; + border-radius: 4px; + background-color: #f9f9f9; +} + +.dictionary-checkbox-label { + display: flex; + align-items: center; + cursor: pointer; + padding: 0.5rem; + 
border-radius: 4px; + transition: background-color 0.2s; +} + +.dictionary-checkbox-label:hover { + background-color: #e8f4f8; +} + +.dictionary-checkbox-label input[type="checkbox"] { + width: 18px; + height: 18px; + min-width: 18px; + min-height: 18px; + margin: 0; + margin-right: 0.75rem; + padding: 0; + cursor: pointer; + accent-color: #3498db; + flex-shrink: 0; + align-self: center; + vertical-align: middle; +} + +.dictionary-checkbox-label span { + color: #2c3e50; + font-size: 0.95rem; + line-height: 18px; + display: inline-block; + vertical-align: middle; +} + diff --git a/play-life-web/src/components/AddConfig.jsx b/play-life-web/src/components/AddConfig.jsx new file mode 100644 index 0000000..bc08ad4 --- /dev/null +++ b/play-life-web/src/components/AddConfig.jsx @@ -0,0 +1,344 @@ +import React, { useState, useEffect } from 'react' +import './AddConfig.css' + +const API_URL = '/api' + +function AddConfig({ onNavigate, editingConfig: initialEditingConfig }) { + const [name, setName] = useState('') + const [tryMessage, setTryMessage] = useState('') + const [wordsCount, setWordsCount] = useState('10') + const [maxCards, setMaxCards] = useState('') + const [message, setMessage] = useState('') + const [loading, setLoading] = useState(false) + const [dictionaries, setDictionaries] = useState([]) + const [selectedDictionaryIds, setSelectedDictionaryIds] = useState([]) + const [loadingDictionaries, setLoadingDictionaries] = useState(false) + + // Load dictionaries + useEffect(() => { + const loadDictionaries = async () => { + setLoadingDictionaries(true) + try { + const response = await fetch(`${API_URL}/test-configs-and-dictionaries`) + if (!response.ok) { + throw new Error('Ошибка при загрузке словарей') + } + const data = await response.json() + setDictionaries(Array.isArray(data.dictionaries) ? data.dictionaries : []) + } catch (err) { + console.error('Failed to load dictionaries:', err) + } finally { + setLoadingDictionaries(false) + } + } + loadDictionaries() + }, []) + + // Load selected dictionaries when editing + useEffect(() => { + const loadSelectedDictionaries = async () => { + if (initialEditingConfig?.id) { + try { + const response = await fetch(`${API_URL}/configs/${initialEditingConfig.id}/dictionaries`) + if (response.ok) { + const data = await response.json() + setSelectedDictionaryIds(Array.isArray(data.dictionary_ids) ? data.dictionary_ids : []) + } + } catch (err) { + console.error('Failed to load selected dictionaries:', err) + } + } else { + setSelectedDictionaryIds([]) + } + } + loadSelectedDictionaries() + }, [initialEditingConfig]) + + useEffect(() => { + if (initialEditingConfig) { + setName(initialEditingConfig.name) + setTryMessage(initialEditingConfig.try_message) + setWordsCount(String(initialEditingConfig.words_count)) + setMaxCards(initialEditingConfig.max_cards ? 
String(initialEditingConfig.max_cards) : '') + } else { + // Reset the state when opened in add mode + setName('') + setTryMessage('') + setWordsCount('10') + setMaxCards('') + setMessage('') + setSelectedDictionaryIds([]) + } + }, [initialEditingConfig]) + + // Reset the state when the component unmounts + useEffect(() => { + return () => { + setName('') + setTryMessage('') + setWordsCount('10') + setMaxCards('') + setMessage('') + setLoading(false) + } + }, []) + + const handleSubmit = async (e) => { + e.preventDefault() + setMessage('') + setLoading(true) + + if (!name.trim()) { + setMessage('Имя обязательно для заполнения.') + setLoading(false) + return + } + + try { + const url = initialEditingConfig + ? `${API_URL}/configs/${initialEditingConfig.id}` + : `${API_URL}/configs` + const method = initialEditingConfig ? 'PUT' : 'POST' + + const response = await fetch(url, { + method: method, + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + name: name.trim(), + try_message: tryMessage.trim() || '', + words_count: wordsCount === '' ? 0 : parseInt(wordsCount) || 0, + max_cards: maxCards === '' ? null : parseInt(maxCards) || null, + dictionary_ids: selectedDictionaryIds.length > 0 ? selectedDictionaryIds : undefined, + }), + }) + + if (!response.ok) { + const errorData = await response.json().catch(() => ({})) + const errorMessage = errorData.message || response.statusText || `Ошибка при ${initialEditingConfig ? 'обновлении' : 'создании'} конфигурации` + throw new Error(errorMessage) + } + + if (!initialEditingConfig) { + setName('') + setTryMessage('') + setWordsCount('10') + setMaxCards('') + } + + // Navigate back immediately + onNavigate?.('test-config') + } catch (error) { + setMessage(`Ошибка: ${error.message}`) + } finally { + setLoading(false) + } + } + + const getNumericValue = () => { + return wordsCount === '' ? 0 : parseInt(wordsCount) || 0 + } + + const getMaxCardsNumericValue = () => { + return maxCards === '' ? 0 : parseInt(maxCards) || 0 + } + + const handleDecrease = () => { + const numValue = getNumericValue() + if (numValue > 0) { + setWordsCount(String(numValue - 1)) + } + } + + const handleIncrease = () => { + const numValue = getNumericValue() + setWordsCount(String(numValue + 1)) + } + + const handleMaxCardsDecrease = () => { + const numValue = getMaxCardsNumericValue() + if (numValue > 0) { + setMaxCards(String(numValue - 1)) + } else { + setMaxCards('') + } + } + + const handleMaxCardsIncrease = () => { + const numValue = getMaxCardsNumericValue() + const newValue = numValue + 1 + setMaxCards(String(newValue)) + } + + const handleClose = () => { + // Reset the state on close + setName('') + setTryMessage('') + setWordsCount('10') + setMaxCards('') + setMessage('') + onNavigate?.('test-config') + } + + return ( +
+ +

Конфигурация теста

+ +
+
+ + setName(e.target.value)} + placeholder="Название конфига" + required + /> +
+ +
+ +