Compare commits
2 Commits
405d30bead
...
9345b5ab5c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9345b5ab5c | ||
|
|
bad198ce29 |
@@ -61,6 +61,18 @@ services:
|
||||
env_file:
|
||||
- .env
|
||||
|
||||
# LLM сервис (Ollama + Tavily), свой Docker и свой env
|
||||
llm:
|
||||
build:
|
||||
context: ./play-life-llm
|
||||
dockerfile: Dockerfile
|
||||
container_name: play-life-llm
|
||||
ports:
|
||||
- "8090:8090"
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- ./play-life-llm/.env
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
name: play-life_postgres_data
|
||||
|
||||
@@ -76,7 +76,7 @@ server {
|
||||
}
|
||||
|
||||
# Proxy other API endpoints to backend
|
||||
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|weekly_goals/setup)$ {
|
||||
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|weekly_goals/setup|project_score_sample_mv/refresh)$ {
|
||||
proxy_pass http://localhost:8080;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
|
||||
@@ -222,6 +222,19 @@
|
||||
<button onclick="setupWeeklyGoals()">Обновить цели</button>
|
||||
<div id="goalsResult"></div>
|
||||
</div>
|
||||
|
||||
<!-- Project score sample MV Card -->
|
||||
<div class="card">
|
||||
<h2>
|
||||
📊 project_score_sample_mv
|
||||
<span class="status" id="mvStatus" style="display: none;"></span>
|
||||
</h2>
|
||||
<p style="margin-bottom: 15px; color: #666;">
|
||||
Обновить материализованное представление и показать данные текущего пользователя (по одному представителю на вариант баллов проекта).
|
||||
</p>
|
||||
<button onclick="refreshProjectScoreSampleMv()">Обновить project_score_sample_mv</button>
|
||||
<div id="mvResult"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -346,6 +359,35 @@
|
||||
}
|
||||
}
|
||||
|
||||
// Triggers a refresh of project_score_sample_mv on the backend and renders
// the rows returned for the current user into the #mvResult container.
async function refreshProjectScoreSampleMv() {
    const statusId = 'mvStatus';
    const resultId = 'mvResult';

    showStatus(statusId, 'loading', 'Обновление...');
    showResult(resultId, null, false, true);

    try {
        const url = `${getApiUrl()}/project_score_sample_mv/refresh`;
        const response = await fetch(url, { method: 'POST', headers: getAuthHeaders() });

        if (handleAuthError(response)) {
            return;
        }

        const payload = await response.json();
        const failed = !response.ok;

        showStatus(statusId, failed ? 'error' : 'success', failed ? 'Ошибка' : 'Успешно');
        showResult(resultId, payload, failed);
    } catch (error) {
        showStatus(statusId, 'error', 'Ошибка');
        showResult(resultId, { error: error.message }, true);
    }
}
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -193,6 +193,15 @@ type WeeklyGoalSetup struct {
|
||||
MaxGoalScore float64 `json:"max_goal_score"`
|
||||
}
|
||||
|
||||
// ProjectScoreSampleMvRow represents one row from project_score_sample_mv:
// a representative entry per (project, score, user) combination.
type ProjectScoreSampleMvRow struct {
	ProjectID    int       `json:"project_id"`        // project the entry belongs to
	Score        float64   `json:"score"`             // summed node score for the entry
	EntryMessage string    `json:"entry_message"`     // representative entry text (latest by date)
	UserID       *int      `json:"user_id,omitempty"` // nil when user_id is NULL in the MV
	CreatedDate  time.Time `json:"created_date"`      // date of the representative entry
}
|
||||
|
||||
type Project struct {
|
||||
ProjectID int `json:"project_id"`
|
||||
ProjectName string `json:"project_name"`
|
||||
@@ -3135,6 +3144,14 @@ func (a *App) startWeeklyGoalsScheduler() {
|
||||
log.Printf("Projects median materialized view refreshed successfully")
|
||||
}
|
||||
|
||||
// Обновляем project_score_sample_mv
|
||||
_, err = a.DB.Exec("REFRESH MATERIALIZED VIEW project_score_sample_mv")
|
||||
if err != nil {
|
||||
log.Printf("Error refreshing project_score_sample_mv: %v", err)
|
||||
} else {
|
||||
log.Printf("Project score sample materialized view refreshed successfully")
|
||||
}
|
||||
|
||||
// Затем настраиваем цели на новую неделю
|
||||
if err := a.setupWeeklyGoals(); err != nil {
|
||||
log.Printf("Error in scheduled weekly goals setup: %v", err)
|
||||
@@ -4171,6 +4188,7 @@ func main() {
|
||||
adminAPIRoutes.HandleFunc("/message/post", app.messagePostHandler).Methods("POST", "OPTIONS")
|
||||
adminAPIRoutes.HandleFunc("/weekly_goals/setup", app.weeklyGoalsSetupHandler).Methods("POST", "OPTIONS")
|
||||
adminAPIRoutes.HandleFunc("/daily-report/trigger", app.dailyReportTriggerHandler).Methods("POST", "OPTIONS")
|
||||
adminAPIRoutes.HandleFunc("/project_score_sample_mv/refresh", app.projectScoreSampleMvRefreshHandler).Methods("POST", "OPTIONS")
|
||||
|
||||
// Static files handler для uploads (public, no auth required) - ДО protected!
|
||||
// Backend работает из /app/backend/, но uploads находится в /app/uploads/
|
||||
@@ -5462,6 +5480,67 @@ func (a *App) dailyReportTriggerHandler(w http.ResponseWriter, r *http.Request)
|
||||
})
|
||||
}
|
||||
|
||||
// projectScoreSampleMvRefreshHandler refreshes project_score_sample_mv and returns rows for the current user
|
||||
func (a *App) projectScoreSampleMvRefreshHandler(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method == "OPTIONS" {
|
||||
setCORSHeaders(w)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
return
|
||||
}
|
||||
setCORSHeaders(w)
|
||||
|
||||
_, err := a.DB.Exec("REFRESH MATERIALIZED VIEW project_score_sample_mv")
|
||||
if err != nil {
|
||||
log.Printf("Error refreshing project_score_sample_mv: %v", err)
|
||||
sendErrorWithCORS(w, fmt.Sprintf("Error refreshing MV: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
userID, ok := getUserIDFromContext(r)
|
||||
if !ok {
|
||||
sendErrorWithCORS(w, "Unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
|
||||
rows, err := a.DB.Query(`
|
||||
SELECT project_id, score, entry_message, user_id, created_date
|
||||
FROM project_score_sample_mv
|
||||
WHERE user_id = $1
|
||||
ORDER BY project_id, score
|
||||
`, userID)
|
||||
if err != nil {
|
||||
log.Printf("Error querying project_score_sample_mv: %v", err)
|
||||
sendErrorWithCORS(w, fmt.Sprintf("Error querying MV: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
data := make([]ProjectScoreSampleMvRow, 0)
|
||||
for rows.Next() {
|
||||
var row ProjectScoreSampleMvRow
|
||||
var userIDNull sql.NullInt64
|
||||
err := rows.Scan(&row.ProjectID, &row.Score, &row.EntryMessage, &userIDNull, &row.CreatedDate)
|
||||
if err != nil {
|
||||
log.Printf("Error scanning project_score_sample_mv row: %v", err)
|
||||
sendErrorWithCORS(w, fmt.Sprintf("Error scanning data: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if userIDNull.Valid {
|
||||
uid := int(userIDNull.Int64)
|
||||
row.UserID = &uid
|
||||
}
|
||||
data = append(data, row)
|
||||
}
|
||||
if err = rows.Err(); err != nil {
|
||||
log.Printf("Error iterating project_score_sample_mv rows: %v", err)
|
||||
sendErrorWithCORS(w, fmt.Sprintf("Error reading data: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(data)
|
||||
}
|
||||
|
||||
func (a *App) adminHandler(w http.ResponseWriter, r *http.Request) {
|
||||
// Пробуем найти файл admin.html в разных местах
|
||||
var adminPath string
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
-- Migration: Drop project_score_sample_mv materialized view
|
||||
|
||||
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||
@@ -0,0 +1,31 @@
|
||||
-- Migration: Add project_score_sample_mv materialized view
--
-- One row per (project_id, score, user_id): sum of nodes.score per entry,
-- representative entry_message (latest by date). Used for admin display and reporting.

CREATE MATERIALIZED VIEW project_score_sample_mv AS
-- Step 1: collapse nodes to one row per entry, carrying the entry's total
-- score and the most recent node date.
WITH entry_scores AS (
    SELECT
        n.project_id,
        n.entry_id,
        n.user_id,
        SUM(n.score) AS score,
        MAX(n.created_date) AS created_date
    FROM nodes n
    GROUP BY n.project_id, n.entry_id, n.user_id
)
-- Step 2: keep one representative entry per (project, score, user).
-- DISTINCT ON together with the created_date DESC sort picks the latest one.
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
    es.project_id,
    es.score,
    e.text AS entry_message,
    es.user_id,
    es.created_date
FROM entry_scores es
JOIN entries e ON e.id = es.entry_id
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
WITH DATA;

-- Covering index for the admin query (filter by user, order by project/score).
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);

COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): sum of nodes per entry, representative entry_message (latest by date).';
|
||||
@@ -0,0 +1,30 @@
|
||||
-- Revert to previous MV definition (one row per project_id, score, user_id)
|
||||
|
||||
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||
|
||||
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||
WITH entry_scores AS (
|
||||
SELECT
|
||||
n.project_id,
|
||||
n.entry_id,
|
||||
n.user_id,
|
||||
SUM(n.score) AS score,
|
||||
MAX(n.created_date) AS created_date
|
||||
FROM nodes n
|
||||
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||
)
|
||||
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
|
||||
es.project_id,
|
||||
es.score,
|
||||
e.text AS entry_message,
|
||||
es.user_id,
|
||||
es.created_date
|
||||
FROM entry_scores es
|
||||
JOIN entries e ON e.id = es.entry_id
|
||||
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
|
||||
WITH DATA;
|
||||
|
||||
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||
|
||||
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): sum of nodes per entry, representative entry_message (latest by date).';
|
||||
@@ -0,0 +1,42 @@
|
||||
-- Migration: Make entry_message unique per (project_id, user_id) in project_score_sample_mv
|
||||
--
|
||||
-- One row per (project_id, user_id, entry_message): choose the row with latest created_date.
|
||||
-- Ensures the same entry_message does not repeat for different score values.
|
||||
|
||||
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||
|
||||
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||
WITH entry_scores AS (
|
||||
SELECT
|
||||
n.project_id,
|
||||
n.entry_id,
|
||||
n.user_id,
|
||||
SUM(n.score) AS score,
|
||||
MAX(n.created_date) AS created_date
|
||||
FROM nodes n
|
||||
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||
),
|
||||
with_message AS (
|
||||
SELECT
|
||||
es.project_id,
|
||||
es.score,
|
||||
e.text AS entry_message,
|
||||
es.user_id,
|
||||
es.created_date
|
||||
FROM entry_scores es
|
||||
JOIN entries e ON e.id = es.entry_id
|
||||
)
|
||||
SELECT DISTINCT ON (project_id, user_id, entry_message)
|
||||
project_id,
|
||||
score,
|
||||
entry_message,
|
||||
user_id,
|
||||
created_date
|
||||
FROM with_message
|
||||
ORDER BY project_id, user_id, entry_message, created_date DESC
|
||||
WITH DATA;
|
||||
|
||||
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||
|
||||
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): representative row (latest by date). entry_message is unique per project and user.';
|
||||
@@ -0,0 +1,39 @@
|
||||
-- Revert to one row per (project_id, user_id, entry_message)
|
||||
|
||||
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||
|
||||
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||
WITH entry_scores AS (
|
||||
SELECT
|
||||
n.project_id,
|
||||
n.entry_id,
|
||||
n.user_id,
|
||||
SUM(n.score) AS score,
|
||||
MAX(n.created_date) AS created_date
|
||||
FROM nodes n
|
||||
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||
),
|
||||
with_message AS (
|
||||
SELECT
|
||||
es.project_id,
|
||||
es.score,
|
||||
e.text AS entry_message,
|
||||
es.user_id,
|
||||
es.created_date
|
||||
FROM entry_scores es
|
||||
JOIN entries e ON e.id = es.entry_id
|
||||
)
|
||||
SELECT DISTINCT ON (project_id, user_id, entry_message)
|
||||
project_id,
|
||||
score,
|
||||
entry_message,
|
||||
user_id,
|
||||
created_date
|
||||
FROM with_message
|
||||
ORDER BY project_id, user_id, entry_message, created_date DESC
|
||||
WITH DATA;
|
||||
|
||||
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||
|
||||
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): representative row (latest by date).';
|
||||
@@ -0,0 +1,32 @@
|
||||
-- Migration: One row per (project_id, user_id, score) in project_score_sample_mv
|
||||
--
|
||||
-- For each score value (per project and user) exactly one record; representative entry_message (latest by date).
|
||||
|
||||
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||
|
||||
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||
WITH entry_scores AS (
|
||||
SELECT
|
||||
n.project_id,
|
||||
n.entry_id,
|
||||
n.user_id,
|
||||
SUM(n.score) AS score,
|
||||
MAX(n.created_date) AS created_date
|
||||
FROM nodes n
|
||||
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||
)
|
||||
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
|
||||
es.project_id,
|
||||
es.score,
|
||||
e.text AS entry_message,
|
||||
es.user_id,
|
||||
es.created_date
|
||||
FROM entry_scores es
|
||||
JOIN entries e ON e.id = es.entry_id
|
||||
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
|
||||
WITH DATA;
|
||||
|
||||
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||
|
||||
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): one record per score, representative entry_message (latest by date).';
|
||||
@@ -0,0 +1,30 @@
|
||||
-- Revert to one row per (project_id, score, user_id)
|
||||
|
||||
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||
|
||||
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||
WITH entry_scores AS (
|
||||
SELECT
|
||||
n.project_id,
|
||||
n.entry_id,
|
||||
n.user_id,
|
||||
SUM(n.score) AS score,
|
||||
MAX(n.created_date) AS created_date
|
||||
FROM nodes n
|
||||
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||
)
|
||||
SELECT DISTINCT ON (es.project_id, es.score, es.user_id)
|
||||
es.project_id,
|
||||
es.score,
|
||||
e.text AS entry_message,
|
||||
es.user_id,
|
||||
es.created_date
|
||||
FROM entry_scores es
|
||||
JOIN entries e ON e.id = es.entry_id
|
||||
ORDER BY es.project_id, es.score, es.user_id, es.created_date DESC
|
||||
WITH DATA;
|
||||
|
||||
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||
|
||||
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, score, user_id): one record per score, representative entry_message (latest by date).';
|
||||
@@ -0,0 +1,42 @@
|
||||
-- Migration: One entry_message per (project_id, user_id) in project_score_sample_mv
|
||||
--
|
||||
-- One record per score (per project, user) and one record per entry_message per project.
|
||||
-- DISTINCT ON (project_id, user_id, entry_message): same message with different scores → one row (latest by date).
|
||||
|
||||
DROP MATERIALIZED VIEW IF EXISTS project_score_sample_mv;
|
||||
|
||||
CREATE MATERIALIZED VIEW project_score_sample_mv AS
|
||||
WITH entry_scores AS (
|
||||
SELECT
|
||||
n.project_id,
|
||||
n.entry_id,
|
||||
n.user_id,
|
||||
SUM(n.score) AS score,
|
||||
MAX(n.created_date) AS created_date
|
||||
FROM nodes n
|
||||
GROUP BY n.project_id, n.entry_id, n.user_id
|
||||
),
|
||||
with_message AS (
|
||||
SELECT
|
||||
es.project_id,
|
||||
es.score,
|
||||
e.text AS entry_message,
|
||||
es.user_id,
|
||||
es.created_date
|
||||
FROM entry_scores es
|
||||
JOIN entries e ON e.id = es.entry_id
|
||||
)
|
||||
SELECT DISTINCT ON (project_id, user_id, entry_message)
|
||||
project_id,
|
||||
score,
|
||||
entry_message,
|
||||
user_id,
|
||||
created_date
|
||||
FROM with_message
|
||||
ORDER BY project_id, user_id, entry_message, created_date DESC
|
||||
WITH DATA;
|
||||
|
||||
CREATE INDEX idx_project_score_sample_mv_project_score_user ON project_score_sample_mv(project_id, score, user_id);
|
||||
CREATE INDEX idx_project_score_sample_mv_user_id ON project_score_sample_mv(user_id);
|
||||
|
||||
COMMENT ON MATERIALIZED VIEW project_score_sample_mv IS 'One row per (project_id, user_id, entry_message): one record per score (chosen row), one entry_message per project; representative = latest by date.';
|
||||
12
play-life-llm/.gitignore
vendored
Normal file
12
play-life-llm/.gitignore
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# Env with secrets (Tavily API key, etc.)
|
||||
.env
|
||||
|
||||
# Binary
|
||||
play-life-llm
|
||||
*.exe
|
||||
|
||||
# IDE / OS
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
.DS_Store
|
||||
19
play-life-llm/Dockerfile
Normal file
19
play-life-llm/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
# Build stage
|
||||
FROM golang:1.24-alpine AS builder
|
||||
WORKDIR /app
|
||||
ENV GOPROXY=https://proxy.golang.org,direct
|
||||
ENV GOSUMDB=sum.golang.org
|
||||
COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
COPY . .
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o play-life-llm .
|
||||
|
||||
# Runtime stage
|
||||
FROM alpine:latest
|
||||
RUN apk --no-cache add ca-certificates wget
|
||||
WORKDIR /app
|
||||
COPY --from=builder /app/play-life-llm .
|
||||
EXPOSE 8090
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
|
||||
CMD wget -q -O- http://localhost:8090/health || exit 1
|
||||
CMD ["./play-life-llm"]
|
||||
12
play-life-llm/env.example
Normal file
12
play-life-llm/env.example
Normal file
@@ -0,0 +1,12 @@
|
||||
# Ollama API base URL (default: http://localhost:11434)
|
||||
# For Docker on Mac/Windows use: http://host.docker.internal:11434
|
||||
OLLAMA_HOST=http://localhost:11434
|
||||
|
||||
# Tavily API key for web search (required when model uses web_search tool)
|
||||
TAVILY_API_KEY=
|
||||
|
||||
# HTTP server port (default: 8090)
|
||||
PORT=8090
|
||||
|
||||
# Default Ollama model (default: llama3.1:70b)
|
||||
OLLAMA_MODEL=llama3.1:70b
|
||||
5
play-life-llm/go.mod
Normal file
5
play-life-llm/go.mod
Normal file
@@ -0,0 +1,5 @@
|
||||
module play-life-llm
|
||||
|
||||
go 1.24.0
|
||||
|
||||
require github.com/gorilla/mux v1.8.1
|
||||
2
play-life-llm/go.sum
Normal file
2
play-life-llm/go.sum
Normal file
@@ -0,0 +1,2 @@
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
177
play-life-llm/internal/handler/ask.go
Normal file
177
play-life-llm/internal/handler/ask.go
Normal file
@@ -0,0 +1,177 @@
|
||||
package handler
|
||||
|
||||
import (
	"encoding/json"
	"log"
	"net/http"
	"unicode/utf8"

	"play-life-llm/internal/ollama"
	"play-life-llm/internal/tavily"
)
|
||||
|
||||
// AskRequest is the POST /ask body.
type AskRequest struct {
	// Prompt is the user's question, passed to the model verbatim.
	Prompt string `json:"prompt"`
	// ResponseSchema is the JSON schema the model's answer must conform to.
	ResponseSchema interface{} `json:"response_schema"`
	// Model optionally overrides the handler's default Ollama model.
	Model string `json:"model,omitempty"`
	// AllowWebSearch: if true, tools (web_search) are added to the Ollama
	// request, and searches are executed via Tavily when the model calls the
	// tool. If false (the default), no tools are passed — the model simply
	// returns JSON by the schema (suitable for simple requests that need no
	// internet access).
	AllowWebSearch bool `json:"allow_web_search,omitempty"`
}

// AskResponse is the successful response (result is JSON by schema).
type AskResponse struct {
	Result json.RawMessage `json:"result"`
}

// AskHandler handles POST /ask: prompt + response_schema -> LLM with optional web search, returns JSON.
type AskHandler struct {
	Ollama       *ollama.Client // Ollama chat client
	Tavily       *tavily.Client // Tavily web-search client
	DefaultModel string         // model used when the request does not specify one
}
|
||||
|
||||
// ServeHTTP implements http.Handler.
|
||||
func (h *AskHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
var req AskRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
sendError(w, "invalid JSON body", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if req.Prompt == "" {
|
||||
sendError(w, "prompt is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if req.ResponseSchema == nil {
|
||||
sendError(w, "response_schema is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
model := req.Model
|
||||
if model == "" {
|
||||
model = h.DefaultModel
|
||||
}
|
||||
if model == "" {
|
||||
model = "llama3.1:70b"
|
||||
}
|
||||
|
||||
var tools []ollama.Tool
|
||||
if req.AllowWebSearch {
|
||||
tools = []ollama.Tool{ollama.WebSearchTool()}
|
||||
}
|
||||
messages := []ollama.ChatMessage{}
|
||||
if req.AllowWebSearch {
|
||||
messages = append(messages, ollama.ChatMessage{
|
||||
Role: "system",
|
||||
Content: "When the user asks for current, recent, or real-time information (weather, prices, news, etc.), you MUST call the web_search tool with a suitable query. Do not answer from memory — use the tool and then summarize the results in your response.",
|
||||
})
|
||||
// Гарантированный запрос в Tavily: предпоиск по промпту пользователя, результат подмешивается в контекст.
|
||||
searchQuery := req.Prompt
|
||||
if len(searchQuery) > 200 {
|
||||
searchQuery = searchQuery[:200]
|
||||
}
|
||||
log.Printf("tavily pre-search: query=%q", searchQuery)
|
||||
preSearchResult, err := h.Tavily.Search(searchQuery)
|
||||
if err != nil {
|
||||
log.Printf("tavily pre-search error: %v", err)
|
||||
preSearchResult = "search failed: " + err.Error()
|
||||
} else {
|
||||
log.Printf("tavily pre-search ok: %d bytes", len(preSearchResult))
|
||||
}
|
||||
messages = append(messages, ollama.ChatMessage{
|
||||
Role: "system",
|
||||
Content: "Relevant web search result for the user's question (use this to answer; if not enough, you may call web_search again):\n\n" + preSearchResult,
|
||||
})
|
||||
}
|
||||
messages = append(messages, ollama.ChatMessage{
|
||||
Role: "user", Content: req.Prompt,
|
||||
})
|
||||
|
||||
const maxToolRounds = 20
|
||||
for round := 0; round < maxToolRounds; round++ {
|
||||
chatReq := &ollama.ChatRequest{
|
||||
Model: model,
|
||||
Messages: messages,
|
||||
Stream: false,
|
||||
Format: req.ResponseSchema,
|
||||
Tools: tools,
|
||||
}
|
||||
resp, err := h.Ollama.Chat(chatReq)
|
||||
if err != nil {
|
||||
log.Printf("ollama chat error: %v", err)
|
||||
sendError(w, "ollama request failed: "+err.Error(), http.StatusBadGateway)
|
||||
return
|
||||
}
|
||||
|
||||
messages = append(messages, resp.Message)
|
||||
|
||||
if n := len(resp.Message.ToolCalls); n > 0 {
|
||||
log.Printf("ollama returned %d tool_calls", n)
|
||||
}
|
||||
if len(resp.Message.ToolCalls) == 0 {
|
||||
// Final answer: message.content is JSON by schema
|
||||
content := resp.Message.Content
|
||||
if content == "" {
|
||||
sendError(w, "empty response from model", http.StatusBadGateway)
|
||||
return
|
||||
}
|
||||
// Return as { "result": <parsed JSON> } so client gets valid JSON
|
||||
var raw json.RawMessage
|
||||
if err := json.Unmarshal([]byte(content), &raw); err != nil {
|
||||
// If not valid JSON, return as string inside result
|
||||
raw = json.RawMessage(`"` + escapeJSONString(content) + `"`)
|
||||
}
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_ = json.NewEncoder(w).Encode(AskResponse{Result: raw})
|
||||
return
|
||||
}
|
||||
|
||||
// Execute tool calls (web_search via Tavily)
|
||||
for _, tc := range resp.Message.ToolCalls {
|
||||
if tc.Function.Name != "web_search" {
|
||||
messages = append(messages, ollama.ChatMessage{
|
||||
Role: "tool", ToolName: tc.Function.Name, Content: "unknown tool",
|
||||
})
|
||||
continue
|
||||
}
|
||||
query := ollama.QueryFromToolCall(tc)
|
||||
if query == "" {
|
||||
// Некоторые модели подставляют в arguments не "query", а другие поля — используем промпт пользователя как поисковый запрос
|
||||
query = req.Prompt
|
||||
if len(query) > 200 {
|
||||
query = query[:200]
|
||||
}
|
||||
log.Printf("web_search: query empty in tool_call, using user prompt (first 200 chars)")
|
||||
}
|
||||
log.Printf("tavily search: query=%q", query)
|
||||
searchResult, err := h.Tavily.Search(query)
|
||||
if err != nil {
|
||||
log.Printf("tavily search error: %v", err)
|
||||
searchResult = "search failed: " + err.Error()
|
||||
} else {
|
||||
log.Printf("tavily search ok: %d bytes", len(searchResult))
|
||||
}
|
||||
messages = append(messages, ollama.ChatMessage{
|
||||
Role: "tool", ToolName: "web_search", Content: searchResult,
|
||||
})
|
||||
}
|
||||
}
|
||||
// Too many tool rounds
|
||||
sendError(w, "too many tool-call rounds", http.StatusBadGateway)
|
||||
}
|
||||
|
||||
func sendError(w http.ResponseWriter, msg string, code int) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(code)
|
||||
_ = json.NewEncoder(w).Encode(map[string]string{"error": msg})
|
||||
}
|
||||
|
||||
// escapeJSONString returns s encoded as the contents of a JSON string
// literal, without the surrounding double quotes.
func escapeJSONString(s string) string {
	quoted, _ := json.Marshal(s) // marshalling a string never fails
	return string(quoted[1 : len(quoted)-1])
}
|
||||
17
play-life-llm/internal/handler/health.go
Normal file
17
play-life-llm/internal/handler/health.go
Normal file
@@ -0,0 +1,17 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Health returns 200 with {"status": "ok"} for Docker healthcheck.
|
||||
func Health(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_ = json.NewEncoder(w).Encode(map[string]string{"status": "ok"})
|
||||
}
|
||||
148
play-life-llm/internal/ollama/client.go
Normal file
148
play-life-llm/internal/ollama/client.go
Normal file
@@ -0,0 +1,148 @@
|
||||
package ollama
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
const defaultTimeout = 10 * time.Minute
|
||||
|
||||
// Client calls Ollama /api/chat.
|
||||
type Client struct {
|
||||
BaseURL string
|
||||
HTTPClient *http.Client
|
||||
}
|
||||
|
||||
// NewClient creates an Ollama client. baseURL is e.g. "http://localhost:11434".
|
||||
func NewClient(baseURL string) *Client {
|
||||
return &Client{
|
||||
BaseURL: baseURL,
|
||||
HTTPClient: &http.Client{
|
||||
Timeout: defaultTimeout,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// ChatRequest matches Ollama POST /api/chat body.
|
||||
type ChatRequest struct {
|
||||
Model string `json:"model"`
|
||||
Messages []ChatMessage `json:"messages"`
|
||||
Stream bool `json:"stream"`
|
||||
Format interface{} `json:"format,omitempty"` // "json" or JSON schema object
|
||||
Tools []Tool `json:"tools,omitempty"`
|
||||
}
|
||||
|
||||
// ChatMessage is one message in the conversation.
|
||||
type ChatMessage struct {
|
||||
Role string `json:"role"` // "user", "assistant", "system", "tool"
|
||||
Content string `json:"content,omitempty"`
|
||||
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
|
||||
ToolName string `json:"tool_name,omitempty"` // for role "tool"
|
||||
}
|
||||
|
||||
// Tool defines a function the model may call.
|
||||
type Tool struct {
|
||||
Type string `json:"type"`
|
||||
Function ToolFunc `json:"function"`
|
||||
}
|
||||
|
||||
// ToolFunc describes the function.
|
||||
type ToolFunc struct {
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description"`
|
||||
Parameters interface{} `json:"parameters"`
|
||||
}
|
||||
|
||||
// ToolCall is a model request to call a tool.
type ToolCall struct {
	Type     string     `json:"type"`
	Function ToolCallFn `json:"function"`
}

// ToolCallFn holds name and arguments.
// Arguments may come from Ollama as a JSON object or as a JSON string.
type ToolCallFn struct {
	Name      string      `json:"name"`
	Arguments interface{} `json:"arguments"` // object or string
}

// QueryFromToolCall returns the "query" argument from a web_search tool
// call. Ollama may deliver arguments either as an already-decoded map or as
// a JSON-encoded string; both shapes are handled. An empty string means no
// usable query was found.
func QueryFromToolCall(tc ToolCall) string {
	args := tc.Function.Arguments

	if m, ok := args.(map[string]interface{}); ok {
		q, _ := m["query"].(string)
		return q
	}

	if s, ok := args.(string); ok {
		var m map[string]interface{}
		if err := json.Unmarshal([]byte(s), &m); err == nil {
			q, _ := m["query"].(string)
			return q
		}
	}

	return ""
}
|
||||
|
||||
// ChatResponse is the Ollama /api/chat response.
|
||||
type ChatResponse struct {
|
||||
Message ChatMessage `json:"message"`
|
||||
Done bool `json:"done"`
|
||||
}
|
||||
|
||||
// Chat sends a chat request and returns the response.
|
||||
func (c *Client) Chat(req *ChatRequest) (*ChatResponse, error) {
|
||||
body, err := json.Marshal(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("marshal request: %w", err)
|
||||
}
|
||||
url := c.BaseURL + "/api/chat"
|
||||
httpReq, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("new request: %w", err)
|
||||
}
|
||||
httpReq.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := c.HTTPClient.Do(httpReq)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("do request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
b, _ := io.ReadAll(resp.Body)
|
||||
return nil, fmt.Errorf("ollama returned %d: %s", resp.StatusCode, string(b))
|
||||
}
|
||||
|
||||
var out ChatResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
|
||||
return nil, fmt.Errorf("decode response: %w", err)
|
||||
}
|
||||
return &out, nil
|
||||
}
|
||||
|
||||
// WebSearchTool returns the tool definition for web_search (Tavily).
|
||||
func WebSearchTool() Tool {
|
||||
return Tool{
|
||||
Type: "function",
|
||||
Function: ToolFunc{
|
||||
Name: "web_search",
|
||||
Description: "Search the web for current information. Use when you need up-to-date or factual information from the internet.",
|
||||
Parameters: map[string]interface{}{
|
||||
"type": "object",
|
||||
"properties": map[string]interface{}{
|
||||
"query": map[string]interface{}{
|
||||
"type": "string",
|
||||
"description": "Search query",
|
||||
},
|
||||
},
|
||||
"required": []string{"query"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
35
play-life-llm/internal/server/server.go
Normal file
35
play-life-llm/internal/server/server.go
Normal file
@@ -0,0 +1,35 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"play-life-llm/internal/handler"
|
||||
"play-life-llm/internal/ollama"
|
||||
"play-life-llm/internal/tavily"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
// Config holds server and client configuration.
type Config struct {
	// OllamaHost is the base URL of the Ollama API
	// (e.g. http://localhost:11434).
	OllamaHost string
	// TavilyAPIKey authenticates requests to the Tavily search API.
	TavilyAPIKey string
	// DefaultModel is the Ollama model name used by the ask handler
	// when a request does not specify one.
	DefaultModel string
}
|
||||
|
||||
// NewRouter returns an HTTP router with /health and /ask registered.
|
||||
func NewRouter(cfg Config) http.Handler {
|
||||
ollamaClient := ollama.NewClient(cfg.OllamaHost)
|
||||
tavilyClient := tavily.NewClient(cfg.TavilyAPIKey)
|
||||
|
||||
askHandler := &handler.AskHandler{
|
||||
Ollama: ollamaClient,
|
||||
Tavily: tavilyClient,
|
||||
DefaultModel: cfg.DefaultModel,
|
||||
}
|
||||
|
||||
r := mux.NewRouter()
|
||||
r.HandleFunc("/health", handler.Health).Methods(http.MethodGet)
|
||||
r.Handle("/ask", askHandler).Methods(http.MethodPost)
|
||||
return r
|
||||
}
|
||||
104
play-life-llm/internal/tavily/client.go
Normal file
104
play-life-llm/internal/tavily/client.go
Normal file
@@ -0,0 +1,104 @@
|
||||
package tavily
|
||||
|
||||
import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
|
||||
|
||||
const (
	// baseURL is the Tavily API root.
	baseURL = "https://api.tavily.com"
	// searchPath is the search endpoint path appended to baseURL.
	searchPath = "/search"
	// timeout bounds each HTTP request made by the client.
	timeout = 30 * time.Second
)
|
||||
|
||||
// Client calls Tavily Search API.
type Client struct {
	// APIKey is sent as a Bearer token; Search fails when it is empty.
	APIKey string
	// HTTPClient performs the requests; NewClient configures it with a
	// 30-second timeout.
	HTTPClient *http.Client
}
|
||||
|
||||
// NewClient creates a Tavily client. apiKey is required for search.
|
||||
func NewClient(apiKey string) *Client {
|
||||
return &Client{
|
||||
APIKey: apiKey,
|
||||
HTTPClient: &http.Client{
|
||||
Timeout: timeout,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// SearchRequest is the POST body for /search.
type SearchRequest struct {
	// Query is the search query text.
	Query string `json:"query"`
	// SearchDepth selects the search depth ("basic", "advanced", etc.);
	// omitted from the JSON body when empty.
	SearchDepth string `json:"search_depth,omitempty"`
	// MaxResults caps the number of results returned; omitted when zero.
	MaxResults int `json:"max_results,omitempty"`
}
|
||||
|
||||
// SearchResult is one result item.
type SearchResult struct {
	// Title is the result's page title.
	Title string `json:"title"`
	// URL is the result's source link.
	URL string `json:"url"`
	// Content is the extracted text snippet for the result.
	Content string `json:"content"`
}
|
||||
|
||||
// SearchResponse is the Tavily search response.
type SearchResponse struct {
	// Query echoes the query that was searched.
	Query string `json:"query"`
	// Answer is Tavily's synthesized answer, when the API provides one.
	Answer string `json:"answer,omitempty"`
	// Results holds the individual search results.
	Results []SearchResult `json:"results"`
}
|
||||
|
||||
// Search runs a web search and returns a single text suitable for passing to Ollama as tool result.
|
||||
func (c *Client) Search(query string) (string, error) {
|
||||
if c.APIKey == "" {
|
||||
return "", fmt.Errorf("tavily: API key not set")
|
||||
}
|
||||
body, err := json.Marshal(SearchRequest{
|
||||
Query: query,
|
||||
MaxResults: 5,
|
||||
})
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("marshal request: %w", err)
|
||||
}
|
||||
|
||||
url := baseURL + searchPath
|
||||
req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("new request: %w", err)
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("do request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return "", fmt.Errorf("tavily returned %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var out SearchResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
|
||||
return "", fmt.Errorf("decode response: %w", err)
|
||||
}
|
||||
|
||||
// Build a single text for the model: prefer answer if present, else concatenate results.
|
||||
if out.Answer != "" {
|
||||
return out.Answer, nil
|
||||
}
|
||||
var b bytes.Buffer
|
||||
for i, r := range out.Results {
|
||||
if i > 0 {
|
||||
b.WriteString("\n\n")
|
||||
}
|
||||
b.WriteString(r.Title)
|
||||
b.WriteString(": ")
|
||||
b.WriteString(r.Content)
|
||||
}
|
||||
return b.String(), nil
|
||||
}
|
||||
36
play-life-llm/main.go
Normal file
36
play-life-llm/main.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
"play-life-llm/internal/server"
|
||||
)
|
||||
|
||||
func main() {
|
||||
ollamaHost := getEnv("OLLAMA_HOST", "http://localhost:11434")
|
||||
tavilyAPIKey := getEnv("TAVILY_API_KEY", "")
|
||||
port := getEnv("PORT", "8090")
|
||||
defaultModel := getEnv("OLLAMA_MODEL", "llama3.1:70b")
|
||||
|
||||
cfg := server.Config{
|
||||
OllamaHost: ollamaHost,
|
||||
TavilyAPIKey: tavilyAPIKey,
|
||||
DefaultModel: defaultModel,
|
||||
}
|
||||
router := server.NewRouter(cfg)
|
||||
|
||||
addr := ":" + port
|
||||
log.Printf("play-life-llm listening on %s", addr)
|
||||
if err := http.ListenAndServe(addr, router); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
// getEnv returns the value of the environment variable key, falling back
// to defaultVal when the variable is unset or empty.
func getEnv(key, defaultVal string) string {
	val := os.Getenv(key)
	if val == "" {
		return defaultVal
	}
	return val
}
|
||||
@@ -50,7 +50,7 @@ server {
|
||||
}
|
||||
|
||||
# Proxy other API endpoints to backend
|
||||
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|webhook/|weekly_goals/setup)$ {
|
||||
location ~ ^/(playlife-feed|d2dc349a-0d13-49b2-a8f0-1ab094bfba9b|projects|message/post|webhook/|weekly_goals/setup|project_score_sample_mv/refresh)$ {
|
||||
proxy_pass http://backend:8080;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "play-life-web",
|
||||
"version": "4.27.3",
|
||||
"version": "5.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
|
||||
18
run.sh
18
run.sh
@@ -15,12 +15,18 @@ NC='\033[0m' # No Color
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
# Проверка наличия .env файла
|
||||
# Проверка наличия .env файла (backend, web)
|
||||
if [ ! -f ".env" ]; then
|
||||
echo -e "${RED}❌ Файл .env не найден!${NC}"
|
||||
echo " Создайте файл .env на основе env.example"
|
||||
exit 1
|
||||
fi
|
||||
# Создаём play-life-llm/.env из env.example при отсутствии (для сервиса llm)
|
||||
if [ ! -f "play-life-llm/.env" ]; then
|
||||
echo -e "${YELLOW}Создаём play-life-llm/.env из env.example...${NC}"
|
||||
cp play-life-llm/env.example play-life-llm/.env
|
||||
echo " Задайте TAVILY_API_KEY и OLLAMA_HOST (для Docker: http://host.docker.internal:11434) в play-life-llm/.env"
|
||||
fi
|
||||
|
||||
# Загружаем переменные окружения
|
||||
export $(cat .env | grep -v '^#' | grep -v '^$' | xargs)
|
||||
@@ -41,15 +47,20 @@ if docker-compose ps | grep -q "Up"; then
|
||||
echo -e "${YELLOW}Перезапуск существующих контейнеров...${NC}"
|
||||
echo " - Backend сервер (с пересборкой)"
|
||||
echo " - Frontend приложение (с пересборкой)"
|
||||
echo " - LLM сервис (с пересборкой)"
|
||||
echo " - База данных"
|
||||
# Пересобираем и перезапускаем веб-сервер с новыми изменениями
|
||||
# Пересобираем и перезапускаем веб-приложение
|
||||
echo -e "${BLUE}Пересборка веб-приложения...${NC}"
|
||||
docker-compose build --no-cache play-life-web
|
||||
docker-compose up -d --force-recreate play-life-web
|
||||
# Пересобираем и перезапускаем бэкенд с новыми изменениями
|
||||
# Пересобираем и перезапускаем бэкенд
|
||||
echo -e "${BLUE}Пересборка бэкенда...${NC}"
|
||||
docker-compose build --no-cache backend
|
||||
docker-compose up -d --force-recreate backend
|
||||
# Пересобираем и перезапускаем LLM сервис
|
||||
echo -e "${BLUE}Пересборка LLM сервиса...${NC}"
|
||||
docker-compose build --no-cache llm
|
||||
docker-compose up -d --force-recreate llm
|
||||
# Перезапускаем базу данных
|
||||
docker-compose restart db
|
||||
echo -e "${GREEN}✅ Контейнеры перезапущены${NC}"
|
||||
@@ -58,6 +69,7 @@ else
|
||||
echo " - База данных PostgreSQL 15 (порт: $DB_PORT)"
|
||||
echo " - Backend сервер (порт: $PORT)"
|
||||
echo " - Frontend приложение (порт: $WEB_PORT)"
|
||||
echo " - LLM сервис (порт: 8090)"
|
||||
docker-compose up -d --build --force-recreate
|
||||
echo -e "${GREEN}✅ Контейнеры запущены${NC}"
|
||||
fi
|
||||
|
||||
Reference in New Issue
Block a user