Add archive endpoint and infra adjustments

- Add API route to look up archived tickets by ID
- Update Prisma configuration for PostgreSQL
- Simplify CI/CD workflow
- Add src/generated to gitignore
- Update documentation and dependencies

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
rever-tecnologia 2025-12-11 13:25:36 -03:00
parent 33a59634e7
commit 2682b6e8ac
13 changed files with 253 additions and 165 deletions

View file

@ -0,0 +1,31 @@
{
"permissions": {
"allow": [
"Bash(ssh:*)",
"Bash(bun run lint)",
"Bash(bun run prisma:generate:*)",
"Bash(bun run build:bun:*)",
"WebSearch",
"Bash(bun add:*)",
"Bash(bun run tauri:*)",
"Bash(curl:*)",
"Bash(dir \"D:\\Projetos IA\\sistema-de-chamados\")",
"Bash(findstr:*)",
"Bash(cat:*)",
"Bash(chmod:*)",
"Bash(find:*)",
"Bash(grep:*)",
"WebFetch(domain:medium.com)",
"WebFetch(domain:henrywithu.com)",
"WebFetch(domain:hub.docker.com)",
"Bash(python3:*)",
"WebFetch(domain:www.npmjs.com)",
"WebFetch(domain:docs.strapi.io)",
"Bash(tablename)",
"Bash(\"\"\" OWNER TO renan; FROM pg_tables WHERE schemaname = public;\"\" | docker exec -i c95ebc27eb82 psql -U sistema -d strapi_blog\")",
"Bash(sequence_name)",
"Bash(\"\"\" OWNER TO renan; FROM information_schema.sequences WHERE sequence_schema = public;\"\" | docker exec -i c95ebc27eb82 psql -U sistema -d strapi_blog\")",
"Bash(git add:*)"
]
}
}

View file

@ -19,8 +19,9 @@ REPORTS_CRON_SECRET=reports-cron-secret
# Diretório para arquivamento local de tickets (JSONL/backup)
ARCHIVE_DIR=./archives
# SQLite database (local dev)
DATABASE_URL=file:./prisma/db.dev.sqlite
# PostgreSQL database
# Para desenvolvimento local, use Docker: docker run -d -p 5432:5432 -e POSTGRES_PASSWORD=dev -e POSTGRES_DB=sistema_chamados postgres:18
DATABASE_URL=postgresql://postgres:dev@localhost:5432/sistema_chamados
# SMTP Configuration (production values in docs/SMTP.md)
SMTP_HOST=smtp.c.inova.com.br
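
A quick way to confirm the example PostgreSQL connection string works (a sketch that assumes the container from the `docker run` comment above was started with `--name postgres-dev`; the example does not assign a name, so adjust it to whatever you used):

```bash
# Hypothetical container name; the docker run example above does not set one.
docker exec -i postgres-dev \
  psql "postgresql://postgres:dev@localhost:5432/sistema_chamados" -c "SELECT version();"
```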

View file

@ -25,6 +25,7 @@ jobs:
changes:
name: Detect changes
runs-on: ubuntu-latest
timeout-minutes: 5
outputs:
convex: ${{ steps.filter.outputs.convex }}
web: ${{ steps.filter.outputs.web }}
@ -52,6 +53,7 @@ jobs:
deploy:
name: Deploy (VPS Linux)
needs: changes
timeout-minutes: 30
# Executa em qualquer push na main (independente do filtro) ou quando disparado manualmente
if: ${{ github.event_name == 'workflow_dispatch' || github.ref == 'refs/heads/main' }}
runs-on: [ self-hosted, linux, vps ]
@ -156,26 +158,18 @@ jobs:
run: |
echo "Waiting for Convex container..."
CID=""
# Aguarda ate 60s (12 tentativas x 5s) pelo container ficar pronto
# Nao forca restart - deixa o Swarm gerenciar via health checks
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt: container not ready yet; forcing service restart and sleeping 5s"
docker service ps sistema_convex_backend || true
docker service update --force sistema_convex_backend || true
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -z "$CID" ]; then
echo "No live container after restarts — performing hard restart (scale 0/1)"
docker service scale sistema_convex_backend=0 || true
sleep 5
docker service scale sistema_convex_backend=1 || true
sleep 10
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
fi
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
@ -184,16 +178,13 @@ jobs:
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME não encontrado; não foi possível extrair a chave admin"
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT
echo "Admin key acquired? $([ -n "$KEY" ] && echo yes || echo no)"
if [ -z "$KEY" ]; then
docker service ps sistema_convex_backend || true
exit 1
fi
if [ -z "$KEY" ]; then
echo "ERRO: Nao foi possivel obter a chave admin do Convex"
docker service ps sistema_convex_backend || true
exit 1
fi
@ -269,28 +260,6 @@ jobs:
"$EFFECTIVE_APP_DIR"/ "$DEST"/
echo "Published build to: $DEST"
- name: Ensure SQLite volume ownership (sistema_db)
run: |
set -e
VOLUME_NAME="sistema_sistema_db"
if ! docker volume inspect "$VOLUME_NAME" >/dev/null 2>&1; then
echo "Volume $VOLUME_NAME não encontrado; pulando ajuste de permissões"
exit 0
fi
echo "Ajustando permissões em $VOLUME_NAME..."
docker run --rm -v "$VOLUME_NAME":/data alpine:3 sh -lc '
set -e
chown -R 1000:1000 /data 2>/dev/null || true
chmod -R ug+rwX /data 2>/dev/null || true
if [ ! -e /data/db.sqlite ]; then
touch /data/db.sqlite
chown 1000:1000 /data/db.sqlite 2>/dev/null || true
chmod 660 /data/db.sqlite 2>/dev/null || true
fi
ls -ld /data && ls -l /data/db.sqlite
'
echo "Permissões do volume ajustadas com sucesso"
- name: Swarm deploy (stack.yml)
run: |
APP_DIR_STABLE="$HOME/apps/sistema"
@ -313,9 +282,24 @@ jobs:
echo "NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-<not set>}"
APP_DIR="$APP_DIR_STABLE" RELEASE_SHA=${{ github.sha }} docker stack deploy --with-registry-auth -c stack.yml sistema
# Removido: "Ensure Convex service envs" - as env vars já são passadas pelo stack.yml
# via substituição de variáveis do .env. Fazer docker service update aqui causava
# um rolling update adicional desnecessário com ~60s de downtime.
- name: Wait for services to be healthy
run: |
echo "Aguardando servicos ficarem saudaveis..."
# Aguarda ate 3 minutos (18 tentativas x 10s) pelos servicos
for i in $(seq 1 18); do
WEB_STATUS=$(docker service ls --filter "name=sistema_web" --format "{{.Replicas}}" 2>/dev/null || echo "0/0")
CONVEX_STATUS=$(docker service ls --filter "name=sistema_convex_backend" --format "{{.Replicas}}" 2>/dev/null || echo "0/0")
echo "Tentativa $i/18: web=$WEB_STATUS convex=$CONVEX_STATUS"
# Verifica se web tem 2/2 replicas e convex tem 1/1
if echo "$WEB_STATUS" | grep -q "2/2" && echo "$CONVEX_STATUS" | grep -q "1/1"; then
echo "Todos os servicos estao saudaveis!"
exit 0
fi
sleep 10
done
echo "AVISO: Timeout aguardando servicos. Status atual:"
docker service ls --filter "label=com.docker.stack.namespace=sistema"
# Nao falha o deploy, apenas avisa (o Swarm continua o rolling update em background)
- name: Smoke test — register + heartbeat
run: |
@ -384,6 +368,7 @@ jobs:
convex_deploy:
name: Deploy Convex functions
needs: changes
timeout-minutes: 20
# Executa quando convex/** mudar ou via workflow_dispatch
if: ${{ github.event_name == 'workflow_dispatch' || needs.changes.outputs.convex == 'true' }}
runs-on: [ self-hosted, linux, vps ]
@ -430,24 +415,18 @@ jobs:
run: |
echo "Waiting for Convex container..."
CID=""
# Aguarda ate 60s (12 tentativas x 5s) pelo container ficar pronto
# Nao forca restart - deixa o Swarm gerenciar via health checks
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt: container not ready yet; sleeping 5s"
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -z "$CID" ]; then
echo "No live container after restarts — performing hard restart (scale 0/1)"
docker service scale sistema_convex_backend=0 || true
sleep 5
docker service scale sistema_convex_backend=1 || true
sleep 10
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
fi
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
@ -456,12 +435,13 @@ jobs:
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME não encontrado; não foi possível extrair a chave admin"
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT
echo "Admin key acquired? $([ -n "$KEY" ] && echo yes || echo no)"
if [ -z "$KEY" ]; then
echo "ERRO: Nao foi possivel obter a chave admin do Convex"
docker service ps sistema_convex_backend || true
exit 1
fi
@ -543,6 +523,7 @@ jobs:
desktop_release:
name: Desktop Release (Windows)
timeout-minutes: 30
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
runs-on: [ self-hosted, windows, desktop ]
defaults:
@ -589,6 +570,7 @@ jobs:
diagnose_convex:
name: Diagnose Convex (env + register test)
timeout-minutes: 10
if: ${{ github.event_name == 'workflow_dispatch' }}
runs-on: [ self-hosted, linux, vps ]
steps:
@ -604,24 +586,17 @@ jobs:
run: |
echo "Waiting for Convex container..."
CID=""
# Aguarda ate 60s (12 tentativas x 5s) pelo container ficar pronto
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt: container not ready yet; sleeping 5s"
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -z "$CID" ]; then
echo "No live container after restarts — performing hard restart (scale 0/1)"
docker service scale sistema_convex_backend=0 || true
sleep 5
docker service scale sistema_convex_backend=1 || true
sleep 10
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
fi
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
@ -630,7 +605,7 @@ jobs:
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME não encontrado; não foi possível extrair a chave admin"
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT

.gitignore (vendored): 57 changed lines
View file

@ -1,36 +1,36 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
*.sqlite
# external experiments
nova-calendar-main/
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# env files (can opt-in for committing if needed)
@ -63,3 +63,6 @@ Screenshot*.png
# ferramentas externas
rustdesk/
# Prisma generated files
src/generated/

View file

@ -1,6 +1,6 @@
## Sistema de Chamados
Aplicação **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better Auth** para gestão de tickets da Rever. A stack ainda inclui **Prisma 6** (SQLite padrão para DEV), **Tailwind** e **Turbopack** como bundler padrão (webpack permanece disponível como fallback). Todo o código-fonte fica na raiz do monorepo seguindo as convenções do App Router.
Aplicacao **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better Auth** para gestao de tickets da Rever. A stack ainda inclui **Prisma 7** (PostgreSQL), **Tailwind** e **Turbopack** como bundler padrao (webpack permanece disponivel como fallback). Todo o codigo-fonte fica na raiz do monorepo seguindo as convencoes do App Router.
## Requisitos
@ -17,7 +17,7 @@ Aplicação **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better
```
2. Ajuste o arquivo `.env` (ou crie a partir de `.env.example`) e confirme os valores de:
- `NEXT_PUBLIC_CONVEX_URL` (gerado pelo Convex Dev)
- `BETTER_AUTH_SECRET`, `BETTER_AUTH_URL`, `DATABASE_URL` (por padrão `file:./db.dev.sqlite`, que mapeia para `prisma/db.dev.sqlite`)
- `BETTER_AUTH_SECRET`, `BETTER_AUTH_URL`, `DATABASE_URL` (PostgreSQL, ex: `postgresql://postgres:dev@localhost:5432/sistema_chamados`)
3. Aplique as migrações e gere o client Prisma:
```bash
bunx prisma migrate deploy
@ -31,16 +31,19 @@ Aplicação **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better
### Resetar rapidamente o ambiente local
1. Garanta que `DATABASE_URL` aponte para o arquivo desejado (ex.: `file:./db.dev.sqlite` para desenvolvimento, `file:./db.sqlite` em produção local).
2. Aplique as migrações no arquivo informado:
1. Suba um PostgreSQL local (Docker recomendado):
```bash
DATABASE_URL=file:./db.dev.sqlite bunx prisma migrate deploy
docker run -d --name postgres-dev -p 5432:5432 -e POSTGRES_PASSWORD=dev -e POSTGRES_DB=sistema_chamados postgres:18
```
3. Recrie/garanta as contas padrão de login:
2. Aplique as migracoes:
```bash
DATABASE_URL=file:./db.dev.sqlite bun run auth:seed
bunx prisma migrate deploy
```
4. Suba o servidor normalmente com `bun run dev`. Esses três comandos bastam para reconstruir o ambiente sempre que trocar de computador.
3. Recrie/garanta as contas padrao de login:
```bash
bun run auth:seed
```
4. Suba o servidor normalmente com `bun run dev`.
### Subir serviços locais
@ -66,7 +69,7 @@ Aplicação **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better
### Guia de DEV (Prisma, Auth e Desktop/Tauri)
Para fluxos detalhados de desenvolvimento — banco de dados local (SQLite/Prisma), seed do Better Auth, ajustes do Prisma CLI no DEV e build do Desktop (Tauri) — consulte `docs/DEV.md`.
Para fluxos detalhados de desenvolvimento — banco de dados local (PostgreSQL/Prisma), seed do Better Auth, ajustes do Prisma CLI no DEV e build do Desktop (Tauri) — consulte `docs/DEV.md`.
## Scripts úteis
@ -78,8 +81,8 @@ Para fluxos detalhados de desenvolvimento — banco de dados local (SQLite/Prism
- `bun test` — suíte de testes unitários usando o runner do Bun (o teste de screenshot fica automaticamente ignorado se o matcher não existir).
- `bun run build` — executa `next build --turbopack` (runtime Node, caso prefira evitar o `--bun`).
- `bun run build:webpack` — executa `next build --webpack` como fallback oficial.
- `bun run auth:seed` — atualiza/cria contas padrão do Better Auth (credenciais em `agents.md`).
- `bunx prisma migrate deploy` — aplica migrações ao banco SQLite local.
- `bun run auth:seed` — atualiza/cria contas padrao do Better Auth (credenciais em `agents.md`).
- `bunx prisma migrate deploy` — aplica migracoes ao banco PostgreSQL.
- `bun run convex:dev` — roda o Convex em modo desenvolvimento com Node, gerando tipos em `convex/_generated`.
## Transferir dispositivo entre colaboradores
@ -97,7 +100,7 @@ Sem o reset de agente, o Convex reaproveita o token anterior e o inventário con
- `app/` dentro de `src/` — rotas e layouts do Next.js (App Router).
- `components/` — componentes reutilizáveis (UI, formulários, layouts).
- `convex/` — queries, mutations e seeds do Convex.
- `prisma/` — schema, migrações e banco SQLite (`prisma/db.sqlite`).
- `prisma/` — schema e migracoes do Prisma (PostgreSQL).
- `scripts/` — utilitários em Node para sincronização e seeds adicionais.
- `agents.md` — guia operacional e contexto funcional (em PT-BR).
- `PROXIMOS_PASSOS.md` — backlog de melhorias futuras.

View file

@ -11,7 +11,7 @@
"@hookform/resolvers": "5.2.2",
"@noble/hashes": "2.0.1",
"@paper-design/shaders-react": "0.0.68",
"@prisma/adapter-better-sqlite3": "^7.0.0",
"@prisma/adapter-pg": "^7.1.0",
"@prisma/client": "^7.0.0",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-avatar": "^1.1.10",
@ -40,7 +40,6 @@
"@tiptap/starter-kit": "3.13.0",
"@tiptap/suggestion": "3.13.0",
"better-auth": "^1.3.26",
"better-sqlite3": "12.5.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"convex": "^1.29.2",
@ -50,6 +49,7 @@
"next": "16.0.8",
"next-themes": "^0.4.6",
"pdfkit": "^0.17.2",
"pg": "^8.16.3",
"postcss": "^8.5.6",
"react": "^19.2.1",
"react-day-picker": "9.12.0",
@ -74,6 +74,7 @@
"@types/jsdom": "27.0.0",
"@types/node": "24.10.1",
"@types/pdfkit": "^0.17.3",
"@types/pg": "^8.16.0",
"@types/react": "^19",
"@types/react-dom": "^19",
"@types/sanitize-html": "^2.16.0",
@ -427,7 +428,7 @@
"@popperjs/core": ["@popperjs/core@2.11.8", "", {}, "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A=="],
"@prisma/adapter-better-sqlite3": ["@prisma/adapter-better-sqlite3@7.1.0", "", { "dependencies": { "@prisma/driver-adapter-utils": "7.1.0", "better-sqlite3": "^12.4.5" } }, "sha512-Ex4CimAONWMoUrhU27lpGXb4MdX/59qj+4PBTIuPVJLXZfTxSWuU8KowlRtq1w5iE91WiwMgU1KgeBOKJ81nEA=="],
"@prisma/adapter-pg": ["@prisma/adapter-pg@7.1.0", "", { "dependencies": { "@prisma/driver-adapter-utils": "7.1.0", "pg": "^8.16.3", "postgres-array": "3.0.4" } }, "sha512-DSAnUwkKfX4bUzhkrjGN4IBQzwg0nvFw2W17H0Oa532I5w9nLtTJ9mAEGDs1nUBEGRAsa0c7qsf8CSgfJ4DsBQ=="],
"@prisma/client": ["@prisma/client@7.1.0", "", { "dependencies": { "@prisma/client-runtime-utils": "7.1.0" }, "peerDependencies": { "prisma": "*", "typescript": ">=5.4.0" }, "optionalPeers": ["prisma", "typescript"] }, "sha512-qf7GPYHmS/xybNiSOpzv9wBo+UwqfL2PeyX+08v+KVHDI0AlSCQIh5bBySkH3alu06NX9wy98JEnckhMHoMFfA=="],
@ -821,6 +822,8 @@
"@types/pdfkit": ["@types/pdfkit@0.17.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-odAmVuuguRxKh1X4pbMrJMp8ecwNqHRw6lweupvzK+wuyNmi6wzlUlGVZ9EqMvp3Bs2+L9Ty0sRlrvKL+gsQZg=="],
"@types/pg": ["@types/pg@8.16.0", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ=="],
"@types/react": ["@types/react@19.2.7", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg=="],
"@types/react-dom": ["@types/react-dom@19.2.3", "", { "peerDependencies": { "@types/react": "^19.2.0" } }, "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ=="],
@ -1639,6 +1642,22 @@
"perfect-debounce": ["perfect-debounce@1.0.0", "", {}, "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA=="],
"pg": ["pg@8.16.3", "", { "dependencies": { "pg-connection-string": "^2.9.1", "pg-pool": "^3.10.1", "pg-protocol": "^1.10.3", "pg-types": "2.2.0", "pgpass": "1.0.5" }, "optionalDependencies": { "pg-cloudflare": "^1.2.7" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw=="],
"pg-cloudflare": ["pg-cloudflare@1.2.7", "", {}, "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg=="],
"pg-connection-string": ["pg-connection-string@2.9.1", "", {}, "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w=="],
"pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="],
"pg-pool": ["pg-pool@3.10.1", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg=="],
"pg-protocol": ["pg-protocol@1.10.3", "", {}, "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ=="],
"pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="],
"pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="],
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
"picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
@ -1665,6 +1684,14 @@
"postgres": ["postgres@3.4.7", "", {}, "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw=="],
"postgres-array": ["postgres-array@3.0.4", "", {}, "sha512-nAUSGfSDGOaOAEGwqsRY27GPOea7CNipJPOA7lPbdEpx5Kg3qzdP0AaWC5MlhTWV9s4hFX39nomVZ+C4tnGOJQ=="],
"postgres-bytea": ["postgres-bytea@1.0.0", "", {}, "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w=="],
"postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="],
"postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="],
"prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="],
"prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="],
@ -1853,6 +1880,8 @@
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
"sqlstring": ["sqlstring@2.3.3", "", {}, "sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg=="],
"stable-hash": ["stable-hash@0.0.5", "", {}, "sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA=="],
@ -2035,6 +2064,8 @@
"xmlchars": ["xmlchars@2.2.0", "", {}, "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw=="],
"xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],
"yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
"yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
@ -2163,6 +2194,8 @@
"node-abi/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
"pg-types/postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="],
"png-to-ico/@types/node": ["@types/node@22.19.1", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-LCCV0HdSZZZb34qifBsyWlUmok6W7ouER+oQIGBScS8EsZsQbrtFTUrDX4hOl+CS6p7cnNC4td+qrSVGSCTUfQ=="],
"prop-types/react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],

View file

@ -8,32 +8,42 @@ Este documento consolida o estado atual do ambiente de desenvolvimento, descreve
- **Node.js**: mantenha a versão 20.9+ instalada para ferramentas auxiliares (Prisma CLI, scripts legados em Node) quando não estiver usando o runtime do Bun.
- **Next.js 16**: Projeto roda em `next@16.0.8` com Turbopack como bundler padrão (dev e build); webpack continua disponível como fallback.
- **Lint/Test/Build**: `bun run lint`, `bun test`, `bun run build:bun`. O test runner do Bun já roda em modo não interativo; utilize `bunx vitest --watch` apenas quando precisar do modo watch manualmente.
- **Banco DEV**: SQLite em `prisma/prisma/db.dev.sqlite`. Defina `DATABASE_URL="file:./prisma/db.dev.sqlite"` ao chamar CLI do Prisma.
- **Banco DEV**: PostgreSQL local (Docker recomendado). Defina `DATABASE_URL` apontando para seu PostgreSQL.
- **Desktop (Tauri)**: fonte em `apps/desktop`. Usa Radix tabs + componentes shadcn-like, integra com os endpoints `/api/machines/*` e suporta atualização automática via GitHub Releases.
- **CI**: Workflow `Quality Checks` roda lint/test/build para pushes e PRs na `main`, além do pipeline de deploy existente.
## Banco de dados (Prisma)
## Banco de dados (Prisma + PostgreSQL)
1. Gere/atualize o schema local:
1. Suba um PostgreSQL local (Docker recomendado):
```bash
docker run -d --name postgres-dev -p 5432:5432 -e POSTGRES_PASSWORD=dev -e POSTGRES_DB=sistema_chamados postgres:18
```
2. Configure a `DATABASE_URL` no seu `.env`:
```
DATABASE_URL=postgresql://postgres:dev@localhost:5432/sistema_chamados
```
3. Gere/atualize o schema local:
```bash
bun install
DATABASE_URL="file:./prisma/db.dev.sqlite" bunx prisma db push
DATABASE_URL="file:./prisma/db.dev.sqlite" bun run prisma:generate
DATABASE_URL="file:./prisma/db.dev.sqlite" bun run auth:seed
bunx prisma db push
bun run prisma:generate
bun run auth:seed
```
2. Rode o app Next.js:
4. Rode o app Next.js:
```bash
bun run dev:bun
```
> Alternativas: `bun run dev` (Node) ou `bun run dev:webpack` se precisar do fallback oficial.
3. Credenciais padrão (seed): `admin@sistema.dev / admin123`.
4. Herdou dados antigos? Execute `node scripts/remove-legacy-demo-users.mjs` para limpar contas demo legadas.
> **Por quê inline?** Evitamos declarar `DATABASE_URL` em `prisma/.env` porque o Prisma lê também o `.env` da raiz (produção). O override inline garante isolamento do banco DEV.
5. Credenciais padrao (seed): `admin@sistema.dev / admin123`.
6. Herdou dados antigos? Execute `node scripts/remove-legacy-demo-users.mjs` para limpar contas demo legadas.
## Next.js 16 (estável)
@ -177,7 +187,7 @@ PY
| Sintoma | Causa | Correção |
| --- | --- | --- |
| `ERR_BUN_LOCKFILE_OUTDATED` no pipeline | Dependências do desktop alteradas sem atualizar o `bun.lock` | Rodar `bun install` (raiz e `apps/desktop`) e commitar o lockfile. |
| Prisma falha com `P2021` / tabelas Better Auth inexistentes | CLI leu `.env` da raiz (produção) | Usar `DATABASE_URL="file:./prisma/db.dev.sqlite"` nos comandos. |
| Prisma falha com `P2021` / tabelas Better Auth inexistentes | Banco nao foi inicializado | Rodar `bunx prisma db push` e `bun run auth:seed`. |
| Vitest trava em modo watch | Script `bun test` sem `--run` e CI detecta TTY | Ajustado para `vitest --run --passWithNoTests`. Localmente, use `bun test -- --watch` se quiser. |
| Desktop não encontra updater | Falta `latest.json` ou assinatura inválida | Publicar release com `*.sig` e `latest.json` apontando para os pacotes corretos. |

View file

@ -33,6 +33,7 @@
"@hookform/resolvers": "5.2.2",
"@noble/hashes": "2.0.1",
"@paper-design/shaders-react": "0.0.68",
"@prisma/adapter-pg": "^7.1.0",
"@prisma/client": "^7.0.0",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-avatar": "^1.1.10",
@ -70,6 +71,7 @@
"next": "16.0.8",
"next-themes": "^0.4.6",
"pdfkit": "^0.17.2",
"pg": "^8.16.3",
"postcss": "^8.5.6",
"react": "^19.2.1",
"react-day-picker": "9.12.0",
@ -94,6 +96,7 @@
"@types/jsdom": "27.0.0",
"@types/node": "24.10.1",
"@types/pdfkit": "^0.17.3",
"@types/pg": "^8.16.0",
"@types/react": "^19",
"@types/react-dom": "^19",
"@types/sanitize-html": "^2.16.0",

View file

@ -1,52 +1,22 @@
import "dotenv/config"
import dotenv from "dotenv"
import path from "node:path"
import { defineConfig } from "prisma/config"
const PROJECT_ROOT = process.cwd()
const PRISMA_DIR = path.join(PROJECT_ROOT, "prisma")
// Carrega .env e .env.local (local tem prioridade)
dotenv.config({ path: ".env" })
dotenv.config({ path: ".env.local", override: true })
function resolveFileUrl(url: string) {
if (!url.startsWith("file:")) {
return url
function getDatabaseUrl() {
const url = process.env.DATABASE_URL?.trim()
if (!url) {
throw new Error(
"DATABASE_URL is required. Set it to a PostgreSQL connection string."
)
}
const filePath = url.slice("file:".length)
if (filePath.startsWith("//")) {
return url
}
if (path.isAbsolute(filePath)) {
return `file:${path.normalize(filePath)}`
}
const normalized = path.normalize(filePath)
const prismaPrefix = `prisma${path.sep}`
const relativeToPrisma = normalized.startsWith(prismaPrefix)
? normalized.slice(prismaPrefix.length)
: normalized
const absolutePath = path.resolve(PRISMA_DIR, relativeToPrisma)
if (!absolutePath.startsWith(PROJECT_ROOT)) {
throw new Error(`DATABASE_URL path escapes project directory: ${filePath}`)
}
return `file:${absolutePath}`
}
function normalizeDatasourceUrl(envUrl?: string | null) {
const trimmed = envUrl?.trim()
if (trimmed) {
return resolveFileUrl(trimmed)
}
if (process.env.NODE_ENV === "production") {
return "file:/app/data/db.sqlite"
}
return resolveFileUrl("file:./db.dev.sqlite")
return url
}
export default defineConfig({
@ -55,6 +25,6 @@ export default defineConfig({
path: path.join("prisma", "migrations"),
},
datasource: {
url: normalizeDatasourceUrl(process.env.DATABASE_URL),
url: getDatabaseUrl(),
},
})

View file

@ -7,7 +7,6 @@ generator client {
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
enum UserRole {

View file

@ -0,0 +1,46 @@
import { NextResponse } from "next/server"
import { requireAuthenticatedSession } from "@/lib/auth-server"
import { isStaff } from "@/lib/authz"
import { env } from "@/lib/env"
import { findArchivedTicket } from "@/server/archive/local-tickets"
function getInternalSecret() {
return env.INTERNAL_HEALTH_TOKEN ?? env.REPORTS_CRON_SECRET ?? null
}
export async function GET(_: Request, context: { params: { ticketId: string } }) {
const cronSecret = getInternalSecret()
const headerSecret = _.headers.get("x-cron-secret")?.trim()
const querySecret = new URL(_.url).searchParams.get("token")?.trim()
const hasSecret = cronSecret && (headerSecret === cronSecret || querySecret === cronSecret)
if (!hasSecret) {
const session = await requireAuthenticatedSession()
const role = session.user.role ?? "agent"
if (!isStaff(role)) {
return NextResponse.json({ error: "Acesso negado" }, { status: 403 })
}
}
const ticketId = context.params.ticketId
const found = await findArchivedTicket(ticketId)
if (!found) {
return NextResponse.json({ error: "Ticket arquivado não encontrado" }, { status: 404 })
}
return NextResponse.json(
{
ok: true,
ticketId,
file: found.file,
archivedAt: found.record.archivedAt,
tenantId: found.record.tenantId,
ticket: found.record.ticket,
comments: found.record.comments,
events: found.record.events,
attachments: (found.record as { attachments?: unknown }).attachments ?? [],
},
{ status: 200 }
)
}
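
A minimal sketch of calling the new lookup route (the App Router path is not visible in this diff, so `/api/tickets/archive/<ticketId>` is an assumed placeholder; the `x-cron-secret` header and `token` query parameter come from the handler above, and the secret value from `.env.example`):

```bash
BASE_URL="http://localhost:3000"   # local Next.js dev server (assumption)
SECRET="reports-cron-secret"       # REPORTS_CRON_SECRET from .env.example

# Internal access via the x-cron-secret header checked by getInternalSecret()
curl -s -H "x-cron-secret: $SECRET" "$BASE_URL/api/tickets/archive/abc123"

# The same secret is also accepted as a ?token= query parameter
curl -s "$BASE_URL/api/tickets/archive/abc123?token=$SECRET"

# Without a secret, the handler falls back to requiring an authenticated staff session
```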

View file

@ -1,3 +1,5 @@
import { Pool } from "pg"
import { PrismaPg } from "@prisma/adapter-pg"
import { PrismaClient } from "@/generated/prisma/client"
type PrismaClientInstance = InstanceType<typeof PrismaClient>
@ -12,6 +14,7 @@ export type PrismaDelegateClient = PrismaClientInstance
declare global {
var prisma: PrismaClientInstance | undefined
var pgPool: Pool | undefined
}
// PostgreSQL connection - requires DATABASE_URL environment variable
@ -21,11 +24,21 @@ if (!databaseUrl) {
throw new Error("DATABASE_URL environment variable is required for PostgreSQL connection")
}
export const prisma = global.prisma ?? new PrismaClient()
// Create PostgreSQL connection pool (reused in development to prevent connection exhaustion)
const pool = global.pgPool ?? new Pool({
connectionString: databaseUrl,
})
// Create Prisma adapter with the pg pool
const adapter = new PrismaPg(pool)
// Create Prisma client with the adapter
export const prisma = global.prisma ?? new PrismaClient({ adapter })
if (process.env.NODE_ENV !== "production") {
global.prisma = prisma
console.log("[prisma] Using PostgreSQL database")
global.pgPool = pool
console.log("[prisma] Using PostgreSQL database with pg adapter")
}
export * from "@/generated/prisma/client"

View file

@ -30,7 +30,9 @@ services:
REPORTS_CRON_SECRET: "${REPORTS_CRON_SECRET}"
REPORTS_CRON_BASE_URL: "${REPORTS_CRON_BASE_URL}"
# PostgreSQL connection string (usa o servico 'postgres' existente na rede traefik_public)
DATABASE_URL: "postgresql://${POSTGRES_USER:-sistema}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB:-sistema_chamados}"
# connection_limit: maximo de conexoes por replica (2 replicas x 10 = 20 conexoes)
# pool_timeout: tempo maximo para aguardar conexao disponivel
DATABASE_URL: "postgresql://${POSTGRES_USER:-sistema}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB:-sistema_chamados}?connection_limit=10&pool_timeout=10"
# Evita apt-get na inicialização porque a imagem já vem com toolchain pronta
SKIP_APT_BOOTSTRAP: "true"
# Usado para forçar novo rollout a cada deploy (setado pelo CI)
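
The `connection_limit=10` above caps each web replica's Prisma pool, so the 2 web replicas can hold roughly 20 server connections; a rough sketch for checking that budget against the server (the container name is a placeholder, user and database follow the stack defaults):

```bash
# <postgres-container> is a placeholder for the actual postgres container on the node
docker exec -i <postgres-container> psql -U sistema -d sistema_chamados \
  -c "SHOW max_connections;" \
  -c "SELECT count(*) AS open FROM pg_stat_activity WHERE datname = current_database();"
```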
@ -166,7 +168,6 @@ services:
- traefik_public
volumes:
sistema_db: # Mantido para rollback caso necessário (SQLite)
convex_data:
networks: