Compare commits

..

No commits in common. "main" and "feat/desktop-portal-hide-filters" have entirely different histories.

389 changed files with 6099 additions and 57500 deletions

View file

@@ -1,91 +0,0 @@
{
"permissions": {
"allow": [
"Bash(ssh:*)",
"Bash(bun run lint)",
"Bash(bun run prisma:generate:*)",
"Bash(bun run build:bun:*)",
"WebSearch",
"Bash(bun add:*)",
"Bash(bun run tauri:*)",
"Bash(curl:*)",
"Bash(dir \"D:\\Projetos IA\\sistema-de-chamados\")",
"Bash(findstr:*)",
"Bash(cat:*)",
"Bash(chmod:*)",
"Bash(find:*)",
"Bash(grep:*)",
"WebFetch(domain:medium.com)",
"WebFetch(domain:henrywithu.com)",
"WebFetch(domain:hub.docker.com)",
"Bash(python3:*)",
"WebFetch(domain:www.npmjs.com)",
"WebFetch(domain:docs.strapi.io)",
"Bash(tablename)",
"Bash(\"\"\" OWNER TO renan; FROM pg_tables WHERE schemaname = public;\"\" | docker exec -i c95ebc27eb82 psql -U sistema -d strapi_blog\")",
"Bash(sequence_name)",
"Bash(\"\"\" OWNER TO renan; FROM information_schema.sequences WHERE sequence_schema = public;\"\" | docker exec -i c95ebc27eb82 psql -U sistema -d strapi_blog\")",
"Bash(git add:*)",
"Bash(git commit:*)",
"Bash(git push:*)",
"Bash(cargo check:*)",
"Bash(bun run:*)",
"Bash(icacls \"D:\\Projetos IA\\sistema-de-chamados\\codex_ed25519\")",
"Bash(copy \"D:\\Projetos IA\\sistema-de-chamados\\codex_ed25519\" \"%TEMP%\\codex_key\")",
"Bash(icacls \"%TEMP%\\codex_key\" /inheritance:r /grant:r \"%USERNAME%:R\")",
"Bash(cmd /c \"echo %TEMP%\")",
"Bash(cmd /c \"dir \"\"%TEMP%\\codex_key\"\"\")",
"Bash(where:*)",
"Bash(ssh-keygen:*)",
"Bash(/c/Program\\ Files/Git/usr/bin/ssh:*)",
"Bash(npx convex deploy:*)",
"Bash(dir \"%LOCALAPPDATA%\\Raven\")",
"Bash(dir \"%APPDATA%\\Raven\")",
"Bash(dir \"%LOCALAPPDATA%\\com.raven.app\")",
"Bash(dir \"%APPDATA%\\com.raven.app\")",
"Bash(tasklist:*)",
"Bash(dir /s /b %LOCALAPPDATA%*raven*)",
"Bash(cmd /c \"tasklist | findstr /i raven\")",
"Bash(cmd /c \"dir /s /b %LOCALAPPDATA%\\*raven* 2>nul\")",
"Bash(powershell -Command \"Get-Process | Where-Object {$_ProcessName -like ''*raven*'' -or $_ProcessName -like ''*appsdesktop*''} | Select-Object ProcessName, Id\")",
"Bash(node:*)",
"Bash(bun scripts/test-all-emails.tsx:*)",
"Bash(bun scripts/send-test-react-email.tsx:*)",
"Bash(dir:*)",
"Bash(git reset:*)",
"Bash(npx convex:*)",
"Bash(bun tsc:*)",
"Bash(scp:*)",
"Bash(docker run:*)",
"Bash(cmd /c \"docker run -d --name postgres-dev -p 5432:5432 -e POSTGRES_PASSWORD=dev -e POSTGRES_DB=sistema_chamados postgres:18\")",
"Bash(cmd /c \"docker ps -a --filter name=postgres-dev\")",
"Bash(cmd /c \"docker --version && docker ps -a\")",
"Bash(powershell -Command \"docker --version\")",
"Bash(powershell -Command \"docker run -d --name postgres-dev -p 5432:5432 -e POSTGRES_PASSWORD=dev -e POSTGRES_DB=sistema_chamados postgres:18\")",
"Bash(dir \"D:\\Projetos IA\\sistema-de-chamados\" /b)",
"Bash(bunx prisma migrate:*)",
"Bash(bunx prisma db push:*)",
"Bash(bun run auth:seed:*)",
"Bash(set DATABASE_URL=postgresql://postgres:dev@localhost:5432/sistema_chamados:*)",
"Bash(bun tsx:*)",
"Bash(DATABASE_URL=\"postgresql://postgres:dev@localhost:5432/sistema_chamados\" bun tsx:*)",
"Bash(docker stop:*)",
"Bash(docker rm:*)",
"Bash(git commit -m \"$(cat <<''EOF''\nfeat(checklist): exibe descricao do template e do item no ticket\n\n- Adiciona campo templateDescription ao schema do checklist\n- Copia descricao do template ao aplicar checklist no ticket\n- Exibe ambas descricoes na visualizacao do ticket (template em italico)\n- Adiciona documentacao de desenvolvimento local (docs/LOCAL-DEV.md)\n- Corrige prisma-client.mjs para usar PostgreSQL em vez de SQLite\n\n🤖 Generated with [Claude Code](https://claude.com/claude-code)\n\nCo-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>\nEOF\n)\")",
"Bash(timeout 90 git push:*)",
"Bash(docker ps:*)",
"Bash(docker start:*)",
"Bash(docker inspect:*)",
"Bash(docker exec:*)",
"Bash(timeout 90 git push)",
"Bash(bun test:*)",
"Bash(git restore:*)",
"Bash(cd:*)",
"Bash(dir \"D:\\Projetos IA\\sistema-de-chamados\\src\\components\\ui\" /b)",
"Bash(timeout 120 bun:*)",
"Bash(bun run tauri:build:*)",
"Bash(git remote:*)",
"Bash(powershell.exe -NoProfile -ExecutionPolicy Bypass -File \"D:/Projetos IA/sistema-de-chamados/scripts/test-windows-collection.ps1\")"
]
}
}

View file

@@ -5,33 +5,26 @@ NEXT_PUBLIC_APP_URL=http://localhost:3000
# Better Auth
BETTER_AUTH_URL=http://localhost:3000
BETTER_AUTH_SECRET=your-secret-key-at-least-32-chars-long
BETTER_AUTH_SECRET=change-me-in-prod
# Convex (dev server URL)
NEXT_PUBLIC_CONVEX_URL=http://127.0.0.1:3210
CONVEX_INTERNAL_URL=http://127.0.0.1:3210
# Intervalo (ms) para aceitar token revogado ao sincronizar acessos remotos (opcional)
REMOTE_ACCESS_TOKEN_GRACE_MS=900000
# Token interno opcional para o dashboard de saude (/admin/health) e queries internas
INTERNAL_HEALTH_TOKEN=dev-health-token
# Segredo para crons HTTP (reutilize em prod se preferir um unico token)
REPORTS_CRON_SECRET=reports-cron-secret
# Diretório para arquivamento local de tickets (JSONL/backup)
ARCHIVE_DIR=./archives
# PostgreSQL database (versao 18)
# Para desenvolvimento local, use Docker:
# docker run -d --name postgres-chamados -p 5432:5432 -e POSTGRES_PASSWORD=dev -e POSTGRES_DB=sistema_chamados postgres:18
DATABASE_URL=postgresql://postgres:dev@localhost:5432/sistema_chamados
# SQLite database (local dev)
DATABASE_URL=file:./prisma/db.dev.sqlite
# SMTP Configuration (production values in docs/SMTP.md)
SMTP_HOST=smtp.c.inova.com.br
SMTP_PORT=587
SMTP_SECURE=false
SMTP_USER=envio@rever.com.br
# NOTE(review): a real SMTP credential was committed on this line — rotate it immediately and load it from a secret store instead of version control
SMTP_PASS=change-me
SMTP_FROM_NAME=Sistema de Chamados
SMTP_FROM_EMAIL=envio@rever.com.br
# Optional SMTP (dev)
# SMTP_ADDRESS=localhost
# SMTP_PORT=1025
# SMTP_TLS=false
# SMTP_USERNAME=
# SMTP_PASSWORD=
# SMTP_AUTHENTICATION=login
# SMTP_ENABLE_STARTTLS_AUTO=false
# MAILER_SENDER_EMAIL=no-reply@example.com
# Dev-only bypass to simplify local testing (do NOT enable in prod)
# DEV_BYPASS_AUTH=0

View file

@@ -1,492 +0,0 @@
name: CI/CD Web + Desktop
on:
push:
branches: [ main ]
tags:
- 'v*.*.*'
workflow_dispatch:
inputs:
force_web_deploy:
description: 'Forcar deploy do Web (ignorar filtro)?'
type: boolean
required: false
default: false
force_convex_deploy:
description: 'Forcar deploy do Convex (ignorar filtro)?'
type: boolean
required: false
default: false
env:
APP_DIR: /srv/apps/sistema
VPS_UPDATES_DIR: /var/www/updates
jobs:
changes:
name: Detect changes
runs-on: [ self-hosted, linux, vps ]
timeout-minutes: 5
outputs:
convex: ${{ steps.filter.outputs.convex }}
web: ${{ steps.filter.outputs.web }}
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Paths filter
id: filter
uses: https://github.com/dorny/paths-filter@v3
with:
filters: |
convex:
- 'convex/**'
web:
- 'src/**'
- 'public/**'
- 'prisma/**'
- 'next.config.ts'
- 'package.json'
- 'bun.lock'
- 'tsconfig.json'
- 'middleware.ts'
- 'stack.yml'
deploy:
name: Deploy (VPS Linux)
needs: changes
timeout-minutes: 30
if: ${{ github.event_name == 'workflow_dispatch' || github.ref == 'refs/heads/main' }}
runs-on: [ self-hosted, linux, vps ]
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Determine APP_DIR (fallback safe path)
id: appdir
run: |
TS=$(date +%s)
FALLBACK_DIR="$HOME/apps/web.build.$TS"
mkdir -p "$FALLBACK_DIR"
echo "Using APP_DIR (fallback)=$FALLBACK_DIR"
echo "EFFECTIVE_APP_DIR=$FALLBACK_DIR" >> "$GITHUB_ENV"
- name: Setup Bun
uses: https://github.com/oven-sh/setup-bun@v2
with:
bun-version: 1.3.4
- name: Sync workspace to APP_DIR (preserving local env)
run: |
mkdir -p "$EFFECTIVE_APP_DIR"
RSYNC_FLAGS="-az --inplace --no-times --no-perms --no-owner --no-group --delete"
EXCLUDE_ENV="--exclude '.env*' --exclude 'apps/desktop/.env*' --exclude 'convex/.env*'"
if [ "$EFFECTIVE_APP_DIR" != "${APP_DIR:-/srv/apps/sistema}" ]; then
EXCLUDE_ENV=""
fi
rsync $RSYNC_FLAGS \
--filter='protect .next.old*' \
--exclude '.next.old*' \
--filter='protect node_modules' \
--filter='protect node_modules/**' \
--filter='protect .pnpm-store' \
--filter='protect .pnpm-store/**' \
--filter='protect .env' \
--filter='protect .env*' \
--filter='protect apps/desktop/.env*' \
--filter='protect convex/.env*' \
--exclude '.git' \
--exclude '.next' \
--exclude 'node_modules' \
--exclude 'node_modules/**' \
--exclude '.pnpm-store' \
--exclude '.pnpm-store/**' \
$EXCLUDE_ENV \
./ "$EFFECTIVE_APP_DIR"/
- name: Acquire Convex admin key
id: key
run: |
echo "Waiting for Convex container..."
CID=""
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "No running convex container detected; attempting offline admin key extraction..."
VOLUME="sistema_convex_data"
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT
echo "Admin key acquired? $([ -n "$KEY" ] && echo yes || echo no)"
if [ -z "$KEY" ]; then
echo "ERRO: Nao foi possivel obter a chave admin do Convex"
docker service ps sistema_convex_backend || true
exit 1
fi
- name: Copy production .env if present
run: |
DEFAULT_DIR="${APP_DIR:-/srv/apps/sistema}"
if [ "$EFFECTIVE_APP_DIR" != "$DEFAULT_DIR" ] && [ -f "$DEFAULT_DIR/.env" ]; then
echo "Copying production .env from $DEFAULT_DIR to $EFFECTIVE_APP_DIR"
cp -f "$DEFAULT_DIR/.env" "$EFFECTIVE_APP_DIR/.env"
fi
- name: Ensure Next.js cache directory exists and is writable
run: |
cd "$EFFECTIVE_APP_DIR"
mkdir -p .next/cache
chmod -R u+rwX .next || true
- name: Cache Next.js build cache (.next/cache)
uses: https://github.com/actions/cache@v4
with:
path: ${{ env.EFFECTIVE_APP_DIR }}/.next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('bun.lock') }}-${{ hashFiles('next.config.ts') }}
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('bun.lock') }}-
${{ runner.os }}-nextjs-
- name: Lint check (fail fast before build)
run: |
cd "$EFFECTIVE_APP_DIR"
docker run --rm \
-v "$EFFECTIVE_APP_DIR":/app \
-w /app \
sistema_web:node22-bun \
bash -lc "set -euo pipefail; bun install --frozen-lockfile --filter '!appsdesktop'; bun run lint"
- name: Install and build (Next.js)
env:
PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING: "1"
run: |
cd "$EFFECTIVE_APP_DIR"
docker run --rm \
-e PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING="$PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING" \
-e NODE_OPTIONS="--max-old-space-size=4096" \
-v "$EFFECTIVE_APP_DIR":/app \
-w /app \
sistema_web:node22-bun \
bash -lc "set -euo pipefail; bun install --frozen-lockfile --filter '!appsdesktop'; bun run prisma:generate; bun run build:bun"
- name: Fix Docker-created file permissions
run: |
# Docker cria arquivos como root - corrigir para o usuario runner (UID 1000)
docker run --rm -v "$EFFECTIVE_APP_DIR":/target alpine:3 \
chown -R 1000:1000 /target
echo "Permissoes do build corrigidas"
- name: Atualizar symlink do APP_DIR estavel (deploy atomico)
run: |
set -euo pipefail
ROOT="$HOME/apps"
STABLE_LINK="$ROOT/sistema.current"
mkdir -p "$ROOT"
# Sanidade: se esses arquivos nao existirem, o container vai falhar no boot.
test -f "$EFFECTIVE_APP_DIR/scripts/start-web.sh" || { echo "ERROR: scripts/start-web.sh nao encontrado em $EFFECTIVE_APP_DIR" >&2; exit 1; }
test -f "$EFFECTIVE_APP_DIR/stack.yml" || { echo "ERROR: stack.yml nao encontrado em $EFFECTIVE_APP_DIR" >&2; exit 1; }
test -d "$EFFECTIVE_APP_DIR/node_modules" || { echo "ERROR: node_modules nao encontrado em $EFFECTIVE_APP_DIR (necessario para next start)" >&2; exit 1; }
test -d "$EFFECTIVE_APP_DIR/.next" || { echo "ERROR: .next nao encontrado em $EFFECTIVE_APP_DIR (build nao gerado)" >&2; exit 1; }
PREV=""
if [ -L "$STABLE_LINK" ]; then
PREV="$(readlink -f "$STABLE_LINK" || true)"
fi
echo "PREV_APP_DIR=$PREV" >> "$GITHUB_ENV"
ln -sfn "$EFFECTIVE_APP_DIR" "$STABLE_LINK"
# Compat: mantem $HOME/apps/sistema como symlink quando possivel (nao mexe se for pasta).
if [ -L "$ROOT/sistema" ] || [ ! -e "$ROOT/sistema" ]; then
ln -sfn "$STABLE_LINK" "$ROOT/sistema"
fi
echo "APP_DIR estavel -> $(readlink -f "$STABLE_LINK")"
- name: Swarm deploy (stack.yml)
run: |
APP_DIR_STABLE="$HOME/apps/sistema.current"
if [ ! -d "$APP_DIR_STABLE" ]; then
echo "ERROR: Stable APP_DIR does not exist: $APP_DIR_STABLE" >&2; exit 1
fi
cd "$APP_DIR_STABLE"
set -o allexport
if [ -f .env ]; then
echo "Loading .env from $APP_DIR_STABLE"
. ./.env
else
echo "WARNING: No .env found at $APP_DIR_STABLE - stack vars may be empty!"
fi
set +o allexport
echo "Using APP_DIR (stable)=$APP_DIR_STABLE"
echo "NEXT_PUBLIC_CONVEX_URL=${NEXT_PUBLIC_CONVEX_URL:-<not set>}"
echo "NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-<not set>}"
APP_DIR="$APP_DIR_STABLE" RELEASE_SHA=${{ github.sha }} docker stack deploy --with-registry-auth -c stack.yml sistema
- name: Wait for services to be healthy
run: |
echo "Aguardando servicos ficarem saudaveis..."
for i in $(seq 1 18); do
WEB_STATUS=$(docker service ls --filter "name=sistema_web" --format "{{.Replicas}}" 2>/dev/null || echo "0/0")
CONVEX_STATUS=$(docker service ls --filter "name=sistema_convex_backend" --format "{{.Replicas}}" 2>/dev/null || echo "0/0")
echo "Tentativa $i/18: web=$WEB_STATUS convex=$CONVEX_STATUS"
if echo "$WEB_STATUS" | grep -q "2/2" && echo "$CONVEX_STATUS" | grep -q "1/1"; then
echo "Todos os servicos estao saudaveis!"
exit 0
fi
sleep 10
done
echo "ERRO: Timeout aguardando servicos. Status atual:"
docker service ls --filter "label=com.docker.stack.namespace=sistema" || true
docker service ps sistema_web --no-trunc || true
docker service logs sistema_web --since 5m --raw 2>/dev/null | tail -n 200 || true
if [ -n "${PREV_APP_DIR:-}" ]; then
echo "Rollback: revertendo APP_DIR estavel para: $PREV_APP_DIR"
ln -sfn "$PREV_APP_DIR" "$HOME/apps/sistema.current"
cd "$HOME/apps/sistema.current"
set -o allexport
if [ -f .env ]; then
. ./.env
fi
set +o allexport
APP_DIR="$HOME/apps/sistema.current" RELEASE_SHA=${{ github.sha }} docker stack deploy --with-registry-auth -c stack.yml sistema || true
fi
exit 1
- name: Cleanup old build workdirs (keep last 2)
run: |
set -e
ROOT="$HOME/apps"
KEEP=2
PATTERN='web.build.*'
ACTIVE="$(readlink -f "$HOME/apps/sistema.current" 2>/dev/null || true)"
echo "Scanning $ROOT for old $PATTERN dirs"
LIST=$(find "$ROOT" -maxdepth 1 -type d -name "$PATTERN" | sort -r || true)
echo "$LIST" | sed -n "1,${KEEP}p" | sed 's/^/Keeping: /' || true
echo "$LIST" | sed "1,${KEEP}d" | while read dir; do
[ -z "$dir" ] && continue
if [ -n "$ACTIVE" ] && [ "$(readlink -f "$dir")" = "$ACTIVE" ]; then
echo "Skipping active dir (in use by APP_DIR): $dir"; continue
fi
echo "Removing $dir"
chmod -R u+rwX "$dir" 2>/dev/null || true
rm -rf "$dir" || {
echo "Local rm failed, falling back to docker (root) cleanup for $dir..."
docker run --rm -v "$dir":/target alpine:3 sh -lc 'chown -R 1000:1000 /target 2>/dev/null || true; chmod -R u+rwX /target 2>/dev/null || true; rm -rf /target/* /target/.[!.]* /target/..?* 2>/dev/null || true' || true
rm -rf "$dir" 2>/dev/null || rmdir "$dir" 2>/dev/null || true
}
done
echo "Disk usage (top 10 under $ROOT):"
du -sh "$ROOT"/* 2>/dev/null | sort -rh | head -n 10 || true
convex_deploy:
name: Deploy Convex functions
needs: changes
timeout-minutes: 20
if: ${{ github.event_name == 'workflow_dispatch' || needs.changes.outputs.convex == 'true' }}
runs-on: [ self-hosted, linux, vps ]
env:
APP_DIR: /srv/apps/sistema
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Determine APP_DIR (fallback safe path)
id: appdir
run: |
TS=$(date +%s)
FALLBACK_DIR="$HOME/apps/convex.build.$TS"
mkdir -p "$FALLBACK_DIR"
echo "Using APP_DIR (fallback)=$FALLBACK_DIR"
echo "EFFECTIVE_APP_DIR=$FALLBACK_DIR" >> "$GITHUB_ENV"
- name: Sync workspace to APP_DIR (preserving local env)
run: |
mkdir -p "$EFFECTIVE_APP_DIR"
RSYNC_FLAGS="-az --inplace --no-times --no-perms --no-owner --no-group --delete"
rsync $RSYNC_FLAGS \
--filter='protect .next.old*' \
--exclude '.next.old*' \
--exclude '.env*' \
--exclude 'apps/desktop/.env*' \
--exclude 'convex/.env*' \
--filter='protect node_modules' \
--filter='protect node_modules/**' \
--filter='protect .pnpm-store' \
--filter='protect .pnpm-store/**' \
--exclude '.git' \
--exclude '.next' \
--exclude 'node_modules' \
--exclude 'node_modules/**' \
--exclude '.pnpm-store' \
--exclude '.pnpm-store/**' \
./ "$EFFECTIVE_APP_DIR"/
- name: Acquire Convex admin key
id: key
run: |
echo "Waiting for Convex container..."
CID=""
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "No running convex container detected; attempting offline admin key extraction..."
VOLUME="sistema_convex_data"
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT
echo "Admin key acquired? $([ -n "$KEY" ] && echo yes || echo no)"
if [ -z "$KEY" ]; then
echo "ERRO: Nao foi possivel obter a chave admin do Convex"
docker service ps sistema_convex_backend || true
exit 1
fi
- name: Bring convex.json from live app if present
run: |
if [ -f "$APP_DIR/convex.json" ]; then
echo "Copying $APP_DIR/convex.json -> $EFFECTIVE_APP_DIR/convex.json"
cp -f "$APP_DIR/convex.json" "$EFFECTIVE_APP_DIR/convex.json"
else
echo "No existing convex.json found at $APP_DIR; convex CLI will need self-hosted vars"
fi
- name: Set Convex env vars (self-hosted)
env:
CONVEX_SELF_HOSTED_URL: https://convex.esdrasrenan.com.br
CONVEX_SELF_HOSTED_ADMIN_KEY: ${{ steps.key.outputs.ADMIN_KEY }}
MACHINE_PROVISIONING_SECRET: ${{ secrets.MACHINE_PROVISIONING_SECRET }}
MACHINE_TOKEN_TTL_MS: ${{ secrets.MACHINE_TOKEN_TTL_MS }}
FLEET_SYNC_SECRET: ${{ secrets.FLEET_SYNC_SECRET }}
run: |
set -e
docker run --rm -i \
-v "$EFFECTIVE_APP_DIR":/app \
-w /app \
-e CONVEX_SELF_HOSTED_URL \
-e CONVEX_SELF_HOSTED_ADMIN_KEY \
-e MACHINE_PROVISIONING_SECRET \
-e MACHINE_TOKEN_TTL_MS \
-e FLEET_SYNC_SECRET \
-e CONVEX_TMPDIR=/app/.convex-tmp \
node:20-bullseye bash -lc "set -euo pipefail; curl -fsSL https://bun.sh/install | bash >/tmp/bun-install.log; export BUN_INSTALL=\"\${BUN_INSTALL:-/root/.bun}\"; export PATH=\"\$BUN_INSTALL/bin:\$PATH\"; export CONVEX_TMPDIR=/app/.convex-tmp; bun install --frozen-lockfile; \
if [ -n \"$MACHINE_PROVISIONING_SECRET\" ]; then bunx convex env set MACHINE_PROVISIONING_SECRET \"$MACHINE_PROVISIONING_SECRET\"; fi; \
if [ -n \"$MACHINE_TOKEN_TTL_MS\" ]; then bunx convex env set MACHINE_TOKEN_TTL_MS \"$MACHINE_TOKEN_TTL_MS\"; fi; \
if [ -n \"$FLEET_SYNC_SECRET\" ]; then bunx convex env set FLEET_SYNC_SECRET \"$FLEET_SYNC_SECRET\"; fi; \
bunx convex env list"
- name: Prepare Convex deploy workspace
run: |
cd "$EFFECTIVE_APP_DIR"
if [ -f .env ]; then
echo "Renaming .env -> .env.bak (Convex self-hosted deploy)"
mv -f .env .env.bak
fi
mkdir -p .convex-tmp
- name: Deploy functions to Convex self-hosted
env:
CONVEX_SELF_HOSTED_URL: https://convex.esdrasrenan.com.br
CONVEX_SELF_HOSTED_ADMIN_KEY: ${{ steps.key.outputs.ADMIN_KEY }}
run: |
docker run --rm -i \
-v "$EFFECTIVE_APP_DIR":/app \
-w /app \
-e CI=true \
-e CONVEX_SELF_HOSTED_URL \
-e CONVEX_SELF_HOSTED_ADMIN_KEY \
-e CONVEX_TMPDIR=/app/.convex-tmp \
node:20-bullseye bash -lc "set -euo pipefail; curl -fsSL https://bun.sh/install | bash >/tmp/bun-install.log; export BUN_INSTALL=\"\${BUN_INSTALL:-/root/.bun}\"; export PATH=\"\$BUN_INSTALL/bin:\$PATH\"; export CONVEX_TMPDIR=/app/.convex-tmp; bun install --frozen-lockfile; bunx convex deploy"
- name: Cleanup old convex build workdirs (keep last 2)
run: |
set -e
ROOT="$HOME/apps"
KEEP=2
PATTERN='convex.build.*'
LIST=$(find "$ROOT" -maxdepth 1 -type d -name "$PATTERN" | sort -r || true)
echo "$LIST" | sed -n "1,${KEEP}p" | sed 's/^/Keeping: /' || true
echo "$LIST" | sed "1,${KEEP}d" | while read dir; do
[ -z "$dir" ] && continue
echo "Removing $dir"
chmod -R u+rwX "$dir" 2>/dev/null || true
rm -rf "$dir" || {
echo "Local rm failed, falling back to docker (root) cleanup for $dir..."
docker run --rm -v "$dir":/target alpine:3 sh -lc 'chown -R 1000:1000 /target 2>/dev/null || true; chmod -R u+rwX /target 2>/dev/null || true; rm -rf /target/* /target/.[!.]* /target/..?* 2>/dev/null || true' || true
rm -rf "$dir" 2>/dev/null || rmdir "$dir" 2>/dev/null || true
}
done
# NOTA: Job comentado porque nao ha runner Windows configurado.
# Descomentar quando configurar um runner com labels: [self-hosted, windows, desktop]
#
# desktop_release:
# name: Desktop Release (Windows)
# timeout-minutes: 30
# if: ${{ startsWith(github.ref, 'refs/tags/v') }}
# runs-on: [ self-hosted, windows, desktop ]
# defaults:
# run:
# working-directory: apps/desktop
# steps:
# - name: Checkout
# uses: https://github.com/actions/checkout@v4
#
# - name: Setup pnpm
# uses: https://github.com/pnpm/action-setup@v4
# with:
# version: 10.20.0
#
# - name: Setup Node.js
# uses: https://github.com/actions/setup-node@v4
# with:
# node-version: 20
#
# - name: Install deps (desktop)
# run: pnpm install --frozen-lockfile
#
# - name: Build with Tauri
# uses: https://github.com/tauri-apps/tauri-action@v0
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
# TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
# with:
# projectPath: apps/desktop
#
# - name: Upload bundles to VPS
# run: |
# # Upload via SCP (configurar chave SSH no runner Windows)
# # scp -r src-tauri/target/release/bundle/* user@vps:/var/www/updates/
# echo "TODO: Configurar upload para VPS"

View file

@@ -1,54 +0,0 @@
name: Quality Checks
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
lint-test-build:
name: Lint, Test and Build
runs-on: [ self-hosted, linux, vps ]
env:
BETTER_AUTH_SECRET: test-secret
NEXT_PUBLIC_APP_URL: http://localhost:3000
BETTER_AUTH_URL: http://localhost:3000
NEXT_PUBLIC_CONVEX_URL: http://localhost:3210
DATABASE_URL: file:./prisma/db.dev.sqlite
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Setup Bun
uses: https://github.com/oven-sh/setup-bun@v2
with:
bun-version: 1.3.4
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Cache Next.js build cache
uses: https://github.com/actions/cache@v4
with:
path: |
${{ github.workspace }}/.next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('bun.lock') }}-${{ hashFiles('**/*.{js,jsx,ts,tsx}') }}
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('bun.lock') }}-
- name: Generate Prisma client
env:
PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING: "1"
run: bun run prisma:generate
- name: Lint
run: bun run lint
- name: Test
run: bun test
- name: Build
run: bun run build:bun

View file

@@ -25,7 +25,6 @@ jobs:
changes:
name: Detect changes
runs-on: ubuntu-latest
timeout-minutes: 5
outputs:
convex: ${{ steps.filter.outputs.convex }}
web: ${{ steps.filter.outputs.web }}
@@ -53,7 +52,6 @@ jobs:
deploy:
name: Deploy (VPS Linux)
needs: changes
timeout-minutes: 30
# Executa em qualquer push na main (independente do filtro) ou quando disparado manualmente
if: ${{ github.event_name == 'workflow_dispatch' || github.ref == 'refs/heads/main' }}
runs-on: [ self-hosted, linux, vps ]
@@ -156,38 +154,11 @@ jobs:
- name: Acquire Convex admin key
id: key
run: |
echo "Waiting for Convex container..."
CID=""
# Aguarda ate 60s (12 tentativas x 5s) pelo container ficar pronto
# Nao forca restart - deixa o Swarm gerenciar via health checks
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "No running convex container detected; attempting offline admin key extraction..."
VOLUME="sistema_convex_data"
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -z "$CID" ]; then echo "No convex container"; exit 1; fi
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT
echo "Admin key acquired? $([ -n "$KEY" ] && echo yes || echo no)"
if [ -z "$KEY" ]; then
echo "ERRO: Nao foi possivel obter a chave admin do Convex"
docker service ps sistema_convex_backend || true
exit 1
fi
- name: Copy production .env if present
run: |
@@ -217,27 +188,12 @@ jobs:
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('pnpm-lock.yaml', 'bun.lock') }}-
- name: Lint check (fail fast before build)
run: |
cd "$EFFECTIVE_APP_DIR"
docker run --rm \
-v "$EFFECTIVE_APP_DIR":/app \
-w /app \
sistema_web:node22-bun \
bash -lc "set -euo pipefail; bun install --frozen-lockfile --filter '!appsdesktop'; bun run lint"
- name: Install and build (Next.js)
env:
PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING: "1"
run: |
cd "$EFFECTIVE_APP_DIR"
docker run --rm \
-e PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING="$PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING" \
-e NODE_OPTIONS="--max-old-space-size=4096" \
-v "$EFFECTIVE_APP_DIR":/app \
-w /app \
sistema_web:node22-bun \
bash -lc "set -euo pipefail; bun install --frozen-lockfile --filter '!appsdesktop'; bun run prisma:generate; bun run build:bun"
bun install --frozen-lockfile --filter '!appsdesktop'
bun run prisma:generate
bun run build:bun
- name: Publish build to stable APP_DIR directory
run: |
@@ -262,44 +218,38 @@ jobs:
- name: Swarm deploy (stack.yml)
run: |
cd "$EFFECTIVE_APP_DIR"
# Exporta variáveis do .env para substituição no stack (ex.: MACHINE_PROVISIONING_SECRET)
set -o allexport
if [ -f .env ]; then . ./.env; fi
set +o allexport
APP_DIR_STABLE="$HOME/apps/sistema"
if [ ! -d "$APP_DIR_STABLE" ]; then
echo "ERROR: Stable APP_DIR does not exist: $APP_DIR_STABLE" >&2; exit 1
fi
cd "$APP_DIR_STABLE"
# Exporta variáveis do .env (do diretório de produção) para substituição no stack
# IMPORTANTE: Usar o .env do APP_DIR_STABLE, não do EFFECTIVE_APP_DIR (build temporário)
set -o allexport
if [ -f .env ]; then
echo "Loading .env from $APP_DIR_STABLE"
. ./.env
else
echo "WARNING: No .env found at $APP_DIR_STABLE - stack vars may be empty!"
fi
set +o allexport
echo "Using APP_DIR (stable)=$APP_DIR_STABLE"
echo "NEXT_PUBLIC_CONVEX_URL=${NEXT_PUBLIC_CONVEX_URL:-<not set>}"
echo "NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-<not set>}"
APP_DIR="$APP_DIR_STABLE" RELEASE_SHA=${{ github.sha }} docker stack deploy --with-registry-auth -c stack.yml sistema
- name: Wait for services to be healthy
- name: Ensure Convex service envs and restart
run: |
echo "Aguardando servicos ficarem saudaveis..."
# Aguarda ate 3 minutos (18 tentativas x 10s) pelos servicos
for i in $(seq 1 18); do
WEB_STATUS=$(docker service ls --filter "name=sistema_web" --format "{{.Replicas}}" 2>/dev/null || echo "0/0")
CONVEX_STATUS=$(docker service ls --filter "name=sistema_convex_backend" --format "{{.Replicas}}" 2>/dev/null || echo "0/0")
echo "Tentativa $i/18: web=$WEB_STATUS convex=$CONVEX_STATUS"
# Verifica se web tem 2/2 replicas e convex tem 1/1
if echo "$WEB_STATUS" | grep -q "2/2" && echo "$CONVEX_STATUS" | grep -q "1/1"; then
echo "Todos os servicos estao saudaveis!"
exit 0
fi
sleep 10
done
echo "AVISO: Timeout aguardando servicos. Status atual:"
docker service ls --filter "label=com.docker.stack.namespace=sistema"
# Nao falha o deploy, apenas avisa (o Swarm continua o rolling update em background)
cd "$EFFECTIVE_APP_DIR"
set -o allexport
if [ -f .env ]; then . ./.env; fi
set +o allexport
echo "Ensuring Convex envs on service: sistema_convex_backend"
if [ -n "${MACHINE_PROVISIONING_SECRET:-}" ]; then
docker service update --env-add MACHINE_PROVISIONING_SECRET="${MACHINE_PROVISIONING_SECRET}" sistema_convex_backend || true
fi
if [ -n "${MACHINE_TOKEN_TTL_MS:-}" ]; then
docker service update --env-add MACHINE_TOKEN_TTL_MS="${MACHINE_TOKEN_TTL_MS}" sistema_convex_backend || true
fi
if [ -n "${FLEET_SYNC_SECRET:-}" ]; then
docker service update --env-add FLEET_SYNC_SECRET="${FLEET_SYNC_SECRET}" sistema_convex_backend || true
fi
echo "Current envs:"
docker service inspect sistema_convex_backend --format '{{range .Spec.TaskTemplate.ContainerSpec.Env}}{{println .}}{{end}}' || true
echo "Forcing service restart..."
docker service update --force sistema_convex_backend || true
- name: Smoke test — register + heartbeat
run: |
@@ -359,16 +309,14 @@ jobs:
run: |
docker service update --force sistema_web
# Comentado: o stack deploy já atualiza os serviços com update_config.order: start-first
# Forçar update aqui causa downtime porque ignora a estratégia de rolling update
# - name: Restart Convex backend service (optional)
# run: |
# docker service update --force sistema_convex_backend
- name: Restart Convex backend service (optional)
run: |
# Fail the job if the convex backend cannot restart
docker service update --force sistema_convex_backend
convex_deploy:
name: Deploy Convex functions
needs: changes
timeout-minutes: 20
# Executa quando convex/** mudar ou via workflow_dispatch
if: ${{ github.event_name == 'workflow_dispatch' || needs.changes.outputs.convex == 'true' }}
runs-on: [ self-hosted, linux, vps ]
@@ -413,38 +361,11 @@ jobs:
- name: Acquire Convex admin key
id: key
run: |
echo "Waiting for Convex container..."
CID=""
# Aguarda ate 60s (12 tentativas x 5s) pelo container ficar pronto
# Nao forca restart - deixa o Swarm gerenciar via health checks
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "No running convex container detected; attempting offline admin key extraction..."
VOLUME="sistema_convex_data"
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -z "$CID" ]; then echo "No convex container"; exit 1; fi
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT
echo "Admin key acquired? $([ -n "$KEY" ] && echo yes || echo no)"
if [ -z "$KEY" ]; then
echo "ERRO: Nao foi possivel obter a chave admin do Convex"
docker service ps sistema_convex_backend || true
exit 1
fi
- name: Bring convex.json from live app if present
run: |
@ -472,22 +393,19 @@ jobs:
-e MACHINE_PROVISIONING_SECRET \
-e MACHINE_TOKEN_TTL_MS \
-e FLEET_SYNC_SECRET \
-e CONVEX_TMPDIR=/app/.convex-tmp \
node:20-bullseye bash -lc "set -euo pipefail; curl -fsSL https://bun.sh/install | bash >/tmp/bun-install.log; export BUN_INSTALL=\"\${BUN_INSTALL:-/root/.bun}\"; export PATH=\"\$BUN_INSTALL/bin:\$PATH\"; export CONVEX_TMPDIR=/app/.convex-tmp; bun install --frozen-lockfile; \
node:20-bullseye bash -lc "set -euo pipefail; curl -fsSL https://bun.sh/install | bash >/tmp/bun-install.log; export BUN_INSTALL=\"\${BUN_INSTALL:-/root/.bun}\"; export PATH=\"\$BUN_INSTALL/bin:\$PATH\"; bun install --frozen-lockfile; \
if [ -n \"$MACHINE_PROVISIONING_SECRET\" ]; then bunx convex env set MACHINE_PROVISIONING_SECRET \"$MACHINE_PROVISIONING_SECRET\"; fi; \
if [ -n \"$MACHINE_TOKEN_TTL_MS\" ]; then bunx convex env set MACHINE_TOKEN_TTL_MS \"$MACHINE_TOKEN_TTL_MS\"; fi; \
if [ -n \"$FLEET_SYNC_SECRET\" ]; then bunx convex env set FLEET_SYNC_SECRET \"$FLEET_SYNC_SECRET\"; fi; \
bunx convex env list"
- name: Prepare Convex deploy workspace
- name: Ensure .env is not present for Convex deploy
run: |
cd "$EFFECTIVE_APP_DIR"
if [ -f .env ]; then
echo "Renaming .env -> .env.bak (Convex self-hosted deploy)"
mv -f .env .env.bak
fi
# Dedicated tmp dir outside convex/_generated so CLI cleanups don't remove it
mkdir -p .convex-tmp
- name: Deploy functions to Convex self-hosted
env:
CONVEX_SELF_HOSTED_URL: https://convex.esdrasrenan.com.br
@ -499,8 +417,7 @@ jobs:
-e CI=true \
-e CONVEX_SELF_HOSTED_URL \
-e CONVEX_SELF_HOSTED_ADMIN_KEY \
-e CONVEX_TMPDIR=/app/.convex-tmp \
node:20-bullseye bash -lc "set -euo pipefail; curl -fsSL https://bun.sh/install | bash >/tmp/bun-install.log; export BUN_INSTALL=\"\${BUN_INSTALL:-/root/.bun}\"; export PATH=\"\$BUN_INSTALL/bin:\$PATH\"; export CONVEX_TMPDIR=/app/.convex-tmp; bun install --frozen-lockfile; bunx convex deploy"
node:20-bullseye bash -lc "set -euo pipefail; curl -fsSL https://bun.sh/install | bash >/tmp/bun-install.log; export BUN_INSTALL=\"\${BUN_INSTALL:-/root/.bun}\"; export PATH=\"\$BUN_INSTALL/bin:\$PATH\"; bun install --frozen-lockfile; bunx convex deploy"
- name: Cleanup old convex build workdirs (keep last 2)
run: |
@ -523,7 +440,6 @@ jobs:
desktop_release:
name: Desktop Release (Windows)
timeout-minutes: 30
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
runs-on: [ self-hosted, windows, desktop ]
defaults:
@ -570,7 +486,6 @@ jobs:
diagnose_convex:
name: Diagnose Convex (env + register test)
timeout-minutes: 10
if: ${{ github.event_name == 'workflow_dispatch' }}
runs-on: [ self-hosted, linux, vps ]
steps:
@ -584,30 +499,9 @@ jobs:
- name: Acquire Convex admin key
id: key
run: |
echo "Waiting for Convex container..."
CID=""
# Aguarda ate 60s (12 tentativas x 5s) pelo container ficar pronto
for attempt in $(seq 1 12); do
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -n "$CID" ]; then
echo "Convex container ready (CID=$CID)"
break
fi
echo "Attempt $attempt/12: container not ready yet; waiting 5s..."
sleep 5
done
CONVEX_IMAGE="ghcr.io/get-convex/convex-backend:latest"
if [ -n "$CID" ]; then
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "No running convex container detected; attempting offline admin key extraction..."
VOLUME="sistema_convex_data"
if docker volume inspect "$VOLUME" >/dev/null 2>&1; then
KEY=$(docker run --rm --entrypoint /bin/sh -v "$VOLUME":/convex/data "$CONVEX_IMAGE" -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
else
echo "Volume $VOLUME nao encontrado; nao foi possivel extrair a chave admin"
fi
fi
CID=$(docker ps --format '{{.ID}} {{.Names}}' | awk '/sistema_convex_backend/{print $1; exit}')
if [ -z "$CID" ]; then echo "No convex container"; exit 1; fi
KEY=$(docker exec -i "$CID" /bin/sh -lc './generate_admin_key.sh' | tr -d '\r' | grep -o 'convex-self-hosted|[^ ]*' | tail -n1)
echo "ADMIN_KEY=$KEY" >> $GITHUB_OUTPUT
echo "Admin key acquired? $([ -n "$KEY" ] && echo yes || echo no)"
- name: List Convex env and set missing

View file

@ -48,8 +48,6 @@ jobs:
${{ runner.os }}-nextjs-${{ hashFiles('pnpm-lock.yaml', 'bun.lock') }}-
- name: Generate Prisma client
env:
PRISMA_ENGINES_CHECKSUM_IGNORE_MISSING: "1"
run: bun run prisma:generate
- name: Lint

65
.gitignore vendored
View file

@ -1,40 +1,36 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions
# testing
/coverage
# next.js
/.next/
/out/
# React Email
/.react-email/
/emails/out/
# production
/build
# misc
.DS_Store
*.pem
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
*.sqlite
# external experiments
nova-calendar-main/
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# env files (can opt-in for committing if needed)
@ -64,10 +60,3 @@ Screenshot*.png
*:\:Zone.Identifier
# Infrastructure secrets
.ci.env
# ferramentas externas
rustdesk/
# Prisma generated files
src/generated/
apps/desktop/service/target/

View file

@ -1,29 +0,0 @@
# Runtime image with Node 22 + Bun 1.3.4 and build toolchain preinstalled
FROM node:22-bullseye-slim
ENV BUN_INSTALL=/root/.bun
ENV PATH="$BUN_INSTALL/bin:$PATH"
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
ca-certificates \
curl \
gnupg \
unzip \
build-essential \
python3 \
make \
pkg-config \
git \
&& rm -rf /var/lib/apt/lists/*
# Install Bun 1.3.4
RUN curl -fsSL https://bun.sh/install \
| bash -s -- bun-v1.3.4 \
&& ln -sf /root/.bun/bin/bun /usr/local/bin/bun \
&& ln -sf /root/.bun/bin/bun /usr/local/bin/bunx
WORKDIR /app
# We'll mount the app code at runtime; image just provides runtimes/toolchains.
CMD ["bash"]

View file

@ -1,13 +1,12 @@
## Sistema de Chamados
Aplicacao **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better Auth** para gestao de tickets da Rever. A stack ainda inclui **Prisma 7** (PostgreSQL), **Tailwind** e **Turbopack** como bundler padrao (webpack permanece disponivel como fallback). Todo o codigo-fonte fica na raiz do monorepo seguindo as convencoes do App Router.
Aplicação **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better Auth** para gestão de tickets da Rever. A stack ainda inclui **Prisma 6** (SQLite padrão para DEV), **Tailwind** e **Turbopack** em desenvolvimento (o build de produção roda com o webpack padrão do Next). Todo o código-fonte fica na raiz do monorepo seguindo as convenções do App Router.
## Requisitos
- Bun >= 1.3 (recomendado 1.3.1). Após instalar via script oficial, adicione `export PATH="$HOME/.bun/bin:$PATH"` ao seu shell (ex.: `.bashrc`) para ter `bun` disponível globalmente.
- Node.js >= 20 (necessário para ferramentas auxiliares como Prisma CLI e Next.js em modo fallback).
- CLI do Convex (`bunx convex dev` instalará automaticamente no primeiro uso, se ainda não estiver presente).
- GitHub Actions/autodeploy dependem dessas versões e do CLI do Convex disponível; use `npx convex --help` para confirmar.
## Configuração rápida
@ -17,7 +16,7 @@ Aplicacao **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better Au
```
2. Ajuste o arquivo `.env` (ou crie a partir de `.env.example`) e confirme os valores de:
- `NEXT_PUBLIC_CONVEX_URL` (gerado pelo Convex Dev)
- `BETTER_AUTH_SECRET`, `BETTER_AUTH_URL`, `DATABASE_URL` (PostgreSQL, ex: `postgresql://postgres:dev@localhost:5432/sistema_chamados`)
- `BETTER_AUTH_SECRET`, `BETTER_AUTH_URL`, `DATABASE_URL` (por padrão `file:./db.dev.sqlite`, que mapeia para `prisma/db.dev.sqlite`)
3. Aplique as migrações e gere o client Prisma:
```bash
bunx prisma migrate deploy
@ -31,25 +30,22 @@ Aplicacao **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better Au
### Resetar rapidamente o ambiente local
1. Suba um PostgreSQL local (Docker recomendado):
1. Garanta que `DATABASE_URL` aponte para o arquivo desejado (ex.: `file:./db.dev.sqlite` para desenvolvimento, `file:./db.sqlite` em produção local).
2. Aplique as migrações no arquivo informado:
```bash
docker run -d --name postgres-dev -p 5432:5432 -e POSTGRES_PASSWORD=dev -e POSTGRES_DB=sistema_chamados postgres:18
DATABASE_URL=file:./db.dev.sqlite bunx prisma migrate deploy
```
2. Aplique as migracoes:
3. Recrie/garanta as contas padrão de login:
```bash
bunx prisma migrate deploy
DATABASE_URL=file:./db.dev.sqlite bun run auth:seed
```
3. Recrie/garanta as contas padrao de login:
```bash
bun run auth:seed
```
4. Suba o servidor normalmente com `bun run dev`.
4. Suba o servidor normalmente com `bun run dev`. Esses três comandos bastam para reconstruir o ambiente sempre que trocar de computador.
### Subir serviços locais
- (Opcional) Para re-sincronizar manualmente as filas padrão, execute `bun run queues:ensure`.
- Em um terminal, rode o backend em tempo real do Convex com `bun run convex:dev:bun` (ou `bun run convex:dev` para o runtime Node).
- Em outro terminal, suba o frontend Next.js (Turbopack) com `bun run dev:bun` (`bun run dev:webpack` serve como fallback).
- Em outro terminal, suba o frontend Next.js (Turbopack) com `bun run dev:bun` (`bun run dev:webpack` serve como fallback).
- Com o Convex rodando, acesse `http://localhost:3000/dev/seed` uma vez para popular dados de demonstração (tickets, usuários, comentários).
> Se o CLI perguntar sobre configuração do projeto Convex, escolha criar um novo deployment local (opção padrão) e confirme. As credenciais são armazenadas em `.convex/` automaticamente.
@ -69,20 +65,20 @@ Aplicacao **Next.js 16 (App Router)** com **React 19**, **Convex** e **Better Au
### Guia de DEV (Prisma, Auth e Desktop/Tauri)
Para fluxos detalhados de desenvolvimento — banco de dados local (PostgreSQL/Prisma), seed do Better Auth, ajustes do Prisma CLI no DEV e build do Desktop (Tauri) — consulte `docs/DEV.md`.
Para fluxos detalhados de desenvolvimento — banco de dados local (SQLite/Prisma), seed do Better Auth, ajustes do Prisma CLI no DEV e build do Desktop (Tauri) — consulte `docs/DEV.md`.
## Scripts úteis
- `bun run dev:bun` — padrão atual para o Next.js com runtime Bun (`bun run dev:webpack` permanece como fallback).
- `bun run convex:dev:bun` — runtime Bun para o Convex (`bun run convex:dev` mantém o fluxo antigo usando Node).
- `bun run build:bun` / `bun run start:bun` — build e serve com Bun usando Turbopack (padrão atual).
- `bun run build:bun` / `bun run start:bun` — build e serve com Bun; `bun run build` mantém o fallback Node.
- `bun run dev:webpack` — fallback do Next.js em modo desenvolvimento (webpack).
- `bun run lint` — ESLint com as regras do projeto.
- `bun test` — suíte de testes unitários usando o runner do Bun (o teste de screenshot fica automaticamente ignorado se o matcher não existir).
- `bun run build` — executa `next build --turbopack` (runtime Node, caso prefira evitar o `--bun`).
- `bun run build:webpack` — executa `next build --webpack` como fallback oficial.
- `bun run auth:seed` — atualiza/cria contas padrao do Better Auth (credenciais em `agents.md`).
- `bunx prisma migrate deploy` — aplica migracoes ao banco PostgreSQL.
- `bun run build` — executa `next build --webpack` (webpack padrão do Next).
- `bun run build:turbopack` — executa `next build --turbopack` para reproduzir/debugar problemas.
- `bun run auth:seed` — atualiza/cria contas padrão do Better Auth (credenciais em `agents.md`).
- `bunx prisma migrate deploy` — aplica migrações ao banco SQLite local.
- `bun run convex:dev` — roda o Convex em modo desenvolvimento com Node, gerando tipos em `convex/_generated`.
## Transferir dispositivo entre colaboradores
@ -100,7 +96,7 @@ Sem o reset de agente, o Convex reaproveita o token anterior e o inventário con
- `app/` dentro de `src/` — rotas e layouts do Next.js (App Router).
- `components/` — componentes reutilizáveis (UI, formulários, layouts).
- `convex/` — queries, mutations e seeds do Convex.
- `prisma/` — schema e migracoes do Prisma (PostgreSQL).
- `prisma/` — schema, migrações e banco SQLite (`prisma/db.sqlite`).
- `scripts/` — utilitários em Node para sincronização e seeds adicionais.
- `agents.md` — guia operacional e contexto funcional (em PT-BR).
- `PROXIMOS_PASSOS.md` — backlog de melhorias futuras.
@ -117,7 +113,7 @@ Consulte `PROXIMOS_PASSOS.md` para acompanhar o backlog funcional e o progresso
- `bun install` é o fluxo padrão (o arquivo `bun.lock` deve ser versionado; use `bun install --frozen-lockfile` em CI).
- `bun run dev:bun`, `bun run convex:dev:bun`, `bun run build:bun` e `bun run start:bun` já estão configurados; internamente executam `bun run --bun <script>` para usar o runtime do Bun sem abrir mão dos scripts existentes. O `cross-env` garante os valores esperados de `NODE_ENV` (`development`/`production`).
- O bundler padrão é o Turbopack; se precisar comparar/debugar com webpack, use `bun run build:webpack`.
- Se precisar validar o bundler experimental, use `bun run build:turbopack`; para o fluxo estável mantenha `bun run build` (webpack).
- `bun test` utiliza o test runner do Bun. O teste de snapshot de screenshot é automaticamente ignorado quando o matcher não está disponível; testes de navegador completos continuam via `bun run test:browser` (Vitest + Playwright).
<!-- ci: smoke test 3 -->

View file

@ -19,10 +19,10 @@ Os demais colaboradores reais são provisionados via **Convites & acessos**. Cas
- Seeds de usuários/tickets demo: `convex/seed.ts`.
- Para DEV: rode `bun run convex:dev:bun` e acesse `/dev/seed` uma vez para popular dados realistas.
## Stack atual (18/12/2025)
- **Next.js**: `16.0.10` (Turbopack por padrão; webpack fica como fallback).
## Stack atual (06/11/2025)
- **Next.js**: `16.0.1` (Turbopack em desenvolvimento; builds de produção usam webpack).
- Whitelist de domínios em `src/config/allowed-hosts.ts` é aplicada pelo `middleware.ts`.
- **React / React DOM**: `19.2.1`.
- **React / React DOM**: `19.2.0`.
- **Trilha de testes**: Vitest (`bun test`) sem modo watch por padrão (`--run --passWithNoTests`).
- **CI**: workflow `Quality Checks` (`.github/workflows/quality-checks.yml`) roda `bun install`, `bun run prisma:generate`, `bun run lint`, `bun test`, `bun run build:bun`. Variáveis críticas (`BETTER_AUTH_SECRET`, `NEXT_PUBLIC_APP_URL`, etc.) são definidas apenas no runner — não afetam a VPS.
- **Disciplina pós-mudanças**: sempre que fizer alterações locais, rode **obrigatoriamente** `bun run lint`, `bun run build:bun` e `bun test` antes de entregar ou abrir PR. Esses comandos são mandatórios também para os agentes/automations, garantindo que o projeto continua íntegro.
@ -38,7 +38,7 @@ Os demais colaboradores reais são provisionados via **Convites & acessos**. Cas
BETTER_AUTH_URL=http://localhost:3000
BETTER_AUTH_SECRET=dev-only-long-random-string
NEXT_PUBLIC_CONVEX_URL=http://127.0.0.1:3210
DATABASE_URL=postgresql://postgres:dev@localhost:5432/sistema_chamados
DATABASE_URL=file:./prisma/db.dev.sqlite
```
3. `bun run auth:seed`
4. (Opcional) `bun run queues:ensure`
@ -47,8 +47,8 @@ Os demais colaboradores reais são provisionados via **Convites & acessos**. Cas
7. Acesse `http://localhost:3000` e valide login com os usuários padrão.
### Banco de dados
- Local (DEV): PostgreSQL local (ex.: `postgres:18`) com `DATABASE_URL=postgresql://postgres:dev@localhost:5432/sistema_chamados`.
- Produção: PostgreSQL no Swarm (serviço `postgres` em uso hoje; `postgres18` provisionado para migração). Migrations em PROD devem apontar para o `DATABASE_URL` ativo (ver `docs/OPERATIONS.md`).
- Local (DEV): `DATABASE_URL=file:./prisma/db.dev.sqlite` (guardado em `prisma/prisma/`).
- Produção: SQLite persistido no volume Swarm `sistema_sistema_db`. Migrations em PROD devem apontar para esse volume (ver `docs/DEPLOY-RUNBOOK.md`).
- Limpeza de legados: `node scripts/remove-legacy-demo-users.mjs` remove contas demo antigas (Cliente Demo, gestores fictícios etc.).
### Verificações antes de PR/deploy
@ -90,7 +90,7 @@ bun run build:bun
- **Testes unitários/integrados (Vitest)**:
- Cobertura atual inclui utilitários (`tests/*.test.ts`), rotas `/api/machines/*` e `sendSmtpMail`.
- Executar `bun test -- --watch` apenas quando precisar de modo interativo.
- **Build**: `bun run build:bun` (`next build --turbopack`). Quando precisar do fallback oficial, rode `bun run build:webpack`.
- **Build**: `bun run build:bun` (`next build --webpack`, webpack). Para reproduzir problemas do bundler experimental, use `bun run build:turbopack`.
- **CI**: falhas mais comuns
- `ERR_BUN_LOCKFILE_OUTDATED`: confirme que o `bun.lock` foi regenerado (`bun install`) após alterar dependências, especialmente do app desktop.
- Variáveis Better Auth ausentes (`BETTER_AUTH_SECRET`): definidas no workflow (`Quality Checks`).
@ -104,12 +104,12 @@ bun run build:bun
ln -sfn /home/renan/apps/sistema.build.<novo> /home/renan/apps/sistema.current
docker service update --force sistema_web
```
- Resolver `P3009` (migration falhou) no PostgreSQL ativo:
- Resolver `P3009` (migration falhou) sempre no volume `sistema_sistema_db`:
```bash
docker service scale sistema_web=0
docker run --rm -it --network traefik_public \
--env-file /home/renan/apps/sistema.current/.env \
docker run --rm -it -e DATABASE_URL=file:/app/data/db.sqlite \
-v /home/renan/apps/sistema.current:/app \
-v sistema_sistema_db:/app/data -w /app \
oven/bun:1 bash -lc "bun install --frozen-lockfile && bun x prisma migrate resolve --rolled-back <migration> && bun x prisma migrate deploy"
docker service scale sistema_web=1
```
@ -164,51 +164,8 @@ bun run build:bun
- **Docs complementares**:
- `docs/DEV.md` — guia diário atualizado.
- `docs/STATUS-2025-10-16.md` — snapshot do estado atual e backlog.
- `docs/OPERATIONS.md` — runbook do Swarm.
- `docs/DEPLOY-RUNBOOK.md` — runbook do Swarm.
- `docs/admin-inventory-ui.md`, `docs/plano-app-desktop-maquinas.md` — detalhes do inventário/agente.
## Regras de Codigo
### Tooltips Nativos do Navegador
**NAO use o atributo `title` em elementos HTML** (button, span, a, div, etc).
O atributo `title` causa tooltips nativos do navegador que sao inconsistentes visualmente e nao seguem o design system da aplicacao.
```tsx
// ERRADO - causa tooltip nativo do navegador
<button title="Remover item">
<Trash2 className="size-4" />
</button>
// CORRETO - sem tooltip nativo
<button>
<Trash2 className="size-4" />
</button>
// CORRETO - se precisar de tooltip, use o componente Tooltip do shadcn/ui
<Tooltip>
<TooltipTrigger asChild>
<button>
<Trash2 className="size-4" />
</button>
</TooltipTrigger>
<TooltipContent>Remover item</TooltipContent>
</Tooltip>
```
**Excecoes:**
- Props `title` de componentes customizados (CardTitle, DialogTitle, etc) sao permitidas pois nao geram tooltips nativos.
### Acessibilidade
Para manter acessibilidade em botoes apenas com icone, prefira usar `aria-label`:
```tsx
<button aria-label="Remover item">
<Trash2 className="size-4" />
</button>
```
---
_Última atualização: 18/12/2025 (Next.js 16, build padrão com Turbopack e fallback webpack documentado)._
_Última atualização: 06/11/2025 (Next.js 16, build de produção com webpack, fluxos desktop + portal documentados)._

View file

@ -13,10 +13,6 @@ VITE_API_BASE_URL=
VITE_RUSTDESK_CONFIG_STRING=
VITE_RUSTDESK_DEFAULT_PASSWORD=FMQ9MA>e73r.FI<b*34Vmx_8P
# Assinatura Tauri (dev/CI). Em producao, pode sobrescrever por env seguro.
TAURI_SIGNING_PRIVATE_KEY=dW50cnVzdGVkIGNvbW1lbnQ6IHJzaWduIGVuY3J5cHRlZCBzZWNyZXQga2V5ClJXUlRZMEl5WkhWOUtzd1BvV0ZlSjEvNzYwaHYxdEloNnV4cmZlNGhha1BNbmNtZEkrZ0FBQkFBQUFBQUFBQUFBQUlBQUFBQS9JbCtsd3VFbHN4empFRUNiU0dva1hKK3ZYUzE2S1V6Q1FhYkRUWGtGMTBkUmJodi9PaXVub3hEMisyTXJoYU5UeEdwZU9aMklacG9ualNWR1NaTm1PMVBpVXYrNTltZU1YOFdwYzdkOHd2STFTc0x4ZktpNXFENnFTdW0xNzY3WC9EcGlIRGFmK2c9Cg==
TAURI_SIGNING_PRIVATE_KEY_PASSWORD=revertech
# Opcional: IP do host para desenvolvimento com HMR fora do localhost
# Ex.: 192.168.0.10
TAURI_DEV_HOST=

View file

@ -8,29 +8,23 @@
"build": "tsc && vite build",
"preview": "vite preview",
"tauri": "node ./scripts/tauri-with-stub.mjs",
"gen:icon": "node ./scripts/build-icon.mjs",
"build:service": "cd service && cargo build --release",
"build:all": "bun run build:service && bun run tauri build"
"gen:icon": "node ./scripts/build-icon.mjs"
},
"dependencies": {
"@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-tabs": "^1.1.13",
"@tauri-apps/api": "^2.9.1",
"@tauri-apps/plugin-dialog": "^2.4.2",
"@tauri-apps/api": "^2",
"@tauri-apps/plugin-opener": "^2",
"@tauri-apps/plugin-process": "^2",
"@tauri-apps/plugin-store": "^2",
"@tauri-apps/plugin-updater": "^2",
"convex": "^1.31.0",
"lucide-react": "^0.544.0",
"react": "^19.0.0",
"react-dom": "^19.0.0"
},
"devDependencies": {
"png-to-ico": "^3.0.1",
"@tauri-apps/cli": "^2",
"@vitejs/plugin-react": "^4.3.4",
"baseline-browser-mapping": "^2.9.2",
"png-to-ico": "^3.0.1",
"typescript": "~5.6.2",
"vite": "^6.0.3"
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.9 MiB

View file

@ -1,11 +1,9 @@
import { spawn } from "node:child_process"
import { fileURLToPath } from "node:url"
import { dirname, resolve } from "node:path"
import { existsSync } from "node:fs"
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)
const appRoot = resolve(__dirname, "..")
const pathKey = process.platform === "win32" ? "Path" : "PATH"
const currentPath = process.env[pathKey] ?? process.env[pathKey.toUpperCase()] ?? ""
@ -25,26 +23,10 @@ if (!process.env.TAURI_BUNDLE_TARGETS) {
}
}
// Assinatura: fallback seguro para builds locais/CI. Em prod, pode sobrescrever por env.
if (!process.env.TAURI_SIGNING_PRIVATE_KEY) {
process.env.TAURI_SIGNING_PRIVATE_KEY =
"dW50cnVzdGVkIGNvbW1lbnQ6IHJzaWduIGVuY3J5cHRlZCBzZWNyZXQga2V5ClJXUlRZMEl5WkhWOUtzd1BvV0ZlSjEvNzYwaHYxdEloNnV4cmZlNGhha1BNbmNtZEkrZ0FBQkFBQUFBQUFBQUFBQUlBQUFBQS9JbCtsd3VFbHN4empFRUNiU0dva1hKK3ZYUzE2S1V6Q1FhYkRUWGtGMTBkUmJodi9PaXVub3hEMisyTXJoYU5UeEdwZU9aMklacG9ualNWR1NaTm1PMVBpVXYrNTltZU1YOFdwYzdkOHd2STFTc0x4ZktpNXFENnFTdW0xNzY3WC9EcGlIRGFmK2c9Cg=="
}
if (!process.env.TAURI_SIGNING_PRIVATE_KEY_PASSWORD) {
process.env.TAURI_SIGNING_PRIVATE_KEY_PASSWORD = "revertech"
}
const winTauriPath = resolve(appRoot, "node_modules", ".bin", "tauri.cmd")
const usingWinTauri = process.platform === "win32" && existsSync(winTauriPath)
const executable = process.platform === "win32" && usingWinTauri ? "cmd.exe" : "tauri"
const args =
process.platform === "win32" && usingWinTauri
? ["/C", winTauriPath, ...process.argv.slice(2)]
: process.argv.slice(2)
const child = spawn(executable, args, {
const executable = process.platform === "win32" ? "tauri.cmd" : "tauri"
const child = spawn(executable, process.argv.slice(2), {
stdio: "inherit",
shell: false,
cwd: appRoot,
shell: process.platform === "win32",
})
child.on("exit", (code, signal) => {

File diff suppressed because it is too large Load diff

View file

@ -1,70 +0,0 @@
[package]
name = "raven-service"
version = "0.1.0"
description = "Raven Windows Service - Executa operacoes privilegiadas para o Raven Desktop"
authors = ["Esdras Renan"]
edition = "2021"
[[bin]]
name = "raven-service"
path = "src/main.rs"
[dependencies]
# Windows Service
windows-service = "0.7"
# Async runtime
tokio = { version = "1", features = ["rt-multi-thread", "macros", "sync", "time", "io-util", "net", "signal"] }
# IPC via Named Pipes
interprocess = { version = "2", features = ["tokio"] }
# Serialization
serde = { version = "1", features = ["derive"] }
serde_json = "1"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Windows Registry
winreg = "0.55"
# Error handling
thiserror = "1.0"
# HTTP client (para RustDesk)
reqwest = { version = "0.12", features = ["json", "rustls-tls", "blocking"], default-features = false }
# Date/time
chrono = { version = "0.4", features = ["serde"] }
# Crypto (para RustDesk ID)
sha2 = "0.10"
# UUID para request IDs
uuid = { version = "1", features = ["v4"] }
# Parking lot para locks
parking_lot = "0.12"
# Once cell para singletons
once_cell = "1.19"
[target.'cfg(windows)'.dependencies]
windows = { version = "0.58", features = [
"Win32_Foundation",
"Win32_Security",
"Win32_System_Services",
"Win32_System_Threading",
"Win32_System_Pipes",
"Win32_System_IO",
"Win32_System_SystemServices",
"Win32_Storage_FileSystem",
] }
[profile.release]
opt-level = "z"
lto = true
codegen-units = 1
strip = true

View file

@ -1,290 +0,0 @@
//! Modulo IPC - Servidor de Named Pipes
//!
//! Implementa comunicacao entre o Raven UI e o Raven Service
//! usando Named Pipes do Windows com protocolo JSON-RPC simplificado.
use crate::{rustdesk, usb_policy};
use serde::{Deserialize, Serialize};
use std::io::{BufRead, BufReader, Write};
use thiserror::Error;
use tracing::{debug, info, warn};
/// Errors produced by the IPC server layer: pipe IO failures and
/// JSON (de)serialization failures while handling requests/responses.
#[derive(Debug, Error)]
pub enum IpcError {
    /// Reading from or writing to the named pipe failed.
    #[error("Erro de IO: {0}")]
    Io(#[from] std::io::Error),
    /// A request could not be parsed or a response could not be serialized.
    #[error("Erro de serializacao: {0}")]
    Json(#[from] serde_json::Error),
}
/// Simplified JSON-RPC request. The wire protocol is one JSON object per
/// line on the named pipe (see `process_client`).
#[derive(Debug, Deserialize)]
pub struct Request {
    /// Caller-chosen identifier, echoed back in the matching `Response`.
    pub id: String,
    /// Method name dispatched by `handle_request` (e.g. "health_check").
    pub method: String,
    /// Method parameters; serde's default (JSON `null`) is used when omitted.
    #[serde(default)]
    pub params: serde_json::Value,
}
/// Simplified JSON-RPC response. The constructors in `impl Response`
/// guarantee that exactly one of `result` / `error` is populated.
#[derive(Debug, Serialize)]
pub struct Response {
    /// Mirrors the `id` of the request being answered.
    pub id: String,
    /// Success payload; skipped in the serialized JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<serde_json::Value>,
    /// Error payload; skipped in the serialized JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// JSON-RPC style error object carried inside a failed `Response`.
#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    /// Numeric error code following JSON-RPC conventions
    /// (e.g. -32601 method not found, -32602 invalid params).
    pub code: i32,
    /// Human-readable error description.
    pub message: String,
}
impl Response {
pub fn success(id: String, result: serde_json::Value) -> Self {
Self {
id,
result: Some(result),
error: None,
}
}
pub fn error(id: String, code: i32, message: String) -> Self {
Self {
id,
result: None,
error: Some(ErrorResponse { code, message }),
}
}
}
/// Starts the named-pipe IPC server: accepts and serves one client
/// connection at a time, forever. Individual connection errors are
/// logged and do not stop the server loop.
pub async fn run_server(pipe_name: &str) -> Result<(), IpcError> {
    info!("Iniciando servidor IPC em: {}", pipe_name);

    loop {
        if let Err(e) = accept_connection(pipe_name).await {
            warn!("Erro ao processar conexao: {}", e);
        } else {
            debug!("Conexao processada com sucesso");
        }
    }
}
/// Creates one named-pipe instance, blocks until a single client connects,
/// processes its requests via `process_client`, then disconnects the client.
/// Called in a loop by `run_server`; returns an `IpcError` on pipe failures.
async fn accept_connection(pipe_name: &str) -> Result<(), IpcError> {
    use windows::Win32::Foundation::INVALID_HANDLE_VALUE;
    use windows::Win32::Security::{
        InitializeSecurityDescriptor, SetSecurityDescriptorDacl,
        PSECURITY_DESCRIPTOR, SECURITY_ATTRIBUTES, SECURITY_DESCRIPTOR,
    };
    use windows::Win32::Storage::FileSystem::PIPE_ACCESS_DUPLEX;
    use windows::Win32::System::Pipes::{
        ConnectNamedPipe, CreateNamedPipeW, DisconnectNamedPipe,
        PIPE_READMODE_MESSAGE, PIPE_TYPE_MESSAGE, PIPE_UNLIMITED_INSTANCES, PIPE_WAIT,
    };
    use windows::Win32::System::SystemServices::SECURITY_DESCRIPTOR_REVISION;
    use windows::core::PCWSTR;

    // Pipe name as a NUL-terminated UTF-16 string, as required by the W API.
    let pipe_name_wide: Vec<u16> = pipe_name.encode_utf16().chain(std::iter::once(0)).collect();

    // Build a security descriptor with a NULL DACL so every local user can
    // open the pipe (the non-elevated UI must reach this LocalSystem service).
    // NOTE(review): a NULL DACL allows ANY local process to connect; confirm
    // that is acceptable, or restrict the DACL to the intended callers.
    let mut sd = SECURITY_DESCRIPTOR::default();
    unsafe {
        let sd_ptr = PSECURITY_DESCRIPTOR(&mut sd as *mut _ as *mut _);
        let _ = InitializeSecurityDescriptor(sd_ptr, SECURITY_DESCRIPTOR_REVISION);
        // bDaclPresent = true with a None DACL => NULL DACL (unrestricted).
        let _ = SetSecurityDescriptorDacl(sd_ptr, true, None, false);
    }

    let sa = SECURITY_ATTRIBUTES {
        nLength: std::mem::size_of::<SECURITY_ATTRIBUTES>() as u32,
        lpSecurityDescriptor: &mut sd as *mut _ as *mut _,
        bInheritHandle: false.into(),
    };

    // Message-type duplex pipe, unlimited instances, blocking (PIPE_WAIT) mode.
    let pipe_handle = unsafe {
        CreateNamedPipeW(
            PCWSTR::from_raw(pipe_name_wide.as_ptr()),
            PIPE_ACCESS_DUPLEX,
            PIPE_TYPE_MESSAGE | PIPE_READMODE_MESSAGE | PIPE_WAIT,
            PIPE_UNLIMITED_INSTANCES,
            4096, // out buffer
            4096, // in buffer
            0,    // default timeout
            Some(&sa), // permissive security descriptor built above
        )
    };

    // CreateNamedPipeW signals failure via INVALID_HANDLE_VALUE.
    if pipe_handle == INVALID_HANDLE_VALUE {
        return Err(IpcError::Io(std::io::Error::last_os_error()));
    }

    // Block until a client connects to this pipe instance.
    info!("Aguardando conexao de cliente...");
    let connect_result = unsafe {
        ConnectNamedPipe(pipe_handle, None)
    };

    if let Err(e) = connect_result {
        // ERROR_PIPE_CONNECTED (535) means the client connected between
        // CreateNamedPipeW and ConnectNamedPipe, which is fine.
        let error_code = e.code().0 as u32;
        if error_code != 535 {
            warn!("Erro ao aguardar conexao: {:?}", e);
        }
    }

    info!("Cliente conectado");

    // Serve the client's request/response stream.
    let result = process_client(pipe_handle);

    // Always disconnect so the pipe instance can be reused/recreated.
    unsafe {
        let _ = DisconnectNamedPipe(pipe_handle);
    }

    result
}
/// Serves one connected client: reads newline-delimited JSON requests from
/// the pipe and writes one JSON response per line, until the client
/// disconnects (EOF/BrokenPipe) or an unrecoverable IO error occurs.
fn process_client(pipe_handle: windows::Win32::Foundation::HANDLE) -> Result<(), IpcError> {
    use std::os::windows::io::{FromRawHandle, RawHandle};
    use std::fs::File;

    // Wrap the raw pipe handle in a std::fs::File to get buffered line IO.
    // The File now nominally "owns" the handle (see mem::forget below).
    let raw_handle = pipe_handle.0 as RawHandle;
    let file = unsafe { File::from_raw_handle(raw_handle) };

    // Reader and writer share the same underlying pipe handle.
    let reader = BufReader::new(file.try_clone()?);
    let mut writer = file;

    // Each line is one complete JSON request.
    for line in reader.lines() {
        let line = match line {
            Ok(l) => l,
            Err(e) => {
                // BrokenPipe here just means the client went away — not an error.
                if e.kind() == std::io::ErrorKind::BrokenPipe {
                    info!("Cliente desconectou");
                    break;
                }
                return Err(e.into());
            }
        };

        if line.is_empty() {
            continue;
        }

        debug!("Requisicao recebida: {}", line);

        // Parse the request; on malformed JSON reply with a JSON-RPC
        // parse error (-32700) instead of dropping the connection.
        let response = match serde_json::from_str::<Request>(&line) {
            Ok(request) => handle_request(request),
            Err(e) => Response::error(
                "unknown".to_string(),
                -32700,
                format!("Parse error: {}", e),
            ),
        };

        // Serialize and send the response as a single line.
        let response_json = serde_json::to_string(&response)?;
        debug!("Resposta: {}", response_json);
        writeln!(writer, "{}", response_json)?;
        writer.flush()?;
    }

    // IMPORTANT: do not let the File close the handle here — the caller
    // (accept_connection) still needs it for DisconnectNamedPipe. Leaking
    // the wrapper intentionally keeps the OS handle alive.
    std::mem::forget(writer);

    Ok(())
}
/// Processa uma requisicao e retorna a resposta
fn handle_request(request: Request) -> Response {
info!("Processando metodo: {}", request.method);
match request.method.as_str() {
"health_check" => handle_health_check(request.id),
"apply_usb_policy" => handle_apply_usb_policy(request.id, request.params),
"get_usb_policy" => handle_get_usb_policy(request.id),
"provision_rustdesk" => handle_provision_rustdesk(request.id, request.params),
"get_rustdesk_status" => handle_get_rustdesk_status(request.id),
_ => Response::error(
request.id,
-32601,
format!("Metodo nao encontrado: {}", request.method),
),
}
}
// =============================================================================
// Handlers de Requisicoes
// =============================================================================
/// Handles `health_check`: reports service name, version and a
/// millisecond UTC timestamp so callers can verify liveness.
fn handle_health_check(id: String) -> Response {
    let payload = serde_json::json!({
        "status": "ok",
        "service": "RavenService",
        "version": env!("CARGO_PKG_VERSION"),
        "timestamp": chrono::Utc::now().timestamp_millis()
    });
    Response::success(id, payload)
}
/// Handles `apply_usb_policy`: applies the USB policy named in
/// `params.policy`. Missing parameter yields a -32602 invalid-params error.
fn handle_apply_usb_policy(id: String, params: serde_json::Value) -> Response {
    let requested = params.get("policy").and_then(|p| p.as_str());

    let policy = match requested {
        Some(p) => p,
        None => {
            return Response::error(id, -32602, "Parametro 'policy' e obrigatorio".to_string())
        }
    };

    match usb_policy::apply_policy(policy) {
        Err(e) => Response::error(id, -32000, format!("Erro ao aplicar politica: {}", e)),
        Ok(result) => Response::success(id, serde_json::to_value(result).unwrap()),
    }
}
/// Handles `get_usb_policy`: reports the currently active USB policy.
fn handle_get_usb_policy(id: String) -> Response {
    let current = usb_policy::get_current_policy();

    match current {
        Err(e) => Response::error(id, -32000, format!("Erro ao obter politica: {}", e)),
        Ok(policy) => {
            let body = serde_json::json!({
                "policy": policy
            });
            Response::success(id, body)
        }
    }
}
/// Handles `provision_rustdesk`: installs/configures RustDesk using the
/// optional `config`, `password` and `machineId` string parameters.
fn handle_provision_rustdesk(id: String, params: serde_json::Value) -> Response {
    // All three parameters are optional strings in `params`.
    let as_string = |key: &str| params.get(key).and_then(|v| v.as_str()).map(String::from);
    let config_string = as_string("config");
    let password = as_string("password");
    let machine_id = as_string("machineId");

    match rustdesk::ensure_rustdesk(
        config_string.as_deref(),
        password.as_deref(),
        machine_id.as_deref(),
    ) {
        Ok(result) => Response::success(id, serde_json::to_value(result).unwrap()),
        Err(e) => Response::error(id, -32000, format!("Erro ao provisionar RustDesk: {}", e)),
    }
}
/// Handles "get_rustdesk_status": reports installation/running state of RustDesk.
fn handle_get_rustdesk_status(id: String) -> Response {
    match rustdesk::get_status() {
        Err(e) => Response::error(id, -32000, format!("Erro ao obter status: {}", e)),
        Ok(status) => Response::success(id, serde_json::to_value(status).unwrap()),
    }
}

View file

@ -1,268 +0,0 @@
//! Raven Service - Servico Windows para operacoes privilegiadas
//!
//! Este servico roda como LocalSystem e executa operacoes que requerem
//! privilegios de administrador, como:
//! - Aplicar politicas de USB
//! - Provisionar e configurar RustDesk
//! - Modificar chaves de registro em HKEY_LOCAL_MACHINE
//!
//! O app Raven UI comunica com este servico via Named Pipes.
mod ipc;
mod rustdesk;
mod usb_policy;
use std::ffi::OsString;
use std::time::Duration;
use tracing::{error, info};
use windows_service::{
define_windows_service,
service::{
ServiceControl, ServiceControlAccept, ServiceExitCode, ServiceState, ServiceStatus,
ServiceType,
},
service_control_handler::{self, ServiceControlHandlerResult},
service_dispatcher,
};
const SERVICE_NAME: &str = "RavenService";
const SERVICE_DISPLAY_NAME: &str = "Raven Desktop Service";
const SERVICE_DESCRIPTION: &str = "Servico do Raven Desktop para operacoes privilegiadas (USB, RustDesk)";
const PIPE_NAME: &str = r"\\.\pipe\RavenService";
define_windows_service!(ffi_service_main, service_main);
/// Entry point: an optional CLI verb (install/uninstall/run) selects a
/// maintenance mode; otherwise the Windows service dispatcher is started.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    init_logging();
    // The first CLI argument, when present, picks a mode other than service.
    if let Some(verb) = std::env::args().nth(1) {
        match verb.as_str() {
            "install" => return install_service(),
            "uninstall" => return uninstall_service(),
            "run" => {
                // Test mode: run the IPC server without registering as a service.
                info!("Executando em modo de teste (nao como servico)");
                return run_standalone();
            }
            _ => {}
        }
    }
    // Hand control to the SCM; blocks until the service stops.
    info!("Iniciando Raven Service...");
    service_dispatcher::start(SERVICE_NAME, ffi_service_main)?;
    Ok(())
}
/// Initializes tracing: appends to %PROGRAMDATA%\RavenService\logs\service.log
/// when the file can be opened, otherwise falls back to a stdout layer.
fn init_logging() {
    use tracing_subscriber::{fmt, prelude::*, EnvFilter};
    let log_dir = match std::env::var("PROGRAMDATA") {
        Ok(base) => std::path::PathBuf::from(base).join("RavenService").join("logs"),
        Err(_) => std::path::PathBuf::from("C:\\ProgramData\\RavenService\\logs"),
    };
    let _ = std::fs::create_dir_all(&log_dir);
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
    let sink = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(log_dir.join("service.log"))
        .ok();
    match sink {
        Some(file) => {
            // ANSI escapes disabled: the destination is a plain log file.
            tracing_subscriber::registry()
                .with(filter)
                .with(fmt::layer().with_writer(file).with_ansi(false))
                .init();
        }
        None => {
            tracing_subscriber::registry()
                .with(filter)
                .with(fmt::layer())
                .init();
        }
    }
}
fn service_main(arguments: Vec<OsString>) {
if let Err(e) = run_service(arguments) {
error!("Erro ao executar servico: {}", e);
}
}
/// Body of the Windows service: registers the SCM control handler, reports
/// Running, serves IPC over the named pipe until Stop/Shutdown arrives, then
/// reports Stopped.
fn run_service(_arguments: Vec<OsString>) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    info!("Servico iniciando...");
    // One-shot channel bridging the synchronous control handler into the
    // async server loop below.
    let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>();
    let shutdown_tx = std::sync::Arc::new(std::sync::Mutex::new(Some(shutdown_tx)));
    // Register the service control handler (Stop/Shutdown/Interrogate).
    let shutdown_tx_clone = shutdown_tx.clone();
    let status_handle = service_control_handler::register(SERVICE_NAME, move |control| {
        match control {
            ServiceControl::Stop | ServiceControl::Shutdown => {
                info!("Recebido comando de parada");
                // take() makes the signal idempotent: only the first Stop sends.
                if let Ok(mut guard) = shutdown_tx_clone.lock() {
                    if let Some(tx) = guard.take() {
                        let _ = tx.send(());
                    }
                }
                ServiceControlHandlerResult::NoError
            }
            ServiceControl::Interrogate => ServiceControlHandlerResult::NoError,
            _ => ServiceControlHandlerResult::NotImplemented,
        }
    })?;
    // Report Running to the SCM, accepting Stop and Shutdown controls.
    status_handle.set_service_status(ServiceStatus {
        service_type: ServiceType::OWN_PROCESS,
        current_state: ServiceState::Running,
        controls_accepted: ServiceControlAccept::STOP | ServiceControlAccept::SHUTDOWN,
        exit_code: ServiceExitCode::Win32(0),
        checkpoint: 0,
        wait_hint: Duration::default(),
        process_id: None,
    })?;
    info!("Servico em execucao, aguardando conexoes...")
    // Create the Tokio runtime that hosts the named-pipe IPC server.
    ;let runtime = tokio::runtime::Runtime::new()?;
    // Run the IPC server until it fails or shutdown is signalled.
    runtime.block_on(async {
        tokio::select! {
            result = ipc::run_server(PIPE_NAME) => {
                if let Err(e) = result {
                    error!("Erro no servidor IPC: {}", e);
                }
            }
            _ = async {
                let _ = shutdown_rx.await;
            } => {
                info!("Shutdown solicitado");
            }
        }
    });
    // Report Stopped so the SCM knows the process exited cleanly.
    status_handle.set_service_status(ServiceStatus {
        service_type: ServiceType::OWN_PROCESS,
        current_state: ServiceState::Stopped,
        controls_accepted: ServiceControlAccept::empty(),
        exit_code: ServiceExitCode::Win32(0),
        checkpoint: 0,
        wait_hint: Duration::default(),
        process_id: None,
    })?;
    info!("Servico parado");
    Ok(())
}
/// Test mode: runs the IPC server on the current console until Ctrl+C,
/// without any SCM registration.
fn run_standalone() -> Result<(), Box<dyn std::error::Error>> {
    let runtime = tokio::runtime::Runtime::new()?;
    runtime.block_on(async {
        info!("Servidor IPC iniciando em modo standalone...");
        tokio::select! {
            outcome = ipc::run_server(PIPE_NAME) => {
                if let Err(e) = outcome {
                    error!("Erro no servidor IPC: {}", e);
                }
            }
            _ = tokio::signal::ctrl_c() => {
                info!("Ctrl+C recebido, encerrando...");
            }
        }
    });
    Ok(())
}
/// Registers this executable as an auto-start Windows service and sets its
/// description. `account_name: None` means the service runs as LocalSystem.
fn install_service() -> Result<(), Box<dyn std::error::Error>> {
    use windows_service::{
        service::{ServiceAccess, ServiceErrorControl, ServiceInfo, ServiceStartType},
        service_manager::{ServiceManager, ServiceManagerAccess},
    };
    info!("Instalando servico...");
    let exe_path = std::env::current_exe()?;
    let manager =
        ServiceManager::local_computer(None::<&str>, ServiceManagerAccess::CREATE_SERVICE)?;
    let info = ServiceInfo {
        name: OsString::from(SERVICE_NAME),
        display_name: OsString::from(SERVICE_DISPLAY_NAME),
        service_type: ServiceType::OWN_PROCESS,
        start_type: ServiceStartType::AutoStart,
        error_control: ServiceErrorControl::Normal,
        executable_path: exe_path,
        launch_arguments: Vec::new(),
        dependencies: Vec::new(),
        // None => LocalSystem account.
        account_name: None,
        account_password: None,
    };
    let service = manager.create_service(&info, ServiceAccess::CHANGE_CONFIG)?;
    service.set_description(SERVICE_DESCRIPTION)?;
    info!("Servico instalado com sucesso: {}", SERVICE_NAME);
    println!("Servico '{}' instalado com sucesso!", SERVICE_DISPLAY_NAME);
    println!("Para iniciar: sc start {}", SERVICE_NAME);
    Ok(())
}
/// Stops the service (best effort) and deletes its SCM registration.
fn uninstall_service() -> Result<(), Box<dyn std::error::Error>> {
    use windows_service::{
        service::ServiceAccess,
        service_manager::{ServiceManager, ServiceManagerAccess},
    };
    info!("Desinstalando servico...");
    let manager = ServiceManager::local_computer(None::<&str>, ServiceManagerAccess::CONNECT)?;
    let access = ServiceAccess::STOP | ServiceAccess::DELETE | ServiceAccess::QUERY_STATUS;
    let service = manager.open_service(SERVICE_NAME, access)?;
    // Stop first so deletion is not deferred until the process exits.
    if service.query_status()?.current_state != ServiceState::Stopped {
        info!("Parando servico...");
        let _ = service.stop();
        std::thread::sleep(Duration::from_secs(2));
    }
    service.delete()?;
    info!("Servico desinstalado com sucesso");
    println!("Servico '{}' removido com sucesso!", SERVICE_DISPLAY_NAME);
    Ok(())
}

View file

@ -1,846 +0,0 @@
//! Modulo RustDesk - Provisionamento e gerenciamento do RustDesk
//!
//! Gerencia a instalacao, configuracao e provisionamento do RustDesk.
//! Como o servico roda como LocalSystem, nao precisa de elevacao.
use chrono::Utc;
use once_cell::sync::Lazy;
use parking_lot::Mutex;
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::env;
use std::ffi::OsStr;
use std::fs::{self, File, OpenOptions};
use std::io::{self, Write};
use std::os::windows::process::CommandExt;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::thread;
use std::time::Duration;
use thiserror::Error;
use tracing::{error, info, warn};
// GitHub endpoint used to discover the newest RustDesk installer.
const RELEASES_API: &str = "https://api.github.com/repos/rustdesk/rustdesk/releases/latest";
const USER_AGENT: &str = "RavenService/1.0";
// Self-hosted rendezvous/relay server and its public key.
const SERVER_HOST: &str = "rust.rever.com.br";
const SERVER_KEY: &str = "0mxocQKmK6GvTZQYKgjrG9tlNkKOqf81gKgqwAmnZuI=";
// SECURITY NOTE(review): a permanent access password is hard-coded into the
// binary; anyone with the binary can extract it. Consider provisioning it
// from the backend per machine instead — TODO confirm with the team.
const DEFAULT_PASSWORD: &str = "FMQ9MA>e73r.FI<b*34Vmx_8P";
// Name of the RustDesk Windows service managed via `sc`.
const SERVICE_NAME: &str = "RustDesk";
// Installer cache under %PROGRAMDATA%.
const CACHE_DIR_NAME: &str = "Rever\\RustDeskCache";
// Config dirs of the LocalService and LocalSystem service profiles.
const LOCAL_SERVICE_CONFIG: &str = r"C:\Windows\ServiceProfiles\LocalService\AppData\Roaming\RustDesk\config";
const LOCAL_SYSTEM_CONFIG: &str = r"C:\Windows\System32\config\systemprofile\AppData\Roaming\RustDesk\config";
// Security options written to RustDesk_local.toml / RustDesk2.toml.
const SECURITY_VERIFICATION_VALUE: &str = "use-permanent-password";
const SECURITY_APPROVE_MODE_VALUE: &str = "password";
// Win32 process creation flag: do not open a console window.
const CREATE_NO_WINDOW: u32 = 0x08000000;
// Serializes concurrent provisioning requests.
static PROVISION_MUTEX: Lazy<Mutex<()>> = Lazy::new(|| Mutex::new(()));
/// Errors produced by RustDesk provisioning and status queries.
#[derive(Debug, Error)]
pub enum RustdeskError {
    /// Network failure talking to the releases API or downloading an asset.
    #[error("HTTP error: {0}")]
    Http(#[from] reqwest::Error),
    /// Filesystem or process-spawn failure.
    #[error("I/O error: {0}")]
    Io(#[from] io::Error),
    /// The latest release has no Windows x86_64 installer asset.
    #[error("Release asset nao encontrado para Windows x86_64")]
    AssetMissing,
    /// A spawned command exited with a non-success status.
    #[error("Falha ao executar comando {command}: status {status:?}")]
    CommandFailed { command: String, status: Option<i32> },
    /// No RustDesk ID could be obtained from the CLI or profile files.
    #[error("Falha ao detectar ID do RustDesk")]
    MissingId,
}
/// Outcome of a provisioning run, serialized to camelCase for the UI.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RustdeskResult {
    /// Final RustDesk ID after provisioning.
    pub id: String,
    /// Permanent password that was applied.
    pub password: String,
    /// Release tag when a fresh install ran, otherwise the CLI-reported version.
    pub installed_version: Option<String>,
    /// True when a fresh install was performed in this run.
    pub updated: bool,
    /// Epoch milliseconds at the end of this provisioning run.
    pub last_provisioned_at: i64,
}
/// Snapshot of the local RustDesk installation, serialized to camelCase.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RustdeskStatus {
    /// Whether rustdesk.exe exists at the default install path.
    pub installed: bool,
    /// Whether the RustDesk Windows service reports "running".
    pub running: bool,
    /// ID from the CLI or profile files, when available.
    pub id: Option<String>,
    /// Version reported by `rustdesk --version`, when available.
    pub version: Option<String>,
}
/// Single downloadable asset in a GitHub release.
#[derive(Debug, Deserialize)]
struct ReleaseAsset {
    /// Asset file name; the Windows build is matched by an "x86_64.exe" suffix.
    name: String,
    /// Direct download URL for the asset.
    browser_download_url: String,
}
/// Subset of the GitHub "latest release" API response that we consume.
#[derive(Debug, Deserialize)]
struct ReleaseResponse {
    /// Release tag, reported as the installed version.
    tag_name: String,
    /// All assets published with the release.
    assets: Vec<ReleaseAsset>,
}
/// Provisions RustDesk end to end: installs it when missing, applies server
/// config and the permanent password, pins a deterministic ID, and ensures
/// the Windows service is running. Serialized via PROVISION_MUTEX.
///
/// * `config_string` - optional inline config passed to `rustdesk --config`.
/// * `password_override` - optional password; falls back to DEFAULT_PASSWORD.
/// * `machine_id` - optional stable identifier used to derive a 9-digit ID.
pub fn ensure_rustdesk(
    config_string: Option<&str>,
    password_override: Option<&str>,
    machine_id: Option<&str>,
) -> Result<RustdeskResult, RustdeskError> {
    let _guard = PROVISION_MUTEX.lock();
    info!("Iniciando provisionamento do RustDesk");
    // Prepare ACLs of the service-profile directories (best effort).
    if let Err(e) = ensure_service_profiles_writable() {
        warn!("Aviso ao preparar ACL: {}", e);
    }
    // Read any existing ID before cleanup so it can be preserved.
    let preserved_remote_id = read_remote_id_from_profiles();
    if let Some(ref id) = preserved_remote_id {
        info!("ID existente preservado: {}", id);
    }
    let exe_path = detect_executable_path();
    let (installed_version, freshly_installed) = ensure_installed(&exe_path)?;
    info!(
        "RustDesk {}: {}",
        if freshly_installed { "instalado" } else { "ja presente" },
        exe_path.display()
    );
    // Stop running instances before reconfiguring.
    let _ = stop_rustdesk_processes();
    // Purge stale profile files only on a fresh install.
    if freshly_installed {
        let _ = purge_existing_rustdesk_profiles();
    }
    // Apply configuration: inline string when provided, file import otherwise.
    if let Some(config) = config_string.filter(|c| !c.trim().is_empty()) {
        if let Err(e) = run_with_args(&exe_path, &["--config", config]) {
            warn!("Falha ao aplicar config inline: {}", e);
        }
    } else {
        let config_path = write_config_files()?;
        if let Err(e) = apply_config(&exe_path, &config_path) {
            warn!("Falha ao aplicar config via CLI: {}", e);
        }
    }
    // Set the permanent password (trimmed override, or the built-in default).
    let password = password_override
        .map(|v| v.trim().to_string())
        .filter(|v| !v.is_empty())
        .unwrap_or_else(|| DEFAULT_PASSWORD.to_string());
    if let Err(e) = set_password(&exe_path, &password) {
        warn!("Falha ao definir senha: {}", e);
    } else {
        let _ = ensure_password_files(&password);
        let _ = propagate_password_profile();
    }
    // Choose the ID: keep a preserved one unless this is a fresh install.
    let custom_id = if let Some(ref existing_id) = preserved_remote_id {
        if !freshly_installed {
            Some(existing_id.clone())
        } else {
            define_custom_id(&exe_path, machine_id)
        }
    } else {
        define_custom_id(&exe_path, machine_id)
    };
    // Start the service (best effort).
    if let Err(e) = ensure_service_running(&exe_path) {
        warn!("Falha ao iniciar servico: {}", e);
    }
    // Resolve the final ID: CLI with retries, then profile files, then the
    // custom ID set above; error only if every source is empty.
    let final_id = match query_id_with_retries(&exe_path, 5) {
        Ok(id) => id,
        Err(_) => {
            read_remote_id_from_profiles()
                .or_else(|| custom_id.clone())
                .ok_or(RustdeskError::MissingId)?
        }
    };
    // Persist the ID into every profile file.
    ensure_remote_id_files(&final_id);
    let version = query_version(&exe_path).ok().or(installed_version);
    let last_provisioned_at = Utc::now().timestamp_millis();
    info!("Provisionamento concluido. ID: {}, Versao: {:?}", final_id, version);
    Ok(RustdeskResult {
        id: final_id,
        password,
        installed_version: version,
        updated: freshly_installed,
        last_provisioned_at,
    })
}
/// Reports whether RustDesk is installed, whether its service is running,
/// and (when obtainable) its ID and version.
pub fn get_status() -> Result<RustdeskStatus, RustdeskError> {
    let exe_path = detect_executable_path();
    if !exe_path.exists() {
        return Ok(RustdeskStatus {
            installed: false,
            running: false,
            id: None,
            version: None,
        });
    }
    let running = matches!(query_service_state().as_deref(), Some("running"));
    let id = query_id(&exe_path).ok().or_else(read_remote_id_from_profiles);
    let version = query_version(&exe_path).ok();
    Ok(RustdeskStatus {
        installed: true,
        running,
        id,
        version,
    })
}
// =============================================================================
// Funcoes Auxiliares
// =============================================================================
/// Default install location: %PROGRAMFILES%\RustDesk\rustdesk.exe.
fn detect_executable_path() -> PathBuf {
    let base = env::var("PROGRAMFILES").unwrap_or_else(|_| String::from("C:/Program Files"));
    PathBuf::from(base).join("RustDesk").join("rustdesk.exe")
}
/// Installs RustDesk when the executable is absent.
///
/// Returns `(release tag when a download happened, whether a fresh install ran)`.
fn ensure_installed(exe_path: &Path) -> Result<(Option<String>, bool), RustdeskError> {
    if exe_path.exists() {
        return Ok((None, false));
    }
    let program_data = env::var("PROGRAMDATA").unwrap_or_else(|_| "C:/ProgramData".to_string());
    let cache_root = PathBuf::from(program_data).join(CACHE_DIR_NAME);
    fs::create_dir_all(&cache_root)?;
    let (installer_path, version_tag) = download_latest_installer(&cache_root)?;
    run_installer(&installer_path)?;
    // Give the silent installer time to finish copying files.
    thread::sleep(Duration::from_secs(20));
    Ok((Some(version_tag), true))
}
/// Downloads the newest Windows x86_64 installer into `cache_root`, reusing
/// a previously cached copy when one already exists.
fn download_latest_installer(cache_root: &Path) -> Result<(PathBuf, String), RustdeskError> {
    let client = Client::builder()
        .user_agent(USER_AGENT)
        .timeout(Duration::from_secs(60))
        .build()?;
    let release: ReleaseResponse = client.get(RELEASES_API).send()?.error_for_status()?.json()?;
    let asset = release
        .assets
        .iter()
        .find(|candidate| candidate.name.ends_with("x86_64.exe"))
        .ok_or(RustdeskError::AssetMissing)?;
    let target_path = cache_root.join(&asset.name);
    if !target_path.exists() {
        info!("Baixando RustDesk: {}", asset.name);
        let mut body = client
            .get(&asset.browser_download_url)
            .send()?
            .error_for_status()?;
        let mut sink = File::create(&target_path)?;
        body.copy_to(&mut sink)?;
    }
    Ok((target_path, release.tag_name))
}
/// Runs the installer silently; a non-zero exit becomes CommandFailed.
fn run_installer(installer_path: &Path) -> Result<(), RustdeskError> {
    let status = hidden_command(installer_path)
        .arg("--silent-install")
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()?;
    if status.success() {
        Ok(())
    } else {
        Err(RustdeskError::CommandFailed {
            command: format!("{} --silent-install", installer_path.display()),
            status: status.code(),
        })
    }
}
/// Machine-wide RustDesk config directory: %PROGRAMDATA%\RustDesk\config.
fn program_data_config_dir() -> PathBuf {
    let base = env::var("PROGRAMDATA").unwrap_or_else(|_| String::from("C:/ProgramData"));
    PathBuf::from(base).join("RustDesk").join("config")
}
/// Collects every candidate per-user RustDesk config directory:
/// C:\Users\<user>\AppData\Roaming\RustDesk\config for each real profile
/// (system profiles skipped), plus the current %APPDATA% when distinct.
/// Needed because the service runs as LocalSystem and must enumerate users.
fn all_user_appdata_config_dirs() -> Vec<PathBuf> {
    const SYSTEM_PROFILES: [&str; 4] = ["Public", "Default", "Default User", "All Users"];
    let mut dirs: Vec<PathBuf> = Vec::new();
    if let Ok(entries) = fs::read_dir(Path::new("C:\\Users")) {
        for entry in entries.flatten() {
            let profile = entry.path();
            let name = profile.file_name().and_then(OsStr::to_str).unwrap_or("");
            if SYSTEM_PROFILES.contains(&name) {
                continue;
            }
            // Only profiles with a real AppData\Roaming tree count as users.
            if profile.join("AppData").join("Roaming").exists() {
                dirs.push(
                    profile
                        .join("AppData")
                        .join("Roaming")
                        .join("RustDesk")
                        .join("config"),
                );
            }
        }
    }
    // Also consider the current environment's APPDATA when not already listed.
    if let Ok(appdata) = env::var("APPDATA") {
        let candidate = Path::new(&appdata).join("RustDesk").join("config");
        if !dirs.contains(&candidate) {
            dirs.push(candidate);
        }
    }
    dirs
}
/// Config directories of the LocalService and LocalSystem service profiles.
fn service_profile_dirs() -> Vec<PathBuf> {
    [LOCAL_SERVICE_CONFIG, LOCAL_SYSTEM_CONFIG]
        .iter()
        .map(|raw| PathBuf::from(*raw))
        .collect()
}
/// Every directory where RustDesk profile files may live, in priority order:
/// ProgramData first, then service profiles, then per-user profiles.
fn remote_id_directories() -> Vec<PathBuf> {
    let mut dirs = vec![program_data_config_dir()];
    dirs.extend(service_profile_dirs());
    dirs.extend(all_user_appdata_config_dirs());
    dirs
}
/// Writes RustDesk2.toml (server address, key and security options) to the
/// ProgramData config dir and, best effort, to both service profiles.
/// Returns the ProgramData path for later `--import-config`.
fn write_config_files() -> Result<PathBuf, RustdeskError> {
    let config_contents = format!(
        r#"[options]
key = "{key}"
relay-server = "{host}"
custom-rendezvous-server = "{host}"
api-server = "https://{host}"
verification-method = "{verification}"
approve-mode = "{approve}"
"#,
        host = SERVER_HOST,
        key = SERVER_KEY,
        verification = SECURITY_VERIFICATION_VALUE,
        approve = SECURITY_APPROVE_MODE_VALUE,
    );
    let main_path = program_data_config_dir().join("RustDesk2.toml");
    write_file(&main_path, &config_contents)?;
    // Copies into service profiles are best effort; failures are ignored.
    for service_dir in service_profile_dirs() {
        let _ = write_file(&service_dir.join("RustDesk2.toml"), &config_contents);
    }
    Ok(main_path)
}
/// Creates parent directories and writes `contents`, truncating any old file.
fn write_file(path: &Path, contents: &str) -> Result<(), io::Error> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    // fs::write == create + truncate + write_all, matching the old OpenOptions use.
    fs::write(path, contents.as_bytes())
}
/// Imports a config file into RustDesk via its `--import-config` CLI flag.
fn apply_config(exe_path: &Path, config_path: &Path) -> Result<(), RustdeskError> {
    let config_arg = config_path.to_string_lossy();
    run_with_args(exe_path, &["--import-config", config_arg.as_ref()])
}
/// Sets the permanent access password via the RustDesk CLI.
fn set_password(exe_path: &Path, secret: &str) -> Result<(), RustdeskError> {
    let args = ["--password", secret];
    run_with_args(exe_path, &args)
}
/// Derives a deterministic 9-digit ID from `machine_id` and applies it with
/// `--set-id`. Returns None when the input is absent/blank or the CLI fails.
fn define_custom_id(exe_path: &Path, machine_id: Option<&str>) -> Option<String> {
    let trimmed = machine_id.map(str::trim).filter(|value| !value.is_empty())?;
    let custom_id = derive_numeric_id(trimmed);
    match run_with_args(exe_path, &["--set-id", &custom_id]) {
        Ok(()) => {
            info!("ID deterministico definido: {}", custom_id);
            Some(custom_id)
        }
        Err(_) => None,
    }
}
/// Maps an arbitrary machine ID to a stable 9-digit number in
/// [100_000_000, 999_999_999] by hashing with SHA-256 and reducing the
/// first 8 digest bytes (little-endian).
fn derive_numeric_id(machine_id: &str) -> String {
    let digest = Sha256::digest(machine_id.as_bytes());
    let mut prefix = [0u8; 8];
    prefix.copy_from_slice(&digest[..8]);
    let reduced = (u64::from_le_bytes(prefix) % 900_000_000) + 100_000_000;
    format!("{:09}", reduced)
}
/// Makes sure the RustDesk service exists, auto-starts and is started, then
/// removes per-user autorun leftovers so only the service launches RustDesk.
fn ensure_service_running(exe_path: &Path) -> Result<(), RustdeskError> {
    ensure_service_installed(exe_path)?;
    // Best effort: sc may fail when already configured or already running.
    let _ = run_sc(&["config", SERVICE_NAME, "start=", "auto"]);
    let _ = run_sc(&["start", SERVICE_NAME]);
    remove_rustdesk_autorun_artifacts();
    Ok(())
}
/// Installs the RustDesk service unless `sc query` already finds it.
fn ensure_service_installed(exe_path: &Path) -> Result<(), RustdeskError> {
    match run_sc(&["query", SERVICE_NAME]) {
        Ok(()) => Ok(()),
        Err(_) => run_with_args(exe_path, &["--install-service"]),
    }
}
/// Stops the RustDesk service, waits briefly, then force-kills any remaining
/// rustdesk.exe process tree. taskkill exit code 128 is accepted as success
/// alongside 0.
fn stop_rustdesk_processes() -> Result<(), RustdeskError> {
    let _ = run_sc(&["stop", SERVICE_NAME]);
    thread::sleep(Duration::from_secs(2));
    let status = hidden_command("taskkill")
        .args(["/F", "/T", "/IM", "rustdesk.exe"])
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()?;
    let accepted = status.success() || matches!(status.code(), Some(128));
    if accepted {
        return Ok(());
    }
    Err(RustdeskError::CommandFailed {
        command: "taskkill".into(),
        status: status.code(),
    })
}
/// Deletes known RustDesk config/password files from every profile directory
/// (best effort). Used only on fresh installs so stale settings don't survive.
fn purge_existing_rustdesk_profiles() -> Result<(), String> {
    const FILES: [&str; 6] = [
        "RustDesk.toml",
        "RustDesk_local.toml",
        "RustDesk2.toml",
        "password",
        "passwd",
        "passwd.txt",
    ];
    for dir in remote_id_directories().into_iter().filter(|d| d.exists()) {
        for name in FILES {
            let target = dir.join(name);
            if target.exists() {
                let _ = fs::remove_file(&target);
            }
        }
    }
    Ok(())
}
/// Writes the permanent password and the security mode keys into
/// RustDesk.toml / RustDesk_local.toml across all profile dirs (best effort).
fn ensure_password_files(secret: &str) -> Result<(), String> {
    for dir in remote_id_directories() {
        let _ = write_toml_kv(&dir.join("RustDesk.toml"), "password", secret);
        let local = dir.join("RustDesk_local.toml");
        let _ = write_toml_kv(&local, "verification-method", SECURITY_VERIFICATION_VALUE);
        let _ = write_toml_kv(&local, "approve-mode", SECURITY_APPROVE_MODE_VALUE);
    }
    Ok(())
}
/// Copies config files from a user profile that already has RustDesk.toml
/// (falling back to ProgramData) into every other profile directory.
fn propagate_password_profile() -> io::Result<bool> {
    let user_dirs = all_user_appdata_config_dirs();
    if let Some(src) = user_dirs.iter().find(|d| d.join("RustDesk.toml").exists()) {
        return propagate_from_dir(src);
    }
    // No user has a config yet; fall back to the machine-wide directory.
    let program_data = program_data_config_dir();
    if program_data.join("RustDesk.toml").exists() {
        propagate_from_dir(&program_data)
    } else {
        Ok(false)
    }
}
/// Copies the three known config files from `src_dir` into every other
/// profile directory; returns true when at least one copy succeeded.
fn propagate_from_dir(src_dir: &Path) -> io::Result<bool> {
    let mut propagated = false;
    for filename in ["RustDesk.toml", "RustDesk_local.toml", "RustDesk2.toml"] {
        let src_path = src_dir.join(filename);
        if !src_path.exists() {
            continue;
        }
        for dest_root in remote_id_directories() {
            // Never copy a file onto itself.
            if dest_root == src_dir {
                continue;
            }
            if copy_overwrite(&src_path, &dest_root.join(filename)).is_ok() {
                propagated = true;
            }
        }
    }
    Ok(propagated)
}
/// Persists the final ID into RustDesk_local.toml of every profile directory.
fn ensure_remote_id_files(id: &str) {
    for dir in remote_id_directories() {
        let _ = write_remote_id_value(&dir.join("RustDesk_local.toml"), id);
    }
}
/// Upserts `remote_id = '<id>'` in the TOML file at `path`: every existing
/// `remote_id` line is replaced, one is appended when none exists, and the
/// file (plus parent dirs) is created when absent.
///
/// Fix: the original duplicated the OpenOptions/write sequence in two
/// branches; the buffer construction and the single write are now unified.
fn write_remote_id_value(path: &Path, id: &str) -> io::Result<()> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    let replacement = format!("remote_id = '{}'\n", id);
    let buffer = match fs::read_to_string(path) {
        Ok(existing) => {
            let mut rebuilt = String::with_capacity(existing.len() + replacement.len());
            let mut replaced = false;
            for line in existing.lines() {
                if line.trim_start().starts_with("remote_id") {
                    rebuilt.push_str(&replacement);
                    replaced = true;
                } else {
                    rebuilt.push_str(line);
                    rebuilt.push('\n');
                }
            }
            if !replaced {
                rebuilt.push_str(&replacement);
            }
            rebuilt
        }
        // Unreadable/missing file: start fresh with just the assignment.
        Err(_) => replacement,
    };
    // Single truncating write for both paths.
    fs::write(path, buffer.as_bytes())
}
/// Upserts `key = "value"` in a TOML-ish file, escaping backslashes and
/// double quotes in the value. Every existing line for `key` is replaced;
/// one is appended when none exists. Creates the file and parents as needed.
fn write_toml_kv(path: &Path, key: &str, value: &str) -> io::Result<()> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    let escaped = value.replace('\\', "\\\\").replace('"', "\\\"");
    let new_line = format!("{key} = \"{escaped}\"\n");
    let current = fs::read_to_string(path).unwrap_or_default();
    let key_space = format!("{key} ");
    let key_eq = format!("{key}=");
    let mut output = String::with_capacity(current.len() + new_line.len());
    let mut found = false;
    for line in current.lines() {
        let lead = line.trim_start();
        if lead.starts_with(&key_space) || lead.starts_with(&key_eq) {
            output.push_str(&new_line);
            found = true;
        } else {
            output.push_str(line);
            output.push('\n');
        }
    }
    if !found {
        output.push_str(&new_line);
    }
    fs::write(path, output.as_bytes())
}
/// Scans all profile directories for a non-empty remote_id, checking
/// RustDesk_local.toml before RustDesk.toml in each directory.
fn read_remote_id_from_profiles() -> Option<String> {
    remote_id_directories().into_iter().find_map(|dir| {
        ["RustDesk_local.toml", "RustDesk.toml"]
            .into_iter()
            .find_map(|name| read_remote_id_file(&dir.join(name)).filter(|id| !id.is_empty()))
    })
}
/// Returns the first remote_id assignment found in `path`, if any.
fn read_remote_id_file(path: &Path) -> Option<String> {
    let content = fs::read_to_string(path).ok()?;
    content
        .lines()
        .find_map(|line| parse_assignment(line, "remote_id"))
}
/// Parses a `key = 'value'` / `key = "value"` assignment from `line`,
/// returning the unquoted value, or None when the key does not match or the
/// value is empty.
///
/// Fix: the original only checked `starts_with(key)`, so with key
/// "remote_id" a line like `remote_identity = 'x'` wrongly matched and its
/// value was returned. Now the key must be immediately followed by `=`
/// (optionally preceded by whitespace).
fn parse_assignment(line: &str, key: &str) -> Option<String> {
    let trimmed = line.trim();
    // Exact-key match: strip the key, skip whitespace, then require '='.
    let after_key = trimmed.strip_prefix(key)?;
    let rhs = after_key.trim_start().strip_prefix('=')?;
    let value = rhs.trim().trim_matches(|c| c == '\'' || c == '"');
    if value.is_empty() {
        None
    } else {
        Some(value.to_string())
    }
}
/// Polls `--get-id` up to `attempts` times, sleeping 800ms between tries,
/// until a non-blank ID is returned; MissingId after the final try.
fn query_id_with_retries(exe_path: &Path, attempts: usize) -> Result<String, RustdeskError> {
    for attempt in 1..=attempts {
        if let Ok(value) = query_id(exe_path) {
            if !value.trim().is_empty() {
                return Ok(value);
            }
        }
        // No sleep after the last attempt.
        if attempt < attempts {
            thread::sleep(Duration::from_millis(800));
        }
    }
    Err(RustdeskError::MissingId)
}
/// Runs `rustdesk --get-id` and returns its trimmed stdout; MissingId when
/// the output is blank, CommandFailed on a non-zero exit.
fn query_id(exe_path: &Path) -> Result<String, RustdeskError> {
    let output = hidden_command(exe_path).arg("--get-id").output()?;
    if !output.status.success() {
        return Err(RustdeskError::CommandFailed {
            command: format!("{} --get-id", exe_path.display()),
            status: output.status.code(),
        });
    }
    let id = String::from_utf8_lossy(&output.stdout).trim().to_string();
    if id.is_empty() {
        Err(RustdeskError::MissingId)
    } else {
        Ok(id)
    }
}
/// Runs `rustdesk --version` and returns its trimmed stdout.
fn query_version(exe_path: &Path) -> Result<String, RustdeskError> {
    let output = hidden_command(exe_path).arg("--version").output()?;
    if output.status.success() {
        Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
    } else {
        Err(RustdeskError::CommandFailed {
            command: format!("{} --version", exe_path.display()),
            status: output.status.code(),
        })
    }
}
/// Parses `sc query RustDesk` output into "running"/"stopped"; None when the
/// query fails or no recognizable state line is present.
fn query_service_state() -> Option<String> {
    let output = hidden_command("sc")
        .args(["query", SERVICE_NAME])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    stdout.lines().find_map(|line| {
        let lower = line.to_lowercase();
        if lower.contains("running") {
            Some("running".to_string())
        } else if lower.contains("stopped") {
            Some("stopped".to_string())
        } else {
            None
        }
    })
}
/// Runs `sc` with `args`, discarding its output; CommandFailed on non-zero exit.
fn run_sc(args: &[&str]) -> Result<(), RustdeskError> {
    let status = hidden_command("sc")
        .args(args)
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()?;
    if status.success() {
        Ok(())
    } else {
        Err(RustdeskError::CommandFailed {
            command: format!("sc {}", args.join(" ")),
            status: status.code(),
        })
    }
}
/// Runs the RustDesk executable with `args`, discarding output;
/// CommandFailed on non-zero exit.
fn run_with_args(exe_path: &Path, args: &[&str]) -> Result<(), RustdeskError> {
    let status = hidden_command(exe_path)
        .args(args)
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()?;
    if status.success() {
        Ok(())
    } else {
        Err(RustdeskError::CommandFailed {
            command: format!("{} {}", exe_path.display(), args.join(" ")),
            status: status.code(),
        })
    }
}
/// Removes RustDesk autorun leftovers: Startup-folder shortcuts (machine-wide
/// and current %APPDATA%) and the HKCU/HKLM Run registry values, all best effort.
fn remove_rustdesk_autorun_artifacts() {
    let mut shortcuts: Vec<PathBuf> = vec![PathBuf::from(
        r"C:\ProgramData\Microsoft\Windows\Start Menu\Programs\Startup\RustDesk.lnk",
    )];
    if let Ok(appdata) = env::var("APPDATA") {
        shortcuts.push(
            Path::new(&appdata)
                .join("Microsoft\\Windows\\Start Menu\\Programs\\Startup\\RustDesk.lnk"),
        );
    }
    for shortcut in shortcuts {
        if shortcut.exists() {
            let _ = fs::remove_file(&shortcut);
        }
    }
    // Delete the Run entries in both hives; failures are ignored.
    for hive in ["HKCU", "HKLM"] {
        let run_key = format!(r"{}\Software\Microsoft\Windows\CurrentVersion\Run", hive);
        let _ = hidden_command("reg")
            .args(["delete", &run_key, "/v", "RustDesk", "/f"])
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status();
    }
}
/// Verifies both service-profile config dirs accept writes, repairing their
/// ACLs via takeown/icacls when a write probe fails.
fn ensure_service_profiles_writable() -> Result<(), String> {
    for dir in service_profile_dirs() {
        if can_write_dir(&dir) {
            continue;
        }
        fix_profile_acl(&dir)?;
    }
    Ok(())
}
/// Probes writability by creating the directory and writing (then removing)
/// a throwaway probe file inside it.
fn can_write_dir(dir: &Path) -> bool {
    if fs::create_dir_all(dir).is_err() {
        return false;
    }
    let probe = dir.join(".raven_acl_probe");
    let opened = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(true)
        .open(&probe);
    let writable = match opened {
        Ok(mut file) => file.write_all(b"ok").is_ok(),
        // Could not even create the probe: not writable, nothing to clean up.
        Err(_) => return false,
    };
    // Never leave the probe file behind.
    let _ = fs::remove_file(&probe);
    writable
}
/// Takes ownership of `target` and grants full control to Administrators and
/// LOCAL SERVICE (Users get modify) so the service profiles accept config
/// writes. Returns a human-readable error message when icacls fails.
fn fix_profile_acl(target: &Path) -> Result<(), String> {
    let target_str = target.display().to_string();
    // Already running as LocalSystem, so takeown/icacls work directly.
    // takeown is best effort; icacls below is the authoritative step.
    let _ = hidden_command("takeown")
        .args(["/F", &target_str, "/R", "/D", "Y"])
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status();
    // Well-known SIDs: S-1-5-32-544 = Administrators, S-1-5-19 = LOCAL SERVICE,
    // S-1-5-32-545 = Users. (OI)(CI) makes the grant inheritable.
    let status = hidden_command("icacls")
        .args([
            &target_str,
            "/grant",
            "*S-1-5-32-544:(OI)(CI)F",
            "*S-1-5-19:(OI)(CI)F",
            "*S-1-5-32-545:(OI)(CI)M",
            "/T",
            "/C",
            "/Q",
        ])
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .map_err(|e| format!("Erro ao executar icacls: {}", e))?;
    if status.success() {
        Ok(())
    } else {
        Err(format!("icacls retornou codigo {}", status.code().unwrap_or(-1)))
    }
}
/// Copies `src` over `dst`, first removing whatever exists at the
/// destination (file or directory) and creating parent dirs as needed.
fn copy_overwrite(src: &Path, dst: &Path) -> io::Result<()> {
    if let Some(parent) = dst.parent() {
        fs::create_dir_all(parent)?;
    }
    if dst.is_dir() {
        fs::remove_dir_all(dst)?;
    } else if dst.exists() {
        fs::remove_file(dst)?;
    }
    fs::copy(src, dst).map(|_| ())
}
/// Builds a Command with CREATE_NO_WINDOW so children spawned from the
/// service never flash a console window.
fn hidden_command(program: impl AsRef<OsStr>) -> Command {
    let mut cmd = Command::new(program);
    cmd.creation_flags(CREATE_NO_WINDOW);
    cmd
}

View file

@ -1,259 +0,0 @@
//! Modulo USB Policy - Controle de dispositivos USB
//!
//! Implementa o controle de armazenamento USB no Windows.
//! Como o servico roda como LocalSystem, nao precisa de elevacao.
use serde::{Deserialize, Serialize};
use std::io;
use thiserror::Error;
use tracing::{error, info, warn};
use winreg::enums::*;
use winreg::RegKey;
// GUID para Removable Storage Devices (Disk)
const REMOVABLE_STORAGE_GUID: &str = "{53f56307-b6bf-11d0-94f2-00a0c91efb8b}";
// Chaves de registro
const REMOVABLE_STORAGE_PATH: &str = r"Software\Policies\Microsoft\Windows\RemovableStorageDevices";
const USBSTOR_PATH: &str = r"SYSTEM\CurrentControlSet\Services\USBSTOR";
const STORAGE_POLICY_PATH: &str = r"SYSTEM\CurrentControlSet\Control\StorageDevicePolicies";
/// USB removable-storage policy; serialized as SCREAMING_SNAKE_CASE strings.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum UsbPolicy {
    /// No restrictions on removable storage.
    Allow,
    /// Deny read, write and execute on removable storage.
    BlockAll,
    /// Deny write only; read stays allowed.
    Readonly,
}
impl UsbPolicy {
    /// Parses a policy name case-insensitively; None for unknown values.
    pub fn from_str(s: &str) -> Option<Self> {
        let normalized = s.to_uppercase();
        match normalized.as_str() {
            "ALLOW" => Some(Self::Allow),
            "BLOCK_ALL" => Some(Self::BlockAll),
            "READONLY" => Some(Self::Readonly),
            _ => None,
        }
    }

    /// Canonical SCREAMING_SNAKE_CASE name of this policy.
    pub fn as_str(&self) -> &'static str {
        match *self {
            Self::Allow => "ALLOW",
            Self::BlockAll => "BLOCK_ALL",
            Self::Readonly => "READONLY",
        }
    }
}
/// Result of applying a USB policy, serialized to camelCase for the UI.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UsbPolicyResult {
    /// True when every registry step completed.
    pub success: bool,
    /// Canonical policy name that was applied.
    pub policy: String,
    /// Error description, when any (None on success).
    pub error: Option<String>,
    /// Epoch milliseconds when the policy was applied.
    pub applied_at: Option<i64>,
}
/// Errors produced by the USB policy module.
#[derive(Error, Debug)]
pub enum UsbControlError {
    /// The requested policy string is not ALLOW/BLOCK_ALL/READONLY.
    #[error("Politica USB invalida: {0}")]
    InvalidPolicy(String),
    /// A Windows registry operation failed.
    #[error("Erro de registro do Windows: {0}")]
    RegistryError(String),
    /// Access to a registry key was denied.
    #[error("Permissao negada")]
    PermissionDenied,
    /// Generic I/O failure.
    #[error("Erro de I/O: {0}")]
    Io(#[from] io::Error),
}
/// Parses a policy string ("ALLOW" | "BLOCK_ALL" | "READONLY") and applies
/// it: removable-storage GPO keys, USBSTOR driver start mode, WriteProtect
/// flag, then a best-effort group-policy refresh.
pub fn apply_policy(policy_str: &str) -> Result<UsbPolicyResult, UsbControlError> {
    let policy = UsbPolicy::from_str(policy_str)
        .ok_or_else(|| UsbControlError::InvalidPolicy(policy_str.to_string()))?;
    let now = chrono::Utc::now().timestamp_millis();
    info!("Aplicando politica USB: {:?}", policy);
    // Registry changes: removable-storage GPO keys first, then USBSTOR.
    apply_removable_storage_policy(policy)?;
    apply_usbstor_policy(policy)?;
    // WriteProtect is set only for READONLY and cleared otherwise.
    apply_write_protect(policy == UsbPolicy::Readonly)?;
    // A gpupdate failure is non-fatal: the registry values are already set.
    if let Err(e) = refresh_group_policy() {
        warn!("Falha ao atualizar group policy: {}", e);
    }
    info!("Politica USB aplicada com sucesso: {:?}", policy);
    Ok(UsbPolicyResult {
        success: true,
        policy: policy.as_str().to_string(),
        error: None,
        applied_at: Some(now),
    })
}
/// Infers the effective USB policy from the registry: the removable-storage
/// GPO deny flags first, then the USBSTOR start mode as fallback; defaults
/// to ALLOW when neither indicates a restriction.
pub fn get_current_policy() -> Result<String, UsbControlError> {
    let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
    let gpo_path = format!(r"{}\{}", REMOVABLE_STORAGE_PATH, REMOVABLE_STORAGE_GUID);
    if let Ok(key) = hklm.open_subkey_with_flags(&gpo_path, KEY_READ) {
        let deny_read: u32 = key.get_value("Deny_Read").unwrap_or(0);
        let deny_write: u32 = key.get_value("Deny_Write").unwrap_or(0);
        match (deny_read, deny_write) {
            (1, 1) => return Ok("BLOCK_ALL".to_string()),
            (0, 1) => return Ok("READONLY".to_string()),
            _ => {}
        }
    }
    if let Ok(key) = hklm.open_subkey_with_flags(USBSTOR_PATH, KEY_READ) {
        // Start == 4 disables the USB storage driver.
        let start: u32 = key.get_value("Start").unwrap_or(3);
        if start == 4 {
            return Ok("BLOCK_ALL".to_string());
        }
    }
    Ok("ALLOW".to_string())
}
/// Writes the removable-storage GPO deny flags for `policy`.
///
/// ALLOW removes the restriction values (and drops the key when possible);
/// the other policies write explicit Deny_Read/Deny_Write/Deny_Execute
/// values under the removable-storage GUID key.
fn apply_removable_storage_policy(policy: UsbPolicy) -> Result<(), UsbControlError> {
    let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
    let storage_key_path = format!(r"{}\{}", REMOVABLE_STORAGE_PATH, REMOVABLE_STORAGE_GUID);
    // (Deny_Read, Deny_Write, Deny_Execute) per policy; None means "remove".
    let deny_flags: Option<(u32, u32, u32)> = match policy {
        UsbPolicy::Allow => None,
        UsbPolicy::BlockAll => Some((1, 1, 1)),
        UsbPolicy::Readonly => Some((0, 1, 0)),
    };
    match deny_flags {
        None => {
            // Best effort: clear the values if the key exists, then try to
            // delete the key itself.
            if let Ok(key) = hklm.open_subkey_with_flags(&storage_key_path, KEY_ALL_ACCESS) {
                let _ = key.delete_value("Deny_Read");
                let _ = key.delete_value("Deny_Write");
                let _ = key.delete_value("Deny_Execute");
            }
            let _ = hklm.delete_subkey(&storage_key_path);
        }
        Some((deny_read, deny_write, deny_execute)) => {
            let (key, _) = hklm
                .create_subkey(&storage_key_path)
                .map_err(map_winreg_error)?;
            key.set_value("Deny_Read", &deny_read)
                .map_err(map_winreg_error)?;
            key.set_value("Deny_Write", &deny_write)
                .map_err(map_winreg_error)?;
            key.set_value("Deny_Execute", &deny_execute)
                .map_err(map_winreg_error)?;
        }
    }
    Ok(())
}
/// Sets the USBSTOR driver start mode for `policy`.
///
/// Start = 4 disables the driver (BLOCK_ALL); Start = 3 keeps it loaded —
/// READONLY still needs the driver active so devices can be read.
fn apply_usbstor_policy(policy: UsbPolicy) -> Result<(), UsbControlError> {
    let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
    let usbstor_key = hklm
        .open_subkey_with_flags(USBSTOR_PATH, KEY_ALL_ACCESS)
        .map_err(map_winreg_error)?;
    let start_mode: u32 = if policy == UsbPolicy::BlockAll { 4 } else { 3 };
    usbstor_key
        .set_value("Start", &start_mode)
        .map_err(map_winreg_error)?;
    Ok(())
}
/// Toggles the global StorageDevicePolicies WriteProtect flag.
fn apply_write_protect(enable: bool) -> Result<(), UsbControlError> {
    let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
    if enable {
        // The key must exist before the flag can be written.
        let (key, _) = hklm
            .create_subkey(STORAGE_POLICY_PATH)
            .map_err(map_winreg_error)?;
        key.set_value("WriteProtect", &1u32)
            .map_err(map_winreg_error)?;
        return Ok(());
    }
    // Disabling is best effort: only clear the flag when the key exists.
    if let Ok(key) = hklm.open_subkey_with_flags(STORAGE_POLICY_PATH, KEY_ALL_ACCESS) {
        let _ = key.set_value("WriteProtect", &0u32);
    }
    Ok(())
}
/// Asks Windows to refresh computer group policy via `gpupdate`.
///
/// A non-zero exit status is only logged; the function still returns Ok
/// because the registry writes themselves already took effect.
fn refresh_group_policy() -> Result<(), UsbControlError> {
    use std::os::windows::process::CommandExt;
    use std::process::Command;

    // Keeps the child console window hidden.
    const CREATE_NO_WINDOW: u32 = 0x08000000;

    let result = Command::new("gpupdate")
        .args(["/target:computer", "/force"])
        .creation_flags(CREATE_NO_WINDOW)
        .output()
        .map_err(UsbControlError::Io)?;
    if !result.status.success() {
        warn!(
            "gpupdate retornou erro: {}",
            String::from_utf8_lossy(&result.stderr)
        );
    }
    Ok(())
}
fn map_winreg_error(error: io::Error) -> UsbControlError {
if let Some(code) = error.raw_os_error() {
if code == 5 {
return UsbControlError::PermissionDenied;
}
}
UsbControlError::RegistryError(error.to_string())
}

File diff suppressed because it is too large Load diff

View file

@ -18,21 +18,16 @@ crate-type = ["staticlib", "cdylib", "rlib"]
tauri-build = { version = "2.4.1", features = [] }
[dependencies]
tauri = { version = "2.9", features = ["wry", "devtools", "tray-icon"] }
tauri-plugin-dialog = "2.4.2"
tauri = { version = "2.8.5", features = ["wry", "devtools"] }
tauri-plugin-opener = "2.5.0"
tauri-plugin-store = "2.4.0"
tauri-plugin-updater = "2.9.0"
tauri-plugin-process = "2.3.0"
tauri-plugin-notification = "2"
tauri-plugin-deep-link = "2"
tauri-plugin-single-instance = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
sysinfo = { version = "0.31", default-features = false, features = ["multithread", "network", "system", "disk"] }
get_if_addrs = "0.5"
reqwest = { version = "0.12", features = ["json", "rustls-tls", "blocking", "stream"], default-features = false }
futures-util = "0.3"
reqwest = { version = "0.12", features = ["json", "rustls-tls", "blocking"], default-features = false }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "time"] }
once_cell = "1.19"
thiserror = "1.0"
@ -41,10 +36,3 @@ parking_lot = "0.12"
hostname = "0.4"
base64 = "0.22"
sha2 = "0.10"
convex = "0.10.2"
uuid = { version = "1", features = ["v4"] }
dirs = "5"
# SSE usa reqwest com stream, nao precisa de websocket
[target.'cfg(windows)'.dependencies]
winreg = "0.55"

View file

@ -1,31 +1,3 @@
fn main() {
// Custom manifest keeps Common-Controls v6 dependency to avoid TaskDialogIndirect errors.
let windows = tauri_build::WindowsAttributes::new().app_manifest(
r#"
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="*"
publicKeyToken="6595b64144ccf1df"
language="*" />
</dependentAssembly>
</dependency>
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
</requestedPrivileges>
</security>
</trustInfo>
</assembly>
"#,
);
let attrs = tauri_build::Attributes::new().windows_attributes(windows);
tauri_build::try_build(attrs).expect("failed to run Tauri build script");
tauri_build::build()
}

View file

@ -1,21 +1,10 @@
{
"$schema": "../gen/schemas/desktop-schema.json",
"identifier": "default",
"description": "Capability for all windows",
"windows": ["main", "chat-*", "chat-hub"],
"description": "Capability for the main window",
"windows": ["main"],
"permissions": [
"core:default",
"core:event:default",
"core:event:allow-listen",
"core:event:allow-unlisten",
"core:event:allow-emit",
"core:window:default",
"core:window:allow-close",
"core:window:allow-hide",
"core:window:allow-show",
"core:window:allow-set-focus",
"core:window:allow-start-dragging",
"dialog:allow-open",
"opener:default",
"store:default",
"store:allow-load",
@ -24,10 +13,6 @@
"store:allow-save",
"store:allow-delete",
"updater:default",
"process:default",
"notification:default",
"notification:allow-notify",
"notification:allow-request-permission",
"notification:allow-is-permission-granted"
"process:default"
]
}

View file

@ -1,121 +0,0 @@
; Custom NSIS installer hooks (Tauri)
;
; Purpose:
; - Remove the "Nullsoft Install System" branding shown in the lower-left corner
; - Install the Raven Service for privileged operations without UAC prompts
;
; Note: the Tauri bundler injects these macros into the main installer script.
BrandingText " "
!macro NSIS_HOOK_PREINSTALL
; Stops and removes any previous service instance before updating
DetailPrint "Parando servicos anteriores..."
; Stops the service
nsExec::ExecToLog 'sc stop RavenService'
; Waits for the service to stop completely (up to 10 seconds)
nsExec::ExecToLog 'powershell -Command "$$i=0; while((Get-Service RavenService -ErrorAction SilentlyContinue).Status -eq \"Running\" -and $$i -lt 10){Start-Sleep 1;$$i++}"'
; Removes the old service (IMPORTANT for reinstalls)
DetailPrint "Removendo servico antigo..."
IfFileExists "$INSTDIR\raven-service.exe" 0 +2
nsExec::ExecToLog '"$INSTDIR\raven-service.exe" uninstall'
; Fallback: remove via sc delete when the executable is missing
nsExec::ExecToLog 'sc delete RavenService'
; Force-kills any remaining processes
nsExec::ExecToLog 'taskkill /F /IM raven-service.exe'
nsExec::ExecToLog 'taskkill /F /IM appsdesktop.exe'
; Waits for file handles to be released and service removal to finish
Sleep 3000
!macroend
!macro NSIS_HOOK_POSTINSTALL
; =========================================================================
; Installs and starts the Raven Service
; =========================================================================
DetailPrint "Instalando Raven Service..."
; Ensures no residual service registration remains
nsExec::ExecToLog 'sc delete RavenService'
Sleep 1000
; The service binary is already in $INSTDIR (copied as a Tauri resource)
; Registers the Windows service
nsExec::ExecToLog '"$INSTDIR\raven-service.exe" install'
Pop $0
${If} $0 != 0
DetailPrint "Aviso: Falha ao registrar servico (codigo: $0)"
; Tries a full removal followed by a reinstall
nsExec::ExecToLog '"$INSTDIR\raven-service.exe" uninstall'
nsExec::ExecToLog 'sc delete RavenService'
Sleep 1000
nsExec::ExecToLog '"$INSTDIR\raven-service.exe" install'
Pop $0
${EndIf}
; Waits for service registration to settle
Sleep 500
; Starts the service
DetailPrint "Iniciando Raven Service..."
nsExec::ExecToLog 'sc start RavenService'
Pop $0
${If} $0 == 0
DetailPrint "Raven Service iniciado com sucesso!"
${Else}
; Retries once after a short wait
Sleep 1000
nsExec::ExecToLog 'sc start RavenService'
Pop $0
${If} $0 == 0
DetailPrint "Raven Service iniciado com sucesso (segunda tentativa)!"
${Else}
DetailPrint "Aviso: Servico sera iniciado na proxima reinicializacao (codigo: $0)"
${EndIf}
${EndIf}
; =========================================================================
; Checks whether RustDesk is installed
; If not, the Raven Service installs it automatically on first use
; =========================================================================
IfFileExists "$PROGRAMFILES\RustDesk\rustdesk.exe" rustdesk_found rustdesk_not_found
rustdesk_not_found:
DetailPrint "RustDesk sera instalado automaticamente pelo Raven Service."
Goto rustdesk_done
rustdesk_found:
DetailPrint "RustDesk ja esta instalado."
rustdesk_done:
!macroend
!macro NSIS_HOOK_PREUNINSTALL
; =========================================================================
; Stops and removes the Raven Service
; =========================================================================
DetailPrint "Parando Raven Service..."
nsExec::ExecToLog 'sc stop RavenService'
Sleep 1000
DetailPrint "Removendo Raven Service..."
nsExec::ExecToLog '"$INSTDIR\raven-service.exe" uninstall'
; Waits briefly to ensure the service was fully removed
Sleep 500
!macroend
!macro NSIS_HOOK_POSTUNINSTALL
; Nothing additional required
!macroend

View file

@ -708,7 +708,7 @@ fn collect_windows_extended() -> serde_json::Value {
}
fn decode_utf16_le_to_string(bytes: &[u8]) -> Option<String> {
if !bytes.len().is_multiple_of(2) {
if bytes.len() % 2 != 0 {
return None;
}
let utf16: Vec<u16> = bytes
@ -931,209 +931,9 @@ fn collect_windows_extended() -> serde_json::Value {
.unwrap_or_else(|| json!({}));
let bios = ps("Get-CimInstance Win32_BIOS | Select-Object Manufacturer,SMBIOSBIOSVersion,ReleaseDate,Version").unwrap_or_else(|| json!({}));
let memory = ps("@(Get-CimInstance Win32_PhysicalMemory | Select-Object BankLabel,Capacity,Manufacturer,PartNumber,SerialNumber,ConfiguredClockSpeed,Speed,ConfiguredVoltage)").unwrap_or_else(|| json!([]));
// Coleta de GPU com VRAM correta (nvidia-smi para NVIDIA, registro como fallback para >4GB)
let video = ps(r#"
$gpus = @()
$wmiGpus = Get-CimInstance Win32_VideoController | Select-Object Name,AdapterRAM,DriverVersion,PNPDeviceID
foreach ($gpu in $wmiGpus) {
$vram = $gpu.AdapterRAM
# Tenta nvidia-smi para GPUs NVIDIA (retorna valor correto para >4GB)
if ($gpu.Name -match 'NVIDIA') {
try {
$nvidiaSmi = & 'nvidia-smi' '--query-gpu=memory.total' '--format=csv,noheader,nounits' 2>$null
if ($nvidiaSmi) {
$vramMB = [int64]($nvidiaSmi.Trim())
$vram = $vramMB * 1024 * 1024
}
} catch {}
}
# Fallback: tenta registro do Windows (qwMemorySize é uint64)
if ($vram -le 4294967296 -and $vram -gt 0) {
try {
$regPath = 'HKLM:\SYSTEM\ControlSet001\Control\Class\{4d36e968-e325-11ce-bfc1-08002be10318}\0*'
$regGpus = Get-ItemProperty $regPath -ErrorAction SilentlyContinue
foreach ($reg in $regGpus) {
if ($reg.DriverDesc -eq $gpu.Name -and $reg.'HardwareInformation.qwMemorySize') {
$vram = [int64]$reg.'HardwareInformation.qwMemorySize'
break
}
}
} catch {}
}
$gpus += [PSCustomObject]@{
Name = $gpu.Name
AdapterRAM = $vram
DriverVersion = $gpu.DriverVersion
PNPDeviceID = $gpu.PNPDeviceID
}
}
@($gpus)
"#).unwrap_or_else(|| json!([]));
let video = ps("@(Get-CimInstance Win32_VideoController | Select-Object Name,AdapterRAM,DriverVersion,PNPDeviceID)").unwrap_or_else(|| json!([]));
let disks = ps("@(Get-CimInstance Win32_DiskDrive | Select-Object Model,SerialNumber,Size,InterfaceType,MediaType)").unwrap_or_else(|| json!([]));
// Bateria (notebooks/laptops)
let battery = ps(r#"
$batteries = @(Get-CimInstance Win32_Battery | Select-Object Name,DeviceID,Status,BatteryStatus,EstimatedChargeRemaining,EstimatedRunTime,DesignCapacity,FullChargeCapacity,DesignVoltage,Chemistry,BatteryRechargeTime)
if ($batteries.Count -eq 0) {
[PSCustomObject]@{ Present = $false; Batteries = @() }
} else {
# Mapeia status numérico para texto
$statusMap = @{
1 = 'Discharging'
2 = 'AC Power'
3 = 'Fully Charged'
4 = 'Low'
5 = 'Critical'
6 = 'Charging'
7 = 'Charging High'
8 = 'Charging Low'
9 = 'Charging Critical'
10 = 'Undefined'
11 = 'Partially Charged'
}
foreach ($b in $batteries) {
if ($b.BatteryStatus) {
$b | Add-Member -NotePropertyName 'BatteryStatusText' -NotePropertyValue ($statusMap[[int]$b.BatteryStatus] ?? 'Unknown') -Force
}
}
[PSCustomObject]@{ Present = $true; Batteries = $batteries }
}
"#).unwrap_or_else(|| json!({ "Present": false, "Batteries": [] }));
// Sensores térmicos (temperatura CPU/GPU quando disponível)
let thermal = ps(r#"
$temps = @()
# Tenta WMI thermal zone (requer admin em alguns sistemas)
try {
$zones = Get-CimInstance -Namespace 'root/WMI' -ClassName MSAcpi_ThermalZoneTemperature -ErrorAction SilentlyContinue
foreach ($z in $zones) {
if ($z.CurrentTemperature) {
$celsius = [math]::Round(($z.CurrentTemperature - 2732) / 10, 1)
$temps += [PSCustomObject]@{
Source = 'ThermalZone'
Name = $z.InstanceName
TemperatureCelsius = $celsius
CriticalTripPoint = if ($z.CriticalTripPoint) { [math]::Round(($z.CriticalTripPoint - 2732) / 10, 1) } else { $null }
}
}
}
} catch {}
# CPU temp via Open Hardware Monitor WMI (se instalado)
try {
$ohm = Get-CimInstance -Namespace 'root/OpenHardwareMonitor' -ClassName Sensor -ErrorAction SilentlyContinue | Where-Object { $_.SensorType -eq 'Temperature' }
foreach ($s in $ohm) {
$temps += [PSCustomObject]@{
Source = 'OpenHardwareMonitor'
Name = $s.Name
TemperatureCelsius = $s.Value
Parent = $s.Parent
}
}
} catch {}
@($temps)
"#).unwrap_or_else(|| json!([]));
// Adaptadores de rede (físicos e virtuais)
let network_adapters = ps(r#"
@(Get-CimInstance Win32_NetworkAdapter | Where-Object { $_.PhysicalAdapter -eq $true -or $_.NetConnectionStatus -ne $null } | Select-Object Name,Description,MACAddress,Speed,NetConnectionStatus,AdapterType,Manufacturer,NetConnectionID,PNPDeviceID | ForEach-Object {
$statusMap = @{
0 = 'Disconnected'
1 = 'Connecting'
2 = 'Connected'
3 = 'Disconnecting'
4 = 'Hardware not present'
5 = 'Hardware disabled'
6 = 'Hardware malfunction'
7 = 'Media disconnected'
8 = 'Authenticating'
9 = 'Authentication succeeded'
10 = 'Authentication failed'
11 = 'Invalid address'
12 = 'Credentials required'
}
$_ | Add-Member -NotePropertyName 'StatusText' -NotePropertyValue ($statusMap[[int]$_.NetConnectionStatus] ?? 'Unknown') -Force
$_
})
"#).unwrap_or_else(|| json!([]));
// Monitores conectados
let monitors = ps(r#"
@(Get-CimInstance WmiMonitorID -Namespace root/wmi -ErrorAction SilentlyContinue | ForEach-Object {
$decode = { param($arr) if ($arr) { -join ($arr | Where-Object { $_ -ne 0 } | ForEach-Object { [char]$_ }) } else { $null } }
[PSCustomObject]@{
ManufacturerName = & $decode $_.ManufacturerName
ProductCodeID = & $decode $_.ProductCodeID
SerialNumberID = & $decode $_.SerialNumberID
UserFriendlyName = & $decode $_.UserFriendlyName
YearOfManufacture = $_.YearOfManufacture
WeekOfManufacture = $_.WeekOfManufacture
}
})
"#).unwrap_or_else(|| json!([]));
// Fonte de alimentação / chassis
let power_supply = ps(r#"
$chassis = Get-CimInstance Win32_SystemEnclosure | Select-Object ChassisTypes,Manufacturer,SerialNumber,SMBIOSAssetTag
$chassisTypeMap = @{
1 = 'Other'; 2 = 'Unknown'; 3 = 'Desktop'; 4 = 'Low Profile Desktop'
5 = 'Pizza Box'; 6 = 'Mini Tower'; 7 = 'Tower'; 8 = 'Portable'
9 = 'Laptop'; 10 = 'Notebook'; 11 = 'Hand Held'; 12 = 'Docking Station'
13 = 'All in One'; 14 = 'Sub Notebook'; 15 = 'Space-Saving'; 16 = 'Lunch Box'
17 = 'Main Server Chassis'; 18 = 'Expansion Chassis'; 19 = 'SubChassis'
20 = 'Bus Expansion Chassis'; 21 = 'Peripheral Chassis'; 22 = 'RAID Chassis'
23 = 'Rack Mount Chassis'; 24 = 'Sealed-case PC'; 25 = 'Multi-system chassis'
30 = 'Tablet'; 31 = 'Convertible'; 32 = 'Detachable'
}
$types = @()
if ($chassis.ChassisTypes) {
foreach ($t in $chassis.ChassisTypes) {
$types += $chassisTypeMap[[int]$t] ?? "Type$t"
}
}
[PSCustomObject]@{
ChassisTypes = $chassis.ChassisTypes
ChassisTypesText = $types
Manufacturer = $chassis.Manufacturer
SerialNumber = $chassis.SerialNumber
SMBIOSAssetTag = $chassis.SMBIOSAssetTag
}
"#).unwrap_or_else(|| json!({}));
// Último reinício e contagem de boots
let boot_info = ps(r#"
$os = Get-CimInstance Win32_OperatingSystem | Select-Object LastBootUpTime
$lastBoot = $os.LastBootUpTime
# Calcula uptime
$uptime = if ($lastBoot) { (New-TimeSpan -Start $lastBoot -End (Get-Date)).TotalSeconds } else { 0 }
# Conta eventos de boot (ID 6005) - últimos 30 dias para performance
$startDate = (Get-Date).AddDays(-30)
$bootEvents = @()
$bootCount = 0
try {
$events = Get-WinEvent -FilterHashtable @{
LogName = 'System'
ID = 6005
StartTime = $startDate
} -MaxEvents 50 -ErrorAction SilentlyContinue
$bootCount = @($events).Count
$bootEvents = @($events | Select-Object -First 10 | ForEach-Object {
@{
TimeCreated = $_.TimeCreated.ToString('o')
Computer = $_.MachineName
}
})
} catch {}
[PSCustomObject]@{
LastBootTime = if ($lastBoot) { $lastBoot.ToString('o') } else { $null }
UptimeSeconds = [math]::Round($uptime)
BootCountLast30Days = $bootCount
RecentBoots = $bootEvents
}
"#).unwrap_or_else(|| json!({ "LastBootTime": null, "UptimeSeconds": 0, "BootCountLast30Days": 0, "RecentBoots": [] }));
json!({
"windows": {
"software": software,
@ -1155,12 +955,6 @@ fn collect_windows_extended() -> serde_json::Value {
"windowsUpdate": windows_update,
"computerSystem": computer_system,
"azureAdStatus": device_join,
"battery": battery,
"thermal": thermal,
"networkAdapters": network_adapters,
"monitors": monitors,
"chassis": power_supply,
"bootInfo": boot_info,
}
})
}
@ -1255,7 +1049,7 @@ pub fn collect_profile() -> Result<MachineProfile, AgentError> {
let system = collect_system();
let os_name = System::name()
.or_else(System::long_os_version)
.or_else(|| System::long_os_version())
.unwrap_or_else(|| "desconhecido".to_string());
let os_version = System::os_version();
let architecture = std::env::consts::ARCH.to_string();
@ -1315,7 +1109,7 @@ async fn post_heartbeat(
.into_owned();
let os = MachineOs {
name: System::name()
.or_else(System::long_os_version)
.or_else(|| System::long_os_version())
.unwrap_or_else(|| "desconhecido".to_string()),
version: System::os_version(),
architecture: Some(std::env::consts::ARCH.to_string()),
@ -1335,232 +1129,6 @@ async fn post_heartbeat(
Ok(())
}
/// Server response for the pending-USB-policy poll
/// (`GET /api/machines/usb-policy`).
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct UsbPolicyResponse {
/// True when a policy change is waiting to be applied on this machine.
pending: bool,
/// Wire-format policy name when `pending` is true.
policy: Option<String>,
/// Timestamp reported by the server; currently unused by the agent.
#[allow(dead_code)]
applied_at: Option<i64>,
}
/// Payload posted back to the server to report USB-policy application status.
#[derive(Debug, serde::Serialize)]
#[serde(rename_all = "camelCase")]
struct UsbPolicyStatusReport {
/// Token identifying this machine to the backend.
machine_token: String,
/// One of "APPLYING", "APPLIED" or "FAILED" (as sent by the agent).
status: String,
/// Failure description when status is "FAILED".
error: Option<String>,
/// Policy actually in effect, reported on success.
current_policy: Option<String>,
}
/// Polls the backend for a pending USB policy for this machine and, when one
/// exists, applies it (preferring the privileged RavenService, then a direct
/// attempt) and reports the outcome back to the server.
async fn check_and_apply_usb_policy(base_url: &str, token: &str) {
crate::log_info!("Verificando politica USB pendente...");
let url = format!("{}/api/machines/usb-policy?machineToken={}", base_url, token);
let response = match HTTP_CLIENT.get(&url).send().await {
Ok(resp) => {
crate::log_info!("Resposta da verificacao de politica USB: status={}", resp.status());
resp
}
Err(e) => {
crate::log_error!("Falha ao verificar politica USB: {e}");
return;
}
};
let policy_response: UsbPolicyResponse = match response.json().await {
Ok(data) => data,
Err(e) => {
crate::log_error!("Falha ao parsear resposta de politica USB: {e}");
return;
}
};
if !policy_response.pending {
crate::log_info!("Nenhuma politica USB pendente");
return;
}
let policy_str = match policy_response.policy {
Some(p) => p,
None => {
crate::log_warn!("Politica USB pendente mas sem valor de policy");
return;
}
};
crate::log_info!("Politica USB pendente encontrada: {}", policy_str);
#[cfg(target_os = "windows")]
{
use crate::usb_control::{get_current_policy, UsbPolicy};
use crate::service_client;
let policy = match UsbPolicy::from_str(&policy_str) {
Some(p) => p,
None => {
crate::log_error!("Politica USB invalida: {}", policy_str);
report_usb_policy_status(base_url, token, "FAILED", Some(format!("Politica invalida: {}", policy_str)), None).await;
return;
}
};
// Skips the work when the policy is already in effect locally
match get_current_policy() {
Ok(current) if current == policy => {
crate::log_info!("Politica USB ja esta aplicada localmente: {}", policy_str);
let reported = report_usb_policy_status(base_url, token, "APPLIED", None, Some(policy_str.clone())).await;
if !reported {
crate::log_error!("Falha ao reportar politica ja aplicada");
}
return;
}
Ok(current) => {
crate::log_info!("Politica atual: {:?}, esperada: {:?}", current, policy);
}
Err(e) => {
crate::log_warn!("Nao foi possivel ler politica atual: {e}");
}
}
crate::log_info!("Aplicando politica USB: {}", policy_str);
// Reports APPLYING so the frontend can show a real progress bar
let _ = report_usb_policy_status(base_url, token, "APPLYING", None, None).await;
// Tries the privileged RavenService first
crate::log_info!("Tentando aplicar politica via RavenService...");
match service_client::apply_usb_policy(&policy_str) {
Ok(result) => {
if result.success {
crate::log_info!("Politica USB aplicada com sucesso via RavenService: {:?}", result);
let reported = report_usb_policy_status(base_url, token, "APPLIED", None, Some(policy_str.clone())).await;
if !reported {
crate::log_error!("CRITICO: Politica aplicada mas falha ao reportar ao servidor!");
// Schedules a one-shot retry of the report in 60 seconds
let base_url = base_url.to_string();
let token = token.to_string();
tokio::spawn(async move {
tokio::time::sleep(Duration::from_secs(60)).await;
crate::log_info!("Retry agendado: reportando politica USB...");
let _ = report_usb_policy_status(&base_url, &token, "APPLIED", None, Some(policy_str)).await;
});
}
return;
} else {
let err_msg = result.error.unwrap_or_else(|| "Erro desconhecido".to_string());
crate::log_error!("RavenService retornou erro: {}", err_msg);
report_usb_policy_status(base_url, token, "FAILED", Some(err_msg), None).await;
}
}
Err(service_client::ServiceClientError::ServiceUnavailable(msg)) => {
crate::log_warn!("RavenService nao disponivel: {}", msg);
// Direct fallback (will fail without elevated privileges)
crate::log_info!("Tentando aplicar politica diretamente...");
match crate::usb_control::apply_usb_policy(policy) {
Ok(result) => {
crate::log_info!("Politica USB aplicada com sucesso (direto): {:?}", result);
let reported = report_usb_policy_status(base_url, token, "APPLIED", None, Some(policy_str.clone())).await;
if !reported {
crate::log_error!("CRITICO: Politica aplicada mas falha ao reportar ao servidor!");
// Same one-shot 60s retry as the service path above
let base_url = base_url.to_string();
let token = token.to_string();
tokio::spawn(async move {
tokio::time::sleep(Duration::from_secs(60)).await;
crate::log_info!("Retry agendado: reportando politica USB...");
let _ = report_usb_policy_status(&base_url, &token, "APPLIED", None, Some(policy_str)).await;
});
}
}
Err(e) => {
let err_msg = format!("RavenService indisponivel e aplicacao direta falhou: {}. Instale ou inicie o RavenService.", e);
crate::log_error!("{}", err_msg);
report_usb_policy_status(base_url, token, "FAILED", Some(err_msg), None).await;
}
}
}
Err(e) => {
crate::log_error!("Falha ao comunicar com RavenService: {e}");
report_usb_policy_status(base_url, token, "FAILED", Some(e.to_string()), None).await;
}
}
}
#[cfg(not(target_os = "windows"))]
{
crate::log_warn!("Controle de USB nao suportado neste sistema operacional");
report_usb_policy_status(base_url, token, "FAILED", Some("Sistema operacional nao suportado".to_string()), None).await;
}
}
/// Posts a USB-policy status report to the backend.
///
/// Performs one immediate attempt plus one retry after 2 seconds and
/// returns `true` as soon as a 2xx response is received, `false` otherwise.
///
/// Bug fix: the previous implementation iterated over `delays = [2]`, so
/// the loop ran exactly once and the `attempt < delays.len() - 1` guard
/// made the sleep/retry branch unreachable — despite the comment promising
/// "1 immediate attempt + 1 retry after 2s". This version really retries.
async fn report_usb_policy_status(
    base_url: &str,
    token: &str,
    status: &str,
    error: Option<String>,
    current_policy: Option<String>,
) -> bool {
    let url = format!("{}/api/machines/usb-policy", base_url);
    let report = UsbPolicyStatusReport {
        machine_token: token.to_string(),
        status: status.to_string(),
        error,
        current_policy,
    };
    crate::log_info!("Reportando status de politica USB: status={}", status);
    // Simple retry: one immediate attempt + one retry after 2 seconds.
    const MAX_ATTEMPTS: usize = 2;
    const RETRY_DELAY_SECS: u64 = 2;
    let mut last_error = None;
    for attempt in 0..MAX_ATTEMPTS {
        match HTTP_CLIENT.post(&url).json(&report).send().await {
            Ok(response) => {
                let status_code = response.status();
                if status_code.is_success() {
                    crate::log_info!(
                        "Report de politica USB enviado com sucesso na tentativa {}",
                        attempt + 1
                    );
                    return true;
                } else {
                    let body = response.text().await.unwrap_or_default();
                    last_error = Some(format!("HTTP {} - {}", status_code, body));
                    crate::log_warn!(
                        "Report de politica USB falhou (tentativa {}): HTTP {}",
                        attempt + 1,
                        status_code
                    );
                }
            }
            Err(e) => {
                last_error = Some(e.to_string());
                crate::log_warn!(
                    "Report de politica USB falhou (tentativa {}): {}",
                    attempt + 1,
                    e
                );
            }
        }
        // Sleep only between attempts, never after the last one.
        if attempt + 1 < MAX_ATTEMPTS {
            crate::log_info!("Retentando report de politica USB em {}s...", RETRY_DELAY_SECS);
            tokio::time::sleep(Duration::from_secs(RETRY_DELAY_SECS)).await;
        }
    }
    if let Some(err) = last_error {
        crate::log_error!(
            "Falha ao reportar status de politica USB apos {} tentativas: {err}",
            MAX_ATTEMPTS
        );
    }
    false
}
struct HeartbeatHandle {
token: String,
base_url: String,
@ -1575,9 +1143,9 @@ impl HeartbeatHandle {
}
}
#[derive(Default, Clone)]
#[derive(Default)]
pub struct AgentRuntime {
inner: Arc<Mutex<Option<HeartbeatHandle>>>,
inner: Mutex<Option<HeartbeatHandle>>,
}
fn sanitize_base_url(input: &str) -> Result<String, AgentError> {
@ -1591,7 +1159,7 @@ fn sanitize_base_url(input: &str) -> Result<String, AgentError> {
impl AgentRuntime {
pub fn new() -> Self {
Self {
inner: Arc::new(Mutex::new(None)),
inner: Mutex::new(None),
}
}
@ -1624,46 +1192,29 @@ impl AgentRuntime {
let status_clone = status.clone();
let join_handle = async_runtime::spawn(async move {
crate::log_info!("Loop de agente iniciado");
if let Err(error) =
post_heartbeat(&base_clone, &token_clone, status_clone.clone()).await
{
crate::log_error!("Falha inicial ao enviar heartbeat: {error}");
} else {
crate::log_info!("Heartbeat inicial enviado com sucesso");
eprintln!("[agent] Falha inicial ao enviar heartbeat: {error}");
}
// Verifica politica USB apos heartbeat inicial
check_and_apply_usb_policy(&base_clone, &token_clone).await;
let mut heartbeat_ticker = tokio::time::interval(Duration::from_secs(interval));
heartbeat_ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Delay);
let mut usb_ticker = tokio::time::interval(Duration::from_secs(15));
usb_ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Delay);
let mut ticker = tokio::time::interval(Duration::from_secs(interval));
ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Delay);
loop {
// Wait interval
tokio::select! {
_ = stop_signal_clone.notified() => {
crate::log_info!("Loop de agente encerrado por sinal de parada");
break;
}
_ = heartbeat_ticker.tick() => {}
_ = usb_ticker.tick() => {
check_and_apply_usb_policy(&base_clone, &token_clone).await;
continue;
}
_ = ticker.tick() => {}
}
if let Err(error) =
post_heartbeat(&base_clone, &token_clone, status_clone.clone()).await
{
crate::log_error!("Falha ao enviar heartbeat: {error}");
eprintln!("[agent] Falha ao enviar heartbeat: {error}");
}
// Verifica politica USB apos cada heartbeat
check_and_apply_usb_policy(&base_clone, &token_clone).await;
}
});

File diff suppressed because it is too large Load diff

View file

@ -1,92 +1,10 @@
mod agent;
mod chat;
#[cfg(target_os = "windows")]
mod rustdesk;
#[cfg(target_os = "windows")]
mod service_client;
mod usb_control;
use agent::{collect_inventory_plain, collect_profile, AgentRuntime, MachineProfile};
use chat::{ChatRuntime, ChatSession, ChatMessagesResponse, SendMessageResponse};
use chrono::Local;
use usb_control::{UsbPolicy, UsbPolicyResult};
use tauri::{Emitter, Listener, Manager, WindowEvent};
use tauri::Emitter;
use tauri_plugin_store::Builder as StorePluginBuilder;
use std::fs::OpenOptions;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::OnceLock;
#[cfg(target_os = "windows")]
use tauri::menu::{MenuBuilder, MenuItemBuilder};
#[cfg(target_os = "windows")]
use tauri::tray::TrayIconBuilder;
#[cfg(target_os = "windows")]
use winreg::enums::*;
#[cfg(target_os = "windows")]
use winreg::RegKey;
/// Default Convex backend URL used when no override is configured.
const DEFAULT_CONVEX_URL: &str = "https://convex.esdrasrenan.com.br";
// ============================================================================
// Agent logging system
// ============================================================================
/// Global handle to the agent log file, set once by `init_agent_logging()`.
static AGENT_LOG_FILE: OnceLock<std::sync::Mutex<std::fs::File>> = OnceLock::new();
/// Opens (creating directories as needed) the agent log file in append mode
/// and installs its handle into the global AGENT_LOG_FILE slot.
///
/// Calling it again is harmless: `OnceLock::set` simply rejects the second
/// handle and the original stays in place.
pub fn init_agent_logging() -> Result<(), String> {
    let log_dir = logs_directory()
        .ok_or("LOCALAPPDATA indisponivel para logging")?;
    std::fs::create_dir_all(&log_dir)
        .map_err(|e| format!("Falha ao criar diretorio de logs: {e}"))?;
    let log_path = log_dir.join("raven-agent.log");
    let handle = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)
        .map_err(|e| format!("Falha ao abrir raven-agent.log: {e}"))?;
    let _ = AGENT_LOG_FILE.set(std::sync::Mutex::new(handle));
    Ok(())
}
/// Writes one timestamped log line to stderr and, when logging has been
/// initialised, appends it to the agent log file as well.
pub fn log_agent(level: &str, message: &str) {
    let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S%.3f");
    let entry = format!("[{timestamp}] [{level}] {message}\n");
    // Mirror to stderr (useful in dev/debug runs).
    eprint!("{}", entry);
    // Best effort: skip silently when init_agent_logging() never ran or the
    // mutex is poisoned.
    if let Some(lock) = AGENT_LOG_FILE.get() {
        if let Ok(mut sink) = lock.lock() {
            let _ = sink.write_all(entry.as_bytes());
            let _ = sink.flush();
        }
    }
}
/// Logs a formatted INFO-level message through `log_agent`.
#[macro_export]
macro_rules! log_info {
($($arg:tt)*) => {
$crate::log_agent("INFO", format!($($arg)*).as_str())
};
}
/// Logs a formatted ERROR-level message through `log_agent`.
#[macro_export]
macro_rules! log_error {
($($arg:tt)*) => {
$crate::log_agent("ERROR", format!($($arg)*).as_str())
};
}
/// Logs a formatted WARN-level message through `log_agent`.
#[macro_export]
macro_rules! log_warn {
($($arg:tt)*) => {
$crate::log_agent("WARN", format!($($arg)*).as_str())
};
}
#[derive(Debug, serde::Serialize)]
#[serde(rename_all = "camelCase")]
@ -133,38 +51,6 @@ fn open_devtools(window: tauri::WebviewWindow) -> Result<(), String> {
Ok(())
}
/// Tauri command: appends one message line to the app.log file.
#[tauri::command]
fn log_app_event(message: String) -> Result<(), String> {
append_app_log(&message)
}
/// Appends a single timestamped line to app.log under the logs directory,
/// creating the directory and file as needed.
fn append_app_log(message: &str) -> Result<(), String> {
    let log_dir = logs_directory()
        .ok_or_else(|| "LOCALAPPDATA indisponivel para gravar logs".to_string())?;
    std::fs::create_dir_all(&log_dir)
        .map_err(|error| format!("Falha ao criar pasta de logs: {error}"))?;
    let log_path = log_dir.join("app.log");
    let mut log_file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)
        .map_err(|error| format!("Falha ao abrir app.log: {error}"))?;
    let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S");
    writeln!(log_file, "[{timestamp}] {message}")
        .map_err(|error| format!("Falha ao escrever log: {error}"))?;
    Ok(())
}
/// Resolves `%LOCALAPPDATA%\br.com.esdrasrenan.sistemadechamados\logs`,
/// or None when the LOCALAPPDATA environment variable is unset.
fn logs_directory() -> Option<PathBuf> {
    std::env::var("LOCALAPPDATA").ok().map(|base| {
        PathBuf::from(base)
            .join("br.com.esdrasrenan.sistemadechamados")
            .join("logs")
    })
}
#[tauri::command]
async fn ensure_rustdesk_and_emit(
app: tauri::AppHandle,
@ -191,32 +77,6 @@ fn run_rustdesk_ensure(
password: Option<String>,
machine_id: Option<String>,
) -> Result<RustdeskProvisioningResult, String> {
// Tenta usar o servico primeiro (sem UAC)
if service_client::is_service_available() {
log_info!("Usando Raven Service para provisionar RustDesk");
match service_client::provision_rustdesk(
config_string.as_deref(),
password.as_deref(),
machine_id.as_deref(),
) {
Ok(result) => {
return Ok(RustdeskProvisioningResult {
id: result.id,
password: result.password,
installed_version: result.installed_version,
updated: result.updated,
last_provisioned_at: result.last_provisioned_at,
});
}
Err(e) => {
log_warn!("Falha ao usar servico para RustDesk: {e}");
// Continua para fallback
}
}
}
// Fallback: chamada direta (pode pedir UAC)
log_info!("Usando chamada direta para provisionar RustDesk (pode pedir UAC)");
rustdesk::ensure_rustdesk(
config_string.as_deref(),
password.as_deref(),
@ -234,604 +94,22 @@ fn run_rustdesk_ensure(
Err("Provisionamento automático do RustDesk está disponível apenas no Windows.".to_string())
}
/// Tauri command: validates and applies a USB device policy.
///
/// `policy` must be "ALLOW", "BLOCK_ALL" or "READONLY". On Windows the
/// privileged Raven Service is tried first (no UAC prompt); when the service
/// is unavailable or fails, we fall back to a direct call, which may trigger
/// a UAC elevation prompt.
#[tauri::command]
fn apply_usb_policy(policy: String) -> Result<UsbPolicyResult, String> {
    // Validate the policy string up front so both code paths get a known value.
    // (Renamed from `_policy_enum`: the leading underscore signalled "unused",
    // but the value is always consumed by the fallback call below.)
    let policy_enum = UsbPolicy::from_str(&policy)
        .ok_or_else(|| format!("Politica USB invalida: {}. Use ALLOW, BLOCK_ALL ou READONLY.", policy))?;
    // Prefer the Raven Service: it already runs elevated, so no UAC prompt.
    #[cfg(target_os = "windows")]
    if service_client::is_service_available() {
        log_info!("Usando Raven Service para aplicar politica USB: {}", policy);
        match service_client::apply_usb_policy(&policy) {
            Ok(result) => {
                return Ok(UsbPolicyResult {
                    success: result.success,
                    policy: result.policy,
                    error: result.error,
                    applied_at: result.applied_at,
                });
            }
            Err(e) => {
                log_warn!("Falha ao usar servico para USB policy: {e}");
                // Fall through to the direct call below.
            }
        }
    }
    // Fallback: direct call (may prompt for UAC elevation).
    log_info!("Usando chamada direta para aplicar politica USB (pode pedir UAC)");
    usb_control::apply_usb_policy(policy_enum).map_err(|e| e.to_string())
}
/// Tauri command: returns the current USB policy as a string.
///
/// On Windows, asks the Raven Service first; on failure (or off-Windows)
/// falls back to reading the policy directly, which needs no elevation.
#[tauri::command]
fn get_usb_policy() -> Result<String, String> {
    // Try the service first.
    #[cfg(target_os = "windows")]
    if service_client::is_service_available() {
        match service_client::get_usb_policy() {
            Ok(policy) => return Ok(policy),
            Err(e) => {
                log_warn!("Falha ao obter USB policy via servico: {e}");
                // Continue to the fallback below.
            }
        }
    }
    // Fallback: direct read (no elevation required for reading).
    usb_control::get_current_policy()
        .map(|p| p.as_str().to_string())
        .map_err(|e| e.to_string())
}
/// Tauri command: forces a group-policy refresh so a newly applied USB
/// policy takes effect immediately.
#[tauri::command]
fn refresh_usb_policy() -> Result<(), String> {
    usb_control::refresh_group_policy().map_err(|e| e.to_string())
}
// ============================================================================
// COMANDOS DE CHAT
// ============================================================================
/// Tauri command: starts the chat runtime (realtime/polling) for this app.
/// When `convex_url` is omitted, falls back to `DEFAULT_CONVEX_URL`.
#[tauri::command]
fn start_chat_polling(
    state: tauri::State<ChatRuntime>,
    app: tauri::AppHandle,
    base_url: String,
    convex_url: Option<String>,
    token: String,
) -> Result<(), String> {
    let url = convex_url.unwrap_or_else(|| DEFAULT_CONVEX_URL.to_string());
    state.start_polling(base_url, url, token, app)
}
/// Tauri command: stops the chat runtime. Always succeeds.
#[tauri::command]
fn stop_chat_polling(state: tauri::State<ChatRuntime>) -> Result<(), String> {
    state.stop();
    Ok(())
}
/// Tauri command: reports whether the chat runtime is currently using the
/// realtime (SSE/WebSocket) transport rather than HTTP polling.
#[tauri::command]
fn is_chat_using_realtime(state: tauri::State<ChatRuntime>) -> bool {
    state.is_using_sse()
}
/// Tauri command: returns the chat sessions currently tracked by the runtime.
#[tauri::command]
fn get_chat_sessions(state: tauri::State<ChatRuntime>) -> Vec<ChatSession> {
    state.get_sessions()
}
/// Tauri command: fetches the chat session list from the backend.
#[tauri::command]
async fn fetch_chat_sessions(base_url: String, token: String) -> Result<Vec<ChatSession>, String> {
    chat::fetch_sessions(&base_url, &token).await
}
/// Tauri command: fetches messages for a ticket, optionally only those newer
/// than `since` (a timestamp cursor — semantics defined by the backend).
#[tauri::command]
async fn fetch_chat_messages(
    base_url: String,
    token: String,
    ticket_id: String,
    since: Option<i64>,
) -> Result<ChatMessagesResponse, String> {
    chat::fetch_messages(&base_url, &token, &ticket_id, since).await
}
/// Tauri command: sends a chat message (with optional attachments) to the
/// given ticket's conversation.
#[tauri::command]
async fn send_chat_message(
    base_url: String,
    token: String,
    ticket_id: String,
    body: String,
    attachments: Option<Vec<chat::AttachmentPayload>>,
) -> Result<SendMessageResponse, String> {
    chat::send_message(&base_url, &token, &ticket_id, &body, attachments).await
}
/// Tauri command: marks the given messages of a ticket as read on the backend.
/// An empty id list is a no-op and succeeds without any network call.
#[tauri::command]
async fn mark_chat_messages_read(
    base_url: String,
    token: String,
    ticket_id: String,
    message_ids: Vec<String>,
) -> Result<(), String> {
    if !message_ids.is_empty() {
        chat::mark_messages_read(&base_url, &token, &ticket_id, &message_ids).await
    } else {
        Ok(())
    }
}
/// Tauri command: uploads a local file as a chat attachment.
///
/// Reads the file at `file_path`, validates name/size against the chat rules,
/// requests a presigned upload URL from the backend and pushes the bytes to
/// it. Returns the attachment payload (storage id + metadata) on success.
#[tauri::command]
async fn upload_chat_file(
    base_url: String,
    token: String,
    file_path: String,
) -> Result<chat::AttachmentPayload, String> {
    use std::path::Path;
    // Derive the display name from the path before touching the filesystem.
    let name = Path::new(&file_path)
        .file_name()
        .and_then(|n| n.to_str())
        .map(str::to_owned)
        .ok_or("Nome de arquivo inválido")?;
    let bytes = std::fs::read(&file_path)
        .map_err(|e| format!("Falha ao ler arquivo: {e}"))?;
    let size = bytes.len() as u64;
    // Reject disallowed names / oversized files before doing any network work.
    chat::is_allowed_file(&name, size)?;
    let mime = chat::get_mime_type(&name);
    // Ask the backend for a presigned URL, then push the raw bytes to it.
    let upload_url = chat::generate_upload_url(&base_url, &token, &name, &mime, size).await?;
    let storage_id = chat::upload_file(&upload_url, bytes, &mime).await?;
    Ok(chat::AttachmentPayload {
        storage_id,
        name,
        size: Some(size),
        mime_type: Some(mime),
    })
}
/// Tauri command: opens (or focuses) the chat window for a ticket.
///
/// The actual window creation runs on a blocking thread via
/// `spawn_blocking`; join failures are surfaced as a string error.
#[tauri::command]
async fn open_chat_window(app: tauri::AppHandle, ticket_id: String, ticket_ref: u64) -> Result<(), String> {
    log_info!("[CMD] open_chat_window called: ticket_id={}, ticket_ref={}", ticket_id, ticket_ref);
    let app_handle = app.clone();
    let ticket_id_for_task = ticket_id.clone();
    let result = tauri::async_runtime::spawn_blocking(move || {
        chat::open_chat_window(&app_handle, &ticket_id_for_task, ticket_ref)
    })
    .await
    .map_err(|err| format!("Falha ao abrir chat (join): {err}"))?;
    log_info!("[CMD] open_chat_window result: {:?}", result);
    result
}
/// Tauri command: closes the chat window associated with a ticket.
#[tauri::command]
fn close_chat_window(app: tauri::AppHandle, ticket_id: String) -> Result<(), String> {
    chat::close_chat_window(&app, &ticket_id)
}
/// Tauri command: minimizes the chat window associated with a ticket.
#[tauri::command]
fn minimize_chat_window(app: tauri::AppHandle, ticket_id: String) -> Result<(), String> {
    chat::minimize_chat_window(&app, &ticket_id)
}
/// Tauri command: sets the minimized state of a ticket's chat window.
#[tauri::command]
fn set_chat_minimized(app: tauri::AppHandle, ticket_id: String, minimized: bool) -> Result<(), String> {
    chat::set_chat_minimized(&app, &ticket_id, minimized)
}
/// Tauri command: opens the chat hub window (multi-session overview).
/// Window creation runs on a blocking thread; join failures become errors.
#[tauri::command]
async fn open_hub_window(app: tauri::AppHandle) -> Result<(), String> {
    let app_handle = app.clone();
    tauri::async_runtime::spawn_blocking(move || {
        chat::open_hub_window(&app_handle)
    })
    .await
    .map_err(|err| format!("Falha ao abrir hub (join): {err}"))?
}
/// Tauri command: closes the chat hub window.
#[tauri::command]
fn close_hub_window(app: tauri::AppHandle) -> Result<(), String> {
    chat::close_hub_window(&app)
}
/// Tauri command: sets the minimized state of the chat hub window.
#[tauri::command]
fn set_hub_minimized(app: tauri::AppHandle, minimized: bool) -> Result<(), String> {
    chat::set_hub_minimized(&app, minimized)
}
// ============================================================================
// Handler de Deep Link (raven://)
// ============================================================================
/// Shows the main window, focuses it and emits `event` with a
/// `{ "token": ... }` payload so the frontend can navigate.
/// Shared by the "ticket" and "rate" deep-link routes (they were duplicated).
fn show_main_and_emit_token(app: &tauri::AppHandle, event: &str, token: &str) {
    if let Some(window) = app.get_webview_window("main") {
        let _ = window.show();
        let _ = window.set_focus();
        let _ = app.emit(event, serde_json::json!({
            "token": token
        }));
    }
}

/// Handles `raven://` protocol URLs.
/// Supported formats:
/// - raven://ticket/{token} - opens the ticket view
/// - raven://chat/{ticketId}?token={token} - opens the ticket's chat window
/// - raven://rate/{token} - opens the ticket rating view
fn handle_deep_link(app: &tauri::AppHandle, url: &str) {
    log_info!("Processando deep link: {url}");
    // Strip the raven:// prefix and split the remaining path.
    let path = url.trim_start_matches("raven://");
    let parts: Vec<&str> = path.split('/').collect();
    // Defensive: `split` always yields at least one element, but keep the
    // guard so a future parsing change cannot panic on `parts[0]`.
    if parts.is_empty() {
        log_warn!("Deep link invalido: path vazio");
        return;
    }
    match parts[0] {
        "ticket" => {
            if parts.len() > 1 {
                // Drop any query string after the token.
                let token = parts[1].split('?').next().unwrap_or(parts[1]);
                log_info!("Abrindo ticket com token: {token}");
                show_main_and_emit_token(app, "raven://deep-link/ticket", token);
            }
        }
        "chat" => {
            if parts.len() > 1 {
                let ticket_id = parts[1].split('?').next().unwrap_or(parts[1]);
                log_info!("Abrindo chat do ticket: {ticket_id}");
                // Open the chat window (ticket_ref 0 when coming from a deep link).
                if let Err(e) = chat::open_chat_window(app, ticket_id, 0) {
                    log_error!("Falha ao abrir chat: {e}");
                }
            }
        }
        "rate" => {
            if parts.len() > 1 {
                let token = parts[1].split('?').next().unwrap_or(parts[1]);
                log_info!("Abrindo avaliacao com token: {token}");
                show_main_and_emit_token(app, "raven://deep-link/rate", token);
            }
        }
        _ => {
            log_warn!("Deep link desconhecido: {path}");
        }
    }
}
/// Application entry point: builds the Tauri app, registers plugins, tray,
/// deep-link handling, background agent startup and all IPC command handlers.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    tauri::Builder::default()
        .manage(AgentRuntime::new())
        .manage(ChatRuntime::new())
        .plugin(tauri_plugin_dialog::init())
        .plugin(tauri_plugin_opener::init())
        .plugin(StorePluginBuilder::default().build())
        .plugin(tauri_plugin_updater::Builder::new().build())
        .plugin(tauri_plugin_process::init())
        .plugin(tauri_plugin_notification::init())
        .plugin(tauri_plugin_deep_link::init())
        .plugin(tauri_plugin_single_instance::init(|app, _argv, _cwd| {
            // When a second instance tries to start, focus the existing window.
            if let Some(window) = app.get_webview_window("main") {
                let _ = window.show();
                let _ = window.unminimize();
                let _ = window.set_focus();
            }
        }))
        .on_window_event(|window, event| {
            // Closing the window only hides it; the app keeps running (tray).
            if let WindowEvent::CloseRequested { api, .. } = event {
                api.prevent_close();
                let _ = window.hide();
            }
        })
        .setup(|app| {
            // Initialize the logging system first so later steps can log.
            if let Err(e) = init_agent_logging() {
                eprintln!("[raven] Falha ao inicializar logging: {e}");
            }
            log_info!("Raven iniciando...");
            // Register the raven:// deep-link handler.
            #[cfg(desktop)]
            {
                let handle = app.handle().clone();
                app.listen("deep-link://new-url", move |event| {
                    let urls = event.payload();
                    log_info!("Deep link recebido: {urls}");
                    handle_deep_link(&handle, urls);
                });
            }
            #[cfg(target_os = "windows")]
            {
                let start_in_background = std::env::args().any(|arg| arg == "--background");
                setup_raven_autostart();
                setup_tray(app.handle())?;
                if start_in_background {
                    if let Some(win) = app.get_webview_window("main") {
                        let _ = win.hide();
                    }
                }
                // Try to start the agent and chat in the background when
                // saved credentials exist.
                let app_handle = app.handle().clone();
                let agent_runtime = app.state::<AgentRuntime>().inner().clone();
                let chat_runtime = app.state::<ChatRuntime>().inner().clone();
                tauri::async_runtime::spawn(async move {
                    // Give the app a moment to stabilize before network work.
                    tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
                    if let Err(e) = try_start_background_agent(&app_handle, agent_runtime, chat_runtime).await {
                        log_warn!("Agente nao iniciado em background: {e}");
                    }
                });
            }
            Ok(())
        })
        .invoke_handler(tauri::generate_handler![
            collect_machine_profile,
            collect_machine_inventory,
            start_machine_agent,
            stop_machine_agent,
            open_devtools,
            log_app_event,
            ensure_rustdesk_and_emit,
            apply_usb_policy,
            get_usb_policy,
            refresh_usb_policy,
            // Chat commands
            start_chat_polling,
            stop_chat_polling,
            is_chat_using_realtime,
            get_chat_sessions,
            fetch_chat_sessions,
            fetch_chat_messages,
            send_chat_message,
            mark_chat_messages_read,
            upload_chat_file,
            open_chat_window,
            close_chat_window,
            minimize_chat_window,
            set_chat_minimized,
            // Hub commands
            open_hub_window,
            close_hub_window,
            set_hub_minimized
            // NOTE(review): the next line repeats `ensure_rustdesk_and_emit`
            // (already registered above) and is not separated from
            // `set_hub_minimized` by a comma. This looks like merge/diff
            // residue and will not compile as-is — confirm the intended
            // handler list and remove the duplicate.
            ensure_rustdesk_and_emit
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
/// Registers Raven in HKCU\Software\Microsoft\Windows\CurrentVersion\Run so
/// it starts with Windows, passing `--background` so it boots hidden, then
/// reads the value back to verify the registry write actually stuck.
///
/// All failures are logged and swallowed: autostart is best-effort.
#[cfg(target_os = "windows")]
fn setup_raven_autostart() {
    let exe = match std::env::current_exe() {
        Ok(path) => path,
        Err(e) => {
            log_error!("Falha ao obter caminho do executavel: {e}");
            return;
        }
    };
    // Quote the path and add --background to mark an automatic start.
    let run_value = format!("\"{}\" --background", exe.display());
    let run_key = match RegKey::predef(HKEY_CURRENT_USER)
        .create_subkey(r"Software\Microsoft\Windows\CurrentVersion\Run")
    {
        Ok((key, _)) => key,
        Err(e) => {
            log_error!("Falha ao criar/abrir chave de registro Run: {e}");
            return;
        }
    };
    if let Err(e) = run_key.set_value("Raven", &run_value) {
        log_error!("Falha ao definir valor de auto-start no registro: {e}");
        return;
    }
    log_info!("Auto-start configurado: {run_value}");
    // Read the value back so a silent registry failure shows up in the logs.
    match run_key.get_value::<String, _>("Raven") {
        Ok(stored) if stored == run_value => {
            log_info!("Auto-start validado: entrada existe no registro");
        }
        Ok(stored) => {
            log_warn!("Auto-start: valor difere. Esperado: {run_value}, Salvo: {stored}");
        }
        Err(e) => {
            log_warn!("Auto-start: nao foi possivel validar entrada: {e}");
        }
    }
}
/// Creates the system-tray icon and its menu (Show / Open Chat / Quit).
///
/// - "show": restores the main window and reopens the first active chat
///   session, if any.
/// - "chat": opens the hub when multiple sessions exist, otherwise the single
///   session's chat window.
/// - "quit": exits the process.
/// Double-clicking the tray icon behaves like "show".
#[cfg(target_os = "windows")]
fn setup_tray(app: &tauri::AppHandle) -> tauri::Result<()> {
    let show_item = MenuItemBuilder::with_id("show", "Mostrar").build(app)?;
    let chat_item = MenuItemBuilder::with_id("chat", "Abrir Chat").build(app)?;
    let quit_item = MenuItemBuilder::with_id("quit", "Sair").build(app)?;
    let menu = MenuBuilder::new(app)
        .items(&[&show_item, &chat_item, &quit_item])
        .build()?;
    let mut builder = TrayIconBuilder::new()
        .menu(&menu)
        .on_menu_event(|tray, event| {
            match event.id().as_ref() {
                "show" => {
                    if let Some(win) = tray.app_handle().get_webview_window("main") {
                        let _ = win.show();
                        let _ = win.set_focus();
                    }
                    // Reopen the chat window when there is an active session.
                    if let Some(chat_runtime) = tray.app_handle().try_state::<ChatRuntime>() {
                        let sessions = chat_runtime.get_sessions();
                        if let Some(session) = sessions.first() {
                            let _ = chat::open_chat_window(tray.app_handle(), &session.ticket_id, session.ticket_ref);
                        }
                    }
                }
                "chat" => {
                    // Open a chat window when there is an active session.
                    if let Some(chat_runtime) = tray.app_handle().try_state::<ChatRuntime>() {
                        let sessions = chat_runtime.get_sessions();
                        if sessions.len() > 1 {
                            // Multiple sessions - open the hub overview.
                            if let Err(e) = chat::open_hub_window(tray.app_handle()) {
                                log_error!("Falha ao abrir hub de chat: {e}");
                            }
                        } else if let Some(session) = sessions.first() {
                            // Single session - open it directly.
                            if let Err(e) = chat::open_chat_window(tray.app_handle(), &session.ticket_id, session.ticket_ref) {
                                log_error!("Falha ao abrir janela de chat: {e}");
                            }
                        }
                    }
                }
                "quit" => {
                    tray.app_handle().exit(0);
                }
                _ => {}
            }
        })
        .on_tray_icon_event(|tray, event| {
            // Double-click on the tray icon restores the main window.
            if let tauri::tray::TrayIconEvent::DoubleClick { .. } = event {
                if let Some(win) = tray.app_handle().get_webview_window("main") {
                    let _ = win.show();
                    let _ = win.set_focus();
                }
                // Reopen the chat window when there is an active session.
                if let Some(chat_runtime) = tray.app_handle().try_state::<ChatRuntime>() {
                    let sessions = chat_runtime.get_sessions();
                    if let Some(session) = sessions.first() {
                        let _ = chat::open_chat_window(tray.app_handle(), &session.ticket_id, session.ticket_ref);
                    }
                }
            }
        });
    if let Some(icon) = app.default_window_icon() {
        builder = builder.icon(icon.clone());
    }
    builder = builder.tooltip("Raven");
    builder.build(app)?;
    Ok(())
}
/// Attempts to start the machine agent (heartbeat) and chat runtime from
/// credentials persisted in `machine-agent.json` under the app-local data dir.
///
/// Returns an error string when no configuration exists, the file cannot be
/// read/parsed, the token is missing/empty, or the heartbeat fails to start.
/// A chat startup failure is only logged — the agent keeps running.
#[cfg(target_os = "windows")]
async fn try_start_background_agent(
    app: &tauri::AppHandle,
    agent_runtime: AgentRuntime,
    chat_runtime: ChatRuntime,
) -> Result<(), String> {
    log_info!("Verificando credenciais salvas para iniciar agente...");
    let app_data = app
        .path()
        .app_local_data_dir()
        .map_err(|e| format!("Falha ao obter diretorio de dados: {e}"))?;
    let store_path = app_data.join("machine-agent.json");
    if !store_path.exists() {
        return Err("Nenhuma configuracao encontrada".to_string());
    }
    // Read the JSON store file directly (bypasses the Tauri store plugin).
    let content = std::fs::read_to_string(&store_path)
        .map_err(|e| format!("Falha ao ler machine-agent.json: {e}"))?;
    let data: serde_json::Value = serde_json::from_str(&content)
        .map_err(|e| format!("Falha ao parsear machine-agent.json: {e}"))?;
    let token = data
        .get("token")
        .and_then(|v| v.as_str())
        .filter(|t| !t.is_empty())
        .ok_or("Token nao encontrado ou vazio")?;
    let config = data.get("config");
    // NOTE(review): hard-coded production fallback URL — confirm this is the
    // intended default for builds pointed at other environments.
    let api_base_url = config
        .and_then(|c| c.get("apiBaseUrl"))
        .and_then(|v| v.as_str())
        .unwrap_or("https://tickets.esdrasrenan.com.br");
    let convex_url = config
        .and_then(|c| c.get("convexUrl"))
        .and_then(|v| v.as_str())
        .unwrap_or(DEFAULT_CONVEX_URL);
    // Heartbeat interval defaults to 300 seconds when absent from the config.
    let interval = config
        .and_then(|c| c.get("heartbeatIntervalSec"))
        .and_then(|v| v.as_u64())
        .unwrap_or(300);
    log_info!(
        "Iniciando agente em background: url={}, interval={}s",
        api_base_url,
        interval
    );
    agent_runtime
        .start_heartbeat(
            api_base_url.to_string(),
            token.to_string(),
            Some("online".to_string()),
            Some(interval),
        )
        .map_err(|e| format!("Falha ao iniciar heartbeat: {e}"))?;
    // Start the chat system (WebSocket with HTTP-polling fallback).
    if let Err(e) =
        chat_runtime.start_polling(api_base_url.to_string(), convex_url.to_string(), token.to_string(), app.clone())
    {
        log_warn!("Falha ao iniciar chat em background: {e}");
    } else {
        log_info!("Chat iniciado com sucesso (Convex WebSocket)");
    }
    log_info!("Agente iniciado com sucesso em background");
    Ok(())
}

View file

@ -1,3 +1,5 @@
#![cfg(target_os = "windows")]
use crate::RustdeskProvisioningResult;
use chrono::{Local, Utc};
use once_cell::sync::Lazy;
@ -28,9 +30,7 @@ const LOCAL_SERVICE_CONFIG: &str = r"C:\\Windows\\ServiceProfiles\\LocalService\
const LOCAL_SYSTEM_CONFIG: &str = r"C:\\Windows\\System32\\config\\systemprofile\\AppData\\Roaming\\RustDesk\\config";
const APP_IDENTIFIER: &str = "br.com.esdrasrenan.sistemadechamados";
const MACHINE_STORE_FILENAME: &str = "machine-agent.json";
#[allow(dead_code)]
const ACL_FLAG_FILENAME: &str = "rustdesk_acl_unlocked.flag";
#[allow(dead_code)]
const RUSTDESK_ACL_STORE_KEY: &str = "rustdeskAclUnlockedAt";
const SECURITY_VERIFICATION_VALUE: &str = "use-permanent-password";
const SECURITY_APPROVE_MODE_VALUE: &str = "password";
@ -77,27 +77,6 @@ struct ReleaseResponse {
assets: Vec<ReleaseAsset>,
}
/// Helper: derives and applies a deterministic RustDesk custom ID from the
/// given machine id.
///
/// Returns the applied ID on success; returns `None` (after logging) when
/// `machine_id` is absent/blank or `set_custom_id` fails.
fn define_custom_id_from_machine(exe_path: &Path, machine_id: Option<&str>) -> Option<String> {
    // Treat a whitespace-only machine id the same as a missing one.
    if let Some(value) = machine_id.and_then(|raw| {
        let trimmed = raw.trim();
        if trimmed.is_empty() { None } else { Some(trimmed) }
    }) {
        match set_custom_id(exe_path, value) {
            Ok(custom) => {
                log_event(format!("ID determinístico definido: {custom}"));
                Some(custom)
            }
            Err(error) => {
                log_event(format!("Falha ao definir ID determinístico: {error}"));
                None
            }
        }
    } else {
        None
    }
}
pub fn ensure_rustdesk(
config_string: Option<&str>,
password_override: Option<&str>,
@ -107,18 +86,11 @@ pub fn ensure_rustdesk(
log_event("Iniciando preparo do RustDesk");
if let Err(error) = ensure_service_profiles_writable_preflight() {
log_event(format!(
log_event(&format!(
"Aviso: não foi possível preparar ACL dos perfis do serviço ({error}). Continuando mesmo assim; o serviço pode não aplicar a senha."
));
}
// IMPORTANTE: Ler o ID existente ANTES de qualquer limpeza
// Isso preserva o ID quando o Raven é reinstalado mas o RustDesk permanece
let preserved_remote_id = read_remote_id_from_profiles();
if let Some(ref id) = preserved_remote_id {
log_event(format!("ID existente preservado antes da limpeza: {}", id));
}
let exe_path = detect_executable_path();
let (installed_version, freshly_installed) = ensure_installed(&exe_path)?;
log_event(if freshly_installed {
@ -129,22 +101,16 @@ pub fn ensure_rustdesk(
match stop_rustdesk_processes() {
Ok(_) => log_event("Instâncias existentes do RustDesk encerradas"),
Err(error) => log_event(format!(
Err(error) => log_event(&format!(
"Aviso: não foi possível parar completamente o RustDesk antes da reprovisionamento ({error})"
)),
}
// So limpa perfis se for instalacao fresca (RustDesk nao existia)
// Se ja existia, preservamos o ID para manter consistencia
if freshly_installed {
match purge_existing_rustdesk_profiles() {
Ok(_) => log_event("Configurações antigas do RustDesk limpas (instalação fresca)"),
Err(error) => log_event(format!(
"Aviso: não foi possível limpar completamente os perfis existentes do RustDesk ({error})"
)),
}
} else {
log_event("Mantendo perfis existentes do RustDesk (preservando ID)");
match purge_existing_rustdesk_profiles() {
Ok(_) => log_event("Configurações antigas do RustDesk limpas antes da reaplicação"),
Err(error) => log_event(&format!(
"Aviso: não foi possível limpar completamente os perfis existentes do RustDesk ({error})"
)),
}
if let Some(value) = config_string.and_then(|raw| {
@ -152,19 +118,19 @@ pub fn ensure_rustdesk(
if trimmed.is_empty() { None } else { Some(trimmed) }
}) {
if let Err(error) = run_with_args(&exe_path, &["--config", value]) {
log_event(format!("Falha ao aplicar configuração inline: {error}"));
log_event(&format!("Falha ao aplicar configuração inline: {error}"));
} else {
log_event("Configuração aplicada via --config");
}
} else {
let config_path = write_config_files()?;
log_event(format!(
log_event(&format!(
"Arquivo de configuração atualizado em {}",
config_path.display()
));
if let Err(error) = apply_config(&exe_path, &config_path) {
log_event(format!("Falha ao aplicar configuração via CLI: {error}"));
log_event(&format!("Falha ao aplicar configuração via CLI: {error}"));
} else {
log_event("Configuração aplicada via CLI");
}
@ -176,7 +142,7 @@ pub fn ensure_rustdesk(
.unwrap_or_else(|| DEFAULT_PASSWORD.to_string());
if let Err(error) = set_password(&exe_path, &password) {
log_event(format!("Falha ao definir senha padrão: {error}"));
log_event(&format!("Falha ao definir senha padrão: {error}"));
} else {
log_event("Senha padrão definida com sucesso");
log_event("Aplicando senha nos perfis do RustDesk");
@ -185,41 +151,44 @@ pub fn ensure_rustdesk(
log_event("Senha e flags de segurança gravadas em todos os perfis do RustDesk");
log_password_replication(&password);
}
Err(error) => log_event(format!("Falha ao persistir senha nos perfis: {error}")),
Err(error) => log_event(&format!("Falha ao persistir senha nos perfis: {error}")),
}
match propagate_password_profile() {
Ok(_) => log_event("Perfil base propagado para ProgramData e perfis de serviço"),
Err(error) => log_event(format!("Falha ao copiar perfil de senha: {error}")),
Err(error) => log_event(&format!("Falha ao copiar perfil de senha: {error}")),
}
match replicate_password_artifacts() {
Ok(_) => log_event("Artefatos de senha replicados para o serviço do RustDesk"),
Err(error) => log_event(format!("Falha ao replicar artefatos de senha: {error}")),
Err(error) => log_event(&format!("Falha ao replicar artefatos de senha: {error}")),
}
if let Err(error) = enforce_security_flags() {
log_event(format!("Falha ao reforçar configuração de senha permanente: {error}"));
log_event(&format!("Falha ao reforçar configuração de senha permanente: {error}"));
}
}
// Se ja existe um ID preservado E o RustDesk nao foi recem-instalado, usa o ID existente
// Isso garante que reinstalar o Raven nao muda o ID do RustDesk
let custom_id = if let Some(ref existing_id) = preserved_remote_id {
if !freshly_installed {
log_event(format!("Reutilizando ID existente do RustDesk: {}", existing_id));
Some(existing_id.clone())
} else {
// Instalacao fresca - define novo ID baseado no machine_id
define_custom_id_from_machine(&exe_path, machine_id)
let custom_id = if let Some(value) = machine_id.and_then(|raw| {
let trimmed = raw.trim();
if trimmed.is_empty() { None } else { Some(trimmed) }
}) {
match set_custom_id(&exe_path, value) {
Ok(custom) => {
log_event(&format!("ID determinístico definido: {custom}"));
Some(custom)
}
Err(error) => {
log_event(&format!("Falha ao definir ID determinístico: {error}"));
None
}
}
} else {
// Sem ID preservado - define novo ID baseado no machine_id
define_custom_id_from_machine(&exe_path, machine_id)
None
};
if let Err(error) = ensure_service_running(&exe_path) {
log_event(format!("Falha ao reiniciar serviço do RustDesk: {error}"));
log_event(&format!("Falha ao reiniciar serviço do RustDesk: {error}"));
} else {
log_event("Serviço RustDesk reiniciado/run ativo");
}
@ -227,10 +196,10 @@ pub fn ensure_rustdesk(
let reported_id = match query_id_with_retries(&exe_path, 5) {
Ok(value) => value,
Err(error) => {
log_event(format!("Falha ao obter ID após múltiplas tentativas: {error}"));
log_event(&format!("Falha ao obter ID após múltiplas tentativas: {error}"));
match read_remote_id_from_profiles().or_else(|| custom_id.clone()) {
Some(value) => {
log_event(format!("ID obtido via arquivos de perfil: {value}"));
log_event(&format!("ID obtido via arquivos de perfil: {value}"));
value
}
None => return Err(error),
@ -238,105 +207,26 @@ pub fn ensure_rustdesk(
}
};
let mut final_id = reported_id.clone();
if let Some(expected) = custom_id.as_ref() {
if expected != &reported_id {
log_event(format!(
"ID retornado difere do determinístico ({expected}) -> reaplicando ID determinístico"
log_event(&format!(
"ID retornado difere do determinístico ({expected}) -> aplicando {reported_id}"
));
let mut enforced = false;
match set_custom_id(&exe_path, expected) {
Ok(_) => match query_id_with_retries(&exe_path, 3) {
Ok(rechecked) => {
if &rechecked == expected {
log_event(format!("ID determinístico aplicado com sucesso: {rechecked}"));
final_id = rechecked;
enforced = true;
} else {
log_event(format!(
"ID ainda difere após reaplicação (esperado {expected}, reportado {rechecked}); usando ID reportado"
));
final_id = rechecked;
}
}
Err(error) => {
log_event(format!(
"Falha ao consultar ID após reaplicação: {error}; usando ID reportado ({reported_id})"
));
final_id = reported_id.clone();
}
},
Err(error) => {
log_event(format!(
"Falha ao reaplicar ID determinístico ({expected}): {error}; usando ID reportado ({reported_id})"
));
final_id = reported_id.clone();
}
}
if !enforced && final_id != *expected {
log_event("Aviso: não foi possível aplicar o ID determinístico; manteremos o ID real fornecido pelo serviço");
}
}
}
ensure_remote_id_files(&final_id);
ensure_remote_id_files(&reported_id);
let version = query_version(&exe_path).ok().or(installed_version);
let last_provisioned_at = Utc::now().timestamp_millis();
let result = RustdeskProvisioningResult {
id: final_id.clone(),
id: reported_id.clone(),
password: password.clone(),
installed_version: version.clone(),
updated: freshly_installed,
last_provisioned_at,
last_provisioned_at: Utc::now().timestamp_millis(),
};
// Salva os dados do RustDesk diretamente no arquivo machine-agent.json
// para evitar conflitos com o Tauri Store do TypeScript
let rustdesk_data = serde_json::json!({
"id": final_id,
"password": password,
"installedVersion": version,
"updated": freshly_installed,
"lastProvisionedAt": last_provisioned_at,
"lastSyncedAt": serde_json::Value::Null,
"lastError": serde_json::Value::Null
});
if let Err(error) = upsert_machine_store_value("rustdesk", rustdesk_data) {
log_event(format!("Aviso: falha ao salvar dados do RustDesk no store: {error}"));
} else {
log_event("Dados do RustDesk salvos no machine-agent.json");
}
// Sincroniza com o backend imediatamente apos provisionar
// O Rust faz o HTTP direto, sem passar pelo CSP do webview
if let Err(error) = sync_remote_access_with_backend(&result) {
log_event(format!("Aviso: falha ao sincronizar com backend: {error}"));
} else {
log_event("Acesso remoto sincronizado com backend");
// Atualiza lastSyncedAt no store
let synced_data = serde_json::json!({
"id": final_id,
"password": password,
"installedVersion": version,
"updated": freshly_installed,
"lastProvisionedAt": last_provisioned_at,
"lastSyncedAt": Utc::now().timestamp_millis(),
"lastError": serde_json::Value::Null
});
if let Err(e) = upsert_machine_store_value("rustdesk", synced_data) {
log_event(format!("Aviso: falha ao atualizar lastSyncedAt: {e}"));
} else {
log_event("lastSyncedAt atualizado com sucesso");
}
}
log_event(format!("Provisionamento concluído. ID final: {final_id}. Versão: {:?}", version));
log_event(&format!("Provisionamento concluído. ID final: {reported_id}. Versão: {:?}", version));
Ok(result)
}
@ -403,7 +293,7 @@ fn write_config_files() -> Result<PathBuf, RustdeskError> {
let config_contents = build_config_contents();
let main_path = program_data_config_dir().join("RustDesk2.toml");
write_file(&main_path, &config_contents)?;
log_event(format!(
log_event(&format!(
"Config principal gravada em {}",
main_path.display()
));
@ -412,7 +302,7 @@ fn write_config_files() -> Result<PathBuf, RustdeskError> {
for service_dir in service_profile_dirs() {
let service_profile = service_dir.join("RustDesk2.toml");
if let Err(error) = write_file(&service_profile, &config_contents) {
log_event(format!(
log_event(&format!(
"Falha ao gravar config no perfil do serviço ({}): {error}",
service_profile.display()
));
@ -421,7 +311,7 @@ fn write_config_files() -> Result<PathBuf, RustdeskError> {
if let Some(appdata_path) = user_appdata_config_path("RustDesk2.toml") {
if let Err(error) = write_file(&appdata_path, &config_contents) {
log_event(format!(
log_event(&format!(
"Falha ao atualizar config no AppData do usuário: {error}"
));
}
@ -515,12 +405,6 @@ fn derive_numeric_id(machine_id: &str) -> String {
fn ensure_service_running(exe_path: &Path) -> Result<(), RustdeskError> {
ensure_service_installed(exe_path)?;
if let Err(error) = configure_service_startup() {
log_event(format!(
"Aviso: não foi possível reforçar autostart/recuperação do serviço RustDesk: {error}"
));
}
fn start_sequence() -> Result<(), RustdeskError> {
let _ = run_sc(&["stop", SERVICE_NAME]);
thread::sleep(Duration::from_secs(2));
@ -528,7 +412,7 @@ fn ensure_service_running(exe_path: &Path) -> Result<(), RustdeskError> {
run_sc(&["start", SERVICE_NAME])
}
let _ = match start_sequence() {
match start_sequence() {
Ok(_) => Ok(()),
Err(RustdeskError::CommandFailed { command: _, status: Some(5), .. }) => {
log_event("SC retornou acesso negado; tentando ajustar ACL dos perfis do serviço...");
@ -537,128 +421,15 @@ fn ensure_service_running(exe_path: &Path) -> Result<(), RustdeskError> {
status: Some(5),
})?;
let _ = run_sc(&["stop", SERVICE_NAME]);
let _ = start_sequence();
Ok(())
start_sequence().or_else(|_| Ok(()))
}
Err(error) => Err(error),
};
remove_rustdesk_autorun_artifacts();
// Revalida se o serviço realmente subiu; se não, reinstala e tenta novamente.
match query_service_state() {
Some(state) if state.eq_ignore_ascii_case("running") => Ok(()),
_ => {
log_event("Serviço RustDesk não está em execução após tentativa de start; reaplicando --install-service e start");
let _ = run_with_args(exe_path, &["--install-service"]);
let _ = run_sc(&["config", SERVICE_NAME, &format!("start= {}", "auto")]);
if let Err(error) = start_sequence() {
log_event(format!(
"Falha ao subir o serviço RustDesk mesmo após reinstalação: {error}"
));
}
Ok(())
}
}
}
/// Configures the RustDesk Windows service for automatic startup and
/// self-recovery via `sc.exe`.
///
/// `sc.exe` requires a space after `option=` (e.g. `start= auto`), hence the
/// odd-looking format strings. Failure-action errors are logged but not
/// propagated — autostart (`sc config`) is the only hard requirement.
fn configure_service_startup() -> Result<(), RustdeskError> {
    let start_arg = format!("start= {}", "auto");
    run_sc(&["config", SERVICE_NAME, &start_arg])?;
    // Restart up to three times, 5s apart; failure counter resets after 24h.
    let reset_arg = format!("reset= {}", "86400");
    let actions_arg = "actions= restart/5000/restart/5000/restart/5000";
    let failure_actions_applied = run_sc(&["failure", SERVICE_NAME, &reset_arg, actions_arg]).is_ok();
    // Best effort: also trigger recovery on non-crash stops with exit code 0.
    let _ = run_sc(&["failureflag", SERVICE_NAME, "1"]);
    if failure_actions_applied {
        log_event("Serviço RustDesk configurado para reiniciar automaticamente em caso de falha");
    } else {
        log_event("Aviso: não foi possível configurar recuperação automática do serviço RustDesk");
    }
    Ok(())
}
/// Runs `sc query <service>` and reports whether the RustDesk service is
/// "running" or "stopped".
///
/// Returns `None` when `sc` cannot be executed, exits non-zero, or the STATE
/// line is missing / reports any other state (paused, pending, ...).
fn query_service_state() -> Option<String> {
    let output = hidden_command("sc")
        .args(["query", SERVICE_NAME])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let text = String::from_utf8_lossy(&output.stdout);
    // Example line: "        STATE              : 4  RUNNING"
    for line in text.lines() {
        let Some(idx) = line.find("STATE") else { continue };
        let tail = line[idx..].to_lowercase();
        if tail.contains("running") {
            return Some(String::from("running"));
        }
        if tail.contains("stopped") {
            return Some(String::from("stopped"));
        }
    }
    None
}
/// Removes RustDesk's own autorun artifacts (Startup-folder shortcuts and
/// Run registry entries) so the GUI does not pop up on every boot/login —
/// the service, not the tray app, should own startup. Everything here is
/// best-effort: failures are logged (or ignored for `reg delete`).
fn remove_rustdesk_autorun_artifacts() {
    // Remove auto-start shortcuts to avoid opening the GUI at every boot/login.
    let mut startup_paths: Vec<PathBuf> = Vec::new();
    // Per-user Startup folder (%APPDATA%\...\Startup).
    if let Ok(appdata) = env::var("APPDATA") {
        startup_paths.push(
            Path::new(&appdata)
                .join("Microsoft")
                .join("Windows")
                .join("Start Menu")
                .join("Programs")
                .join("Startup")
                .join("RustDesk.lnk"),
        );
    }
    // All-users Startup folder.
    startup_paths.push(
        Path::new("C:\\ProgramData")
            .join("Microsoft")
            .join("Windows")
            .join("Start Menu")
            .join("Programs")
            .join("Startup")
            .join("RustDesk.lnk"),
    );
    for path in startup_paths {
        if path.exists() {
            match fs::remove_file(&path) {
                Ok(_) => log_event(format!("Atalho de inicialização do RustDesk removido: {}", path.display())),
                Err(error) => log_event(format!(
                    "Falha ao remover atalho de inicialização do RustDesk ({}): {}",
                    path.display(),
                    error
                )),
            }
        }
    }
    // Delete the "RustDesk" value from the Run key in both hives.
    // NOTE(review): this is a *raw* string, so `\\` stays a literal double
    // backslash, producing e.g. `HKCU\\Software\\...` — unlike the single
    // backslashes used elsewhere in this file. Confirm whether reg.exe
    // accepts the doubled separators or whether this was meant to be
    // `r"{}\Software\Microsoft\Windows\CurrentVersion\Run"`.
    for hive in ["HKCU", "HKLM"] {
        let reg_path = format!(r"{}\\Software\\Microsoft\\Windows\\CurrentVersion\\Run", hive);
        let status = hidden_command("reg")
            .args(["delete", &reg_path, "/v", "RustDesk", "/f"])
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status();
        if let Ok(code) = status {
            if code.success() {
                log_event(format!("Entrada de auto-run RustDesk removida de {}", reg_path));
            }
        }
    }
}
fn stop_rustdesk_processes() -> Result<(), RustdeskError> {
if let Err(error) = try_stop_service() {
log_event(format!(
log_event(&format!(
"Não foi possível parar o serviço RustDesk antes da sincronização: {error}"
));
}
@ -774,12 +545,12 @@ fn ensure_remote_id_files(id: &str) {
for dir in remote_id_directories() {
let path = dir.join("RustDesk_local.toml");
match write_remote_id_value(&path, id) {
Ok(_) => log_event(format!(
Ok(_) => log_event(&format!(
"remote_id atualizado para {} em {}",
id,
path.display()
)),
Err(error) => log_event(format!(
Err(error) => log_event(&format!(
"Falha ao atualizar remote_id em {}: {error}",
path.display()
)),
@ -821,7 +592,7 @@ fn ensure_password_files(secret: &str) -> Result<(), String> {
if let Err(error) = write_toml_kv(&password_path, "password", secret) {
errors.push(format!("{} -> {}", password_path.display(), error));
} else {
log_event(format!(
log_event(&format!(
"Senha escrita via fallback em {}",
password_path.display()
));
@ -829,12 +600,12 @@ fn ensure_password_files(secret: &str) -> Result<(), String> {
let local_path = dir.join("RustDesk_local.toml");
if let Err(error) = write_toml_kv(&local_path, "verification-method", SECURITY_VERIFICATION_VALUE) {
log_event(format!(
log_event(&format!(
"Falha ao ajustar verification-method em {}: {error}",
local_path.display()
));
} else {
log_event(format!(
log_event(&format!(
"verification-method atualizado para {} em {}",
SECURITY_VERIFICATION_VALUE,
local_path.display()
@ -843,19 +614,19 @@ fn ensure_password_files(secret: &str) -> Result<(), String> {
let rustdesk2_path = dir.join("RustDesk2.toml");
if let Err(error) = enforce_security_in_rustdesk2(&rustdesk2_path) {
log_event(format!(
log_event(&format!(
"Falha ao ajustar flags no RustDesk2.toml em {}: {error}",
rustdesk2_path.display()
));
}
if let Err(error) = write_toml_kv(&local_path, "approve-mode", SECURITY_APPROVE_MODE_VALUE) {
log_event(format!(
log_event(&format!(
"Falha ao ajustar approve-mode em {}: {error}",
local_path.display()
));
} else {
log_event(format!(
log_event(&format!(
"approve-mode atualizado para {} em {}",
SECURITY_APPROVE_MODE_VALUE,
local_path.display()
@ -877,7 +648,7 @@ fn enforce_security_flags() -> Result<(), String> {
if let Err(error) = write_toml_kv(&local_path, "verification-method", SECURITY_VERIFICATION_VALUE) {
errors.push(format!("{} -> {}", local_path.display(), error));
} else {
log_event(format!(
log_event(&format!(
"verification-method atualizado para {} em {}",
SECURITY_VERIFICATION_VALUE,
local_path.display()
@ -887,7 +658,7 @@ fn enforce_security_flags() -> Result<(), String> {
if let Err(error) = write_toml_kv(&local_path, "approve-mode", SECURITY_APPROVE_MODE_VALUE) {
errors.push(format!("{} -> {}", local_path.display(), error));
} else {
log_event(format!(
log_event(&format!(
"approve-mode atualizado para {} em {}",
SECURITY_APPROVE_MODE_VALUE,
local_path.display()
@ -921,7 +692,7 @@ fn propagate_password_profile() -> io::Result<bool> {
if !src_path.exists() {
continue;
}
log_event(format!(
log_event(&format!(
"Copiando {} para ProgramData/serviços",
src_path.display()
));
@ -929,7 +700,7 @@ fn propagate_password_profile() -> io::Result<bool> {
for dest_root in propagation_destinations() {
let target_path = dest_root.join(filename);
copy_overwrite(&src_path, &target_path)?;
log_event(format!(
log_event(&format!(
"{} propagado para {}",
filename,
target_path.display()
@ -969,7 +740,7 @@ fn replicate_password_artifacts() -> io::Result<()> {
let target_path = dest.join(name);
copy_overwrite(&source_path, &target_path)?;
log_event(format!(
log_event(&format!(
"Artefato de senha {name} replicado para {}",
target_path.display()
));
@ -981,11 +752,13 @@ fn replicate_password_artifacts() -> io::Result<()> {
fn purge_existing_rustdesk_profiles() -> Result<(), String> {
let mut errors = Vec::new();
let mut cleaned_any = false;
for dir in remote_id_directories() {
match purge_config_dir(&dir) {
Ok(true) => {
log_event(format!(
cleaned_any = true;
log_event(&format!(
"Perfis antigos removidos em {}",
dir.display()
));
@ -995,7 +768,9 @@ fn purge_existing_rustdesk_profiles() -> Result<(), String> {
}
}
if errors.is_empty() {
if cleaned_any {
Ok(())
} else if errors.is_empty() {
Ok(())
} else {
Err(errors.join(" | "))
@ -1026,7 +801,6 @@ fn purge_config_dir(dir: &Path) -> Result<bool, io::Error> {
Ok(removed)
}
#[allow(dead_code)]
fn run_powershell_elevated(script: &str) -> Result<(), String> {
let temp_dir = env::temp_dir();
let payload = temp_dir.join("raven_payload.ps1");
@ -1074,7 +848,6 @@ exit $process.ExitCode
Err(format!("elevated ps exit {:?}", status.code()))
}
#[allow(dead_code)]
fn fix_profile_acl(target: &Path) -> Result<(), String> {
let target_str = target.display().to_string();
let transcript = env::temp_dir().join("raven_acl_ps.log");
@ -1109,7 +882,7 @@ try {{
let result = run_powershell_elevated(&script);
if result.is_err() {
if let Ok(content) = fs::read_to_string(&transcript) {
log_event(format!(
log_event(&format!(
"ACL transcript para {}:\n{}",
target.display(), content
));
@ -1120,9 +893,6 @@ try {{
}
fn ensure_service_profiles_writable_preflight() -> Result<(), String> {
// Verificamos se os diretorios de perfil sao graváveis
// Se nao forem, apenas logamos aviso - o Raven Service deve lidar com isso
// Nao usamos elevacao para evitar UAC adicional
let mut blocked_dirs = Vec::new();
for dir in service_profile_dirs() {
if !can_write_dir(&dir) {
@ -1134,46 +904,53 @@ fn ensure_service_profiles_writable_preflight() -> Result<(), String> {
return Ok(());
}
// Apenas logamos aviso - o serviço RavenService deve lidar com permissões
log_event(format!(
"Aviso: alguns perfis de serviço não são graváveis: {:?}. O Raven Service deve configurar permissões.",
blocked_dirs.iter().map(|d| d.display().to_string()).collect::<Vec<_>>()
));
if has_acl_unlock_flag() {
log_event("Perfis do serviço voltaram a bloquear escrita; reaplicando correção de ACL");
} else {
log_event("Executando ajuste inicial de ACL dos perfis do serviço (requer UAC)");
}
// Retornamos Ok para não bloquear o fluxo
// O Raven Service, rodando como LocalSystem, pode gravar nesses diretórios
Ok(())
let mut last_error: Option<String> = None;
for dir in blocked_dirs.iter() {
log_event(&format!(
"Tentando corrigir ACL via UAC (preflight) em {}...",
dir.display()
));
if let Err(error) = fix_profile_acl(dir) {
last_error = Some(error);
continue;
}
if can_write_dir(dir) {
log_event(&format!(
"ACL ajustada com sucesso em {}",
dir.display()
));
} else {
last_error = Some(format!(
"continua sem permissão para {} mesmo após preflight",
dir.display()
));
}
}
if blocked_dirs.iter().all(|dir| can_write_dir(dir)) {
mark_acl_unlock_flag();
Ok(())
} else {
Err(last_error.unwrap_or_else(|| "nenhum perfil de serviço acessível".into()))
}
}
fn stop_service_elevated() -> Result<(), String> {
// Tentamos parar o serviço RustDesk sem elevação
// Se falhar, apenas logamos aviso - o Raven Service pode lidar com isso
// Não usamos elevação para evitar UAC adicional
let output = Command::new("sc")
.args(["stop", "RustDesk"])
.output();
match output {
Ok(result) => {
if result.status.success() {
// Aguarda um pouco para o serviço parar
std::thread::sleep(std::time::Duration::from_secs(2));
Ok(())
} else {
let stderr = String::from_utf8_lossy(&result.stderr);
log_event(format!(
"Aviso: não foi possível parar o serviço RustDesk sem elevação: {}",
stderr.trim()
));
// Retornamos Ok para não bloquear - o serviço pode estar já parado
Ok(())
}
}
Err(e) => {
log_event(format!("Aviso: falha ao executar sc stop RustDesk: {e}"));
Ok(())
}
}
let script = r#"
$ErrorActionPreference='Stop'
$service = Get-Service -Name 'RustDesk' -ErrorAction SilentlyContinue
if ($service -and $service.Status -ne 'Stopped') {
Stop-Service -Name 'RustDesk' -Force -ErrorAction Stop
$service.WaitForStatus('Stopped','00:00:10')
}
"#;
run_powershell_elevated(script)
}
fn can_write_dir(dir: &Path) -> bool {
@ -1333,21 +1110,21 @@ fn log_password_replication(secret: &str) {
fn log_password_match(path: &Path, secret: &str) {
match read_password_from_file(path) {
Some(value) if value == secret => {
log_event(format!(
log_event(&format!(
"Senha confirmada em {} ({})",
path.display(),
mask_secret(&value)
));
}
Some(value) => {
log_event(format!(
log_event(&format!(
"Aviso: senha divergente ({}) em {}",
mask_secret(&value),
path.display()
));
}
None => {
log_event(format!(
log_event(&format!(
"Aviso: chave 'password' não encontrada em {}",
path.display()
));
@ -1463,24 +1240,21 @@ fn write_machine_store_object(map: JsonMap<String, JsonValue>) -> Result<(), Str
}
fn upsert_machine_store_value(key: &str, value: JsonValue) -> Result<(), String> {
let mut map = read_machine_store_object().unwrap_or_default();
let mut map = read_machine_store_object().unwrap_or_else(JsonMap::new);
map.insert(key.to_string(), value);
write_machine_store_object(map)
}
#[allow(dead_code)]
fn machine_store_key_exists(key: &str) -> bool {
read_machine_store_object()
.map(|map| map.contains_key(key))
.unwrap_or(false)
}
#[allow(dead_code)]
fn acl_flag_file_path() -> Option<PathBuf> {
raven_appdata_root().map(|dir| dir.join(ACL_FLAG_FILENAME))
}
#[allow(dead_code)]
fn has_acl_unlock_flag() -> bool {
if let Some(flag) = acl_flag_file_path() {
if flag.exists() {
@ -1490,7 +1264,6 @@ fn has_acl_unlock_flag() -> bool {
machine_store_key_exists(RUSTDESK_ACL_STORE_KEY)
}
#[allow(dead_code)]
fn mark_acl_unlock_flag() {
let timestamp = Utc::now().timestamp_millis();
if let Some(flag_path) = acl_flag_file_path() {
@ -1498,7 +1271,7 @@ fn mark_acl_unlock_flag() {
let _ = fs::create_dir_all(parent);
}
if let Err(error) = fs::write(&flag_path, timestamp.to_string()) {
log_event(format!(
log_event(&format!(
"Falha ao gravar flag de ACL em {}: {error}",
flag_path.display()
));
@ -1506,83 +1279,8 @@ fn mark_acl_unlock_flag() {
}
if let Err(error) = upsert_machine_store_value(RUSTDESK_ACL_STORE_KEY, JsonValue::from(timestamp)) {
log_event(format!(
log_event(&format!(
"Falha ao registrar flag de ACL no machine-agent: {error}"
));
}
}
/// Resolves the machine-agent store file path under %LOCALAPPDATA%.
///
/// Fails with `RustdeskError::MissingId` when LOCALAPPDATA is not set.
fn get_machine_store_path() -> Result<PathBuf, RustdeskError> {
    let base = env::var("LOCALAPPDATA").map_err(|_| RustdeskError::MissingId)?;
    let store_path = Path::new(&base)
        .join(APP_IDENTIFIER)
        .join(MACHINE_STORE_FILENAME);
    Ok(store_path)
}
/// Publishes the provisioned RustDesk credentials to the backend.
///
/// Reads the machine token/config from the local machine store, then POSTs
/// `{ machineToken, provider, identifier, password, notes }` to
/// `/api/machines/remote-access` with an idempotency key.
///
/// Errors map onto `RustdeskError`: `MissingId` for malformed store data,
/// `Io` for file access, `CommandFailed` for non-2xx responses.
fn sync_remote_access_with_backend(result: &crate::RustdeskProvisioningResult) -> Result<(), RustdeskError> {
    log_event("Iniciando sincronizacao com backend...");
    // Read token and config from the machine store.
    let store_path = get_machine_store_path()?;
    let store_content = fs::read_to_string(&store_path)
        .map_err(RustdeskError::Io)?;
    let store: serde_json::Value = serde_json::from_str(&store_content)
        .map_err(|_| RustdeskError::MissingId)?;
    let token = store.get("token")
        .and_then(|v| v.as_str())
        .ok_or(RustdeskError::MissingId)?;
    let config = store.get("config")
        .ok_or(RustdeskError::MissingId)?;
    let machine_id = config.get("machineId")
        .and_then(|v| v.as_str())
        .ok_or(RustdeskError::MissingId)?;
    let api_base_url = config.get("apiBaseUrl")
        .and_then(|v| v.as_str())
        .unwrap_or("https://tickets.esdrasrenan.com.br");
    log_event(format!("Sincronizando com backend: {} (machineId: {})", api_base_url, machine_id));
    // Payload matches the backend schema:
    // { machineToken, provider, identifier, password?, url?, username?, notes? }
    let payload = serde_json::json!({
        "machineToken": token,
        "provider": "RustDesk",
        "identifier": result.id,
        "password": result.password,
        "notes": format!("Versao: {}. Provisionado em: {}",
            result.installed_version.as_deref().unwrap_or("desconhecida"),
            result.last_provisioned_at)
    });
    // POST to /api/machines/remote-access.
    let client = Client::builder()
        .user_agent(USER_AGENT)
        .timeout(Duration::from_secs(30))
        .build()?;
    let url = format!("{}/api/machines/remote-access", api_base_url);
    let response = client.post(&url)
        .header("Content-Type", "application/json")
        // Idempotency key prevents duplicate records on retries.
        .header("Idempotency-Key", format!("{}:RustDesk:{}", machine_id, result.id))
        .body(payload.to_string())
        .send()?;
    if response.status().is_success() {
        log_event(format!("Sync com backend OK: status {}", response.status()));
        Ok(())
    } else {
        let status = response.status();
        let body = response.text().unwrap_or_default();
        // BUGFIX: `&body[..200]` could panic when byte 200 is not a UTF-8
        // character boundary; back off to the nearest boundary instead.
        let mut end = body.len().min(200);
        while end > 0 && !body.is_char_boundary(end) {
            end -= 1;
        }
        let body_preview = &body[..end];
        log_event(format!("Sync com backend falhou: {} - {}", status, body_preview));
        Err(RustdeskError::CommandFailed {
            command: "sync_remote_access".to_string(),
            status: Some(status.as_u16() as i32)
        })
    }
}

View file

@ -1,244 +0,0 @@
//! Cliente IPC para comunicacao com o Raven Service
//!
//! Este modulo permite que o app Tauri se comunique com o Raven Service
//! via Named Pipes para executar operacoes privilegiadas.
#![allow(dead_code)]
use serde::{Deserialize, Serialize};
use std::io::{BufRead, BufReader, Write};
use std::time::Duration;
use thiserror::Error;
const PIPE_NAME: &str = r"\\.\pipe\RavenService";
/// Errors surfaced by the Raven Service IPC client.
#[derive(Debug, Error)]
pub enum ServiceClientError {
    /// The named pipe could not be reached (service stopped or not installed).
    #[error("Servico nao disponivel: {0}")]
    ServiceUnavailable(String),
    /// A read/write on the pipe failed mid-conversation.
    #[error("Erro de comunicacao: {0}")]
    CommunicationError(String),
    /// Request/response JSON could not be (de)serialized.
    #[error("Erro de serializacao: {0}")]
    SerializationError(#[from] serde_json::Error),
    /// The service replied with an explicit error payload.
    #[error("Erro do servico: {message} (code: {code})")]
    ServiceError { code: i32, message: String },
    /// Reserved for timeout handling; not produced by the client code visible here.
    #[error("Timeout aguardando resposta")]
    Timeout,
}
/// JSON request envelope written to the service (one line per request).
#[derive(Debug, Serialize)]
struct Request {
    /// Unique request id; echoed back by the service for correlation.
    id: String,
    /// RPC method name (e.g. "health_check", "apply_usb_policy").
    method: String,
    /// Method-specific parameters.
    params: serde_json::Value,
}
/// JSON response envelope read back from the service.
#[derive(Debug, Deserialize)]
struct Response {
    /// Must match the id of the originating request.
    id: String,
    /// Present on success; method-specific payload.
    result: Option<serde_json::Value>,
    /// Present on failure.
    error: Option<ErrorResponse>,
}
/// Structured error payload carried inside a `Response`.
#[derive(Debug, Deserialize)]
struct ErrorResponse {
    code: i32,
    message: String,
}
// =============================================================================
// Tipos de Resultado
// =============================================================================
/// Result of a USB-policy application returned by the service.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UsbPolicyResult {
    pub success: bool,
    pub policy: String,
    pub error: Option<String>,
    pub applied_at: Option<i64>,
}
/// Result of a RustDesk provisioning run performed by the service.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RustdeskResult {
    pub id: String,
    pub password: String,
    pub installed_version: Option<String>,
    pub updated: bool,
    // Epoch timestamp of the last provisioning — presumably milliseconds; confirm against service.
    pub last_provisioned_at: i64,
}
/// Current RustDesk installation/runtime state as reported by the service.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RustdeskStatus {
    pub installed: bool,
    pub running: bool,
    pub id: Option<String>,
    pub version: Option<String>,
}
/// Payload returned by the service's `health_check` method.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HealthCheckResult {
    pub status: String,
    pub service: String,
    pub version: String,
    pub timestamp: i64,
}
// =============================================================================
// Cliente
// =============================================================================
/// Returns true when the Raven Service answers a health check.
pub fn is_service_available() -> bool {
    matches!(health_check(), Ok(_))
}
/// Performs a `health_check` RPC and decodes the service's status payload.
pub fn health_check() -> Result<HealthCheckResult, ServiceClientError> {
    let raw = call_service("health_check", serde_json::json!({}))?;
    Ok(serde_json::from_value(raw)?)
}
/// Asks the service to apply the given USB storage policy.
pub fn apply_usb_policy(policy: &str) -> Result<UsbPolicyResult, ServiceClientError> {
    let params = serde_json::json!({ "policy": policy });
    let raw = call_service("apply_usb_policy", params)?;
    Ok(serde_json::from_value(raw)?)
}
/// Fetches the currently applied USB policy name from the service.
pub fn get_usb_policy() -> Result<String, ServiceClientError> {
    let raw = call_service("get_usb_policy", serde_json::json!({}))?;
    match raw.get("policy").and_then(|p| p.as_str()) {
        Some(policy) => Ok(policy.to_string()),
        None => Err(ServiceClientError::CommunicationError("Resposta invalida".into())),
    }
}
/// Requests RustDesk provisioning from the service.
///
/// All parameters are optional; `None` is serialized as JSON null and lets
/// the service use its defaults.
pub fn provision_rustdesk(
    config: Option<&str>,
    password: Option<&str>,
    machine_id: Option<&str>,
) -> Result<RustdeskResult, ServiceClientError> {
    let params = serde_json::json!({
        "config": config,
        "password": password,
        "machineId": machine_id,
    });
    let raw = call_service("provision_rustdesk", params)?;
    Ok(serde_json::from_value(raw)?)
}
/// Reads installation/runtime status of RustDesk from the service.
pub fn get_rustdesk_status() -> Result<RustdeskStatus, ServiceClientError> {
    let raw = call_service("get_rustdesk_status", serde_json::json!({}))?;
    Ok(serde_json::from_value(raw)?)
}
// =============================================================================
// Comunicacao IPC
// =============================================================================
/// Performs one request/response round-trip over the service pipe.
///
/// Protocol: newline-delimited JSON. A fresh UUID correlates the request
/// with its response; a mismatched id, an error payload, or a missing
/// result all map to `ServiceClientError`.
fn call_service(
    method: &str,
    params: serde_json::Value,
) -> Result<serde_json::Value, ServiceClientError> {
    let request_id = uuid::Uuid::new_v4().to_string();
    let envelope = Request {
        id: request_id.clone(),
        method: method.to_string(),
        params,
    };
    let encoded = serde_json::to_string(&envelope)?;
    // One connection per call: write the request line, then read one line back.
    let mut pipe = connect_to_pipe()?;
    writeln!(pipe, "{}", encoded).map_err(|e| {
        ServiceClientError::CommunicationError(format!("Erro ao enviar requisicao: {}", e))
    })?;
    pipe.flush().map_err(|e| {
        ServiceClientError::CommunicationError(format!("Erro ao flush: {}", e))
    })?;
    let mut line = String::new();
    BufReader::new(pipe).read_line(&mut line).map_err(|e| {
        ServiceClientError::CommunicationError(format!("Erro ao ler resposta: {}", e))
    })?;
    let response: Response = serde_json::from_str(&line)?;
    // The echoed id must match the one we sent.
    if response.id != request_id {
        return Err(ServiceClientError::CommunicationError(
            "ID de resposta nao corresponde".into(),
        ));
    }
    if let Some(error) = response.error {
        return Err(ServiceClientError::ServiceError {
            code: error.code,
            message: error.message,
        });
    }
    response
        .result
        .ok_or_else(|| ServiceClientError::CommunicationError("Resposta sem resultado".into()))
}
/// Opens the service's named pipe, retrying briefly when the service is
/// busy or momentarily between client connections.
#[cfg(target_os = "windows")]
fn connect_to_pipe() -> Result<std::fs::File, ServiceClientError> {
    const MAX_ATTEMPTS: u32 = 3;
    let mut attempt = 0;
    loop {
        let opened = std::fs::OpenOptions::new()
            .read(true)
            .write(true)
            .open(PIPE_NAME);
        match opened {
            Ok(file) => return Ok(file),
            Err(e) => {
                attempt += 1;
                if attempt >= MAX_ATTEMPTS {
                    return Err(ServiceClientError::ServiceUnavailable(format!(
                        "Nao foi possivel conectar ao servico apos {} tentativas: {}",
                        MAX_ATTEMPTS, e
                    )));
                }
                // Back off before the next attempt.
                std::thread::sleep(Duration::from_millis(500));
            }
        }
    }
}
// Non-Windows build: named pipes are Windows-only, so connecting always fails.
#[cfg(not(target_os = "windows"))]
fn connect_to_pipe() -> Result<std::fs::File, ServiceClientError> {
    Err(ServiceClientError::ServiceUnavailable(
        "Named Pipes so estao disponiveis no Windows".into(),
    ))
}

View file

@ -1,408 +0,0 @@
//! USB Storage Control Module
//!
//! Este modulo implementa o controle de dispositivos de armazenamento USB no Windows.
//! Utiliza duas abordagens complementares:
//! 1. Removable Storage Access Policy (via registro do Windows)
//! 2. USBSTOR driver control (como fallback/reforco)
//!
//! IMPORTANTE: Requer privilegios de administrador para funcionar.
use serde::{Deserialize, Serialize};
use std::io;
use thiserror::Error;
/// USB storage policy applied to the machine.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum UsbPolicy {
    Allow,
    BlockAll,
    Readonly,
}
impl UsbPolicy {
    /// Parses a policy name (case-insensitive); `None` for unknown values.
    pub fn from_str(s: &str) -> Option<Self> {
        let wanted = s.to_uppercase();
        [Self::Allow, Self::BlockAll, Self::Readonly]
            .iter()
            .copied()
            .find(|policy| policy.as_str() == wanted)
    }
    /// Canonical wire/name form of the policy.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Allow => "ALLOW",
            Self::BlockAll => "BLOCK_ALL",
            Self::Readonly => "READONLY",
        }
    }
}
/// Outcome of a USB-policy application, serialized back to callers.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UsbPolicyResult {
    // True when the registry writes completed without error.
    pub success: bool,
    // Policy name that was applied (e.g. "BLOCK_ALL").
    pub policy: String,
    // Human-readable failure description, if any.
    pub error: Option<String>,
    // Application timestamp in epoch milliseconds (see apply_usb_policy).
    pub applied_at: Option<i64>,
}
/// Errors produced by the USB storage control module.
#[derive(Error, Debug)]
#[allow(dead_code)]
pub enum UsbControlError {
    #[error("Politica USB invalida: {0}")]
    InvalidPolicy(String),
    #[error("Erro de registro do Windows: {0}")]
    RegistryError(String),
    // Raised for Windows "access denied"; caller should retry with privileges.
    #[error("Permissao negada - requer privilegios de administrador")]
    PermissionDenied,
    // Returned by the non-Windows fallback implementation.
    #[error("Sistema operacional nao suportado")]
    UnsupportedOs,
    #[error("Erro de I/O: {0}")]
    Io(#[from] io::Error),
}
#[cfg(target_os = "windows")]
mod windows_impl {
use super::*;
use std::fs;
use std::path::PathBuf;
use std::process::Command;
use winreg::enums::*;
use winreg::RegKey;
// GUID para Removable Storage Devices (Disk)
const REMOVABLE_STORAGE_GUID: &str = "{53f56307-b6bf-11d0-94f2-00a0c91efb8b}";
// Chaves de registro
const REMOVABLE_STORAGE_PATH: &str =
r"Software\Policies\Microsoft\Windows\RemovableStorageDevices";
const USBSTOR_PATH: &str = r"SYSTEM\CurrentControlSet\Services\USBSTOR";
const STORAGE_POLICY_PATH: &str = r"SYSTEM\CurrentControlSet\Control\StorageDevicePolicies";
pub fn apply_usb_policy(policy: UsbPolicy) -> Result<UsbPolicyResult, UsbControlError> {
let now = chrono::Utc::now().timestamp_millis();
let direct_result = try_apply_policy_direct(policy);
match direct_result {
Ok(()) => Ok(UsbPolicyResult {
success: true,
policy: policy.as_str().to_string(),
error: None,
applied_at: Some(now),
}),
Err(err) => {
// Se faltou permissão, retorna erro - o serviço deve ser usado
// Não fazemos elevação aqui para evitar UAC adicional
if is_permission_error(&err) {
return Err(UsbControlError::PermissionDenied);
}
Err(err)
}
}
}
fn try_apply_policy_direct(policy: UsbPolicy) -> Result<(), UsbControlError> {
// 1. Aplicar Removable Storage Access Policy
apply_removable_storage_policy(policy)?;
// 2. Aplicar USBSTOR como reforco
apply_usbstor_policy(policy)?;
// 3. Aplicar WriteProtect se necessario
if policy == UsbPolicy::Readonly {
apply_write_protect(true)?;
} else {
apply_write_protect(false)?;
}
Ok(())
}
    /// Writes the Removable Storage Access GPO keys
    /// (`...\RemovableStorageDevices\<disk GUID>`) for the requested policy.
    ///
    /// ALLOW removes the Deny_* values (and the key when possible);
    /// BLOCK_ALL denies read/write/execute; READONLY denies only write.
    fn apply_removable_storage_policy(policy: UsbPolicy) -> Result<(), UsbControlError> {
        let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
        let full_path = format!(r"{}\{}", REMOVABLE_STORAGE_PATH, REMOVABLE_STORAGE_GUID);
        match policy {
            UsbPolicy::Allow => {
                // Best-effort cleanup: drop the restriction values if present.
                if let Ok(key) = hklm.open_subkey_with_flags(&full_path, KEY_ALL_ACCESS) {
                    let _ = key.delete_value("Deny_Read");
                    let _ = key.delete_value("Deny_Write");
                    let _ = key.delete_value("Deny_Execute");
                }
                // Remove the key itself too; harmless if it is not empty.
                let _ = hklm.delete_subkey(&full_path);
            }
            UsbPolicy::BlockAll => {
                let (key, _) = hklm
                    .create_subkey(&full_path)
                    .map_err(map_winreg_error)?;
                key.set_value("Deny_Read", &1u32)
                    .map_err(map_winreg_error)?;
                key.set_value("Deny_Write", &1u32)
                    .map_err(map_winreg_error)?;
                key.set_value("Deny_Execute", &1u32)
                    .map_err(map_winreg_error)?;
            }
            UsbPolicy::Readonly => {
                let (key, _) = hklm
                    .create_subkey(&full_path)
                    .map_err(map_winreg_error)?;
                // Reads allowed, writes denied.
                key.set_value("Deny_Read", &0u32)
                    .map_err(map_winreg_error)?;
                key.set_value("Deny_Write", &1u32)
                    .map_err(map_winreg_error)?;
                key.set_value("Deny_Execute", &0u32)
                    .map_err(map_winreg_error)?;
            }
        }
        Ok(())
    }
fn apply_usbstor_policy(policy: UsbPolicy) -> Result<(), UsbControlError> {
let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
let key = hklm
.open_subkey_with_flags(USBSTOR_PATH, KEY_ALL_ACCESS)
.map_err(map_winreg_error)?;
match policy {
UsbPolicy::Allow => {
// Start = 3 habilita o driver
key.set_value("Start", &3u32)
.map_err(map_winreg_error)?;
}
UsbPolicy::BlockAll | UsbPolicy::Readonly => {
// Start = 4 desabilita o driver
// Nota: Para Readonly, mantemos o driver ativo mas com WriteProtect
// Porem, como fallback de seguranca, desabilitamos para BlockAll
if policy == UsbPolicy::BlockAll {
key.set_value("Start", &4u32)
.map_err(map_winreg_error)?;
} else {
// Readonly mantem driver ativo
key.set_value("Start", &3u32)
.map_err(map_winreg_error)?;
}
}
}
Ok(())
}
fn apply_write_protect(enable: bool) -> Result<(), UsbControlError> {
let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
if enable {
let (key, _) = hklm
.create_subkey(STORAGE_POLICY_PATH)
.map_err(map_winreg_error)?;
key.set_value("WriteProtect", &1u32)
.map_err(map_winreg_error)?;
} else if let Ok(key) = hklm.open_subkey_with_flags(STORAGE_POLICY_PATH, KEY_ALL_ACCESS) {
let _ = key.set_value("WriteProtect", &0u32);
}
Ok(())
}
    /// Derives the effective policy by reading the registry back.
    ///
    /// Checks the Removable Storage GPO values first, then falls back to the
    /// USBSTOR driver Start value; defaults to ALLOW when neither indicates
    /// a restriction.
    pub fn get_current_policy() -> Result<UsbPolicy, UsbControlError> {
        let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
        // Removable Storage policy takes precedence.
        let full_path = format!(r"{}\{}", REMOVABLE_STORAGE_PATH, REMOVABLE_STORAGE_GUID);
        if let Ok(key) = hklm.open_subkey_with_flags(&full_path, KEY_READ) {
            let deny_read: u32 = key.get_value("Deny_Read").unwrap_or(0);
            let deny_write: u32 = key.get_value("Deny_Write").unwrap_or(0);
            if deny_read == 1 && deny_write == 1 {
                return Ok(UsbPolicy::BlockAll);
            }
            if deny_read == 0 && deny_write == 1 {
                return Ok(UsbPolicy::Readonly);
            }
        }
        // USBSTOR Start=4 means the mass-storage driver is disabled.
        if let Ok(key) = hklm.open_subkey_with_flags(USBSTOR_PATH, KEY_READ) {
            let start: u32 = key.get_value("Start").unwrap_or(3);
            if start == 4 {
                return Ok(UsbPolicy::BlockAll);
            }
        }
        Ok(UsbPolicy::Allow)
    }
fn is_permission_error(error: &UsbControlError) -> bool {
match error {
UsbControlError::PermissionDenied => true,
UsbControlError::RegistryError(msg) => {
let lower = msg.to_lowercase();
lower.contains("access is denied") || lower.contains("acesso negado") || lower.contains("5")
}
_ => false,
}
}
    /// Applies the policy through an elevated PowerShell (UAC prompt):
    /// writes a temporary .ps1 that edits the same registry keys via
    /// `reg.exe`, then launches it with `-Verb RunAs` and waits.
    ///
    /// Currently unused (direct writes / service routing are preferred);
    /// kept as a fallback. Fails with `PermissionDenied` when the elevated
    /// process exits non-zero or the UAC prompt is declined.
    #[allow(dead_code)]
    fn apply_policy_with_elevation(policy: UsbPolicy) -> Result<(), UsbControlError> {
        // Temporary script applying the registry keys via elevated PowerShell.
        let temp_dir = std::env::temp_dir();
        let script_path: PathBuf = temp_dir.join("raven_usb_policy.ps1");
        let policy_str = policy.as_str();
        let script = format!(
            r#"$ErrorActionPreference = 'Stop'
$guid = '{guid}'
$policy = '{policy}'
function Set-Allow {{
reg delete 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /f 2>$null
reg delete 'HKLM\SYSTEM\CurrentControlSet\Control\StorageDevicePolicies' /f 2>$null
reg add 'HKLM\SYSTEM\CurrentControlSet\Services\USBSTOR' /v Start /t REG_DWORD /d 3 /f | Out-Null
}}
function Set-BlockAll {{
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /f | Out-Null
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /v Deny_Read /t REG_DWORD /d 1 /f | Out-Null
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /v Deny_Write /t REG_DWORD /d 1 /f | Out-Null
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /v Deny_Execute /t REG_DWORD /d 1 /f | Out-Null
reg add 'HKLM\SYSTEM\CurrentControlSet\Services\USBSTOR' /v Start /t REG_DWORD /d 4 /f | Out-Null
reg add 'HKLM\SYSTEM\CurrentControlSet\Control\StorageDevicePolicies' /f | Out-Null
reg add 'HKLM\SYSTEM\CurrentControlSet\Control\StorageDevicePolicies' /v WriteProtect /t REG_DWORD /d 0 /f | Out-Null
}}
function Set-Readonly {{
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /f | Out-Null
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /v Deny_Read /t REG_DWORD /d 0 /f | Out-Null
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /v Deny_Write /t REG_DWORD /d 1 /f | Out-Null
reg add 'HKLM\Software\Policies\Microsoft\Windows\RemovableStorageDevices\{guid}' /v Deny_Execute /t REG_DWORD /d 0 /f | Out-Null
reg add 'HKLM\SYSTEM\CurrentControlSet\Services\USBSTOR' /v Start /t REG_DWORD /d 3 /f | Out-Null
reg add 'HKLM\SYSTEM\CurrentControlSet\Control\StorageDevicePolicies' /f | Out-Null
reg add 'HKLM\SYSTEM\CurrentControlSet\Control\StorageDevicePolicies' /v WriteProtect /t REG_DWORD /d 1 /f | Out-Null
}}
switch ($policy) {{
'ALLOW' {{ Set-Allow }}
'BLOCK_ALL' {{ Set-BlockAll }}
'READONLY' {{ Set-Readonly }}
default {{ throw 'Politica invalida' }}
}}
try {{
gpupdate /target:computer /force | Out-Null
}} catch {{}}
"#,
            guid = REMOVABLE_STORAGE_GUID,
            policy = policy_str
        );
        fs::write(&script_path, script).map_err(UsbControlError::Io)?;
        // Start-Process with RunAs triggers the UAC prompt.
        let arg = format!(
            "Start-Process -WindowStyle Hidden -FilePath powershell -Verb RunAs -Wait -ArgumentList '-ExecutionPolicy Bypass -File \"{}\"'",
            script_path.display()
        );
        let status = Command::new("powershell")
            .arg("-Command")
            .arg(arg)
            .status()
            .map_err(UsbControlError::Io)?;
        if !status.success() {
            return Err(UsbControlError::PermissionDenied);
        }
        Ok(())
    }
fn map_winreg_error(error: io::Error) -> UsbControlError {
if let Some(code) = error.raw_os_error() {
if code == 5 {
return UsbControlError::PermissionDenied;
}
}
UsbControlError::RegistryError(error.to_string())
}
    /// Forces a machine group-policy refresh (`gpupdate /target:computer /force`,
    /// hidden window) so the registry policy changes take effect.
    ///
    /// A gpupdate failure is not treated as an error: it is only logged.
    pub fn refresh_group_policy() -> Result<(), UsbControlError> {
        use std::os::windows::process::CommandExt;
        use std::process::Command;
        // Hide the console window spawned by gpupdate.
        const CREATE_NO_WINDOW: u32 = 0x08000000;
        // Run gpupdate to force the policy refresh.
        let output = Command::new("gpupdate")
            .args(["/target:computer", "/force"])
            .creation_flags(CREATE_NO_WINDOW)
            .output()
            .map_err(UsbControlError::Io)?;
        if !output.status.success() {
            // Not critical if this fails; just log it.
            eprintln!(
                "[usb_control] gpupdate retornou erro: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
        Ok(())
    }
}
// Stub implementation for non-Windows builds: USB storage control relies on
// the Windows registry, so every entry point reports UnsupportedOs.
#[cfg(not(target_os = "windows"))]
mod fallback_impl {
    use super::*;
    pub fn apply_usb_policy(_policy: UsbPolicy) -> Result<UsbPolicyResult, UsbControlError> {
        Err(UsbControlError::UnsupportedOs)
    }
    pub fn get_current_policy() -> Result<UsbPolicy, UsbControlError> {
        Err(UsbControlError::UnsupportedOs)
    }
    pub fn refresh_group_policy() -> Result<(), UsbControlError> {
        Err(UsbControlError::UnsupportedOs)
    }
}
#[cfg(target_os = "windows")]
pub use windows_impl::*;
#[cfg(not(target_os = "windows"))]
pub use fallback_impl::*;
#[cfg(test)]
mod tests {
    use super::*;
    // Case-insensitive parsing of policy names, including the rejection path.
    #[test]
    fn test_policy_from_str() {
        assert_eq!(UsbPolicy::from_str("ALLOW"), Some(UsbPolicy::Allow));
        assert_eq!(UsbPolicy::from_str("BLOCK_ALL"), Some(UsbPolicy::BlockAll));
        assert_eq!(UsbPolicy::from_str("READONLY"), Some(UsbPolicy::Readonly));
        assert_eq!(UsbPolicy::from_str("allow"), Some(UsbPolicy::Allow));
        assert_eq!(UsbPolicy::from_str("invalid"), None);
    }
    // Canonical string form of each policy variant.
    #[test]
    fn test_policy_as_str() {
        assert_eq!(UsbPolicy::Allow.as_str(), "ALLOW");
        assert_eq!(UsbPolicy::BlockAll.as_str(), "BLOCK_ALL");
        assert_eq!(UsbPolicy::Readonly.as_str(), "READONLY");
    }
}

View file

@ -1,7 +1,7 @@
{
"$schema": "https://schema.tauri.app/config/2",
"productName": "Raven",
"version": "0.2.0",
"version": "0.1.6",
"identifier": "br.com.esdrasrenan.sistemadechamados",
"build": {
"beforeDevCommand": "bun run dev",
@ -28,16 +28,11 @@
"plugins": {
"updater": {
"endpoints": [
"https://raw.githubusercontent.com/esdrasrenan/sistema-de-chamados/main/apps/desktop/public/latest.json"
"https://raw.githubusercontent.com/esdrasrenan/sistema-de-chamados/refs/heads/main/apps/desktop/public/latest.json"
],
"dialog": true,
"active": true,
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDZDRTBFNkY1NUQ3QzU0QkEKUldTNlZIeGQ5ZWJnYk5mY0J4aWRlb0dRdVZ4TGpBSUZXMnRVUFhmdmlLT0tlY084UjJQUHFWWUkK"
},
"deep-link": {
"desktop": {
"schemes": ["raven"]
}
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDE5MTMxRTQwODA1NEFCRjAKUldUd3ExU0FRQjRUR2VqcHBNdXhBMUV3WlM2cFA4dmNnNEhtMUJ2a3VVWVlTQnoxbEo5YUtlUTMK"
}
},
"bundle": {
@ -50,12 +45,10 @@
"icons/icon.png",
"icons/Raven.png"
],
"resources": {
"../service/target/release/raven-service.exe": "raven-service.exe"
},
"windows": {
"webviewInstallMode": {
"type": "skip"
"type": "downloadBootstrapper",
"silent": true
},
"nsis": {
"displayLanguageSelector": true,
@ -63,7 +56,6 @@
"headerImage": "icons/nsis-header.bmp",
"sidebarImage": "icons/nsis-sidebar.bmp",
"installMode": "perMachine",
"installerHooks": "installer-hooks.nsh",
"languages": ["PortugueseBR"]
}
}

View file

@ -1,256 +0,0 @@
/**
* ChatHubWidget - Lista de sessoes de chat ativas usando Convex subscriptions
*
* Arquitetura:
* - Usa useQuery do Convex React para subscription reativa (tempo real verdadeiro)
* - Sem polling - todas as atualizacoes sao push-based via WebSocket
* - Tauri usado apenas para gerenciamento de janelas
*/
import { useEffect, useState } from "react"
import { invoke } from "@tauri-apps/api/core"
import { Loader2, MessageCircle, ChevronUp, X, Minimize2 } from "lucide-react"
import { useMachineSessions, type MachineSession } from "./useConvexMachineQueries"
/**
 * Hub widget — lists every active chat session for this machine.
 *
 * Clicking a session opens (or focuses) the chat window for that ticket and
 * then closes the hub window. Session data arrives through a reactive Convex
 * subscription; Tauri `invoke` is used only for window management.
 */
export function ChatHubWidget() {
  // Initialized from the actual window height (< 100px = minimized).
  const [isMinimized, setIsMinimized] = useState(() => window.innerHeight < 100)
  // Reactive Convex subscription — updates are pushed, never polled.
  const { sessions = [], isLoading, hasToken } = useMachineSessions()
  // Keep the minimized flag in sync with the window size.
  useEffect(() => {
    const mountTime = Date.now()
    const STABILIZATION_DELAY = 500
    const handler = () => {
      // Ignore resize events fired while the freshly-created window is still
      // settling, so a transient height does not flip the minimized state.
      if (Date.now() - mountTime < STABILIZATION_DELAY) {
        return
      }
      const h = window.innerHeight
      setIsMinimized(h < 100)
    }
    window.addEventListener("resize", handler)
    return () => window.removeEventListener("resize", handler)
  }, [])
  // Open/focus the chat window for the selected ticket, then close the hub.
  const handleSelectSession = async (ticketId: string, ticketRef: number) => {
    try {
      // Tauri 2.x auto-converts snake_case (Rust) to camelCase (JS)
      await invoke("open_chat_window", { ticketId, ticketRef })
      await invoke("close_hub_window")
    } catch (err) {
      console.error("open_chat_window FAILED:", err)
    }
  }
  // Collapse the hub to its pill form (the window resize happens in Rust).
  const handleMinimize = async () => {
    setIsMinimized(true)
    try {
      await invoke("set_hub_minimized", { minimized: true })
    } catch (err) {
      console.error("Erro ao minimizar hub:", err)
    }
  }
  // Expand the hub window; local state flips after a short delay so the
  // window resize applies first. On failure the UI expands anyway.
  const handleExpand = async () => {
    try {
      await invoke("set_hub_minimized", { minimized: false })
      setTimeout(() => setIsMinimized(false), 100)
    } catch (err) {
      console.error("set_hub_minimized FAILED:", err)
      setIsMinimized(false)
    }
  }
  // Close the hub window entirely (best effort; failures are only logged).
  const handleClose = () => {
    invoke("close_hub_window").catch((err) => {
      console.error("Erro ao fechar janela do hub:", err)
    })
  }
  // Total unread messages across all sessions, shown on the minimized badge.
  const totalUnread = sessions.reduce((sum, s) => sum + s.unreadCount, 0)
  // No machine token configured — nothing can be subscribed to.
  if (!hasToken) {
    return (
      <div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent p-2">
        <div className="pointer-events-auto flex items-center gap-2 rounded-full bg-red-100 px-4 py-2 text-red-600 shadow-lg">
          <span className="text-sm font-medium">Token nao configurado</span>
        </div>
      </div>
    )
  }
  // Session list still loading.
  if (isLoading) {
    return (
      <div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent p-2">
        <div className="pointer-events-auto flex items-center gap-2 rounded-full bg-slate-200 px-4 py-2 text-slate-600 shadow-lg">
          <Loader2 className="size-4 animate-spin" />
          <span className="text-sm font-medium">Carregando...</span>
        </div>
      </div>
    )
  }
  // No active chat sessions.
  if (sessions.length === 0) {
    return (
      <div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent p-2">
        <div className="pointer-events-auto flex items-center gap-2 rounded-full bg-slate-200 px-4 py-2 text-slate-600 shadow-lg">
          <MessageCircle className="size-4" />
          <span className="text-sm font-medium">Sem chats</span>
        </div>
      </div>
    )
  }
  // Minimized pill: chat count, online dot, and unread badge.
  if (isMinimized) {
    return (
      <div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent pr-3">
        <button
          onClick={(e) => {
            e.stopPropagation()
            handleExpand()
          }}
          className="pointer-events-auto relative flex items-center gap-2 rounded-full bg-black px-4 py-2 text-white shadow-lg hover:bg-black/90"
        >
          <MessageCircle className="size-4" />
          <span className="text-sm font-medium">
            {sessions.length} chat{sessions.length !== 1 ? "s" : ""}
          </span>
          <span className="size-2 rounded-full bg-emerald-400" />
          <ChevronUp className="size-4" />
          {totalUnread > 0 && (
            <span className="absolute -right-1 -top-1 flex size-5 items-center justify-center rounded-full bg-red-500 text-xs font-bold">
              {totalUnread > 9 ? "9+" : totalUnread}
            </span>
          )}
        </button>
      </div>
    )
  }
  // Expanded: header plus scrollable session list.
  return (
    <div className="flex h-full flex-col overflow-hidden rounded-2xl bg-white shadow-xl">
      {/* Header */}
      <div
        data-tauri-drag-region
        className="flex items-center justify-between border-b border-slate-200 bg-slate-50 px-4 py-3 rounded-t-2xl"
      >
        <div className="flex items-center gap-3">
          <div className="flex size-10 items-center justify-center rounded-full bg-black text-white">
            <MessageCircle className="size-5" />
          </div>
          <div>
            <p className="text-sm font-semibold text-slate-900">Chats Ativos</p>
            <p className="text-xs text-slate-500">
              {sessions.length} conversa{sessions.length !== 1 ? "s" : ""}
            </p>
          </div>
        </div>
        <div className="flex items-center gap-1">
          <button
            onClick={handleMinimize}
            className="rounded-md p-1.5 text-slate-500 hover:bg-slate-100"
            aria-label="Minimizar lista de chats"
          >
            <Minimize2 className="size-4" />
          </button>
          <button
            onClick={handleClose}
            className="rounded-md p-1.5 text-slate-500 hover:bg-slate-100"
            aria-label="Fechar lista de chats"
          >
            <X className="size-4" />
          </button>
        </div>
      </div>
      {/* Session list */}
      <div className="flex-1 overflow-y-auto p-2">
        <div className="space-y-2">
          {sessions.map((session) => (
            <SessionItem
              key={session.sessionId}
              session={session}
              onClick={() => handleSelectSession(session.ticketId, session.ticketRef)}
            />
          ))}
        </div>
      </div>
    </div>
  )
}
/**
 * One row in the hub list: ticket reference, agent name, relative time of the
 * last activity, and an unread-count badge when there are pending messages.
 */
function SessionItem({
  session,
  onClick,
}: {
  session: MachineSession
  onClick: () => void
}) {
  // Stop the event from bubbling to ancestors before delegating to onClick.
  const handleClick = (e: React.MouseEvent) => {
    e.stopPropagation()
    onClick()
  }
  return (
    <button
      onClick={handleClick}
      className="flex w-full items-center gap-3 rounded-xl p-3 text-left transition hover:bg-slate-50"
    >
      {/* Avatar */}
      <div className="relative flex size-10 shrink-0 items-center justify-center rounded-full bg-black text-white">
        <MessageCircle className="size-5" />
        {/* Online indicator */}
        <span className="absolute -bottom-0.5 -right-0.5 size-3 rounded-full border-2 border-white bg-emerald-500" />
      </div>
      {/* Info */}
      <div className="min-w-0 flex-1">
        <div className="flex items-center justify-between gap-2">
          <p className="truncate text-sm font-medium text-slate-900">
            Ticket #{session.ticketRef}
          </p>
          <span className="shrink-0 text-xs text-slate-400">
            {formatRelativeTime(session.lastActivityAt)}
          </span>
        </div>
        <p className="truncate text-xs text-slate-500">
          {session.agentName}
        </p>
      </div>
      {/* Unread badge */}
      {session.unreadCount > 0 && (
        <span className="flex size-5 shrink-0 items-center justify-center rounded-full bg-red-500 text-xs font-bold text-white">
          {session.unreadCount > 9 ? "9+" : session.unreadCount}
        </span>
      )}
    </button>
  )
}
/**
 * Formats how long ago `timestamp` (epoch ms) was, relative to now:
 * "agora" under a minute, then minutes ("5m"), hours ("3h"), days ("2d").
 */
function formatRelativeTime(timestamp: number): string {
  const elapsedMs = Date.now() - timestamp
  const totalMinutes = Math.floor(elapsedMs / 60000)
  if (totalMinutes < 1) return "agora"
  if (totalMinutes < 60) return `${totalMinutes}m`
  const totalHours = Math.floor(totalMinutes / 60)
  if (totalHours < 24) return `${totalHours}h`
  return `${Math.floor(totalHours / 24)}d`
}

View file

@ -1,891 +0,0 @@
/**
* ChatWidget - Componente de chat em tempo real usando Convex subscriptions
*
* Arquitetura:
* - Usa useQuery do Convex React para subscriptions reativas (tempo real verdadeiro)
* - Usa useMutation do Convex React para enviar mensagens
* - Mantém Tauri apenas para: upload de arquivos, gerenciamento de janela
* - Sem polling - todas as atualizacoes sao push-based via WebSocket
*/
import { useCallback, useEffect, useMemo, useRef, useState } from "react"
import { open as openDialog } from "@tauri-apps/plugin-dialog"
import { openUrl as openExternal } from "@tauri-apps/plugin-opener"
import { invoke } from "@tauri-apps/api/core"
import { Send, X, Loader2, MessageCircle, Paperclip, FileText, Image as ImageIcon, File, User, ChevronUp, Minimize2, Eye, Download, Check, MessagesSquare } from "lucide-react"
import type { Id } from "@convex/_generated/dataModel"
import { useMachineMessages, useMachineSessions, usePostMachineMessage, useMarkMachineMessagesRead, type MachineMessage } from "./useConvexMachineQueries"
import { useConvexMachine } from "./ConvexMachineProvider"
// Cap on how many messages are kept in component state / rendered at once.
const MAX_MESSAGES_IN_MEMORY = 200
// Mark-as-read mutations are sent in batches of this many message ids.
const MARK_READ_BATCH_SIZE = 50
// Distance (px) from the bottom within which the view counts as "at bottom".
const SCROLL_BOTTOM_THRESHOLD_PX = 120
// File extensions offered by the attachment picker dialog.
const ALLOWED_EXTENSIONS = [
  "jpg", "jpeg", "png", "gif", "webp",
  "pdf", "txt", "doc", "docx", "xls", "xlsx",
]
/** An attachment already uploaded to storage, pending send with the next message. */
interface UploadedAttachment {
  storageId: string
  name: string
  size?: number // bytes, when known
  type?: string // MIME type, when known
}

/** Attachment metadata as carried on a delivered chat message. */
interface ChatAttachment {
  storageId: string
  name: string
  size?: number // bytes, when known
  type?: string // MIME type, when known
}
/**
 * Picks an icon for a file name by extension: image icon for common image
 * types, document icon for pdf/doc/docx/txt, generic file icon otherwise.
 */
function getFileIcon(fileName: string) {
  const extension = fileName.toLowerCase().split(".").pop() ?? ""
  const imageExtensions = ["jpg", "jpeg", "png", "gif", "webp"]
  const documentExtensions = ["pdf", "doc", "docx", "txt"]
  if (imageExtensions.includes(extension)) {
    return <ImageIcon className="size-4" />
  }
  if (documentExtensions.includes(extension)) {
    return <FileText className="size-4" />
  }
  return <File className="size-4" />
}
/**
 * True when the attachment is an image, judged first by its MIME type and
 * then by its file extension.
 */
function isImageAttachment(attachment: ChatAttachment) {
  if (attachment.type?.startsWith("image/")) return true
  const extension = attachment.name.toLowerCase().split(".").pop() ?? ""
  switch (extension) {
    case "jpg":
    case "jpeg":
    case "png":
    case "gif":
    case "webp":
      return true
    default:
      return false
  }
}
/**
 * Human-readable size label: "NB", "NKB", or "N.NMB".
 * Returns null for a missing or zero size.
 */
function formatAttachmentSize(size?: number) {
  if (!size) return null
  if (size < 1024) return `${size}B`
  const kilobytes = size / 1024
  return kilobytes < 1024
    ? `${Math.round(kilobytes)}KB`
    : `${(kilobytes / 1024).toFixed(1)}MB`
}
/**
 * Collects the ids of the last `unreadCount` agent-sent messages (those with
 * `isFromMachine` false), scanning from the newest backwards, and returns
 * them in chronological order.
 */
function getUnreadAgentMessageIds(messages: MachineMessage[], unreadCount: number): string[] {
  if (unreadCount <= 0 || messages.length === 0) return []
  const collected: string[] = []
  let index = messages.length - 1
  while (index >= 0 && collected.length < unreadCount) {
    const candidate = messages[index]
    if (!candidate.isFromMachine) {
      collected.push(candidate.id)
    }
    index--
  }
  return collected.reverse()
}
/**
 * Splits `items` into consecutive chunks of at most `size` elements.
 * A non-positive `size` yields the whole array as a single chunk.
 */
function chunkArray<T>(items: T[], size: number): T[][] {
  if (size <= 0) return [items]
  const chunks: T[][] = []
  let offset = 0
  while (offset < items.length) {
    chunks.push(items.slice(offset, offset + size))
    offset += size
  }
  return chunks
}
/**
 * Renders one chat attachment: an inline thumbnail with hover actions for
 * images, or a compact row with view/download buttons for other files.
 *
 * The attachment URL is resolved lazily via `loadUrl` (cached by the caller).
 * `isAgent` only selects styling so colors match the sender's bubble.
 */
function MessageAttachment({
  attachment,
  isAgent,
  loadUrl,
}: {
  attachment: ChatAttachment
  isAgent: boolean
  loadUrl: (storageId: string) => Promise<string>
}) {
  const [url, setUrl] = useState<string | null>(null)
  const [loading, setLoading] = useState(true)
  const [downloading, setDownloading] = useState(false)
  const [downloaded, setDownloaded] = useState(false)
  // Resolve the URL whenever the storage id changes; the `cancelled` flag
  // prevents state updates after unmount or after the id has changed.
  useEffect(() => {
    let cancelled = false
    setLoading(true)
    loadUrl(attachment.storageId)
      .then((resolved) => {
        if (!cancelled) setUrl(resolved)
      })
      .catch((err) => {
        console.error("Falha ao carregar URL do anexo:", err)
      })
      .finally(() => {
        if (!cancelled) setLoading(false)
      })
    return () => {
      cancelled = true
    }
  }, [attachment.storageId, loadUrl])
  // Open the attachment with the OS default handler (no-op until url resolves).
  const handleView = async () => {
    if (!url) return
    try {
      await openExternal(url)
    } catch (err) {
      console.error("Falha ao abrir anexo:", err)
    }
  }
  // Download via a temporary blob object URL; on fetch failure fall back to
  // opening the attachment externally. Shows a check mark for 2s on success.
  const handleDownload = async () => {
    if (!url || downloading) return
    setDownloading(true)
    try {
      const response = await fetch(url)
      const blob = await response.blob()
      const downloadUrl = URL.createObjectURL(blob)
      const a = document.createElement("a")
      a.href = downloadUrl
      a.download = attachment.name
      document.body.appendChild(a)
      a.click()
      document.body.removeChild(a)
      URL.revokeObjectURL(downloadUrl)
      setDownloaded(true)
      setTimeout(() => setDownloaded(false), 2000)
    } catch (err) {
      console.error("Falha ao baixar anexo:", err)
      await handleView()
    } finally {
      setDownloading(false)
    }
  }
  const sizeLabel = formatAttachmentSize(attachment.size)
  const isImage = isImageAttachment(attachment)
  // URL still resolving: placeholder row with a spinner.
  if (loading) {
    return (
      <div className={`flex items-center gap-2 rounded-lg p-2 text-xs ${isAgent ? "bg-white/10" : "bg-slate-100"}`}>
        <Loader2 className="size-4 animate-spin" />
        <span className="truncate">Carregando anexo...</span>
      </div>
    )
  }
  // Image with a resolved URL: clickable thumbnail plus hover actions.
  if (isImage && url) {
    return (
      <div className={`group relative overflow-hidden rounded-lg border ${isAgent ? "border-white/10" : "border-slate-200"}`}>
        {/* eslint-disable-next-line @next/next/no-img-element -- Tauri desktop app, not Next.js */}
        <img
          src={url}
          alt={attachment.name}
          className="size-24 cursor-pointer object-cover"
          onClick={handleView}
        />
        <div className="absolute inset-0 flex items-center justify-center gap-1 bg-black/50 opacity-0 transition-opacity group-hover:opacity-100">
          <button
            onClick={handleView}
            className="flex size-7 items-center justify-center rounded-full bg-white/20 hover:bg-white/30"
            aria-label="Visualizar anexo"
          >
            <Eye className="size-4 text-white" />
          </button>
          <button
            onClick={handleDownload}
            disabled={downloading}
            className="flex size-7 items-center justify-center rounded-full bg-white/20 hover:bg-white/30 disabled:opacity-60"
            aria-label="Baixar anexo"
          >
            {downloading ? (
              <Loader2 className="size-4 animate-spin text-white" />
            ) : downloaded ? (
              <Check className="size-4 text-emerald-300" />
            ) : (
              <Download className="size-4 text-white" />
            )}
          </button>
        </div>
      </div>
    )
  }
  // Non-image attachments (or images whose URL failed): compact file row.
  return (
    <div className={`flex items-center gap-2 rounded-lg p-2 text-xs ${isAgent ? "bg-white/10" : "bg-slate-100"}`}>
      {getFileIcon(attachment.name)}
      <button
        onClick={handleView}
        className="flex-1 truncate text-left hover:underline"
        aria-label={`Visualizar anexo ${attachment.name}`}
      >
        {attachment.name}
      </button>
      {sizeLabel && <span className="text-xs opacity-60">({sizeLabel})</span>}
      <div className="ml-1 flex items-center gap-1">
        <button
          onClick={handleView}
          className={`flex size-7 items-center justify-center rounded-md ${isAgent ? "hover:bg-white/10" : "hover:bg-slate-200"}`}
          aria-label="Visualizar anexo"
        >
          <Eye className="size-4" />
        </button>
        <button
          onClick={handleDownload}
          disabled={downloading}
          className={`flex size-7 items-center justify-center rounded-md disabled:opacity-60 ${isAgent ? "hover:bg-white/10" : "hover:bg-slate-200"}`}
          aria-label="Baixar anexo"
        >
          {downloading ? (
            <Loader2 className="size-4 animate-spin" />
          ) : downloaded ? (
            <Check className="size-4 text-emerald-500" />
          ) : (
            <Download className="size-4" />
          )}
        </button>
      </div>
    </div>
  )
}
/** Props for ChatWidget: the Convex ticket id and its human-facing number. */
interface ChatWidgetProps {
  ticketId: string
  ticketRef?: number // displayed as "Ticket #N"; may be absent
}
export function ChatWidget({ ticketId, ticketRef }: ChatWidgetProps) {
const [inputValue, setInputValue] = useState("")
const [isSending, setIsSending] = useState(false)
const [isUploading, setIsUploading] = useState(false)
const [pendingAttachments, setPendingAttachments] = useState<UploadedAttachment[]>([])
// Inicializa baseado na altura real da janela (< 100px = minimizado)
const [isMinimized, setIsMinimized] = useState(() => window.innerHeight < 100)
// Convex hooks
const { apiBaseUrl, machineToken } = useConvexMachine()
const { sessions: machineSessions = [] } = useMachineSessions()
const { messages: convexMessages, hasSession, unreadCount, isLoading } = useMachineMessages(
ticketId as Id<"tickets">,
{ limit: MAX_MESSAGES_IN_MEMORY }
)
const postMessage = usePostMachineMessage()
const markMessagesRead = useMarkMachineMessagesRead()
// Limitar mensagens em memoria
const messages = useMemo(() => convexMessages.slice(-MAX_MESSAGES_IN_MEMORY), [convexMessages])
const messagesEndRef = useRef<HTMLDivElement>(null)
const messagesContainerRef = useRef<HTMLDivElement>(null)
const messageElementsRef = useRef<Map<string, HTMLDivElement>>(new Map())
const prevHasSessionRef = useRef<boolean>(false)
const [isAtBottom, setIsAtBottom] = useState(true)
const isAtBottomRef = useRef(true)
const pendingScrollActionRef = useRef<
| { type: "bottom"; behavior: ScrollBehavior; markRead: boolean }
| { type: "message"; messageId: string; behavior: ScrollBehavior; markRead: boolean }
| null
>(null)
const autoReadInFlightRef = useRef(false)
const lastAutoReadCountRef = useRef<number | null>(null)
const unreadAgentMessageIds = useMemo(() => getUnreadAgentMessageIds(messages, unreadCount), [messages, unreadCount])
const firstUnreadAgentMessageId = unreadAgentMessageIds[0] ?? null
const otherUnreadCount = useMemo(() => {
if (machineSessions.length <= 1) return 0
return machineSessions.reduce((sum, session) => {
return sum + (session.ticketId === ticketId ? 0 : session.unreadCount)
}, 0)
}, [machineSessions, ticketId])
const handleOpenHub = useCallback(async () => {
try {
await invoke("open_hub_window")
await invoke("set_hub_minimized", { minimized: false })
} catch (err) {
console.error("Erro ao abrir hub:", err)
}
}, [])
const updateIsAtBottom = useCallback(() => {
const el = messagesContainerRef.current
if (!el) return
const distance = el.scrollHeight - el.scrollTop - el.clientHeight
const atBottom = distance <= SCROLL_BOTTOM_THRESHOLD_PX
if (isAtBottomRef.current !== atBottom) {
isAtBottomRef.current = atBottom
setIsAtBottom(atBottom)
}
}, [])
const scrollToBottom = useCallback((behavior: ScrollBehavior) => {
messagesEndRef.current?.scrollIntoView({ behavior })
requestAnimationFrame(() => updateIsAtBottom())
}, [updateIsAtBottom])
const scrollToMessage = useCallback((messageId: string, behavior: ScrollBehavior) => {
const el = messageElementsRef.current.get(messageId)
if (!el) return false
el.scrollIntoView({ behavior, block: "center" })
requestAnimationFrame(() => updateIsAtBottom())
return true
}, [updateIsAtBottom])
// Fechar janela quando sessao termina
useEffect(() => {
const prevHasSession = prevHasSessionRef.current
if (prevHasSession && !hasSession) {
invoke("close_chat_window", { ticketId }).catch((err) => {
console.error("Erro ao fechar janela ao encerrar sessao:", err)
})
}
prevHasSessionRef.current = hasSession
}, [hasSession, ticketId])
// Ref para acessar isMinimized dentro de callbacks
const isMinimizedRef = useRef(isMinimized)
useEffect(() => {
isMinimizedRef.current = isMinimized
}, [isMinimized])
// Cache de URLs de anexos
const attachmentUrlCacheRef = useRef<Map<string, string>>(new Map())
const loadAttachmentUrl = useCallback(async (storageId: string) => {
const cached = attachmentUrlCacheRef.current.get(storageId)
if (cached) return cached
if (!apiBaseUrl || !machineToken) {
throw new Error("Configuracao nao disponivel")
}
const response = await fetch(`${apiBaseUrl}/api/machines/chat/attachments/url`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
machineToken,
ticketId,
storageId,
}),
})
if (!response.ok) {
const text = await response.text().catch(() => "")
throw new Error(text || `Falha ao obter URL do anexo (${response.status})`)
}
const data = (await response.json()) as { url?: string }
if (!data.url) {
throw new Error("Resposta invalida ao obter URL do anexo")
}
attachmentUrlCacheRef.current.set(storageId, data.url)
return data.url
}, [apiBaseUrl, machineToken, ticketId])
const markUnreadMessagesRead = useCallback(async () => {
if (unreadCount <= 0) return false
const ids = getUnreadAgentMessageIds(messages, unreadCount)
if (ids.length === 0) return false
const chunks = chunkArray(ids, MARK_READ_BATCH_SIZE)
for (const chunk of chunks) {
await markMessagesRead({
ticketId: ticketId as Id<"tickets">,
messageIds: chunk as Id<"ticketChatMessages">[],
})
}
return true
}, [messages, ticketId, unreadCount, markMessagesRead])
const maybeAutoMarkRead = useCallback(async () => {
if (autoReadInFlightRef.current) return
if (!hasSession || unreadCount <= 0) return
if (isMinimizedRef.current || !isAtBottomRef.current) return
if (lastAutoReadCountRef.current === unreadCount) return
autoReadInFlightRef.current = true
try {
const didMark = await markUnreadMessagesRead()
if (didMark) {
lastAutoReadCountRef.current = unreadCount
}
} finally {
autoReadInFlightRef.current = false
}
}, [hasSession, unreadCount, markUnreadMessagesRead])
// Auto-scroll quando novas mensagens chegam (se ja estava no bottom)
const prevMessagesLengthRef = useRef(messages.length)
useEffect(() => {
if (messages.length > prevMessagesLengthRef.current && isAtBottomRef.current && !isMinimizedRef.current) {
pendingScrollActionRef.current = { type: "bottom", behavior: "smooth", markRead: true }
}
prevMessagesLengthRef.current = messages.length
}, [messages.length])
// Executar scroll pendente
useEffect(() => {
if (isMinimized) return
const action = pendingScrollActionRef.current
if (!action) return
if (action.type === "bottom") {
if (!messagesEndRef.current) return
pendingScrollActionRef.current = null
scrollToBottom(action.behavior)
if (action.markRead) {
markUnreadMessagesRead().catch((err) => console.error("Falha ao marcar mensagens como lidas:", err))
}
return
}
const ok = scrollToMessage(action.messageId, action.behavior)
if (!ok) {
if (!messagesEndRef.current) return
pendingScrollActionRef.current = null
scrollToBottom(action.behavior)
if (action.markRead) {
markUnreadMessagesRead().catch((err) => console.error("Falha ao marcar mensagens como lidas:", err))
}
return
}
pendingScrollActionRef.current = null
if (action.markRead) {
markUnreadMessagesRead().catch((err) => console.error("Falha ao marcar mensagens como lidas:", err))
}
}, [isMinimized, messages, markUnreadMessagesRead, scrollToBottom, scrollToMessage])
useEffect(() => {
if (unreadCount === 0) {
lastAutoReadCountRef.current = null
return
}
maybeAutoMarkRead().catch((err) => console.error("Falha ao auto-marcar mensagens:", err))
}, [isMinimized, isAtBottom, unreadCount, maybeAutoMarkRead])
// Sincronizar estado minimizado com tamanho da janela
useEffect(() => {
const mountTime = Date.now()
const STABILIZATION_DELAY = 500
const handler = () => {
if (Date.now() - mountTime < STABILIZATION_DELAY) {
return
}
const h = window.innerHeight
setIsMinimized(h < 100)
}
window.addEventListener("resize", handler)
return () => window.removeEventListener("resize", handler)
}, [])
// Selecionar arquivo para anexar
const handleAttach = async () => {
if (isUploading || isSending) return
try {
const selected = await openDialog({
multiple: false,
filters: [{
name: "Arquivos permitidos",
extensions: ALLOWED_EXTENSIONS,
}],
})
if (!selected) return
const filePath = typeof selected === "string" ? selected : (selected as { path: string }).path
setIsUploading(true)
if (!apiBaseUrl || !machineToken) {
throw new Error("Configuracao nao disponivel")
}
const attachment = await invoke<UploadedAttachment>("upload_chat_file", {
baseUrl: apiBaseUrl,
token: machineToken,
filePath,
})
setPendingAttachments(prev => [...prev, attachment])
} catch (err) {
console.error("Erro ao anexar arquivo:", err)
alert(typeof err === "string" ? err : "Erro ao anexar arquivo")
} finally {
setIsUploading(false)
}
}
// Remover anexo pendente
const handleRemoveAttachment = (storageId: string) => {
setPendingAttachments(prev => prev.filter(a => a.storageId !== storageId))
}
// Enviar mensagem
const handleSend = async () => {
if ((!inputValue.trim() && pendingAttachments.length === 0) || isSending) return
const messageText = inputValue.trim()
const attachmentsToSend = [...pendingAttachments]
setInputValue("")
setPendingAttachments([])
setIsSending(true)
try {
await postMessage({
ticketId: ticketId as Id<"tickets">,
body: messageText,
attachments: attachmentsToSend.length > 0 ? attachmentsToSend.map(a => ({
storageId: a.storageId as Id<"_storage">,
name: a.name,
size: a.size,
type: a.type,
})) : undefined,
})
pendingScrollActionRef.current = { type: "bottom", behavior: "smooth", markRead: false }
} catch (err) {
console.error("Erro ao enviar mensagem:", err)
setInputValue(messageText)
setPendingAttachments(attachmentsToSend)
} finally {
setIsSending(false)
}
}
const handleMinimize = async () => {
setIsMinimized(true)
try {
await invoke("set_chat_minimized", { ticketId, minimized: true })
} catch (err) {
console.error("Erro ao minimizar janela:", err)
}
}
const handleExpand = async () => {
if (firstUnreadAgentMessageId) {
pendingScrollActionRef.current = { type: "message", messageId: firstUnreadAgentMessageId, behavior: "auto", markRead: unreadCount > 0 }
} else {
pendingScrollActionRef.current = { type: "bottom", behavior: "auto", markRead: false }
}
setIsMinimized(false)
try {
await invoke("open_chat_window", { ticketId, ticketRef: ticketRef ?? 0 })
} catch (err) {
console.error("Erro ao expandir janela:", err)
}
}
const handleClose = () => {
invoke("close_chat_window", { ticketId }).catch((err) => {
console.error("Erro ao fechar janela de chat:", err)
})
}
const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault()
handleSend()
}
}
// Loading
if (isLoading) {
return (
<div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent p-2">
<div className="pointer-events-auto flex items-center gap-2 rounded-full bg-slate-200 px-4 py-2 text-slate-600 shadow-lg">
<Loader2 className="size-4 animate-spin" />
<span className="text-sm font-medium">Carregando...</span>
</div>
</div>
)
}
// Sem sessao ativa
if (!hasSession) {
return (
<div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent">
<div className="pointer-events-auto flex items-center gap-2 rounded-full bg-slate-200 px-4 py-2 text-slate-600 shadow-lg">
<MessageCircle className="size-4" />
<span className="text-sm font-medium">
{ticketRef ? `Ticket #${ticketRef}` : "Chat"}
</span>
<span className="size-2 rounded-full bg-slate-400" />
<span className="text-xs text-slate-500">Offline</span>
<button
onClick={handleClose}
className="ml-1 rounded-full p-1 text-slate-600 hover:bg-slate-300/60"
aria-label="Fechar chat"
>
<X className="size-4" />
</button>
</div>
</div>
)
}
// Minimizado
if (isMinimized) {
return (
<div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent pr-3">
<button
onClick={handleExpand}
className="pointer-events-auto relative flex items-center gap-2 rounded-full bg-black px-4 py-2 text-white shadow-lg hover:bg-black/90"
>
<MessageCircle className="size-4" />
<span className="text-sm font-medium">
Ticket #{ticketRef}
</span>
<span className="size-2 rounded-full bg-emerald-400" />
<ChevronUp className="size-4" />
{unreadCount > 0 && (
<span className="absolute -right-1 -top-1 flex size-5 items-center justify-center rounded-full bg-red-500 text-xs font-bold">
{unreadCount > 9 ? "9+" : unreadCount}
</span>
)}
</button>
</div>
)
}
// Expandido
return (
<div className="flex h-full flex-col overflow-hidden rounded-2xl bg-white shadow-xl">
{/* Header */}
<div
data-tauri-drag-region
className="flex items-center justify-between border-b border-slate-200 bg-slate-50 px-4 py-3 rounded-t-2xl"
>
<div className="flex items-center gap-3">
<div className="flex size-10 items-center justify-center rounded-full bg-black text-white">
<MessageCircle className="size-5" />
</div>
<div>
<div className="flex items-center gap-2">
<p className="text-sm font-semibold text-slate-900">Chat</p>
<span className="flex items-center gap-1.5 text-xs text-emerald-600">
<span className="size-2 rounded-full bg-emerald-500 animate-pulse" />
Online
</span>
</div>
<p className="text-xs text-slate-500">
Ticket #{ticketRef} - Suporte
</p>
</div>
</div>
<div className="flex items-center gap-1">
{machineSessions.length > 1 && (
<button
onClick={handleOpenHub}
className="relative rounded-md p-1.5 text-slate-500 hover:bg-slate-100"
aria-label="Abrir lista de chats"
>
<MessagesSquare className="size-4" />
{otherUnreadCount > 0 && (
<span className="absolute -right-1 -top-1 flex size-5 items-center justify-center rounded-full bg-red-500 text-[10px] font-bold text-white">
{otherUnreadCount > 9 ? "9+" : otherUnreadCount}
</span>
)}
</button>
)}
<button
onClick={handleMinimize}
className="rounded-md p-1.5 text-slate-500 hover:bg-slate-100"
aria-label="Minimizar chat"
>
<Minimize2 className="size-4" />
</button>
<button
onClick={handleClose}
className="rounded-md p-1.5 text-slate-500 hover:bg-slate-100"
aria-label="Fechar chat"
>
<X className="size-4" />
</button>
</div>
</div>
{/* Mensagens */}
<div
ref={messagesContainerRef}
onScroll={updateIsAtBottom}
className="flex-1 overflow-y-auto p-4"
>
{messages.length === 0 ? (
<div className="flex h-full flex-col items-center justify-center text-center">
<p className="text-sm text-slate-400">
Nenhuma mensagem ainda
</p>
<p className="mt-1 text-xs text-slate-400">
O agente iniciara a conversa em breve
</p>
</div>
) : (
<div className="space-y-4">
{messages.map((msg) => {
const isAgent = !msg.isFromMachine
const bodyText = msg.body.trim()
const shouldShowBody =
bodyText.length > 0 && !(bodyText === "[Anexo]" && (msg.attachments?.length ?? 0) > 0)
return (
<div key={msg.id} className="space-y-2">
{firstUnreadAgentMessageId === msg.id && unreadCount > 0 && !isAtBottom && (
<div className="flex items-center gap-2">
<div className="h-px flex-1 bg-slate-200" />
<span className="text-xs font-medium text-slate-500">Novas mensagens</span>
<div className="h-px flex-1 bg-slate-200" />
</div>
)}
<div
ref={(el) => {
if (el) {
messageElementsRef.current.set(msg.id, el)
} else {
messageElementsRef.current.delete(msg.id)
}
}}
className={`flex gap-2 ${isAgent ? "flex-row-reverse" : "flex-row"}`}
>
{/* Avatar */}
<div
className={`flex size-7 shrink-0 items-center justify-center rounded-full ${
isAgent ? "bg-black text-white" : "bg-slate-200 text-slate-600"
}`}
>
{isAgent ? <MessageCircle className="size-3.5" /> : <User className="size-3.5" />}
</div>
{/* Bubble */}
<div
className={`max-w-[75%] rounded-2xl px-4 py-2 ${
isAgent
? "rounded-br-md bg-black text-white"
: "rounded-bl-md border border-slate-100 bg-white text-slate-900 shadow-sm"
}`}
>
{!isAgent && (
<p className="mb-1 text-xs font-medium text-slate-500">
{msg.authorName}
</p>
)}
{shouldShowBody && <p className="whitespace-pre-wrap text-sm">{msg.body}</p>}
{/* Anexos */}
{msg.attachments && msg.attachments.length > 0 && (
<div className="mt-2 space-y-2">
{msg.attachments.map((att) => (
<MessageAttachment
key={att.storageId}
attachment={{
storageId: att.storageId as string,
name: att.name,
size: att.size,
type: att.type,
}}
isAgent={isAgent}
loadUrl={loadAttachmentUrl}
/>
))}
</div>
)}
<p
className={`mt-1 text-right text-xs ${
isAgent ? "text-white/60" : "text-slate-400"
}`}
>
{formatTime(msg.createdAt)}
</p>
</div>
</div>
</div>
)
})}
<div ref={messagesEndRef} />
</div>
)}
</div>
{/* Input */}
<div className="border-t border-slate-200 p-3">
{unreadCount > 0 && !isAtBottom && (
<div className="mb-2 flex justify-center">
<button
type="button"
onClick={() => {
const target = firstUnreadAgentMessageId
if (target) {
scrollToMessage(target, "smooth")
} else {
scrollToBottom("smooth")
}
markUnreadMessagesRead().catch((err) => console.error("Falha ao marcar mensagens como lidas:", err))
}}
className="rounded-full bg-slate-100 px-3 py-1 text-xs font-medium text-slate-700 hover:bg-slate-200"
>
Ver novas mensagens ({unreadCount > 9 ? "9+" : unreadCount})
</button>
</div>
)}
{/* Anexos pendentes */}
{pendingAttachments.length > 0 && (
<div className="mb-2 flex flex-wrap gap-2">
{pendingAttachments.map((att) => (
<div
key={att.storageId}
className="flex items-center gap-1 rounded-lg bg-slate-100 px-2 py-1 text-xs"
>
{getFileIcon(att.name)}
<span className="max-w-[100px] truncate">{att.name}</span>
<button
onClick={() => handleRemoveAttachment(att.storageId)}
className="ml-1 rounded p-0.5 text-slate-400 hover:bg-slate-200 hover:text-slate-600"
aria-label={`Remover anexo ${att.name}`}
>
<X className="size-3" />
</button>
</div>
))}
</div>
)}
<div className="flex items-end gap-2">
<textarea
value={inputValue}
onChange={(e) => setInputValue(e.target.value)}
onKeyDown={handleKeyDown}
placeholder="Digite sua mensagem..."
className="max-h-24 min-h-[36px] flex-1 resize-none rounded-lg border border-slate-200 px-3 py-2 text-sm focus:border-slate-400 focus:outline-none focus:ring-1 focus:ring-slate-400"
rows={1}
/>
<button
onClick={handleAttach}
disabled={isUploading || isSending}
className="flex size-9 items-center justify-center rounded-lg text-slate-500 transition hover:bg-slate-100 hover:text-slate-700 disabled:opacity-50"
aria-label="Anexar arquivo"
>
{isUploading ? (
<Loader2 className="size-4 animate-spin" />
) : (
<Paperclip className="size-4" />
)}
</button>
<button
onClick={handleSend}
disabled={(!inputValue.trim() && pendingAttachments.length === 0) || isSending}
className="flex size-9 items-center justify-center rounded-lg bg-black text-white transition hover:bg-black/90 disabled:opacity-50"
aria-label="Enviar mensagem"
>
{isSending ? (
<Loader2 className="size-4 animate-spin" />
) : (
<Send className="size-4" />
)}
</button>
</div>
</div>
</div>
)
}
/**
 * Formats a Unix timestamp (milliseconds) as a pt-BR "HH:mm" time string.
 */
function formatTime(timestamp: number): string {
  const formatter = new Intl.DateTimeFormat("pt-BR", {
    hour: "2-digit",
    minute: "2-digit",
  })
  return formatter.format(new Date(timestamp))
}

View file

@ -1,146 +0,0 @@
/**
* ConvexMachineProvider - Provider Convex para autenticacao via token de maquina
*
* Este provider inicializa o ConvexReactClient usando o token da maquina
* armazenado no Tauri Store, permitindo subscriptions reativas em tempo real.
*
* Arquitetura:
* - Carrega o token do Tauri Store na montagem
* - Inicializa o ConvexReactClient com a URL do Convex
* - Disponibiliza o cliente para componentes filhos via Context
* - Reconecta automaticamente quando o token muda
*/
import { createContext, useContext, useEffect, useState, type ReactNode } from "react"
import { ConvexReactClient } from "convex/react"
import { getMachineStoreConfig } from "./machineStore"
// Convex URL - in production, uses the custom domain.
const CONVEX_URL = import.meta.env.MODE === "production"
  ? "https://convex.esdrasrenan.com.br"
  : (import.meta.env.VITE_CONVEX_URL ?? "https://convex.esdrasrenan.com.br")

// Snapshot of the machine credentials loaded from the Tauri Store.
type MachineAuthState = {
  token: string | null
  apiBaseUrl: string | null
  isLoading: boolean
  error: string | null
}

// Value exposed to consumers of the machine-auth context.
type ConvexMachineContextValue = {
  client: ConvexReactClient | null
  machineToken: string | null
  apiBaseUrl: string | null
  isReady: boolean
  error: string | null
  reload: () => Promise<void>
}

// null outside of a ConvexMachineProvider subtree.
const ConvexMachineContext = createContext<ConvexMachineContextValue | null>(null)
/**
 * Accessor for the machine-auth Convex context.
 * Throws when called outside of a ConvexMachineProvider subtree.
 */
export function useConvexMachine() {
  const value = useContext(ConvexMachineContext)
  if (value === null) {
    throw new Error("useConvexMachine must be used within ConvexMachineProvider")
  }
  return value
}
/** Convenience hook exposing only the machine token from the context. */
export function useMachineToken() {
  return useConvexMachine().machineToken
}
interface ConvexMachineProviderProps {
  children: ReactNode
}

/**
 * Provides a ConvexReactClient authenticated via the machine token stored
 * in the Tauri Store. Re-creates the client whenever the token changes and
 * exposes a `reload` callback so consumers can re-read the store on demand.
 */
export function ConvexMachineProvider({ children }: ConvexMachineProviderProps) {
  const [authState, setAuthState] = useState<MachineAuthState>({
    token: null,
    apiBaseUrl: null,
    isLoading: true,
    error: null,
  })
  const [client, setClient] = useState<ConvexReactClient | null>(null)

  // Loads token/config from the Tauri Store and mirrors the result in state.
  const loadConfig = async () => {
    setAuthState(prev => ({ ...prev, isLoading: true, error: null }))
    try {
      const config = await getMachineStoreConfig()
      if (!config.token) {
        setAuthState({
          token: null,
          apiBaseUrl: config.apiBaseUrl,
          isLoading: false,
          error: "Token da maquina nao encontrado",
        })
        return
      }
      setAuthState({
        token: config.token,
        apiBaseUrl: config.apiBaseUrl,
        isLoading: false,
        error: null,
      })
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err)
      setAuthState({
        token: null,
        apiBaseUrl: null,
        isLoading: false,
        error: message || "Erro ao carregar configuracao",
      })
    }
  }

  // Load the stored configuration once on mount.
  useEffect(() => {
    loadConfig()
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])

  // (Re)create the Convex client whenever the token changes.
  // The previous client is always closed by this effect's cleanup, so there
  // is no separate close-on-null branch: the original implementation both
  // double-closed the client (cleanup plus explicit close) and read `client`
  // through a stale closure hidden behind an eslint-disable.
  useEffect(() => {
    if (!authState.token) {
      setClient(null)
      return
    }
    const newClient = new ConvexReactClient(CONVEX_URL, {
      // Avoid beforeunload warnings for this embedded widget.
      unsavedChangesWarning: false,
    })
    setClient(newClient)
    return () => {
      newClient.close()
    }
  }, [authState.token])

  const contextValue: ConvexMachineContextValue = {
    client,
    machineToken: authState.token,
    apiBaseUrl: authState.apiBaseUrl,
    isReady: !authState.isLoading && !!client && !!authState.token,
    error: authState.error,
    reload: loadConfig,
  }

  return (
    <ConvexMachineContext.Provider value={contextValue}>
      {children}
    </ConvexMachineContext.Provider>
  )
}

View file

@ -1,65 +0,0 @@
import { ConvexProvider } from "convex/react"
import { ChatWidget } from "./ChatWidget"
import { ChatHubWidget } from "./ChatHubWidget"
import { ConvexMachineProvider, useConvexMachine } from "./ConvexMachineProvider"
import { Loader2 } from "lucide-react"
function ChatAppContent() {
const { client, isReady, error } = useConvexMachine()
// Obter ticketId e ticketRef da URL
const params = new URLSearchParams(window.location.search)
const ticketId = params.get("ticketId")
const ticketRef = params.get("ticketRef")
const isHub = params.get("hub") === "true"
// Aguardar cliente Convex estar pronto
if (!isReady || !client) {
if (error) {
return (
<div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent p-2">
<div className="pointer-events-auto flex items-center gap-2 rounded-full bg-red-100 px-4 py-2 text-red-600 shadow-lg">
<span className="text-sm font-medium">Erro: {error}</span>
</div>
</div>
)
}
return (
<div className="pointer-events-none flex h-full w-full items-end justify-end bg-transparent p-2">
<div className="pointer-events-auto flex items-center gap-2 rounded-full bg-slate-200 px-4 py-2 text-slate-600 shadow-lg">
<Loader2 className="size-4 animate-spin" />
<span className="text-sm font-medium">Conectando...</span>
</div>
</div>
)
}
// Modo hub - lista de todas as sessoes
if (isHub || !ticketId) {
return (
<ConvexProvider client={client}>
<ChatHubWidget />
</ConvexProvider>
)
}
// Modo chat - conversa de um ticket especifico
return (
<ConvexProvider client={client}>
<ChatWidget ticketId={ticketId} ticketRef={ticketRef ? Number(ticketRef) : undefined} />
</ConvexProvider>
)
}
/**
 * Root chat widget entry point: wires the machine-auth provider around the
 * routing component so children can access the Convex client.
 */
export function ChatApp() {
  return (
    <ConvexMachineProvider>
      <ChatAppContent />
    </ConvexMachineProvider>
  )
}
export { ChatWidget }
export { ChatHubWidget }
export * from "./types"

View file

@ -1,52 +0,0 @@
import { Store } from "@tauri-apps/plugin-store"
import { appLocalDataDir, join } from "@tauri-apps/api/path"
// File name of the Tauri store that persists machine credentials.
const STORE_FILENAME = "machine-agent.json"
// Fallback API host used when the store has no apiBaseUrl configured.
const DEFAULT_API_BASE_URL = "https://tickets.esdrasrenan.com.br"

// Optional URLs persisted alongside the token.
type MachineStoreConfig = {
  apiBaseUrl?: string
  appUrl?: string
  convexUrl?: string
}

// Shape of the persisted store contents.
type MachineStoreData = {
  token?: string
  config?: MachineStoreConfig
}
/**
 * Opens the machine-agent Tauri store and reads the persisted token/config.
 */
async function loadStore(): Promise<MachineStoreData> {
  const storePath = await join(await appLocalDataDir(), STORE_FILENAME)
  const store = await Store.load(storePath)
  const token = await store.get<string>("token")
  const config = await store.get<MachineStoreConfig>("config")
  return {
    token: token ?? undefined,
    config: config ?? undefined,
  }
}
/**
 * Trims the given URL (or the fallback when absent) and strips any trailing
 * slashes. Returns the fallback (or "") when the result is empty.
 */
function normalizeUrl(value?: string | null, fallback?: string) {
  let candidate = (value ?? fallback ?? "").trim()
  if (!candidate) {
    return fallback ?? ""
  }
  while (candidate.endsWith("/")) {
    candidate = candidate.slice(0, -1)
  }
  return candidate
}
/** Resolves the API base URL from config, falling back to the default host. */
function resolveApiBaseUrl(config?: MachineStoreConfig): string {
  return normalizeUrl(config?.apiBaseUrl, DEFAULT_API_BASE_URL) || DEFAULT_API_BASE_URL
}
function resolveAppUrl(config?: MachineStoreConfig, apiBaseUrl?: string): string {
const fromConfig = normalizeUrl(config?.appUrl, apiBaseUrl)
return fromConfig || apiBaseUrl || DEFAULT_API_BASE_URL
}
/**
 * Loads the machine token plus resolved URLs from the Tauri store.
 * Throws when no token has been provisioned yet.
 */
export async function getMachineStoreConfig() {
  const { token, config } = await loadStore()
  if (!token) {
    throw new Error("Token de maquina nao encontrado no store")
  }
  const apiBaseUrl = resolveApiBaseUrl(config)
  const appUrl = resolveAppUrl(config, apiBaseUrl)
  return { token, apiBaseUrl, appUrl }
}

View file

@ -1,70 +0,0 @@
// Types for the chat system.

/** An active live-chat session between an agent and this machine. */
export interface ChatSession {
  sessionId: string
  ticketId: string
  ticketRef: number
  ticketSubject: string
  agentName: string
  agentEmail?: string
  agentAvatarUrl?: string
  unreadCount: number // messages not yet read on this machine
  lastActivityAt: number // epoch ms
  startedAt: number // epoch ms
}

/** A single message inside a chat session. */
export interface ChatMessage {
  id: string
  body: string
  authorName: string
  authorAvatarUrl?: string
  isFromMachine: boolean // true when sent from this machine rather than an agent
  createdAt: number // epoch ms
  attachments: ChatAttachment[]
}

/** Metadata of a file attached to a chat message. */
export interface ChatAttachment {
  storageId: string
  name: string
  size?: number
  type?: string
}

/** Response shape when listing messages for a ticket. */
export interface ChatMessagesResponse {
  messages: ChatMessage[]
  hasSession: boolean
  unreadCount?: number
}

/** Response shape after posting a message. */
export interface SendMessageResponse {
  messageId: string
  createdAt: number // epoch ms
}

/** Event payload emitted when an agent starts a session. */
export interface SessionStartedEvent {
  session: ChatSession
}

/** Event payload carrying refreshed unread counters. */
export interface UnreadUpdateEvent {
  totalUnread: number
  sessions: ChatSession[]
}

/** Event payload emitted when new messages arrive. */
export interface NewMessageEvent {
  totalUnread: number
  newCount: number
  sessions: ChatSession[]
}

/** Event payload emitted when a session is closed. */
export interface SessionEndedEvent {
  sessionId: string
  ticketId: string
}

/** A past session with its full message transcript. */
export interface ChatHistorySession {
  sessionId: string
  startedAt: number // epoch ms
  endedAt: number | null // null while still open
  agentName: string
  messages: ChatMessage[]
}

View file

@ -1,206 +0,0 @@
/**
* Hooks customizados para queries/mutations do Convex com token de maquina
*
* Estes hooks encapsulam a logica de passar o machineToken automaticamente
* para as queries e mutations do Convex, proporcionando uma API simples
* e reativa para os componentes de chat.
*/
import { useQuery, useMutation, useAction } from "convex/react"
import { api } from "@convex/_generated/api"
import type { Id } from "@convex/_generated/dataModel"
import { useMachineToken } from "./ConvexMachineProvider"
// ============================================
// TIPOS
// ============================================
/** One active session as returned by api.liveChat.listMachineSessions. */
export type MachineSession = {
  sessionId: Id<"liveChatSessions">
  ticketId: Id<"tickets">
  ticketRef: number
  ticketSubject: string
  agentName: string
  agentEmail?: string
  agentAvatarUrl?: string
  unreadCount: number
  lastActivityAt: number // epoch ms
  startedAt: number // epoch ms
}

/** One chat message as returned by api.liveChat.listMachineMessages. */
export type MachineMessage = {
  id: Id<"ticketChatMessages">
  body: string
  authorName: string
  authorAvatarUrl?: string
  isFromMachine: boolean
  createdAt: number // epoch ms
  attachments: Array<{
    storageId: Id<"_storage">
    name: string
    size?: number
    type?: string
  }>
}

/** Full result shape of api.liveChat.listMachineMessages. */
export type MachineMessagesResult = {
  messages: MachineMessage[]
  hasSession: boolean
  unreadCount: number
}

/** Full result shape of api.liveChat.checkMachineUpdates. */
export type MachineUpdatesResult = {
  hasActiveSessions: boolean
  sessions: Array<{
    ticketId: Id<"tickets">
    ticketRef: number
    unreadCount: number
    lastActivityAt: number
  }>
  totalUnread: number
}
// ============================================
// HOOKS
// ============================================
/**
* Hook para listar sessoes ativas da maquina
* Subscription reativa - atualiza automaticamente quando ha mudancas
*/
/**
 * Reactive subscription to this machine's active chat sessions.
 * The query is skipped until a machine token is available.
 */
export function useMachineSessions() {
  const machineToken = useMachineToken()
  const raw = useQuery(
    api.liveChat.listMachineSessions,
    machineToken ? { machineToken } : "skip"
  )
  const sessions = raw as MachineSession[] | undefined
  return {
    sessions,
    isLoading: Boolean(machineToken) && sessions === undefined,
    hasToken: Boolean(machineToken),
  }
}
/**
* Hook para listar mensagens de um ticket especifico
* Subscription reativa - atualiza automaticamente quando ha novas mensagens
*/
/**
 * Reactive subscription to the chat messages of one ticket.
 * The query is skipped until both a machine token and a ticketId exist.
 *
 * @param ticketId - Ticket whose session messages should be streamed, or null to skip.
 * @param options.limit - Optional maximum number of messages to fetch.
 */
export function useMachineMessages(ticketId: Id<"tickets"> | null, options?: { limit?: number }) {
  const machineToken = useMachineToken()
  const result = useQuery(
    api.liveChat.listMachineMessages,
    machineToken && ticketId
      ? { machineToken, ticketId, limit: options?.limit }
      : "skip"
  )
  // Cast once instead of repeating the cast on every field access.
  const data = result as MachineMessagesResult | undefined
  return {
    messages: data?.messages ?? [],
    hasSession: data?.hasSession ?? false,
    unreadCount: data?.unreadCount ?? 0,
    isLoading: result === undefined && !!machineToken && !!ticketId,
    hasToken: !!machineToken,
  }
}
/**
* Hook para verificar updates (polling leve)
* Usado como fallback ou para verificar status rapidamente
*/
/**
 * Lightweight reactive status check: active sessions plus total unread count.
 * Used as a fallback / quick status probe alongside the full subscriptions.
 */
export function useMachineUpdates() {
  const machineToken = useMachineToken()
  const result = useQuery(
    api.liveChat.checkMachineUpdates,
    machineToken ? { machineToken } : "skip"
  )
  // Cast once instead of repeating the cast on every field access.
  const data = result as MachineUpdatesResult | undefined
  return {
    hasActiveSessions: data?.hasActiveSessions ?? false,
    sessions: data?.sessions ?? [],
    totalUnread: data?.totalUnread ?? 0,
    isLoading: result === undefined && !!machineToken,
    hasToken: !!machineToken,
  }
}
/**
* Hook para enviar mensagem
*/
/**
 * Returns an async callback that posts a chat message on behalf of the machine.
 * The callback throws when no machine token has been loaded yet.
 */
export function usePostMachineMessage() {
  const machineToken = useMachineToken()
  const postMessage = useMutation(api.liveChat.postMachineMessage)
  return async ({ ticketId, body, attachments }: {
    ticketId: Id<"tickets">
    body: string
    attachments?: Array<{
      storageId: Id<"_storage">
      name: string
      size?: number
      type?: string
    }>
  }) => {
    if (!machineToken) {
      throw new Error("Token da maquina nao disponivel")
    }
    return postMessage({ machineToken, ticketId, body, attachments })
  }
}
/**
* Hook para marcar mensagens como lidas
*/
/**
 * Returns an async callback that marks the given messages of a ticket as read.
 * The callback throws when no machine token has been loaded yet.
 */
export function useMarkMachineMessagesRead() {
  const machineToken = useMachineToken()
  const markRead = useMutation(api.liveChat.markMachineMessagesRead)
  return async ({ ticketId, messageIds }: {
    ticketId: Id<"tickets">
    messageIds: Id<"ticketChatMessages">[]
  }) => {
    if (!machineToken) {
      throw new Error("Token da maquina nao disponivel")
    }
    return markRead({ machineToken, ticketId, messageIds })
  }
}
/**
* Hook para gerar URL de upload
*/
/**
 * Returns an async callback that requests an attachment upload URL.
 * The callback throws when no machine token has been loaded yet.
 */
export function useGenerateMachineUploadUrl() {
  const machineToken = useMachineToken()
  const generateUrl = useAction(api.liveChat.generateMachineUploadUrl)
  return async ({ fileName, fileType, fileSize }: {
    fileName: string
    fileType: string
    fileSize: number
  }) => {
    if (!machineToken) {
      throw new Error("Token da maquina nao disponivel")
    }
    return generateUrl({ machineToken, fileName, fileType, fileSize })
  }
}

View file

@ -1,36 +1,23 @@
import { ShieldAlert, Mail, RefreshCw } from "lucide-react"
import { useState } from "react"
type DeactivationScreenProps = {
companyName?: string | null
onRetry?: () => Promise<void> | void
}
export function DeactivationScreen({ onRetry }: DeactivationScreenProps) {
const [isRetrying, setIsRetrying] = useState(false)
const handleRetry = async () => {
if (isRetrying || !onRetry) return
setIsRetrying(true)
try {
await onRetry()
} finally {
setIsRetrying(false)
}
}
import { ShieldAlert, Mail } from "lucide-react"
export function DeactivationScreen({ companyName }: { companyName?: string | null }) {
return (
<div className="fixed inset-0 z-50 grid place-items-center overflow-hidden bg-neutral-950 p-6">
<div className="min-h-screen grid place-items-center bg-neutral-950 p-6">
<div className="flex w-full max-w-[720px] flex-col items-center gap-6 rounded-2xl border border-slate-200 bg-white px-8 py-10 shadow-sm">
<div className="flex flex-col items-center gap-3 text-center">
<span className="inline-flex items-center gap-2 rounded-full border border-rose-200 bg-rose-50 px-3 py-1 text-xs font-semibold text-rose-700">
<ShieldAlert className="size-4" /> Acesso bloqueado
</span>
<h1 className="text-2xl font-semibold text-neutral-900">Dispositivo desativado</h1>
<h1 className="text-2xl font-semibold text-neutral-900">Dispositivo desativada</h1>
<p className="max-w-md text-sm text-neutral-600">
Este dispositivo foi desativado temporariamente pelos administradores. Enquanto isso, o acesso ao portal e o
Esta dispositivo foi desativada temporariamente pelos administradores. Enquanto isso, o acesso ao portal e o
envio de informações ficam indisponíveis.
</p>
{companyName ? (
<span className="rounded-full border border-slate-200 bg-slate-50 px-3 py-1 text-xs font-semibold text-neutral-700">
{companyName}
</span>
) : null}
</div>
<div className="w-full max-w-[520px] space-y-4">
@ -42,25 +29,12 @@ export function DeactivationScreen({ onRetry }: DeactivationScreenProps) {
</ul>
</div>
<div className="flex flex-wrap items-center justify-center gap-3">
<a
href="mailto:suporte@rever.com.br"
className="inline-flex items-center gap-2 rounded-full border border-black bg-black px-4 py-2 text-sm font-semibold text-white transition hover:bg-black/90"
>
<Mail className="size-4" /> Falar com o suporte
</a>
{onRetry && (
<button
type="button"
onClick={handleRetry}
disabled={isRetrying}
className="inline-flex items-center gap-2 rounded-full border border-slate-300 bg-white px-4 py-2 text-sm font-semibold text-neutral-700 transition hover:bg-slate-50 disabled:opacity-50"
>
<RefreshCw className={`size-4 ${isRetrying ? "animate-spin" : ""}`} />
{isRetrying ? "Verificando..." : "Verificar novamente"}
</button>
)}
</div>
<a
href="mailto:suporte@rever.com.br"
className="mx-auto inline-flex items-center gap-2 rounded-full border border-black bg-black px-4 py-2 text-sm font-semibold text-white transition hover:bg-black/90"
>
<Mail className="size-4" /> Falar com o suporte
</a>
</div>
</div>
</div>

View file

@ -1,103 +0,0 @@
/**
* MachineStateMonitor - Componente para monitorar o estado da máquina em tempo real
*
* Este componente usa uma subscription Convex para detectar mudanças no estado da máquina:
* - Quando isActive muda para false: máquina foi desativada
* - Quando hasValidToken muda para false: máquina foi resetada (tokens revogados)
*
* O componente não renderiza nada, apenas monitora e chama callbacks quando detecta mudanças.
*/
import { useEffect, useRef } from "react"
import { useQuery, ConvexProvider } from "convex/react"
import type { ConvexReactClient } from "convex/react"
import { api } from "../convex/_generated/api"
import type { Id } from "../convex/_generated/dataModel"
type MachineStateMonitorProps = {
  machineId: string
  onDeactivated?: () => void
  onTokenRevoked?: () => void
  onReactivated?: () => void
}

/**
 * Subscribes to the machine document and fires the matching callback on state
 * transitions (active -> inactive, inactive -> active, valid token -> revoked).
 * Renders nothing.
 */
function MachineStateMonitorInner({ machineId, onDeactivated, onTokenRevoked, onReactivated }: MachineStateMonitorProps) {
  const machineState = useQuery(api.machines.getMachineState, {
    machineId: machineId as Id<"machines">,
  })

  // Refs tracking the previous state so each transition fires only once.
  const previousIsActive = useRef<boolean | null>(null)
  const previousHasValidToken = useRef<boolean | null>(null)
  const initialLoadDone = useRef(false)

  useEffect(() => {
    if (!machineState) return

    // On the first load, check the initial state AND store the values.
    if (!initialLoadDone.current) {
      console.log("[MachineStateMonitor] Carga inicial", {
        isActive: machineState.isActive,
        hasValidToken: machineState.hasValidToken,
        found: machineState.found,
      })
      // If already deactivated on initial load, invoke the callback.
      if (machineState.isActive === false) {
        console.log("[MachineStateMonitor] Máquina já estava desativada")
        onDeactivated?.()
      }
      // If the token is already invalid on initial load, invoke the callback.
      if (machineState.hasValidToken === false) {
        console.log("[MachineStateMonitor] Token já estava revogado")
        onTokenRevoked?.()
      }
      previousIsActive.current = machineState.isActive
      previousHasValidToken.current = machineState.hasValidToken
      initialLoadDone.current = true
      return
    }

    // Active -> inactive transition.
    if (previousIsActive.current === true && machineState.isActive === false) {
      console.log("[MachineStateMonitor] Máquina foi desativada")
      onDeactivated?.()
    }
    // Inactive -> active transition (reactivation).
    if (previousIsActive.current === false && machineState.isActive === true) {
      console.log("[MachineStateMonitor] Máquina foi reativada")
      onReactivated?.()
    }
    // Valid -> invalid token transition (reset).
    if (previousHasValidToken.current === true && machineState.hasValidToken === false) {
      console.log("[MachineStateMonitor] Token foi revogado (reset)")
      onTokenRevoked?.()
    }

    // Store the current values for the next comparison.
    previousIsActive.current = machineState.isActive
    previousHasValidToken.current = machineState.hasValidToken
  }, [machineState, onDeactivated, onTokenRevoked, onReactivated])

  // This component renders nothing.
  return null
}
type MachineStateMonitorWithClientProps = MachineStateMonitorProps & {
  client: ConvexReactClient
}

/**
 * Wrapper that receives the Convex client and wraps the monitor with the provider.
 */
export function MachineStateMonitor({ client, ...props }: MachineStateMonitorWithClientProps) {
  return (
    <ConvexProvider client={client}>
      <MachineStateMonitorInner {...props} />
    </ConvexProvider>
  )
}

View file

@ -1,121 +0,0 @@
/* eslint-disable */
/**
* Generated `api` utility.
*
* THIS CODE IS AUTOMATICALLY GENERATED.
*
* To regenerate, run `npx convex dev`.
* @module
*/
import type * as alerts from "../alerts.js";
import type * as automations from "../automations.js";
import type * as bootstrap from "../bootstrap.js";
import type * as categories from "../categories.js";
import type * as categorySlas from "../categorySlas.js";
import type * as checklistTemplates from "../checklistTemplates.js";
import type * as commentTemplates from "../commentTemplates.js";
import type * as companies from "../companies.js";
import type * as crons from "../crons.js";
import type * as dashboards from "../dashboards.js";
import type * as deviceExportTemplates from "../deviceExportTemplates.js";
import type * as deviceFieldDefaults from "../deviceFieldDefaults.js";
import type * as deviceFields from "../deviceFields.js";
import type * as devices from "../devices.js";
import type * as emprestimos from "../emprestimos.js";
import type * as fields from "../fields.js";
import type * as files from "../files.js";
import type * as incidents from "../incidents.js";
import type * as invites from "../invites.js";
import type * as liveChat from "../liveChat.js";
import type * as machines from "../machines.js";
import type * as metrics from "../metrics.js";
import type * as migrations from "../migrations.js";
import type * as ops from "../ops.js";
import type * as queues from "../queues.js";
import type * as rbac from "../rbac.js";
import type * as reports from "../reports.js";
import type * as revision from "../revision.js";
import type * as seed from "../seed.js";
import type * as slas from "../slas.js";
import type * as teams from "../teams.js";
import type * as ticketFormSettings from "../ticketFormSettings.js";
import type * as ticketFormTemplates from "../ticketFormTemplates.js";
import type * as ticketNotifications from "../ticketNotifications.js";
import type * as tickets from "../tickets.js";
import type * as usbPolicy from "../usbPolicy.js";
import type * as users from "../users.js";
import type {
ApiFromModules,
FilterApi,
FunctionReference,
} from "convex/server";
declare const fullApi: ApiFromModules<{
alerts: typeof alerts;
automations: typeof automations;
bootstrap: typeof bootstrap;
categories: typeof categories;
categorySlas: typeof categorySlas;
checklistTemplates: typeof checklistTemplates;
commentTemplates: typeof commentTemplates;
companies: typeof companies;
crons: typeof crons;
dashboards: typeof dashboards;
deviceExportTemplates: typeof deviceExportTemplates;
deviceFieldDefaults: typeof deviceFieldDefaults;
deviceFields: typeof deviceFields;
devices: typeof devices;
emprestimos: typeof emprestimos;
fields: typeof fields;
files: typeof files;
incidents: typeof incidents;
invites: typeof invites;
liveChat: typeof liveChat;
machines: typeof machines;
metrics: typeof metrics;
migrations: typeof migrations;
ops: typeof ops;
queues: typeof queues;
rbac: typeof rbac;
reports: typeof reports;
revision: typeof revision;
seed: typeof seed;
slas: typeof slas;
teams: typeof teams;
ticketFormSettings: typeof ticketFormSettings;
ticketFormTemplates: typeof ticketFormTemplates;
ticketNotifications: typeof ticketNotifications;
tickets: typeof tickets;
usbPolicy: typeof usbPolicy;
users: typeof users;
}>;
/**
* A utility for referencing Convex functions in your app's public API.
*
* Usage:
* ```js
* const myFunctionReference = api.myModule.myFunction;
* ```
*/
export declare const api: FilterApi<
typeof fullApi,
FunctionReference<any, "public">
>;
/**
* A utility for referencing Convex functions in your app's internal API.
*
* Usage:
* ```js
* const myFunctionReference = internal.myModule.myFunction;
* ```
*/
export declare const internal: FilterApi<
typeof fullApi,
FunctionReference<any, "internal">
>;
export declare const components: {};

View file

@ -1,23 +0,0 @@
/* eslint-disable */
/**
* Generated `api` utility.
*
* THIS CODE IS AUTOMATICALLY GENERATED.
*
* To regenerate, run `npx convex dev`.
* @module
*/
import { anyApi, componentsGeneric } from "convex/server";
/**
* A utility for referencing Convex functions in your app's API.
*
* Usage:
* ```js
* const myFunctionReference = api.myModule.myFunction;
* ```
*/
export const api = anyApi;
export const internal = anyApi;
export const components = componentsGeneric();

View file

@ -1,60 +0,0 @@
/* eslint-disable */
/**
* Generated data model types.
*
* THIS CODE IS AUTOMATICALLY GENERATED.
*
* To regenerate, run `npx convex dev`.
* @module
*/
import type {
DataModelFromSchemaDefinition,
DocumentByName,
TableNamesInDataModel,
SystemTableNames,
} from "convex/server";
import type { GenericId } from "convex/values";
import schema from "../schema.js";
/**
* The names of all of your Convex tables.
*/
export type TableNames = TableNamesInDataModel<DataModel>;
/**
* The type of a document stored in Convex.
*
* @typeParam TableName - A string literal type of the table name (like "users").
*/
export type Doc<TableName extends TableNames> = DocumentByName<
DataModel,
TableName
>;
/**
* An identifier for a document in Convex.
*
* Convex documents are uniquely identified by their `Id`, which is accessible
* on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids).
*
* Documents can be loaded using `db.get(id)` in query and mutation functions.
*
* IDs are just strings at runtime, but this type can be used to distinguish them from other
* strings when type checking.
*
* @typeParam TableName - A string literal type of the table name (like "users").
*/
export type Id<TableName extends TableNames | SystemTableNames> =
GenericId<TableName>;
/**
* A type describing your Convex data model.
*
* This type includes information about what tables you have, the type of
* documents stored in those tables, and the indexes defined on them.
*
* This type is used to parameterize methods like `queryGeneric` and
* `mutationGeneric` to make them type-safe.
*/
export type DataModel = DataModelFromSchemaDefinition<typeof schema>;

View file

@ -1,143 +0,0 @@
/* eslint-disable */
/**
* Generated utilities for implementing server-side Convex query and mutation functions.
*
* THIS CODE IS AUTOMATICALLY GENERATED.
*
* To regenerate, run `npx convex dev`.
* @module
*/
import {
ActionBuilder,
HttpActionBuilder,
MutationBuilder,
QueryBuilder,
GenericActionCtx,
GenericMutationCtx,
GenericQueryCtx,
GenericDatabaseReader,
GenericDatabaseWriter,
} from "convex/server";
import type { DataModel } from "./dataModel.js";
/**
* Define a query in this Convex app's public API.
*
* This function will be allowed to read your Convex database and will be accessible from the client.
*
* @param func - The query function. It receives a {@link QueryCtx} as its first argument.
* @returns The wrapped query. Include this as an `export` to name it and make it accessible.
*/
export declare const query: QueryBuilder<DataModel, "public">;
/**
* Define a query that is only accessible from other Convex functions (but not from the client).
*
* This function will be allowed to read from your Convex database. It will not be accessible from the client.
*
* @param func - The query function. It receives a {@link QueryCtx} as its first argument.
* @returns The wrapped query. Include this as an `export` to name it and make it accessible.
*/
export declare const internalQuery: QueryBuilder<DataModel, "internal">;
/**
* Define a mutation in this Convex app's public API.
*
* This function will be allowed to modify your Convex database and will be accessible from the client.
*
* @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
* @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
*/
export declare const mutation: MutationBuilder<DataModel, "public">;
/**
* Define a mutation that is only accessible from other Convex functions (but not from the client).
*
* This function will be allowed to modify your Convex database. It will not be accessible from the client.
*
* @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
* @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
*/
export declare const internalMutation: MutationBuilder<DataModel, "internal">;
/**
 * Define an action in this Convex app's public API.
 *
 * An action is a function which can execute any JavaScript code, including non-deterministic
 * code and code with side-effects, like calling third-party services.
 * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
 * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
 *
 * @param func - The action. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
 */
export declare const action: ActionBuilder<DataModel, "public">;
/**
 * Define an action that is only accessible from other Convex functions (but not from the client).
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
 */
export declare const internalAction: ActionBuilder<DataModel, "internal">;
/**
 * Define an HTTP action.
 *
 * The wrapped function will be used to respond to HTTP requests received
 * by a Convex deployment if the request matches the path and method where
 * this action is routed. Be sure to route your httpAction in `convex/http.js`.
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument
 * and a Fetch API `Request` object as its second.
 * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
 */
export declare const httpAction: HttpActionBuilder;
/**
 * A set of services for use within Convex query functions.
 *
 * The query context is passed as the first argument to any Convex query
 * function run on the server.
 *
 * This differs from the {@link MutationCtx} because all of the services are
 * read-only.
 */
export type QueryCtx = GenericQueryCtx<DataModel>;
/**
 * A set of services for use within Convex mutation functions.
 *
 * The mutation context is passed as the first argument to any Convex mutation
 * function run on the server.
 */
export type MutationCtx = GenericMutationCtx<DataModel>;
/**
 * A set of services for use within Convex action functions.
 *
 * The action context is passed as the first argument to any Convex action
 * function run on the server.
 */
export type ActionCtx = GenericActionCtx<DataModel>;
/**
 * An interface to read from the database within Convex query functions.
 *
 * The two entry points are {@link DatabaseReader.get}, which fetches a single
 * document by its {@link Id}, or {@link DatabaseReader.query}, which starts
 * building a query.
 */
export type DatabaseReader = GenericDatabaseReader<DataModel>;
/**
 * An interface to read from and write to the database within Convex mutation
 * functions.
 *
 * Convex guarantees that all writes within a single mutation are
 * executed atomically, so you never have to worry about partial writes leaving
 * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control)
 * for the guarantees Convex provides your functions.
 */
export type DatabaseWriter = GenericDatabaseWriter<DataModel>;

View file

@ -1,93 +0,0 @@
/* eslint-disable */
/**
 * Generated utilities for implementing server-side Convex query and mutation functions.
 *
 * THIS CODE IS AUTOMATICALLY GENERATED.
 *
 * To regenerate, run `npx convex dev`.
 * @module
 */
import {
actionGeneric,
httpActionGeneric,
queryGeneric,
mutationGeneric,
internalActionGeneric,
internalMutationGeneric,
internalQueryGeneric,
} from "convex/server";
/**
 * Define a query in this Convex app's public API.
 *
 * This function will be allowed to read your Convex database and will be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export const query = queryGeneric;
/**
 * Define a query that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to read from your Convex database. It will not be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export const internalQuery = internalQueryGeneric;
/**
 * Define a mutation in this Convex app's public API.
 *
 * This function will be allowed to modify your Convex database and will be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export const mutation = mutationGeneric;
/**
 * Define a mutation that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to modify your Convex database. It will not be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export const internalMutation = internalMutationGeneric;
/**
 * Define an action in this Convex app's public API.
 *
 * An action is a function which can execute any JavaScript code, including non-deterministic
 * code and code with side-effects, like calling third-party services.
 * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
 * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
 *
 * @param func - The action. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
 */
export const action = actionGeneric;
/**
 * Define an action that is only accessible from other Convex functions (but not from the client).
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
 */
export const internalAction = internalActionGeneric;
/**
 * Define an HTTP action.
 *
 * The wrapped function will be used to respond to HTTP requests received
 * by a Convex deployment if the request matches the path and method where
 * this action is routed. Be sure to route your httpAction in `convex/http.js`.
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument
 * and a Fetch API `Request` object as its second.
 * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
 */
export const httpAction = httpActionGeneric;

View file

@ -6,18 +6,10 @@
html, body, #root {
height: 100%;
overflow: hidden; /* Remove scrollbars */
}
body {
@apply text-slate-900;
background: transparent;
overflow: hidden; /* Remove scrollbars */
}
/* Fundo padrão para janelas que não são chat minimizado */
.app-bg {
@apply bg-slate-50;
@apply bg-slate-50 text-slate-900;
}
.badge-status {

View file

@ -6,20 +6,9 @@ import { listen } from "@tauri-apps/api/event"
import { Store } from "@tauri-apps/plugin-store"
import { appLocalDataDir, join } from "@tauri-apps/api/path"
import { ExternalLink, Eye, EyeOff, Loader2, RefreshCw } from "lucide-react"
import { ConvexReactClient } from "convex/react"
import { Tabs, TabsContent, TabsList, TabsTrigger } from "./components/ui/tabs"
import { cn } from "./lib/utils"
import { ChatApp } from "./chat"
import { DeactivationScreen } from "./components/DeactivationScreen"
import { MachineStateMonitor } from "./components/MachineStateMonitor"
import { api } from "./convex/_generated/api"
import type { Id } from "./convex/_generated/dataModel"
import type { SessionStartedEvent, UnreadUpdateEvent, NewMessageEvent, SessionEndedEvent } from "./chat/types"
// URL do Convex para subscription em tempo real
const CONVEX_URL = import.meta.env.MODE === "production"
? "https://convex.esdrasrenan.com.br"
: (import.meta.env.VITE_CONVEX_URL ?? "https://convex.esdrasrenan.com.br")
type MachineOs = {
name: string
@ -128,6 +117,7 @@ const apiBaseUrl = normalizeUrl(import.meta.env.VITE_API_BASE_URL, appUrl)
const RUSTDESK_CONFIG_STRING = import.meta.env.VITE_RUSTDESK_CONFIG_STRING?.trim() || null
const RUSTDESK_DEFAULT_PASSWORD = import.meta.env.VITE_RUSTDESK_DEFAULT_PASSWORD?.trim() || null
const RUSTDESK_SYNC_INTERVAL_MS = 60 * 60 * 1000 // 1h
const TOKEN_SELF_HEAL_DEBOUNCE_MS = 30 * 1000
function sanitizeEmail(value: string | null | undefined) {
@ -144,54 +134,6 @@ function isTokenRevokedMessage(input: string) {
)
}
function formatApiError(responseText: string, statusCode: number): string {
try {
const json = JSON.parse(responseText)
if (json.error === "Payload invalido" || json.error === "Payload inválido") {
const details = typeof json.details === "string" ? JSON.parse(json.details) : json.details
if (Array.isArray(details) && details.length > 0) {
const fieldLabels: Record<string, string> = {
"collaborator.email": "E-mail",
"collaborator.name": "Nome",
email: "E-mail",
name: "Nome",
provisioningCode: "Código de ativação",
hostname: "Nome do computador",
}
const messages: string[] = []
for (const err of details) {
const path = Array.isArray(err.path) ? err.path.join(".") : String(err.path ?? "")
const fieldLabel = fieldLabels[path] || path || "Campo"
if (err.code === "invalid_format" && err.format === "email") {
messages.push(`${fieldLabel}: formato de e-mail inválido`)
} else if (err.code === "invalid_format") {
messages.push(`${fieldLabel}: formato inválido`)
} else if (err.code === "too_small" || err.code === "too_short") {
messages.push(`${fieldLabel}: muito curto`)
} else if (err.code === "too_big" || err.code === "too_long") {
messages.push(`${fieldLabel}: muito longo`)
} else if (err.code === "invalid_type") {
messages.push(`${fieldLabel}: valor inválido`)
} else if (err.message) {
messages.push(`${fieldLabel}: ${err.message}`)
} else {
messages.push(`${fieldLabel}: erro de validação`)
}
}
if (messages.length > 0) {
return messages.join("\n")
}
}
}
if (json.error) {
return json.error
}
} catch {
// Não é JSON, retorna o texto original
}
return `Erro no servidor (${statusCode})`
}
function buildRemoteAccessPayload(info: RustdeskInfo | null) {
if (!info) return null
const payload: Record<string, string | undefined> = {
@ -254,10 +196,7 @@ async function writeRustdeskInfo(store: Store, info: RustdeskInfo): Promise<void
function logDesktop(message: string, data?: Record<string, unknown>) {
const enriched = data ? `${message} ${JSON.stringify(data)}` : message
const line = `[raven] ${enriched}`
console.log(line)
// Persiste em arquivo local para facilitar debugging fora do console
invoke("log_app_event", { message: line }).catch(() => {})
console.log(`[raven] ${enriched}`)
}
function bytes(n?: number) {
@ -313,7 +252,7 @@ function App() {
const [token, setToken] = useState<string | null>(null)
const [config, setConfig] = useState<AgentConfig | null>(null)
const [profile, setProfile] = useState<MachineProfile | null>(null)
const [logoSrc, setLogoSrc] = useState<string>("/logo-raven.png")
const [logoSrc, setLogoSrc] = useState<string>(() => `${appUrl}/logo-raven.png`)
const [error, setError] = useState<string | null>(null)
const [busy, setBusy] = useState(false)
const [status, setStatus] = useState<string | null>(null)
@ -330,9 +269,6 @@ function App() {
const selfHealPromiseRef = useRef<Promise<boolean> | null>(null)
const lastHealAtRef = useRef(0)
// Cliente Convex para monitoramento em tempo real do estado da maquina
const [convexClient, setConvexClient] = useState<ConvexReactClient | null>(null)
const [provisioningCode, setProvisioningCode] = useState("")
const [validatedCompany, setValidatedCompany] = useState<{ id: string; name: string; slug: string; tenantId: string } | null>(null)
const [companyName, setCompanyName] = useState("")
@ -351,7 +287,6 @@ function App() {
const emailRegex = useRef(/^[^\s@]+@[^\s@]+\.[^\s@]{2,}$/i)
const isEmailValid = useMemo(() => emailRegex.current.test(collabEmail.trim()), [collabEmail])
const ensureProfile = useCallback(async () => {
if (profile) return profile
const fresh = await invoke<MachineProfile>("collect_machine_profile")
@ -422,15 +357,8 @@ function App() {
status: "online",
intervalSeconds: nextConfig.heartbeatIntervalSec ?? 300,
})
// Iniciar sistema de chat apos o agente
await invoke("start_chat_polling", {
baseUrl: apiBaseUrl,
convexUrl: "https://convex.esdrasrenan.com.br",
token: data.machineToken,
})
logDesktop("chat:started")
} catch (err) {
console.error("Falha ao reiniciar heartbeat/chat", err)
console.error("Falha ao reiniciar heartbeat", err)
}
return nextConfig
@ -605,15 +533,8 @@ function App() {
status: "online",
intervalSeconds: 300,
})
// Iniciar sistema de chat apos o agente
await invoke("start_chat_polling", {
baseUrl: apiBaseUrl,
convexUrl: "https://convex.esdrasrenan.com.br",
token,
})
logDesktop("chat:started:validation")
} catch (err) {
console.error("Falha ao iniciar heartbeat/chat em segundo plano", err)
console.error("Falha ao iniciar heartbeat em segundo plano", err)
}
const payload = await res.clone().json().catch(() => null)
if (payload && typeof payload === "object" && "machine" in payload) {
@ -705,88 +626,6 @@ useEffect(() => {
rustdeskInfoRef.current = rustdeskInfo
}, [rustdeskInfo])
// Cria/destrói cliente Convex quando o token muda
useEffect(() => {
if (!token) {
if (convexClient) {
convexClient.close()
setConvexClient(null)
}
return
}
// Cria novo cliente Convex para monitoramento em tempo real
const client = new ConvexReactClient(CONVEX_URL, {
unsavedChangesWarning: false,
})
setConvexClient(client)
return () => {
client.close()
}
}, [token]) // eslint-disable-line react-hooks/exhaustive-deps
// Callbacks para quando a máquina for desativada, resetada ou reativada
const handleMachineDeactivated = useCallback(() => {
console.log("[App] Máquina foi desativada - mostrando tela de bloqueio")
setIsMachineActive(false)
}, [])
const handleMachineReactivated = useCallback(() => {
console.log("[App] Máquina foi reativada - liberando acesso")
setIsMachineActive(true)
}, [])
// Callback para o botão "Verificar novamente" na tela de desativação
// Usa o convexClient diretamente para fazer uma query manual
const handleRetryCheck = useCallback(async () => {
if (!convexClient || !config?.machineId) return
console.log("[App] Verificando estado da máquina manualmente...")
try {
const state = await convexClient.query(api.machines.getMachineState, {
machineId: config.machineId as Id<"machines">,
})
console.log("[App] Estado da máquina:", state)
if (state?.isActive) {
console.log("[App] Máquina ativa - liberando acesso")
setIsMachineActive(true)
}
} catch (err) {
console.error("[App] Erro ao verificar estado:", err)
}
}, [convexClient, config?.machineId])
const handleTokenRevoked = useCallback(async () => {
console.log("[App] Token foi revogado - voltando para tela de registro")
if (store) {
try {
await store.delete("token")
await store.delete("config")
await store.save()
} catch (err) {
console.error("Falha ao limpar store", err)
}
}
tokenVerifiedRef.current = false
autoLaunchRef.current = false
setToken(null)
setConfig(null)
setStatus(null)
setIsMachineActive(true)
setIsLaunchingSystem(false)
// Limpa campos de input para novo registro
setProvisioningCode("")
setCollabEmail("")
setCollabName("")
setValidatedCompany(null)
setCodeStatus(null)
setCompanyName("")
setError("Este dispositivo foi resetado. Informe o código de provisionamento para reconectar.")
// Força navegar de volta para a página inicial do app Tauri (não do servidor web)
// URL do app Tauri em produção é http://tauri.localhost/, em dev é http://localhost:1420/
const appUrl = import.meta.env.MODE === "production" ? "http://tauri.localhost/" : "http://localhost:1420/"
window.location.href = appUrl
}, [store])
useEffect(() => {
if (!store || !config) return
@ -895,165 +734,133 @@ const resolvedAppUrl = useMemo(() => {
return normalized
}, [config?.appUrl])
// Funcao simplificada de sync - sempre le do disco para evitar race conditions
const syncRemoteAccessDirect = useCallback(
async (info: RustdeskInfo, allowRetry = true): Promise<boolean> => {
try {
// Sempre le do disco para evitar race conditions com state React
const freshStore = await loadStore()
const freshConfig = await readConfig(freshStore)
const freshToken = await readToken(freshStore)
const syncRemoteAccessNow = useCallback(
async (info: RustdeskInfo, allowRetry = true) => {
if (!store) return
if (!config?.machineId) {
logDesktop("remoteAccess:sync:skipped", { reason: "unregistered" })
return
}
const payload = buildRemoteAccessPayload(info)
if (!payload) return
if (!freshConfig?.machineId || !freshToken) {
logDesktop("remoteAccess:sync:skip", {
hasMachineId: !!freshConfig?.machineId,
hasToken: !!freshToken,
})
return false
const resolveToken = async (allowHeal: boolean): Promise<string | null> => {
let currentToken = token
if (!currentToken) {
currentToken = (await readToken(store)) ?? null
if (currentToken) {
setToken(currentToken)
}
}
if (!currentToken && allowHeal) {
const healed = await attemptSelfHeal("remote-access")
if (healed) {
currentToken = (await readToken(store)) ?? null
if (currentToken) {
setToken(currentToken)
}
}
}
return currentToken
}
const payload = buildRemoteAccessPayload(info)
if (!payload) return false
logDesktop("remoteAccess:sync:start", { id: info.id })
const sendRequest = async (machineToken: string, retryAllowed: boolean): Promise<void> => {
const response = await fetch(`${apiBaseUrl}/api/machines/remote-access`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"Idempotency-Key": `${freshConfig.machineId}:RustDesk:${info.id}`,
"Idempotency-Key": `${config?.machineId ?? "unknown"}:RustDesk:${info.id}`,
},
body: JSON.stringify({ machineToken: freshToken, ...payload }),
body: JSON.stringify({ machineToken, ...payload }),
})
if (response.ok) {
const nextInfo: RustdeskInfo = { ...info, lastSyncedAt: Date.now(), lastError: null }
await writeRustdeskInfo(freshStore, nextInfo)
setRustdeskInfo(nextInfo)
logDesktop("remoteAccess:sync:success", { id: info.id })
return true
}
const errorText = await response.text()
logDesktop("remoteAccess:sync:error", { status: response.status, error: errorText.slice(0, 200) })
// Se token invalido, tenta self-heal uma vez
if (allowRetry && (response.status === 401 || isTokenRevokedMessage(errorText))) {
const healed = await attemptSelfHeal("remote-access")
if (healed) {
return syncRemoteAccessDirect(info, false)
if (!response.ok) {
logDesktop("remoteAccess:sync:error", { status: response.status })
const text = await response.text()
if (retryAllowed && (response.status === 401 || isTokenRevokedMessage(text))) {
const healed = await attemptSelfHeal("remote-access")
if (healed) {
const refreshedToken = await resolveToken(false)
if (refreshedToken) {
return sendRequest(refreshedToken, false)
}
}
}
throw new Error(text.slice(0, 300) || "Falha ao registrar acesso remoto")
}
// Salva erro no store
const failedInfo: RustdeskInfo = { ...info, lastError: errorText.slice(0, 200) }
await writeRustdeskInfo(freshStore, failedInfo)
setRustdeskInfo(failedInfo)
return false
const nextInfo: RustdeskInfo = { ...info, lastSyncedAt: Date.now(), lastError: null }
await writeRustdeskInfo(store, nextInfo)
setRustdeskInfo(nextInfo)
logDesktop("remoteAccess:sync:success", { id: info.id })
}
try {
const machineToken = await resolveToken(true)
if (!machineToken) {
const failedInfo: RustdeskInfo = {
...info,
lastError: "Token indisponível para sincronizar acesso remoto",
}
await writeRustdeskInfo(store, failedInfo)
setRustdeskInfo(failedInfo)
logDesktop("remoteAccess:sync:skipped", { reason: "missing-token" })
return
}
await sendRequest(machineToken, allowRetry)
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
logDesktop("remoteAccess:sync:exception", { error: message })
return false
console.error("Falha ao sincronizar acesso remoto com a plataforma", error)
const failedInfo: RustdeskInfo = { ...info, lastError: message }
await writeRustdeskInfo(store, failedInfo)
setRustdeskInfo(failedInfo)
if (allowRetry && isTokenRevokedMessage(message)) {
const healed = await attemptSelfHeal("remote-access")
if (healed) {
const refreshedToken = await resolveToken(false)
if (refreshedToken) {
return syncRemoteAccessNow(failedInfo, false)
}
}
}
logDesktop("remoteAccess:sync:failed", { id: info.id, error: message })
}
},
[attemptSelfHeal]
[store, token, config?.machineId, attemptSelfHeal, setToken]
)
const handleRustdeskProvision = useCallback(
async (payload: RustdeskProvisioningResult) => {
logDesktop("rustdesk:provision:start", { id: payload.id, hasStore: !!store })
if (!store) {
logDesktop("rustdesk:provision:skip:no-store")
return
}
if (!store) return
const normalized: RustdeskInfo = {
...payload,
installedVersion: payload.installedVersion ?? null,
lastSyncedAt: rustdeskInfoRef.current?.lastSyncedAt ?? null,
lastError: null,
}
try {
await writeRustdeskInfo(store, normalized)
logDesktop("rustdesk:provision:saved", { id: normalized.id })
} catch (error) {
logDesktop("rustdesk:provision:save-error", { error: String(error) })
throw error
}
await writeRustdeskInfo(store, normalized)
setRustdeskInfo(normalized)
// Recarrega o config diretamente do store para garantir que temos o machineId mais recente
// (evita race condition quando register() chama ensureRustdesk antes do state React atualizar)
const freshStore = await loadStore()
const freshConfig = await readConfig(freshStore)
const freshToken = await readToken(freshStore)
if (!freshConfig?.machineId) {
logDesktop("rustdesk:provision:sync-skipped", { reason: "no-machineId-in-store" })
return
}
if (!freshToken) {
logDesktop("rustdesk:provision:sync-skipped", { reason: "no-token-in-store" })
return
}
// Faz o sync diretamente com os dados frescos do store
try {
const syncPayload = buildRemoteAccessPayload(normalized)
if (!syncPayload) {
logDesktop("rustdesk:provision:sync-skipped", { reason: "invalid-payload" })
return
}
const response = await fetch(`${apiBaseUrl}/api/machines/remote-access`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"Idempotency-Key": `${freshConfig.machineId}:RustDesk:${normalized.id}`,
},
body: JSON.stringify({ machineToken: freshToken, ...syncPayload }),
})
if (response.ok) {
const nextInfo: RustdeskInfo = { ...normalized, lastSyncedAt: Date.now(), lastError: null }
await writeRustdeskInfo(freshStore, nextInfo)
setRustdeskInfo(nextInfo)
logDesktop("rustdesk:provision:synced", { id: normalized.id })
} else {
const errorText = await response.text().catch(() => "")
logDesktop("rustdesk:provision:sync-error", { status: response.status, error: errorText.slice(0, 200) })
}
} catch (error) {
logDesktop("rustdesk:provision:sync-error", { error: String(error) })
}
await syncRemoteAccessNow(normalized)
},
[store]
[store, syncRemoteAccessNow]
)
const ensureRustdesk = useCallback(async () => {
logDesktop("rustdesk:ensure:start", { hasStore: !!store, machineId: config?.machineId ?? null })
if (!store) {
logDesktop("rustdesk:ensure:skip:no-store")
return null
}
if (!config?.machineId) {
logDesktop("rustdesk:skip:no-machine-id")
return null
}
if (!store) return null
setIsRustdeskProvisioning(true)
try {
logDesktop("rustdesk:ensure:invoking", { machineId: config.machineId })
const payload = await invoke<RustdeskProvisioningResult>("ensure_rustdesk_and_emit", {
configString: RUSTDESK_CONFIG_STRING || null,
password: RUSTDESK_DEFAULT_PASSWORD || null,
machineId: config.machineId,
machineId: config?.machineId ?? null,
})
logDesktop("rustdesk:ensure:invoked", { id: payload.id, version: payload.installedVersion })
await handleRustdeskProvision(payload)
logDesktop("rustdesk:ensure:complete", { id: payload.id })
return payload
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
if (message.toLowerCase().includes("apenas no windows")) {
console.info("Provisionamento do RustDesk ignorado (plataforma não suportada)")
} else {
logDesktop("rustdesk:ensure:error", { error: message })
console.error("Falha ao provisionar RustDesk", error)
}
return null
@ -1089,157 +896,23 @@ const resolvedAppUrl = useMemo(() => {
}
}, [store, handleRustdeskProvision])
// Bootstrap do RustDesk + retry simplificado (60s)
useEffect(() => {
if (!store || !config?.machineId) return
let disposed = false
async function bootstrap() {
// Se nao tem rustdeskInfo, provisiona primeiro
if (!rustdeskInfo && !isRustdeskProvisioning && !rustdeskBootstrapRef.current) {
rustdeskBootstrapRef.current = true
try {
await ensureRustdesk()
} finally {
rustdeskBootstrapRef.current = false
}
return // handleRustdeskProvision fara o sync
}
// Se ja tem rustdeskInfo mas nunca sincronizou, tenta sync
if (rustdeskInfo && !rustdeskInfo.lastSyncedAt) {
logDesktop("remoteAccess:sync:bootstrap", { id: rustdeskInfo.id })
await syncRemoteAccessDirect(rustdeskInfo)
}
}
bootstrap()
// Retry a cada 30s se nunca sincronizou (o Rust faz o sync automaticamente)
const interval = setInterval(async () => {
if (disposed) return
try {
const freshStore = await loadStore()
const freshRustdesk = await readRustdeskInfo(freshStore)
if (freshRustdesk && !freshRustdesk.lastSyncedAt) {
logDesktop("remoteAccess:sync:retry:fallback", { id: freshRustdesk.id })
// Re-invoca o Rust para tentar sync novamente
await invoke("ensure_rustdesk_and_emit", {
configString: RUSTDESK_CONFIG_STRING || null,
password: RUSTDESK_DEFAULT_PASSWORD || null,
machineId: config?.machineId,
})
}
} catch (err) {
logDesktop("remoteAccess:sync:retry:error", { error: String(err) })
}
}, 30_000)
return () => {
disposed = true
clearInterval(interval)
}
}, [store, config?.machineId, rustdeskInfo, isRustdeskProvisioning, ensureRustdesk, syncRemoteAccessDirect])
// Listeners de eventos do chat (apenas para logging - a janela nativa e gerenciada pelo Rust)
useEffect(() => {
if (!token) return
let disposed = false
const unlisteners: Array<() => void> = []
// Listener para nova sessao de chat
listen<SessionStartedEvent>("raven://chat/session-started", (event) => {
if (disposed) return
logDesktop("chat:session-started", { ticketId: event.payload.session.ticketId, sessionId: event.payload.session.sessionId })
}).then(unlisten => {
if (disposed) unlisten()
else unlisteners.push(unlisten)
}).catch(err => console.error("Falha ao registrar listener session-started:", err))
// Listener para sessao encerrada
listen<SessionEndedEvent>("raven://chat/session-ended", (event) => {
if (disposed) return
logDesktop("chat:session-ended", { ticketId: event.payload.ticketId, sessionId: event.payload.sessionId })
}).then(unlisten => {
if (disposed) unlisten()
else unlisteners.push(unlisten)
}).catch(err => console.error("Falha ao registrar listener session-ended:", err))
// Listener para atualizacao de mensagens nao lidas
listen<UnreadUpdateEvent>("raven://chat/unread-update", (event) => {
if (disposed) return
logDesktop("chat:unread-update", { totalUnread: event.payload.totalUnread, sessionsCount: event.payload.sessions?.length ?? 0 })
}).then(unlisten => {
if (disposed) unlisten()
else unlisteners.push(unlisten)
}).catch(err => console.error("Falha ao registrar listener unread-update:", err))
// Listener para nova mensagem (a janela de chat nativa e aberta automaticamente pelo Rust)
listen<NewMessageEvent>("raven://chat/new-message", (event) => {
if (disposed) return
logDesktop("chat:new-message", { totalUnread: event.payload.totalUnread, newCount: event.payload.newCount })
}).then(unlisten => {
if (disposed) unlisten()
else unlisteners.push(unlisten)
}).catch(err => console.error("Falha ao registrar listener new-message:", err))
return () => {
disposed = true
unlisteners.forEach(unlisten => unlisten())
}
}, [token])
/* Assinatura direta no Convex para abrir/minimizar chat quando houver novas mensagens
* (desativada: o Rust ja gerencia realtime via WS e eventos Tauri)
useEffect(() => {
if (!token) return
let prevUnread = 0
let unsub: (() => void) | null = null
let disposed = false
subscribeMachineUpdates(
(payload) => {
if (disposed || !payload) return
const totalUnread = payload.totalUnread ?? 0
const hasSessions = (payload.sessions ?? []).length > 0
// Abre/minimiza chat quando aparecem novas não lidas
if (hasSessions && totalUnread > prevUnread) {
const session = payload.sessions[0]
invoke("open_chat_window", { ticketId: session.ticketId, ticketRef: session.ticketRef }).catch(console.error)
// Minimiza para não ser intrusivo
invoke("set_chat_minimized", { ticketId: session.ticketId, minimized: true }).catch(console.error)
}
prevUnread = totalUnread
},
(err) => {
if (disposed) return
console.error("chat updates (Convex) erro:", err)
const msg = (err?.message || "").toLowerCase()
if (msg.includes("token de máquina") || msg.includes("revogado") || msg.includes("expirado") || msg.includes("inválido")) {
// Token inválido/expirado no Convex → tenta autoregistrar de novo
attemptSelfHeal("convex-subscribe").catch(console.error)
}
}
).then((u) => {
// Se o effect já foi desmontado antes da Promise resolver, cancelar imediatamente
if (disposed) {
u()
} else {
unsub = u
}
useEffect(() => {
if (!store) return
if (!rustdeskInfo && !isRustdeskProvisioning && !rustdeskBootstrapRef.current) {
rustdeskBootstrapRef.current = true
ensureRustdesk().finally(() => {
rustdeskBootstrapRef.current = false
})
return () => {
disposed = true
unsub?.()
return
}
if (rustdeskInfo && !isRustdeskProvisioning) {
const lastSync = rustdeskInfo.lastSyncedAt ?? 0
const needsSync = Date.now() - lastSync > RUSTDESK_SYNC_INTERVAL_MS
if (needsSync) {
syncRemoteAccessNow(rustdeskInfo)
}
}, [token, attemptSelfHeal])
*/
}
}, [store, rustdeskInfo, ensureRustdesk, syncRemoteAccessNow, isRustdeskProvisioning])
async function register() {
if (!profile) return
@ -1302,7 +975,7 @@ const resolvedAppUrl = useMemo(() => {
})
if (!res.ok) {
const text = await res.text()
throw new Error(formatApiError(text, res.status))
throw new Error(`Falha no registro (${res.status}): ${text.slice(0, 300)}`)
}
const data = (await res.json()) as MachineRegisterResponse
@ -1316,23 +989,10 @@ const resolvedAppUrl = useMemo(() => {
},
})
// Provisiona RustDesk em background (fire-and-forget)
// O Rust faz o sync com o backend automaticamente, sem passar pelo CSP do webview
logDesktop("register:rustdesk:start", { machineId: data.machineId })
invoke<RustdeskProvisioningResult>("ensure_rustdesk_and_emit", {
configString: RUSTDESK_CONFIG_STRING || null,
password: RUSTDESK_DEFAULT_PASSWORD || null,
machineId: data.machineId,
}).then((result) => {
logDesktop("register:rustdesk:done", { machineId: data.machineId, id: result.id })
}).catch((err) => {
const msg = err instanceof Error ? err.message : String(err)
if (!msg.toLowerCase().includes("apenas no windows")) {
logDesktop("register:rustdesk:error", { error: msg })
}
})
await ensureRustdesk()
logDesktop("register:rustdesk:done", { machineId: data.machineId })
// Redireciona imediatamente (nao espera RustDesk)
// Abre o sistema imediatamente após registrar (evita ficar com token inválido no fluxo antigo)
try {
await fetch(`${apiBaseUrl}/api/machines/sessions`, {
method: "POST",
@ -1343,10 +1003,7 @@ const resolvedAppUrl = useMemo(() => {
} catch {}
const persona = (data.persona ?? "collaborator").toLowerCase() === "manager" ? "manager" : "collaborator"
const redirectTarget = persona === "manager" ? "/dashboard" : "/portal/tickets"
// Proteção extra: nunca usar localhost em produção
const safeAppUrl = resolvedAppUrl.includes("localhost") ? "https://tickets.esdrasrenan.com.br" : resolvedAppUrl
const url = `${safeAppUrl}/machines/handshake?token=${encodeURIComponent(data.machineToken)}&redirect=${encodeURIComponent(redirectTarget)}`
logDesktop("register:redirect", { url: url.replace(/token=[^&]+/, "token=***") })
const url = `${resolvedAppUrl}/machines/handshake?token=${encodeURIComponent(data.machineToken)}&redirect=${encodeURIComponent(redirectTarget)}`
window.location.href = url
} catch (err) {
setError(err instanceof Error ? err.message : String(err))
@ -1357,52 +1014,9 @@ const resolvedAppUrl = useMemo(() => {
const openSystem = useCallback(async () => {
if (!token) return
if (!isMachineActive) {
setIsLaunchingSystem(false)
return
}
setIsLaunchingSystem(true)
// Recarrega store do disco para pegar dados que o Rust salvou diretamente
// e sincroniza RustDesk antes de redirecionar (fire-and-forget com timeout)
try {
if (store && config?.machineId) {
const freshStore = await loadStore()
const freshRustdesk = await readRustdeskInfo(freshStore)
if (freshRustdesk && (!freshRustdesk.lastSyncedAt || Date.now() - freshRustdesk.lastSyncedAt > 60000)) {
logDesktop("openSystem:rustdesk:sync:start", { id: freshRustdesk.id })
const payload = buildRemoteAccessPayload(freshRustdesk)
if (payload) {
const syncPromise = fetch(`${apiBaseUrl}/api/machines/remote-access`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"Idempotency-Key": `${config.machineId}:RustDesk:${freshRustdesk.id}`,
},
body: JSON.stringify({ machineToken: token, ...payload }),
}).then(async (syncRes) => {
if (syncRes.ok) {
logDesktop("openSystem:rustdesk:sync:success", { id: freshRustdesk.id })
const nextInfo: RustdeskInfo = { ...freshRustdesk, lastSyncedAt: Date.now(), lastError: null }
await writeRustdeskInfo(freshStore, nextInfo)
setRustdeskInfo(nextInfo)
} else {
logDesktop("openSystem:rustdesk:sync:error", { status: syncRes.status })
}
}).catch((err) => {
logDesktop("openSystem:rustdesk:sync:failed", { error: String(err) })
})
// Espera no maximo 3s pelo sync, depois continua
await Promise.race([syncPromise, new Promise((r) => setTimeout(r, 3000))])
}
}
}
} catch (syncErr) {
logDesktop("openSystem:rustdesk:sync:exception", { error: String(syncErr) })
}
try {
// Tenta criar a sessao via API (evita dependencia de redirecionamento + cookies em 3xx)
// Tenta criar a sessão via API (evita dependência de redirecionamento + cookies em 3xx)
const res = await fetch(`${apiBaseUrl}/api/machines/sessions`, {
method: "POST",
credentials: "include",
@ -1420,6 +1034,7 @@ const resolvedAppUrl = useMemo(() => {
setError(null)
}
if (!currentActive) {
setError("Esta dispositivo está desativada. Entre em contato com o suporte da Rever para reativar o acesso.")
setIsLaunchingSystem(false)
return
}
@ -1427,8 +1042,14 @@ const resolvedAppUrl = useMemo(() => {
}
} else {
if (res.status === 423) {
const payload = await res.clone().json().catch(() => null)
const message =
payload && typeof payload === "object" && typeof (payload as { error?: unknown }).error === "string"
? ((payload as { error?: string }).error ?? "").trim()
: ""
setIsMachineActive(false)
setIsLaunchingSystem(false)
setError(message.length > 0 ? message : "Esta dispositivo está desativada. Entre em contato com o suporte da Rever.")
return
}
// Se sessão falhar, tenta identificar token inválido/expirado
@ -1473,12 +1094,9 @@ const resolvedAppUrl = useMemo(() => {
const persona = (config?.accessRole ?? "collaborator") === "manager" ? "manager" : "collaborator"
// Envia para a página inicial apropriada após autenticar cookies/sessão
const redirectTarget = persona === "manager" ? "/dashboard" : "/portal/tickets"
// Proteção extra: nunca usar localhost em produção
const safeAppUrl = resolvedAppUrl.includes("localhost") ? "https://tickets.esdrasrenan.com.br" : resolvedAppUrl
const url = `${safeAppUrl}/machines/handshake?token=${encodeURIComponent(token)}&redirect=${encodeURIComponent(redirectTarget)}`
logDesktop("openSystem:redirect", { url: url.replace(/token=[^&]+/, "token=***") })
const url = `${resolvedAppUrl}/machines/handshake?token=${encodeURIComponent(token)}&redirect=${encodeURIComponent(redirectTarget)}`
window.location.href = url
}, [token, config?.accessRole, config?.machineId, resolvedAppUrl, store, isMachineActive])
}, [token, config?.accessRole, resolvedAppUrl, store])
async function reprovision() {
if (!store) return
@ -1522,7 +1140,7 @@ const resolvedAppUrl = useMemo(() => {
})
if (!res.ok) {
const text = await res.text()
throw new Error(formatApiError(text, res.status))
throw new Error(`Falha ao enviar inventário (${res.status}): ${text.slice(0, 200)}`)
}
} catch (err) {
setError(err instanceof Error ? err.message : String(err))
@ -1583,28 +1201,14 @@ const resolvedAppUrl = useMemo(() => {
if (!token) return
if (autoLaunchRef.current) return
if (!tokenVerifiedRef.current) return
if (!isMachineActive) return // Não redireciona se a máquina estiver desativada
autoLaunchRef.current = true
setIsLaunchingSystem(true)
openSystem()
}, [token, status, config?.accessRole, openSystem, tokenValidationTick, isMachineActive])
}, [token, status, config?.accessRole, openSystem, tokenValidationTick])
// Quando há token persistido (dispositivo já provisionado) e ainda não
// disparamos o auto-launch, exibimos diretamente a tela de loading da
// plataforma para evitar piscar o card de resumo/inventário.
// IMPORTANTE: Sempre renderiza o MachineStateMonitor para detectar desativação em tempo real
if (((token && !autoLaunchRef.current) || (isLaunchingSystem && token)) && isMachineActive) {
if (isLaunchingSystem && token) {
return (
<div className="min-h-screen grid place-items-center bg-slate-50 p-6">
{/* Monitor de estado da máquina - deve rodar mesmo durante loading */}
{token && config?.machineId && convexClient && (
<MachineStateMonitor
client={convexClient}
machineId={config.machineId}
onDeactivated={handleMachineDeactivated}
onTokenRevoked={handleTokenRevoked}
/>
)}
<div className="flex flex-col items-center gap-3 rounded-2xl border border-slate-200 bg-white px-8 py-10 shadow-sm">
<Loader2 className="size-6 animate-spin text-neutral-700" />
<p className="text-sm font-medium text-neutral-800">Abrindo plataforma da Rever</p>
@ -1614,31 +1218,11 @@ const resolvedAppUrl = useMemo(() => {
)
}
// Monitor sempre ativo quando há token e machineId
const machineMonitor = token && config?.machineId && convexClient ? (
<MachineStateMonitor
client={convexClient}
machineId={config.machineId}
onDeactivated={handleMachineDeactivated}
onTokenRevoked={handleTokenRevoked}
onReactivated={handleMachineReactivated}
/>
) : null
// Tela de desativação (renderizada separadamente para evitar container com fundo claro)
if (token && !isMachineActive) {
return (
<>
{machineMonitor}
<DeactivationScreen companyName={companyName} onRetry={handleRetryCheck} />
</>
)
}
return (
<div className="min-h-screen grid place-items-center bg-slate-50 p-6">
{/* Monitor de estado da maquina em tempo real via Convex */}
{machineMonitor}
<div className="min-h-screen grid place-items-center p-6">
{token && !isMachineActive ? (
<DeactivationScreen companyName={companyName} />
) : (
<div className="w-full max-w-[720px] rounded-2xl border border-slate-200 bg-white p-6 shadow-sm">
<div className="mb-6 flex flex-col items-center gap-4 text-center">
<img
@ -1646,23 +1230,16 @@ const resolvedAppUrl = useMemo(() => {
alt="Logotipo Raven"
width={160}
height={160}
className="h-16 w-auto md:h-20"
className="h-14 w-auto md:h-16"
onError={() => {
if (logoFallbackRef.current) return
logoFallbackRef.current = true
setLogoSrc(`${appUrl}/logo-raven.png`)
setLogoSrc(`${appUrl}/raven.png`)
}}
/>
<div className="flex flex-col items-center gap-2">
<span className="text-lg font-semibold text-neutral-900">Raven</span>
<div className="flex flex-col items-center gap-1">
<span className="inline-flex whitespace-nowrap rounded-full bg-neutral-900 px-2.5 py-1 text-[11px] font-medium text-white">
Plataforma de
</span>
<span className="inline-flex whitespace-nowrap rounded-full bg-neutral-900 px-2.5 py-1 text-[11px] font-medium text-white">
Chamados
</span>
</div>
<span className="text-xs text-neutral-500">Raven</span>
<span className="text-2xl font-semibold text-neutral-900">Sistema de chamados</span>
<StatusBadge status={status} />
</div>
</div>
@ -1766,13 +1343,10 @@ const resolvedAppUrl = useMemo(() => {
</div>
) : null}
<div className="mt-2 flex gap-2">
<button
disabled={busy || !validatedCompany || !isEmailValid || !collabName.trim() || provisioningCode.trim().length < 32}
onClick={register}
className="rounded-lg border border-black bg-black px-3 py-2 text-sm font-semibold text-white hover:bg-black/90 disabled:opacity-60"
>
Registrar dispositivo
</button>
<button disabled={busy || !validatedCompany || !isEmailValid || !collabName.trim() || provisioningCode.trim().length < 32} onClick={register} className="rounded-lg border border-black bg-black px-3 py-2 text-sm font-semibold text-white hover:bg-black/90 disabled:opacity-60">Registrar dispositivo</button>
{isRustdeskProvisioning ? (
<p className="text-xs text-neutral-500">Preparando cliente de acesso remoto (RustDesk)...</p>
) : null}
</div>
</div>
) : (
@ -1866,6 +1440,7 @@ const resolvedAppUrl = useMemo(() => {
</div>
)}
</div>
)}
</div>
)
}
@ -1892,19 +1467,5 @@ function StatusBadge({ status, className }: { status: string | null; className?:
)
}
// Roteamento simples baseado em query params (compativel com Tauri SPA)
function RootApp() {
const params = new URLSearchParams(window.location.search)
const view = params.get("view")
// Janela de chat flutuante (view=chat ou path=/chat para compatibilidade)
if (view === "chat" || window.location.pathname === "/chat") {
return <ChatApp />
}
// Rota padrao - aplicacao principal
return <App />
}
const root = document.getElementById("root") || (() => { const el = document.createElement("div"); el.id = "root"; document.body.appendChild(el); return el })()
createRoot(root).render(<RootApp />)
createRoot(root).render(<App />)

View file

@ -19,13 +19,7 @@
"noUnusedParameters": true,
"noFallthroughCasesInSwitch": true,
"jsx": "react-jsx",
"types": ["vite/client"],
/* Paths */
"baseUrl": ".",
"paths": {
"@convex/_generated/*": ["./src/convex/_generated/*"]
}
"types": ["vite/client"]
},
"include": ["src"]
}

View file

@ -1,6 +1,5 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";
import { resolve } from "path";
const host = process.env.TAURI_DEV_HOST;
@ -8,13 +7,6 @@ const host = process.env.TAURI_DEV_HOST;
export default defineConfig(async () => ({
plugins: [react()],
resolve: {
alias: {
// Usar arquivos _generated locais para evitar problemas de type-check
"@convex/_generated": resolve(__dirname, "./src/convex/_generated"),
},
},
// Vite options tailored for Tauri development and only applied in `tauri dev` or `tauri build`
//
// 1. prevent Vite from obscuring rust errors

2172
bun.lock

File diff suppressed because it is too large Load diff

View file

@ -1,7 +1,6 @@
# Welcome to your Convex functions directory!
CI note: touching a file under `convex/**` intentionally triggers the Convex deploy job.
// minor note to bump convex deploy
Write your Convex functions here.
See https://docs.convex.dev/functions for more.

View file

@ -9,11 +9,10 @@
*/
import type * as alerts from "../alerts.js";
import type * as automations from "../automations.js";
import type * as alerts_actions from "../alerts_actions.js";
import type * as bootstrap from "../bootstrap.js";
import type * as categories from "../categories.js";
import type * as categorySlas from "../categorySlas.js";
import type * as checklistTemplates from "../checklistTemplates.js";
import type * as commentTemplates from "../commentTemplates.js";
import type * as companies from "../companies.js";
import type * as crons from "../crons.js";
@ -22,16 +21,13 @@ import type * as deviceExportTemplates from "../deviceExportTemplates.js";
import type * as deviceFieldDefaults from "../deviceFieldDefaults.js";
import type * as deviceFields from "../deviceFields.js";
import type * as devices from "../devices.js";
import type * as emprestimos from "../emprestimos.js";
import type * as fields from "../fields.js";
import type * as files from "../files.js";
import type * as incidents from "../incidents.js";
import type * as invites from "../invites.js";
import type * as liveChat from "../liveChat.js";
import type * as machines from "../machines.js";
import type * as metrics from "../metrics.js";
import type * as migrations from "../migrations.js";
import type * as ops from "../ops.js";
import type * as queues from "../queues.js";
import type * as rbac from "../rbac.js";
import type * as reports from "../reports.js";
@ -43,7 +39,6 @@ import type * as ticketFormSettings from "../ticketFormSettings.js";
import type * as ticketFormTemplates from "../ticketFormTemplates.js";
import type * as ticketNotifications from "../ticketNotifications.js";
import type * as tickets from "../tickets.js";
import type * as usbPolicy from "../usbPolicy.js";
import type * as users from "../users.js";
import type {
@ -52,13 +47,20 @@ import type {
FunctionReference,
} from "convex/server";
/**
* A utility for referencing Convex functions in your app's API.
*
* Usage:
* ```js
* const myFunctionReference = api.myModule.myFunction;
* ```
*/
declare const fullApi: ApiFromModules<{
alerts: typeof alerts;
automations: typeof automations;
alerts_actions: typeof alerts_actions;
bootstrap: typeof bootstrap;
categories: typeof categories;
categorySlas: typeof categorySlas;
checklistTemplates: typeof checklistTemplates;
commentTemplates: typeof commentTemplates;
companies: typeof companies;
crons: typeof crons;
@ -67,16 +69,13 @@ declare const fullApi: ApiFromModules<{
deviceFieldDefaults: typeof deviceFieldDefaults;
deviceFields: typeof deviceFields;
devices: typeof devices;
emprestimos: typeof emprestimos;
fields: typeof fields;
files: typeof files;
incidents: typeof incidents;
invites: typeof invites;
liveChat: typeof liveChat;
machines: typeof machines;
metrics: typeof metrics;
migrations: typeof migrations;
ops: typeof ops;
queues: typeof queues;
rbac: typeof rbac;
reports: typeof reports;
@ -88,33 +87,16 @@ declare const fullApi: ApiFromModules<{
ticketFormTemplates: typeof ticketFormTemplates;
ticketNotifications: typeof ticketNotifications;
tickets: typeof tickets;
usbPolicy: typeof usbPolicy;
users: typeof users;
}>;
declare const fullApiWithMounts: typeof fullApi;
/**
* A utility for referencing Convex functions in your app's public API.
*
* Usage:
* ```js
* const myFunctionReference = api.myModule.myFunction;
* ```
*/
export declare const api: FilterApi<
typeof fullApi,
typeof fullApiWithMounts,
FunctionReference<any, "public">
>;
/**
* A utility for referencing Convex functions in your app's internal API.
*
* Usage:
* ```js
* const myFunctionReference = internal.myModule.myFunction;
* ```
*/
export declare const internal: FilterApi<
typeof fullApi,
typeof fullApiWithMounts,
FunctionReference<any, "internal">
>;

View file

@ -10,6 +10,7 @@
import {
ActionBuilder,
AnyComponents,
HttpActionBuilder,
MutationBuilder,
QueryBuilder,
@ -18,9 +19,15 @@ import {
GenericQueryCtx,
GenericDatabaseReader,
GenericDatabaseWriter,
FunctionReference,
} from "convex/server";
import type { DataModel } from "./dataModel.js";
type GenericCtx =
| GenericActionCtx<DataModel>
| GenericMutationCtx<DataModel>
| GenericQueryCtx<DataModel>;
/**
* Define a query in this Convex app's public API.
*
@ -85,12 +92,11 @@ export declare const internalAction: ActionBuilder<DataModel, "internal">;
/**
* Define an HTTP action.
*
* The wrapped function will be used to respond to HTTP requests received
* by a Convex deployment if the requests matches the path and method where
* this action is routed. Be sure to route your httpAction in `convex/http.js`.
* This function will be used to respond to HTTP requests received by a Convex
* deployment if the requests matches the path and method where this action
* is routed. Be sure to route your action in `convex/http.js`.
*
* @param func - The function. It receives an {@link ActionCtx} as its first argument
* and a Fetch API `Request` object as its second.
* @param func - The function. It receives an {@link ActionCtx} as its first argument.
* @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
*/
export declare const httpAction: HttpActionBuilder;

View file

@ -16,6 +16,7 @@ import {
internalActionGeneric,
internalMutationGeneric,
internalQueryGeneric,
componentsGeneric,
} from "convex/server";
/**
@ -80,14 +81,10 @@ export const action = actionGeneric;
export const internalAction = internalActionGeneric;
/**
* Define an HTTP action.
* Define a Convex HTTP action.
*
* The wrapped function will be used to respond to HTTP requests received
* by a Convex deployment if the requests matches the path and method where
* this action is routed. Be sure to route your httpAction in `convex/http.js`.
*
* @param func - The function. It receives an {@link ActionCtx} as its first argument
* and a Fetch API `Request` object as its second.
* @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
* @param func - The function. It receives an {@link ActionCtx} as its first argument, and a `Request` object
* as its second.
* @returns The wrapped endpoint function. Route a URL path to this function in `convex/http.js`.
*/
export const httpAction = httpActionGeneric;

View file

@ -46,7 +46,7 @@ export const list = query({
let items = await ctx.db
.query("alerts")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
if (companyId) items = items.filter((a) => a.companyId === companyId)
if (typeof start === "number") items = items.filter((a) => a.createdAt >= start)
if (typeof end === "number") items = items.filter((a) => a.createdAt < end)
@ -62,7 +62,7 @@ export const managersForCompany = query({
const users = await ctx.db
.query("users")
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId).eq("companyId", companyId))
.take(100)
.collect()
return users.filter((u) => (u.role ?? "").toUpperCase() === "MANAGER")
},
})
@ -78,7 +78,7 @@ export const lastForCompanyBySlug = query({
const items = await ctx.db
.query("alerts")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
const matches = items.filter((a) => a.companyId === company._id)
if (matches.length === 0) return null
const last = matches.sort((a, b) => b.createdAt - a.createdAt)[0]
@ -94,15 +94,12 @@ export const lastForCompaniesBySlugs = query({
const alerts = await ctx.db
.query("alerts")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
// Buscar todas as companies do tenant de uma vez
const allCompanies = await ctx.db
.query("companies")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(1000)
const companiesBySlug = new Map(allCompanies.map(c => [c.slug, c]))
.collect()
for (const slug of slugs) {
const company = companiesBySlug.get(slug)
const company = await ctx.db
.query("companies")
.withIndex("by_tenant_slug", (q) => q.eq("tenantId", tenantId).eq("slug", slug))
.first()
if (!company) {
result[slug] = null
continue
@ -122,8 +119,7 @@ export const lastForCompaniesBySlugs = query({
export const tenantIds = query({
args: {},
handler: async (ctx) => {
// Limita a 1000 companies para evitar OOM
const companies = await ctx.db.query("companies").take(1000)
const companies = await ctx.db.query("companies").collect()
return Array.from(new Set(companies.map((c) => c.tenantId)))
},
})
@ -131,11 +127,10 @@ export const tenantIds = query({
export const existsForCompanyRange = query({
args: { tenantId: v.string(), companyId: v.id("companies"), start: v.number(), end: v.number() },
handler: async (ctx, { tenantId, companyId, start, end }) => {
// Limita a 500 alerts para evitar OOM e faz filtragem eficiente
const items = await ctx.db
.query("alerts")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(500)
.collect()
return items.some((a) => a.companyId === companyId && a.createdAt >= start && a.createdAt < end)
},
})

160
convex/alerts_actions.ts Normal file
View file

@ -0,0 +1,160 @@
"use node"
import tls from "tls"
import { action } from "./_generated/server"
import { api } from "./_generated/api"
import { v } from "convex/values"
import type { Id } from "./_generated/dataModel"
function b64(input: string) {
return Buffer.from(input, "utf8").toString("base64")
}
async function sendSmtpMail(cfg: { host: string; port: number; username: string; password: string; from: string }, to: string, subject: string, html: string) {
return new Promise<void>((resolve, reject) => {
const socket = tls.connect(cfg.port, cfg.host, { rejectUnauthorized: false }, () => {
let buffer = ""
const send = (line: string) => socket.write(line + "\r\n")
const wait = (expected: string | RegExp) =>
new Promise<void>((res) => {
const onData = (data: Buffer) => {
buffer += data.toString()
const lines = buffer.split(/\r?\n/)
const last = lines.filter(Boolean).slice(-1)[0] ?? ""
if (typeof expected === "string" ? last.startsWith(expected) : expected.test(last)) {
socket.removeListener("data", onData)
res()
}
}
socket.on("data", onData)
socket.on("error", reject)
})
;(async () => {
await wait(/^220 /)
send(`EHLO ${cfg.host}`)
await wait(/^250-/)
await wait(/^250 /)
send("AUTH LOGIN")
await wait(/^334 /)
send(b64(cfg.username))
await wait(/^334 /)
send(b64(cfg.password))
await wait(/^235 /)
send(`MAIL FROM:<${cfg.from.match(/<(.+)>/)?.[1] ?? cfg.from}>`)
await wait(/^250 /)
send(`RCPT TO:<${to}>`)
await wait(/^250 /)
send("DATA")
await wait(/^354 /)
const headers = [
`From: ${cfg.from}`,
`To: ${to}`,
`Subject: ${subject}`,
"MIME-Version: 1.0",
"Content-Type: text/html; charset=UTF-8",
].join("\r\n")
send(headers + "\r\n\r\n" + html + "\r\n.")
await wait(/^250 /)
send("QUIT")
socket.end()
resolve()
})().catch(reject)
})
socket.on("error", reject)
})
}
export const sendHoursUsageAlerts = action({
args: { range: v.optional(v.string()), threshold: v.optional(v.number()) },
handler: async (ctx, { range, threshold }) => {
const R = (range ?? "30d") as string
const T = typeof threshold === "number" ? threshold : 90
const smtp = {
host: process.env.SMTP_ADDRESS!,
port: Number(process.env.SMTP_PORT ?? 465),
username: process.env.SMTP_USERNAME!,
password: process.env.SMTP_PASSWORD!,
from: process.env.MAILER_SENDER_EMAIL || "no-reply@example.com",
}
if (!smtp.host || !smtp.username || !smtp.password) {
console.warn("SMTP not configured; skipping alerts send")
return { sent: 0 }
}
const targetHour = Number(process.env.ALERTS_LOCAL_HOUR ?? 8)
const now = new Date()
const fmt = new Intl.DateTimeFormat("en-CA", { timeZone: "America/Sao_Paulo", year: "numeric", month: "2-digit", day: "2-digit", hour: "2-digit", minute: "2-digit", hour12: false })
const parts = Object.fromEntries(fmt.formatToParts(now).map((p) => [p.type, p.value])) as Record<string, string>
const hourSP = Number(parts.hour)
if (hourSP !== targetHour) {
return { skipped: true, reason: "hour_guard" }
}
const dayKey = `${parts.year}-${parts.month}-${parts.day}`
const startSP = new Date(`${dayKey}T00:00:00-03:00`).getTime()
const endSP = startSP + 24 * 60 * 60 * 1000
const tenants = await ctx.runQuery(api.alerts.tenantIds, {})
let totalSent = 0
for (const tenantId of tenants) {
const report = await ctx.runQuery(api.reports.hoursByClientInternal, { tenantId, range: R })
type Item = {
companyId: Id<"companies">
name: string
internalMs: number
externalMs: number
totalMs: number
contractedHoursPerMonth: number | null
}
const items = (report.items ?? []) as Item[]
const candidates = items.filter((i) => i.contractedHoursPerMonth != null && (i.totalMs / 3600000) / (i.contractedHoursPerMonth || 1) * 100 >= T)
for (const item of candidates) {
const already = await ctx.runQuery(api.alerts.existsForCompanyRange, { tenantId, companyId: item.companyId, start: startSP, end: endSP })
if (already) continue
const managers = await ctx.runQuery(api.alerts.managersForCompany, { tenantId, companyId: item.companyId })
if (managers.length === 0) continue
const usagePct = (((item.totalMs / 3600000) / (item.contractedHoursPerMonth || 1)) * 100)
const subject = `Alerta: uso de horas em ${item.name} acima de ${T}%`
const body = `
<p>Olá,</p>
<p>O uso de horas contratadas para <strong>${item.name}</strong> atingiu <strong>${usagePct.toFixed(1)}%</strong>.</p>
<ul>
<li>Horas internas: <strong>${(item.internalMs/3600000).toFixed(2)}</strong></li>
<li>Horas externas: <strong>${(item.externalMs/3600000).toFixed(2)}</strong></li>
<li>Total: <strong>${(item.totalMs/3600000).toFixed(2)}</strong></li>
<li>Contratadas/mês: <strong>${item.contractedHoursPerMonth}</strong></li>
</ul>
<p>Reveja a alocação da equipe e, se necessário, ajuste o atendimento.</p>
`
let delivered = 0
for (const m of managers) {
try {
await sendSmtpMail(smtp, m.email, subject, body)
delivered += 1
} catch (error) {
console.error("Failed to send alert to", m.email, error)
}
}
totalSent += delivered
await ctx.runMutation(api.alerts.log, {
tenantId,
companyId: item.companyId,
companyName: item.name,
usagePct,
threshold: T,
range: R,
recipients: managers.map((m) => m.email),
deliveredCount: delivered,
})
}
}
return { sent: totalSent }
},
})

File diff suppressed because it is too large Load diff

View file

@ -1,138 +0,0 @@
import type { Doc } from "./_generated/dataModel"
export type AutomationTrigger =
| "TICKET_CREATED"
| "STATUS_CHANGED"
| "PRIORITY_CHANGED"
| "QUEUE_CHANGED"
| "COMMENT_ADDED"
| "TICKET_RESOLVED"
export type AutomationConditionOperator = "AND" | "OR"
export type AutomationConditionField =
| "companyId"
| "queueId"
| "categoryId"
| "subcategoryId"
| "priority"
| "status"
| "channel"
| "formTemplate"
| "chatEnabled"
| "tag"
export type AutomationConditionComparator =
| "eq"
| "neq"
| "in"
| "not_in"
| "contains"
| "not_contains"
| "is_true"
| "is_false"
export type AutomationCondition = {
field: AutomationConditionField
op: AutomationConditionComparator
value?: unknown
}
export type AutomationConditionGroup = {
op: AutomationConditionOperator
conditions: AutomationCondition[]
}
export type TicketForAutomation = Pick<
Doc<"tickets">,
| "tenantId"
| "status"
| "priority"
| "channel"
| "queueId"
| "companyId"
| "categoryId"
| "subcategoryId"
| "tags"
| "formTemplate"
| "chatEnabled"
>
function normalizeId(value: unknown): string | null {
if (!value) return null
return String(value)
}
function normalizeString(value: unknown): string | null {
if (typeof value !== "string") return null
const trimmed = value.trim()
return trimmed.length > 0 ? trimmed : null
}
function normalizeStringArray(value: unknown): string[] {
if (!Array.isArray(value)) return []
return value.map((item) => normalizeString(item)).filter((item): item is string => Boolean(item))
}
function normalizeBoolean(value: unknown): boolean | null {
if (typeof value === "boolean") return value
return null
}
function compareValue(actual: string | null, op: AutomationConditionComparator, expected: unknown): boolean {
if (op === "eq") return actual === normalizeId(expected)
if (op === "neq") return actual !== normalizeId(expected)
if (op === "in") {
const list = normalizeStringArray(expected).map((v) => v)
return actual !== null && list.includes(actual)
}
if (op === "not_in") {
const list = normalizeStringArray(expected).map((v) => v)
return actual === null || !list.includes(actual)
}
return false
}
export function evaluateAutomationConditions(
ticket: TicketForAutomation,
group: AutomationConditionGroup | null | undefined
): boolean {
if (!group || !Array.isArray(group.conditions) || group.conditions.length === 0) return true
const op = group.op === "OR" ? "OR" : "AND"
const results = group.conditions.map((condition) => {
const field = condition.field
const operator = condition.op
if (field === "companyId") return compareValue(normalizeId(ticket.companyId), operator, condition.value)
if (field === "queueId") return compareValue(normalizeId(ticket.queueId), operator, condition.value)
if (field === "categoryId") return compareValue(normalizeId(ticket.categoryId), operator, condition.value)
if (field === "subcategoryId") return compareValue(normalizeId(ticket.subcategoryId), operator, condition.value)
if (field === "priority") return compareValue(normalizeString(ticket.priority), operator, condition.value)
if (field === "status") return compareValue(normalizeString(ticket.status), operator, condition.value)
if (field === "channel") return compareValue(normalizeString(ticket.channel), operator, condition.value)
if (field === "formTemplate") return compareValue(normalizeString(ticket.formTemplate), operator, condition.value)
if (field === "chatEnabled") {
const expectedBool = normalizeBoolean(condition.value)
if (operator === "is_true") return ticket.chatEnabled === true
if (operator === "is_false") return ticket.chatEnabled !== true
if (operator === "eq") return expectedBool !== null ? Boolean(ticket.chatEnabled) === expectedBool : false
if (operator === "neq") return expectedBool !== null ? Boolean(ticket.chatEnabled) !== expectedBool : false
return false
}
if (field === "tag") {
const tag = normalizeString(condition.value)
if (!tag) return false
const has = (ticket.tags ?? []).includes(tag)
if (operator === "contains" || operator === "eq") return has
if (operator === "not_contains" || operator === "neq") return !has
return false
}
return false
})
return op === "OR" ? results.some(Boolean) : results.every(Boolean)
}

View file

@ -7,7 +7,7 @@ export const ensureDefaults = mutation({
let existing = await ctx.db
.query("queues")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(10);
.collect();
existing = await Promise.all(
existing.map(async (queue) => {
if (queue.name === "Suporte N1" || queue.slug === "suporte-n1") {

View file

@ -208,7 +208,7 @@ export const list = query({
const categories = await ctx.db
.query("ticketCategories")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
if (categories.length === 0) {
return []
@ -217,7 +217,7 @@ export const list = query({
const subcategories = await ctx.db
.query("ticketSubcategories")
.withIndex("by_tenant_slug", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
return categories.map((category) => ({
id: category._id,
@ -249,7 +249,7 @@ export const ensureDefaults = mutation({
const existingCount = await ctx.db
.query("ticketCategories")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
if (existingCount.length > 0) {
return { created: 0 }
@ -408,7 +408,7 @@ export const deleteCategory = mutation({
const subs = await ctx.db
.query("ticketSubcategories")
.withIndex("by_category_order", (q) => q.eq("categoryId", categoryId))
.take(100)
.collect()
for (const sub of subs) {
await ctx.db.patch(sub._id, {
categoryId: transferTo,
@ -418,7 +418,7 @@ export const deleteCategory = mutation({
const ticketsToMove = await ctx.db
.query("tickets")
.withIndex("by_tenant_category", (q) => q.eq("tenantId", tenantId).eq("categoryId", categoryId))
.take(100)
.collect()
for (const ticket of ticketsToMove) {
await ctx.db.patch(ticket._id, {
categoryId: transferTo,
@ -437,7 +437,7 @@ export const deleteCategory = mutation({
const subs = await ctx.db
.query("ticketSubcategories")
.withIndex("by_category_order", (q) => q.eq("categoryId", categoryId))
.take(100)
.collect()
for (const sub of subs) {
await ctx.db.delete(sub._id)
}
@ -530,7 +530,7 @@ export const deleteSubcategory = mutation({
const tickets = await ctx.db
.query("tickets")
.withIndex("by_tenant_subcategory", (q) => q.eq("tenantId", tenantId).eq("subcategoryId", subcategoryId))
.take(100)
.collect()
for (const ticket of tickets) {
await ctx.db.patch(ticket._id, {
subcategoryId: transferTo,

View file

@ -84,7 +84,7 @@ export const get = query({
const records = await ctx.db
.query("categorySlaSettings")
.withIndex("by_tenant_category", (q) => q.eq("tenantId", tenantId).eq("categoryId", categoryId))
.take(100)
.collect()
return {
categoryId,
@ -119,7 +119,7 @@ export const save = mutation({
const existing = await ctx.db
.query("categorySlaSettings")
.withIndex("by_tenant_category", (q) => q.eq("tenantId", tenantId).eq("categoryId", categoryId))
.take(100)
.collect()
await Promise.all(existing.map((record) => ctx.db.delete(record._id)))
const now = Date.now()

View file

@ -1,376 +0,0 @@
import { ConvexError, v } from "convex/values"
import type { Doc, Id } from "./_generated/dataModel"
import { mutation, query } from "./_generated/server"
import { requireAdmin, requireStaff } from "./rbac"
import { normalizeChecklistText } from "./ticketChecklist"
function normalizeTemplateName(input: string) {
return input.trim()
}
function normalizeTemplateDescription(input: string | null | undefined) {
const text = (input ?? "").trim()
return text.length > 0 ? text : null
}
type ChecklistItemType = "checkbox" | "question"
type RawTemplateItem = {
id?: string
text: string
description?: string
type?: string
options?: string[]
required?: boolean
}
type NormalizedTemplateItem = {
id: string
text: string
description?: string
type?: ChecklistItemType
options?: string[]
required?: boolean
}
// Validates and normalizes raw checklist items from the client.
// Guarantees: at least one item; each item has a unique id and non-empty
// text (<= 240 chars); "question" items keep >= 2 non-empty options;
// `required` defaults to true. Throws ConvexError (pt-BR messages) on violation.
function normalizeTemplateItems(
raw: RawTemplateItem[],
options: { generateId?: () => string }
): NormalizedTemplateItem[] {
if (!Array.isArray(raw) || raw.length === 0) {
throw new ConvexError("Adicione pelo menos um item no checklist.")
}
// Injectable id factory (useful for tests); defaults to a random UUID.
const generateId = options.generateId ?? (() => crypto.randomUUID())
const seen = new Set<string>()
const items: NormalizedTemplateItem[] = []
for (const entry of raw) {
// Blank/missing client ids are replaced; explicit ids must not repeat.
const id = String(entry.id ?? "").trim() || generateId()
if (seen.has(id)) {
throw new ConvexError("Itens do checklist com IDs duplicados.")
}
seen.add(id)
const text = normalizeChecklistText(entry.text)
if (!text) {
throw new ConvexError("Todos os itens do checklist precisam ter um texto.")
}
if (text.length > 240) {
throw new ConvexError("Item do checklist muito longo (máx. 240 caracteres).")
}
// Empty-string descriptions collapse to undefined (omitted from storage).
const description = entry.description?.trim() || undefined
// Any type other than "question" degrades to a plain checkbox.
const itemType: ChecklistItemType = entry.type === "question" ? "question" : "checkbox"
const itemOptions = itemType === "question" && Array.isArray(entry.options)
? entry.options.map((o) => String(o).trim()).filter((o) => o.length > 0)
: undefined
if (itemType === "question" && (!itemOptions || itemOptions.length < 2)) {
throw new ConvexError(`A pergunta "${text}" precisa ter pelo menos 2 opções.`)
}
const required = typeof entry.required === "boolean" ? entry.required : true
items.push({
id,
text,
description,
type: itemType,
options: itemOptions,
required,
})
}
return items
}
/**
 * Projects a template document (plus its optionally hydrated company) into
 * the API response shape. Missing description becomes "", missing item type
 * becomes "checkbox", missing item required flag becomes true.
 */
function mapTemplate(template: Doc<"ticketChecklistTemplates">, company: Doc<"companies"> | null) {
  const items = (template.items ?? []).map((item) => {
    const required = typeof item.required === "boolean" ? item.required : true
    return {
      id: item.id,
      text: item.text,
      description: item.description,
      type: item.type ?? "checkbox",
      options: item.options,
      required,
    }
  })
  const companyRef = company ? { id: company._id, name: company.name } : null
  return {
    id: template._id,
    name: template.name,
    description: template.description ?? "",
    company: companyRef,
    items,
    isArchived: Boolean(template.isArchived),
    createdAt: template.createdAt,
    updatedAt: template.updatedAt,
  }
}
/**
 * Staff-visible list of non-archived checklist templates for a tenant.
 * When `companyId` is given, returns tenant-wide templates plus templates
 * scoped to that company. Company-specific templates sort first, then by
 * name (pt-BR collation). Reads at most 200 templates.
 */
export const listActive = query({
  args: {
    tenantId: v.string(),
    viewerId: v.id("users"),
    companyId: v.optional(v.id("companies")),
  },
  handler: async (ctx, { tenantId, viewerId, companyId }) => {
    await requireStaff(ctx, viewerId, tenantId)
    const templates = await ctx.db
      .query("ticketChecklistTemplates")
      .withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
      .take(200)
    const filtered = templates.filter((tpl) => {
      if (tpl.isArchived === true) return false
      if (!companyId) return true
      return !tpl.companyId || String(tpl.companyId) === String(companyId)
    })
    // Collect distinct company ids referenced by the surviving templates.
    const companiesToHydrate = new Map<string, Id<"companies">>()
    for (const tpl of filtered) {
      if (tpl.companyId) {
        companiesToHydrate.set(String(tpl.companyId), tpl.companyId)
      }
    }
    // Hydrate referenced companies in parallel instead of one await per id.
    const companyMap = new Map<string, Doc<"companies">>()
    await Promise.all(
      Array.from(companiesToHydrate.values()).map(async (id) => {
        const company = await ctx.db.get(id)
        // Drop cross-tenant references defensively.
        if (company && company.tenantId === tenantId) {
          companyMap.set(String(id), company as Doc<"companies">)
        }
      })
    )
    return filtered
      .sort((a, b) => {
        const aSpecific = a.companyId ? 1 : 0
        const bSpecific = b.companyId ? 1 : 0
        if (aSpecific !== bSpecific) return bSpecific - aSpecific
        return (a.name ?? "").localeCompare(b.name ?? "", "pt-BR")
      })
      .map((tpl) => mapTemplate(tpl, tpl.companyId ? (companyMap.get(String(tpl.companyId)) ?? null) : null))
  },
})
/**
 * Admin list of checklist templates for a tenant, optionally including
 * archived ones. Company-specific templates sort first, then by name
 * (pt-BR collation). Reads at most 500 templates.
 */
export const list = query({
  args: {
    tenantId: v.string(),
    viewerId: v.id("users"),
    includeArchived: v.optional(v.boolean()),
  },
  handler: async (ctx, { tenantId, viewerId, includeArchived }) => {
    await requireAdmin(ctx, viewerId, tenantId)
    const templates = await ctx.db
      .query("ticketChecklistTemplates")
      .withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
      .take(500)
    const filtered = templates.filter((tpl) => includeArchived || tpl.isArchived !== true)
    // Collect distinct company ids referenced by the surviving templates.
    const companiesToHydrate = new Map<string, Id<"companies">>()
    for (const tpl of filtered) {
      if (tpl.companyId) {
        companiesToHydrate.set(String(tpl.companyId), tpl.companyId)
      }
    }
    // Hydrate referenced companies in parallel instead of one await per id.
    const companyMap = new Map<string, Doc<"companies">>()
    await Promise.all(
      Array.from(companiesToHydrate.values()).map(async (id) => {
        const company = await ctx.db.get(id)
        // Drop cross-tenant references defensively.
        if (company && company.tenantId === tenantId) {
          companyMap.set(String(id), company as Doc<"companies">)
        }
      })
    )
    return filtered
      .sort((a, b) => {
        const aSpecific = a.companyId ? 1 : 0
        const bSpecific = b.companyId ? 1 : 0
        if (aSpecific !== bSpecific) return bSpecific - aSpecific
        return (a.name ?? "").localeCompare(b.name ?? "", "pt-BR")
      })
      .map((tpl) => mapTemplate(tpl, tpl.companyId ? (companyMap.get(String(tpl.companyId)) ?? null) : null))
  },
})
// Creates a checklist template for a tenant, optionally scoped to one company.
// Admin-only. Validates name length (>= 3 after trim), company/tenant ownership,
// and item structure (via normalizeTemplateItems). Returns the new template id.
export const create = mutation({
args: {
tenantId: v.string(),
actorId: v.id("users"),
name: v.string(),
description: v.optional(v.string()),
companyId: v.optional(v.id("companies")),
items: v.array(
v.object({
id: v.optional(v.string()),
text: v.string(),
description: v.optional(v.string()),
type: v.optional(v.string()),
options: v.optional(v.array(v.string())),
required: v.optional(v.boolean()),
}),
),
isArchived: v.optional(v.boolean()),
},
handler: async (ctx, { tenantId, actorId, name, description, companyId, items, isArchived }) => {
await requireAdmin(ctx, actorId, tenantId)
const normalizedName = normalizeTemplateName(name)
if (normalizedName.length < 3) {
throw new ConvexError("Informe um nome com pelo menos 3 caracteres.")
}
// A company-scoped template must reference a company of the same tenant.
if (companyId) {
const company = await ctx.db.get(companyId)
if (!company || company.tenantId !== tenantId) {
throw new ConvexError("Empresa inválida para o template.")
}
}
const normalizedItems = normalizeTemplateItems(items, {})
const normalizedDescription = normalizeTemplateDescription(description)
// Omitted isArchived defaults to an active (non-archived) template.
const archivedFlag = typeof isArchived === "boolean" ? isArchived : false
const now = Date.now()
return ctx.db.insert("ticketChecklistTemplates", {
tenantId,
name: normalizedName,
// null description is stored as an absent field (undefined), not null.
description: normalizedDescription ?? undefined,
companyId: companyId ?? undefined,
items: normalizedItems,
isArchived: archivedFlag,
createdAt: now,
updatedAt: now,
createdBy: actorId,
updatedBy: actorId,
})
},
})
// Fully replaces an existing checklist template (name, description, company
// scope, items, archived flag). Admin-only; the template must belong to the
// tenant. Validation mirrors `create`.
export const update = mutation({
args: {
tenantId: v.string(),
actorId: v.id("users"),
templateId: v.id("ticketChecklistTemplates"),
name: v.string(),
description: v.optional(v.string()),
companyId: v.optional(v.id("companies")),
items: v.array(
v.object({
id: v.optional(v.string()),
text: v.string(),
description: v.optional(v.string()),
type: v.optional(v.string()),
options: v.optional(v.array(v.string())),
required: v.optional(v.boolean()),
}),
),
isArchived: v.optional(v.boolean()),
},
handler: async (ctx, { tenantId, actorId, templateId, name, description, companyId, items, isArchived }) => {
await requireAdmin(ctx, actorId, tenantId)
const existing = await ctx.db.get(templateId)
if (!existing || existing.tenantId !== tenantId) {
throw new ConvexError("Template de checklist não encontrado.")
}
const normalizedName = normalizeTemplateName(name)
if (normalizedName.length < 3) {
throw new ConvexError("Informe um nome com pelo menos 3 caracteres.")
}
// A company-scoped template must reference a company of the same tenant.
if (companyId) {
const company = await ctx.db.get(companyId)
if (!company || company.tenantId !== tenantId) {
throw new ConvexError("Empresa inválida para o template.")
}
}
const normalizedItems = normalizeTemplateItems(items, {})
const normalizedDescription = normalizeTemplateDescription(description)
// Omitted isArchived keeps the stored flag rather than resetting it.
const nextArchived = typeof isArchived === "boolean" ? isArchived : Boolean(existing.isArchived)
const now = Date.now()
// NOTE(review): patching with `undefined` removes the field in Convex, so a
// cleared description/companyId is erased rather than kept — confirm intended.
await ctx.db.patch(templateId, {
name: normalizedName,
description: normalizedDescription ?? undefined,
companyId: companyId ?? undefined,
items: normalizedItems,
isArchived: nextArchived,
updatedAt: now,
updatedBy: actorId,
})
return { ok: true }
},
})
/** Permanently deletes a checklist template after verifying tenant ownership. Admin-only. */
export const remove = mutation({
  args: {
    tenantId: v.string(),
    actorId: v.id("users"),
    templateId: v.id("ticketChecklistTemplates"),
  },
  handler: async (ctx, { tenantId, actorId, templateId }) => {
    await requireAdmin(ctx, actorId, tenantId)
    const template = await ctx.db.get(templateId)
    const ownedByTenant = Boolean(template) && template!.tenantId === tenantId
    if (!ownedByTenant) {
      throw new ConvexError("Template de checklist não encontrado.")
    }
    await ctx.db.delete(templateId)
    return { ok: true }
  },
})
// DEBUG-only query: inspects a template's stored fields and, optionally, how a
// ticket's checklist items reference that template (truncated text, description
// presence flags). Staff-only. Returns { error } instead of throwing when the
// template is missing, so it can be called safely from debugging tools.
export const debugTemplateAndTicketChecklist = query({
args: {
tenantId: v.string(),
viewerId: v.id("users"),
templateId: v.id("ticketChecklistTemplates"),
ticketId: v.optional(v.id("tickets")),
},
handler: async (ctx, { tenantId, viewerId, templateId, ticketId }) => {
await requireStaff(ctx, viewerId, tenantId)
const template = await ctx.db.get(templateId)
if (!template || template.tenantId !== tenantId) {
return { error: "Template nao encontrado" }
}
const templateData = {
id: String(template._id),
name: template.name,
description: template.description,
hasDescription: Boolean(template.description),
descriptionType: typeof template.description,
itemsCount: template.items?.length ?? 0,
}
let ticketData = null
// Ticket inspection is best-effort: silently omitted when the ticket is
// missing or belongs to another tenant.
if (ticketId) {
const ticket = await ctx.db.get(ticketId)
if (ticket && ticket.tenantId === tenantId) {
ticketData = {
id: String(ticket._id),
checklistCount: ticket.checklist?.length ?? 0,
checklistItems: (ticket.checklist ?? []).map((item) => ({
id: item.id,
// Truncate to keep debug payloads small.
text: item.text.substring(0, 50),
templateId: item.templateId ? String(item.templateId) : null,
templateDescription: item.templateDescription,
hasTemplateDescription: Boolean(item.templateDescription),
description: item.description,
hasDescription: Boolean(item.description),
})),
}
}
}
return { template: templateData, ticket: ticketData }
},
})

View file

@ -58,7 +58,7 @@ export const list = query({
const templates = await ctx.db
.query("commentTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
return templates
.filter((template) => (template.kind ?? "comment") === normalizedKind)

View file

@ -23,7 +23,7 @@ export const list = query({
const companies = await ctx.db
.query("companies")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(200)
.collect()
return companies.map((c) => ({ id: c._id, name: c.name, slug: c.slug }))
},
})
@ -131,7 +131,7 @@ export const removeBySlug = mutation({
const relatedTickets = await ctx.db
.query("tickets")
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId).eq("companyId", existing._id))
.take(200)
.collect()
if (relatedTickets.length > 0) {
const companySnapshot = {
name: existing.name,

View file

@ -1,273 +0,0 @@
import { mutation, query } from "./_generated/server"
import { ConvexError, v } from "convex/values"
import type { Id } from "./_generated/dataModel"
import { requireAdmin } from "./rbac"
// Priorities accepted for SLA rules; "DEFAULT" is the catch-all fallback.
const PRIORITY_VALUES = ["URGENT", "HIGH", "MEDIUM", "LOW", "DEFAULT"] as const
// Ticket statuses that may be configured to pause the SLA clock.
const VALID_STATUSES = ["PENDING", "AWAITING_ATTENDANCE", "PAUSED", "RESOLVED"] as const
// Time-counting modes for SLA targets.
// NOTE(review): "business" presumably means working-hours only and "calendar"
// wall-clock time — confirm against the SLA evaluation engine.
const VALID_TIME_MODES = ["business", "calendar"] as const
// Loosely-typed input shape used by the sanitization helpers below.
type CompanySlaRuleInput = {
priority: string
categoryId?: string | null
responseTargetMinutes?: number | null
responseMode?: string | null
solutionTargetMinutes?: number | null
solutionMode?: string | null
alertThreshold?: number | null
pauseStatuses?: string[] | null
calendarType?: string | null
}
// Convex argument validator for one SLA rule as sent by the client.
const ruleInput = v.object({
priority: v.string(),
categoryId: v.optional(v.union(v.id("ticketCategories"), v.null())),
responseTargetMinutes: v.optional(v.number()),
responseMode: v.optional(v.string()),
solutionTargetMinutes: v.optional(v.number()),
solutionMode: v.optional(v.string()),
alertThreshold: v.optional(v.number()),
pauseStatuses: v.optional(v.array(v.string())),
calendarType: v.optional(v.string()),
})
/** Uppercases/trims a priority string; unknown values fall back to "DEFAULT". */
function normalizePriority(value: string) {
  const candidate = value.trim().toUpperCase()
  const isKnown = PRIORITY_VALUES.includes(candidate as (typeof PRIORITY_VALUES)[number])
  return isKnown ? candidate : "DEFAULT"
}
/** Rounds a positive, finite minute count; anything else becomes undefined. */
function sanitizeTime(value?: number | null) {
  if (typeof value === "number" && Number.isFinite(value) && value > 0) {
    return Math.round(value)
  }
  return undefined
}
/** Lowercases a time mode; empty or unknown values fall back to "calendar". */
function normalizeMode(value?: string | null) {
  if (!value) return "calendar"
  const lowered = value.toLowerCase()
  const isKnown = VALID_TIME_MODES.includes(lowered as (typeof VALID_TIME_MODES)[number])
  return isKnown ? lowered : "calendar"
}
/** Clamps an alert threshold into [0.1, 0.95] at 2 decimals; non-numbers default to 0.8. */
function normalizeThreshold(value?: number | null) {
  if (typeof value !== "number" || Number.isNaN(value)) {
    return 0.8
  }
  const bounded = Math.max(0.1, Math.min(0.95, value))
  return Math.round(bounded * 100) / 100
}
/**
 * Deduplicates and uppercases pause statuses, keeping only values in
 * VALID_STATUSES. Non-arrays and empty results collapse to ["PAUSED"].
 */
function normalizePauseStatuses(value?: string[] | null) {
  if (!Array.isArray(value)) return ["PAUSED"]
  const accepted = new Set<string>()
  value.forEach((status) => {
    if (typeof status !== "string") return
    const upper = status.trim().toUpperCase()
    if (VALID_STATUSES.includes(upper as (typeof VALID_STATUSES)[number])) {
      accepted.add(upper)
    }
  })
  if (accepted.size === 0) {
    accepted.add("PAUSED")
  }
  return Array.from(accepted)
}
// Lista todas as empresas que possuem SLA customizado
export const listCompaniesWithCustomSla = query({
args: {
tenantId: v.string(),
viewerId: v.id("users"),
},
handler: async (ctx, { tenantId, viewerId }) => {
await requireAdmin(ctx, viewerId, tenantId)
// Busca todas as configurações de SLA por empresa
const allSettings = await ctx.db
.query("companySlaSettings")
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId))
.take(1000)
// Agrupa por companyId para evitar duplicatas
const companyIds = [...new Set(allSettings.map((s) => s.companyId))]
// Busca dados das empresas
const companies = await Promise.all(
companyIds.map(async (companyId) => {
const company = await ctx.db.get(companyId)
if (!company) return null
const rulesCount = allSettings.filter((s) => s.companyId === companyId).length
return {
companyId,
companyName: company.name,
companySlug: company.slug,
rulesCount,
}
})
)
return companies.filter(Boolean)
},
})
// Busca as regras de SLA de uma empresa específica
export const get = query({
args: {
tenantId: v.string(),
viewerId: v.id("users"),
companyId: v.id("companies"),
},
handler: async (ctx, { tenantId, viewerId, companyId }) => {
await requireAdmin(ctx, viewerId, tenantId)
const company = await ctx.db.get(companyId)
if (!company || company.tenantId !== tenantId) {
throw new ConvexError("Empresa não encontrada")
}
const records = await ctx.db
.query("companySlaSettings")
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId).eq("companyId", companyId))
.take(100)
// Busca nomes das categorias referenciadas
const categoryIds = [...new Set(records.filter((r) => r.categoryId).map((r) => r.categoryId!))]
const categories = await Promise.all(categoryIds.map((id) => ctx.db.get(id)))
const categoryNames = new Map(
categories.filter(Boolean).map((c) => [c!._id, c!.name])
)
return {
companyId,
companyName: company.name,
rules: records.map((record) => ({
priority: record.priority,
categoryId: record.categoryId ?? null,
categoryName: record.categoryId ? categoryNames.get(record.categoryId) ?? null : null,
responseTargetMinutes: record.responseTargetMinutes ?? null,
responseMode: record.responseMode ?? "calendar",
solutionTargetMinutes: record.solutionTargetMinutes ?? null,
solutionMode: record.solutionMode ?? "calendar",
alertThreshold: record.alertThreshold ?? 0.8,
pauseStatuses: record.pauseStatuses ?? ["PAUSED"],
})),
}
},
})
// Salva as regras de SLA de uma empresa
export const save = mutation({
args: {
tenantId: v.string(),
actorId: v.id("users"),
companyId: v.id("companies"),
rules: v.array(ruleInput),
},
handler: async (ctx, { tenantId, actorId, companyId, rules }) => {
await requireAdmin(ctx, actorId, tenantId)
const company = await ctx.db.get(companyId)
if (!company || company.tenantId !== tenantId) {
throw new ConvexError("Empresa não encontrada")
}
// Valida categorias referenciadas
for (const rule of rules) {
if (rule.categoryId) {
const category = await ctx.db.get(rule.categoryId)
if (!category || category.tenantId !== tenantId) {
throw new ConvexError(`Categoria inválida: ${rule.categoryId}`)
}
}
}
const sanitized = sanitizeRules(rules)
// Remove regras existentes da empresa
const existing = await ctx.db
.query("companySlaSettings")
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId).eq("companyId", companyId))
.take(100)
await Promise.all(existing.map((record) => ctx.db.delete(record._id)))
// Insere novas regras
const now = Date.now()
for (const rule of sanitized) {
await ctx.db.insert("companySlaSettings", {
tenantId,
companyId,
categoryId: rule.categoryId ? (rule.categoryId as Id<"ticketCategories">) : undefined,
priority: rule.priority,
responseTargetMinutes: rule.responseTargetMinutes,
responseMode: rule.responseMode,
solutionTargetMinutes: rule.solutionTargetMinutes,
solutionMode: rule.solutionMode,
alertThreshold: rule.alertThreshold,
pauseStatuses: rule.pauseStatuses,
calendarType: rule.calendarType ?? undefined,
createdAt: now,
updatedAt: now,
actorId,
})
}
return { ok: true }
},
})
// Remove todas as regras de SLA de uma empresa
export const remove = mutation({
args: {
tenantId: v.string(),
actorId: v.id("users"),
companyId: v.id("companies"),
},
handler: async (ctx, { tenantId, actorId, companyId }) => {
await requireAdmin(ctx, actorId, tenantId)
const company = await ctx.db.get(companyId)
if (!company || company.tenantId !== tenantId) {
throw new ConvexError("Empresa não encontrada")
}
const existing = await ctx.db
.query("companySlaSettings")
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId).eq("companyId", companyId))
.take(100)
await Promise.all(existing.map((record) => ctx.db.delete(record._id)))
return { ok: true }
},
})
/**
 * Normalizes each rule via buildRule and deduplicates on the
 * (categoryId, priority) pair — when duplicates exist, the later rule wins.
 */
function sanitizeRules(rules: CompanySlaRuleInput[]) {
  const byKey: Map<string, ReturnType<typeof buildRule>> = new Map()
  for (const rule of rules) {
    const normalizedRule = buildRule(rule)
    const key = `${normalizedRule.categoryId ?? "ALL"}-${normalizedRule.priority}`
    byKey.set(key, normalizedRule)
  }
  return [...byKey.values()]
}
/** Applies all field-level normalizers to one raw SLA rule. */
function buildRule(rule: CompanySlaRuleInput) {
  return {
    priority: normalizePriority(rule.priority),
    categoryId: rule.categoryId ?? null,
    responseTargetMinutes: sanitizeTime(rule.responseTargetMinutes),
    responseMode: normalizeMode(rule.responseMode),
    solutionTargetMinutes: sanitizeTime(rule.solutionTargetMinutes),
    solutionMode: normalizeMode(rule.solutionMode),
    alertThreshold: normalizeThreshold(rule.alertThreshold),
    pauseStatuses: normalizePauseStatuses(rule.pauseStatuses),
    calendarType: rule.calendarType ?? null,
  }
}

View file

@ -3,57 +3,18 @@ import { api } from "./_generated/api"
const crons = cronJobs()
// =============================================================================
// CRON JOBS DESABILITADOS PARA REDUZIR USO DE MEMORIA
// =============================================================================
// Os cron jobs do Convex criam registros em _scheduled_job_logs que acumulam
// versoes em memoria (o Convex self-hosted carrega TODAS as versoes em RAM).
//
// Esses jobs foram movidos para endpoints HTTP em /api/cron/* e devem ser
// chamados via N8N ou outro scheduler externo:
//
// - POST /api/cron/chat-cleanup (substitui auto-end-inactive-chat-sessions)
// - POST /api/cron/usb-cleanup (substitui cleanup-stale-usb-policies)
//
// Autenticacao: Bearer token no header Authorization (usar CRON_SECRET ou REPORTS_CRON_SECRET)
// =============================================================================
crons.interval(
"report-export-runner",
{ minutes: 15 },
api.reports.triggerScheduledExports,
{}
)
// Flags to keep heavy jobs disabled until the Convex backend stabilizes.
const reportsCronEnabled = process.env.REPORTS_CRON_ENABLED === "true"
const autoPauseCronEnabled = process.env.AUTO_PAUSE_ENABLED === "true"
if (reportsCronEnabled) {
crons.interval(
"report-export-runner",
{ minutes: 15 },
api.reports.triggerScheduledExports,
{}
)
}
if (autoPauseCronEnabled) {
crons.daily(
"auto-pause-internal-lunch",
{ hourUTC: 15, minuteUTC: 0 },
api.tickets.pauseInternalSessionsForLunch,
{}
)
}
// DESABILITADO - Movido para /api/cron/usb-cleanup (chamado via N8N)
// crons.interval(
// "cleanup-stale-usb-policies",
// { minutes: 30 },
// api.usbPolicy.cleanupStalePendingPolicies,
// {}
// )
// DESABILITADO - Movido para /api/cron/chat-cleanup (chamado via N8N)
// crons.interval(
// "auto-end-inactive-chat-sessions",
// { minutes: 1 },
// api.liveChat.autoEndInactiveSessions,
// {}
// )
crons.daily(
"auto-pause-internal-lunch",
{ hourUTC: 15, minuteUTC: 0 },
api.tickets.pauseInternalSessionsForLunch,
{}
)
export default crons

View file

@ -219,7 +219,7 @@ export const list = query({
const dashboards = await ctx.db
.query("dashboards")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
const filtered = (includeArchived ? dashboards : dashboards.filter((d) => !(d.isArchived ?? false))).sort(
(a, b) => b.updatedAt - a.updatedAt,
@ -230,7 +230,7 @@ export const list = query({
const widgets = await ctx.db
.query("dashboardWidgets")
.withIndex("by_dashboard", (q) => q.eq("dashboardId", dashboard._id))
.take(100)
.collect()
return {
...sanitizeDashboard(dashboard),
widgetsCount: widgets.length,
@ -256,14 +256,14 @@ export const get = query({
const widgets = await ctx.db
.query("dashboardWidgets")
.withIndex("by_dashboard_order", (q) => q.eq("dashboardId", dashboardId))
.take(100)
.collect()
widgets.sort((a, b) => a.order - b.order || a.createdAt - b.createdAt)
const shares = await ctx.db
.query("dashboardShares")
.withIndex("by_dashboard", (q) => q.eq("dashboardId", dashboardId))
.take(50)
.collect()
return {
dashboard: sanitizeDashboard(dashboard),
@ -457,7 +457,7 @@ export const updateLayout = mutation({
const widgets = await ctx.db
.query("dashboardWidgets")
.withIndex("by_dashboard", (q) => q.eq("dashboardId", dashboardId))
.take(100)
.collect()
const byKey = new Map<string, Doc<"dashboardWidgets">>()
widgets.forEach((widget) => byKey.set(widget.widgetKey, widget))
@ -518,7 +518,7 @@ export const addWidget = mutation({
const existingWidgets = await ctx.db
.query("dashboardWidgets")
.withIndex("by_dashboard", (q) => q.eq("dashboardId", dashboardId))
.take(100)
.collect()
const widgetId = await ctx.db.insert("dashboardWidgets", {
tenantId,
@ -617,7 +617,7 @@ export const ensureQueueSummaryWidget = mutation({
const widgets = await ctx.db
.query("dashboardWidgets")
.withIndex("by_dashboard_order", (q) => q.eq("dashboardId", dashboardId))
.take(100)
.collect()
widgets.sort((a, b) => a.order - b.order || a.createdAt - b.createdAt)
@ -871,7 +871,7 @@ export const upsertShare = mutation({
const existingShares = await ctx.db
.query("dashboardShares")
.withIndex("by_dashboard", (q) => q.eq("dashboardId", dashboardId))
.take(50)
.collect()
const now = Date.now()
let shareDoc = existingShares.find((share) => share.audience === audience)
@ -917,7 +917,7 @@ export const revokeShareToken = mutation({
const shares = await ctx.db
.query("dashboardShares")
.withIndex("by_dashboard", (q) => q.eq("dashboardId", dashboardId))
.take(50)
.collect()
for (const share of shares) {
if (share.audience === "public-link") {

View file

@ -37,7 +37,7 @@ async function unsetDefaults(
const templates = await ctx.db
.query("deviceExportTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
await Promise.all(
templates
@ -73,7 +73,7 @@ export const list = query({
const templates = await ctx.db
.query("deviceExportTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
return templates
.filter((tpl) => {
@ -112,7 +112,7 @@ export const listForTenant = query({
const templates = await ctx.db
.query("deviceExportTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
return templates
.filter((tpl) => tpl.isActive !== false)
@ -149,7 +149,7 @@ export const getDefault = query({
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId).eq("companyId", companyId))
: ctx.db.query("deviceExportTemplates").withIndex("by_tenant_default", (q) => q.eq("tenantId", tenantId).eq("isDefault", true))
const templates = await indexQuery.take(100)
const templates = await indexQuery.collect()
const candidate = templates.find((tpl) => tpl.isDefault) ?? null
if (candidate) {
return {
@ -357,7 +357,7 @@ export const clearCompanyDefault = mutation({
const templates = await ctx.db
.query("deviceExportTemplates")
.withIndex("by_tenant_company", (q) => q.eq("tenantId", tenantId).eq("companyId", companyId))
.take(100)
.collect()
const now = Date.now()
await Promise.all(
templates.map((tpl) =>

View file

@ -73,11 +73,11 @@ export async function ensureMobileDeviceFields(ctx: MutationCtx, tenantId: strin
const existingMobileFields = await ctx.db
.query("deviceFields")
.withIndex("by_tenant_scope", (q) => q.eq("tenantId", tenantId).eq("scope", "mobile"))
.take(100);
.collect();
const allFields = await ctx.db
.query("deviceFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
.take(100);
.collect();
const existingByKey = new Map<string, Doc<"deviceFields">>();
existingMobileFields.forEach((field) => existingByKey.set(field.key, field));

View file

@ -64,7 +64,7 @@ export const list = query({
.query("deviceFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
const fields = await fieldsQuery.take(100)
const fields = await fieldsQuery.collect()
return fields
.filter((field) => matchesCompany(field.companyId, companyId, true))
.filter((field) => matchesScope(field.scope, scope))
@ -96,7 +96,7 @@ export const listForTenant = query({
const fields = await ctx.db
.query("deviceFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
return fields
.filter((field) => matchesCompany(field.companyId, companyId, false))
@ -153,7 +153,7 @@ export const create = mutation({
const existing = await ctx.db
.query("deviceFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", args.tenantId))
.take(100)
.collect()
const maxOrder = existing.reduce((acc, item) => Math.max(acc, item.order ?? 0), 0)
const now = Date.now()

View file

@ -1,359 +0,0 @@
import { v } from "convex/values"
import { mutation, query, type QueryCtx, type MutationCtx } from "./_generated/server"
import type { Id } from "./_generated/dataModel"
// Loan lifecycle states. NOTE(review): the code visible here only ever writes
// "ATIVO" (create) and "DEVOLVIDO" (devolver); "ATRASADO"/"CANCELADO"
// presumably arrive via the free-form `status` arg of `update` — confirm.
const EMPRESTIMO_STATUS = ["ATIVO", "DEVOLVIDO", "ATRASADO", "CANCELADO"] as const
type EmprestimoStatus = (typeof EMPRESTIMO_STATUS)[number]
// Equipment categories for loan items. NOTE(review): not referenced by the
// handlers in this file — presumably consumed by UI/validation elsewhere.
const EQUIPAMENTO_TIPOS = [
"NOTEBOOK",
"DESKTOP",
"MONITOR",
"TECLADO",
"MOUSE",
"HEADSET",
"WEBCAM",
"IMPRESSORA",
"SCANNER",
"PROJETOR",
"TABLET",
"CELULAR",
"ROTEADOR",
"SWITCH",
"OUTRO",
] as const
/**
 * Returns the next sequential loan reference for a tenant (1 when none exist).
 * Reads the highest existing reference via the by_tenant_reference index.
 */
async function getNextReference(ctx: MutationCtx, tenantId: string): Promise<number> {
  const newest = await ctx.db
    .query("emprestimos")
    .withIndex("by_tenant_reference", (q) => q.eq("tenantId", tenantId))
    .order("desc")
    .first()
  const highest = newest?.reference ?? 0
  return highest + 1
}
/**
 * Lists loans for a tenant, newest first, with optional in-memory filters for
 * status, cliente and tecnico. Snapshots are flattened into the row shape.
 * NOTE(review): only `limit * 2` rows are scanned before filtering, so heavily
 * filtered queries may return fewer than `limit` matches even when more exist.
 */
export const list = query({
  args: {
    tenantId: v.string(),
    viewerId: v.id("users"),
    status: v.optional(v.string()),
    clienteId: v.optional(v.id("companies")),
    tecnicoId: v.optional(v.id("users")),
    limit: v.optional(v.number()),
  },
  handler: async (ctx, args) => {
    const { tenantId, status, clienteId, tecnicoId, limit = 100 } = args
    // Over-fetch to leave headroom for the filters below.
    const all = await ctx.db
      .query("emprestimos")
      .withIndex("by_tenant_created", (q) => q.eq("tenantId", tenantId))
      .order("desc")
      .take(limit * 2)
    let filtered = all
    if (status) {
      filtered = filtered.filter((e) => e.status === status)
    }
    if (clienteId) {
      filtered = filtered.filter((e) => e.clienteId === clienteId)
    }
    if (tecnicoId) {
      filtered = filtered.filter((e) => e.tecnicoId === tecnicoId)
    }
    return filtered.slice(0, limit).map((emprestimo) => ({
      id: emprestimo._id,
      reference: emprestimo.reference,
      clienteId: emprestimo.clienteId,
      clienteNome: emprestimo.clienteSnapshot.name,
      responsavelNome: emprestimo.responsavelNome,
      responsavelContato: emprestimo.responsavelContato,
      tecnicoId: emprestimo.tecnicoId,
      tecnicoNome: emprestimo.tecnicoSnapshot.name,
      tecnicoEmail: emprestimo.tecnicoSnapshot.email,
      equipamentos: emprestimo.equipamentos,
      quantidade: emprestimo.quantidade,
      valor: emprestimo.valor,
      dataEmprestimo: emprestimo.dataEmprestimo,
      dataFimPrevisto: emprestimo.dataFimPrevisto,
      dataDevolucao: emprestimo.dataDevolucao,
      status: emprestimo.status,
      observacoes: emprestimo.observacoes,
      observacoesDevolucao: emprestimo.observacoesDevolucao,
      multaDiaria: emprestimo.multaDiaria,
      multaCalculada: emprestimo.multaCalculada,
      createdAt: emprestimo.createdAt,
      updatedAt: emprestimo.updatedAt,
    }))
  },
})
/**
 * Fetches one loan with full cliente/tecnico snapshots.
 * Returns null when the id does not resolve.
 */
export const getById = query({
  args: {
    id: v.id("emprestimos"),
    viewerId: v.id("users"),
  },
  handler: async (ctx, args) => {
    const loan = await ctx.db.get(args.id)
    if (!loan) {
      return null
    }
    return {
      id: loan._id,
      reference: loan.reference,
      clienteId: loan.clienteId,
      clienteSnapshot: loan.clienteSnapshot,
      responsavelNome: loan.responsavelNome,
      responsavelContato: loan.responsavelContato,
      tecnicoId: loan.tecnicoId,
      tecnicoSnapshot: loan.tecnicoSnapshot,
      equipamentos: loan.equipamentos,
      quantidade: loan.quantidade,
      valor: loan.valor,
      dataEmprestimo: loan.dataEmprestimo,
      dataFimPrevisto: loan.dataFimPrevisto,
      dataDevolucao: loan.dataDevolucao,
      status: loan.status,
      observacoes: loan.observacoes,
      multaDiaria: loan.multaDiaria,
      multaCalculada: loan.multaCalculada,
      createdBy: loan.createdBy,
      createdAt: loan.createdAt,
      updatedAt: loan.updatedAt,
    }
  },
})
// Creates a loan: assigns the next sequential reference, denormalizes cliente
// and tecnico snapshots (so history survives later edits to those records),
// and writes a "CRIADO" history entry. Returns { id, reference }.
// NOTE(review): getNextReference is a read-then-insert; Convex mutations run
// as serializable transactions, which presumably prevents duplicate
// references under concurrency — confirm.
export const create = mutation({
args: {
tenantId: v.string(),
createdBy: v.id("users"),
clienteId: v.id("companies"),
responsavelNome: v.string(),
responsavelContato: v.optional(v.string()),
tecnicoId: v.id("users"),
equipamentos: v.array(v.object({
id: v.string(),
tipo: v.string(),
marca: v.string(),
modelo: v.string(),
serialNumber: v.optional(v.string()),
patrimonio: v.optional(v.string()),
})),
valor: v.optional(v.number()),
dataEmprestimo: v.number(),
dataFimPrevisto: v.number(),
observacoes: v.optional(v.string()),
multaDiaria: v.optional(v.number()),
},
handler: async (ctx, args) => {
const now = Date.now()
const reference = await getNextReference(ctx, args.tenantId)
const cliente = await ctx.db.get(args.clienteId)
if (!cliente) {
throw new Error("Cliente nao encontrado")
}
const tecnico = await ctx.db.get(args.tecnicoId)
if (!tecnico) {
throw new Error("Tecnico nao encontrado")
}
const emprestimoId = await ctx.db.insert("emprestimos", {
tenantId: args.tenantId,
reference,
clienteId: args.clienteId,
// Snapshot of the company at loan time (name/slug frozen for history).
clienteSnapshot: {
name: cliente.name,
slug: cliente.slug,
},
responsavelNome: args.responsavelNome,
responsavelContato: args.responsavelContato,
tecnicoId: args.tecnicoId,
// Snapshot of the technician at loan time.
tecnicoSnapshot: {
name: tecnico.name,
email: tecnico.email,
},
equipamentos: args.equipamentos,
// Derived: one unit per equipment entry.
quantidade: args.equipamentos.length,
valor: args.valor,
dataEmprestimo: args.dataEmprestimo,
dataFimPrevisto: args.dataFimPrevisto,
status: "ATIVO",
observacoes: args.observacoes,
multaDiaria: args.multaDiaria,
createdBy: args.createdBy,
createdAt: now,
updatedAt: now,
})
// Audit-trail entry; author falls back to "Sistema" if the user vanished.
const creator = await ctx.db.get(args.createdBy)
await ctx.db.insert("emprestimoHistorico", {
tenantId: args.tenantId,
emprestimoId,
tipo: "CRIADO",
descricao: `Emprestimo #${reference} criado`,
autorId: args.createdBy,
autorSnapshot: {
name: creator?.name ?? "Sistema",
email: creator?.email,
},
createdAt: now,
})
return { id: emprestimoId, reference }
},
})
// Marks a loan as returned. When past dataFimPrevisto and a daily fine is
// configured, charges ceil(days late) * multaDiaria. Rejects double returns.
// Writes a "DEVOLVIDO" history entry and returns { ok, multaCalculada }.
export const devolver = mutation({
args: {
id: v.id("emprestimos"),
updatedBy: v.id("users"),
observacoes: v.optional(v.string()),
},
handler: async (ctx, args) => {
const emprestimo = await ctx.db.get(args.id)
if (!emprestimo) {
throw new Error("Emprestimo nao encontrado")
}
if (emprestimo.status === "DEVOLVIDO") {
throw new Error("Emprestimo ja foi devolvido")
}
const now = Date.now()
let multaCalculada: number | undefined
// Late fee: partial days round up (ceil), so even 1ms late counts as a day.
if (emprestimo.multaDiaria && now > emprestimo.dataFimPrevisto) {
const diasAtraso = Math.ceil((now - emprestimo.dataFimPrevisto) / (1000 * 60 * 60 * 24))
multaCalculada = diasAtraso * emprestimo.multaDiaria
}
await ctx.db.patch(args.id, {
status: "DEVOLVIDO",
dataDevolucao: now,
multaCalculada,
observacoesDevolucao: args.observacoes,
updatedBy: args.updatedBy,
updatedAt: now,
})
// Audit-trail entry; author falls back to "Sistema" if the user vanished.
const updater = await ctx.db.get(args.updatedBy)
await ctx.db.insert("emprestimoHistorico", {
tenantId: emprestimo.tenantId,
emprestimoId: args.id,
tipo: "DEVOLVIDO",
descricao: `Emprestimo #${emprestimo.reference} devolvido${multaCalculada ? ` com multa de R$ ${multaCalculada.toFixed(2)}` : ""}`,
alteracoes: { multaCalculada },
autorId: args.updatedBy,
autorSnapshot: {
name: updater?.name ?? "Sistema",
email: updater?.email,
},
createdAt: now,
})
return { ok: true, multaCalculada }
},
})
export const update = mutation({
  args: {
    id: v.id("emprestimos"),
    updatedBy: v.id("users"),
    responsavelNome: v.optional(v.string()),
    responsavelContato: v.optional(v.string()),
    dataFimPrevisto: v.optional(v.number()),
    observacoes: v.optional(v.string()),
    multaDiaria: v.optional(v.number()),
    status: v.optional(v.string()),
  },
  // Atualiza apenas os campos fornecidos de um emprestimo e registra uma
  // entrada "MODIFICADO" no historico com as alteracoes aplicadas.
  handler: async (ctx, args) => {
    const emprestimo = await ctx.db.get(args.id)
    if (!emprestimo) {
      throw new Error("Emprestimo nao encontrado")
    }

    const now = Date.now()
    const updates: Record<string, unknown> = {
      updatedBy: args.updatedBy,
      updatedAt: now,
    }
    // undefined significa "nao alterar": copia somente os campos presentes.
    const editaveis = {
      responsavelNome: args.responsavelNome,
      responsavelContato: args.responsavelContato,
      dataFimPrevisto: args.dataFimPrevisto,
      observacoes: args.observacoes,
      multaDiaria: args.multaDiaria,
      status: args.status,
    }
    for (const [campo, valor] of Object.entries(editaveis)) {
      if (valor !== undefined) {
        updates[campo] = valor
      }
    }

    await ctx.db.patch(args.id, updates)

    // Snapshot do autor no historico: nome/email podem mudar depois.
    const updater = await ctx.db.get(args.updatedBy)
    await ctx.db.insert("emprestimoHistorico", {
      tenantId: emprestimo.tenantId,
      emprestimoId: args.id,
      tipo: "MODIFICADO",
      descricao: `Emprestimo #${emprestimo.reference} atualizado`,
      alteracoes: updates,
      autorId: args.updatedBy,
      autorSnapshot: {
        name: updater?.name ?? "Sistema",
        email: updater?.email,
      },
      createdAt: now,
    })

    return { ok: true }
  },
})
export const getHistorico = query({
  args: {
    emprestimoId: v.id("emprestimos"),
    limit: v.optional(v.number()),
  },
  // Lista o historico de um emprestimo, do evento mais recente para o mais
  // antigo, limitado a `limit` entradas (50 por padrao).
  handler: async (ctx, { emprestimoId, limit }) => {
    const entradas = await ctx.db
      .query("emprestimoHistorico")
      .withIndex("by_emprestimo_created", (q) => q.eq("emprestimoId", emprestimoId))
      .order("desc")
      .take(limit ?? 50)

    return entradas.map((entrada) => ({
      id: entrada._id,
      tipo: entrada.tipo,
      descricao: entrada.descricao,
      alteracoes: entrada.alteracoes,
      autorNome: entrada.autorSnapshot.name,
      createdAt: entrada.createdAt,
    }))
  },
})
export const getStats = query({
  args: {
    tenantId: v.string(),
    viewerId: v.id("users"),
  },
  // Estatisticas agregadas dos emprestimos de um tenant.
  // NOTE(review): viewerId e recebido mas nao usado no handler —
  // presumivelmente reservado para checagem de permissao; confirmar.
  handler: async (ctx, args) => {
    // Amostra limitada a 200 documentos do tenant.
    const emprestimos = await ctx.db
      .query("emprestimos")
      .withIndex("by_tenant", (q) => q.eq("tenantId", args.tenantId))
      .take(200)

    const now = Date.now()
    let ativos = 0
    let atrasados = 0
    let devolvidos = 0
    let valorTotalAtivo = 0
    for (const emprestimo of emprestimos) {
      if (emprestimo.status === "ATIVO") {
        ativos += 1
        valorTotalAtivo += emprestimo.valor ?? 0
        // Atrasado = ativo cuja data prevista de devolucao ja passou.
        if (emprestimo.dataFimPrevisto < now) {
          atrasados += 1
        }
      } else if (emprestimo.status === "DEVOLVIDO") {
        devolvidos += 1
      }
    }

    return {
      total: emprestimos.length,
      ativos,
      atrasados,
      devolvidos,
      valorTotalAtivo,
    }
  },
})

View file

@ -53,7 +53,7 @@ export const list = query({
const fields = await ctx.db
.query("ticketFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
.take(100);
.collect();
return fields
.filter((field) => {
@ -87,7 +87,7 @@ export const listForTenant = query({
const fields = await ctx.db
.query("ticketFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
.take(100);
.collect();
return fields
.filter((field) => {
@ -157,7 +157,7 @@ export const create = mutation({
const existing = await ctx.db
.query("ticketFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
.take(100);
.collect();
const maxOrder = existing.reduce((acc: number, item: Doc<"ticketFields">) => Math.max(acc, item.order ?? 0), 0);
const now = Date.now();

View file

@ -21,7 +21,7 @@ export const list = query({
.query("incidents")
.withIndex("by_tenant_updated", (q) => q.eq("tenantId", tenantId))
.order("desc")
.take(100)
.collect()
return incidents
},
})

View file

@ -11,7 +11,7 @@ export const list = query({
const invites = await ctx.db
.query("userInvites")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100);
.collect();
return invites
.sort((a, b) => (b.createdAt ?? 0) - (a.createdAt ?? 0))

File diff suppressed because it is too large Load diff

View file

@ -1,276 +0,0 @@
import { mutation, query, internalMutation } from "./_generated/server"
import { v } from "convex/values"
import type { Id } from "./_generated/dataModel"
// Tipo para software recebido do agente
// NOTE(review): este alias nao e referenciado neste arquivo (o validador de
// syncFromHeartbeat redeclara o mesmo formato inline) — presumivelmente
// mantido como documentacao do payload; confirmar antes de remover.
type SoftwareInput = {
  name: string // nome do software (obrigatorio; entradas vazias sao ignoradas no sync)
  version?: string // versao instalada, quando detectada
  publisher?: string // fornecedor informado pelo agente, quando disponivel
  source?: string // origem da deteccao informada pelo agente, quando disponivel
}
// Upsert de softwares de uma maquina (chamado pelo heartbeat)
export const syncFromHeartbeat = internalMutation({
  args: {
    tenantId: v.string(),
    machineId: v.id("machines"),
    software: v.array(
      v.object({
        name: v.string(),
        version: v.optional(v.string()),
        publisher: v.optional(v.string()),
        source: v.optional(v.string()),
      })
    ),
  },
  // Sincroniza a lista de softwares reportada pelo agente: atualiza os
  // conhecidos, insere os novos e remove os que sumiram ha mais de 24h.
  handler: async (ctx, { tenantId, machineId, software }) => {
    const now = Date.now()

    // Indexa os registros atuais da maquina por "nome-minusculo|versao"
    // para decidir entre atualizar e inserir.
    const existentes = await ctx.db
      .query("machineSoftware")
      .withIndex("by_machine", (q) => q.eq("machineId", machineId))
      .collect()
    const porChave = new Map(existentes.map((s) => [`${s.nameLower}|${s.version ?? ""}`, s]))

    const chavesVistas = new Set<string>()
    for (const item of software) {
      // Ignora entradas sem nome util.
      if (!item.name || item.name.trim().length === 0) continue

      const nameLower = item.name.toLowerCase().trim()
      const chave = `${nameLower}|${item.version ?? ""}`
      chavesVistas.add(chave)

      const atual = porChave.get(chave)
      if (atual) {
        // Registro conhecido: renova lastSeenAt e completa metadados vazios.
        await ctx.db.patch(atual._id, {
          lastSeenAt: now,
          publisher: item.publisher || atual.publisher,
          source: item.source || atual.source,
        })
      } else {
        // Software novo nesta maquina.
        await ctx.db.insert("machineSoftware", {
          tenantId,
          machineId,
          name: item.name.trim(),
          nameLower,
          version: item.version?.trim() || undefined,
          publisher: item.publisher?.trim() || undefined,
          source: item.source?.trim() || undefined,
          detectedAt: now,
          lastSeenAt: now,
        })
      }
    }

    // Registros ausentes do payload contam como desinstalados, mas so sao
    // removidos depois de 24h sem serem vistos, tolerando heartbeats parciais.
    const limiteObsoleto = now - 24 * 60 * 60 * 1000
    for (const doc of existentes) {
      const chave = `${doc.nameLower}|${doc.version ?? ""}`
      if (!chavesVistas.has(chave) && doc.lastSeenAt < limiteObsoleto) {
        await ctx.db.delete(doc._id)
      }
    }

    return { processed: software.length }
  },
})
// Lista softwares de uma maquina com paginacao e filtros
export const listByMachine = query({
  args: {
    tenantId: v.string(),
    viewerId: v.id("users"),
    machineId: v.id("machines"),
    search: v.optional(v.string()),
    limit: v.optional(v.number()),
    cursor: v.optional(v.string()),
  },
  handler: async (ctx, { machineId, search, limit = 50, cursor }) => {
    // Tamanho de pagina solicitado, limitado a 100.
    const pageLimit = Math.min(limit, 100)

    // Convex nao suporta LIKE: coleta tudo da maquina e filtra em memoria.
    const todos = await ctx.db
      .query("machineSoftware")
      .withIndex("by_machine", (q) => q.eq("machineId", machineId))
      .collect()

    // Busca case-insensitive em nome, fornecedor e versao.
    const termo = search?.toLowerCase().trim()
    const filtrados = termo && termo.length > 0
      ? todos.filter(
          (s) =>
            s.nameLower.includes(termo) ||
            (s.publisher && s.publisher.toLowerCase().includes(termo)) ||
            (s.version && s.version.toLowerCase().includes(termo))
        )
      : todos

    filtrados.sort((a, b) => a.nameLower.localeCompare(b.nameLower))

    // Paginacao manual: o cursor e o _id do ultimo item da pagina anterior;
    // se nao for encontrado no conjunto filtrado, recomeca do inicio.
    let inicio = 0
    if (cursor) {
      const pos = filtrados.findIndex((s) => s._id === cursor)
      if (pos >= 0) {
        inicio = pos + 1
      }
    }
    const pagina = filtrados.slice(inicio, inicio + pageLimit)
    const nextCursor = pagina.length === pageLimit ? pagina[pagina.length - 1]._id : null

    return {
      items: pagina.map((s) => ({
        id: s._id,
        name: s.name,
        version: s.version ?? null,
        publisher: s.publisher ?? null,
        source: s.source ?? null,
        detectedAt: s.detectedAt,
        lastSeenAt: s.lastSeenAt,
      })),
      total: filtrados.length,
      nextCursor,
    }
  },
})
// Lista softwares de todas as maquinas de um tenant (para admin)
export const listByTenant = query({
  args: {
    tenantId: v.string(),
    viewerId: v.id("users"),
    search: v.optional(v.string()),
    machineId: v.optional(v.id("machines")),
    limit: v.optional(v.number()),
    cursor: v.optional(v.string()),
  },
  handler: async (ctx, { tenantId, search, machineId, limit = 50, cursor }) => {
    const pageLimit = Math.min(limit, 100)

    type Registro = {
      _id: Id<"machineSoftware">
      tenantId: string
      machineId: Id<"machines">
      name: string
      nameLower: string
      version?: string
      publisher?: string
      source?: string
      detectedAt: number
      lastSeenAt: number
    }

    // Com machineId usa o indice composto; sem ele varre o tenant inteiro,
    // limitado a 5000 registros para conter o tamanho da leitura.
    let registros: Registro[]
    if (machineId) {
      registros = await ctx.db
        .query("machineSoftware")
        .withIndex("by_tenant_machine", (q) => q.eq("tenantId", tenantId).eq("machineId", machineId))
        .collect()
    } else {
      registros = await ctx.db
        .query("machineSoftware")
        .withIndex("by_tenant_name", (q) => q.eq("tenantId", tenantId))
        .take(5000)
    }

    // Busca case-insensitive em nome, fornecedor e versao.
    const termo = search?.toLowerCase().trim()
    const filtrados = termo && termo.length > 0
      ? registros.filter(
          (s) =>
            s.nameLower.includes(termo) ||
            (s.publisher && s.publisher.toLowerCase().includes(termo)) ||
            (s.version && s.version.toLowerCase().includes(termo))
        )
      : registros

    filtrados.sort((a, b) => a.nameLower.localeCompare(b.nameLower))

    // Paginacao manual: cursor = _id do ultimo item da pagina anterior.
    let inicio = 0
    if (cursor) {
      const pos = filtrados.findIndex((s) => s._id === cursor)
      if (pos >= 0) {
        inicio = pos + 1
      }
    }
    const pagina = filtrados.slice(inicio, inicio + pageLimit)
    const nextCursor = pagina.length === pageLimit ? pagina[pagina.length - 1]._id : null

    // Resolve os nomes de exibicao apenas das maquinas presentes na pagina.
    const idsMaquinas = [...new Set(pagina.map((s) => s.machineId))]
    const maquinas = await Promise.all(idsMaquinas.map((id) => ctx.db.get(id)))
    const nomesMaquinas = new Map<string, string>()
    for (const maquina of maquinas) {
      if (maquina) {
        nomesMaquinas.set(maquina._id, maquina.displayName || maquina.hostname)
      }
    }

    return {
      items: pagina.map((s) => ({
        id: s._id,
        machineId: s.machineId,
        machineName: nomesMaquinas.get(s.machineId) ?? "Desconhecido",
        name: s.name,
        version: s.version ?? null,
        publisher: s.publisher ?? null,
        source: s.source ?? null,
        detectedAt: s.detectedAt,
        lastSeenAt: s.lastSeenAt,
      })),
      total: filtrados.length,
      nextCursor,
    }
  },
})
// Conta softwares de uma maquina
export const countByMachine = query({
  args: {
    machineId: v.id("machines"),
  },
  // Retorna o total de registros de software vinculados a maquina.
  handler: async (ctx, { machineId }) => {
    const registros = await ctx.db
      .query("machineSoftware")
      .withIndex("by_machine", (q) => q.eq("machineId", machineId))
      .collect()
    return { count: registros.length }
  },
})
// Conta softwares unicos por tenant (para relatorios)
export const stats = query({
  args: {
    tenantId: v.string(),
    viewerId: v.id("users"),
  },
  // NOTE(review): viewerId nao e usado no handler — presumivelmente
  // reservado para autorizacao; confirmar.
  handler: async (ctx, { tenantId }) => {
    // Amostra limitada a 10000 registros do tenant.
    const registros = await ctx.db
      .query("machineSoftware")
      .withIndex("by_tenant_name", (q) => q.eq("tenantId", tenantId))
      .take(10000)

    const nomesUnicos = new Set<string>()
    const maquinasComSoftware = new Set<string>()
    for (const registro of registros) {
      nomesUnicos.add(registro.nameLower)
      maquinasComSoftware.add(registro.machineId)
    }

    return {
      totalInstances: registros.length,
      uniqueSoftware: nomesUnicos.size,
      machinesWithSoftware: maquinasComSoftware.size,
    }
  },
})

View file

@ -1,10 +1,10 @@
// ci: trigger convex functions deploy (no-op)
import { mutation, query } from "./_generated/server"
import { internal, api } from "./_generated/api"
import { api } from "./_generated/api"
import { paginationOptsValidator } from "convex/server"
import { ConvexError, v, Infer } from "convex/values"
import { sha256 } from "@noble/hashes/sha2.js"
import { randomBytes } from "@noble/hashes/utils.js"
import { sha256 } from "@noble/hashes/sha256"
import { randomBytes } from "@noble/hashes/utils"
import type { Doc, Id } from "./_generated/dataModel"
import type { MutationCtx, QueryCtx } from "./_generated/server"
import { normalizeStatus } from "./tickets"
@ -81,7 +81,7 @@ async function findActiveMachineToken(ctx: QueryCtx, machineId: Id<"machines">,
.withIndex("by_machine_revoked_expires", (q) =>
q.eq("machineId", machineId).eq("revoked", false).gt("expiresAt", now),
)
.take(100)
.collect()
return tokens.length > 0 ? tokens[0]! : null
}
@ -92,8 +92,6 @@ function toHex(input: Uint8Array) {
.join("")
}
const utf8 = (s: string) => new TextEncoder().encode(s)
function computeFingerprint(tenantId: string, companySlug: string | undefined, hostname: string, ids: NormalizedIdentifiers) {
const payload = JSON.stringify({
tenantId,
@ -102,7 +100,7 @@ function computeFingerprint(tenantId: string, companySlug: string | undefined, h
macs: ids.macs,
serials: ids.serials,
})
return toHex(sha256(utf8(payload)))
return toHex(sha256(payload))
}
function generateManualFingerprint(tenantId: string, displayName: string) {
@ -112,7 +110,7 @@ function generateManualFingerprint(tenantId: string, displayName: string) {
nonce: toHex(randomBytes(16)),
createdAt: Date.now(),
})
return toHex(sha256(utf8(payload)))
return toHex(sha256(payload))
}
function formatDeviceCustomFieldDisplay(
@ -182,7 +180,7 @@ function matchesExistingHardware(existing: Doc<"machines">, identifiers: Normali
}
function hashToken(token: string) {
return toHex(sha256(utf8(token)))
return toHex(sha256(token))
}
function getRemoteAccessTokenGraceMs() {
@ -251,185 +249,46 @@ function isObject(value: unknown): value is Record<string, unknown> {
return Boolean(value) && typeof value === "object" && !Array.isArray(value)
}
type JsonPrimitive = string | number | boolean | null
type JsonValue = JsonPrimitive | JsonValue[] | { [key: string]: JsonValue }
type JsonRecord = Record<string, JsonValue>
const MAX_JSON_DEPTH = 6
const MAX_ARRAY_LENGTH = 200
function sanitizeJsonValue(value: unknown, depth = 0): JsonValue | undefined {
if (value === null) return null
if (typeof value === "string") return value
if (typeof value === "number") return Number.isFinite(value) ? value : undefined
if (typeof value === "boolean") return value
if (depth >= MAX_JSON_DEPTH) return undefined
if (Array.isArray(value)) {
const items: JsonValue[] = []
for (const entry of value.slice(0, MAX_ARRAY_LENGTH)) {
const sanitized = sanitizeJsonValue(entry, depth + 1)
if (sanitized !== undefined) {
items.push(sanitized)
}
}
return items
function mergeInventory(current: unknown, patch: unknown): unknown {
if (!isObject(patch)) {
return patch
}
if (isObject(value)) {
const result: JsonRecord = {}
for (const [key, entry] of Object.entries(value)) {
const sanitized = sanitizeJsonValue(entry, depth + 1)
if (sanitized !== undefined) {
result[key] = sanitized
}
}
return result
}
return undefined
}
function sanitizeRecord(value: unknown): JsonRecord | null {
const sanitized = sanitizeJsonValue(value)
if (!sanitized || Array.isArray(sanitized)) return null
return sanitized as JsonRecord
}
function stableSerialize(value: JsonValue): string {
if (value === null) return "null"
if (typeof value !== "object") return JSON.stringify(value)
if (Array.isArray(value)) {
return `[${value.map((item) => stableSerialize(item)).join(",")}]`
}
const entries = Object.keys(value)
.sort()
.map((key) => `${JSON.stringify(key)}:${stableSerialize((value as JsonRecord)[key]!)}`)
return `{${entries.join(",")}}`
}
function hashJson(value: JsonRecord | null): string | null {
if (!value) return null
const serialized = stableSerialize(value)
return toHex(sha256(utf8(serialized)))
}
function areJsonValuesEqual(a: JsonValue | undefined, b: JsonValue | undefined): boolean {
if (a === b) return true
if (a === undefined || b === undefined) return false
return stableSerialize(a) === stableSerialize(b)
}
// Busca o lastHeartbeatAt da tabela machineHeartbeats (fonte de verdade)
// Fallback para machine.lastHeartbeatAt para retrocompatibilidade durante migracao
async function getMachineLastHeartbeat(
ctx: QueryCtx | MutationCtx,
machineId: Id<"machines">,
fallback?: number | null
): Promise<number | null> {
const hb = await ctx.db
.query("machineHeartbeats")
.withIndex("by_machine", (q) => q.eq("machineId", machineId))
.first()
return hb?.lastHeartbeatAt ?? fallback ?? null
}
// Campo software é muito grande e é tratado separadamente via machineSoftware
// Extrai campos importantes do extended antes de bloqueá-lo
function extractFromExtended(extended: unknown): JsonRecord {
const result: JsonRecord = {}
const sanitizedExtended = sanitizeRecord(extended)
if (!sanitizedExtended) return result
// Extrair dados do Windows
const windows = sanitizeRecord(sanitizedExtended["windows"])
if (windows) {
const windowsFields: JsonRecord = {}
// bootInfo - informacoes de reinicio
if (windows["bootInfo"]) {
windowsFields["bootInfo"] = windows["bootInfo"] as JsonValue
}
// osInfo - informacoes do sistema operacional
if (windows["osInfo"]) {
windowsFields["osInfo"] = windows["osInfo"] as JsonValue
}
// cpu, baseboard, bios, memoryModules, videoControllers, disks
for (const key of ["cpu", "baseboard", "bios", "memoryModules", "videoControllers", "disks", "bitLocker", "tpm", "secureBoot", "deviceGuard", "firewallProfiles", "windowsUpdate", "computerSystem", "azureAdStatus", "battery", "thermal", "networkAdapters", "monitors", "chassis", "defender", "hotfix"]) {
if (windows[key]) {
windowsFields[key] = windows[key] as JsonValue
}
}
if (Object.keys(windowsFields).length > 0) {
result["windows"] = windowsFields
}
}
// Extrair dados do Linux
const linux = sanitizeRecord(sanitizedExtended["linux"])
if (linux) {
const linuxFields: JsonRecord = {}
for (const key of ["lsblk", "smart", "lspci", "lsusb", "dmidecode"]) {
if (linux[key]) {
linuxFields[key] = linux[key] as JsonValue
}
}
if (Object.keys(linuxFields).length > 0) {
result["linux"] = linuxFields
}
}
// Extrair dados do macOS
const macos = sanitizeRecord(sanitizedExtended["macos"])
if (macos) {
result["macos"] = macos as JsonValue
}
return result
}
function mergeInventory(current: JsonRecord | null | undefined, patch: Record<string, unknown>): JsonRecord {
const sanitizedPatch = sanitizeRecord(patch)
if (!sanitizedPatch) {
return current ? { ...current } : {}
}
const base: JsonRecord = current ? { ...current } : {}
for (const [key, value] of Object.entries(sanitizedPatch)) {
// Filtrar software (extended já foi processado em sanitizeInventoryPayload)
if (key === "software") continue
const base: Record<string, unknown> = isObject(current) ? { ...(current as Record<string, unknown>) } : {}
for (const [key, value] of Object.entries(patch)) {
if (value === undefined) continue
if (isObject(value) && isObject(base[key])) {
base[key] = mergeInventory(base[key] as JsonRecord, value as Record<string, unknown>)
base[key] = mergeInventory(base[key], value)
} else {
base[key] = value as JsonValue
base[key] = value
}
}
return base
}
function mergeMetadata(current: unknown, patch: Record<string, unknown>): JsonRecord {
const base: JsonRecord = sanitizeRecord(current) ?? {}
const sanitizedPatch = sanitizeRecord(patch) ?? {}
for (const [key, value] of Object.entries(sanitizedPatch)) {
function mergeMetadata(current: unknown, patch: Record<string, unknown>) {
const base: Record<string, unknown> = isObject(current) ? { ...(current as Record<string, unknown>) } : {}
for (const [key, value] of Object.entries(patch)) {
if (value === undefined) continue
if (key === "inventory" && isObject(value)) {
base[key] = mergeInventory(sanitizeRecord(base[key]), value as Record<string, unknown>)
if (key === "inventory") {
base[key] = mergeInventory(base[key], value)
} else if (isObject(value) && isObject(base[key])) {
base[key] = mergeInventory(sanitizeRecord(base[key]), value as Record<string, unknown>)
base[key] = mergeInventory(base[key], value)
} else {
base[key] = value as JsonValue
base[key] = value
}
}
return base
}
type JsonRecord = Record<string, unknown>
function ensureRecord(value: unknown): JsonRecord | null {
return sanitizeRecord(value)
return isObject(value) ? (value as JsonRecord) : null
}
function ensureRecordArray(value: unknown): JsonRecord[] {
if (!Array.isArray(value)) return []
return value
.map((entry) => sanitizeRecord(entry))
.filter((entry): entry is JsonRecord => Boolean(entry))
return value.filter(isObject) as JsonRecord[]
}
function ensureFiniteNumber(value: unknown): number | null {
@ -441,30 +300,6 @@ function ensureString(value: unknown): string | null {
return typeof value === "string" ? value : null
}
function sanitizeInventoryPayload(value: unknown): JsonRecord | null {
const record = sanitizeRecord(value)
if (!record) return null
// Extrair campos importantes do extended antes de deletá-lo
if (record["extended"]) {
const extractedExtended = extractFromExtended(record["extended"])
if (Object.keys(extractedExtended).length > 0) {
record["extended"] = extractedExtended
} else {
delete record["extended"]
}
}
// Deletar apenas software (extended já foi processado acima)
delete record["software"]
return record
}
function sanitizeMetricsPayload(value: unknown): JsonRecord | null {
return sanitizeRecord(value)
}
function getNestedRecord(root: JsonRecord | null, ...keys: string[]): JsonRecord | null {
let current: JsonRecord | null = root
for (const key of keys) {
@ -686,13 +521,11 @@ export const register = mutation({
const now = Date.now()
const metadataPatch = args.metadata && typeof args.metadata === "object" ? (args.metadata as Record<string, unknown>) : undefined
// Busca 1: fingerprint exato (tenant + slug + hostname + MACs hash)
let existing = await ctx.db
.query("machines")
.withIndex("by_tenant_fingerprint", (q) => q.eq("tenantId", tenantId).eq("fingerprint", fingerprint))
.first()
// Busca 2: por email + validacao de hardware (fallback se fingerprint mudou mas email igual)
if (!existing) {
const collaboratorEmail = extractCollaboratorEmail(metadataPatch ?? args.metadata)
if (collaboratorEmail) {
@ -706,41 +539,6 @@ export const register = mutation({
}
}
// Busca 3: por hostname + validacao de hardware (fallback se o usuario mudou mas e a mesma maquina fisica)
// Isso garante que o historico de tickets da maquina seja preservado independente do usuario
if (!existing) {
const hostnameLower = args.hostname.trim().toLowerCase()
const candidates = await ctx.db
.query("machines")
.withIndex("by_tenant_hostname", (q) => q.eq("tenantId", tenantId).eq("hostname", args.hostname))
.take(200)
// Procura uma maquina com hostname igual E hardware compativel (MAC ou serial)
for (const candidate of candidates) {
if (matchesExistingHardware(candidate, identifiers, args.hostname)) {
existing = candidate
break
}
}
// Se nao encontrou por hostname exato, tenta busca mais ampla por hardware
if (!existing) {
// Busca maquinas do mesmo tenant (limitado a 500 para evitar OOM)
// e verifica se alguma tem MAC/serial compativel
const allMachines = await ctx.db
.query("machines")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(500)
for (const candidate of allMachines) {
// Verifica se compartilha MAC ou serial (hardware fisico)
const sharedMac = candidate.macAddresses.some((mac) => identifiers.macs.includes(mac))
const sharedSerial = candidate.serialNumbers.some((serial) => identifiers.serials.includes(serial))
if (sharedMac || sharedSerial) {
existing = candidate
break
}
}
}
}
let machineId: Id<"machines">
if (existing) {
@ -806,7 +604,7 @@ export const register = mutation({
const previousTokens = await ctx.db
.query("machineTokens")
.withIndex("by_machine", (q) => q.eq("machineId", machineId))
.take(100)
.collect()
for (const token of previousTokens) {
if (!token.revoked) {
@ -976,133 +774,49 @@ export const heartbeat = mutation({
architecture: v.optional(v.string()),
})
),
metrics: v.optional(v.record(v.string(), v.any())),
inventory: v.optional(v.record(v.string(), v.any())),
metadata: v.optional(v.record(v.string(), v.any())),
metrics: v.optional(v.any()),
inventory: v.optional(v.any()),
metadata: v.optional(v.any()),
},
handler: async (ctx, args) => {
const { machine, token } = await getActiveToken(ctx, args.machineToken)
const now = Date.now()
// 1. SEMPRE atualizar machineHeartbeats (documento pequeno, upsert)
// Isso evita criar versoes do documento machines a cada heartbeat
const existingHeartbeat = await ctx.db
.query("machineHeartbeats")
.withIndex("by_machine", (q) => q.eq("machineId", machine._id))
.first()
if (existingHeartbeat) {
await ctx.db.patch(existingHeartbeat._id, { lastHeartbeatAt: now })
} else {
await ctx.db.insert("machineHeartbeats", { machineId: machine._id, lastHeartbeatAt: now })
const metadataPatch: Record<string, unknown> = {}
if (args.metadata && typeof args.metadata === "object") {
Object.assign(metadataPatch, args.metadata as Record<string, unknown>)
}
// 2. Preparar patch de metadata (se houver mudancas REAIS)
// IMPORTANTE: So incluimos no patch se os dados realmente mudaram
// Isso evita criar versoes desnecessarias do documento machines
const metadataPatch: JsonRecord = {}
const currentMetadata = ensureRecord(machine.metadata) ?? {}
const incomingMeta = ensureRecord(args.metadata)
const remoteAccessSnapshot = incomingMeta ? ensureRecord(incomingMeta["remoteAccessSnapshot"]) : null
if (incomingMeta) {
// Filtrar apenas campos que realmente mudaram
for (const [key, value] of Object.entries(incomingMeta)) {
if (key === "inventory" || key === "metrics" || key === "remoteAccessSnapshot" || key === "inventoryHash" || key === "metricsHash") {
continue
}
const currentValue = currentMetadata[key] as JsonValue | undefined
if (!areJsonValuesEqual(value as JsonValue, currentValue)) {
metadataPatch[key] = value as JsonValue
}
}
const remoteAccessSnapshot = metadataPatch["remoteAccessSnapshot"]
if (remoteAccessSnapshot !== undefined) {
delete metadataPatch["remoteAccessSnapshot"]
}
// Extrair inventory de args.inventory ou de args.metadata.inventory (agente envia em metadata)
const rawInventory = args.inventory ?? (incomingMeta?.["inventory"] as Record<string, unknown> | undefined)
const sanitizedInventory = sanitizeInventoryPayload(rawInventory)
const currentInventory = ensureRecord(currentMetadata.inventory)
const incomingInventoryHash = hashJson(sanitizedInventory)
const currentInventoryHash = typeof currentMetadata["inventoryHash"] === "string" ? currentMetadata["inventoryHash"] : null
if (sanitizedInventory && incomingInventoryHash && incomingInventoryHash !== currentInventoryHash) {
metadataPatch.inventory = mergeInventory(currentInventory, sanitizedInventory)
metadataPatch.inventoryHash = incomingInventoryHash
if (args.inventory && typeof args.inventory === "object") {
metadataPatch.inventory = mergeInventory(metadataPatch.inventory, args.inventory as Record<string, unknown>)
}
const sanitizedMetrics = sanitizeMetricsPayload(args.metrics)
const currentMetrics = ensureRecord(currentMetadata.metrics)
const incomingMetricsHash = hashJson(sanitizedMetrics)
const currentMetricsHash = typeof currentMetadata["metricsHash"] === "string" ? currentMetadata["metricsHash"] : null
if (sanitizedMetrics && incomingMetricsHash && incomingMetricsHash !== currentMetricsHash) {
metadataPatch.metrics = sanitizedMetrics
metadataPatch.metricsHash = incomingMetricsHash
if (args.metrics && typeof args.metrics === "object") {
metadataPatch.metrics = args.metrics as Record<string, unknown>
}
const mergedMetadata = Object.keys(metadataPatch).length ? mergeMetadata(machine.metadata, metadataPatch) : machine.metadata
// 3. Verificar se ha mudancas reais nos dados que justifiquem atualizar o documento machines
const hasMetadataChanges = Object.keys(metadataPatch).length > 0
const hasHostnameChange = args.hostname && args.hostname !== machine.hostname
const hasOsChange = args.os && (
args.os.name !== machine.osName ||
args.os.version !== machine.osVersion ||
args.os.architecture !== machine.architecture
)
const hasStatusChange = typeof args.status === "string" && args.status !== machine.status
const needsMachineUpdate = hasMetadataChanges || hasHostnameChange || hasOsChange || hasStatusChange
// 4. So atualizar machines se houver mudancas reais (evita criar versoes desnecessarias)
// NOTA: lastHeartbeatAt agora vive na tabela machineHeartbeats, nao atualizamos mais aqui
if (needsMachineUpdate) {
const mergedMetadata = hasMetadataChanges ? mergeMetadata(machine.metadata, metadataPatch) : machine.metadata
const nextStatus = args.status ?? machine.status ?? (sanitizedMetrics ? "online" : "unknown")
await ctx.db.patch(machine._id, {
hostname: args.hostname ?? machine.hostname,
displayName: machine.displayName ?? args.hostname ?? machine.hostname,
osName: args.os?.name ?? machine.osName,
osVersion: args.os?.version ?? machine.osVersion,
architecture: args.os?.architecture ?? machine.architecture,
devicePlatform: args.os?.name ?? machine.devicePlatform,
deviceType: machine.deviceType ?? "desktop",
managementMode: machine.managementMode ?? "agent",
updatedAt: now,
status: nextStatus,
metadata: mergedMetadata,
})
}
await ctx.db.patch(machine._id, {
hostname: args.hostname ?? machine.hostname,
displayName: machine.displayName ?? args.hostname ?? machine.hostname,
osName: args.os?.name ?? machine.osName,
osVersion: args.os?.version ?? machine.osVersion,
architecture: args.os?.architecture ?? machine.architecture,
devicePlatform: args.os?.name ?? machine.devicePlatform,
deviceType: machine.deviceType ?? "desktop",
managementMode: machine.managementMode ?? "agent",
lastHeartbeatAt: now,
updatedAt: now,
status: args.status ?? "online",
metadata: mergedMetadata,
})
if (remoteAccessSnapshot) {
await upsertRemoteAccessSnapshotFromHeartbeat(ctx, machine, remoteAccessSnapshot, now)
}
// Processar softwares instalados (armazenados em tabela separada)
// Os dados de software sao extraidos ANTES de sanitizar o inventory
// Usa rawInventory ja extraido anteriormente (linha ~1022)
if (rawInventory && typeof rawInventory === "object") {
const softwareArray = (rawInventory as Record<string, unknown>)["software"]
if (Array.isArray(softwareArray) && softwareArray.length > 0) {
const validSoftware = softwareArray
.filter((item): item is Record<string, unknown> => item !== null && typeof item === "object")
.map((item) => ({
name: typeof item.name === "string" ? item.name : "",
version: typeof item.version === "string" ? item.version : undefined,
publisher: typeof item.publisher === "string" || typeof item.source === "string"
? (item.publisher as string) || (item.source as string)
: undefined,
source: typeof item.source === "string" ? item.source : undefined,
}))
.filter((item) => item.name.length > 0)
if (validSoftware.length > 0) {
await ctx.runMutation(internal.machineSoftware.syncFromHeartbeat, {
tenantId: machine.tenantId,
machineId: machine._id,
software: validSoftware,
})
}
}
}
await ctx.db.patch(token._id, {
lastUsedAt: now,
usageCount: (token.usageCount ?? 0) + 1,
@ -1110,12 +824,8 @@ export const heartbeat = mutation({
})
// Evaluate posture/alerts & optionally create ticket
const fresh = needsMachineUpdate ? (await ctx.db.get(machine._id)) as Doc<"machines"> : machine
await evaluatePostureAndMaybeRaise(ctx, fresh, {
metrics: sanitizedMetrics ?? null,
inventory: sanitizedInventory ?? null,
metadata: incomingMeta ?? null,
})
const fresh = (await ctx.db.get(machine._id)) as Doc<"machines">
await evaluatePostureAndMaybeRaise(ctx, fresh, { metrics: args.metrics, inventory: args.inventory, metadata: args.metadata })
return {
ok: true,
@ -1183,7 +893,7 @@ export const listByTenant = query({
const tenantCompanies = await ctx.db
.query("companies")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(200)
.collect()
const companyById = new Map<string, typeof tenantCompanies[number]>()
const companyBySlug = new Map<string, typeof tenantCompanies[number]>()
@ -1194,17 +904,14 @@ export const listByTenant = query({
}
}
// Limita a 500 maquinas para evitar OOM
const machines = await ctx.db
.query("machines")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(500)
.collect()
return Promise.all(
machines.map(async (machine) => {
const activeToken = await findActiveMachineToken(ctx, machine._id, now)
// Busca heartbeat da tabela separada (fonte de verdade), fallback para legado
const lastHeartbeatAt = await getMachineLastHeartbeat(ctx, machine._id, machine.lastHeartbeatAt)
const offlineThresholdMs = getOfflineThresholdMs()
const staleThresholdMs = getStaleThresholdMs(offlineThresholdMs)
const manualStatus = (machine.status ?? "").toLowerCase()
@ -1213,8 +920,8 @@ export const listByTenant = query({
derivedStatus = "deactivated"
} else if (["maintenance", "blocked"].includes(manualStatus)) {
derivedStatus = manualStatus
} else if (lastHeartbeatAt) {
const age = now - lastHeartbeatAt
} else if (machine.lastHeartbeatAt) {
const age = now - machine.lastHeartbeatAt
if (age <= offlineThresholdMs) {
derivedStatus = "online"
} else if (age <= staleThresholdMs) {
@ -1259,13 +966,6 @@ export const listByTenant = query({
})
).then((arr) => arr.filter(Boolean) as Array<{ id: string; email: string; name: string }>)
// ticket count (limitado a 100 para performance)
const ticketCount = await ctx.db
.query("tickets")
.withIndex("by_tenant_machine", (q) => q.eq("tenantId", tenantId).eq("machineId", machine._id))
.take(100)
.then((tickets) => tickets.length)
const companyFromId = machine.companyId ? companyById.get(machine.companyId) ?? null : null
const companyFromSlug = machine.companySlug ? companyBySlug.get(machine.companySlug) ?? null : null
const resolvedCompany = companyFromId ?? companyFromSlug
@ -1297,8 +997,8 @@ export const listByTenant = query({
linkedUsers,
status: derivedStatus,
isActive: machine.isActive ?? true,
lastHeartbeatAt: lastHeartbeatAt,
heartbeatAgeMs: lastHeartbeatAt ? now - lastHeartbeatAt : null,
lastHeartbeatAt: machine.lastHeartbeatAt ?? null,
heartbeatAgeMs: machine.lastHeartbeatAt ? now - machine.lastHeartbeatAt : null,
registeredBy: machine.registeredBy ?? null,
createdAt: machine.createdAt,
updatedAt: machine.updatedAt,
@ -1315,10 +1015,6 @@ export const listByTenant = query({
lastPostureAt,
remoteAccess: machine.remoteAccess ?? null,
customFields: machine.customFields ?? [],
usbPolicy: machine.usbPolicy ?? null,
usbPolicyStatus: machine.usbPolicyStatus ?? null,
usbPolicyError: machine.usbPolicyError ?? null,
ticketCount,
}
})
)
@ -1347,8 +1043,6 @@ export async function getByIdHandler(
const resolvedCompany = companyFromId ?? companyFromSlug
const activeToken = await findActiveMachineToken(ctx, machine._id, now)
// Busca heartbeat da tabela separada (fonte de verdade), fallback para legado
const lastHeartbeatAt = await getMachineLastHeartbeat(ctx, machine._id, machine.lastHeartbeatAt)
const offlineThresholdMs = getOfflineThresholdMs()
const staleThresholdMs = getStaleThresholdMs(offlineThresholdMs)
@ -1358,8 +1052,8 @@ export async function getByIdHandler(
derivedStatus = "deactivated"
} else if (["maintenance", "blocked"].includes(manualStatus)) {
derivedStatus = manualStatus
} else if (lastHeartbeatAt) {
const age = now - lastHeartbeatAt
} else if (machine.lastHeartbeatAt) {
const age = now - machine.lastHeartbeatAt
if (age <= offlineThresholdMs) {
derivedStatus = "online"
} else if (age <= staleThresholdMs) {
@ -1428,8 +1122,8 @@ export async function getByIdHandler(
linkedUsers,
status: derivedStatus,
isActive: machine.isActive ?? true,
lastHeartbeatAt: lastHeartbeatAt,
heartbeatAgeMs: lastHeartbeatAt ? now - lastHeartbeatAt : null,
lastHeartbeatAt: machine.lastHeartbeatAt ?? null,
heartbeatAgeMs: machine.lastHeartbeatAt ? now - machine.lastHeartbeatAt : null,
registeredBy: machine.registeredBy ?? null,
createdAt: machine.createdAt,
updatedAt: machine.updatedAt,
@ -1446,9 +1140,6 @@ export async function getByIdHandler(
lastPostureAt,
remoteAccess: machine.remoteAccess ?? null,
customFields: machine.customFields ?? [],
usbPolicy: machine.usbPolicy ?? null,
usbPolicyStatus: machine.usbPolicyStatus ?? null,
usbPolicyError: machine.usbPolicyError ?? null,
}
}
@ -1549,7 +1240,6 @@ export const listOpenTickets = query({
type MachineTicketsHistoryFilter = {
statusFilter: "all" | "open" | "resolved"
priorityFilter: string | null
requesterEmail: string | null
from: number | null
to: number | null
}
@ -1558,7 +1248,6 @@ type ListTicketsHistoryArgs = {
machineId: Id<"machines">
status?: "all" | "open" | "resolved"
priority?: string
requesterEmail?: string
search?: string
from?: number
to?: number
@ -1569,7 +1258,6 @@ type GetTicketsHistoryStatsArgs = {
machineId: Id<"machines">
status?: "all" | "open" | "resolved"
priority?: string
requesterEmail?: string
search?: string
from?: number
to?: number
@ -1613,13 +1301,6 @@ function createMachineTicketsQuery(
return working
}
// Checks whether a ticket's requester snapshot matches the given e-mail filter.
// A null/empty filter matches every ticket; a ticket with no snapshot e-mail never matches.
function matchesRequesterEmail(ticket: Doc<"tickets">, requesterEmail: string | null): boolean {
  if (!requesterEmail) {
    return true
  }
  const snapshot = ticket.requesterSnapshot as { name?: string; email?: string } | undefined
  const snapshotEmail = snapshot?.email
  if (!snapshotEmail) {
    return false
  }
  // Comparison is case-insensitive on both sides.
  return snapshotEmail.toLowerCase() === requesterEmail.toLowerCase()
}
function matchesTicketSearch(ticket: Doc<"tickets">, searchTerm: string): boolean {
const normalized = searchTerm.trim().toLowerCase()
if (!normalized) return true
@ -1660,27 +1341,19 @@ export async function listTicketsHistoryHandler(ctx: QueryCtx, args: ListTickets
const normalizedStatusFilter = args.status ?? "all"
const normalizedPriorityFilter = args.priority ? args.priority.toUpperCase() : null
const normalizedRequesterEmail = args.requesterEmail?.trim().toLowerCase() ?? null
const searchTerm = args.search?.trim().toLowerCase() ?? null
const from = typeof args.from === "number" ? args.from : null
const to = typeof args.to === "number" ? args.to : null
const filters: MachineTicketsHistoryFilter = {
statusFilter: normalizedStatusFilter,
priorityFilter: normalizedPriorityFilter,
requesterEmail: normalizedRequesterEmail,
from,
to,
}
const pageResult = await createMachineTicketsQuery(ctx, machine, args.machineId, filters).paginate(args.paginationOpts)
let page = pageResult.page
if (normalizedRequesterEmail) {
page = page.filter((ticket) => matchesRequesterEmail(ticket, normalizedRequesterEmail))
}
if (searchTerm) {
page = page.filter((ticket) => matchesTicketSearch(ticket, searchTerm))
}
const page = searchTerm ? pageResult.page.filter((ticket) => matchesTicketSearch(ticket, searchTerm)) : pageResult.page
const queueCache = new Map<string, Doc<"queues"> | null>()
const items = await Promise.all(
page.map(async (ticket) => {
@ -1733,7 +1406,6 @@ export const listTicketsHistory = query({
machineId: v.id("machines"),
status: v.optional(v.union(v.literal("all"), v.literal("open"), v.literal("resolved"))),
priority: v.optional(v.string()),
requesterEmail: v.optional(v.string()),
search: v.optional(v.string()),
from: v.optional(v.number()),
to: v.optional(v.number()),
@ -1753,14 +1425,12 @@ export async function getTicketsHistoryStatsHandler(
const normalizedStatusFilter = args.status ?? "all"
const normalizedPriorityFilter = args.priority ? args.priority.toUpperCase() : null
const normalizedRequesterEmail = args.requesterEmail?.trim().toLowerCase() ?? null
const searchTerm = args.search?.trim().toLowerCase() ?? ""
const from = typeof args.from === "number" ? args.from : null
const to = typeof args.to === "number" ? args.to : null
const filters: MachineTicketsHistoryFilter = {
statusFilter: normalizedStatusFilter,
priorityFilter: normalizedPriorityFilter,
requesterEmail: normalizedRequesterEmail,
from,
to,
}
@ -1775,13 +1445,7 @@ export async function getTicketsHistoryStatsHandler(
numItems: MACHINE_TICKETS_STATS_PAGE_SIZE,
cursor,
})
let page = pageResult.page
if (normalizedRequesterEmail) {
page = page.filter((ticket) => matchesRequesterEmail(ticket, normalizedRequesterEmail))
}
if (searchTerm) {
page = page.filter((ticket) => matchesTicketSearch(ticket, searchTerm))
}
const page = searchTerm ? pageResult.page.filter((ticket) => matchesTicketSearch(ticket, searchTerm)) : pageResult.page
total += page.length
for (const ticket of page) {
if (OPEN_TICKET_STATUSES.has(normalizeStatus(ticket.status))) {
@ -1807,7 +1471,6 @@ export const getTicketsHistoryStats = query({
machineId: v.id("machines"),
status: v.optional(v.union(v.literal("all"), v.literal("open"), v.literal("resolved"))),
priority: v.optional(v.string()),
requesterEmail: v.optional(v.string()),
search: v.optional(v.string()),
from: v.optional(v.number()),
to: v.optional(v.number()),
@ -1815,44 +1478,6 @@ export const getTicketsHistoryStats = query({
handler: getTicketsHistoryStatsHandler,
})
// Lists the unique requesters (deduplicated by e-mail, case-insensitive) that
// opened tickets on this machine, sorted by display name with e-mail fallback.
// Scans at most 200 tickets via the by_tenant_machine index.
export const listMachineRequesters = query({
  args: {
    machineId: v.id("machines"),
  },
  handler: async (ctx, args) => {
    const machine = await ctx.db.get(args.machineId)
    if (!machine) {
      return []
    }

    const recentTickets = await ctx.db
      .query("tickets")
      .withIndex("by_tenant_machine", (q) => q.eq("tenantId", machine.tenantId).eq("machineId", args.machineId))
      .take(200)

    // First occurrence of each e-mail wins (keyed by lowercase form).
    const uniqueByEmail = new Map<string, { email: string; name: string | null }>()
    for (const ticket of recentTickets) {
      const snapshot = ticket.requesterSnapshot as { name?: string; email?: string } | undefined
      const email = snapshot?.email
      if (!email) continue
      const key = email.toLowerCase()
      if (uniqueByEmail.has(key)) continue
      uniqueByEmail.set(key, { email, name: snapshot?.name ?? null })
    }

    const requesters = Array.from(uniqueByEmail.values())
    requesters.sort((a, b) => (a.name ?? a.email).localeCompare(b.name ?? b.email))
    return requesters
  },
})
export async function updatePersonaHandler(
ctx: MutationCtx,
args: {
@ -2386,7 +2011,7 @@ export const resetAgent = mutation({
const tokens = await ctx.db
.query("machineTokens")
.withIndex("by_machine", (q) => q.eq("machineId", machineId))
.take(100)
.collect()
const now = Date.now()
let revokedCount = 0
@ -2410,44 +2035,6 @@ export const resetAgent = mutation({
},
})
/**
 * Real-time machine state for the desktop client.
 * The desktop subscribes to this query and reacts immediately when:
 * - isActive flips to false (machine deactivated)
 * - hasValidToken flips to false (tokens reset/revoked)
 */
export const getMachineState = query({
  args: {
    machineId: v.id("machines"),
  },
  handler: async (ctx, { machineId }) => {
    const machine = await ctx.db.get(machineId)
    if (!machine) {
      return { found: false, isActive: false, hasValidToken: false, status: "unknown" as const }
    }

    // A token counts as valid when it is neither revoked nor past its expiry.
    const now = Date.now()
    const tokens = await ctx.db
      .query("machineTokens")
      .withIndex("by_machine", (q) => q.eq("machineId", machineId))
      .take(10)
    let hasValidToken = false
    for (const token of tokens) {
      const expired = Boolean(token.expiresAt && token.expiresAt < now)
      if (!token.revoked && !expired) {
        hasValidToken = true
        break
      }
    }

    return {
      found: true,
      isActive: machine.isActive ?? true,
      hasValidToken,
      status: machine.status ?? "unknown",
    }
  },
})
type RemoteAccessEntry = {
id: string
provider: string
@ -2579,38 +2166,6 @@ function normalizeRemoteAccessList(raw: unknown): RemoteAccessEntry[] {
return entries
}
async function removeDuplicateRemoteAccessEntries(
ctx: MutationCtx,
tenantId: string,
currentMachineId: Id<"machines">,
provider: string,
identifier: string,
now: number
) {
// Limita a 500 maquinas para evitar OOM
const machines = await ctx.db
.query("machines")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(500)
const providerLc = provider.toLowerCase()
const identifierLc = identifier.toLowerCase()
for (const device of machines) {
if (device._id === currentMachineId) continue
const entries = normalizeRemoteAccessList(device.remoteAccess)
const filtered = entries.filter(
(entry) =>
entry.provider.toLowerCase() !== providerLc || entry.identifier.toLowerCase() !== identifierLc
)
if (filtered.length === entries.length) continue
await ctx.db.patch(device._id, {
remoteAccess: filtered.length > 0 ? filtered : null,
updatedAt: now,
})
}
}
async function upsertRemoteAccessSnapshotFromHeartbeat(
ctx: MutationCtx,
machine: Doc<"machines">,
@ -2624,27 +2179,16 @@ async function upsertRemoteAccessSnapshotFromHeartbeat(
if (!identifier) return
const existingEntries = normalizeRemoteAccessList(machine.remoteAccess)
// Busca primeiro por provider + identifier (atualização exata)
let idx = existingEntries.findIndex(
const idx = existingEntries.findIndex(
(entry) => entry.provider.toLowerCase() === provider.toLowerCase() && entry.identifier.toLowerCase() === identifier.toLowerCase()
)
// Se não encontrou, busca apenas por provider (substituição - ex: RustDesk ID mudou)
if (idx < 0) {
idx = existingEntries.findIndex(
(entry) => entry.provider.toLowerCase() === provider.toLowerCase()
)
}
const entryId = idx >= 0 ? existingEntries[idx].id : createRemoteAccessId()
const metadata = {
...(normalized.metadata ?? {}),
snapshotSource: "heartbeat",
provider,
identifier,
machineId: machine._id,
hostname: machine.hostname,
lastVerifiedAt: timestamp,
}
@ -2862,19 +2406,12 @@ export const upsertRemoteAccessViaToken = mutation({
const cleanedNotes = args.notes?.trim() ? args.notes.trim() : null
const timestamp = Date.now()
const existingEntries = normalizeRemoteAccessList(machine.remoteAccess)
let existingIndex = existingEntries.findIndex(
const existingIndex = existingEntries.findIndex(
(entry) =>
entry.provider.toLowerCase() === trimmedProvider.toLowerCase() &&
entry.identifier.toLowerCase() === trimmedIdentifier.toLowerCase()
)
// Se o identificador mudar (ex.: RustDesk gerando novo ID), reaproveitamos a entrada do mesmo provider
if (existingIndex < 0) {
existingIndex = existingEntries.findIndex(
(entry) => entry.provider.toLowerCase() === trimmedProvider.toLowerCase()
)
}
const entryId = existingIndex >= 0 ? existingEntries[existingIndex].id : createRemoteAccessId()
const updatedEntry: RemoteAccessEntry = {
id: entryId,
@ -2886,7 +2423,6 @@ export const upsertRemoteAccessViaToken = mutation({
notes: cleanedNotes,
lastVerifiedAt: timestamp,
metadata: {
source: "machine-token",
provider: trimmedProvider,
identifier: trimmedIdentifier,
url: normalizedUrl,
@ -2894,9 +2430,6 @@ export const upsertRemoteAccessViaToken = mutation({
password: cleanedPassword,
notes: cleanedNotes,
lastVerifiedAt: timestamp,
machineId: machine._id,
hostname: machine.hostname,
tenantId: machine.tenantId,
},
}
@ -2905,8 +2438,6 @@ export const upsertRemoteAccessViaToken = mutation({
? existingEntries.map((entry, index) => (index === existingIndex ? updatedEntry : entry))
: [...existingEntries, updatedEntry]
await removeDuplicateRemoteAccessEntries(ctx, machine.tenantId, machine._id, trimmedProvider, trimmedIdentifier, timestamp)
await ctx.db.patch(machine._id, {
remoteAccess: nextEntries,
updatedAt: timestamp,
@ -2940,7 +2471,7 @@ export const remove = mutation({
const tokens = await ctx.db
.query("machineTokens")
.withIndex("by_machine", (q) => q.eq("machineId", machineId))
.take(100)
.collect()
await Promise.all(tokens.map((token) => ctx.db.delete(token._id)))
await ctx.db.delete(machineId)

View file

@ -9,7 +9,6 @@ import {
fetchOpenScopedTickets,
fetchScopedTicketsByCreatedRange,
fetchScopedTicketsByResolvedRange,
fetchScopedTicketsByResolvedRangeSnapshot,
normalizeStatus,
} from "./reports"
import { requireStaff } from "./rbac"
@ -295,7 +294,7 @@ const metricResolvers: Record<string, MetricResolver> = {
queueIds,
)
const resolvedTickets = filterTicketsByQueue(
await fetchScopedTicketsByResolvedRangeSnapshot(ctx, tenantId, viewer, startMs, endMs, companyId),
await fetchScopedTicketsByResolvedRange(ctx, tenantId, viewer, startMs, endMs, companyId),
queueIds,
)
@ -448,7 +447,7 @@ const metricResolvers: Record<string, MetricResolver> = {
queueCounts.set(queueKey, (queueCounts.get(queueKey) ?? 0) + 1)
}
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).take(50)
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).collect()
const data = Array.from(queueCounts.entries()).map(([queueId, total]) => {
const queue = queues.find((q) => String(q._id) === queueId)
return {
@ -470,7 +469,7 @@ const metricResolvers: Record<string, MetricResolver> = {
const filterHas = queueFilter && queueFilter.length > 0
const normalizeKey = (id: Id<"queues"> | null) => (id ? String(id) : "sem-fila")
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).take(50)
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).collect()
const queueNameMap = new Map<string, string>()
queues.forEach((queue) => {
const key = String(queue._id)
@ -593,7 +592,7 @@ const metricResolvers: Record<string, MetricResolver> = {
stats.set(queueKey, current)
}
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).take(50)
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).collect()
const data = Array.from(stats.entries()).map(([queueId, value]) => {
const queue = queues.find((q) => String(q._id) === queueId)
const compliance = value.total > 0 ? value.compliant / value.total : 0
@ -664,8 +663,7 @@ const metricResolvers: Record<string, MetricResolver> = {
},
"devices.health_summary": async (ctx, { tenantId, params }) => {
const limit = parseLimit(params, 10)
// Limita a 200 maquinas para evitar OOM
const machines = await ctx.db.query("machines").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).take(200)
const machines = await ctx.db.query("machines").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).collect()
const now = Date.now()
const summary = machines
.map((machine) => {

View file

@ -1,4 +1,4 @@
import { randomBytes } from "@noble/hashes/utils.js"
import { randomBytes } from "@noble/hashes/utils"
import { ConvexError, v } from "convex/values"
import { mutation, query } from "./_generated/server"
@ -307,21 +307,21 @@ async function getTenantUsers(ctx: QueryCtx, tenantId: string) {
return ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(2000)
.collect()
}
async function getTenantQueues(ctx: QueryCtx, tenantId: string) {
return ctx.db
.query("queues")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(500)
.collect()
}
async function getTenantCompanies(ctx: QueryCtx, tenantId: string) {
return ctx.db
.query("companies")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(1000)
.collect()
}
export const exportTenantSnapshot = query({
@ -347,7 +347,7 @@ export const exportTenantSnapshot = query({
const tickets = await ctx.db
.query("tickets")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(5000)
.collect()
const ticketsWithRelations = []
@ -355,12 +355,12 @@ export const exportTenantSnapshot = query({
const comments = await ctx.db
.query("ticketComments")
.withIndex("by_ticket", (q) => q.eq("ticketId", ticket._id))
.take(500)
.collect()
const events = await ctx.db
.query("ticketEvents")
.withIndex("by_ticket", (q) => q.eq("ticketId", ticket._id))
.take(500)
.collect()
const requester = userMap.get(ticket.requesterId)
const assignee = ticket.assigneeId ? userMap.get(ticket.assigneeId) : undefined
@ -575,7 +575,7 @@ export const importPrismaSnapshot = mutation({
const existingTenantUsers = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", snapshot.tenantId))
.take(2000)
.collect()
for (const user of existingTenantUsers) {
const role = normalizeRole(user.role ?? null)
@ -672,7 +672,7 @@ export const importPrismaSnapshot = mutation({
const existingComments = await ctx.db
.query("ticketComments")
.withIndex("by_ticket", (q) => q.eq("ticketId", ticketId))
.take(500)
.collect()
for (const comment of existingComments) {
await ctx.db.delete(comment._id)
}
@ -680,7 +680,7 @@ export const importPrismaSnapshot = mutation({
const existingEvents = await ctx.db
.query("ticketEvents")
.withIndex("by_ticket", (q) => q.eq("ticketId", ticketId))
.take(500)
.collect()
for (const event of existingEvents) {
await ctx.db.delete(event._id)
}
@ -737,9 +737,7 @@ export const backfillTicketCommentAuthorSnapshots = mutation({
handler: async (ctx, { limit, dryRun }) => {
const effectiveDryRun = Boolean(dryRun)
const maxUpdates = limit && limit > 0 ? limit : null
// Limita a 2000 comentarios por execucao para evitar OOM
// Se precisar processar mais, rode novamente a migracao
const comments = await ctx.db.query("ticketComments").take(2000)
const comments = await ctx.db.query("ticketComments").collect()
let updated = 0
let skippedExisting = 0
@ -765,7 +763,7 @@ export const backfillTicketCommentAuthorSnapshots = mutation({
const events = await ctx.db
.query("ticketEvents")
.withIndex("by_ticket", (q) => q.eq("ticketId", comment.ticketId))
.take(100)
.collect()
const matchingEvent = events.find(
(event) => event.type === "COMMENT_ADDED" && event.createdAt === comment.createdAt,
)
@ -812,13 +810,12 @@ export const syncMachineCompanyReferences = mutation({
handler: async (ctx, { tenantId, dryRun }) => {
const effectiveDryRun = Boolean(dryRun)
// Limita a 1000 maquinas por execucao para evitar OOM
const machines = tenantId && tenantId.trim().length > 0
? await ctx.db
.query("machines")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(1000)
: await ctx.db.query("machines").take(1000)
.collect()
: await ctx.db.query("machines").collect()
const slugCache = new Map<string, Id<"companies"> | null>()
const summary = {
@ -870,131 +867,13 @@ export const syncMachineCompanyReferences = mutation({
},
})
/**
 * Migration that compacts machine metadata by dropping bulky payloads such as
 * inventory.software (the installed-programs list), which can reach hundreds
 * of KBs per machine.
 *
 * Preserved:
 * - metrics (system metrics)
 * - postureAlerts (posture alerts)
 * - lastPostureAt (timestamp)
 * - collaborator (collaborator e-mail)
 * - inventory.os, inventory.cpu, inventory.memory, inventory.disks, inventory.network
 * - inventory.services (service list - used by posture alerts)
 *
 * Removed:
 * - inventory.software (full installed-programs list - very large)
 * - inventory.extended (extended Linux data - very large)
 * - cpuWindow trimmed to the last 30 samples (window can be rebuilt over time)
 */
export const compactMachineMetadata = mutation({
  args: {
    tenantId: v.optional(v.string()),
    limit: v.optional(v.number()),
    dryRun: v.optional(v.boolean()),
  },
  handler: async (ctx, { tenantId, limit, dryRun }) => {
    const effectiveDryRun = Boolean(dryRun)
    // Batch size is clamped to 200 machines per run; re-run for more.
    const effectiveLimit = limit && limit > 0 ? Math.min(limit, 200) : 200

    // Fetch machines in small batches to avoid OOM (optionally scoped to one tenant).
    const machines = tenantId && tenantId.trim().length > 0
      ? await ctx.db
          .query("machines")
          .withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
          .take(effectiveLimit)
      : await ctx.db.query("machines").take(effectiveLimit)

    let processed = 0
    let compacted = 0
    // Savings are estimated from JSON string lengths (bytes, assuming 1 char ~ 1 byte).
    let bytesSavedEstimate = 0

    for (const machine of machines) {
      processed += 1
      const metadata = machine.metadata
      if (!metadata || typeof metadata !== "object") continue

      const meta = metadata as Record<string, unknown>
      const inventory = meta["inventory"]
      const cpuWindow = meta["cpuWindow"]

      // Only patch the machine if something bulky was actually found.
      let needsCompact = false
      const sizeBefore = JSON.stringify(meta).length
      const newMeta: Record<string, unknown> = {}

      // Preserve the essential top-level metadata fields.
      if (meta["metrics"]) newMeta["metrics"] = meta["metrics"]
      if (meta["postureAlerts"]) newMeta["postureAlerts"] = meta["postureAlerts"]
      if (meta["lastPostureAt"]) newMeta["lastPostureAt"] = meta["lastPostureAt"]
      if (meta["collaborator"]) newMeta["collaborator"] = meta["collaborator"]
      if (meta["remoteAccessSnapshot"]) newMeta["remoteAccessSnapshot"] = meta["remoteAccessSnapshot"]

      // Trim cpuWindow to the latest 30 samples (instead of up to 120).
      if (Array.isArray(cpuWindow) && cpuWindow.length > 30) {
        newMeta["cpuWindow"] = cpuWindow.slice(-30)
        needsCompact = true
      } else if (cpuWindow) {
        newMeta["cpuWindow"] = cpuWindow
      }

      // Compact inventory: drop `software` and `extended`, keep the rest.
      if (inventory && typeof inventory === "object") {
        const inv = inventory as Record<string, unknown>
        const newInv: Record<string, unknown> = {}

        // Copy only the essential inventory sections.
        if (inv["os"]) newInv["os"] = inv["os"]
        if (inv["cpu"]) newInv["cpu"] = inv["cpu"]
        if (inv["memory"]) newInv["memory"] = inv["memory"]
        if (inv["disks"]) newInv["disks"] = inv["disks"]
        if (inv["network"]) newInv["network"] = inv["network"]
        if (inv["services"]) newInv["services"] = inv["services"]
        if (inv["bios"]) newInv["bios"] = inv["bios"]
        if (inv["motherboard"]) newInv["motherboard"] = inv["motherboard"]

        // Presence of either bulky field is what marks the machine for compaction.
        if (inv["software"] || inv["extended"]) {
          needsCompact = true
        }

        if (Object.keys(newInv).length > 0) {
          newMeta["inventory"] = newInv
        }
      }

      if (!needsCompact) continue

      const sizeAfter = JSON.stringify(newMeta).length
      bytesSavedEstimate += sizeBefore - sizeAfter

      if (!effectiveDryRun) {
        await ctx.db.patch(machine._id, { metadata: newMeta, updatedAt: Date.now() })
      }
      compacted += 1
    }

    return {
      dryRun: effectiveDryRun,
      processed,
      compacted,
      bytesSavedEstimate,
      bytesSavedMB: Math.round(bytesSavedEstimate / 1024 / 1024 * 100) / 100,
    }
  },
})
export const backfillTicketSnapshots = mutation({
args: { tenantId: v.string(), limit: v.optional(v.number()) },
handler: async (ctx, { tenantId, limit }) => {
// Limita a 1000 tickets por execucao para evitar OOM
const effectiveLimit = limit && limit > 0 ? Math.min(limit, 1000) : 1000
const tickets = await ctx.db
.query("tickets")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(effectiveLimit)
.collect()
let processed = 0
for (const t of tickets) {
@ -1043,81 +922,3 @@ export const backfillTicketSnapshots = mutation({
return { processed }
},
})
/**
 * Migration that removes duplicated "assignee changed" comments.
 * These comments used to be created automatically when the assignee changed,
 * but the same information already appears in the timeline (ticketEvents).
 * The comment body follows the pattern:
 * "<p><strong>Responsável atualizado:</strong>..."
 */
export const removeAssigneeChangeComments = mutation({
  args: {
    tenantId: v.optional(v.string()),
    limit: v.optional(v.number()),
    dryRun: v.optional(v.boolean()),
  },
  handler: async (ctx, { tenantId, limit, dryRun }) => {
    const effectiveDryRun = Boolean(dryRun)
    // Deletions are capped per run; re-run the migration to process the rest.
    const effectiveLimit = limit && limit > 0 ? Math.min(limit, 500) : 500

    // Scan a bounded window of comments. NOTE: the original code had a
    // conditional here with two identical branches; tenant scoping never
    // happened at this step — it is applied below via the tickets table.
    const comments = await ctx.db.query("ticketComments").take(5000)

    // Keep only internal comments matching the auto-generated assignee-change pattern.
    const assigneeChangePattern = "<p><strong>Responsável atualizado:</strong>"
    const toDelete = comments.filter((comment) => {
      if (comment.visibility !== "INTERNAL") return false
      if (typeof comment.body !== "string") return false
      return comment.body.includes(assigneeChangePattern)
    })

    // When a tenant is given, keep only comments whose ticket belongs to it.
    let filtered = toDelete
    if (tenantId && tenantId.trim().length > 0) {
      const ticketIds = new Set<string>()
      const tickets = await ctx.db
        .query("tickets")
        .withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
        .take(10000)
      for (const t of tickets) {
        ticketIds.add(t._id)
      }
      filtered = toDelete.filter((c) => ticketIds.has(c.ticketId))
    }

    const limitedComments = filtered.slice(0, effectiveLimit)
    let deleted = 0
    let eventsDeleted = 0
    for (const comment of limitedComments) {
      if (!effectiveDryRun) {
        // Also delete the matching COMMENT_ADDED event (same timestamp, 1s tolerance).
        const events = await ctx.db
          .query("ticketEvents")
          .withIndex("by_ticket", (q) => q.eq("ticketId", comment.ticketId))
          .take(500)
        const matchingEvent = events.find(
          (event) =>
            event.type === "COMMENT_ADDED" &&
            Math.abs(event.createdAt - comment.createdAt) < 1000,
        )
        if (matchingEvent) {
          await ctx.db.delete(matchingEvent._id)
          eventsDeleted += 1
        }
        await ctx.db.delete(comment._id)
      }
      deleted += 1
    }

    return {
      dryRun: effectiveDryRun,
      totalFound: filtered.length,
      deleted,
      eventsDeleted,
      remaining: filtered.length - deleted,
    }
  },
})

View file

@ -1,76 +0,0 @@
import { ConvexError, v } from "convex/values"
import { query } from "./_generated/server"
import { getOfflineThresholdMs, getStaleThresholdMs } from "./machines"
const MACHINE_SCAN_LIMIT = 1200
// Aggregated machine-health snapshot for internal monitoring.
// Optionally guarded by INTERNAL_HEALTH_TOKEN (REPORTS_CRON_SECRET as fallback);
// when neither environment variable is set the query is open.
export const healthSnapshot = query({
  args: {
    token: v.optional(v.string()),
  },
  handler: async (ctx, args) => {
    const expectedToken = process.env["INTERNAL_HEALTH_TOKEN"] ?? process.env["REPORTS_CRON_SECRET"] ?? null
    if (expectedToken && args.token !== expectedToken) {
      throw new ConvexError("Nao autorizado")
    }

    const now = Date.now()
    const offlineMs = getOfflineThresholdMs()
    const staleMs = getStaleThresholdMs(offlineMs)

    const machines = await ctx.db.query("machines").take(MACHINE_SCAN_LIMIT)
    const heartbeats = await ctx.db.query("machineHeartbeats").collect()

    // Bucket every heartbeat by age and track the newest/oldest timestamps seen.
    const counters = { online: 0, warning: 0, offline: 0 }
    let newestBeat = 0
    let oldestBeat = 0
    const machinesWithBeat = new Set<string>()

    for (const beat of heartbeats) {
      machinesWithBeat.add(String(beat.machineId))
      const ts = beat.lastHeartbeatAt
      if (newestBeat === 0 || ts > newestBeat) {
        newestBeat = ts
      }
      if (oldestBeat === 0 || ts < oldestBeat) {
        oldestBeat = ts
      }
      const age = now - ts
      if (age <= offlineMs) {
        counters.online += 1
      } else if (age <= staleMs) {
        counters.warning += 1
      } else {
        counters.offline += 1
      }
    }

    // Machines never seen in the heartbeat table count as offline.
    // (Clamped at zero: the heartbeat scan is unbounded while machines are capped.)
    const missingBeats = machines.length - machinesWithBeat.size
    const missing = missingBeats > 0 ? missingBeats : 0

    return {
      totals: {
        machines: machines.length,
        heartbeats: heartbeats.length,
        withoutHeartbeat: missing,
        truncated: machines.length === MACHINE_SCAN_LIMIT,
      },
      connectivity: {
        online: counters.online,
        warning: counters.warning,
        offline: counters.offline + missing,
      },
      heartbeatAgeMs: {
        newest: newestBeat ? now - newestBeat : null,
        oldest: oldestBeat ? now - oldestBeat : null,
      },
      thresholds: {
        offlineMs,
        staleMs,
      },
      generatedAt: now,
    }
  },
})

View file

@ -81,12 +81,12 @@ export const list = query({
const queues = await ctx.db
.query("queues")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
const teams = await ctx.db
.query("teams")
.withIndex("by_tenant_name", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
return queues.map((queue) => {
const team = queue.teamId ? teams.find((item) => item._id === queue.teamId) : null;
@ -105,48 +105,17 @@ export const list = query({
},
});
export const listForStaff = query({
args: { tenantId: v.string(), viewerId: v.id("users") },
handler: async (ctx, { tenantId, viewerId }) => {
await requireStaff(ctx, viewerId, tenantId)
const queues = await ctx.db
.query("queues")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50)
const teams = await ctx.db
.query("teams")
.withIndex("by_tenant_name", (q) => q.eq("tenantId", tenantId))
.take(50)
return queues.map((queue) => {
const team = queue.teamId ? teams.find((item) => item._id === queue.teamId) : null
return {
id: queue._id,
name: queue.name,
slug: queue.slug,
team: team
? {
id: team._id,
name: team.name,
}
: null,
}
})
},
})
export const summary = query({
args: { tenantId: v.string(), viewerId: v.id("users") },
handler: async (ctx, { tenantId, viewerId }) => {
await requireStaff(ctx, viewerId, tenantId);
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).take(50);
const queues = await ctx.db.query("queues").withIndex("by_tenant", (q) => q.eq("tenantId", tenantId)).collect();
const result = await Promise.all(
queues.map(async (qItem) => {
const tickets = await ctx.db
.query("tickets")
.withIndex("by_tenant_queue", (q) => q.eq("tenantId", tenantId).eq("queueId", qItem._id))
.take(50);
.collect();
let pending = 0;
let inProgress = 0;
let paused = 0;
@ -154,17 +123,10 @@ export const summary = query({
const now = Date.now();
for (const ticket of tickets) {
const status = normalizeStatus(ticket.status);
const isWorking = ticket.working === true;
if (status === "PENDING") {
pending += 1;
} else if (status === "AWAITING_ATTENDANCE") {
// "Em andamento" conta apenas tickets com play ativo
if (isWorking) {
inProgress += 1;
} else {
// Tickets em atendimento sem play ativo contam como "Em aberto"
pending += 1;
}
inProgress += 1;
} else if (status === "PAUSED") {
paused += 1;
}

View file

@ -5,7 +5,6 @@ import type { MutationCtx, QueryCtx } from "./_generated/server"
const STAFF_ROLES = new Set(["ADMIN", "MANAGER", "AGENT"])
const MANAGER_ROLE = "MANAGER"
const INTERNAL_ROLES = new Set(["ADMIN", "AGENT"])
type Ctx = QueryCtx | MutationCtx
@ -45,14 +44,6 @@ export async function requireAdmin(ctx: Ctx, userId: Id<"users">, tenantId?: str
return result
}
// Resolves the user within the tenant and allows only internal roles
// (ADMIN / AGENT) to proceed; everyone else gets a ConvexError.
export async function requireInternal(ctx: Ctx, userId: Id<"users">, tenantId?: string) {
  const result = await requireUser(ctx, userId, tenantId)
  const role = result.role
  if (role && INTERNAL_ROLES.has(role)) {
    return result
  }
  throw new ConvexError("Acesso restrito a administradores e agentes")
}
// removed customer role; use requireCompanyManager or requireStaff as appropriate
export async function requireCompanyManager(ctx: Ctx, userId: Id<"users">, tenantId?: string) {

View file

@ -1,78 +0,0 @@
import * as React from "react"
import { render } from "@react-email/render"
import AutomationEmail, { type AutomationEmailProps } from "../emails/automation-email"
import SimpleNotificationEmail, { type SimpleNotificationEmailProps } from "../emails/simple-notification-email"
import InviteEmail, { type InviteEmailProps } from "../emails/invite-email"
import PasswordResetEmail, { type PasswordResetEmailProps } from "../emails/password-reset-email"
import NewLoginEmail, { type NewLoginEmailProps } from "../emails/new-login-email"
import SlaWarningEmail, { type SlaWarningEmailProps } from "../emails/sla-warning-email"
import SlaBreachedEmail, { type SlaBreachedEmailProps } from "../emails/sla-breached-email"
import TicketCreatedEmail, { type TicketCreatedEmailProps } from "../emails/ticket-created-email"
import TicketResolvedEmail, { type TicketResolvedEmailProps } from "../emails/ticket-resolved-email"
import TicketAssignedEmail, { type TicketAssignedEmailProps } from "../emails/ticket-assigned-email"
import TicketStatusEmail, { type TicketStatusEmailProps } from "../emails/ticket-status-email"
import TicketCommentEmail, { type TicketCommentEmailProps } from "../emails/ticket-comment-email"
export type {
AutomationEmailProps,
SimpleNotificationEmailProps,
InviteEmailProps,
PasswordResetEmailProps,
NewLoginEmailProps,
SlaWarningEmailProps,
SlaBreachedEmailProps,
TicketCreatedEmailProps,
TicketResolvedEmailProps,
TicketAssignedEmailProps,
TicketStatusEmailProps,
TicketCommentEmailProps,
}
// Each helper below renders one transactional e-mail template to a static
// HTML string via @react-email/render; `pretty: false` keeps the markup
// compact for SMTP delivery.

// Renders the automation rule notification e-mail.
export async function renderAutomationEmailHtml(props: AutomationEmailProps) {
  return render(<AutomationEmail {...props} />, { pretty: false })
}
// Renders the generic plain notification e-mail.
export async function renderSimpleNotificationEmailHtml(props: SimpleNotificationEmailProps) {
  return render(<SimpleNotificationEmail {...props} />, { pretty: false })
}
// Renders the user invitation e-mail.
export async function renderInviteEmailHtml(props: InviteEmailProps) {
  return render(<InviteEmail {...props} />, { pretty: false })
}
// Renders the password reset e-mail.
export async function renderPasswordResetEmailHtml(props: PasswordResetEmailProps) {
  return render(<PasswordResetEmail {...props} />, { pretty: false })
}
// Renders the new-login alert e-mail.
export async function renderNewLoginEmailHtml(props: NewLoginEmailProps) {
  return render(<NewLoginEmail {...props} />, { pretty: false })
}
// Renders the SLA warning e-mail.
export async function renderSlaWarningEmailHtml(props: SlaWarningEmailProps) {
  return render(<SlaWarningEmail {...props} />, { pretty: false })
}
// Renders the SLA breached e-mail.
export async function renderSlaBreachedEmailHtml(props: SlaBreachedEmailProps) {
  return render(<SlaBreachedEmail {...props} />, { pretty: false })
}
// Renders the ticket created e-mail.
export async function renderTicketCreatedEmailHtml(props: TicketCreatedEmailProps) {
  return render(<TicketCreatedEmail {...props} />, { pretty: false })
}
// Renders the ticket resolved e-mail.
export async function renderTicketResolvedEmailHtml(props: TicketResolvedEmailProps) {
  return render(<TicketResolvedEmail {...props} />, { pretty: false })
}
// Renders the ticket assignment e-mail.
export async function renderTicketAssignedEmailHtml(props: TicketAssignedEmailProps) {
  return render(<TicketAssignedEmail {...props} />, { pretty: false })
}
// Renders the ticket status change e-mail.
export async function renderTicketStatusEmailHtml(props: TicketStatusEmailProps) {
  return render(<TicketStatusEmail {...props} />, { pretty: false })
}
// Renders the ticket comment notification e-mail.
export async function renderTicketCommentEmailHtml(props: TicketCommentEmailProps) {
  return render(<TicketCommentEmail {...props} />, { pretty: false })
}

File diff suppressed because it is too large Load diff

View file

@ -82,7 +82,6 @@ export default defineSchema({
contacts: v.optional(v.any()),
locations: v.optional(v.any()),
sla: v.optional(v.any()),
reopenWindowDays: v.optional(v.number()),
tags: v.optional(v.array(v.string())),
customFields: v.optional(v.any()),
notes: v.optional(v.string()),
@ -200,11 +199,7 @@ export default defineSchema({
name: v.string(),
description: v.optional(v.string()),
timeToFirstResponse: v.optional(v.number()), // minutes
responseMode: v.optional(v.string()), // "business" | "calendar"
timeToResolution: v.optional(v.number()), // minutes
solutionMode: v.optional(v.string()), // "business" | "calendar"
alertThreshold: v.optional(v.number()), // 0.1 a 0.95
pauseStatuses: v.optional(v.array(v.string())), // Status que pausam SLA
}).index("by_tenant_name", ["tenantId", "name"]),
tickets: defineTable({
@ -278,8 +273,6 @@ export default defineSchema({
slaPausedBy: v.optional(v.string()),
slaPausedMs: v.optional(v.number()),
dueAt: v.optional(v.number()), // ms since epoch
visitStatus: v.optional(v.string()),
visitPerformedAt: v.optional(v.number()),
firstResponseAt: v.optional(v.number()),
resolvedAt: v.optional(v.number()),
closedAt: v.optional(v.number()),
@ -314,27 +307,6 @@ export default defineSchema({
),
formTemplate: v.optional(v.string()),
formTemplateLabel: v.optional(v.string()),
checklist: v.optional(
v.array(
v.object({
id: v.string(),
text: v.string(),
description: v.optional(v.string()),
type: v.optional(v.string()), // "checkbox" | "question"
options: v.optional(v.array(v.string())), // Para tipo "question": ["Sim", "Nao", ...]
answer: v.optional(v.string()), // Resposta selecionada para tipo "question"
done: v.boolean(),
required: v.optional(v.boolean()),
templateId: v.optional(v.id("ticketChecklistTemplates")),
templateItemId: v.optional(v.string()),
templateDescription: v.optional(v.string()), // Descricao do template (copiada ao aplicar)
createdAt: v.optional(v.number()),
createdBy: v.optional(v.id("users")),
doneAt: v.optional(v.number()),
doneBy: v.optional(v.id("users")),
})
)
),
relatedTicketIds: v.optional(v.array(v.id("tickets"))),
resolvedWithTicketId: v.optional(v.id("tickets")),
reopenDeadline: v.optional(v.number()),
@ -397,41 +369,6 @@ export default defineSchema({
createdAt: v.number(),
}).index("by_ticket", ["ticketId"]),
ticketAutomations: defineTable({
tenantId: v.string(),
name: v.string(),
enabled: v.boolean(),
trigger: v.string(),
timing: v.string(), // IMMEDIATE | DELAYED
delayMs: v.optional(v.number()),
conditions: v.optional(v.any()),
actions: v.any(),
createdBy: v.id("users"),
updatedBy: v.optional(v.id("users")),
createdAt: v.number(),
updatedAt: v.number(),
runCount: v.optional(v.number()),
lastRunAt: v.optional(v.number()),
})
.index("by_tenant", ["tenantId"])
.index("by_tenant_enabled", ["tenantId", "enabled"])
.index("by_tenant_trigger", ["tenantId", "trigger"]),
ticketAutomationRuns: defineTable({
tenantId: v.string(),
automationId: v.id("ticketAutomations"),
ticketId: v.id("tickets"),
eventType: v.string(),
status: v.string(), // SUCCESS | SKIPPED | ERROR
matched: v.boolean(),
error: v.optional(v.string()),
actionsApplied: v.optional(v.any()),
createdAt: v.number(),
})
.index("by_tenant_created", ["tenantId", "createdAt"])
.index("by_automation_created", ["automationId", "createdAt"])
.index("by_ticket", ["ticketId"]),
ticketChatMessages: defineTable({
ticketId: v.id("tickets"),
authorId: v.id("users"),
@ -471,33 +408,6 @@ export default defineSchema({
.index("by_ticket_created", ["ticketId", "createdAt"])
.index("by_tenant_created", ["tenantId", "createdAt"]),
// Sessoes de chat ao vivo entre agente (dashboard) e cliente (Raven desktop)
liveChatSessions: defineTable({
tenantId: v.string(),
ticketId: v.id("tickets"),
machineId: v.id("machines"),
agentId: v.id("users"),
agentSnapshot: v.optional(
v.object({
name: v.string(),
email: v.optional(v.string()),
avatarUrl: v.optional(v.string()),
})
),
status: v.string(), // ACTIVE | ENDED
startedAt: v.number(),
endedAt: v.optional(v.number()),
lastActivityAt: v.number(),
lastAgentMessageAt: v.optional(v.number()), // Timestamp da ultima mensagem do agente (para deteccao confiavel)
unreadByMachine: v.optional(v.number()),
unreadByAgent: v.optional(v.number()),
})
.index("by_ticket", ["ticketId"])
.index("by_machine_status", ["machineId", "status"])
.index("by_tenant_machine", ["tenantId", "machineId"])
.index("by_tenant_status", ["tenantId", "status"])
.index("by_status_lastActivity", ["status", "lastActivityAt"]),
commentTemplates: defineTable({
tenantId: v.string(),
kind: v.optional(v.string()),
@ -598,29 +508,6 @@ export default defineSchema({
.index("by_tenant_category_priority", ["tenantId", "categoryId", "priority"])
.index("by_tenant_category", ["tenantId", "categoryId"]),
// SLA por empresa - permite configurar políticas de SLA específicas por cliente
// Quando um ticket é criado, o sistema busca primeiro aqui antes de usar categorySlaSettings
companySlaSettings: defineTable({
tenantId: v.string(),
companyId: v.id("companies"),
// Se categoryId for null, aplica-se a todas as categorias da empresa
categoryId: v.optional(v.id("ticketCategories")),
priority: v.string(), // URGENT, HIGH, MEDIUM, LOW, DEFAULT
responseTargetMinutes: v.optional(v.number()),
responseMode: v.optional(v.string()), // "business" | "calendar"
solutionTargetMinutes: v.optional(v.number()),
solutionMode: v.optional(v.string()), // "business" | "calendar"
alertThreshold: v.optional(v.number()), // 0.1 a 0.95 (ex: 0.8 = 80%)
pauseStatuses: v.optional(v.array(v.string())),
calendarType: v.optional(v.string()),
createdAt: v.number(),
updatedAt: v.number(),
actorId: v.optional(v.id("users")),
})
.index("by_tenant_company", ["tenantId", "companyId"])
.index("by_tenant_company_category", ["tenantId", "companyId", "categoryId"])
.index("by_tenant_company_category_priority", ["tenantId", "companyId", "categoryId", "priority"]),
ticketFields: defineTable({
tenantId: v.string(),
key: v.string(),
@ -683,31 +570,6 @@ export default defineSchema({
.index("by_tenant_key", ["tenantId", "key"])
.index("by_tenant_active", ["tenantId", "isArchived"]),
ticketChecklistTemplates: defineTable({
tenantId: v.string(),
name: v.string(),
description: v.optional(v.string()),
companyId: v.optional(v.id("companies")),
items: v.array(
v.object({
id: v.string(),
text: v.string(),
description: v.optional(v.string()),
type: v.optional(v.string()), // "checkbox" | "question"
options: v.optional(v.array(v.string())), // Para tipo "question": ["Sim", "Nao", ...]
required: v.optional(v.boolean()),
})
),
isArchived: v.optional(v.boolean()),
createdAt: v.number(),
updatedAt: v.number(),
createdBy: v.optional(v.id("users")),
updatedBy: v.optional(v.id("users")),
})
.index("by_tenant", ["tenantId"])
.index("by_tenant_company", ["tenantId", "companyId"])
.index("by_tenant_active", ["tenantId", "isArchived"]),
userInvites: defineTable({
tenantId: v.string(),
inviteId: v.string(),
@ -773,36 +635,12 @@ export default defineSchema({
updatedAt: v.number(),
registeredBy: v.optional(v.string()),
remoteAccess: v.optional(v.any()),
usbPolicy: v.optional(v.string()), // ALLOW | BLOCK_ALL | READONLY
usbPolicyAppliedAt: v.optional(v.number()),
usbPolicyStatus: v.optional(v.string()), // PENDING | APPLIED | FAILED
usbPolicyError: v.optional(v.string()),
usbPolicyReportedAt: v.optional(v.number()),
})
.index("by_tenant", ["tenantId"])
.index("by_tenant_company", ["tenantId", "companyId"])
.index("by_tenant_fingerprint", ["tenantId", "fingerprint"])
.index("by_tenant_assigned_email", ["tenantId", "assignedUserEmail"])
.index("by_tenant_hostname", ["tenantId", "hostname"])
.index("by_auth_email", ["authEmail"])
.index("by_usbPolicyStatus", ["usbPolicyStatus"]),
usbPolicyEvents: defineTable({
tenantId: v.string(),
machineId: v.id("machines"),
actorId: v.optional(v.id("users")),
actorEmail: v.optional(v.string()),
actorName: v.optional(v.string()),
oldPolicy: v.optional(v.string()),
newPolicy: v.string(),
status: v.string(), // PENDING | APPLIED | FAILED
error: v.optional(v.string()),
appliedAt: v.optional(v.number()),
createdAt: v.number(),
})
.index("by_machine", ["machineId"])
.index("by_machine_created", ["machineId", "createdAt"])
.index("by_tenant_created", ["tenantId", "createdAt"]),
.index("by_auth_email", ["authEmail"]),
machineAlerts: defineTable({
tenantId: v.string(),
@ -817,33 +655,6 @@ export default defineSchema({
.index("by_tenant_created", ["tenantId", "createdAt"])
.index("by_tenant_machine", ["tenantId", "machineId"]),
// Tabela separada para heartbeats - evita criar versoes do documento machines a cada heartbeat
// O documento machines so e atualizado quando ha mudancas reais nos dados (metadata, inventory, etc)
machineHeartbeats: defineTable({
machineId: v.id("machines"),
lastHeartbeatAt: v.number(),
})
.index("by_machine", ["machineId"]),
// Separate table for installed software — enables filtering, search and
// pagination independently of the machines document.
// Rows are reported by the desktop agent and stored here in normalized form.
machineSoftware: defineTable({
  tenantId: v.string(),
  machineId: v.id("machines"),
  name: v.string(),
  nameLower: v.string(), // lower-cased copy of name, for case-insensitive search
  version: v.optional(v.string()),
  publisher: v.optional(v.string()),
  source: v.optional(v.string()), // package source: dpkg, rpm, windows, macos, etc
  installedAt: v.optional(v.number()), // install date, when the agent can determine it
  detectedAt: v.number(), // when the agent first detected this entry
  lastSeenAt: v.number(), // last heartbeat in which this entry was still present
})
  .index("by_machine", ["machineId"])
  .index("by_machine_name", ["machineId", "nameLower"])
  .index("by_tenant_name", ["tenantId", "nameLower"])
  .index("by_tenant_machine", ["tenantId", "machineId"]),
machineTokens: defineTable({
tenantId: v.string(),
machineId: v.id("machines"),
@ -915,86 +726,4 @@ export default defineSchema({
.index("by_tenant_company", ["tenantId", "companyId"])
.index("by_tenant_default", ["tenantId", "isDefault"])
.index("by_tenant", ["tenantId"]),
analyticsCache: defineTable({
tenantId: v.string(),
cacheKey: v.string(),
payload: v.any(),
expiresAt: v.number(),
_ttl: v.optional(v.number()),
})
.index("by_key", ["tenantId", "cacheKey"]),
analyticsLocks: defineTable({
tenantId: v.string(),
cacheKey: v.string(),
expiresAt: v.number(),
_ttl: v.optional(v.number()),
})
.index("by_key", ["tenantId", "cacheKey"]),
// ================================
// Emprestimo de Equipamentos
// ================================
emprestimos: defineTable({
tenantId: v.string(),
reference: v.number(),
clienteId: v.id("companies"),
clienteSnapshot: v.object({
name: v.string(),
slug: v.optional(v.string()),
}),
responsavelNome: v.string(),
responsavelContato: v.optional(v.string()),
tecnicoId: v.id("users"),
tecnicoSnapshot: v.object({
name: v.string(),
email: v.optional(v.string()),
}),
equipamentos: v.array(v.object({
id: v.string(),
tipo: v.string(),
marca: v.string(),
modelo: v.string(),
serialNumber: v.optional(v.string()),
patrimonio: v.optional(v.string()),
})),
quantidade: v.number(),
valor: v.optional(v.number()),
dataEmprestimo: v.number(),
dataFimPrevisto: v.number(),
dataDevolucao: v.optional(v.number()),
status: v.string(),
observacoes: v.optional(v.string()),
observacoesDevolucao: v.optional(v.string()),
multaDiaria: v.optional(v.number()),
multaCalculada: v.optional(v.number()),
createdBy: v.id("users"),
updatedBy: v.optional(v.id("users")),
createdAt: v.number(),
updatedAt: v.number(),
})
.index("by_tenant", ["tenantId"])
.index("by_tenant_status", ["tenantId", "status"])
.index("by_tenant_cliente", ["tenantId", "clienteId"])
.index("by_tenant_tecnico", ["tenantId", "tecnicoId"])
.index("by_tenant_reference", ["tenantId", "reference"])
.index("by_tenant_created", ["tenantId", "createdAt"])
.index("by_tenant_data_fim", ["tenantId", "dataFimPrevisto"]),
emprestimoHistorico: defineTable({
tenantId: v.string(),
emprestimoId: v.id("emprestimos"),
tipo: v.string(),
descricao: v.string(),
alteracoes: v.optional(v.any()),
autorId: v.id("users"),
autorSnapshot: v.object({
name: v.string(),
email: v.optional(v.string()),
}),
createdAt: v.number(),
})
.index("by_emprestimo", ["emprestimoId"])
.index("by_emprestimo_created", ["emprestimoId", "createdAt"]),
});

View file

@ -15,7 +15,7 @@ export const seedDemo = mutation({
const existingQueues = await ctx.db
.query("queues")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100);
.collect();
const normalizedQueues = await Promise.all(
existingQueues.map(async (queue) => {
@ -135,7 +135,7 @@ export const seedDemo = mutation({
const existingTemplates = await ctx.db
.query("commentTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100);
.collect();
for (const definition of templateDefinitions) {
const already = existingTemplates.find((template) => template?.title === definition.title);

View file

@ -9,26 +9,6 @@ function normalizeName(value: string) {
return value.trim();
}
// Normalizes an SLA counting mode: only the explicit "business" marker is
// kept; anything else (including undefined) falls back to "calendar".
function normalizeMode(value?: string): "business" | "calendar" {
  return value === "business" ? "business" : "calendar";
}
// Normalizes the SLA alert threshold, clamping it to the supported
// range [0.1, 0.95] and defaulting to 0.8 (80% of the SLA window).
function normalizeThreshold(value?: number): number {
  if (value === undefined || value === null) return 0.8;
  // Guard against NaN from unvalidated input: previously NaN failed both
  // range checks below and was returned as-is.
  if (Number.isNaN(value)) return 0.8;
  if (value < 0.1) return 0.1;
  if (value > 0.95) return 0.95;
  return value;
}
const VALID_PAUSE_STATUSES = ["PAUSED", "PENDING", "AWAITING_ATTENDANCE"] as const;

// Keeps only recognised pause statuses, falling back to ["PAUSED"] when the
// input is missing, empty, or contains no valid entries.
function normalizePauseStatuses(statuses?: string[]): string[] {
  const allowed = new Set<string>(VALID_PAUSE_STATUSES);
  const kept = (statuses ?? []).filter((status) => allowed.has(status));
  return kept.length > 0 ? kept : ["PAUSED"];
}
type AnyCtx = QueryCtx | MutationCtx;
async function ensureUniqueName(ctx: AnyCtx, tenantId: string, name: string, excludeId?: Id<"slaPolicies">) {
@ -48,18 +28,14 @@ export const list = query({
const items = await ctx.db
.query("slaPolicies")
.withIndex("by_tenant_name", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
return items.map((policy) => ({
id: policy._id,
name: policy.name,
description: policy.description ?? "",
timeToFirstResponse: policy.timeToFirstResponse ?? null,
responseMode: policy.responseMode ?? "calendar",
timeToResolution: policy.timeToResolution ?? null,
solutionMode: policy.solutionMode ?? "calendar",
alertThreshold: policy.alertThreshold ?? 0.8,
pauseStatuses: policy.pauseStatuses ?? ["PAUSED"],
}));
},
});
@ -71,14 +47,9 @@ export const create = mutation({
name: v.string(),
description: v.optional(v.string()),
timeToFirstResponse: v.optional(v.number()),
responseMode: v.optional(v.string()),
timeToResolution: v.optional(v.number()),
solutionMode: v.optional(v.string()),
alertThreshold: v.optional(v.number()),
pauseStatuses: v.optional(v.array(v.string())),
},
handler: async (ctx, args) => {
const { tenantId, actorId, name, description, timeToFirstResponse, responseMode, timeToResolution, solutionMode, alertThreshold, pauseStatuses } = args;
handler: async (ctx, { tenantId, actorId, name, description, timeToFirstResponse, timeToResolution }) => {
await requireAdmin(ctx, actorId, tenantId);
const trimmed = normalizeName(name);
if (trimmed.length < 2) {
@ -97,11 +68,7 @@ export const create = mutation({
name: trimmed,
description,
timeToFirstResponse,
responseMode: normalizeMode(responseMode),
timeToResolution,
solutionMode: normalizeMode(solutionMode),
alertThreshold: normalizeThreshold(alertThreshold),
pauseStatuses: normalizePauseStatuses(pauseStatuses),
});
return id;
},
@ -115,14 +82,9 @@ export const update = mutation({
name: v.string(),
description: v.optional(v.string()),
timeToFirstResponse: v.optional(v.number()),
responseMode: v.optional(v.string()),
timeToResolution: v.optional(v.number()),
solutionMode: v.optional(v.string()),
alertThreshold: v.optional(v.number()),
pauseStatuses: v.optional(v.array(v.string())),
},
handler: async (ctx, args) => {
const { policyId, tenantId, actorId, name, description, timeToFirstResponse, responseMode, timeToResolution, solutionMode, alertThreshold, pauseStatuses } = args;
handler: async (ctx, { policyId, tenantId, actorId, name, description, timeToFirstResponse, timeToResolution }) => {
await requireAdmin(ctx, actorId, tenantId);
const policy = await ctx.db.get(policyId);
if (!policy || policy.tenantId !== tenantId) {
@ -144,11 +106,7 @@ export const update = mutation({
name: trimmed,
description,
timeToFirstResponse,
responseMode: normalizeMode(responseMode),
timeToResolution,
solutionMode: normalizeMode(solutionMode),
alertThreshold: normalizeThreshold(alertThreshold),
pauseStatuses: normalizePauseStatuses(pauseStatuses),
});
},
});

View file

@ -28,17 +28,17 @@ export const list = query({
const teams = await ctx.db
.query("teams")
.withIndex("by_tenant_name", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
const users = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
const queues = await ctx.db
.query("queues")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
return teams.map((team) => {
const members = users
@ -111,7 +111,7 @@ export const update = mutation({
const users = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
const now = users
.filter((user) => (user.teams ?? []).includes(team.name))
@ -150,7 +150,7 @@ export const remove = mutation({
const users = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
await Promise.all(
users
@ -182,7 +182,7 @@ export const setMembers = mutation({
const users = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
const tenantUserIds = new Set(users.map((user) => user._id));
for (const memberId of memberIds) {
if (!tenantUserIds.has(memberId)) {
@ -218,7 +218,7 @@ export const directory = query({
const users = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
return users.map((user) => ({
id: user._id,

View file

@ -1,93 +0,0 @@
import type { Id } from "./_generated/dataModel"
export type ChecklistItemType = "checkbox" | "question"
export type TicketChecklistItem = {
id: string
text: string
description?: string
type?: ChecklistItemType
options?: string[] // Para tipo "question": ["Sim", "Nao", ...]
answer?: string // Resposta selecionada para tipo "question"
done: boolean
required?: boolean
templateId?: Id<"ticketChecklistTemplates">
templateItemId?: string
templateDescription?: string // Descricao do template (copiada ao aplicar)
createdAt?: number
createdBy?: Id<"users">
doneAt?: number
doneBy?: Id<"users">
}
export type TicketChecklistTemplateItem = {
id: string
text: string
description?: string
type?: string // "checkbox" | "question" - string para compatibilidade com schema
options?: string[]
required?: boolean
}
export type TicketChecklistTemplateLike = {
_id: Id<"ticketChecklistTemplates">
description?: string
items: TicketChecklistTemplateItem[]
}
// Converts Windows line endings (\r\n) to Unix (\n) and strips surrounding
// whitespace from checklist item text.
export function normalizeChecklistText(input: string) {
  const unixText = input.split("\r\n").join("\n")
  return unixText.trim()
}
// Returns true while at least one checklist item still blocks resolution.
// An item blocks resolution when it is required (the default when the flag
// is absent) and has not been marked done.
export function checklistBlocksResolution(checklist: TicketChecklistItem[] | null | undefined) {
  for (const item of checklist ?? []) {
    const isRequired = item.required ?? true
    if (isRequired && item.done !== true) return true
  }
  return false
}
// Merges a checklist template's items into an existing ticket checklist.
// Items previously applied from the same template are detected via the
// `templateId:templateItemId` key and skipped, so re-applying a template is
// idempotent. Returns the merged checklist and the number of items added.
export function applyChecklistTemplateToItems(
  existing: TicketChecklistItem[],
  template: TicketChecklistTemplateLike,
  options: {
    now: number
    actorId?: Id<"users">
    generateId?: () => string
  }
) {
  // Callers may inject an id generator (useful for tests); defaults to UUIDs.
  const generateId = options.generateId ?? (() => crypto.randomUUID())
  const now = options.now
  // Work on a copy so the caller's array is never mutated.
  const next = Array.isArray(existing) ? [...existing] : []
  // Collect the template-item keys already present in the checklist.
  const existingKeys = new Set<string>()
  for (const item of next) {
    if (!item.templateId || !item.templateItemId) continue
    existingKeys.add(`${String(item.templateId)}:${item.templateItemId}`)
  }
  let added = 0
  for (const tplItem of template.items ?? []) {
    const templateItemId = String(tplItem.id ?? "").trim()
    const text = normalizeChecklistText(String(tplItem.text ?? ""))
    // Skip malformed template entries (missing id or empty text).
    if (!templateItemId || !text) continue
    const key = `${String(template._id)}:${templateItemId}`
    if (existingKeys.has(key)) continue
    existingKeys.add(key)
    const itemType = tplItem.type ?? "checkbox"
    next.push({
      id: generateId(),
      text,
      description: tplItem.description,
      type: itemType as ChecklistItemType,
      // Only "question" items carry answer options.
      options: itemType === "question" ? tplItem.options : undefined,
      done: false,
      // Template items are required unless explicitly flagged otherwise.
      required: typeof tplItem.required === "boolean" ? tplItem.required : true,
      templateId: template._id,
      templateItemId,
      templateDescription: template.description,
      createdAt: now,
      createdBy: options.actorId,
    })
    added += 1
  }
  return { checklist: next, added }
}

View file

@ -45,7 +45,7 @@ export const list = query({
const settings = await ctx.db
.query("ticketFormSettings")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(100)
.collect()
return settings
.filter((setting) => !normalizedTemplate || setting.template === normalizedTemplate)
.map((setting) => ({
@ -143,7 +143,7 @@ async function findExisting(
const candidates = await ctx.db
.query("ticketFormSettings")
.withIndex("by_tenant_template_scope", (q) => q.eq("tenantId", tenantId).eq("template", template).eq("scope", scope))
.take(100)
.collect()
return candidates.find((setting) => {
if (scope === "tenant") return true

View file

@ -39,7 +39,7 @@ export async function ensureTicketFormTemplatesForTenant(ctx: MutationCtx, tenan
const existing = await ctx.db
.query("ticketFormTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
let order = existing.reduce((max, tpl) => Math.max(max, tpl.order ?? 0), 0);
const now = Date.now();
for (const template of TICKET_FORM_CONFIG) {
@ -102,12 +102,12 @@ async function cloneFieldsFromTemplate(ctx: MutationCtx, tenantId: string, sourc
const sourceFields = await ctx.db
.query("ticketFields")
.withIndex("by_tenant_scope", (q) => q.eq("tenantId", tenantId).eq("scope", sourceKey))
.take(50);
.collect();
if (sourceFields.length === 0) return;
const ordered = await ctx.db
.query("ticketFields")
.withIndex("by_tenant_order", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
let order = ordered.reduce((max, field) => Math.max(max, field.order ?? 0), 0);
const now = Date.now();
for (const field of sourceFields.sort((a, b) => (a.order ?? 0) - (b.order ?? 0))) {
@ -156,7 +156,7 @@ export const list = query({
const templates = await ctx.db
.query("ticketFormTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
return templates
.filter((tpl) => includeArchived || tpl.isArchived !== true)
.sort((a, b) => (a.order ?? 0) - (b.order ?? 0) || a.label.localeCompare(b.label, "pt-BR"))
@ -174,7 +174,7 @@ export const listActive = query({
const templates = await ctx.db
.query("ticketFormTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
return templates
.filter((tpl) => tpl.isArchived !== true)
.sort((a, b) => (a.order ?? 0) - (b.order ?? 0) || a.label.localeCompare(b.label, "pt-BR"))
@ -201,7 +201,7 @@ export const create = mutation({
const templates = await ctx.db
.query("ticketFormTemplates")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(50);
.collect();
const order = (templates.reduce((max, tpl) => Math.max(max, tpl.order ?? 0), 0) ?? 0) + 1;
const now = Date.now();
const templateId = await ctx.db.insert("ticketFormTemplates", {

View file

@ -1,428 +1,122 @@
"use node"
import net from "net"
import tls from "tls"
import { action } from "./_generated/server"
import { v } from "convex/values"
import { renderSimpleNotificationEmailHtml } from "./reactEmail"
import { buildBaseUrl } from "./url"
// API do Next.js para verificar preferências
// Delegates notification delivery to the Next.js API, which checks the
// recipient's notification preferences before sending. Returns
// { success: false, reason } when the internal token is missing or the API
// call fails, so the caller can decide how to proceed.
async function sendViaNextApi(params: {
  type: string
  to: { email: string; name?: string; userId?: string }
  subject: string
  data: Record<string, unknown>
  tenantId?: string
}): Promise<{ success: boolean; skipped?: boolean; reason?: string }> {
  const baseUrl = buildBaseUrl()
  const token = process.env.INTERNAL_HEALTH_TOKEN ?? process.env.REPORTS_CRON_SECRET
  if (!token) {
    console.warn("[ticketNotifications] Token interno não configurado, enviando diretamente")
    return { success: false, reason: "no_token" }
  }
  try {
    const response = await fetch(`${baseUrl}/api/notifications/send`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${token}`,
      },
      body: JSON.stringify(params),
    })
    if (response.ok) {
      return await response.json()
    }
    const errorBody = await response.text()
    console.error("[ticketNotifications] Erro na API:", errorBody)
    return { success: false, reason: "api_error" }
  } catch (error) {
    console.error("[ticketNotifications] Erro ao chamar API:", error)
    return { success: false, reason: "fetch_error" }
  }
}
// Encodes a UTF-8 string as base64 (used for the SMTP AUTH LOGIN exchange).
function b64(input: string) {
  const bytes = Buffer.from(input, "utf8")
  return bytes.toString("base64")
}
function extractEnvelopeAddress(from: string): string {
const angle = from.match(/<\s*([^>\s]+)\s*>/)
if (angle?.[1]) return angle[1]
async function sendSmtpMail(cfg: { host: string; port: number; username: string; password: string; from: string }, to: string, subject: string, html: string) {
return new Promise<void>((resolve, reject) => {
const socket = tls.connect(cfg.port, cfg.host, { rejectUnauthorized: false }, () => {
let buffer = ""
const send = (line: string) => socket.write(line + "\r\n")
const wait = (expected: string | RegExp) =>
new Promise<void>((res) => {
const onData = (data: Buffer) => {
buffer += data.toString()
const lines = buffer.split(/\r?\n/)
const last = lines.filter(Boolean).slice(-1)[0] ?? ""
if (typeof expected === "string" ? last.startsWith(expected) : expected.test(last)) {
socket.removeListener("data", onData)
res()
}
}
socket.on("data", onData)
socket.on("error", reject)
})
const paren = from.match(/\(([^)\s]+@[^)\s]+)\)/)
if (paren?.[1]) return paren[1]
const email = from.match(/[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}/)
if (email?.[0]) return email[0]
return from
}
type SmtpConfig = {
  host: string
  port: number
  username: string
  password: string
  from: string
  secure: boolean
  timeoutMs: number
}

// Builds the SMTP configuration from environment variables, supporting both
// the current names (SMTP_ADDRESS/SMTP_USERNAME/SMTP_PASSWORD) and the
// legacy ones (SMTP_HOST/SMTP_USER/SMTP_PASS). Returns null when the
// mandatory host/username/password are not configured.
function buildSmtpConfig(): SmtpConfig | null {
  const host = process.env.SMTP_ADDRESS || process.env.SMTP_HOST
  // Fall back to 465 when SMTP_PORT is unset, empty, or not a positive
  // number (Number("") is 0 and Number("abc") is NaN; both previously
  // leaked through as the port).
  const parsedPort = Number(process.env.SMTP_PORT)
  const port = Number.isFinite(parsedPort) && parsedPort > 0 ? parsedPort : 465
  const username = process.env.SMTP_USERNAME || process.env.SMTP_USER
  const password = process.env.SMTP_PASSWORD || process.env.SMTP_PASS
  const legacyFrom = process.env.MAILER_SENDER_EMAIL
  const fromEmail = process.env.SMTP_FROM_EMAIL
  const fromName = process.env.SMTP_FROM_NAME || "Raven"
  const from = legacyFrom || (fromEmail ? `"${fromName}" <${fromEmail}>` : "Raven <no-reply@example.com>")
  if (!host || !username || !password) return null
  // TLS is explicit via SMTP_SECURE/SMTP_TLS; otherwise inferred from port 465.
  const secureFlag = (process.env.SMTP_SECURE ?? process.env.SMTP_TLS ?? "").toLowerCase()
  const secure = secureFlag ? secureFlag === "true" : port === 465
  return { host, port, username, password, from, secure, timeoutMs: 30000 }
}
// Socket types the SMTP client may operate on (plain TCP or TLS).
type SmtpSocket = net.Socket | tls.TLSSocket
// One parsed SMTP reply: its numeric status code and the raw reply lines.
type SmtpResponse = { code: number; lines: string[] }

// Wraps a socket with a line-oriented SMTP reply reader.
// Incoming data is buffered and split into complete replies; multi-line
// replies ("250-..." continuations) accumulate until the final "250 ..."
// line. `read()` resolves with the next complete reply, rejecting after
// `timeoutMs`; replies that arrive before a read is pending are queued.
// `dispose()` detaches the listeners and cancels any pending read timer.
function createSmtpReader(socket: SmtpSocket, timeoutMs: number) {
  let buffer = ""
  let current: SmtpResponse | null = null
  const queue: SmtpResponse[] = []
  let pending:
    | { resolve: (response: SmtpResponse) => void; reject: (error: unknown) => void; timer: ReturnType<typeof setTimeout> }
    | null = null
  // Hands a completed reply to the waiting reader, or queues it for later.
  const finalize = (response: SmtpResponse) => {
    if (pending) {
      clearTimeout(pending.timer)
      const resolve = pending.resolve
      pending = null
      resolve(response)
      return
    }
    queue.push(response)
  }
  const onData = (data: Buffer) => {
    buffer += data.toString("utf8")
    const lines = buffer.split(/\r?\n/)
    // Keep any trailing partial line in the buffer for the next chunk.
    buffer = lines.pop() ?? ""
    for (const line of lines) {
      if (!line) continue
      // SMTP replies look like "250-..." (continuation) or "250 ..." (final).
      const match = line.match(/^(\d{3})([ -])\s?(.*)$/)
      if (!match) continue
      const code = Number(match[1])
      const isFinal = match[2] === " "
      if (!current) current = { code, lines: [] }
      current.lines.push(line)
      if (isFinal) {
        const response = current
        current = null
        finalize(response)
      }
    }
  }
  // Propagates socket errors to the pending read, if one is outstanding.
  const onError = (error: unknown) => {
    if (pending) {
      clearTimeout(pending.timer)
      const reject = pending.reject
      pending = null
      reject(error)
    }
  }
  socket.on("data", onData)
  socket.on("error", onError)
  // Resolves with the next complete reply or rejects on timeout.
  // Only one read may be outstanding at a time.
  const read = () =>
    new Promise<SmtpResponse>((resolve, reject) => {
      const queued = queue.shift()
      if (queued) {
        resolve(queued)
        return
      }
      if (pending) {
        reject(new Error("smtp_concurrent_read"))
        return
      }
      const timer = setTimeout(() => {
        if (!pending) return
        const rejectPending = pending.reject
        pending = null
        rejectPending(new Error("smtp_timeout"))
      }, timeoutMs)
      pending = { resolve, reject, timer }
    })
  const dispose = () => {
    socket.off("data", onData)
    socket.off("error", onError)
    if (pending) {
      clearTimeout(pending.timer)
      pending = null
    }
  }
  return { read, dispose }
}
/**
 * Checks whether an EHLO reply advertises a given capability (e.g. STARTTLS).
 * The comparison strips the leading "250-"/"250 " code prefix and is
 * case-insensitive.
 */
function isCapability(lines: string[], capability: string) {
  const target = capability.trim().toUpperCase()
  for (const raw of lines) {
    const stripped = raw.replace(/^(\d{3})([ -])/, "").trim().toUpperCase()
    if (stripped.startsWith(target)) return true
  }
  return false
}
/**
 * Validates an SMTP reply against an expected code (exact number or
 * predicate) and throws a tagged error when it does not match.
 *
 * @param response full parsed server reply
 * @param expected exact code, or a predicate over the code
 * @param context  short label embedded in the error for diagnostics
 * @throws Error("smtp_unexpected_response:<context>:<code>:<lines>")
 */
function assertCode(response: SmtpResponse, expected: number | ((code: number) => boolean), context: string) {
  const matches =
    typeof expected === "function" ? expected(response.code) : response.code === expected
  if (!matches) {
    throw new Error(`smtp_unexpected_response:${context}:${response.code}:${response.lines.join(" | ")}`)
  }
}
/**
 * Opens a plain (non-TLS) TCP connection to the SMTP server.
 *
 * Fix: this function previously contained merge-conflict residue — an old
 * inline SMTP dialog (undefined `wait`/`send`/`cfg` helpers) spliced inside
 * the error handler. The dialog lives in sendSmtpMail; this function only
 * establishes the connection, mirroring connectTls.
 *
 * @throws Error("smtp_connect_timeout") when the connect exceeds timeoutMs.
 */
async function connectPlain(host: string, port: number, timeoutMs: number) {
  return new Promise<net.Socket>((resolve, reject) => {
    const socket = net.connect(port, host)
    const timer = setTimeout(() => {
      socket.destroy()
      reject(new Error("smtp_connect_timeout"))
    }, timeoutMs)
    socket.once("connect", () => {
      clearTimeout(timer)
      resolve(socket)
    })
    socket.once("error", (e) => {
      clearTimeout(timer)
      reject(e)
    })
  })
}
async function connectTls(host: string, port: number, timeoutMs: number) {
return new Promise<tls.TLSSocket>((resolve, reject) => {
const socket = tls.connect({ host, port, rejectUnauthorized: false, servername: host })
const timer = setTimeout(() => {
socket.destroy()
reject(new Error("smtp_connect_timeout"))
}, timeoutMs)
socket.once("secureConnect", () => {
clearTimeout(timer)
resolve(socket)
})
socket.once("error", (e) => {
clearTimeout(timer)
reject(e)
})
})
// Base URL used in email links: prefers the public app URL, then the
// server-side override, then the local development default.
function buildBaseUrl() {
  const configured = process.env.NEXT_PUBLIC_APP_URL || process.env.APP_BASE_URL
  return configured || "http://localhost:3000"
}
async function upgradeToStartTls(socket: net.Socket, host: string, timeoutMs: number) {
return new Promise<tls.TLSSocket>((resolve, reject) => {
const tlsSocket = tls.connect({ socket, servername: host, rejectUnauthorized: false })
const timer = setTimeout(() => {
tlsSocket.destroy()
reject(new Error("smtp_connect_timeout"))
}, timeoutMs)
tlsSocket.once("secureConnect", () => {
clearTimeout(timer)
resolve(tlsSocket)
})
tlsSocket.once("error", (e) => {
clearTimeout(timer)
reject(e)
})
})
/**
 * Renders a simple table-based HTML email (title + message + CTA button),
 * inline-styled for broad email-client compatibility.
 *
 * @param title    headline shown in the card
 * @param message  body paragraph (inserted as-is — callers must pass trusted
 *                 or pre-escaped text; no HTML escaping happens here)
 * @param ctaLabel button label
 * @param ctaUrl   button target; also repeated as a plain fallback link
 * @returns the HTML string; the logo URL is derived from buildBaseUrl()
 */
function emailTemplate({ title, message, ctaLabel, ctaUrl }: { title: string; message: string; ctaLabel: string; ctaUrl: string }) {
  return `
  <table width="100%" cellpadding="0" cellspacing="0" role="presentation" style="background:#f8fafc;padding:24px 0;font-family:Arial,Helvetica,sans-serif;color:#0f172a;">
    <tr>
      <td align="center">
        <table width="600" cellpadding="0" cellspacing="0" role="presentation" style="background:white;border:1px solid #e2e8f0;border-radius:12px;padding:24px;">
          <tr>
            <td style="text-align:left;">
              <div style="display:flex;align-items:center;gap:12px;">
                <img src="${buildBaseUrl()}/logo-raven.png" alt="Raven" style="width:36px;height:36px;border-radius:8px;"/>
                <span style="font-weight:700;font-size:18px;">Raven</span>
              </div>
              <h1 style="font-size:20px;line-height:1.3;margin:16px 0 8px 0;">${title}</h1>
              <p style="font-size:14px;line-height:1.6;margin:0 0 16px 0;color:#334155;">${message}</p>
              <a href="${ctaUrl}" style="display:inline-block;background:#111827;color:#fff;text-decoration:none;border-radius:10px;padding:10px 16px;font-weight:600;">${ctaLabel}</a>
              <p style="font-size:12px;color:#64748b;margin-top:20px;">Se o botão não funcionar, copie e cole esta URL no navegador:<br/><a href="${ctaUrl}" style="color:#0ea5e9;text-decoration:none;">${ctaUrl}</a></p>
            </td>
          </tr>
        </table>
        <p style="font-size:12px;color:#94a3b8;margin-top:12px;">&copy; ${new Date().getFullYear()} Raven Rever Tecnologia</p>
      </td>
    </tr>
  </table>`
}
/**
 * Sends a single HTML email by speaking SMTP directly over a socket.
 *
 * Dialog: greeting -> EHLO -> (opportunistic STARTTLS upgrade + EHLO again)
 * -> AUTH LOGIN -> MAIL FROM -> RCPT TO -> DATA -> QUIT. The socket and
 * reader are always torn down in the finally block.
 *
 * Fix: the DATA payload is now dot-stuffed per RFC 5321 §4.5.2 — a body
 * line starting with "." previously terminated the message early — and line
 * endings are normalized to CRLF.
 *
 * @param cfg     resolved SMTP configuration (see buildSmtpConfig)
 * @param to      single recipient address
 * @param subject message subject — NOTE(review): not RFC 2047 encoded;
 *                assumed ASCII-safe, confirm for accented subjects
 * @param html    already-rendered HTML body
 * @throws Error with an "smtp_*" prefix on timeouts or unexpected replies
 */
async function sendSmtpMail(cfg: SmtpConfig, to: string, subject: string, html: string) {
  const timeoutMs = Math.max(1000, cfg.timeoutMs)
  let socket: SmtpSocket | null = null
  let reader: ReturnType<typeof createSmtpReader> | null = null
  const sendLine = (line: string) => socket?.write(line + "\r\n")
  const readExpected = async (expected: number | ((code: number) => boolean), context: string) => {
    if (!reader) throw new Error("smtp_reader_not_ready")
    const response = await reader.read()
    assertCode(response, expected, context)
    return response
  }
  try {
    socket = cfg.secure ? await connectTls(cfg.host, cfg.port, timeoutMs) : await connectPlain(cfg.host, cfg.port, timeoutMs)
    reader = createSmtpReader(socket, timeoutMs)
    await readExpected(220, "greeting")
    sendLine(`EHLO ${cfg.host}`)
    let ehlo = await readExpected(250, "ehlo")
    // Opportunistic STARTTLS on plain connections when the server offers it.
    if (!cfg.secure && isCapability(ehlo.lines, "STARTTLS")) {
      sendLine("STARTTLS")
      await readExpected(220, "starttls")
      // Detach the old reader before the socket is wrapped in TLS.
      reader.dispose()
      socket = await upgradeToStartTls(socket as net.Socket, cfg.host, timeoutMs)
      reader = createSmtpReader(socket, timeoutMs)
      sendLine(`EHLO ${cfg.host}`)
      ehlo = await readExpected(250, "ehlo_starttls")
    }
    sendLine("AUTH LOGIN")
    await readExpected(334, "auth_login")
    sendLine(b64(cfg.username))
    await readExpected(334, "auth_username")
    sendLine(b64(cfg.password))
    await readExpected(235, "auth_password")
    const envelopeFrom = extractEnvelopeAddress(cfg.from)
    sendLine(`MAIL FROM:<${envelopeFrom}>`)
    await readExpected((code) => Math.floor(code / 100) === 2, "mail_from")
    sendLine(`RCPT TO:<${to}>`)
    await readExpected((code) => Math.floor(code / 100) === 2, "rcpt_to")
    sendLine("DATA")
    await readExpected(354, "data")
    const headers = [
      `From: ${cfg.from}`,
      `To: ${to}`,
      `Subject: ${subject}`,
      "MIME-Version: 1.0",
      "Content-Type: text/html; charset=UTF-8",
    ].join("\r\n")
    // RFC 5321 §4.5.2 transparency: double any leading "." so the server
    // cannot mistake a body line for the end-of-data marker.
    const payload = (headers + "\r\n\r\n" + html)
      .split(/\r?\n/)
      .map((line) => (line.startsWith(".") ? "." + line : line))
      .join("\r\n")
    sendLine(payload + "\r\n.")
    await readExpected((code) => Math.floor(code / 100) === 2, "message")
    sendLine("QUIT")
    await readExpected(221, "quit")
  } finally {
    reader?.dispose()
    socket?.end()
  }
}
/**
 * Convex action: emails the requester that their ticket was created.
 *
 * Delivery strategy: first try the Next.js notification API (which checks
 * user preferences); if that neither succeeds nor explicitly skips, fall
 * back to rendering the email locally and sending it over raw SMTP.
 */
export const sendTicketCreatedEmail = action({
  args: {
    to: v.string(),
    userId: v.optional(v.string()),
    userName: v.optional(v.string()),
    ticketId: v.string(),
    reference: v.number(),
    subject: v.string(),
    priority: v.string(),
    tenantId: v.optional(v.string()),
  },
  handler: async (_ctx, { to, userId, userName, ticketId, reference, subject, priority, tenantId }) => {
    const baseUrl = buildBaseUrl()
    const url = `${baseUrl}/portal/tickets/${ticketId}`
    // Portuguese labels shown to the end user; unknown priorities pass through.
    const priorityLabels: Record<string, string> = {
      LOW: "Baixa",
      MEDIUM: "Média",
      HIGH: "Alta",
      URGENT: "Urgente",
    }
    const priorityLabel = priorityLabels[priority] ?? priority
    const mailSubject = `Novo chamado #${reference} aberto`
    // Try the Next.js API first so user notification preferences are honored.
    const apiResult = await sendViaNextApi({
      type: "ticket_created",
      to: { email: to, name: userName, userId },
      subject: mailSubject,
      data: {
        reference,
        subject,
        status: "Pendente",
        priority: priorityLabel,
        viewUrl: url,
      },
      tenantId,
    })
    if (apiResult.success || apiResult.skipped) {
      return apiResult
    }
    // Fallback: send directly over SMTP if the API call failed.
    const smtp = buildSmtpConfig()
    if (!smtp) {
      console.warn("SMTP not configured; skipping ticket created email")
      return { skipped: true }
    }
    const html = await renderSimpleNotificationEmailHtml({
      title: `Novo chamado #${reference} aberto`,
      message: `Seu chamado foi registrado com sucesso. Nossa equipe irá analisá-lo em breve.\n\nAssunto: ${subject}\nPrioridade: ${priorityLabel}\nStatus: Pendente`,
      ctaLabel: "Ver chamado",
      ctaUrl: url,
    })
    await sendSmtpMail(smtp, to, mailSubject, html)
    return { ok: true }
  },
})
export const sendPublicCommentEmail = action({
args: {
to: v.string(),
userId: v.optional(v.string()),
userName: v.optional(v.string()),
ticketId: v.string(),
reference: v.number(),
subject: v.string(),
tenantId: v.optional(v.string()),
},
handler: async (_ctx, { to, userId, userName, ticketId, reference, subject, tenantId }) => {
const baseUrl = buildBaseUrl()
const url = `${baseUrl}/portal/tickets/${ticketId}`
const mailSubject = `Atualização no chamado #${reference}: ${subject}`
// Tenta usar a API do Next.js para verificar preferências
const apiResult = await sendViaNextApi({
type: "comment_public",
to: { email: to, name: userName, userId },
subject: mailSubject,
data: {
reference,
subject,
viewUrl: url,
},
tenantId,
})
if (apiResult.success || apiResult.skipped) {
return apiResult
handler: async (_ctx, { to, ticketId, reference, subject }) => {
const smtp = {
host: process.env.SMTP_ADDRESS!,
port: Number(process.env.SMTP_PORT ?? 465),
username: process.env.SMTP_USERNAME!,
password: process.env.SMTP_PASSWORD!,
from: process.env.MAILER_SENDER_EMAIL || "Raven <no-reply@example.com>",
}
// Fallback: envia diretamente se a API falhar
const smtp = buildSmtpConfig()
if (!smtp) {
if (!smtp.host || !smtp.username || !smtp.password) {
console.warn("SMTP not configured; skipping ticket comment email")
return { skipped: true }
}
const html = await renderSimpleNotificationEmailHtml({
const baseUrl = buildBaseUrl()
const url = `${baseUrl}/portal/tickets/${ticketId}`
const mailSubject = `Atualização no chamado #${reference}: ${subject}`
const html = emailTemplate({
title: `Nova atualização no seu chamado #${reference}`,
message: `Um novo comentário foi adicionado ao chamado "${subject}". Clique abaixo para visualizar e responder pelo portal.`,
message: `Um novo comentário foi adicionado ao chamado “${subject}”. Clique abaixo para visualizar e responder pelo portal.`,
ctaLabel: "Abrir e responder",
ctaUrl: url,
})
@ -434,45 +128,28 @@ export const sendPublicCommentEmail = action({
export const sendResolvedEmail = action({
args: {
to: v.string(),
userId: v.optional(v.string()),
userName: v.optional(v.string()),
ticketId: v.string(),
reference: v.number(),
subject: v.string(),
tenantId: v.optional(v.string()),
},
handler: async (_ctx, { to, userId, userName, ticketId, reference, subject, tenantId }) => {
const baseUrl = buildBaseUrl()
const url = `${baseUrl}/portal/tickets/${ticketId}`
const mailSubject = `Seu chamado #${reference} foi encerrado`
// Tenta usar a API do Next.js para verificar preferências
const apiResult = await sendViaNextApi({
type: "ticket_resolved",
to: { email: to, name: userName, userId },
subject: mailSubject,
data: {
reference,
subject,
viewUrl: url,
},
tenantId,
})
if (apiResult.success || apiResult.skipped) {
return apiResult
handler: async (_ctx, { to, ticketId, reference, subject }) => {
const smtp = {
host: process.env.SMTP_ADDRESS!,
port: Number(process.env.SMTP_PORT ?? 465),
username: process.env.SMTP_USERNAME!,
password: process.env.SMTP_PASSWORD!,
from: process.env.MAILER_SENDER_EMAIL || "Raven <no-reply@example.com>",
}
// Fallback: envia diretamente se a API falhar
const smtp = buildSmtpConfig()
if (!smtp) {
if (!smtp.host || !smtp.username || !smtp.password) {
console.warn("SMTP not configured; skipping ticket resolution email")
return { skipped: true }
}
const html = await renderSimpleNotificationEmailHtml({
const baseUrl = buildBaseUrl()
const url = `${baseUrl}/portal/tickets/${ticketId}`
const mailSubject = `Seu chamado #${reference} foi encerrado`
const html = emailTemplate({
title: `Chamado #${reference} encerrado`,
message: `O chamado "${subject}" foi marcado como concluído. Caso necessário, você pode responder pelo portal para reabrir dentro do prazo.`,
message: `O chamado “${subject}” foi marcado como concluído. Caso necessário, você pode responder pelo portal para reabrir dentro do prazo.`,
ctaLabel: "Ver detalhes",
ctaUrl: url,
})
@ -480,82 +157,3 @@ export const sendResolvedEmail = action({
return { ok: true }
},
})
/**
 * Convex action: sends an automation-triggered email to a list of
 * recipients, one message per recipient (so addresses are not leaked
 * between them). The HTML is rendered here, in the Node.js action
 * environment, via the React Email template in ./reactEmail.
 *
 * Recipients are trimmed, empty entries dropped, and capped at 50.
 * Per-recipient failures are recorded instead of aborting the batch.
 *
 * @returns { skipped } when SMTP is unconfigured or no recipients remain,
 *          otherwise { ok, sent, failed, results } — ok is true when at
 *          least one message was delivered.
 */
export const sendAutomationEmail = action({
  args: {
    to: v.array(v.string()),
    subject: v.string(),
    emailProps: v.object({
      title: v.string(),
      message: v.string(),
      ticket: v.object({
        reference: v.number(),
        subject: v.string(),
        status: v.optional(v.union(v.string(), v.null())),
        priority: v.optional(v.union(v.string(), v.null())),
        companyName: v.optional(v.union(v.string(), v.null())),
        requesterName: v.optional(v.union(v.string(), v.null())),
        assigneeName: v.optional(v.union(v.string(), v.null())),
      }),
      ctaLabel: v.string(),
      ctaUrl: v.string(),
    }),
  },
  handler: async (_ctx, { to, subject, emailProps }) => {
    const smtp = buildSmtpConfig()
    if (!smtp) {
      console.warn("SMTP not configured; skipping automation email")
      return { skipped: true }
    }
    // Sanitize the recipient list and cap it at 50 addresses per run.
    const recipients = to
      .map((email) => email.trim())
      .filter(Boolean)
      .slice(0, 50)
    if (recipients.length === 0) {
      return { skipped: true, reason: "no_recipients" }
    }
    // Render the HTML here (the Node.js action environment supports
    // dynamic imports).
    const { renderAutomationEmailHtml } = await import("./reactEmail")
    const html = await renderAutomationEmailHtml({
      title: emailProps.title,
      message: emailProps.message,
      ticket: {
        reference: emailProps.ticket.reference,
        subject: emailProps.ticket.subject,
        status: emailProps.ticket.status ?? null,
        priority: emailProps.ticket.priority ?? null,
        companyName: emailProps.ticket.companyName ?? null,
        requesterName: emailProps.ticket.requesterName ?? null,
        assigneeName: emailProps.ticket.assigneeName ?? null,
      },
      ctaLabel: emailProps.ctaLabel,
      ctaUrl: emailProps.ctaUrl,
    })
    // One email per recipient; a failure for one does not stop the others.
    const results: Array<{ recipient: string; sent: boolean; error?: string }> = []
    for (const recipient of recipients) {
      try {
        await sendSmtpMail(smtp, recipient, subject, html)
        results.push({ recipient, sent: true })
        console.log(`[automation-email] Enviado para ${recipient}`)
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error)
        results.push({ recipient, sent: false, error: errorMessage })
        console.error(`[automation-email] Falha ao enviar para ${recipient}: ${errorMessage}`)
      }
    }
    const sent = results.filter((r) => r.sent).length
    const failed = results.filter((r) => !r.sent).length
    if (failed > 0) {
      console.error(`[automation-email] Resumo: ${sent}/${recipients.length} enviados, ${failed} falhas`)
    }
    return { ok: sent > 0, sent, failed, results }
  },
})

File diff suppressed because it is too large Load diff

View file

@ -1,4 +0,0 @@
export function buildBaseUrl() {
return process.env.NEXT_PUBLIC_APP_URL || process.env.APP_BASE_URL || "http://localhost:3000"
}

View file

@ -1,352 +0,0 @@
import { v } from "convex/values"
import { mutation, query } from "./_generated/server"
import type { Id, Doc } from "./_generated/dataModel"
import { sha256 } from "@noble/hashes/sha2.js"
const DEFAULT_TENANT_ID = "default"
// Encodes a byte array as a lowercase hex string (two digits per byte).
function toHex(input: Uint8Array) {
  let out = ""
  for (const byte of input) {
    out += byte.toString(16).padStart(2, "0")
  }
  return out
}
const utf8 = (s: string) => new TextEncoder().encode(s)
// Hex-encoded SHA-256 digest of a machine token. Tokens are stored hashed
// (see the machineTokens "by_token_hash" index), so raw tokens never touch
// the database.
function hashToken(token: string) {
  return toHex(sha256(utf8(token)))
}
// USB access policies an admin can assign to a machine.
export const USB_POLICY_VALUES = ["ALLOW", "BLOCK_ALL", "READONLY"] as const
export type UsbPolicyValue = (typeof USB_POLICY_VALUES)[number]
// Lifecycle of a policy change: requested (PENDING) -> agent APPLYING ->
// terminal APPLIED or FAILED.
export const USB_POLICY_STATUS = ["PENDING", "APPLYING", "APPLIED", "FAILED"] as const
export type UsbPolicyStatus = (typeof USB_POLICY_STATUS)[number]
/**
 * Convex mutation: requests a USB policy change for one machine.
 *
 * Sets the machine's policy with status PENDING (the agent later picks it
 * up via getPendingUsbPolicy and reports back via reportUsbPolicyStatus),
 * and records an audit event with the acting user and the old/new values.
 *
 * @throws Error when the machine does not exist or the policy value is not
 *         one of USB_POLICY_VALUES.
 */
export const setUsbPolicy = mutation({
  args: {
    machineId: v.id("machines"),
    policy: v.string(),
    actorId: v.optional(v.id("users")),
    actorEmail: v.optional(v.string()),
    actorName: v.optional(v.string()),
  },
  handler: async (ctx, args) => {
    const machine = await ctx.db.get(args.machineId)
    if (!machine) {
      throw new Error("Dispositivo nao encontrado")
    }
    if (!USB_POLICY_VALUES.includes(args.policy as UsbPolicyValue)) {
      throw new Error(`Politica USB invalida: ${args.policy}. Valores validos: ${USB_POLICY_VALUES.join(", ")}`)
    }
    const now = Date.now()
    // Machines without an explicit policy are treated as ALLOW.
    const oldPolicy = machine.usbPolicy ?? "ALLOW"
    await ctx.db.patch(args.machineId, {
      usbPolicy: args.policy,
      usbPolicyStatus: "PENDING",
      usbPolicyError: undefined,
      usbPolicyAppliedAt: now,
      updatedAt: now,
    })
    // Audit trail entry; finalized later by reportUsbPolicyStatus.
    await ctx.db.insert("usbPolicyEvents", {
      tenantId: machine.tenantId,
      machineId: args.machineId,
      actorId: args.actorId,
      actorEmail: args.actorEmail,
      actorName: args.actorName,
      oldPolicy,
      newPolicy: args.policy,
      status: "PENDING",
      createdAt: now,
    })
    return { ok: true, policy: args.policy, status: "PENDING" }
  },
})
/**
 * Convex mutation: called by the desktop agent (authenticated via its raw
 * machine token) to report the outcome of applying a USB policy.
 *
 * Updates the machine's reported status/error and finalizes the latest
 * audit event if it has not reached a terminal state yet.
 *
 * @throws Error when the token is unknown, revoked, or expired, when the
 *         machine no longer exists, or when the status value is invalid.
 */
export const reportUsbPolicyStatus = mutation({
  args: {
    machineToken: v.string(),
    status: v.string(),
    // Rust sends null for Option<String>::None, so null must be accepted
    // in addition to the field being absent.
    error: v.optional(v.union(v.string(), v.null())),
    currentPolicy: v.optional(v.union(v.string(), v.null())),
  },
  handler: async (ctx, args) => {
    // Normalize null to undefined for internal use.
    const errorValue = args.error ?? undefined
    const currentPolicyValue = args.currentPolicy ?? undefined
    // Tokens are stored hashed; hash the presented token to look it up.
    const tokenHash = hashToken(args.machineToken)
    const tokenRecord = await ctx.db
      .query("machineTokens")
      .withIndex("by_token_hash", (q) => q.eq("tokenHash", tokenHash))
      .first()
    if (!tokenRecord || tokenRecord.revoked) {
      throw new Error("Token de maquina invalido ou revogado")
    }
    if (tokenRecord.expiresAt < Date.now()) {
      throw new Error("Token de maquina expirado")
    }
    const machine = await ctx.db.get(tokenRecord.machineId)
    if (!machine) {
      throw new Error("Dispositivo nao encontrado")
    }
    if (!USB_POLICY_STATUS.includes(args.status as UsbPolicyStatus)) {
      throw new Error(`Status de politica USB invalido: ${args.status}`)
    }
    const now = Date.now()
    await ctx.db.patch(machine._id, {
      usbPolicyStatus: args.status,
      usbPolicyError: errorValue,
      usbPolicyReportedAt: now,
      updatedAt: now,
    })
    const latestEvent = await ctx.db
      .query("usbPolicyEvents")
      .withIndex("by_machine_created", (q) => q.eq("machineId", machine._id))
      .order("desc")
      .first()
    // Update the event only while it is not yet terminal (PENDING or
    // APPLYING). This allows the transition:
    // PENDING -> APPLYING -> APPLIED/FAILED.
    if (latestEvent && (latestEvent.status === "PENDING" || latestEvent.status === "APPLYING")) {
      await ctx.db.patch(latestEvent._id, {
        status: args.status,
        error: errorValue,
        appliedAt: args.status === "APPLIED" ? now : undefined,
      })
    }
    return { ok: true }
  },
})
/**
 * Convex query: returns a machine's USB policy plus the agent's last
 * reported application state, or null when the machine does not exist.
 */
export const getUsbPolicy = query({
  args: {
    machineId: v.id("machines"),
  },
  handler: async (ctx, args) => {
    const machine = await ctx.db.get(args.machineId)
    if (!machine) {
      return null
    }
    // Missing fields are normalized: policy defaults to ALLOW, everything
    // else to null.
    const {
      usbPolicy,
      usbPolicyStatus,
      usbPolicyError,
      usbPolicyAppliedAt,
      usbPolicyReportedAt,
    } = machine
    return {
      policy: usbPolicy ?? "ALLOW",
      status: usbPolicyStatus ?? null,
      error: usbPolicyError ?? null,
      appliedAt: usbPolicyAppliedAt ?? null,
      reportedAt: usbPolicyReportedAt ?? null,
    }
  },
})
/**
 * Convex query: polled by the agent (authenticated via its raw machine
 * token) for a USB policy change awaiting application. Returns the pending
 * policy, or null when the token is invalid/expired, the machine is gone,
 * or nothing is pending.
 */
export const getPendingUsbPolicy = query({
  args: {
    machineToken: v.string(),
  },
  handler: async (ctx, args) => {
    // Tokens are stored hashed; hash the presented token to look it up.
    const tokenHash = hashToken(args.machineToken)
    const tokenRecord = await ctx.db
      .query("machineTokens")
      .withIndex("by_token_hash", (q) => q.eq("tokenHash", tokenHash))
      .first()
    if (!tokenRecord) return null
    if (tokenRecord.revoked || tokenRecord.expiresAt < Date.now()) return null
    const machine = await ctx.db.get(tokenRecord.machineId)
    if (!machine) return null
    // Only surface a policy while it is still waiting to be applied.
    if (machine.usbPolicyStatus !== "PENDING") return null
    return {
      policy: machine.usbPolicy ?? "ALLOW",
      appliedAt: machine.usbPolicyAppliedAt,
    }
  },
})
/**
 * Convex query: paginated audit log of USB policy changes for a machine,
 * newest first, with optional status and date-range filters.
 *
 * Pagination uses `createdAt` as an opaque cursor. NOTE(review): filters
 * are applied in memory after fetching at most 1000 events, so heavily
 * filtered histories longer than 1000 events may under-report — confirm
 * this limit is acceptable.
 */
export const listUsbPolicyEvents = query({
  args: {
    machineId: v.id("machines"),
    limit: v.optional(v.number()),
    cursor: v.optional(v.number()),
    status: v.optional(v.string()),
    dateFrom: v.optional(v.number()),
    dateTo: v.optional(v.number()),
  },
  handler: async (ctx, args) => {
    const limit = args.limit ?? 10
    const maxFetch = 1000 // Upper bound on events fetched per query.
    let events = await ctx.db
      .query("usbPolicyEvents")
      .withIndex("by_machine_created", (q) => q.eq("machineId", args.machineId))
      .order("desc")
      .take(maxFetch)
    // Cursor filter (pagination): keep only events older than the cursor.
    if (args.cursor !== undefined) {
      events = events.filter((e) => e.createdAt < args.cursor!)
    }
    // Status filter.
    if (args.status) {
      events = events.filter((e) => e.status === args.status)
    }
    // Date-range filters (inclusive bounds).
    if (args.dateFrom !== undefined) {
      events = events.filter((e) => e.createdAt >= args.dateFrom!)
    }
    if (args.dateTo !== undefined) {
      events = events.filter((e) => e.createdAt <= args.dateTo!)
    }
    const hasMore = events.length > limit
    const results = events.slice(0, limit)
    // Next cursor is the createdAt of the oldest event on this page.
    const nextCursor = results.length > 0 ? results[results.length - 1].createdAt : undefined
    return {
      events: results.map((event) => ({
        id: event._id,
        oldPolicy: event.oldPolicy,
        newPolicy: event.newPolicy,
        status: event.status,
        error: event.error,
        actorEmail: event.actorEmail,
        actorName: event.actorName,
        createdAt: event.createdAt,
        appliedAt: event.appliedAt,
      })),
      hasMore,
      nextCursor,
    }
  },
})
/**
 * Convex mutation: applies the same USB policy change to many machines.
 *
 * Each machine is processed independently — a missing machine or a failed
 * patch is recorded in the per-machine results instead of aborting the
 * batch. Mirrors setUsbPolicy (PENDING status + audit event) per machine.
 *
 * @throws Error only when the policy value itself is invalid.
 */
export const bulkSetUsbPolicy = mutation({
  args: {
    machineIds: v.array(v.id("machines")),
    policy: v.string(),
    actorId: v.optional(v.id("users")),
    actorEmail: v.optional(v.string()),
    actorName: v.optional(v.string()),
  },
  handler: async (ctx, args) => {
    if (!USB_POLICY_VALUES.includes(args.policy as UsbPolicyValue)) {
      throw new Error(`Politica USB invalida: ${args.policy}`)
    }
    const now = Date.now()
    const results: Array<{ machineId: Id<"machines">; success: boolean; error?: string }> = []
    for (const machineId of args.machineIds) {
      try {
        const machine = await ctx.db.get(machineId)
        if (!machine) {
          results.push({ machineId, success: false, error: "Dispositivo nao encontrado" })
          continue
        }
        // Machines without an explicit policy are treated as ALLOW.
        const oldPolicy = machine.usbPolicy ?? "ALLOW"
        await ctx.db.patch(machineId, {
          usbPolicy: args.policy,
          usbPolicyStatus: "PENDING",
          usbPolicyError: undefined,
          usbPolicyAppliedAt: now,
          updatedAt: now,
        })
        // Audit trail entry; finalized later by reportUsbPolicyStatus.
        await ctx.db.insert("usbPolicyEvents", {
          tenantId: machine.tenantId,
          machineId,
          actorId: args.actorId,
          actorEmail: args.actorEmail,
          actorName: args.actorName,
          oldPolicy,
          newPolicy: args.policy,
          status: "PENDING",
          createdAt: now,
        })
        results.push({ machineId, success: true })
      } catch (err) {
        results.push({ machineId, success: false, error: String(err) })
      }
    }
    return { results, total: args.machineIds.length, successful: results.filter((r) => r.success).length }
  },
})
/**
 * Cleanup of USB policies stuck in PENDING for longer than the threshold
 * (default 1 hour). Marks the machine and its latest PENDING audit event
 * as FAILED with a timeout message. Intended to run from a cron job.
 *
 * @param staleThresholdMs optional override of the staleness cutoff (ms).
 * @returns counts of machines cleaned vs. inspected.
 */
export const cleanupStalePendingPolicies = mutation({
  args: {
    staleThresholdMs: v.optional(v.number()),
  },
  handler: async (ctx, args) => {
    // Mandatory log line to avoid shape_inference errors with empty logLines.
    console.log("cron: cleanupStalePendingPolicies iniciado")
    const thresholdMs = args.staleThresholdMs ?? 3600000 // 1 hour by default.
    const now = Date.now()
    const cutoff = now - thresholdMs
    // Find machines still PENDING whose request predates the cutoff.
    const staleMachines = await ctx.db
      .query("machines")
      .withIndex("by_usbPolicyStatus", (q) => q.eq("usbPolicyStatus", "PENDING"))
      .filter((q) => q.lt(q.field("usbPolicyAppliedAt"), cutoff))
      .take(1000)
    let cleaned = 0
    for (const machine of staleMachines) {
      await ctx.db.patch(machine._id, {
        usbPolicyStatus: "FAILED",
        usbPolicyError: "Timeout: Agent nao reportou status apos 1 hora. Verifique se o agent esta ativo.",
        updatedAt: now,
      })
      // Finalize the corresponding audit event as well.
      const latestEvent = await ctx.db
        .query("usbPolicyEvents")
        .withIndex("by_machine_created", (q) => q.eq("machineId", machine._id))
        .order("desc")
        .first()
      if (latestEvent && latestEvent.status === "PENDING") {
        await ctx.db.patch(latestEvent._id, {
          status: "FAILED",
          error: "Timeout automatico",
        })
      }
      cleaned++
    }
    return { cleaned, checked: staleMachines.length }
  },
})

View file

@ -71,10 +71,7 @@ export const ensureUser = mutation({
return reconciled;
}
} else {
// Busca por email em todos os tenants (usando limite para evitar OOM)
// Nota: isso e ineficiente sem indice global por email
const users = await ctx.db.query("users").take(5000);
const anyTenant = users.find((user) => user.email === args.email);
const anyTenant = (await ctx.db.query("users").collect()).find((user) => user.email === args.email);
if (anyTenant) {
const reconciled = await reconcile(anyTenant);
if (reconciled) {
@ -103,7 +100,7 @@ export const listAgents = query({
const users = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(5000);
.collect();
// Only internal staff (ADMIN/AGENT) should appear as responsáveis
return users
@ -128,7 +125,7 @@ export const listCustomers = query({
const users = await ctx.db
.query("users")
.withIndex("by_tenant", (q) => q.eq("tenantId", tenantId))
.take(5000);
.collect();
const allowed = users.filter((user) => {
const role = (user.role ?? "COLLABORATOR").toUpperCase()
@ -215,7 +212,7 @@ export const deleteUser = mutation({
const comments = await ctx.db
.query("ticketComments")
.withIndex("by_author", (q) => q.eq("authorId", userId))
.take(10000);
.collect();
if (comments.length > 0) {
const authorSnapshot = {
name: user.name,
@ -243,7 +240,7 @@ export const deleteUser = mutation({
const requesterTickets = await ctx.db
.query("tickets")
.withIndex("by_tenant_requester", (q) => q.eq("tenantId", user.tenantId).eq("requesterId", userId))
.take(10000);
.collect();
if (requesterTickets.length > 0) {
const requesterSnapshot = {
name: user.name,
@ -267,7 +264,7 @@ export const deleteUser = mutation({
const directReports = await ctx.db
.query("users")
.withIndex("by_tenant_manager", (q) => q.eq("tenantId", user.tenantId).eq("managerId", userId))
.take(1000);
.collect();
await Promise.all(
directReports.map(async (report) => {
await ctx.db.patch(report._id, { managerId: undefined });
@ -279,86 +276,6 @@ export const deleteUser = mutation({
},
});
/**
 * Convex mutation: updates a user's avatar.
 *
 * Pass avatarUrl as null to remove the avatar. Also propagates the change
 * to the denormalized author/requester/assignee snapshots stored on
 * comments and tickets (capped at 10000 rows per collection per run).
 *
 * @returns { status: "not_found" } when the user does not exist, otherwise
 *          { status: "updated", avatarUrl }.
 */
export const updateAvatar = mutation({
  args: {
    tenantId: v.string(),
    email: v.string(),
    avatarUrl: v.union(v.string(), v.null()),
  },
  handler: async (ctx, { tenantId, email, avatarUrl }) => {
    const user = await ctx.db
      .query("users")
      .withIndex("by_tenant_email", (q) => q.eq("tenantId", tenantId).eq("email", email))
      .first()
    if (!user) {
      return { status: "not_found" }
    }
    // Update the user's avatar — undefined removes the field entirely.
    const normalizedAvatarUrl = avatarUrl ?? undefined
    await ctx.db.patch(user._id, { avatarUrl: normalizedAvatarUrl })
    // Build the base snapshot without avatarUrl when it is undefined,
    // which guarantees the field is truly removed from the snapshot.
    const baseSnapshot: { name: string; email: string; avatarUrl?: string; teams?: string[] } = {
      name: user.name,
      email: user.email,
    }
    if (normalizedAvatarUrl !== undefined) {
      baseSnapshot.avatarUrl = normalizedAvatarUrl
    }
    if (user.teams && user.teams.length > 0) {
      baseSnapshot.teams = user.teams
    }
    // Refresh author snapshots on the user's comments.
    const comments = await ctx.db
      .query("ticketComments")
      .withIndex("by_author", (q) => q.eq("authorId", user._id))
      .take(10000)
    if (comments.length > 0) {
      await Promise.all(
        comments.map(async (comment) => {
          await ctx.db.patch(comment._id, { authorSnapshot: baseSnapshot })
        }),
      )
    }
    // Refresh requester snapshots on tickets opened by the user.
    const requesterTickets = await ctx.db
      .query("tickets")
      .withIndex("by_tenant_requester", (q) => q.eq("tenantId", user.tenantId).eq("requesterId", user._id))
      .take(10000)
    if (requesterTickets.length > 0) {
      for (const t of requesterTickets) {
        await ctx.db.patch(t._id, { requesterSnapshot: baseSnapshot })
      }
    }
    // Refresh assignee snapshots on tickets assigned to the user.
    const assigneeTickets = await ctx.db
      .query("tickets")
      .withIndex("by_tenant_assignee", (q) => q.eq("tenantId", user.tenantId).eq("assigneeId", user._id))
      .take(10000)
    if (assigneeTickets.length > 0) {
      for (const t of assigneeTickets) {
        await ctx.db.patch(t._id, { assigneeSnapshot: baseSnapshot })
      }
    }
    return { status: "updated", avatarUrl: normalizedAvatarUrl }
  },
})
export const assignCompany = mutation({
args: { tenantId: v.string(), email: v.string(), companyId: v.id("companies"), actorId: v.id("users") },
handler: async (ctx, { tenantId, email, companyId, actorId }) => {

View file

@ -1,81 +0,0 @@
# Automações ▸ Envio de e-mails (guia de manutenção)
## Visão geral
O envio de e-mails via automações funciona como uma **ação** dentro do motor de automações de tickets.
Por motivos de compatibilidade e segurança:
- O HTML do e-mail é gerado com **tabelas + CSS inline** (compatível com Gmail/Outlook/Apple Mail).
- O envio (SMTP) acontece em **Convex Action** (`"use node"`), porque mutações Convex não devem fazer I/O de rede.
## Onde as automações disparam
Os eventos de ticket chamam o motor de automações em `convex/tickets.ts`:
- Criação do ticket (`TICKET_CREATED`)
- Alteração de status (`STATUS_CHANGED`)
- Alteração de prioridade (`PRIORITY_CHANGED`)
- Alteração de fila (`QUEUE_CHANGED`)
- Inclusão de comentário (`COMMENT_ADDED`)
- Finalização/resolução (`TICKET_RESOLVED`)
## Onde a ação é validada e aplicada
Arquivo: `convex/automations.ts`
Pontos principais:
- **Validação/parse** da ação `SEND_EMAIL` em `parseAction(...)`.
- **Execução** em `applyActions(...)`:
- Resolve destinatários (solicitante, responsável, usuário interno e e-mails livres).
- Faz interpolação de variáveis `{{...}}` em assunto/mensagem.
- Gera o HTML via **React Email** em `convex/reactEmail.tsx` (usando `@react-email/render`).
- Agenda o envio via `ctx.scheduler.runAfter(1, api.ticketNotifications.sendAutomationEmail, ...)`.
## Onde o e-mail é enviado de fato (SMTP)
Arquivo: `convex/ticketNotifications.ts`
- A action `sendAutomationEmail` faz o envio via SMTP e aceita:
- `to`: lista de destinatários
- `subject`: assunto
- `html`: HTML já renderizado
Observação: para que um destinatário não veja os endereços dos demais, o envio é feito **um a um** (um e-mail separado por destinatário).
## Templates de e-mail
Templates (React Email) ficam em `emails/` e podem ser pré-visualizados localmente.
Templates adicionados:
- `emails/automation-email.tsx`: usado pela ação `SEND_EMAIL` (inclui cartão com dados do ticket + CTA).
- `emails/simple-notification-email.tsx`: usado por notificações simples (comentário público / encerramento).
Renderização para HTML (backend):
- `convex/reactEmail.tsx`: `renderAutomationEmailHtml(...)` e `renderSimpleNotificationEmailHtml(...)`.
## Variáveis suportadas (interpolação)
Você pode usar estas variáveis em **Assunto** e **Mensagem**:
- `{{ticket.reference}}`
- `{{ticket.subject}}`
- `{{ticket.status}}`
- `{{ticket.priority}}`
- `{{company.name}}`
- `{{requester.name}}`
- `{{assignee.name}}`
- `{{ticket.url.portal}}`
- `{{ticket.url.staff}}`
- `{{automation.name}}`
## Link do botão (CTA)
A UI permite escolher:
- `Auto` (padrão): se houver destinatário interno (responsável/usuário) usa **Painel**; caso contrário usa **Portal**.
- `Portal (cliente)`: `/portal/tickets/:id`
- `Painel (agente)`: `/tickets/:id`
Se você precisar enviar para cliente **e** agente no mesmo evento, prefira criar **duas ações SEND_EMAIL** (uma com link Portal e outra com link Painel).
## Variáveis de ambiente (SMTP)
O envio no Convex tenta usar:
- `SMTP_ADDRESS` ou `SMTP_HOST`
- `SMTP_USERNAME` ou `SMTP_USER`
- `SMTP_PASSWORD` ou `SMTP_PASS`
- `SMTP_PORT` (default `465`)
- `MAILER_SENDER_EMAIL` (legacy) ou `SMTP_FROM_EMAIL` + `SMTP_FROM_NAME`
## Testes de regressão
Arquivo: `tests/automations-engine.test.ts`
- O teste adiciona um cenário em que a ação `SEND_EMAIL` está presente e valida que o envio é agendado via `scheduler.runAfter`.

Some files were not shown because too many files have changed in this diff Show more