PC SOFT

PROFESSIONAL NEWSGROUPS
WINDEV, WEBDEV and WINDEV Mobile

Home → WINDEV 25 → Como instalar pelo Powershell a sua iA e usar ela com os seus sistemas
Como instalar pelo Powershell a sua iA e usar ela com os seus sistemas
Started by Boller, Jan. 29, 2026 11:35 AM - No answer
Registered member
4,618 messages
Posted on January 29, 2026 - 11:35 AM
<#
Phoenix AI - Windows 11 Setup (Ollama + LLaMA + RAG + LangGraph)
- Instala dependências Python
- Cria venv
- Baixa modelos no Ollama
- Cria estrutura kb/
- Gera scripts index_kb.py e rag_graph.py
- Roda index e inicia o chat RAG

Como usar:
1) Instale manualmente: Python 3.11+ (marque "Add to PATH") e Ollama for Windows.
2) Abra PowerShell como Usuário normal.
3) Rode: .\setup_phoenix_ai.ps1

Obs: Se der erro de ExecutionPolicy:
Set-ExecutionPolicy -Scope CurrentUser RemoteSigned
#>

# Make every error terminating so the script stops at the first failure.
$ErrorActionPreference = "Stop"

# Print a cyan section banner so each phase of the setup is easy to spot.
function Write-Step($msg) {
    Write-Host "`n==> $msg" -ForegroundColor Cyan
}

# Abort the whole script (exit 1) when $cmd is not resolvable on PATH,
# printing the error plus an installation hint for the user.
function Assert-Command($cmd, $hint) {
    $resolved = Get-Command $cmd -ErrorAction SilentlyContinue
    if ($null -eq $resolved) {
        Write-Host "`nERRO: '$cmd' não encontrado no PATH." -ForegroundColor Red
        Write-Host "Dica: $hint" -ForegroundColor Yellow
        exit 1
    }
}

# --- Prerequisites -----------------------------------------------------------
# Table-driven check: each required tool paired with its installation hint.
Write-Step "Checando pré-requisitos (python, pip, ollama)"
$prereqs = @(
    @{ Cmd = "python"; Hint = "Instale Python 3.11+ (64-bit) e marque 'Add Python to PATH'." },
    @{ Cmd = "pip";    Hint = "O pip vem junto do Python. Reinstale o Python marcando PATH." },
    @{ Cmd = "ollama"; Hint = "Instale o Ollama for Windows e reabra o PowerShell." }
)
foreach ($p in $prereqs) {
    Assert-Command $p.Cmd $p.Hint
}

Write-Step "Mostrando versões"
python --version
pip --version
ollama --version

# --- Project folder ----------------------------------------------------------
# Created under the directory the script was launched from; reused if present.
$ProjectDir = Join-Path $PWD "phoenix_ai"
Write-Step "Criando pasta do projeto: $ProjectDir"
if (-not (Test-Path $ProjectDir)) {
    New-Item -ItemType Directory -Path $ProjectDir | Out-Null
}
Set-Location $ProjectDir

# Criar venv
$VenvDir = Join-Path $ProjectDir ".venv"
Write-Step "Criando venv em: $VenvDir"
if (-not (Test-Path $VenvDir)) {
python -m venv .venv
} else {
Write-Host "Venv já existe. Mantendo." -ForegroundColor DarkGray
}

# Ativar venv
Write-Step "Ativando venv"
$ActivatePs1 = Join-Path $VenvDir "Scripts\Activate.ps1"
if (-not (Test-Path $ActivatePs1)) {
Write-Host "ERRO: Não achei $ActivatePs1" -ForegroundColor Red
exit 1
}
. $ActivatePs1

# --- Python dependencies & Ollama models -------------------------------------
Write-Step "Atualizando pip"
python -m pip install -U pip

Write-Step "Instalando dependências (langgraph, langchain, ollama, chroma)"
pip install -U langgraph langchain langchain-ollama langchain-chroma chromadb langchain-text-splitters

# llama3.1 is the chat model; mxbai-embed-large provides the embeddings.
Write-Step "Baixando modelos no Ollama (llama3.1 + embeddings)"
foreach ($model in @("llama3.1", "mxbai-embed-large")) {
    ollama pull $model
}

# --- Knowledge-base folder layout --------------------------------------------
# kb/ is split by document kind: commands/, functions/, cases/.
Write-Step "Criando estrutura de base de conhecimento (kb/...)"
$KbDir = Join-Path $ProjectDir "kb"
$KbCommands = Join-Path $KbDir "commands"
$KbFunctions = Join-Path $KbDir "functions"
$KbCases = Join-Path $KbDir "cases"

@($KbDir, $KbCommands, $KbFunctions, $KbCases) | ForEach-Object {
    if (-not (Test-Path $_)) {
        New-Item -ItemType Directory -Path $_ | Out-Null
    }
}

# Seed document so the very first index run has something to embed.
$SampleMd = Join-Path $KbCommands "print.md"
if (-not (Test-Path $SampleMd)) {
@"
# print

## O que faz
Imprime texto na saída padrão.

## Sintaxe
print(text)

## Parâmetros
- text: string

## Retorno
- void

## Exemplo
print("Olá, Phoenix!")
"@ | Set-Content -Path $SampleMd -Encoding utf8
}

# Generate index_kb.py (loads kb/*.md, chunks them, embeds into Chroma).
# NOTE: a single-quoted here-string (@'...'@) is used so PowerShell never
# interpolates `$var` or `$(...)` sequences that may appear in the embedded
# Python source — the payload is written to disk byte-for-byte.
Write-Step "Gerando index_kb.py"
$IndexPy = Join-Path $ProjectDir "index_kb.py"
@'
import os
from pathlib import Path

from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_chroma import Chroma
from langchain_ollama import OllamaEmbeddings

KB_DIR = Path("kb")
CHROMA_DIR = Path("chroma_db")
COLLECTION = "phoenix_kb"

def load_markdown_files(root: Path):
    docs = []
    for p in root.rglob("*.md"):
        text = p.read_text(encoding="utf-8", errors="ignore")
        rel = p.relative_to(root).as_posix()
        docs.append({"text": text, "metadata": {"source": rel}})
    return docs

def main():
    if not KB_DIR.exists():
        raise SystemExit("Pasta kb/ não existe. Crie kb/ e coloque .md lá dentro.")

    embeddings = OllamaEmbeddings(model="mxbai-embed-large")

    vectorstore = Chroma(
        collection_name=COLLECTION,
        persist_directory=str(CHROMA_DIR),
        embedding_function=embeddings,
    )

    raw_docs = load_markdown_files(KB_DIR)

    splitter = RecursiveCharacterTextSplitter(
        chunk_size=1200,
        chunk_overlap=150,
    )

    texts = []
    metadatas = []
    for d in raw_docs:
        chunks = splitter.split_text(d["text"])
        for i, c in enumerate(chunks):
            texts.append(c)
            metadatas.append({**d["metadata"], "chunk": i})

    vectorstore.add_texts(texts=texts, metadatas=metadatas)

    print(f"OK: indexados {len(texts)} chunks em {CHROMA_DIR}/ coleção {COLLECTION}")

if __name__ == "__main__":
    main()
'@ | Set-Content -Path $IndexPy -Encoding utf8

# Generate rag_graph.py (LangGraph retrieve -> generate pipeline + chat loop).
# BUG FIX: the original used an expanding here-string (@"..."@) and tried to
# escape quotes bash-style as f\""" — PowerShell writes the backslash
# literally, so the generated file contained `prompt = f\"""`, a Python
# SyntaxError. A single-quoted here-string (@'...'@) with plain f""" emits
# valid Python and also prevents accidental `$` interpolation.
Write-Step "Gerando rag_graph.py"
$RagPy = Join-Path $ProjectDir "rag_graph.py"
@'
from typing_extensions import TypedDict, List

from langgraph.graph import StateGraph, START, END
from langchain_chroma import Chroma
from langchain_ollama import ChatOllama, OllamaEmbeddings

CHROMA_DIR = "chroma_db"
COLLECTION = "phoenix_kb"

class State(TypedDict):
    question: str
    contexts: List[str]
    answer: str

def retrieve(state: State) -> dict:
    embeddings = OllamaEmbeddings(model="mxbai-embed-large")
    vs = Chroma(
        collection_name=COLLECTION,
        persist_directory=CHROMA_DIR,
        embedding_function=embeddings,
    )
    retriever = vs.as_retriever(search_kwargs={"k": 6})
    docs = retriever.get_relevant_documents(state["question"])
    contexts = [d.page_content for d in docs]
    return {"contexts": contexts}

def generate(state: State) -> dict:
    llm = ChatOllama(model="llama3.1", temperature=0)

    context_text = "\n\n---\n\n".join(state["contexts"]).strip()

    prompt = f"""
Você é o assistente oficial da linguagem Phoenix.
Responda APENAS usando o contexto abaixo. Se o contexto não tiver a resposta, diga:
"Não sei com base na base atual."

CONTEXTO:
{context_text}

PERGUNTA:
{state["question"]}

RESPOSTA (com exemplo de código e notas de uso):
""".strip()

    out = llm.invoke(prompt)
    return {"answer": out.content}

def build_app():
    g = StateGraph(State)
    g.add_node("retrieve", retrieve)
    g.add_node("generate", generate)

    g.add_edge(START, "retrieve")
    g.add_edge("retrieve", "generate")
    g.add_edge("generate", END)

    return g.compile()

if __name__ == "__main__":
    app = build_app()

    while True:
        q = input("\nPergunta> ").strip()
        if not q:
            continue
        if q.lower() in ("exit", "quit"):
            break

        result = app.invoke({"question": q, "contexts": [], "answer": ""})
        print("\n" + result["answer"])
'@ | Set-Content -Path $RagPy -Encoding utf8

# Index the knowledge base into the local Chroma store (re-run index_kb.py
# whenever files under kb/ change).
Write-Step "Indexando base de conhecimento (index_kb.py)"
python .\index_kb.py

# Start the interactive RAG chat loop; the user types "exit" (or "quit") to leave.
Write-Step "Iniciando chat RAG (rag_graph.py). Para sair: exit"
python .\rag_graph.py

--
Adriano José Boller
______________________________________________
Consultor e Representante Oficial da
PcSoft no Brasil
+55 (41) 99949 1800
adrianoboller@gmail.com
skype: adrianoboller
http://wxinformatica.com.br/