# O modelo foi removido pelo Google. Testei com outro modelo e funcionou.
###########################
# 0. Configuração do Ambiente
# Install the SDK before running this script (was the notebook magic
# `!pip install -U google-genai`, which is not valid plain Python):
#   pip install -U google-genai
import os

from google import genai

# NOTE(review): "SUA_CHAVE" is a placeholder — set a real key, or export
# GOOGLE_API_KEY in the shell. setdefault avoids clobbering a key that is
# already configured in the environment.
os.environ.setdefault("GOOGLE_API_KEY", "SUA_CHAVE")

client = genai.Client()

# List the available models to confirm we are on the right endpoint (v1)
# and that the chosen model still exists.
for m in client.models.list():
    print(m.name)

# Smoke-test: one simple generation call against the chosen model.
response = client.models.generate_content(
    model="models/gemini-2.5-flash",
    contents="What is HPC?",
)
print(response.text)
###########################
# 1. Prompting Tradicional vs RAG — Comparação Prática
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import ChatPromptTemplate

# temperature=0 makes the output deterministic, so the traditional-prompting
# answer can be compared fairly against a RAG answer later.
llm = ChatGoogleGenerativeAI(model="models/gemini-2.5-flash", temperature=0)

# Minimal two-message template; the caller's text fills the {input} slot.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant"),
        ("human", "{input}"),
    ]
)

# LCEL pipeline: render the prompt, then send it to the model.
chain = prompt | llm
print(chain.invoke({"input": "O que é HPC?"}).content)