1
Get an API Key
Sign up at /signup for a free key (100 calls/day).
2
Pick Your Framework
Gnist Context serves tool definitions in the native format for 7 AI agent frameworks. Each connector returns the same tools — just shaped for your framework’s expected schema.
| Framework | Endpoint |
|---|---|
| OpenAI | /connectors/openai |
| LangChain | /connectors/langchain |
| CrewAI | /connectors/crewai |
| Pydantic AI | /connectors/pydantic-ai |
| LlamaIndex | /connectors/llamaindex |
| AutoGen / AG2 | /connectors/autogen |
| Mastra | /connectors/mastra |
All endpoints also accept a server filter: /connectors/{framework}/{server} (e.g. /connectors/openai/arxiv).
3
Fetch & Use Tools
Choose your framework below for a complete working example.
Python — OpenAI
pip install openai httpx
Python
import json

import httpx
from openai import OpenAI

GNIST_KEY = "YOUR_GNIST_API_KEY"
BASE = "https://context.gnist.ai"

# 1. Fetch tool definitions in OpenAI format
resp = httpx.get(
    f"{BASE}/connectors/openai/arxiv",
    headers={"Gnist-API-Key": GNIST_KEY},
)
tools = resp.json()["tools"]

# 2. Use tools in a chat completion
client = OpenAI()
response = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Find recent papers on MCP"}],
    tools=tools,
)

# 3. Execute each tool call via the Gnist REST bridge.
# Tool names are namespaced "<server>__<tool>"; the bridge expects only
# the bare tool name, hence the split on "__".
for call in response.choices[0].message.tool_calls or []:
    result = httpx.post(
        f"{BASE}/rest/arxiv/{call.function.name.split('__')[-1]}",
        headers={"Gnist-API-Key": GNIST_KEY},
        json=json.loads(call.function.arguments),
    )
    print(result.json())
Python — LangChain
pip install langchain-openai httpx
Python
import httpx
from langchain_openai import ChatOpenAI

GNIST_KEY = "YOUR_GNIST_API_KEY"
BASE = "https://context.gnist.ai"

# 1. Fetch tool definitions in LangChain format
connector_resp = httpx.get(
    f"{BASE}/connectors/langchain/arxiv",
    headers={"Gnist-API-Key": GNIST_KEY},
)
tools = connector_resp.json()["tools"]

# 2. Bind tools to a LangChain LLM
llm_with_tools = ChatOpenAI(model="gpt-4o").bind_tools(tools)

# 3. Invoke, then relay each tool call to the Gnist REST bridge
result = llm_with_tools.invoke("Find recent papers on MCP")
for tool_call in result.tool_calls:
    # Names are namespaced "<server>__<tool>"; the bridge wants the bare name.
    tool_name = tool_call["name"].split("__")[-1]
    bridge_resp = httpx.post(
        f"{BASE}/rest/arxiv/{tool_name}",
        headers={"Gnist-API-Key": GNIST_KEY},
        json=tool_call["args"],
    )
    print(bridge_resp.json())
Python — CrewAI
pip install crewai httpx
Python
import httpx
from crewai.tools import BaseTool
from pydantic import Field

GNIST_KEY = "YOUR_GNIST_API_KEY"
BASE = "https://context.gnist.ai"

# 1. Fetch tool definitions in CrewAI format
# Each tool has: name, description, args_schema, return_direct
resp = httpx.get(
    f"{BASE}/connectors/crewai/arxiv",
    headers={"Gnist-API-Key": GNIST_KEY},
)
tools = resp.json()["tools"]


# 2. A CrewAI tool that proxies execution to the Gnist REST bridge
class GnistTool(BaseTool):
    name: str = "gnist_tool"
    description: str = "Gnist Context tool"
    server: str = Field(default="arxiv")
    tool_name: str = Field(default="search_papers")

    def _run(self, **kwargs):
        """Forward the invocation to /rest/{server}/{tool} and return its JSON."""
        resp = httpx.post(
            f"{BASE}/rest/{self.server}/{self.tool_name}",
            headers={"Gnist-API-Key": GNIST_KEY},
            json=kwargs,
        )
        return resp.json()


# 3. Create one wrapper per fetched definition.
# Names are namespaced "<server>__<tool>"; the bridge expects the bare name.
crewai_tools = [
    GnistTool(
        name=t["name"],
        description=t["description"],
        server="arxiv",
        tool_name=t["name"].split("__")[-1],
    )
    for t in tools
]
Python — Pydantic AI
pip install pydantic-ai httpx
Python
import httpx
from pydantic_ai import Agent, Tool

GNIST_KEY = "YOUR_GNIST_API_KEY"
BASE = "https://context.gnist.ai"

# 1. Fetch tool definitions in Pydantic AI format
# Each tool has: name, description, parameters_json_schema
resp = httpx.get(
    f"{BASE}/connectors/pydantic-ai/arxiv",
    headers={"Gnist-API-Key": GNIST_KEY},
)
tools = resp.json()["tools"]


# 2. Async executor that forwards a tool call to the Gnist REST bridge
async def call_gnist(server: str, tool_name: str, args: dict) -> dict:
    """POST *args* to /rest/{server}/{tool_name} and return the JSON reply."""
    async with httpx.AsyncClient() as client:
        resp = await client.post(
            f"{BASE}/rest/{server}/{tool_name}",
            headers={"Gnist-API-Key": GNIST_KEY},
            json=args,
        )
        return resp.json()


# 3. Register tools dynamically
agent = Agent("openai:gpt-4o")
for t in tools:
    print(f"Available: {t['name']} — {t['description'][:80]}")
Python — LlamaIndex
pip install llama-index httpx
Python
import httpx
from llama_index.core.tools import FunctionTool

GNIST_KEY = "YOUR_GNIST_API_KEY"
BASE = "https://context.gnist.ai"

# 1. Fetch tool definitions in LlamaIndex format
# Each tool has: tool_metadata (name, description, fn_schema)
resp = httpx.get(
    f"{BASE}/connectors/llamaindex/arxiv",
    headers={"Gnist-API-Key": GNIST_KEY},
)
tools = resp.json()["tools"]


# 2. Build LlamaIndex FunctionTool wrappers
def make_gnist_fn(server, tool_name):
    """Return a closure that POSTs its kwargs to the Gnist REST bridge.

    A factory is used (rather than defining fn inside the loop below) so each
    closure binds its own server/tool_name instead of the last loop values.
    """
    def fn(**kwargs):
        resp = httpx.post(
            f"{BASE}/rest/{server}/{tool_name}",
            headers={"Gnist-API-Key": GNIST_KEY},
            json=kwargs,
        )
        return resp.json()

    return fn


# 3. Create one FunctionTool per fetched definition.
# Names are namespaced "<server>__<tool>"; the bridge expects the bare name.
llama_tools = []
for t in tools:
    meta = t["tool_metadata"]
    fn = make_gnist_fn("arxiv", meta["name"].split("__")[-1])
    tool = FunctionTool.from_defaults(
        fn=fn, name=meta["name"], description=meta["description"]
    )
    llama_tools.append(tool)
Python — AutoGen / AG2
pip install autogen-agentchat httpx
Python
import json

import httpx

GNIST_KEY = "YOUR_GNIST_API_KEY"
BASE = "https://context.gnist.ai"

# 1. Fetch tool definitions in AutoGen format (OpenAI function calling):
# same structure as OpenAI — type=function, function={name, description, parameters}
resp = httpx.get(
    f"{BASE}/connectors/autogen/arxiv",
    headers={"Gnist-API-Key": GNIST_KEY},
)
tools = resp.json()["tools"]


# 2. Executor that relays an LLM tool call to the Gnist REST bridge
def execute_gnist_tool(name: str, arguments: str) -> str:
    """Run tool *name* with JSON-encoded *arguments*; return the reply as a JSON string."""
    # Names are namespaced "<server>__<tool>"; the bridge expects the bare name.
    tool_name = name.split("__")[-1]
    resp = httpx.post(
        f"{BASE}/rest/arxiv/{tool_name}",
        headers={"Gnist-API-Key": GNIST_KEY},
        json=json.loads(arguments),
    )
    return json.dumps(resp.json())


# 3. Pass tool definitions to your AutoGen assistant
# assistant = AssistantAgent("researcher", llm_config={"tools": tools})
print(f"Loaded {len(tools)} AutoGen tools")
TypeScript — Mastra
npm install @mastra/core
TypeScript
const GNIST_KEY = "YOUR_GNIST_API_KEY";
const BASE = "https://context.gnist.ai";

// 1. Fetch tool definitions in Mastra format.
// Each tool has: id, description, inputSchema (Zod-compatible JSON Schema).
const connectorResp = await fetch(`${BASE}/connectors/mastra/arxiv`, {
  headers: { "Gnist-API-Key": GNIST_KEY },
});
const { tools } = await connectorResp.json();

// 2. Execute a tool via the Gnist REST bridge.
// Tool ids are namespaced "<server>__<tool>"; the bridge wants the bare name.
async function callGnist(toolId: string, args: Record<string, unknown>) {
  const toolName = toolId.split("__").pop()!;
  const bridgeResp = await fetch(`${BASE}/rest/arxiv/${toolName}`, {
    method: "POST",
    headers: {
      "Gnist-API-Key": GNIST_KEY,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(args),
  });
  return bridgeResp.json();
}

// 3. Use with a Mastra agent.
console.log(`Loaded ${tools.length} Mastra tools`);
4
Filter by Toolkit
Each connector endpoint supports per-server filtering. Append the toolkit slug to get only that toolkit’s tools:
Shell
# All tools (all toolkits)
curl "https://context.gnist.ai/connectors/openai" -H "Gnist-API-Key: YOUR_API_KEY"

# Only arxiv tools
curl "https://context.gnist.ai/connectors/openai/arxiv" -H "Gnist-API-Key: YOUR_API_KEY"

# Pretty-printed output
curl "https://context.gnist.ai/connectors/langchain/arxiv?pretty=true" -H "Gnist-API-Key: YOUR_API_KEY"
Browse all available toolkits at /toolkits.
5
Response Format
Every connector returns a consistent wrapper:
JSON
{
"format": "openai",
"metadata": {
"base_url": "https://context.gnist.ai",
"server_count": 1,
"tool_count": 5,
"filter": "arxiv"
},
"tools": [
{
"type": "function",
"function": { "name": "arxiv__search_papers", ... },
"metadata": {
"server": "arxiv",
"rest_endpoint": "https://context.gnist.ai/rest/arxiv/search_papers",
"mcp_endpoint": "https://context.gnist.ai/mcp/arxiv/"
}
}
]
}
The metadata on each tool gives you the REST and MCP endpoints to call after the LLM selects a tool.
6
Explore
REST API Quickstart
Direct API calls with curl, Python, and TypeScript.
/quickstart/api
MCP Quickstart
Connect as an MCP server in Claude Desktop or Cursor.
/quickstart/mcp
Browse Toolkits
All data sources with full API documentation.
/toolkits
OpenAPI Spec
Machine-readable spec for all REST endpoints.
/api/openapi.json