Initial release — APIClient - Agent v2.0.0
AI-first API testing desktop client built with Python + PyQt6. Features: - Multi-tab HTTP request editor with params/headers/body/auth/tests - KeyValueTable with per-row enable/disable checkboxes and 36px rows - Format JSON button, syntax highlighting, pre-request & test scripts - Collections, environments, history, import/export (Postman v2.1, cURL) - OpenAPI 3.x / Swagger 2.0 local parser (no AI tokens) - EKIKA Odoo API Framework generator — JSON-API, REST JSON, GraphQL, Custom REST JSON with all auth types (instant, no AI tokens) - Persistent AI chat sidebar (Claude-powered co-pilot) with streaming, context-aware suggestions, and one-click Apply to request editor - AI collection generator from any docs URL or pasted spec - WebSocket client, Mock server, Collection runner, Code generator - Dark/light theme engine (global QSS, object-name selectors) - SSL error detection with actionable hints - MIT License Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
0
app/core/__init__.py
Normal file
0
app/core/__init__.py
Normal file
189
app/core/ai_chat.py
Normal file
189
app/core/ai_chat.py
Normal file
@@ -0,0 +1,189 @@
|
||||
"""APIClient - Agent — Conversational AI co-pilot core."""
|
||||
import json
|
||||
import re
|
||||
import httpx
|
||||
from app.core import storage
|
||||
|
||||
MAX_HISTORY_MSGS = 30
|
||||
|
||||
_SYSTEM_PROMPT = """\
|
||||
You are APIClient - Agent, an expert AI API testing co-pilot embedded in the APIClient - Agent desktop application.
|
||||
|
||||
Your responsibilities:
|
||||
• Help craft and debug HTTP requests (REST, JSON-API, GraphQL, Odoo APIs)
|
||||
• Analyze HTTP responses — status codes, headers, body structure, errors
|
||||
• Specialize in the EKIKA Odoo API Framework:
|
||||
- JSON-API (Content-Type: application/vnd.api+json) — body format: {"data": {"type": model, "attributes": {...}}}
|
||||
- REST JSON (Content-Type: application/json)
|
||||
- GraphQL (POST with {"query": "..."} body)
|
||||
- Auth: x-api-key header, Basic Auth, OAuth2 Bearer, JWT Bearer
|
||||
- Odoo models: sale.order, res.partner, account.move, product.template, stock.picking, etc.
|
||||
• Generate request bodies, params, headers, and test scripts
|
||||
• Explain SSL/TLS errors, auth failures, and connection issues
|
||||
• Help with environment variable setup ({{base_url}}, {{api_key}}, etc.)
|
||||
|
||||
When you produce content the user should apply to their request, use EXACTLY these fences:
|
||||
|
||||
```apply:body
|
||||
{ "json": "here" }
|
||||
```
|
||||
|
||||
```apply:params
|
||||
page=1
|
||||
limit=10
|
||||
fields=id,name
|
||||
```
|
||||
|
||||
```apply:headers
|
||||
x-api-key: {{api_key}}
|
||||
Accept: application/vnd.api+json
|
||||
```
|
||||
|
||||
```apply:test
|
||||
pm.test('Status 200', lambda: pm.response.to_have_status(200))
|
||||
pm.test('Has data', lambda: expect(pm.response.json()).to_have_key('data'))
|
||||
```
|
||||
|
||||
Rules:
|
||||
- Be concise and actionable — explain WHY, not just WHAT
|
||||
- If you add apply blocks, briefly explain what each block does
|
||||
- For JSON-API responses: data is in response.data, errors in response.errors
|
||||
- For SSL cert errors: tell user to uncheck SSL verification in the Settings tab
|
||||
- For 401/403: check x-api-key header and environment variable values
|
||||
- For unresolved {{variable}}: tell user to set up environment via Tools → Environments
|
||||
"""
|
||||
|
||||
|
||||
class AIError(Exception):
    """Raised for any AI chat failure: missing API key, HTTP/network error, or timeout."""
    pass
|
||||
|
||||
|
||||
def get_api_key() -> str:
    """Return the stored Anthropic API key, or "" when none is configured."""
    return storage.get_setting("anthropic_api_key", "")
|
||||
|
||||
|
||||
def build_context(req=None, resp=None, env_vars: dict = None) -> str:
    """Build compact context string for system prompt injection.

    Sensitive-looking header/env values (keys containing key/token/secret/
    auth/pass) are masked with "***" before being serialized.
    """
    lines = []

    def _mask(mapping, needles):
        # Replace the value of any sensitive-looking key with "***".
        return {
            name: ("***" if any(n in name.lower() for n in needles) else value)
            for name, value in mapping.items()
        }

    if req:
        lines.append(f"METHOD: {req.method}")
        lines.append(f"URL: {req.url}")
        if req.headers:
            masked = _mask(req.headers, ["key", "token", "secret", "auth", "pass"])
            lines.append(f"HEADERS: {json.dumps(masked)}")
        if req.params:
            lines.append(f"PARAMS: {json.dumps(req.params)}")
        if req.body:
            # Cap the body preview at 1500 chars to keep the prompt compact.
            snippet = req.body[:1500] + ("…" if len(req.body) > 1500 else "")
            lines.append(f"BODY ({req.body_type}):\n{snippet}")
        if req.content_type:
            lines.append(f"CONTENT-TYPE: {req.content_type}")
        if req.test_script:
            lines.append(f"TEST SCRIPT:\n{req.test_script}")

    if resp:
        if resp.error:
            lines.append(f"\nRESPONSE ERROR: {resp.error}")
        else:
            lines.append(f"\nRESPONSE: {resp.status} {resp.reason} ({resp.elapsed_ms:.0f} ms)")
        ct = (resp.headers or {}).get("content-type", (resp.headers or {}).get("Content-Type", ""))
        if ct:
            lines.append(f"RESPONSE CONTENT-TYPE: {ct}")
        if resp.body:
            # Response bodies get a larger 4000-char preview window.
            snippet = resp.body[:4000] + ("…" if len(resp.body) > 4000 else "")
            lines.append(f"RESPONSE BODY:\n{snippet}")

    if env_vars:
        masked_env = _mask(env_vars, ["key", "token", "secret", "password", "pass"])
        lines.append(f"\nENVIRONMENT VARIABLES: {json.dumps(masked_env)}")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def stream_chat(messages: list[dict], context: str = "", chunk_cb=None) -> str:
    """
    Stream a multi-turn conversation to Claude.
    messages: list of {"role": "user"|"assistant", "content": str}
    chunk_cb(chunk: str): called for each streamed text chunk
    Returns full assistant response text.
    Raises AIError on failure.
    """
    api_key = get_api_key()
    if not api_key:
        raise AIError(
            "No Anthropic API key configured.\n"
            "Go to Tools → AI Assistant → Settings to add your key."
        )

    # Append the live request/response context so answers are grounded in
    # what the user is currently editing.
    system = _SYSTEM_PROMPT
    if context:
        system += f"\n\n## Current Request Context\n{context}"

    headers = {
        "x-api-key": api_key,
        "anthropic-version": "2023-06-01",
        "content-type": "application/json",
    }
    payload = {
        "model": "claude-opus-4-6",
        "max_tokens": 2048,
        "system": system,
        # Cap conversation history so the payload stays bounded.
        "messages": messages[-MAX_HISTORY_MSGS:],
    }

    full_text = ""
    try:
        with httpx.stream(
            "POST",
            "https://api.anthropic.com/v1/messages",
            headers=headers,
            json=payload,
            timeout=60.0,
        ) as resp:
            if resp.status_code != 200:
                # A streamed response body must be read() before access.
                body = resp.read().decode()
                raise AIError(f"Claude API error {resp.status_code}: {body[:400]}")

            # Server-sent events: only "data: {...}" lines carry payloads.
            for line in resp.iter_lines():
                if not line.startswith("data:"):
                    continue
                data_str = line[5:].strip()
                if data_str == "[DONE]":
                    break
                try:
                    event = json.loads(data_str)
                    delta = event.get("delta", {})
                    # Only text deltas contribute to the visible answer;
                    # other streaming event types are ignored.
                    if delta.get("type") == "text_delta":
                        chunk = delta.get("text", "")
                        full_text += chunk
                        if chunk_cb:
                            chunk_cb(chunk)
                except json.JSONDecodeError:
                    # Non-JSON event/keep-alive lines are skipped.
                    continue

    except httpx.TimeoutException:
        raise AIError("Request timed out. Try a shorter question or check your connection.")
    except httpx.RequestError as e:
        raise AIError(f"Network error: {e}")

    return full_text
|
||||
|
||||
|
||||
def parse_apply_blocks(text: str) -> list[dict]:
    """Extract every ```apply:TYPE ...``` fenced block from AI response text.

    Returns a list of {"type": ..., "content": ...} dicts in document order.
    """
    pattern = re.compile(r"```apply:(\w+)\n(.*?)```", re.DOTALL)
    return [
        {"type": kind, "content": body.strip()}
        for kind, body in pattern.findall(text)
    ]
|
||||
|
||||
|
||||
def strip_apply_blocks(text: str) -> str:
    """Remove apply fences from display text, leaving the explanation."""
    without_fences = re.sub(r"```apply:\w+\n.*?```", "", text, flags=re.DOTALL)
    return without_fences.strip()
|
||||
219
app/core/ai_client.py
Normal file
219
app/core/ai_client.py
Normal file
@@ -0,0 +1,219 @@
|
||||
"""APIClient - Agent — Claude AI integration."""
|
||||
import json
|
||||
import re
|
||||
import httpx
|
||||
|
||||
from app.core import storage
|
||||
|
||||
# Max characters to send to Claude (roughly 60k tokens)
|
||||
_MAX_CONTENT_CHARS = 80_000
|
||||
|
||||
|
||||
def _strip_html(html: str) -> str:
|
||||
"""Strip HTML tags and collapse whitespace for cleaner AI input."""
|
||||
# Remove script/style blocks entirely
|
||||
html = re.sub(r"<(script|style)[^>]*>.*?</(script|style)>", " ", html, flags=re.S | re.I)
|
||||
# Remove HTML tags
|
||||
html = re.sub(r"<[^>]+>", " ", html)
|
||||
# Decode common entities
|
||||
html = (html
|
||||
.replace("&", "&").replace("<", "<").replace(">", ">")
|
||||
.replace(""", '"').replace("'", "'").replace(" ", " "))
|
||||
# Collapse whitespace
|
||||
html = re.sub(r"\s{3,}", "\n\n", html)
|
||||
return html.strip()
|
||||
|
||||
_SYSTEM_PROMPT = """\
|
||||
You are an expert API documentation analyzer for APIClient - Agent.
|
||||
Given API documentation (which may be a spec, a web page, framework docs, or raw text),
|
||||
extract or infer all useful API endpoints and return structured JSON.
|
||||
|
||||
Return ONLY valid JSON — no markdown, no commentary, just the JSON object.
|
||||
|
||||
Schema:
|
||||
{
|
||||
"collection_name": "API Name",
|
||||
"base_url": "https://api.example.com",
|
||||
"auth_type": "bearer|basic|apikey|none",
|
||||
"doc_type": "openapi|rest|framework|graphql|unknown",
|
||||
"endpoints": [
|
||||
{
|
||||
"name": "Human readable name",
|
||||
"method": "GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS",
|
||||
"path": "/v1/resource",
|
||||
"description": "What this endpoint does",
|
||||
"headers": {"Header-Name": "value or {{variable}}"},
|
||||
"params": {"param_name": "example_value"},
|
||||
"body": "",
|
||||
"body_type": "raw|form-urlencoded|form-data",
|
||||
"content_type": "application/json",
|
||||
"test_script": "pm.test('Status 200', lambda: pm.response.to_have_status(200))"
|
||||
}
|
||||
],
|
||||
"environment_variables": {
|
||||
"base_url": "https://api.example.com",
|
||||
"token": ""
|
||||
},
|
||||
"notes": "Any important setup notes for the user"
|
||||
}
|
||||
|
||||
Rules:
|
||||
- Use {{variable_name}} for ALL dynamic values (tokens, IDs, model names, etc.)
|
||||
- Always output realistic example values for query params and bodies
|
||||
- Generate a test_script for every endpoint
|
||||
- Detect auth pattern and add the correct header to every endpoint
|
||||
- If the documentation is a FRAMEWORK (e.g. it documents URL patterns like
|
||||
{domain}/{endpoint}/{model} rather than fixed paths), do the following:
|
||||
* Set doc_type to "framework"
|
||||
* Use {{base_url}} as the domain placeholder
|
||||
* Use {{model}} as a placeholder for the resource/model name
|
||||
* Generate one endpoint per HTTP method the framework supports (GET list,
|
||||
GET single, POST create, PATCH update, DELETE delete, plus any special ops)
|
||||
* Set notes explaining that the user must replace {{model}} with actual model names
|
||||
e.g. "res.partner", "sale.order", "product.template" etc.
|
||||
- If it is a GRAPHQL API, generate a POST /graphql endpoint with example query body
|
||||
- If auth options are shown (API key, OAuth, Basic), include ALL variants as separate
|
||||
environment variables so the user can choose
|
||||
- Keep paths clean — strip trailing slashes, normalise to lowercase
|
||||
"""
|
||||
|
||||
|
||||
class AIError(Exception):
    """Raised when AI doc analysis fails: missing key, fetch/network/API error, or unparsable JSON."""
    pass
|
||||
|
||||
|
||||
def get_api_key() -> str:
    """Return the stored Anthropic API key, or "" when none is configured."""
    return storage.get_setting("anthropic_api_key", "")
|
||||
|
||||
|
||||
def set_api_key(key: str):
    """Persist the Anthropic API key (surrounding whitespace is stripped)."""
    storage.set_setting("anthropic_api_key", key.strip())
|
||||
|
||||
|
||||
def analyze_docs(content: str, progress_cb=None) -> dict:
    """
    Send API documentation content to Claude and return parsed collection dict.
    progress_cb(message: str) is called with status updates during streaming.
    Raises AIError on failure.
    """
    api_key = get_api_key()
    if not api_key:
        raise AIError("No Anthropic API key configured. Go to Tools → AI Assistant → Settings.")

    if progress_cb:
        progress_cb("Sending to Claude AI…")

    headers = {
        "x-api-key": api_key,
        "anthropic-version": "2023-06-01",
        "content-type": "application/json",
    }
    payload = {
        "model": "claude-opus-4-6",
        "max_tokens": 8192,
        "system": _SYSTEM_PROMPT,
        "messages": [{"role": "user", "content": content}],
    }

    full_text = ""
    try:
        # Stream the reply (SSE) so progress can be reported as tokens arrive.
        with httpx.stream(
            "POST",
            "https://api.anthropic.com/v1/messages",
            headers=headers,
            json=payload,
            timeout=120.0,
        ) as resp:
            if resp.status_code != 200:
                # A streamed response body must be read() before access.
                body = resp.read().decode()
                raise AIError(f"API error {resp.status_code}: {body[:300]}")

            # Server-sent events: only "data: {...}" lines carry payloads.
            for line in resp.iter_lines():
                if not line.startswith("data:"):
                    continue
                data_str = line[5:].strip()
                if data_str == "[DONE]":
                    break
                try:
                    event = json.loads(data_str)
                    delta = event.get("delta", {})
                    if delta.get("type") == "text_delta":
                        chunk = delta.get("text", "")
                        full_text += chunk
                        # Throttle progress callbacks to roughly one per
                        # 500 received characters.
                        if progress_cb and len(full_text) % 500 < len(chunk):
                            progress_cb(f"Receiving response… ({len(full_text)} chars)")
                except json.JSONDecodeError:
                    # Non-JSON event/keep-alive lines are skipped.
                    continue

    except httpx.TimeoutException:
        raise AIError("Request timed out. The documentation may be too large.")
    except httpx.RequestError as e:
        raise AIError(f"Network error: {e}")

    if progress_cb:
        progress_cb("Parsing AI response…")

    return _parse_ai_response(full_text)
|
||||
|
||||
|
||||
def _parse_ai_response(text: str) -> dict:
|
||||
"""Extract and validate the JSON from the AI response."""
|
||||
text = text.strip()
|
||||
|
||||
# Strip markdown code fences if present
|
||||
if text.startswith("```"):
|
||||
lines = text.split("\n")
|
||||
text = "\n".join(lines[1:-1] if lines[-1].strip() == "```" else lines[1:])
|
||||
|
||||
try:
|
||||
data = json.loads(text)
|
||||
except json.JSONDecodeError:
|
||||
# Try to find JSON object in the text
|
||||
start = text.find("{")
|
||||
end = text.rfind("}") + 1
|
||||
if start >= 0 and end > start:
|
||||
try:
|
||||
data = json.loads(text[start:end])
|
||||
except json.JSONDecodeError:
|
||||
raise AIError("AI returned invalid JSON. Try again or simplify the documentation.")
|
||||
else:
|
||||
raise AIError("AI response did not contain a JSON object.")
|
||||
|
||||
# Validate required keys
|
||||
if "endpoints" not in data:
|
||||
raise AIError("AI response missing 'endpoints' key.")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def fetch_url_content(url: str) -> str:
    """Fetch content from a URL, strip HTML if needed, and truncate if too large."""
    try:
        resp = httpx.get(url, follow_redirects=True, timeout=30.0, headers={
            "User-Agent": "EKIKA-API-Client/2.0 (documentation-fetcher)",
            "Accept": "application/json, text/yaml, text/html, */*",
        })
        resp.raise_for_status()
    except httpx.HTTPStatusError as e:
        raise AIError(f"HTTP {e.response.status_code} fetching URL.")
    except httpx.RequestError as e:
        raise AIError(f"Could not fetch URL: {e}")

    ct = resp.headers.get("content-type", "")
    text = resp.text

    # If HTML page — strip tags for cleaner AI input, unless the body is
    # actually an inline JSON/YAML spec served with an HTML content-type.
    if "html" in ct and not _looks_like_spec(text):
        text = _strip_html(text)

    # Truncate if too large (keeps the Claude payload within token limits).
    if len(text) > _MAX_CONTENT_CHARS:
        text = text[:_MAX_CONTENT_CHARS] + "\n\n[Content truncated for length]"

    return text
|
||||
|
||||
|
||||
def _looks_like_spec(text: str) -> bool:
|
||||
"""Quick check: is this likely a JSON/YAML OpenAPI spec?"""
|
||||
t = text.lstrip()
|
||||
return t.startswith("{") or t.startswith("openapi:") or t.startswith("swagger:")
|
||||
163
app/core/code_gen.py
Normal file
163
app/core/code_gen.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""APIClient - Agent — Code snippet generators."""
|
||||
import json
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from app.models import HttpRequest
|
||||
|
||||
|
||||
def _qs(req: HttpRequest) -> str:
|
||||
if not req.params:
|
||||
return req.url
|
||||
sep = "&" if "?" in req.url else "?"
|
||||
return req.url + sep + urlencode(req.params)
|
||||
|
||||
|
||||
def to_curl(req: HttpRequest) -> str:
    """Render the request as a multi-line curl command string."""
    pieces = [f"curl -X {req.method}", f' "{_qs(req)}"']
    pieces.extend(f" -H '{k}: {v}'" for k, v in req.headers.items())
    if req.body:
        # Escape single quotes for POSIX shells: ' -> '\''
        escaped = req.body.replace("'", r"'\''")
        pieces.append(f" -d '{escaped}'")
    if not req.ssl_verify:
        pieces.append(" --insecure")
    return " \\\n".join(pieces)
|
||||
|
||||
|
||||
def to_python_requests(req: HttpRequest) -> str:
    """Render the request as a runnable `requests` script."""
    hdrs = dict(req.headers)
    # Generated scripts default to JSON when a body is present but untyped.
    if req.body and "Content-Type" not in hdrs:
        hdrs["Content-Type"] = "application/json"

    out = ["import requests", "", f'url = "{req.url}"']
    if hdrs:
        out.append(f"headers = {json.dumps(hdrs, indent=4)}")
    if req.params:
        out.append(f"params = {json.dumps(req.params, indent=4)}")
    out.append("")

    args = ["url"]
    if hdrs:
        args.append("headers=headers")
    if req.params:
        args.append("params=params")
    if req.body:
        out.append(f"payload = {json.dumps(req.body)}")
        args.append("data=payload")
    if not req.ssl_verify:
        args.append("verify=False")

    out.append(f"response = requests.{req.method.lower()}({', '.join(args)})")
    out.append("")
    out.append("print(response.status_code)")
    out.append("print(response.json())")
    return "\n".join(out)
|
||||
|
||||
|
||||
def to_python_httpx(req: HttpRequest) -> str:
    """Render the request as a runnable `httpx` script.

    Returns Python source text; SSL-verification opt-out is emitted on the
    Client constructor, where httpx actually accepts it.
    """
    headers = dict(req.headers)
    lines = ["import httpx", ""]
    lines.append(f'url = "{req.url}"')
    if headers:
        lines.append(f"headers = {json.dumps(headers, indent=4)}")
    if req.params:
        lines.append(f"params = {json.dumps(req.params, indent=4)}")
    lines.append("")

    call_args = ["url"]
    if headers:
        call_args.append("headers=headers")
    if req.params:
        call_args.append("params=params")
    if req.body:
        lines.append(f"payload = {json.dumps(req.body)}")
        call_args.append("content=payload.encode()")

    # httpx only accepts `verify` on the Client constructor — passing it to
    # client.get()/post() (as the old generator did) raises TypeError when
    # the generated script runs.
    client_ctor = "httpx.Client()" if req.ssl_verify else "httpx.Client(verify=False)"
    lines.append(f"with {client_ctor} as client:")
    lines.append(f"    response = client.{req.method.lower()}({', '.join(call_args)})")
    lines.append("    print(response.status_code)")
    lines.append("    print(response.json())")
    return "\n".join(lines)
|
||||
|
||||
|
||||
def to_javascript_fetch(req: HttpRequest) -> str:
    """Render the request as an async JavaScript fetch() snippet."""
    fetch_options: dict = {"method": req.method}
    if req.headers:
        fetch_options["headers"] = req.headers
    if req.body:
        fetch_options["body"] = req.body

    opts_js = json.dumps(fetch_options, indent=2)
    return (
        f'const response = await fetch("{_qs(req)}", {opts_js});'
        "\n\n"
        "const data = await response.json();"
        "\nconsole.log(response.status, data);"
    )
|
||||
|
||||
|
||||
def to_javascript_axios(req: HttpRequest) -> str:
    """Render the request as a JavaScript axios snippet.

    Emits the `axios(config)` call form: the old method-shortcut form
    (`axios.post(url, config)`) passed the config object as the request
    BODY for body-carrying verbs, because axios's signature there is
    `axios.post(url, data, config)`.
    """
    # method/url live inside the config so a single axios(config) call is
    # correct for every HTTP verb.
    config: dict = {"method": req.method.lower(), "url": req.url}
    if req.headers:
        config["headers"] = req.headers
    if req.params:
        config["params"] = req.params
    if req.body:
        config["data"] = req.body

    lines = ["const axios = require('axios');", ""]
    lines.append(f"const config = {json.dumps(config, indent=2)};")
    lines.append("")
    lines.append("const response = await axios(config);")
    lines.append("console.log(response.status, response.data);")
    return "\n".join(lines)
|
||||
|
||||
|
||||
def to_ruby(req: HttpRequest) -> str:
    """Render the request as a runnable Ruby Net::HTTP script."""
    script = [
        "require 'net/http'",
        "require 'uri'",
        "require 'json'",
        "",
        f'uri = URI.parse("{_qs(req)}")',
        "http = Net::HTTP.new(uri.host, uri.port)",
        "http.use_ssl = uri.scheme == 'https'",
    ]
    if not req.ssl_verify:
        script.append("http.verify_mode = OpenSSL::SSL::VERIFY_NONE")
    script.append("")

    # Map the HTTP verb onto the matching Net::HTTP request class.
    request_classes = {
        "GET": "Net::HTTP::Get", "POST": "Net::HTTP::Post",
        "PUT": "Net::HTTP::Put", "PATCH": "Net::HTTP::Patch",
        "DELETE": "Net::HTTP::Delete", "HEAD": "Net::HTTP::Head",
    }
    request_class = request_classes.get(req.method, "Net::HTTP::Get")
    script.append(f"request = {request_class}.new(uri.request_uri)")
    for name, value in req.headers.items():
        script.append(f'request["{name}"] = "{value}"')
    if req.body:
        script.append(f"request.body = {json.dumps(req.body)}")
    script.extend([
        "",
        "response = http.request(request)",
        "puts response.code",
        "puts response.body",
    ])
    return "\n".join(script)
|
||||
|
||||
|
||||
# Display name → generator function; consumed by the code-generator UI.
GENERATORS = {
    "curl": to_curl,
    "Python (requests)": to_python_requests,
    "Python (httpx)": to_python_httpx,
    "JavaScript (fetch)": to_javascript_fetch,
    "JavaScript (axios)": to_javascript_axios,
    "Ruby (Net::HTTP)": to_ruby,
}
|
||||
613
app/core/ekika_odoo_generator.py
Normal file
613
app/core/ekika_odoo_generator.py
Normal file
@@ -0,0 +1,613 @@
|
||||
"""EKIKA Odoo API Framework — Direct collection generator.
|
||||
|
||||
Generates complete Postman-style collections from the EKIKA api_framework module
|
||||
without requiring any AI API calls. All URL patterns, body formats, auth headers,
|
||||
and special operations are derived directly from the framework documentation.
|
||||
|
||||
Supported API kinds: JSON-API, REST JSON, GraphQL, Custom REST JSON
|
||||
Supported auth types: API Key, Basic Auth, User Credentials, OAuth2, JWT, Public
|
||||
"""
|
||||
import json
|
||||
import re
|
||||
|
||||
|
||||
# ── Constants ─────────────────────────────────────────────────────────────────

# API kinds the generator can build collections for.
API_KINDS = ["JSON-API", "REST JSON", "GraphQL", "Custom REST JSON"]

# Authentication schemes supported by the EKIKA framework.
AUTH_TYPES = [
    "API Key",
    "Basic Auth",
    "User Credentials",
    "OAuth2",
    "JWT",
    "Public",
]

# Operations the user can select when generating a collection.
OPERATIONS = [
    "List Records",
    "Get Single Record",
    "Create Record",
    "Update Record",
    "Delete Record",
    "Execute Method",
    "Export to Excel",
    "Generate Report",
    "Get Fields",
    "Check Access Rights",
]

# Content-Type per API kind
_CT = {
    "JSON-API": "application/vnd.api+json",
    "REST JSON": "application/json",
    "GraphQL": "application/json",
    "Custom REST JSON": "application/json",
}
|
||||
|
||||
|
||||
# ── Auth header builder ───────────────────────────────────────────────────────
|
||||
|
||||
def _auth_headers(auth_type: str) -> dict:
|
||||
if auth_type == "API Key":
|
||||
return {"x-api-key": "{{api_key}}"}
|
||||
if auth_type == "Basic Auth":
|
||||
return {"Authorization": "Basic {{base64_user_pass}}"}
|
||||
if auth_type == "User Credentials":
|
||||
return {"username": "{{username}}", "password": "{{password}}"}
|
||||
if auth_type == "OAuth2":
|
||||
return {"Authorization": "Bearer {{access_token}}"}
|
||||
if auth_type == "JWT":
|
||||
return {"Authorization": "Bearer {{jwt_token}}"}
|
||||
return {} # Public
|
||||
|
||||
|
||||
def _env_vars(instance_url: str, auth_type: str, extra: dict = None) -> dict:
|
||||
base = instance_url.rstrip("/")
|
||||
env: dict = {"base_url": base}
|
||||
|
||||
if auth_type == "API Key":
|
||||
env["api_key"] = extra.get("api_key", "") if extra else ""
|
||||
elif auth_type == "Basic Auth":
|
||||
env["base64_user_pass"] = extra.get("base64_user_pass", "") if extra else ""
|
||||
env["username"] = extra.get("username", "") if extra else ""
|
||||
env["password"] = extra.get("password", "") if extra else ""
|
||||
elif auth_type == "User Credentials":
|
||||
env["username"] = extra.get("username", "") if extra else ""
|
||||
env["password"] = extra.get("password", "") if extra else ""
|
||||
elif auth_type == "OAuth2":
|
||||
env["access_token"] = extra.get("access_token", "") if extra else ""
|
||||
env["refresh_token"] = ""
|
||||
env["client_id"] = ""
|
||||
env["client_secret"] = ""
|
||||
elif auth_type == "JWT":
|
||||
env["jwt_token"] = extra.get("jwt_token", "") if extra else ""
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def _clean_endpoint(endpoint: str) -> str:
|
||||
"""Normalise endpoint slug — ensure leading slash, strip trailing slash."""
|
||||
ep = endpoint.strip().strip("/")
|
||||
return f"/{ep}" if ep else "/api"
|
||||
|
||||
|
||||
def _model_gql(model: str) -> str:
|
||||
"""Convert 'sale.order' → 'sale_order' for GraphQL field names."""
|
||||
return model.replace(".", "_")
|
||||
|
||||
|
||||
# ── Request builders ──────────────────────────────────────────────────────────
|
||||
|
||||
def _jsonapi_test(status: int = 200) -> str:
|
||||
return (
|
||||
f"pm.test('Status {status}', lambda: pm.response.to_have_status({status}))\n"
|
||||
f"pm.test('Has data', lambda: expect(pm.response.json()).to_have_key('data'))"
|
||||
)
|
||||
|
||||
|
||||
def _rest_test(status: int = 200) -> str:
|
||||
return (
|
||||
f"pm.test('Status {status}', lambda: pm.response.to_have_status({status}))\n"
|
||||
f"pm.test('Has body', lambda: expect(pm.response.text).to_be_truthy())"
|
||||
)
|
||||
|
||||
|
||||
def _build_jsonapi_endpoints(base_ep: str, model: str, headers: dict,
                             operations: list[str]) -> list[dict]:
    """Build endpoint dicts for the JSON-API kind.

    base_ep: normalised endpoint prefix (e.g. "/api")
    model: Odoo model name (e.g. "sale.order")
    headers: auth headers shared by every endpoint
    operations: subset of OPERATIONS selected by the user
    Returns a list of endpoint dicts for collection assembly.
    Note: "{{{{id}}}}" in f-strings renders as the literal {{id}} placeholder.
    """
    ct = _CT["JSON-API"]
    ep_path = f"{base_ep}/{model}"
    eps = []

    if "List Records" in operations:
        # JSON-API pagination + sparse-fieldset query parameters.
        eps.append({
            "name": f"List {model}",
            "method": "GET",
            "path": ep_path,
            "headers": {**headers, "Accept": ct},
            "params": {
                "page[number]": "1",
                "page[size]": "10",
                f"fields[{model}]": "id,name,display_name",
                "sort": "id",
            },
            "body": "",
            "body_type": "raw",
            "content_type": "",
            "test_script": _jsonapi_test(200),
            "description": f"Fetch paginated list of {model} records with field selection, sorting and filtering.",
        })

    if "Get Single Record" in operations:
        eps.append({
            "name": f"Get {model} by ID",
            "method": "GET",
            "path": f"{ep_path}/{{{{id}}}}",
            "headers": {**headers, "Accept": ct},
            "params": {f"fields[{model}]": "id,name,display_name"},
            "body": "",
            "body_type": "raw",
            "content_type": "",
            "test_script": _jsonapi_test(200),
            "description": f"Fetch a single {model} record by its database ID.",
        })

    if "Create Record" in operations:
        # JSON-API create body: {"data": {"type": ..., "attributes": ...}}.
        body = json.dumps({
            "data": {
                "type": model,
                "attributes": {"name": f"New {model.split('.')[-1].replace('_', ' ').title()}"},
            }
        }, indent=2)
        eps.append({
            "name": f"Create {model}",
            "method": "POST",
            "path": ep_path,
            "headers": {**headers, "Content-Type": ct, "Accept": ct},
            "params": {},
            "body": body,
            "body_type": "raw",
            "content_type": ct,
            "test_script": _jsonapi_test(201),
            "description": f"Create a new {model} record.",
        })

    if "Update Record" in operations:
        # Updates also carry the record id inside the JSON-API body.
        body = json.dumps({
            "data": {
                "type": model,
                "id": "{{id}}",
                "attributes": {"name": "Updated Name"},
            }
        }, indent=2)
        eps.append({
            "name": f"Update {model}",
            "method": "PATCH",
            "path": f"{ep_path}/{{{{id}}}}",
            "headers": {**headers, "Content-Type": ct, "Accept": ct},
            "params": {},
            "body": body,
            "body_type": "raw",
            "content_type": ct,
            "test_script": _jsonapi_test(200),
            "description": f"Update an existing {model} record by ID.",
        })

    if "Delete Record" in operations:
        eps.append({
            "name": f"Delete {model}",
            "method": "DELETE",
            "path": f"{ep_path}/{{{{id}}}}",
            "headers": {**headers, "Accept": ct},
            "params": {},
            "body": "",
            "body_type": "raw",
            "content_type": "",
            "test_script": _jsonapi_test(200),
            "description": f"Delete a {model} record by ID.",
        })

    if "Execute Method" in operations:
        # Calls an arbitrary ORM method on one record via /execute.
        body = json.dumps({
            "data": {
                "type": model,
                "id": "{{id}}",
                "attributes": {
                    "method": "action_confirm",
                    "args": [],
                    "kwargs": {},
                },
            }
        }, indent=2)
        eps.append({
            "name": f"Execute Method on {model}",
            "method": "POST",
            "path": f"{ep_path}/{{{{id}}}}/execute",
            "headers": {**headers, "Content-Type": ct, "Accept": ct},
            "params": {},
            "body": body,
            "body_type": "raw",
            "content_type": ct,
            "test_script": _rest_test(200),
            "description": f"Execute an ORM method on a {model} record. Change 'action_confirm' to any valid method name.",
        })

    if "Export to Excel" in operations:
        # Empty "ids" exports all records for the selected fields.
        body = json.dumps({
            "data": {
                "type": model,
                "attributes": {
                    "fields": ["id", "name", "display_name"],
                    "ids": [],
                },
            }
        }, indent=2)
        eps.append({
            "name": f"Export {model} to Excel",
            "method": "POST",
            "path": f"{base_ep}/export",
            "headers": {**headers, "Content-Type": ct, "Accept": ct},
            "params": {},
            "body": body,
            "body_type": "raw",
            "content_type": ct,
            "test_script": _rest_test(200),
            "description": "Returns a Base64-encoded Excel file with the specified fields.",
        })

    if "Generate Report" in operations:
        # Report name is guessed from the model, e.g. sale.report_order —
        # presumably a naming convention of the target framework; the user
        # may need to adjust it.
        body = json.dumps({
            "data": {
                "type": model,
                "attributes": {
                    "report": f"{model.split('.')[0]}.report_{model.split('.')[-1]}",
                    "ids": ["{{id}}"],
                    "format": "pdf",
                },
            }
        }, indent=2)
        eps.append({
            "name": f"Generate Report for {model}",
            "method": "POST",
            "path": f"{base_ep}/report",
            "headers": {**headers, "Content-Type": ct, "Accept": ct},
            "params": {},
            "body": body,
            "body_type": "raw",
            "content_type": ct,
            "test_script": _rest_test(200),
            "description": "Generate a PDF/HTML/TEXT report. Change format to 'html' or 'text' as needed.",
        })

    if "Get Fields" in operations:
        eps.append({
            "name": f"Get Fields — {model}",
            "method": "GET",
            "path": f"{ep_path}/fields_get",
            "headers": {**headers, "Accept": ct},
            "params": {"attributes": "string,type,required,help"},
            "body": "",
            "body_type": "raw",
            "content_type": "",
            "test_script": _rest_test(200),
            "description": f"Get all field definitions for {model}.",
        })

    if "Check Access Rights" in operations:
        eps.append({
            "name": f"Check Access — {model}",
            "method": "GET",
            "path": f"{ep_path}/check_access_rights",
            "headers": {**headers, "Accept": ct},
            "params": {"operation": "read"},
            "body": "",
            "body_type": "raw",
            "content_type": "",
            "test_script": _rest_test(200),
            "description": "Check if the current user has the specified access right (read/write/create/unlink).",
        })

    return eps
|
||||
|
||||
|
||||
def _build_restjson_endpoints(base_ep: str, model: str, headers: dict,
                              operations: list[str]) -> list[dict]:
    """Build REST JSON endpoint definition dicts for *model*.

    One request definition is produced per entry of *operations* that is
    recognized; the result feeds the collection generator.
    """
    ct = _CT["REST JSON"]
    collection_path = f"{base_ep}/{model}"
    record_path = f"{collection_path}/{{{{id}}}}"
    endpoints: list[dict] = []

    def _add(name: str, method: str, path: str, extra: dict, params: dict,
             body: str, desc: str, expect: int = 200, send_json: bool = False):
        # send_json=True marks requests that carry a JSON payload and
        # therefore advertise the REST JSON content type.
        endpoints.append({
            "name": name,
            "method": method,
            "path": path,
            "headers": {**headers, **extra},
            "params": params,
            "body": body,
            "body_type": "raw",
            "content_type": ct if send_json else "",
            "test_script": _rest_test(expect),
            "description": desc,
        })

    if "List Records" in operations:
        _add(f"List {model}", "GET", collection_path, {"Accept": ct},
             {"page": "1", "limit": "10", "fields": "id,name,display_name", "sort": "id"},
             "", f"Fetch paginated list of {model} records.")

    if "Get Single Record" in operations:
        _add(f"Get {model} by ID", "GET", record_path, {"Accept": ct},
             {"fields": "id,name,display_name"},
             "", f"Fetch a single {model} record by ID.")

    if "Create Record" in operations:
        payload = json.dumps({"name": f"New {model.split('.')[-1].title()}"}, indent=2)
        _add(f"Create {model}", "POST", collection_path, {"Content-Type": ct}, {},
             payload, f"Create a new {model} record.", expect=201, send_json=True)

    if "Update Record" in operations:
        payload = json.dumps({"name": "Updated Name"}, indent=2)
        _add(f"Update {model}", "PATCH", record_path, {"Content-Type": ct}, {},
             payload, f"Update an existing {model} record.", send_json=True)

    if "Delete Record" in operations:
        _add(f"Delete {model}", "DELETE", record_path, {}, {},
             "", f"Delete a {model} record by ID.")

    if "Execute Method" in operations:
        payload = json.dumps({"method": "action_confirm", "args": [], "kwargs": {}}, indent=2)
        _add(f"Execute Method on {model}", "POST", f"{record_path}/execute",
             {"Content-Type": ct}, {},
             payload, "Execute an ORM method on a record.", send_json=True)

    if "Get Fields" in operations:
        _add(f"Get Fields — {model}", "GET", f"{collection_path}/fields_get", {}, {},
             "", f"Get field definitions for {model}.")

    return endpoints
|
||||
|
||||
|
||||
def _build_graphql_endpoints(base_ep: str, model: str, headers: dict,
                             operations: list[str]) -> list[dict]:
    """Build GraphQL endpoint definition dicts for *model*.

    Every operation is a POST of a {"query": ...} payload to the single
    /graphql endpoint; only the document text differs per operation.
    """
    ct = _CT["GraphQL"]
    gql = _model_gql(model)
    path = f"{base_ep}/graphql"
    endpoints: list[dict] = []

    def _add(name: str, document: str, desc: str):
        endpoints.append({
            "name": name,
            "method": "POST",
            "path": path,
            "headers": {**headers, "Content-Type": ct},
            "params": {},
            "body": json.dumps({"query": document}, indent=2),
            "body_type": "raw",
            "content_type": ct,
            "test_script": _rest_test(200),
            "description": desc,
        })

    if "List Records" in operations:
        doc = "\n".join([
            "query {",
            f"  {gql}(",
            '    filter: ""',
            "    pageSize: 10",
            "    pageNumber: 1",
            "  ) {",
            "    id",
            "    name",
            "    display_name",
            "  }",
            "}",
        ])
        _add(f"GraphQL — List {model}", doc,
             f"GraphQL query to list {model} records.")

    if "Get Single Record" in operations:
        doc = "\n".join([
            "query {",
            f"  {gql}(id: {{{{id}}}}) {{",
            "    id",
            "    name",
            "    display_name",
            "  }",
            "}",
        ])
        _add(f"GraphQL — Get {model} by ID", doc,
             f"GraphQL query to get a single {model} record.")

    if "Create Record" in operations:
        doc = "\n".join([
            "mutation {",
            f"  create_{gql}(",
            "    attributes: {",
            '      name: "New Record"',
            "    }",
            "  ) {",
            "    id",
            "    name",
            "  }",
            "}",
        ])
        _add(f"GraphQL — Create {model}", doc,
             f"GraphQL mutation to create a {model} record.")

    if "Update Record" in operations:
        doc = "\n".join([
            "mutation {",
            f"  update_{gql}(",
            "    id: {{id}}",
            "    attributes: {",
            '      name: "Updated Name"',
            "    }",
            "  ) {",
            "    id",
            "    name",
            "  }",
            "}",
        ])
        _add(f"GraphQL — Update {model}", doc,
             f"GraphQL mutation to update a {model} record.")

    if "Delete Record" in operations:
        doc = "\n".join([
            "mutation {",
            f"  delete_{gql}(id: {{{{id}}}}) {{",
            "    id",
            "  }",
            "}",
        ])
        _add(f"GraphQL — Delete {model}", doc,
             f"GraphQL mutation to delete a {model} record.")

    return endpoints
|
||||
|
||||
|
||||
# ── Main entry point ──────────────────────────────────────────────────────────
|
||||
|
||||
def generate_collection(
    instance_url: str,
    endpoint: str,
    api_kind: str,
    auth_type: str,
    auth_creds: dict,
    models: list[str],
    operations: list[str],
    collection_name: str = "",
) -> dict:
    """
    Generate an EKIKA API Framework collection dict ready for storage.

    Returns same structure as ai_client.analyze_docs().
    """
    base_url = instance_url.rstrip("/")
    base_ep = _clean_endpoint(endpoint)
    headers = _auth_headers(auth_type)
    env_vars = _env_vars(instance_url, auth_type, auth_creds)

    # Dispatch table; any unknown kind (e.g. "Custom REST JSON") falls
    # back to the plain REST JSON builder.
    builders = {
        "JSON-API": _build_jsonapi_endpoints,
        "REST JSON": _build_restjson_endpoints,
        "GraphQL": _build_graphql_endpoints,
    }
    build = builders.get(api_kind, _build_restjson_endpoints)

    all_endpoints: list[dict] = []
    for raw_model in models:
        model = raw_model.strip()
        if model:
            all_endpoints.extend(build(base_ep, model, headers, operations))

    # Prefix relative paths with the {{base_url}} environment variable.
    for ep in all_endpoints:
        if not ep["path"].startswith("http"):
            ep["url"] = f"{{{{base_url}}}}{ep['path']}"

    name = collection_name or f"EKIKA Odoo — {api_kind} — {', '.join(models[:3])}"

    return {
        "collection_name": name,
        "base_url": base_url,
        "auth_type": auth_type.lower().replace(" ", "_"),
        "doc_type": "ekika_odoo_framework",
        "endpoints": all_endpoints,
        "environment_variables": env_vars,
        "notes": (
            f"API Kind: {api_kind} | Auth: {auth_type}\n"
            f"Endpoint: {base_url}{base_ep}/{{model}}\n"
            f"Replace {{{{id}}}} with actual record IDs before sending."
        ),
        "_source": "ekika_odoo",
    }
|
||||
66
app/core/exporter.py
Normal file
66
app/core/exporter.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""Export collections to Postman Collection v2.1 JSON."""
|
||||
import json
|
||||
from app.models import HttpRequest
|
||||
from app.core import storage
|
||||
|
||||
|
||||
def export_collection(collection_id: int) -> str:
    """Serialize a stored collection to Postman Collection v2.1 JSON text.

    Raises ValueError when no collection with *collection_id* exists.
    """
    col = None
    for candidate in storage.get_collections():
        if candidate["id"] == collection_id:
            col = candidate
            break
    if col is None:
        raise ValueError(f"Collection {collection_id} not found")

    def to_postman_item(r: dict) -> dict:
        # Translate one stored request dict into a Postman "item" entry.
        raw_url = r.get("url", "")
        request = {
            "method": r.get("method", "GET"),
            "header": [{"key": k, "value": v} for k, v in r.get("headers", {}).items()],
            "url": {
                "raw": raw_url,
                "query": [{"key": k, "value": v} for k, v in r.get("params", {}).items()],
            },
        }

        body = r.get("body", "")
        if body:
            if r.get("body_type", "raw") == "urlencoded":
                # Stored urlencoded bodies are "k=v" pairs joined by "&".
                pairs = []
                for segment in body.split("&"):
                    if "=" in segment:
                        key, _, value = segment.partition("=")
                        pairs.append({"key": key, "value": value})
                request["body"] = {"mode": "urlencoded", "urlencoded": pairs}
            else:
                request["body"] = {"mode": "raw", "raw": body}

        return {"name": r.get("name") or raw_url, "request": request}

    # Top-level requests first, then one sub-item per folder.
    items = [to_postman_item(r) for r in storage.get_requests(collection_id)]
    for folder in storage.get_folders(collection_id):
        items.append({
            "name": folder["name"],
            "item": [to_postman_item(r)
                     for r in storage.get_requests(collection_id, folder["id"])],
        })

    return json.dumps({
        "info": {
            "name": col["name"],
            "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
        },
        "item": items,
    }, indent=2)
|
||||
161
app/core/http_client.py
Normal file
161
app/core/http_client.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""APIClient - Agent — HTTP client engine."""
|
||||
import re
|
||||
import base64
|
||||
from copy import deepcopy
|
||||
|
||||
import httpx
|
||||
|
||||
from app.models import HttpRequest, HttpResponse
|
||||
|
||||
|
||||
def resolve_variables(text: str, variables: dict) -> str:
    """Replace {{variable}} placeholders with environment values.

    Placeholders with no matching variable are left intact so callers can
    detect and report them.
    """
    if not text or not variables:
        return text

    def _substitute(match):
        name = match.group(1).strip()
        if name in variables:
            return str(variables[name])
        return match.group(0)

    return re.sub(r"\{\{(.+?)\}\}", _substitute, text)
|
||||
|
||||
|
||||
def apply_variables(req: HttpRequest, variables: dict) -> HttpRequest:
    """Return a deep copy of *req* with every {{variable}} resolved.

    The original request object is never mutated.
    """
    resolved = deepcopy(req)
    resolved.url = resolve_variables(resolved.url, variables)
    resolved.body = resolve_variables(resolved.body, variables)
    resolved.headers = {
        name: resolve_variables(value, variables)
        for name, value in resolved.headers.items()
    }
    resolved.params = {
        name: resolve_variables(value, variables)
        for name, value in resolved.params.items()
    }
    if resolved.auth_data:
        # Auth values may be non-strings (e.g. ints) — coerce before resolving.
        resolved.auth_data = {
            name: resolve_variables(str(value), variables)
            for name, value in resolved.auth_data.items()
        }
    return resolved
|
||||
|
||||
|
||||
def _build_auth_headers(req: HttpRequest) -> dict:
|
||||
headers = {}
|
||||
if req.auth_type == "bearer":
|
||||
token = req.auth_data.get("token", "")
|
||||
if token:
|
||||
headers["Authorization"] = f"Bearer {token}"
|
||||
elif req.auth_type == "basic":
|
||||
user = req.auth_data.get("username", "")
|
||||
pwd = req.auth_data.get("password", "")
|
||||
encoded = base64.b64encode(f"{user}:{pwd}".encode()).decode()
|
||||
headers["Authorization"] = f"Basic {encoded}"
|
||||
elif req.auth_type == "apikey":
|
||||
key = req.auth_data.get("key", "")
|
||||
value = req.auth_data.get("value", "")
|
||||
location = req.auth_data.get("in", "header")
|
||||
if location == "header" and key:
|
||||
headers[key] = value
|
||||
return headers
|
||||
|
||||
|
||||
def send_request(req: HttpRequest, variables: dict = None) -> HttpResponse:
    """Resolve variables, apply auth, send the request and wrap the outcome.

    Never raises: every failure mode (bad URL, DNS/connect failure, TLS,
    timeouts, redirect loops) is converted into an HttpResponse whose
    ``error`` field carries an actionable message for the UI.
    """
    r = req  # will be overwritten with resolved copy; kept here for exception handlers
    try:
        r = apply_variables(req, variables or {})

        # Check for unresolved variables
        unresolved = re.findall(r"\{\{(.+?)\}\}", r.url)
        if unresolved:
            return HttpResponse(
                error=f"Unresolved variable(s): {', '.join(unresolved)}. "
                "Go to Tools → Environments to define them."
            )

        # Auth-derived headers override manually-entered ones of the same name.
        headers = {**r.headers, **_build_auth_headers(r)}

        # Query params (merge URL params dict + API-key-in-query)
        params = r.params.copy()
        if r.auth_type == "apikey" and r.auth_data.get("in") == "query":
            params[r.auth_data.get("key", "")] = r.auth_data.get("value", "")

        # Build request body
        content = None
        data = None
        files = None

        if r.body_type == "raw" and r.body:
            content = r.body.encode()
            # Auto Content-Type: honour explicit override, then try to detect JSON
            if r.content_type:
                headers.setdefault("Content-Type", r.content_type)
            elif "Content-Type" not in headers:
                stripped = r.body.lstrip()
                if stripped.startswith(("{", "[")):
                    headers["Content-Type"] = "application/json"
                else:
                    headers["Content-Type"] = "text/plain"

        elif r.body_type == "urlencoded" and r.body:
            # Body is stored as "key=value" lines; httpx form-encodes the dict.
            pairs = {}
            for line in r.body.splitlines():
                if "=" in line:
                    k, _, v = line.partition("=")
                    pairs[k.strip()] = v.strip()
            data = pairs

        elif r.body_type == "form-data" and r.body:
            # Expect "key=value" lines; values starting with "@" treated as file paths
            pairs = {}
            for line in r.body.splitlines():
                if "=" in line:
                    k, _, v = line.partition("=")
                    pairs[k.strip()] = v.strip()
            data = pairs

        # Per-request timeout governs read/write; connect is capped at 10s
        # regardless so DNS/connect failures surface quickly.
        timeout = httpx.Timeout(
            connect=10.0,
            read=float(r.timeout),
            write=float(r.timeout),
            pool=5.0,
        )

        with httpx.Client(
            follow_redirects=True,
            timeout=timeout,
            verify=r.ssl_verify,
        ) as client:
            response = client.request(
                method=r.method,
                url=r.url,
                headers=headers,
                params=params or None,
                content=content,
                data=data,
                files=files,
            )
            body = response.text
            size_bytes = len(response.content)
            return HttpResponse(
                status=response.status_code,
                reason=response.reason_phrase,
                headers=dict(response.headers),
                body=body,
                elapsed_ms=response.elapsed.total_seconds() * 1000,
                size_bytes=size_bytes,
            )

    except httpx.InvalidURL:
        return HttpResponse(error=f"Invalid URL: {r.url}")
    except httpx.ConnectError as e:
        detail = str(e)
        # httpx can surface TLS failures as ConnectError; sniff the message
        # to give a more actionable hint than "connection refused".
        if "CERTIFICATE_VERIFY_FAILED" in detail or "certificate" in detail.lower() or "SSL" in detail:
            return HttpResponse(error=(
                f"SSL certificate error — could not connect to {r.url}\n\n"
                f"The server's certificate is not trusted or doesn't match the hostname.\n"
                f"Tip: disable SSL verification in the request Settings tab."
            ))
        return HttpResponse(error=f"Connection refused — could not reach {r.url}")
    except httpx.ConnectTimeout:
        return HttpResponse(error=f"Connection timed out after {req.timeout}s")
    except httpx.ReadTimeout:
        return HttpResponse(error=f"Read timed out — server took too long to respond")
    except httpx.SSLError as e:
        return HttpResponse(error=f"SSL error: {e}. Disable SSL verification if using a self-signed cert.")
    except httpx.TooManyRedirects:
        return HttpResponse(error="Too many redirects — possible redirect loop")
    except Exception as e:
        return HttpResponse(error=str(e))
|
||||
103
app/core/importer.py
Normal file
103
app/core/importer.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""Import from Postman Collection v2.1 JSON or curl command."""
|
||||
import json
|
||||
import re
|
||||
import shlex
|
||||
from app.models import HttpRequest
|
||||
|
||||
|
||||
def from_postman_collection(json_text: str) -> tuple[str, list[HttpRequest]]:
    """Returns (collection_name, list of HttpRequest).

    Folders are flattened: requests inside nested "item" groups are
    collected into the single flat result list.
    """
    data = json.loads(json_text)
    name = data.get("info", {}).get("name", "Imported Collection")
    requests: list[HttpRequest] = []

    def walk(node):
        # A leaf carries "request"; a folder carries a nested "item" list.
        if "request" in node:
            spec = node["request"]
            url_obj = spec.get("url", {})
            params: dict = {}
            if isinstance(url_obj, str):
                url = url_obj
            else:
                url = url_obj.get("raw", "")
                if "?" in url:
                    url = url.split("?")[0]
                for qp in url_obj.get("query", []):
                    if not qp.get("disabled"):
                        params[qp.get("key", "")] = qp.get("value", "")

            headers = {
                h.get("key", ""): h.get("value", "")
                for h in spec.get("header", [])
                if not h.get("disabled")
            }

            body, body_type = "", "raw"
            body_obj = spec.get("body", {})
            if body_obj:
                mode = body_obj.get("mode", "raw")
                if mode == "raw":
                    body = body_obj.get("raw", "")
                elif mode == "urlencoded":
                    body_type = "urlencoded"
                    body = "&".join(
                        f"{p['key']}={p.get('value', '')}"
                        for p in body_obj.get("urlencoded", [])
                        if not p.get("disabled")
                    )

            requests.append(HttpRequest(
                method=spec.get("method", "GET"), url=url, headers=headers,
                params=params, body=body, body_type=body_type,
                name=node.get("name", ""),
            ))
        elif "item" in node:
            for child in node["item"]:
                walk(child)

    for top in data.get("item", []):
        walk(top)

    return name, requests
|
||||
|
||||
|
||||
def from_curl(curl_cmd: str) -> HttpRequest:
    """Parse a curl command string into an HttpRequest.

    Supports -X/--request, -H/--header, -d/--data/--data-raw/--data-binary,
    -u/--user and a positional URL; unknown flags are skipped.

    Fix: repeated --data flags are concatenated with "&" (matching curl's
    behaviour) instead of a later flag silently overwriting the body.
    """
    # Normalize line continuations so shlex sees one logical command.
    cmd = curl_cmd.replace("\\\n", " ").strip()
    try:
        tokens = shlex.split(cmd)
    except ValueError:
        # Unbalanced quotes — fall back to naive whitespace splitting.
        tokens = cmd.split()

    req = HttpRequest(method="GET")
    i = 1  # skip 'curl'
    while i < len(tokens):
        token = tokens[i]
        if token in ("-X", "--request") and i + 1 < len(tokens):
            req.method = tokens[i + 1].upper()
            i += 2
        elif token in ("-H", "--header") and i + 1 < len(tokens):
            header = tokens[i + 1]
            if ":" in header:
                k, _, v = header.partition(":")
                req.headers[k.strip()] = v.strip()
            i += 2
        elif token in ("-d", "--data", "--data-raw", "--data-binary") and i + 1 < len(tokens):
            # curl joins repeated data flags with "&" — mirror that.
            chunk = tokens[i + 1]
            req.body = f"{req.body}&{chunk}" if req.body else chunk
            if req.method == "GET":
                req.method = "POST"  # curl implies POST when a body is given
            i += 2
        elif token in ("-u", "--user") and i + 1 < len(tokens):
            user_pass = tokens[i + 1]
            if ":" in user_pass:
                u, _, p = user_pass.partition(":")
                req.auth_type = "basic"
                req.auth_data = {"username": u, "password": p}
            i += 2
        elif not token.startswith("-") and not req.url:
            req.url = token.strip("'\"")
            i += 1
        else:
            i += 1

    return req
|
||||
82
app/core/mock_server.py
Normal file
82
app/core/mock_server.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""APIClient - Agent — Lightweight HTTP mock server."""
|
||||
import threading
|
||||
from http.server import BaseHTTPRequestHandler, HTTPServer
|
||||
|
||||
from app.core import storage
|
||||
|
||||
|
||||
# Module-level mock-server singleton state: at most one server per process.
_server_instance: HTTPServer | None = None
_server_thread: threading.Thread | None = None
_server_port: int = 8888
|
||||
|
||||
|
||||
class _MockHandler(BaseHTTPRequestHandler):
    """Queries the DB on every request so new/edited endpoints are served immediately."""

    def log_message(self, fmt, *args):
        # Keep the console quiet — the base handler logs every hit by default.
        pass

    def _handle(self):
        # First endpoint whose method (or "*" wildcard) and path both match wins.
        matched = None
        for candidate in storage.get_mock_endpoints():
            if candidate.path != self.path:
                continue
            if candidate.method in ("*", self.command):
                matched = candidate
                break

        if matched is None:
            payload = b'{"error": "No mock endpoint matched"}'
            self.send_response(404)
            self.send_header("Content-Type", "application/json")
            self.send_header("Content-Length", str(len(payload)))
            self.end_headers()
            self.wfile.write(payload)
            return

        payload = matched.response_body.encode("utf-8")
        self.send_response(matched.status_code)
        for key, value in matched.response_headers.items():
            self.send_header(key, value)
        self.send_header("Content-Length", str(len(payload)))
        self.end_headers()
        self.wfile.write(payload)

    # Route every verb through the same DB lookup.
    do_GET = _handle
    do_POST = _handle
    do_PUT = _handle
    do_PATCH = _handle
    do_DELETE = _handle
    do_HEAD = _handle
    do_OPTIONS = _handle
|
||||
|
||||
|
||||
def start(port: int = 8888) -> str:
    """Start the mock server on *port*; returns a human-readable status line."""
    global _server_instance, _server_thread, _server_port
    if _server_instance is not None:
        return f"Already running on port {_server_port}"
    _server_port = port
    try:
        server = HTTPServer(("localhost", port), _MockHandler)
    except OSError as e:
        # Typically: port already in use or permission denied.
        return f"Failed to start: {e}"
    _server_instance = server
    # Daemon thread so a forgotten server never blocks app shutdown.
    _server_thread = threading.Thread(target=server.serve_forever, daemon=True)
    _server_thread.start()
    return f"Mock server running on http://localhost:{port}"
|
||||
|
||||
|
||||
def stop() -> str:
    """Shut the mock server down if it is running; returns a status line."""
    global _server_instance, _server_thread
    if _server_instance is None:
        return "Not running"
    _server_instance.shutdown()
    _server_instance = None
    _server_thread = None
    return "Mock server stopped"
|
||||
|
||||
|
||||
def is_running() -> bool:
    """True while a mock server instance is alive."""
    return _server_instance is not None
|
||||
|
||||
|
||||
def get_port() -> int:
    """Port the mock server was (last) started on; defaults to 8888."""
    return _server_port
|
||||
236
app/core/openapi_parser.py
Normal file
236
app/core/openapi_parser.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""APIClient - Agent — OpenAPI / Swagger spec parser.
|
||||
|
||||
Parses OpenAPI 3.x and Swagger 2.0 specs (JSON or YAML) directly,
|
||||
without needing AI tokens.
|
||||
"""
|
||||
import json
|
||||
import re
|
||||
|
||||
|
||||
def _try_yaml(text: str) -> dict | None:
|
||||
try:
|
||||
import yaml
|
||||
return yaml.safe_load(text)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _try_json(text: str) -> dict | None:
|
||||
try:
|
||||
return json.loads(text)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def detect_spec(text: str) -> dict | None:
    """Try to parse text as OpenAPI/Swagger JSON or YAML. Returns raw dict or None."""
    parsed = _try_json(text)
    if parsed is None:
        parsed = _try_yaml(text)
    # Only accept mappings that self-identify as an OpenAPI/Swagger document.
    if isinstance(parsed, dict) and ("openapi" in parsed or "swagger" in parsed):
        return parsed
    return None
|
||||
|
||||
|
||||
def _detect_auth_type(security_schemes: dict) -> str:
    """Map the first recognizable security scheme to an EKIKA auth_type.

    Understands both spellings:
      * Swagger 2.0 — {"type": "basic"} / {"type": "apiKey"}
      * OpenAPI 3.x — {"type": "http", "scheme": "basic"|"bearer"},
        {"type": "apiKey"}, {"type": "bearer"}
    """
    for scheme in security_schemes.values():
        t = scheme.get("type", "").lower()
        sub = scheme.get("scheme", "").lower()
        if t == "apikey":
            return "apikey"
        # Check basic before the generic http/bearer fallback so that
        # {"type": "http", "scheme": "basic"} is not misread as bearer.
        if t == "basic" or (t == "http" and sub == "basic"):
            return "basic"
        if t in ("http", "bearer") or sub == "bearer":
            # Plain "http" without a recognized sub-scheme falls back to bearer.
            return "bearer"
    return "none"


def parse_spec(data: dict) -> dict:
    """
    Parse an OpenAPI 3.x or Swagger 2.0 spec dict into EKIKA's internal format:
    {
        "collection_name": str,
        "base_url": str,
        "auth_type": str,
        "endpoints": [...],
        "environment_variables": {...}
    }

    Fix vs. previous revision: HTTP basic auth is now detected for both
    Swagger 2.0 ("type: basic") and OpenAPI 3 ("type: http, scheme: basic");
    previously the former yielded "none" and the latter "bearer".
    """
    version = str(data.get("openapi", data.get("swagger", "2")))
    is_v3 = version.startswith("3")

    # Collection name
    info = data.get("info", {})
    collection_name = info.get("title", "Imported API")

    # Base URL: v3 lists servers; Swagger 2 assembles scheme://host/basePath.
    if is_v3:
        servers = data.get("servers", [])
        base_url = servers[0].get("url", "") if servers else ""
    else:
        host = data.get("host", "")
        schemes = data.get("schemes", ["https"])
        base_p = data.get("basePath", "/")
        base_url = f"{schemes[0]}://{host}{base_p}" if host else ""
    base_url = base_url.rstrip("/")

    # Auth detection (see _detect_auth_type for the version differences).
    if is_v3:
        security_schemes = data.get("components", {}).get("securitySchemes", {})
    else:
        security_schemes = data.get("securityDefinitions", {})
    auth_type = _detect_auth_type(security_schemes)

    # Endpoints
    endpoints = []
    for path, path_item in data.get("paths", {}).items():
        if not isinstance(path_item, dict):
            continue
        for method in ("get", "post", "put", "patch", "delete", "head", "options"):
            op = path_item.get(method)
            if not isinstance(op, dict):
                continue

            name = op.get("summary") or op.get("operationId") or f"{method.upper()} {path}"
            description = op.get("description", "")

            headers: dict = {}
            params: dict = {}
            body_example = ""
            content_type = "application/json"
            body_type = "raw"

            # Query/header parameters (shared by both spec versions).
            for param in op.get("parameters", []):
                if not isinstance(param, dict):
                    continue
                p_in = param.get("in", "")
                p_name = param.get("name", "")
                if p_in == "query":
                    params[p_name] = param.get("example", "")
                elif p_in == "header":
                    headers[p_name] = param.get("example", "")

            # Request body (OpenAPI 3): prefer JSON; otherwise remember the
            # first advertised media type.
            if is_v3 and "requestBody" in op:
                content = op["requestBody"].get("content", {})
                if "application/json" in content:
                    schema = content["application/json"].get("schema", {})
                    body_example = _schema_to_example_str(schema)
                    content_type = "application/json"
                elif "application/x-www-form-urlencoded" in content:
                    body_type = "form-urlencoded"
                    content_type = ""
                elif content:
                    content_type = next(iter(content))

            # Request body (Swagger 2): "in: body" parameter + consumes list.
            if not is_v3:
                consumes = op.get("consumes", data.get("consumes", ["application/json"]))
                for param in op.get("parameters", []):
                    if param.get("in") == "body":
                        body_example = _schema_to_example_str(param.get("schema", {}))
                if consumes:
                    content_type = consumes[0]

            # Hint the auth header so generated requests work out of the box.
            if auth_type == "bearer":
                headers.setdefault("Authorization", "Bearer {{token}}")
            elif auth_type == "apikey":
                headers.setdefault("X-API-Key", "{{api_key}}")

            # Basic test script
            test_script = (
                "pm.test('Status OK', lambda: pm.response.to_have_status(200))\n"
                "pm.test('Has body', lambda: expect(pm.response.text).to_be_truthy())"
            )

            endpoints.append({
                "name": name,
                "method": method.upper(),
                "path": path,
                "description": description,
                "headers": headers,
                "params": params,
                "body": body_example,
                "body_type": body_type,
                "content_type": content_type,
                "test_script": test_script,
            })

    # Seed the environment variables the generated requests reference.
    env_vars: dict = {}
    if base_url:
        env_vars["base_url"] = base_url
    if auth_type == "bearer":
        env_vars["token"] = ""
    elif auth_type == "apikey":
        env_vars["api_key"] = ""
    elif auth_type == "basic":
        env_vars["username"] = ""
        env_vars["password"] = ""

    return {
        "collection_name": collection_name,
        "base_url": base_url,
        "auth_type": auth_type,
        "endpoints": endpoints,
        "environment_variables": env_vars,
    }
|
||||
|
||||
|
||||
def _schema_to_example_str(schema: dict) -> str:
|
||||
"""Generate a compact JSON example string from an OpenAPI schema."""
|
||||
try:
|
||||
example = _schema_to_example(schema)
|
||||
return json.dumps(example, indent=2, ensure_ascii=False)
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def _schema_to_example(schema: dict, depth: int = 0) -> object:
|
||||
if depth > 5:
|
||||
return {}
|
||||
if not isinstance(schema, dict):
|
||||
return {}
|
||||
|
||||
# Use provided example first
|
||||
if "example" in schema:
|
||||
return schema["example"]
|
||||
if "default" in schema:
|
||||
return schema["default"]
|
||||
|
||||
t = schema.get("type", "object")
|
||||
|
||||
if t == "object" or "properties" in schema:
|
||||
result = {}
|
||||
for k, v in schema.get("properties", {}).items():
|
||||
result[k] = _schema_to_example(v, depth + 1)
|
||||
return result
|
||||
|
||||
if t == "array":
|
||||
items = schema.get("items", {})
|
||||
return [_schema_to_example(items, depth + 1)]
|
||||
|
||||
if t == "string":
|
||||
fmt = schema.get("format", "")
|
||||
if fmt == "date-time": return "2024-01-01T00:00:00Z"
|
||||
if fmt == "date": return "2024-01-01"
|
||||
if fmt == "email": return "user@example.com"
|
||||
if fmt == "uuid": return "00000000-0000-0000-0000-000000000000"
|
||||
return schema.get("enum", ["string"])[0]
|
||||
|
||||
if t == "integer": return 0
|
||||
if t == "number": return 0.0
|
||||
if t == "boolean": return True
|
||||
return {}
|
||||
436
app/core/storage.py
Normal file
436
app/core/storage.py
Normal file
@@ -0,0 +1,436 @@
|
||||
"""APIClient - Agent — Storage layer (SQLite)."""
|
||||
import json
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
from app.models import HttpRequest, Environment, MockEndpoint
|
||||
|
||||
# Per-user SQLite database location; the directory is created lazily by _get_conn.
DB_PATH = Path.home() / ".ekika-api-client" / "data.db"
|
||||
|
||||
|
||||
def _get_conn() -> sqlite3.Connection:
    """Open the app database, creating its parent directory on first use."""
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    connection = sqlite3.connect(DB_PATH)
    connection.row_factory = sqlite3.Row
    # WAL keeps readers unblocked during writes; SQLite leaves foreign-key
    # enforcement off by default, so switch it on per connection.
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA foreign_keys=ON"):
        connection.execute(pragma)
    return connection
|
||||
|
||||
|
||||
def _migrate(conn: sqlite3.Connection):
|
||||
"""Add columns/tables introduced after initial schema."""
|
||||
existing = {row[1] for row in conn.execute("PRAGMA table_info(requests)")}
|
||||
migrations = [
|
||||
("folder_id", "ALTER TABLE requests ADD COLUMN folder_id INTEGER"),
|
||||
("params", "ALTER TABLE requests ADD COLUMN params TEXT"),
|
||||
("body_type", "ALTER TABLE requests ADD COLUMN body_type TEXT DEFAULT 'raw'"),
|
||||
("auth_type", "ALTER TABLE requests ADD COLUMN auth_type TEXT DEFAULT 'none'"),
|
||||
("auth_data", "ALTER TABLE requests ADD COLUMN auth_data TEXT"),
|
||||
("pre_request_script", "ALTER TABLE requests ADD COLUMN pre_request_script TEXT"),
|
||||
("test_script", "ALTER TABLE requests ADD COLUMN test_script TEXT"),
|
||||
("timeout", "ALTER TABLE requests ADD COLUMN timeout INTEGER DEFAULT 30"),
|
||||
("ssl_verify", "ALTER TABLE requests ADD COLUMN ssl_verify INTEGER DEFAULT 1"),
|
||||
("content_type", "ALTER TABLE requests ADD COLUMN content_type TEXT"),
|
||||
("created_at", "ALTER TABLE requests ADD COLUMN created_at DATETIME DEFAULT CURRENT_TIMESTAMP"),
|
||||
]
|
||||
for col, sql in migrations:
|
||||
if col not in existing:
|
||||
conn.execute(sql)
|
||||
|
||||
hist_cols = {row[1] for row in conn.execute("PRAGMA table_info(history)")}
|
||||
hist_migrations = [
|
||||
("params", "ALTER TABLE history ADD COLUMN params TEXT"),
|
||||
("body_type", "ALTER TABLE history ADD COLUMN body_type TEXT"),
|
||||
("auth_type", "ALTER TABLE history ADD COLUMN auth_type TEXT"),
|
||||
("auth_data", "ALTER TABLE history ADD COLUMN auth_data TEXT"),
|
||||
("timeout", "ALTER TABLE history ADD COLUMN timeout INTEGER DEFAULT 30"),
|
||||
("ssl_verify","ALTER TABLE history ADD COLUMN ssl_verify INTEGER DEFAULT 1"),
|
||||
]
|
||||
for col, sql in hist_migrations:
|
||||
if col not in hist_cols:
|
||||
conn.execute(sql)
|
||||
|
||||
|
||||
def init_db():
    """Create the schema on first run and apply pending migrations.

    Safe to call on every start-up: all DDL uses IF NOT EXISTS and
    ``_migrate`` only adds columns that are missing.
    """
    tables = (
        """CREATE TABLE IF NOT EXISTS collections (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP
        )""",
        """CREATE TABLE IF NOT EXISTS folders (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            collection_id INTEGER NOT NULL,
            name TEXT NOT NULL,
            FOREIGN KEY (collection_id) REFERENCES collections(id)
        )""",
        """CREATE TABLE IF NOT EXISTS requests (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            collection_id INTEGER,
            folder_id INTEGER,
            name TEXT,
            method TEXT,
            url TEXT,
            headers TEXT,
            params TEXT,
            body TEXT,
            body_type TEXT DEFAULT 'raw',
            content_type TEXT,
            auth_type TEXT DEFAULT 'none',
            auth_data TEXT,
            pre_request_script TEXT,
            test_script TEXT,
            timeout INTEGER DEFAULT 30,
            ssl_verify INTEGER DEFAULT 1,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (collection_id) REFERENCES collections(id),
            FOREIGN KEY (folder_id) REFERENCES folders(id)
        )""",
        """CREATE TABLE IF NOT EXISTS history (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            method TEXT,
            url TEXT,
            headers TEXT,
            params TEXT,
            body TEXT,
            body_type TEXT,
            auth_type TEXT,
            auth_data TEXT,
            timeout INTEGER DEFAULT 30,
            ssl_verify INTEGER DEFAULT 1,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP
        )""",
        """CREATE TABLE IF NOT EXISTS environments (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            variables TEXT DEFAULT '{}',
            is_active INTEGER DEFAULT 0
        )""",
        """CREATE TABLE IF NOT EXISTS mock_endpoints (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT,
            method TEXT DEFAULT 'GET',
            path TEXT NOT NULL,
            status_code INTEGER DEFAULT 200,
            response_headers TEXT DEFAULT '{}',
            response_body TEXT DEFAULT ''
        )""",
        """CREATE TABLE IF NOT EXISTS settings (
            key TEXT PRIMARY KEY,
            value TEXT
        )""",
    )
    # Performance indexes
    indexes = (
        "CREATE INDEX IF NOT EXISTS idx_requests_collection ON requests(collection_id)",
        "CREATE INDEX IF NOT EXISTS idx_requests_folder ON requests(folder_id)",
        "CREATE INDEX IF NOT EXISTS idx_history_created ON history(created_at DESC)",
        "CREATE INDEX IF NOT EXISTS idx_requests_url ON requests(url)",
    )
    with _get_conn() as conn:
        for statement in tables + indexes:
            conn.execute(statement)
        _migrate(conn)
|
||||
|
||||
|
||||
# ── Collections ──────────────────────────────────────────────────────────────
|
||||
|
||||
def get_collections() -> list[dict]:
    """Return every collection as a plain dict, sorted case-insensitively by name."""
    with _get_conn() as db:
        cursor = db.execute("SELECT * FROM collections ORDER BY name COLLATE NOCASE")
        return [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def add_collection(name: str) -> int:
    """Insert a new collection called *name* and return its generated row id."""
    with _get_conn() as db:
        return db.execute("INSERT INTO collections (name) VALUES (?)", (name,)).lastrowid
|
||||
|
||||
|
||||
def rename_collection(col_id: int, name: str):
    """Rename collection *col_id* to *name* (no-op if the id does not exist)."""
    with _get_conn() as db:
        db.execute("UPDATE collections SET name=? WHERE id=?", (name, col_id))
|
||||
|
||||
|
||||
def delete_collection(col_id: int):
    """Delete a collection and cascade manually to its requests and folders."""
    with _get_conn() as db:
        for sql in (
            "DELETE FROM requests WHERE collection_id=?",
            "DELETE FROM folders WHERE collection_id=?",
            "DELETE FROM collections WHERE id=?",
        ):
            db.execute(sql, (col_id,))
|
||||
|
||||
|
||||
# ── Folders ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def get_folders(collection_id: int) -> list[dict]:
    """Return *collection_id*'s folders as dicts, sorted case-insensitively by name."""
    with _get_conn() as db:
        cursor = db.execute(
            "SELECT * FROM folders WHERE collection_id=? ORDER BY name COLLATE NOCASE",
            (collection_id,),
        )
        return [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def add_folder(collection_id: int, name: str) -> int:
    """Create a folder named *name* under *collection_id*; return its row id."""
    with _get_conn() as db:
        cursor = db.execute(
            "INSERT INTO folders (collection_id, name) VALUES (?,?)",
            (collection_id, name),
        )
        return cursor.lastrowid
|
||||
|
||||
|
||||
def rename_folder(folder_id: int, name: str):
    """Rename folder *folder_id* to *name* (no-op if the id does not exist)."""
    with _get_conn() as db:
        db.execute("UPDATE folders SET name=? WHERE id=?", (name, folder_id))
|
||||
|
||||
|
||||
def delete_folder(folder_id: int):
    """Delete a folder together with every request stored inside it."""
    with _get_conn() as db:
        db.execute("DELETE FROM requests WHERE folder_id=?", (folder_id,))
        db.execute("DELETE FROM folders WHERE id=?", (folder_id,))
|
||||
|
||||
|
||||
# ── Requests ──────────────────────────────────────────────────────────────────
|
||||
|
||||
def _deserialize_request(r) -> dict:
|
||||
d = dict(r)
|
||||
d["headers"] = json.loads(d.get("headers") or "{}")
|
||||
d["params"] = json.loads(d.get("params") or "{}")
|
||||
d["auth_data"]= json.loads(d.get("auth_data")or "{}")
|
||||
return d
|
||||
|
||||
|
||||
def get_requests(collection_id: int, folder_id: int = None) -> list[dict]:
    """Return requests in *collection_id*, scoped to *folder_id*.

    When *folder_id* is None, only root-level requests (folder_id IS NULL)
    are returned — not the whole collection (see get_all_requests for that).
    """
    if folder_id is None:
        sql = ("SELECT * FROM requests WHERE collection_id=? AND folder_id IS NULL"
               " ORDER BY name COLLATE NOCASE")
        args = (collection_id,)
    else:
        sql = ("SELECT * FROM requests WHERE collection_id=? AND folder_id=?"
               " ORDER BY name COLLATE NOCASE")
        args = (collection_id, folder_id)
    with _get_conn() as db:
        return [_deserialize_request(row) for row in db.execute(sql, args).fetchall()]
|
||||
|
||||
|
||||
def get_all_requests(collection_id: int) -> list[dict]:
    """Return every request in *collection_id*, folder-nested or not, name-sorted."""
    with _get_conn() as db:
        cursor = db.execute(
            "SELECT * FROM requests WHERE collection_id=? ORDER BY name COLLATE NOCASE",
            (collection_id,),
        )
        return [_deserialize_request(row) for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def save_request(collection_id: int, req: HttpRequest, folder_id: int = None) -> int:
    """Persist *req* as a new row in *collection_id* (optionally inside a folder).

    dict-valued fields (headers/params/auth_data) are JSON-encoded and the
    ssl_verify flag is stored as 0/1.  Returns the new row id.
    """
    row = (
        collection_id, folder_id, req.name, req.method, req.url,
        json.dumps(req.headers), json.dumps(req.params),
        req.body, req.body_type, req.content_type,
        req.auth_type, json.dumps(req.auth_data),
        req.pre_request_script, req.test_script,
        req.timeout, int(req.ssl_verify),
    )
    with _get_conn() as db:
        cursor = db.execute(
            """INSERT INTO requests
                 (collection_id, folder_id, name, method, url, headers, params,
                  body, body_type, content_type, auth_type, auth_data,
                  pre_request_script, test_script, timeout, ssl_verify)
               VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)""",
            row,
        )
        return cursor.lastrowid
|
||||
|
||||
|
||||
def update_request(req_id: int, req: HttpRequest):
    """Overwrite every editable column of request *req_id* with *req*'s fields.

    Collection/folder placement and created_at are deliberately untouched.
    """
    values = (
        req.name, req.method, req.url,
        json.dumps(req.headers), json.dumps(req.params),
        req.body, req.body_type, req.content_type,
        req.auth_type, json.dumps(req.auth_data),
        req.pre_request_script, req.test_script,
        req.timeout, int(req.ssl_verify), req_id,
    )
    with _get_conn() as db:
        db.execute(
            """UPDATE requests SET
                 name=?, method=?, url=?, headers=?, params=?,
                 body=?, body_type=?, content_type=?, auth_type=?, auth_data=?,
                 pre_request_script=?, test_script=?, timeout=?, ssl_verify=?
               WHERE id=?""",
            values,
        )
|
||||
|
||||
|
||||
def delete_request(req_id: int):
    """Permanently delete request *req_id* (no-op if the id does not exist)."""
    with _get_conn() as db:
        db.execute("DELETE FROM requests WHERE id=?", (req_id,))
|
||||
|
||||
|
||||
def search_requests(query: str) -> list[dict]:
    """Find requests whose name or URL contains *query*, with collection name.

    NOTE(review): '%' and '_' inside *query* act as LIKE wildcards because
    the pattern is not escaped — presumably acceptable for an in-app search
    box; confirm if user input should be escaped.
    """
    pattern = f"%{query}%"
    with _get_conn() as db:
        cursor = db.execute(
            """SELECT r.*, c.name as collection_name
                 FROM requests r
                 LEFT JOIN collections c ON r.collection_id = c.id
                WHERE r.name LIKE ? OR r.url LIKE ?
                ORDER BY r.name COLLATE NOCASE""",
            (pattern, pattern),
        )
        return [_deserialize_request(row) for row in cursor.fetchall()]
|
||||
|
||||
|
||||
# ── History ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def add_to_history(req: HttpRequest):
    """Record *req* in the history table, capped at the 200 most recent entries."""
    with _get_conn() as db:
        # Trim to 199 first so the insert below lands us at exactly 200.
        db.execute(
            """DELETE FROM history WHERE id NOT IN (
                 SELECT id FROM history ORDER BY created_at DESC LIMIT 199
               )"""
        )
        db.execute(
            """INSERT INTO history
                 (method, url, headers, params, body, body_type, auth_type, auth_data, timeout, ssl_verify)
               VALUES (?,?,?,?,?,?,?,?,?,?)""",
            (
                req.method, req.url,
                json.dumps(req.headers), json.dumps(req.params),
                req.body, req.body_type, req.auth_type, json.dumps(req.auth_data),
                req.timeout, int(req.ssl_verify),
            ),
        )
|
||||
|
||||
|
||||
def get_history(limit: int = 50) -> list[dict]:
    """Return the *limit* most recent history entries, newest first."""
    with _get_conn() as db:
        cursor = db.execute(
            "SELECT * FROM history ORDER BY created_at DESC LIMIT ?", (limit,)
        )
        return [_deserialize_request(row) for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def clear_history():
    """Delete every history entry."""
    with _get_conn() as db:
        db.execute("DELETE FROM history")
|
||||
|
||||
|
||||
# ── Environments ──────────────────────────────────────────────────────────────
|
||||
|
||||
def get_environments() -> list[Environment]:
    """Load all environments, name-sorted, with variables decoded from JSON."""
    with _get_conn() as db:
        cursor = db.execute("SELECT * FROM environments ORDER BY name COLLATE NOCASE")
        return [
            Environment(
                id=row["id"],
                name=row["name"],
                variables=json.loads(row["variables"] or "{}"),
                is_active=bool(row["is_active"]),
            )
            for row in cursor.fetchall()
        ]
|
||||
|
||||
|
||||
def get_active_environment() -> Environment | None:
    """Return the environment flagged active, or None when none is active."""
    with _get_conn() as db:
        row = db.execute("SELECT * FROM environments WHERE is_active=1").fetchone()
    if row is None:
        return None
    return Environment(
        id=row["id"],
        name=row["name"],
        variables=json.loads(row["variables"] or "{}"),
        is_active=True,
    )
|
||||
|
||||
|
||||
def save_environment(env: Environment) -> int:
    """Insert *env* (no id yet) or update it in place (has id); return its row id."""
    payload = json.dumps(env.variables)
    with _get_conn() as db:
        if not env.id:
            cursor = db.execute(
                "INSERT INTO environments (name, variables, is_active) VALUES (?,?,?)",
                (env.name, payload, int(env.is_active)),
            )
            return cursor.lastrowid
        # Update path: the is_active flag is managed by set_active_environment.
        db.execute(
            "UPDATE environments SET name=?, variables=? WHERE id=?",
            (env.name, payload, env.id),
        )
        return env.id
|
||||
|
||||
|
||||
def set_active_environment(env_id: int | None):
    """Mark *env_id* as the single active environment; None deactivates all.

    NOTE(review): a falsy env_id (0) also deactivates everything; SQLite
    AUTOINCREMENT ids start at 1, so this should be unreachable in practice.
    """
    with _get_conn() as db:
        db.execute("UPDATE environments SET is_active=0")
        if env_id:
            db.execute("UPDATE environments SET is_active=1 WHERE id=?", (env_id,))
|
||||
|
||||
|
||||
def delete_environment(env_id: int):
    """Permanently delete environment *env_id*."""
    with _get_conn() as db:
        db.execute("DELETE FROM environments WHERE id=?", (env_id,))
|
||||
|
||||
|
||||
# ── Mock Endpoints ────────────────────────────────────────────────────────────
|
||||
|
||||
def get_mock_endpoints() -> list[MockEndpoint]:
    """Load all mock endpoints sorted by path, decoding response headers from JSON."""
    with _get_conn() as db:
        cursor = db.execute("SELECT * FROM mock_endpoints ORDER BY path")
        return [
            MockEndpoint(
                id=row["id"],
                name=row["name"],
                method=row["method"],
                path=row["path"],
                status_code=row["status_code"],
                response_headers=json.loads(row["response_headers"] or "{}"),
                response_body=row["response_body"] or "",
            )
            for row in cursor.fetchall()
        ]
|
||||
|
||||
|
||||
def save_mock_endpoint(ep: MockEndpoint) -> int:
    """Insert *ep* (no id yet) or update it in place (has id); return its row id."""
    headers_json = json.dumps(ep.response_headers)
    with _get_conn() as db:
        if not ep.id:
            cursor = db.execute(
                """INSERT INTO mock_endpoints
                     (name, method, path, status_code, response_headers, response_body)
                   VALUES (?,?,?,?,?,?)""",
                (ep.name, ep.method, ep.path, ep.status_code,
                 headers_json, ep.response_body),
            )
            return cursor.lastrowid
        db.execute(
            """UPDATE mock_endpoints
                 SET name=?, method=?, path=?, status_code=?, response_headers=?, response_body=?
               WHERE id=?""",
            (ep.name, ep.method, ep.path, ep.status_code,
             headers_json, ep.response_body, ep.id),
        )
        return ep.id
|
||||
|
||||
|
||||
def delete_mock_endpoint(ep_id: int):
    """Permanently delete mock endpoint *ep_id*."""
    with _get_conn() as db:
        db.execute("DELETE FROM mock_endpoints WHERE id=?", (ep_id,))
|
||||
|
||||
|
||||
# ── Settings ──────────────────────────────────────────────────────────────────
|
||||
|
||||
def get_setting(key: str, default: str = "") -> str:
    """Return the stored value for *key*, or *default* when the key is absent."""
    with _get_conn() as db:
        row = db.execute("SELECT value FROM settings WHERE key=?", (key,)).fetchone()
    return default if row is None else row["value"]
|
||||
|
||||
|
||||
def set_setting(key: str, value: str):
    """Upsert *key* → *value* in the settings table."""
    upsert = (
        "INSERT INTO settings (key, value) VALUES (?,?) "
        "ON CONFLICT(key) DO UPDATE SET value=excluded.value"
    )
    with _get_conn() as db:
        db.execute(upsert, (key, value))
|
||||
86
app/core/test_runner.py
Normal file
86
app/core/test_runner.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""Run test scripts after a response is received."""
|
||||
import json
|
||||
from app.models import HttpResponse, TestResult
|
||||
|
||||
|
||||
class TestContext:
    """Exposes pm.test(), pm.expect(), pm.response in test scripts."""

    def __init__(self, response: HttpResponse):
        self.results: list[TestResult] = []   # outcome of every pm.test() call
        self.response = _ResponseProxy(response)
        self.expect = _expect

    def test(self, name: str, fn):
        """Run *fn*, recording a pass/fail TestResult under *name*.

        AssertionError messages become the failure message; any other
        exception is reported with an "Error: " prefix instead of crashing
        the script.
        """
        outcome = TestResult(name=name, passed=True)
        try:
            fn()
        except AssertionError as exc:
            outcome = TestResult(name=name, passed=False, message=str(exc))
        except Exception as exc:
            outcome = TestResult(name=name, passed=False, message=f"Error: {exc}")
        self.results.append(outcome)
|
||||
|
||||
|
||||
class _ResponseProxy:
    """Read-only view of an HttpResponse handed to test scripts as pm.response."""

    def __init__(self, resp: HttpResponse):
        self._resp = resp
        # Flat convenience attributes mirroring the Postman script API.
        self.status = resp.status
        self.responseTime = resp.elapsed_ms
        self.text = resp.body
        # Parse the body exactly once; non-JSON bodies yield None.
        try:
            self._json = json.loads(resp.body)
        except Exception:
            self._json = None

    def json(self):
        """Return the parsed JSON body, or None when the body was not valid JSON."""
        return self._json

    def to_have_status(self, code: int):
        """Postman-style assertion: fail unless the response status equals *code*."""
        assert self._resp.status == code, f"Expected status {code}, got {self._resp.status}"
|
||||
|
||||
|
||||
class _Assertion:
|
||||
def __init__(self, value):
|
||||
self._value = value
|
||||
|
||||
def to_equal(self, expected):
|
||||
assert self._value == expected, f"Expected {expected!r}, got {self._value!r}"
|
||||
return self
|
||||
|
||||
def to_be_truthy(self):
|
||||
assert self._value, f"Expected truthy, got {self._value!r}"
|
||||
return self
|
||||
|
||||
def to_include(self, substr):
|
||||
assert substr in str(self._value), f"Expected {substr!r} in {self._value!r}"
|
||||
return self
|
||||
|
||||
def to_be_below(self, n):
|
||||
assert self._value < n, f"Expected {self._value} < {n}"
|
||||
return self
|
||||
|
||||
def to_have_property(self, key):
|
||||
assert hasattr(self._value, key) or (isinstance(self._value, dict) and key in self._value), \
|
||||
f"Expected property {key!r}"
|
||||
return self
|
||||
|
||||
|
||||
def _expect(value) -> _Assertion:
|
||||
return _Assertion(value)
|
||||
|
||||
|
||||
def run_tests(script: str, response: HttpResponse) -> list[TestResult]:
    """Execute a user test *script* against *response* and return its results.

    The script runs with ``pm`` (TestContext), ``response`` (proxy) and
    ``expect`` in scope.  A blank script yields an empty list; an exception
    escaping the script itself is reported as a single "Script Error" result.

    NOTE(review): the script runs via exec with full interpreter privileges —
    only execute scripts the user wrote or explicitly trusts.
    """
    if not (script and script.strip()):
        return []

    ctx = TestContext(response)
    scope = {
        "pm": ctx,
        "response": ctx.response,
        "expect": _expect,
    }
    try:
        exec(script, scope)
    except Exception as exc:
        ctx.results.append(TestResult(name="Script Error", passed=False, message=str(exc)))
    return ctx.results
|
||||
Reference in New Issue
Block a user