import gradio as gr
import requests
import json
import os
import threading

from models import OptimizeRequest, AutotuneRequest, QARequest
from api import start_api
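
# Start the internal FastAPI backend (api.start_api) in a background daemon
# thread; the Gradio tools below proxy their requests to it over localhost.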
threading.Thread(target=start_api, daemon=True).start()

BASE_INTERNAL = "http://127.0.0.1:8000"


def call_api(endpoint: str, payload: dict) -> str:
    """POST a JSON payload to an internal API endpoint and return the response as pretty-printed JSON."""
    try:
        r = requests.post(f"{BASE_INTERNAL}{endpoint}", json=payload, timeout=120)
        return json.dumps(r.json(), indent=2)
    except Exception as e:
        return str(e)


def clear_cache_tool(docs_path="data/docs"):
    """
    🧹 Clear Cache MCP Tool
    Deletes all files and directories inside docs_path on the server.
    Accepts:
    - local paths (str), default='data/docs/'
    """
    try:
        r = requests.post(
            f"{BASE_INTERNAL}/clear_cache",
            data={"docs_path": docs_path},
            timeout=60
        )
        r.raise_for_status()
        return r.json()
    except Exception as e:
        return {"error": str(e)}


def upload_docs_tool(files, docs_path="data/docs"):
    """
    Upload documents to the server's docs folder via FastAPI /upload_docs.
    Accepts:
    - local file paths (str)
    - URLs (str)
    - file-like objects
    """
    import tempfile

    os.makedirs(docs_path, exist_ok=True)
    files_payload = []
    temp_files = []

    try:
        for f in files:
            if isinstance(f, str) and f.startswith(("http://", "https://")):
                # Remote file: download it into a temporary file first.
                resp = requests.get(f, timeout=60)
                resp.raise_for_status()

                ext = os.path.splitext(f)[1] or ".txt"
                tmp = tempfile.NamedTemporaryFile(delete=False, suffix=ext)

                if "text" in resp.headers.get("Content-Type", "").lower():
                    tmp.write(resp.text.encode("utf-8"))
                else:
                    tmp.write(resp.content)

                tmp.close()
                temp_files.append(tmp.name)
                files_payload.append(("files", open(tmp.name, "rb")))

            elif isinstance(f, str):
                # Local path: open the file for upload.
                files_payload.append(("files", open(f, "rb")))

            else:
                # Already a file-like object.
                files_payload.append(("files", f))

        resp = requests.post(
            f"{BASE_INTERNAL}/upload_docs",
            files=files_payload,
            data={"docs_path": docs_path}
        )
        resp.raise_for_status()
        return resp.json()

    finally:
        # Close any handles we opened and remove temporary downloads.
        for _, file_obj in files_payload:
            if not file_obj.closed:
                file_obj.close()

        for tmp_file in temp_files:
            try:
                os.unlink(tmp_file)
            except Exception:
                pass


def optimize_rag_tool(payload: str) -> str:
    """🔧 Explicit optimization request: user provides all pipeline configs manually."""
    return call_api("/optimize_rag", json.loads(payload))


def autotune_tool(payload: str) -> str:
    """🔧 Autotune RAG: recommends chunk sizes and embedding models automatically."""
    return call_api("/autotune_rag", json.loads(payload))


def generate_qa_tool(payload: str) -> str:
    """🧩 Generates a validation QA dataset for RAG evaluation."""
    return call_api("/generate_validation_qa", json.loads(payload))
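
# Replace the wrapper docstrings with the Pydantic request-model docstrings so
# the exposed tool descriptions show the full request schema documentation.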
optimize_rag_tool.__doc__ = OptimizeRequest.__doc__
autotune_tool.__doc__ = AutotuneRequest.__doc__
generate_qa_tool.__doc__ = QARequest.__doc__


def model_to_json(model_cls) -> str:
    """Serialize a Pydantic model's field defaults as an indented JSON string (used to pre-fill the UI)."""
    return json.dumps({k: v.default for k, v in model_cls.model_fields.items()}, indent=2)


DEFAULT_UPLOAD_PATH = "data/docs"
DEFAULT_OPTIMIZE_JSON = model_to_json(OptimizeRequest)
DEFAULT_AUTOTUNE_JSON = model_to_json(AutotuneRequest)
DEFAULT_QA_JSON = model_to_json(QARequest)
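
# Gradio UI: one panel per tool. The same functions wired to the buttons are
# also exposed as MCP tools when the app is launched with mcp_server=True
# (see the launch call at the bottom).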
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# Ragmint MCP Server")

    with gr.Column():
        gr.Markdown("## Upload Documents")
        gr.Markdown("📂 Upload files (local paths or URLs) to your `data/docs` folder")
        upload_files = gr.File(file_count="multiple", type="filepath")
        upload_path = gr.Textbox(value=DEFAULT_UPLOAD_PATH, label="Docs Path")
        upload_btn = gr.Button("Upload", variant="primary")
        upload_out = gr.JSON(label="Response")
        upload_btn.click(upload_docs_tool, inputs=[upload_files, upload_path], outputs=upload_out)
        gr.Markdown("---")

    with gr.Column():
        gr.Markdown("## Upload Documents (URLs) via MCP")
        gr.Markdown("📂 Upload files (URLs) to your `data/docs` folder on MCP.")
        upload_mcp_input = gr.Textbox(
            lines=5,
            placeholder='Enter list of URLs (e.g., ["https://example.com/example.txt",...])',
            label="Files (JSON list)"
        )
        upload_mcp_path = gr.Textbox(value=DEFAULT_UPLOAD_PATH, label="Docs Path")
        upload_mcp_out = gr.JSON(label="Response")
        upload_mcp_btn = gr.Button("Upload via MCP", variant="primary")

        def upload_urls_tool(files_json, docs_path):
            """
            Upload documents to the server's docs folder via MCP.
            Accepts:
            - URLs (str)
            """
            import ast
            try:
                files = ast.literal_eval(files_json)
            except Exception:
                return {"error": "Invalid list of files (expected a JSON-style list of URLs)"}
            return upload_docs_tool(files, docs_path)

        upload_mcp_btn.click(
            upload_urls_tool,
            inputs=[upload_mcp_input, upload_mcp_path],
            outputs=upload_mcp_out
        )
        gr.Markdown("---")

    with gr.Column():
        gr.Markdown("## Optimize RAG")
        gr.Markdown(OptimizeRequest.__doc__ or "No description available.")
        optimize_input = gr.Textbox(lines=12, value=DEFAULT_OPTIMIZE_JSON, label="OptimizeRequest JSON")
        optimize_btn = gr.Button("Submit", variant="primary")
        optimize_out = gr.Textbox(lines=15, label="Response")
        optimize_btn.click(optimize_rag_tool, inputs=optimize_input, outputs=optimize_out)
        gr.Markdown("---")

    with gr.Column():
        gr.Markdown("## Autotune RAG")
        gr.Markdown(AutotuneRequest.__doc__ or "No description available.")
        autotune_input = gr.Textbox(lines=12, value=DEFAULT_AUTOTUNE_JSON, label="AutotuneRequest JSON")
        autotune_btn = gr.Button("Submit", variant="primary")
        autotune_out = gr.Textbox(lines=15, label="Response")
        autotune_btn.click(autotune_tool, inputs=autotune_input, outputs=autotune_out)
        gr.Markdown("---")

    with gr.Column():
        gr.Markdown("## Generate QA")
        gr.Markdown(QARequest.__doc__ or "No description available.")
        qa_input = gr.Textbox(lines=12, value=DEFAULT_QA_JSON, label="QARequest JSON")
        qa_btn = gr.Button("Submit", variant="primary")
        qa_out = gr.Textbox(lines=15, label="Response")
        qa_btn.click(generate_qa_tool, inputs=qa_input, outputs=qa_out)
        gr.Markdown("---")

    with gr.Column():
        gr.Markdown("## Clear Cache")
        gr.Markdown("🧹 Deletes all files and directories inside docs_path on the server.")
        clear_path = gr.Textbox(value=DEFAULT_UPLOAD_PATH, label="Docs Path to Clear")
        clear_btn = gr.Button("Clear Cache", variant="primary")
        clear_out = gr.JSON(label="Response")
        clear_btn.click(clear_cache_tool, inputs=[clear_path], outputs=clear_out)
        gr.Markdown("---")
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        mcp_server=True,
        show_error=True
    )