Basic Search Examples¶
Examples of making search requests to Polymathy.
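All examples call GET /v1/search with a q query parameter and, as the code below assumes, receive a JSON object mapping chunk IDs to [url, content] pairs. An illustrative (not real) response:

    {
      "chunk-001": ["https://example.com/ai-intro", "Artificial intelligence is..."],
      "chunk-002": ["https://example.com/ml-basics", "Machine learning models..."]
    }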
Using curl¶
Simple Query¶
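Assuming Polymathy is listening on localhost:8080, as in the examples below:

    curl "http://localhost:8080/v1/search?q=artificial+intelligence"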
With Pretty Output¶
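Pipe the response through jq (assumed to be installed) to pretty-print the JSON:

    curl -s "http://localhost:8080/v1/search?q=artificial+intelligence" | jq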
Save to File¶
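Write the raw JSON response to a file with curl's -o flag:

    curl -s "http://localhost:8080/v1/search?q=artificial+intelligence" -o results.json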
Using Python¶
Basic Request¶
import requests

def search(query: str) -> dict:
    """Perform a search query against Polymathy."""
    response = requests.get(
        "http://localhost:8080/v1/search",
        params={"q": query}
    )
    response.raise_for_status()
    return response.json()
# Usage
results = search("artificial intelligence")
print(f"Found {len(results)} chunks")
With Error Handling¶
import requests
from typing import Optional

def search_safe(query: str, timeout: int = 30) -> Optional[dict]:
    """Search with error handling."""
    try:
        response = requests.get(
            "http://localhost:8080/v1/search",
            params={"q": query},
            timeout=timeout
        )
        response.raise_for_status()
        return response.json()
    except requests.Timeout:
        print(f"Search timed out for query: {query}")
        return None
    except requests.RequestException as e:
        print(f"Search failed: {e}")
        return None
# Usage
if results := search_safe("neural networks"):
    for chunk_id, (url, content) in results.items():
        print(f"[{chunk_id}] {url}")
        print(f"  {content[:100]}...")
Async with httpx¶
import httpx
import asyncio

async def search_async(query: str) -> dict:
    """Async search using httpx."""
    async with httpx.AsyncClient() as client:
        response = await client.get(
            "http://localhost:8080/v1/search",
            params={"q": query}
        )
        response.raise_for_status()
        return response.json()
# Usage
async def main():
    results = await search_async("deep learning")
    print(f"Found {len(results)} chunks")

asyncio.run(main())
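The payoff of the async client is concurrency: a minimal sketch that issues several queries at once with asyncio.gather, reusing search_async from above:

    async def search_many(queries: list[str]) -> list[dict]:
        """Run several searches concurrently; results come back in query order."""
        return await asyncio.gather(*(search_async(q) for q in queries))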
Using JavaScript/TypeScript¶
Basic Fetch¶
async function search(query) {
    const response = await fetch(
        `http://localhost:8080/v1/search?q=${encodeURIComponent(query)}`
    );
    if (!response.ok) {
        throw new Error(`Search failed: ${response.statusText}`);
    }
    return response.json();
}
// Usage
search("data science").then(results => {
    Object.entries(results).forEach(([id, [url, content]]) => {
        console.log(`[${id}] ${url}: ${content.slice(0, 100)}...`);
    });
});
TypeScript with Types¶
interface SearchResults {
    [key: string]: [string, string];
}

async function search(query: string): Promise<SearchResults> {
    const url = new URL("http://localhost:8080/v1/search");
    url.searchParams.set("q", query);

    const response = await fetch(url.toString());
    if (!response.ok) {
        throw new Error(`Search failed: ${response.statusText}`);
    }
    return response.json();
}
// Usage
const results = await search("typescript best practices");
for (const [id, [sourceUrl, content]] of Object.entries(results)) {
    console.log(`Chunk ${id} from ${sourceUrl}`);
    console.log(content);
}
Node.js with axios¶
const axios = require('axios');

async function search(query) {
    const { data } = await axios.get('http://localhost:8080/v1/search', {
        params: { q: query }
    });
    return data;
}
// Usage
search('nodejs async patterns')
    .then(results => console.log(results))
    .catch(err => console.error('Search failed:', err.message));
Using Go¶
package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "net/url"
)

// SearchResults maps chunk IDs to [url, content] pairs.
type SearchResults map[string][2]string

func search(query string) (SearchResults, error) {
    baseURL := "http://localhost:8080/v1/search"
    params := url.Values{}
    params.Set("q", query)

    resp, err := http.Get(baseURL + "?" + params.Encode())
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    // Fail early on non-200 responses instead of decoding an error body.
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("search failed: %s", resp.Status)
    }

    var results SearchResults
    if err := json.NewDecoder(resp.Body).Decode(&results); err != nil {
        return nil, err
    }
    return results, nil
}

func main() {
    results, err := search("golang concurrency")
    if err != nil {
        fmt.Println("Error:", err)
        return
    }
    for id, tuple := range results {
        fmt.Printf("[%s] %s\n", id, tuple[0])
        // min is a builtin in Go 1.21+; note the truncation is byte-based.
        fmt.Printf("  %s...\n", tuple[1][:min(len(tuple[1]), 100)])
    }
}
Processing Results¶
Extract Unique URLs¶
def get_unique_urls(results: dict) -> set:
    """Extract unique source URLs from results."""
    return {url for url, _ in results.values()}

results = search("your query")
urls = get_unique_urls(results)
print(f"Content from {len(urls)} unique sources")
Group by URL¶
from collections import defaultdict

def group_by_url(results: dict) -> dict:
    """Group chunks by their source URL."""
    grouped = defaultdict(list)
    for chunk_id, (url, content) in results.items():
        grouped[url].append((chunk_id, content))
    return dict(grouped)

results = search("your query")
by_url = group_by_url(results)
for url, chunks in by_url.items():
    print(f"\n{url}:")
    for chunk_id, content in chunks:
        print(f"  [{chunk_id}] {content[:50]}...")