
Search API Usage Examples

This document provides detailed examples of how to use the Search API, so you can get started quickly and make full use of its search features.

🚀 Quick Start

Basic Search Example

import requests
import json

def search_web(query, api_key, count=10, freshness=None):
    """
    Execute web search

    Parameters:
        query (str): Search keyword
        api_key (str): API key
        count (int): Number of results to return, default 10
        freshness (str, optional): Time range filter, e.g. "oneWeek" or "2024-01-01..2024-03-20"
    """
    url = "https://platform.kuaisou.com/api/web-search"

    # Build request data
    body = {
        "query": query,
        "offset": 1,
        "count": count
    }
    if freshness:
        body["freshness"] = freshness
    payload = json.dumps(body)

    # Set request headers
    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json'
    }

    # Send request
    response = requests.post(url, headers=headers, data=payload)

    # Check response status
    if response.status_code == 200:
        return response.json()
    else:
        raise Exception(f"Search request failed: {response.status_code} - {response.text}")

# Usage example
if __name__ == "__main__":
    # Set API key
    API_KEY = "sk-********"  # Replace with your API key

    try:
        # Execute search
        results = search_web(
            query="AI development trends",
            api_key=API_KEY,
            count=10
        )

        # Print results
        print(json.dumps(results, indent=2, ensure_ascii=False))

    except Exception as e:
        print(f"Error occurred: {str(e)}")

📝 Detailed Examples

1. Time Range Search Example

# Search content from the past week
results = search_web(
    query="Artificial Intelligence",
    api_key=API_KEY,
    count=20,
    freshness="oneWeek"  # Results from the past week
)

# Search with a custom date range
results = search_web(
    query="Tech News",
    api_key=API_KEY,
    count=10,
    freshness="2024-01-01..2024-03-20"  # Custom date range
)
2. Pagination Search Example

def search_with_pagination(query, api_key, total_pages=3):
    """
    Pagination search example
    """
    all_results = []

    for page in range(1, total_pages + 1):
        payload = json.dumps({
            "query": query,
            "offset": page,
            "count": 10
        })

        headers = {
            'Authorization': f'Bearer {api_key}',
            'Content-Type': 'application/json'
        }

        response = requests.post(
            "https://platform.kuaisou.com/api/web-search",
            headers=headers,
            data=payload
        )

        if response.status_code == 200:
            data = response.json()
            all_results.extend(data.get('webPages', {}).get('value', []))
        else:
            print(f"Page {page} request failed: {response.status_code}")

    return all_results

# Use pagination search
results = search_with_pagination("Python Tutorial", API_KEY, 3)
print(f"Total results obtained: {len(results)}")

3. Error Handling Example

import time  # Needed for the exponential backoff below

def safe_search(query, api_key, max_retries=3):
    """
    Search function with error handling and retries
    """
    for attempt in range(max_retries):
        try:
            payload = json.dumps({
                "query": query,
                "count": 10
            })

            headers = {
                'Authorization': f'Bearer {api_key}',
                'Content-Type': 'application/json'
            }

            response = requests.post(
                "https://platform.kuaisou.com/api/web-search",
                headers=headers,
                data=payload,
                timeout=10  # Set timeout
            )

            if response.status_code == 200:
                return response.json()
            elif response.status_code == 401:
                raise Exception("Invalid API key")
            elif response.status_code == 429:
                print(f"Request frequency too high, retrying... (Attempt {attempt + 1}/{max_retries})")
                time.sleep(2 ** attempt)  # Exponential backoff
            else:
                raise Exception(f"Request failed: {response.status_code}")

        except requests.exceptions.Timeout:
            print(f"Request timeout, retrying... (Attempt {attempt + 1}/{max_retries})")
        except requests.exceptions.RequestException as e:
            # Retry network-level errors; API-level errors raised above propagate immediately
            print(f"Error occurred: {str(e)}")
            if attempt == max_retries - 1:
                raise

    raise Exception("All retries failed")

# Use safe search
try:
    results = safe_search("Machine Learning", API_KEY)
    print("Search successful!")
except Exception as e:
    print(f"Search failed: {str(e)}")

4. Result Processing Example

def process_search_results(results):
    """
    Example of processing search results
    """
    if not results or 'webPages' not in results:
        return []

    web_pages = results['webPages'].get('value', [])
    processed_results = []

    for page in web_pages:
        processed_result = {
            'title': page.get('name', ''),
            'url': page.get('url', ''),
            'snippet': page.get('snippet', ''),
            'summary': page.get('summary', ''),
            'site_name': page.get('siteName', ''),
            'publish_time': page.get('datePublished', ''),
        }
        processed_results.append(processed_result)

    return processed_results

def display_results(results):
    """
    Pretty-print search results
    """
    processed = process_search_results(results)

    print(f"\n🔍 Search Results ({len(processed)} items)")
    print("=" * 60)

    for i, result in enumerate(processed, 1):
        print(f"\n📄 Result {i}")
        print(f"Title: {result['title']}")
        print(f"URL: {result['url']}")
        print(f"Summary: {result['snippet'][:100]}...")
        print(f"Site: {result['site_name']}")
        print(f"Publish Time: {result['publish_time']}")
        print("-" * 40)

# Use result processing
results = search_web("Blockchain Technology", API_KEY)
display_results(results)

🔧 cURL Examples

Basic Search Request

curl -X POST "https://platform.kuaisou.com/api/web-search" \
  -H "Authorization: Bearer YOUR_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "query": "AI development trends",
    "count": 10,
    "offset": 1
  }'

Search with Time Range

curl -X POST "https://platform.kuaisou.com/api/web-search" \
  -H "Authorization: Bearer YOUR_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "query": "Tech News",
    "count": 20,
    "freshness": "oneWeek"
  }'

📊 Response Processing

Response Format Example

{
  "_type": "SearchResponse",
  "queryContext": {
    "originalQuery": "artificial intelligence"
  },
  "webPages": {
    "webSearchUrl": "https://platform.kuaisou.com/api/web-search?query=artificial+intelligence",
    "totalEstimatedMatches": 1000,
    "value": [
      {
        "id": "https://platform.kuaisou.com/api/v1/#WebPages.0",
        "name": "Artificial Intelligence - Wikipedia",
        "url": "https://en.wikipedia.org/wiki/Artificial_intelligence",
        "displayUrl": "en.wikipedia.org/wiki/Artificial_intelligence",
        "snippet": "Artificial intelligence (AI) is intelligence demonstrated by machines, as opposed to natural intelligence displayed by animals including humans.",
        "summary": "Artificial intelligence is a branch of computer science that aims to understand the nature of intelligence and produce machines that can simulate human-like intelligence."
      }
    ]
  }
}
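
The fields above can be read directly from the parsed JSON. A minimal sketch, reusing the search_web function from the Quick Start section (the query string is just an example):

results = search_web("artificial intelligence", API_KEY)

web_pages = results.get('webPages', {})
print(f"Estimated matches: {web_pages.get('totalEstimatedMatches', 0)}")

for item in web_pages.get('value', []):
    print(f"{item.get('name', '')} - {item.get('url', '')}")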

💡 Best Practices

1. Performance Optimization

  • Set reasonable count parameters to avoid fetching too much data at once
  • Use freshness parameters to limit time ranges and improve search accuracy
  • Implement request caching to avoid repeated searches for the same keywords (see the caching sketch after this list)
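
A minimal client-side caching sketch, built on the search_web function from the Quick Start section; the cached_search helper, its in-memory dictionary, and the 5-minute TTL are illustrative assumptions, not features of the API:

import time

_CACHE = {}        # (query, count) -> (timestamp, results); illustrative in-memory cache
_CACHE_TTL = 300   # assumed time-to-live in seconds, tune to your needs

def cached_search(query, api_key, count=10):
    key = (query, count)
    entry = _CACHE.get(key)
    if entry and time.time() - entry[0] < _CACHE_TTL:
        return entry[1]  # Reuse cached results for repeated queries

    results = search_web(query, api_key, count=count)
    _CACHE[key] = (time.time(), results)
    return results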

2. Error Handling

  • Always check response status codes
  • Implement retry mechanisms for temporary errors
  • Monitor API call frequency to avoid exceeding limits (a simple client-side throttle is sketched after this list)
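
For the last point, a simple client-side throttle can help; the sketch below is only an illustration, and the one-request-per-second spacing is an assumed value, not a documented quota:

import time

_MIN_INTERVAL = 1.0        # assumed minimum spacing between requests, in seconds
_last_request_time = 0.0

def throttled_search(query, api_key):
    global _last_request_time
    wait = _MIN_INTERVAL - (time.time() - _last_request_time)
    if wait > 0:
        time.sleep(wait)  # Space out calls so the assumed rate limit is not exceeded
    _last_request_time = time.time()
    return safe_search(query, api_key)  # safe_search (example 3) adds retries and backoff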

3. User Experience

  • Provide search progress indicators (see the pagination progress sketch after this list)
  • Implement search result caching
  • Add search suggestion functionality
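
As one way to surface progress, the sketch below reports page-by-page status for a paginated search; it follows the search_with_pagination pattern from example 2, and the progress output format is purely illustrative:

def search_with_progress(query, api_key, total_pages=3):
    all_results = []
    for page in range(1, total_pages + 1):
        # Show progress before each page request
        print(f"🔍 Fetching page {page}/{total_pages} ...", end=" ", flush=True)
        payload = json.dumps({"query": query, "offset": page, "count": 10})
        headers = {
            'Authorization': f'Bearer {api_key}',
            'Content-Type': 'application/json'
        }
        response = requests.post(
            "https://platform.kuaisou.com/api/web-search",
            headers=headers, data=payload, timeout=10
        )
        if response.status_code == 200:
            page_results = response.json().get('webPages', {}).get('value', [])
            all_results.extend(page_results)
            print(f"done ({len(page_results)} results)")
        else:
            print(f"failed ({response.status_code})")
    return all_results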