Batch IP Lookup
The Batch IP Lookup endpoint allows you to look up intelligence data for multiple IP addresses in a single request. This is more efficient than making individual requests when you need to query many IPs.
Endpoint
POST /v1/ip/batch
Authentication
This endpoint requires an API key. See Authentication for details.
Request
Headers
| Header | Required | Description |
|---|---|---|
| X-API-Key | Yes | Your API key |
| Content-Type | Yes | Must be application/json |
| Accept | No | Set to application/json (default) |
Request Body
{
"ips": ["8.8.8.8", "1.1.1.1", "208.67.222.222"]
}
| Field | Type | Required | Description |
|---|---|---|---|
| ips | array | Yes | Array of IPv4 or IPv6 addresses (max 1000) |
Batch Size Limits
| Tier | Maximum Batch Size |
|---|---|
| Free | 10 IPs |
| Starter | 100 IPs |
| Pro | 500 IPs |
| Enterprise | 1000 IPs |
Response Headers
| Header | Description |
|---|---|
| X-Request-ID | Unique identifier for the request |
| X-RateLimit-Limit | Maximum requests allowed per time window |
| X-RateLimit-Remaining | Requests remaining in current window |
| X-RateLimit-Reset | Unix timestamp when the limit resets |
Response
Success Response (200)
{
"data": {
"8.8.8.8": {
"ip": "8.8.8.8",
"prefix": "8.8.8.0/24",
"asn": 15169,
"asn_name": "Google LLC",
"country": "US",
"detection": {
"is_datacenter": true,
"is_ai_crawler": false,
"cloud_provider": "Google Cloud"
},
"threat": {
"score": 0,
"level": "low"
}
},
"1.1.1.1": {
"ip": "1.1.1.1",
"prefix": "1.1.1.0/24",
"asn": 13335,
"asn_name": "Cloudflare, Inc.",
"country": "US",
"detection": {
"is_datacenter": true,
"is_ai_crawler": false,
"cloud_provider": "Cloudflare"
},
"threat": {
"score": 0,
"level": "low"
}
},
"208.67.222.222": {
"ip": "208.67.222.222",
"prefix": "208.67.222.0/24",
"asn": 36692,
"asn_name": "Cisco OpenDNS, LLC",
"country": "US",
"detection": {
"is_datacenter": true,
"is_ai_crawler": false
},
"threat": {
"score": 0,
"level": "low"
}
}
},
"meta": {
"request_id": "550e8400-e29b-41d4-a716-446655440001",
"processing_time_ms": 15,
"dataset_version": 1
}
}
Response Structure
The response data field is a map where:
- Keys are the queried IP addresses
- Values are the lookup results (same schema as single IP lookup)
If an IP is not found in the database, it will be omitted from the results.
Error Responses
400 Bad Request - Invalid JSON
{
"data": {
"error": "invalid JSON body",
"code": "INVALID_JSON"
},
"meta": {
"processing_time_ms": 0
}
}
400 Bad Request - Missing IPs Array
{
"data": {
"error": "ips array is required",
"code": "MISSING_IPS"
},
"meta": {
"processing_time_ms": 0
}
}
400 Bad Request - Batch Too Large
{
"data": {
"error": "batch size exceeds limit of 1000",
"code": "BATCH_TOO_LARGE"
},
"meta": {
"processing_time_ms": 0
}
}
400 Bad Request - Invalid IP in Batch
{
"data": {
"error": "invalid IP in batch: not-an-ip",
"code": "INVALID_IP"
},
"meta": {
"processing_time_ms": 0
}
}
401 Unauthorized
{
"data": {
"error": "unauthorized",
"code": "UNAUTHORIZED"
},
"meta": {
"processing_time_ms": 0
}
}
429 Rate Limit Exceeded
{
"data": {
"error": "rate limit exceeded",
"code": "RATE_LIMIT_EXCEEDED"
},
"meta": {
"processing_time_ms": 0
}
}
Code Examples
cURL
# Basic batch lookup
curl -X POST "https://api.limesindex.com/v1/ip/batch" \
-H "X-API-Key: YOUR_API_KEY" \
-H "Content-Type: application/json" \
-H "Accept: application/json" \
-d '{
"ips": ["8.8.8.8", "1.1.1.1", "208.67.222.222"]
}'
# With jq for formatted output
curl -s -X POST "https://api.limesindex.com/v1/ip/batch" \
-H "X-API-Key: YOUR_API_KEY" \
-H "Content-Type: application/json" \
-d '{"ips": ["8.8.8.8", "1.1.1.1"]}' | jq .
# From a file
echo '{"ips": ["8.8.8.8", "1.1.1.1", "9.9.9.9"]}' > ips.json
curl -X POST "https://api.limesindex.com/v1/ip/batch" \
-H "X-API-Key: YOUR_API_KEY" \
-H "Content-Type: application/json" \
-d @ips.json
Python
import requests
import os
from typing import List, Dict
API_KEY = os.environ.get("LIMESINDEX_API_KEY")
BASE_URL = "https://api.limesindex.com"
def batch_lookup(ips: List[str], timeout: float = 30.0) -> Dict:
    """Look up intelligence data for multiple IP addresses.

    Args:
        ips: IPv4/IPv6 addresses to query (stay within your tier's batch limit).
        timeout: Seconds to wait for the server before giving up.

    Returns:
        The parsed JSON response: {"data": {ip: result, ...}, "meta": {...}}.

    Raises:
        requests.HTTPError: On any non-2xx response.
        requests.Timeout: If the server does not respond within `timeout`.
    """
    response = requests.post(
        f"{BASE_URL}/v1/ip/batch",
        headers={
            "X-API-Key": API_KEY,
            "Content-Type": "application/json",
            "Accept": "application/json",
        },
        json={"ips": ips},
        # requests has no default timeout; without one a stalled server
        # would block this call indefinitely.
        timeout=timeout,
    )
    response.raise_for_status()
    return response.json()
# Example usage
ips_to_check = [
    "8.8.8.8",
    "1.1.1.1",
    "208.67.222.222",
    "9.9.9.9",
    "185.199.108.153",
]
result = batch_lookup(ips_to_check)

# Print a short report for every IP the API returned data for.
for ip, record in result["data"].items():
    report = [
        f"\n{ip}:",
        f"  ASN: {record['asn']} ({record['asn_name']})",
        f"  Country: {record['country']}",
        f"  Datacenter: {record['detection']['is_datacenter']}",
        f"  Threat Level: {record['threat']['level']}",
    ]
    for line in report:
        print(line)

# Any queried IP absent from the response was not found in the database.
not_found = set(ips_to_check) - set(result["data"].keys())
if not_found:
    print(f"\nIPs not found: {not_found}")
JavaScript (Node.js)
const API_KEY = process.env.LIMESINDEX_API_KEY;
const BASE_URL = 'https://api.limesindex.com';

/**
 * POST a batch of IPs to /v1/ip/batch and return the parsed JSON body.
 * On a non-2xx response, throws an Error carrying the API's error
 * message and code.
 */
async function batchLookup(ips) {
  const url = `${BASE_URL}/v1/ip/batch`;
  const headers = {
    'X-API-Key': API_KEY,
    'Content-Type': 'application/json',
    'Accept': 'application/json'
  };

  const response = await fetch(url, {
    method: 'POST',
    headers,
    body: JSON.stringify({ ips })
  });

  if (response.ok) {
    return response.json();
  }

  // The API wraps errors in { data: { error, code }, meta: ... }.
  const error = await response.json();
  throw new Error(`API Error: ${error.data.error} (${error.data.code})`);
}
// Example usage
async function main() {
const ipsToCheck = [
'8.8.8.8',
'1.1.1.1',
'208.67.222.222',
'9.9.9.9'
];
try {
const result = await batchLookup(ipsToCheck);
// Process results
for (const [ip, data] of Object.entries(result.data)) {
console.log(`\n${ip}:`);
console.log(` ASN: ${data.asn} (${data.asn_name})`);
console.log(` Country: ${data.country}`);
console.log(` Datacenter: ${data.detection.is_datacenter}`);
console.log(` Threat Level: ${data.threat.level}`);
}
console.log(`\nProcessed in ${result.meta.processing_time_ms}ms`);
} catch (error) {
console.error('Batch lookup failed:', error.message);
}
}
main();
Go
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
)
const baseURL = "https://api.limesindex.com"
// BatchRequest is the JSON request body for POST /v1/ip/batch.
type BatchRequest struct {
	IPs []string `json:"ips"`
}

// Detection groups the classification flags returned for one IP.
type Detection struct {
	IsDatacenter  bool   `json:"is_datacenter"`
	IsTorExit     bool   `json:"is_tor_exit"`
	IsProxy       bool   `json:"is_proxy"`
	IsVPN         bool   `json:"is_vpn"`
	IsAICrawler   bool   `json:"is_ai_crawler"`
	CloudProvider string `json:"cloud_provider,omitempty"` // absent when not a known cloud
}

// Threat carries the numeric threat score and its textual level.
type Threat struct {
	Score int    `json:"score"`
	Level string `json:"level"`
}

// IPData is the lookup result for a single IP (same schema as the
// single-IP lookup endpoint).
type IPData struct {
	IP        string    `json:"ip"`
	Prefix    string    `json:"prefix"`
	ASN       int       `json:"asn"`
	ASNName   string    `json:"asn_name"`
	Country   string    `json:"country"`
	Detection Detection `json:"detection"`
	Threat    Threat    `json:"threat"`
}

// Meta mirrors the response's "meta" object.
type Meta struct {
	RequestID        string `json:"request_id"`
	ProcessingTimeMs int    `json:"processing_time_ms"`
	DatasetVersion   int    `json:"dataset_version"`
}

// BatchResponse is the top-level success payload: results keyed by
// queried IP, plus request metadata.
type BatchResponse struct {
	Data map[string]IPData `json:"data"`
	Meta Meta              `json:"meta"`
}
// batchLookup POSTs the given IPs to the batch endpoint and decodes the
// JSON reply. Any non-200 status is returned as an error containing the
// raw response body.
func batchLookup(apiKey string, ips []string) (*BatchResponse, error) {
	payload, err := json.Marshal(BatchRequest{IPs: ips})
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("POST", baseURL+"/v1/ip/batch", bytes.NewReader(payload))
	if err != nil {
		return nil, err
	}
	req.Header.Set("X-API-Key", apiKey)
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Content-Type", "application/json")

	resp, err := (&http.Client{}).Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Read the body before checking status so error responses can be
	// surfaced verbatim.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("API error: %s", string(body))
	}

	result := &BatchResponse{}
	if err := json.Unmarshal(body, result); err != nil {
		return nil, err
	}
	return result, nil
}
func main() {
	apiKey := os.Getenv("LIMESINDEX_API_KEY")
	ips := []string{"8.8.8.8", "1.1.1.1", "208.67.222.222", "9.9.9.9"}

	result, err := batchLookup(apiKey, ips)
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}

	// Note: map iteration order is randomized, so output order varies.
	for ip, data := range result.Data {
		fmt.Printf("\n%s:\n", ip)
		fmt.Printf("  ASN: %d (%s)\n", data.ASN, data.ASNName)
		fmt.Printf("  Country: %s\n", data.Country)
		fmt.Printf("  Datacenter: %t\n", data.Detection.IsDatacenter)
		fmt.Printf("  Threat Level: %s\n", data.Threat.Level)
	}

	fmt.Printf("\nProcessed in %dms\n", result.Meta.ProcessingTimeMs)
}
Use Cases
Log Analysis
import requests
from collections import defaultdict
def analyze_access_logs(log_ips: list[str]) -> dict:
    """Analyze a list of IPs from access logs.

    Deduplicates the input, looks everything up via the batch endpoint,
    and aggregates detection/threat statistics.

    Args:
        log_ips: Raw (possibly repeated) IPs extracted from access logs.

    Returns:
        A stats dict with counts per category, per-country and per-ASN
        tallies, and the list of high-threat IPs (score >= 70).
    """
    # Deduplicate first so each distinct IP is looked up exactly once.
    unique_ips = list(set(log_ips))

    # 1000 is the hard API maximum per batch; lower tiers allow less.
    batch_size = 1000
    all_results = {}
    for start in range(0, len(unique_ips), batch_size):
        chunk = unique_ips[start:start + batch_size]
        all_results.update(batch_lookup(chunk)["data"])

    stats = {
        "total": len(unique_ips),
        "datacenter": 0,
        "vpn": 0,
        "tor": 0,
        "residential": 0,
        "ai_crawler": 0,
        "by_country": defaultdict(int),
        "by_asn": defaultdict(int),
        "high_threat": [],
    }
    for ip, data in all_results.items():
        detection = data["detection"]
        stats["by_country"][data["country"]] += 1
        stats["by_asn"][data["asn_name"]] += 1
        # .get() tolerates flags the response may omit (e.g. is_residential
        # is not shown in the documented schema — confirm against the API).
        if detection.get("is_datacenter"):
            stats["datacenter"] += 1
        if detection.get("is_vpn"):
            stats["vpn"] += 1
        if detection.get("is_tor_exit"):
            stats["tor"] += 1
        if detection.get("is_residential"):
            stats["residential"] += 1
        if detection.get("is_ai_crawler"):
            stats["ai_crawler"] += 1
        if data["threat"]["score"] >= 70:
            stats["high_threat"].append(ip)
    return stats
Firewall Rule Generation
def generate_firewall_rules(ips: list[str]) -> list[str]:
    """Generate firewall block rules for suspicious IPs.

    Args:
        ips: IP addresses to evaluate (within your tier's batch limit).

    Returns:
        iptables DROP rules for IPs with threat score >= 70 or that are
        Tor exit nodes; IPs not found by the API produce no rule.
    """
    result = batch_lookup(ips)
    rules = []
    for ip, data in result["data"].items():
        # High threat score takes precedence over the Tor-exit check.
        if data["threat"]["score"] >= 70:
            rules.append(f"iptables -A INPUT -s {ip} -j DROP # Threat: {data['threat']['level']}")
        elif data["detection"].get("is_tor_exit"):
            rules.append(f"iptables -A INPUT -s {ip} -j DROP # Tor exit node")
    return rules
Geographic Distribution Analysis
/**
 * Group looked-up IPs by country and return [country, { count, ips }]
 * pairs sorted with the most common country first.
 */
async function analyzeGeographicDistribution(ips) {
  const result = await batchLookup(ips);

  // Bucket each returned IP under its country code.
  const countryStats = {};
  Object.entries(result.data).forEach(([ip, data]) => {
    if (!countryStats[data.country]) {
      countryStats[data.country] = { count: 0, ips: [] };
    }
    const bucket = countryStats[data.country];
    bucket.count++;
    bucket.ips.push(ip);
  });

  // Descending by how many IPs landed in each country.
  return Object.entries(countryStats).sort(
    (left, right) => right[1].count - left[1].count
  );
}
Performance Considerations
Optimal Batch Size
While the maximum batch size is 1000 IPs, consider these factors:
| Batch Size | Latency | Throughput |
|---|---|---|
| 1-50 | ~50ms | Good for real-time |
| 50-200 | ~100ms | Balanced |
| 200-500 | ~200ms | High throughput |
| 500-1000 | ~400ms | Maximum throughput |
Rate Limiting
Batch requests count as a single request against your rate limit, making them much more efficient for high-volume lookups.
Parallel Batching
For very large datasets, process batches in parallel:
import asyncio
import aiohttp
async def batch_lookup_async(session, ips):
    """Issue one batch lookup over an existing aiohttp session."""
    url = f"{BASE_URL}/v1/ip/batch"
    headers = {"X-API-Key": API_KEY, "Content-Type": "application/json"}
    async with session.post(url, headers=headers, json={"ips": ips}) as response:
        return await response.json()
async def process_large_dataset(all_ips, batch_size=500, concurrency=5):
    """Fan batch lookups out over the API with bounded concurrency.

    Splits `all_ips` into chunks of `batch_size` and runs at most
    `concurrency` requests in flight at once. Returns one parsed
    response per chunk, in chunk order.
    """
    chunks = [
        all_ips[start:start + batch_size]
        for start in range(0, len(all_ips), batch_size)
    ]
    async with aiohttp.ClientSession() as session:
        semaphore = asyncio.Semaphore(concurrency)

        async def guarded(chunk):
            # The semaphore caps concurrent in-flight requests.
            async with semaphore:
                return await batch_lookup_async(session, chunk)

        return await asyncio.gather(*[guarded(chunk) for chunk in chunks])
Related Endpoints
- IP Lookup - Look up a single IP address
- ASN Lookup - Get details about an Autonomous System