
Custom RPC Proxy

Build a caching JSON-RPC proxy that cuts redundant requests and improves response times.

rpc-proxy.ts
import { createServer } from 'http'
import { createPublicClient, http } from 'viem'
import { riseTestnet } from 'viem/chains'
 
class CachingProxy {
  private cache = new Map<string, { data: any; timestamp: number }>()
  private cacheDuration = 1000 // cache window in milliseconds
  private client = createPublicClient({
    chain: riseTestnet,
    transport: http()
  })
  
  async handleRequest(method: string, params: any[]): Promise<any> {
    const cacheKey = `${method}:${JSON.stringify(params)}`
    
    // Check cache for read methods
    if (this.isReadMethod(method)) {
      const cached = this.cache.get(cacheKey)
      if (cached && Date.now() - cached.timestamp < this.cacheDuration) {
        console.log(`Cache hit: ${method}`)
        return cached.data
      }
    }
    
    // Forward to RISE node
    console.log(`Forwarding: ${method}`)
    // viem types request() against known RPC methods, so cast when forwarding arbitrary ones
    const result = await this.client.request({ method, params } as any)
    
    // Cache read results
    if (this.isReadMethod(method)) {
      this.cache.set(cacheKey, {
        data: result,
        timestamp: Date.now()
      })
    }
    
    return result
  }
  
  private isReadMethod(method: string): boolean {
    return [
      'eth_getBalance',
      'eth_getCode',
      'eth_getStorageAt',
      'eth_call',
      'eth_getTransactionCount'
    ].includes(method)
  }
}
 
// Create HTTP server
const proxy = new CachingProxy()
 
createServer(async (req, res) => {
  if (req.method !== 'POST') {
    res.writeHead(405)
    res.end()
    return
  }
  
  let body = ''
  req.on('data', chunk => body += chunk)
  req.on('end', async () => {
    try {
      const { method, params, id } = JSON.parse(body)
      const result = await proxy.handleRequest(method, params)
      
      res.writeHead(200, { 'Content-Type': 'application/json' })
      res.end(JSON.stringify({
        jsonrpc: '2.0',
        id,
        result
      }))
    } catch (error) {
      res.writeHead(200, { 'Content-Type': 'application/json' })
      res.end(JSON.stringify({
        jsonrpc: '2.0',
        id: null, // per JSON-RPC 2.0, use null when the request id cannot be determined
        error: {
          code: -32603,
          message: error instanceof Error ? error.message : String(error)
        }
      }))
    }
  })
}).listen(8545, () => {
  console.log('Proxy running on http://localhost:8545')
})
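
To smoke-test the proxy, send it a raw JSON-RPC request. A minimal sketch using Node 18+'s built-in fetch (the zero address is just a placeholder):

// smoke-test.ts: start the proxy first, then run this script
const response = await fetch('http://localhost:8545', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    jsonrpc: '2.0',
    id: 1,
    method: 'eth_getBalance',
    params: ['0x0000000000000000000000000000000000000000', 'latest']
  })
})
console.log(await response.json())

Run it twice within a second and the proxy log should show a cache hit on the second call.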

Features

  • Smart Caching: Caches read-only methods for 1 second
  • Transparent: Works with any Ethereum client
  • Performance: Collapses identical read calls within the cache window into a single upstream request
  • Configurable: Adjust the cache duration and the list of cached methods (see the sketch after this list)
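
Making the cache window and method list configurable only takes a constructor option. A minimal sketch of such a variant (the ProxyOptions shape and its defaults are illustrative, not part of the proxy above):

import { createPublicClient, http } from 'viem'
import { riseTestnet } from 'viem/chains'

// Hypothetical options shape for a configurable variant
interface ProxyOptions {
  cacheDuration?: number  // cache window in milliseconds
  readMethods?: string[]  // JSON-RPC methods eligible for caching
}

class ConfigurableProxy {
  private cache = new Map<string, { data: any; timestamp: number }>()
  private cacheDuration: number
  private readMethods: Set<string>
  private client = createPublicClient({ chain: riseTestnet, transport: http() })

  constructor(options: ProxyOptions = {}) {
    this.cacheDuration = options.cacheDuration ?? 1000
    this.readMethods = new Set(options.readMethods ?? ['eth_getBalance', 'eth_call'])
  }

  async handleRequest(method: string, params: any[]): Promise<any> {
    const key = `${method}:${JSON.stringify(params)}`
    const cached = this.cache.get(key)
    // Serve from cache when the entry is still inside the window
    if (this.readMethods.has(method) && cached && Date.now() - cached.timestamp < this.cacheDuration) {
      return cached.data
    }
    const result = await this.client.request({ method, params } as any)
    if (this.readMethods.has(method)) {
      this.cache.set(key, { data: result, timestamp: Date.now() })
    }
    return result
  }
}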

Advanced Proxy Features

class AdvancedProxy extends CachingProxy {
  private requestCount = new Map<string, number>()
  private blocklist: Set<string>
  
  constructor(blockedMethods: string[] = []) {
    super()
    this.blocklist = new Set(blockedMethods)
  }
  
  async handleRequest(method: string, params: any[]): Promise<any> {
    // Method filtering: reject blocked methods before they consume rate budget
    if (this.blocklist.has(method)) {
      throw new Error('Method not allowed')
    }
    
    // Simple per-method rate limiting: 100 requests per reset window
    const count = this.requestCount.get(method) || 0
    if (count >= 100) {
      throw new Error('Rate limit exceeded')
    }
    this.requestCount.set(method, count + 1)
    
    // Call parent implementation
    return super.handleRequest(method, params)
  }
  
  // Reset counters every minute
  startRateLimitReset() {
    setInterval(() => {
      this.requestCount.clear()
    }, 60000)
  }
}
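
Drop it into the server in place of CachingProxy; the blocked method here is just an example:

const proxy = new AdvancedProxy(['debug_traceTransaction']) // example blocklist
proxy.startRateLimitReset()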

Usage with viem

import { createPublicClient, http } from 'viem'
import { riseTestnet } from 'viem/chains'
 
// Connect through your proxy
const client = createPublicClient({
  chain: riseTestnet,
  transport: http('http://localhost:8545')
})
 
// Use normally - caching happens transparently
const balance = await client.getBalance({
  address: '0x...'
})

Performance Benefits

  1. Reduced Latency: Cached responses skip the network round trip entirely (see the timing sketch after this list)
  2. Lower Costs: Fewer RPC calls to paid providers
  3. Better UX: Faster application response times
  4. Scalability: Handle more users with same infrastructure
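
To see the difference, time two identical calls through the proxy. A minimal sketch (the zero address is a placeholder; start the proxy first):

import { createPublicClient, http } from 'viem'
import { riseTestnet } from 'viem/chains'

const client = createPublicClient({
  chain: riseTestnet,
  transport: http('http://localhost:8545') // through the proxy
})

const address = '0x0000000000000000000000000000000000000000' // placeholder

// First call misses the cache and goes to the node
let start = performance.now()
await client.getBalance({ address })
console.log(`cold: ${(performance.now() - start).toFixed(1)}ms`)

// Second call within the cache window is served from memory
start = performance.now()
await client.getBalance({ address })
console.log(`warm: ${(performance.now() - start).toFixed(1)}ms`)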

Monitoring and Analytics

class MonitoredProxy extends CachingProxy {
  private metrics = {
    totalRequests: 0,
    cacheHits: 0,
    cacheMisses: 0,
    errorCount: 0
  }
  
  async handleRequest(method: string, params: any[]): Promise<any> {
    this.metrics.totalRequests++
    try {
      return await super.handleRequest(method, params)
    } catch (error) {
      this.metrics.errorCount++
      throw error
    }
  }
  
  // Note: counting cacheHits/cacheMisses requires a hook in CachingProxy
  // (e.g. an onCacheHit callback), since its cache is private.
  
  getMetrics() {
    const hitRate = this.metrics.cacheHits / 
      (this.metrics.cacheHits + this.metrics.cacheMisses) || 0
    
    return {
      ...this.metrics,
      cacheHitRate: `${(hitRate * 100).toFixed(2)}%`
    }
  }
}
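
A simple way to surface these numbers is periodic logging (the interval is arbitrary):

const proxy = new MonitoredProxy()

// Log metrics every 30 seconds
setInterval(() => {
  console.log(proxy.getMetrics())
}, 30000)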

Next Steps

  • Add persistent caching with Redis (see the sketch after this list)
  • Implement request batching
  • Add WebSocket support
  • Create a load balancer for multiple nodes
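
For the Redis step, the in-memory Map can be swapped for a shared store so several proxy instances share one cache. A minimal sketch using the ioredis package (the key prefix and TTL are arbitrary choices):

import Redis from 'ioredis'

// Assumes a local Redis on the default port
const redis = new Redis()

async function getCached(method: string, params: any[]): Promise<any | null> {
  const key = `rpc:${method}:${JSON.stringify(params)}`
  const hit = await redis.get(key)
  return hit ? JSON.parse(hit) : null
}

async function setCached(method: string, params: any[], result: any): Promise<void> {
  const key = `rpc:${method}:${JSON.stringify(params)}`
  // 'PX' sets the expiry in milliseconds, matching the 1-second window above
  await redis.set(key, JSON.stringify(result), 'PX', 1000)
}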