Class: CacheStache::CacheClient

Inherits:
Object
  • Object
show all
Defined in:
lib/cache_stache/cache_client.rb

Constant Summary collapse

INCR_AND_EXPIRE_SCRIPT =

Lua script for atomic increment with expiry

<<~LUA
  -- Atomically apply a batch of HINCRBYFLOAT increments to one hash key,
  -- then ensure the key carries at least `expire_seconds` of TTL.
  local key = KEYS[1]
  local expire_seconds = tonumber(ARGV[1])
  -- ARGV[2] is a JSON object of field -> numeric delta pairs
  local increments = cjson.decode(ARGV[2])

  for field, value in pairs(increments) do
    redis.call('HINCRBYFLOAT', key, field, value)
  end

  -- TTL of -1 means the key exists with no expiry; (re)set the expiry when
  -- missing or shorter than the requested retention window.
  local ttl = redis.call('TTL', key)
  if ttl == -1 or ttl < expire_seconds then
    redis.call('EXPIRE', key, expire_seconds)
  end

  return redis.status_reply('OK')
LUA

Instance Method Summary collapse

Constructor Details

#initialize(config = CacheStache.configuration) ⇒ CacheClient

Returns a new instance of CacheClient.



26
27
28
29
30
31
# File 'lib/cache_stache/cache_client.rb', line 26

# Builds a cache client backed by a Redis connection pool.
#
# @param config [CacheStache::Configuration] settings source; defaults to
#   the gem-wide configuration object
def initialize(config = CacheStache.configuration)
  @config = config
  pool_size = @config.redis_pool_size
  @pool = ConnectionPool.new(size: pool_size) { @config.build_redis }
end

Instance Method Details

#estimate_storage_size ⇒ Object



113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
# File 'lib/cache_stache/cache_client.rb', line 113

# Estimates the Redis memory footprint of all retained stat buckets.
#
# The estimate is analytical: theoretical bucket count multiplied by a
# per-bucket byte cost derived from field counts plus rough Redis
# overhead constants.
#
# @return [Hash] :max_buckets, :fields_per_bucket, :bytes_per_bucket,
#   :total_bytes, and a :human_size string; on any failure returns
#   {total_bytes: 0, human_size: "Unknown"}
def estimate_storage_size
  # Theoretical number of buckets alive at once within the retention window.
  bucket_count = (@config.retention_seconds.to_f / @config.bucket_seconds).ceil

  # Two overall counters (hits/misses) plus a hits/misses pair per keyspace.
  field_count = (@config.keyspaces.size + 1) * 2

  # Per-field cost: ~20B field name + ~8B stringified float + ~24B hash overhead.
  per_field_bytes = 52
  # Bucket key name ("cache_stache:v1:environment:timestamp" ~45B) plus
  # Redis per-key overhead (~96B).
  per_key_bytes = 141

  per_bucket_bytes = (field_count * per_field_bytes) + per_key_bytes
  # Grand total across all buckets, plus the config metadata key (~200B).
  estimated_total = (bucket_count * per_bucket_bytes) + 200

  {
    max_buckets: bucket_count,
    fields_per_bucket: field_count,
    bytes_per_bucket: per_bucket_bytes,
    total_bytes: estimated_total,
    human_size: format_bytes(estimated_total)
  }
rescue => e
  Rails.logger.error("CacheStache: Failed to estimate storage size: #{e.message}")
  {total_bytes: 0, human_size: "Unknown"}
end

#fetch_buckets(from_ts, to_ts) ⇒ Object



51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# File 'lib/cache_stache/cache_client.rb', line 51

# Reads every stat bucket between the two timestamps in a single
# pipelined round trip.
#
# @param from_ts [Integer] range start timestamp
# @param to_ts [Integer] range end timestamp
# @return [Array<Hash>] one {timestamp:, stats:} entry per non-empty
#   bucket, with stats values coerced to Float; [] on error or empty range
def fetch_buckets(from_ts, to_ts)
  keys = bucket_keys_in_range(from_ts, to_ts)
  return [] if keys.empty?

  Rails.logger.debug { "CacheStache: Redis fetching #{keys.size} buckets from #{from_ts} to #{to_ts}" }

  without_instrumentation do
    @pool.with do |redis|
      Rails.logger.debug { "CacheStache: Redis PIPELINE hgetall for #{keys.size} keys" }
      raw_results = redis.pipelined do |pipe|
        keys.each { |key| pipe.hgetall(key) }
      end

      # Pair each key with its pipelined reply, dropping missing/empty buckets.
      keys.zip(raw_results).filter_map do |key, fields|
        next if fields.nil? || fields.empty?

        {
          timestamp: extract_timestamp_from_key(key),
          stats: fields.transform_values(&:to_f)
        }
      end
    end
  end
rescue => e
  Rails.logger.error("CacheStache: Failed to fetch buckets: #{e.message}")
  []
end

#fetch_config_metadata ⇒ Object



98
99
100
101
102
103
104
105
106
107
108
109
110
111
# File 'lib/cache_stache/cache_client.rb', line 98

# Fetches the previously stored configuration metadata for the current
# Rails environment from Redis. (Method name restored from the
# #fetch_config_metadata heading; the rendered source dropped it.)
#
# @return [Hash, nil] parsed JSON metadata, or nil when the key is absent
#   or the fetch fails
def fetch_config_metadata
  key = "cache_stache:v1:#{@config.rails_env}:config"

  without_instrumentation do
    @pool.with do |redis|
      Rails.logger.debug { "CacheStache: Redis GET #{key}" }
      data = redis.get(key)
      data ? JSON.parse(data) : nil
    end
  end
rescue => e
  Rails.logger.error("CacheStache: Failed to fetch config metadata: #{e.message}")
  nil
end

#increment_stats(bucket_ts, increments) ⇒ Object



33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
# File 'lib/cache_stache/cache_client.rb', line 33

# Atomically applies a batch of HINCRBYFLOAT increments to one stat
# bucket and refreshes its TTL via the server-side Lua script.
#
# @param bucket_ts [Integer] bucket timestamp used to derive the Redis key
# @param increments [Hash] field => numeric delta pairs
# @return [Object, nil] the script reply; errors are logged and swallowed
def increment_stats(bucket_ts, increments)
  key = bucket_key(bucket_ts)

  without_instrumentation do
    @pool.with do |redis|
      Rails.logger.debug { "CacheStache: Redis EVAL increment on #{key} with #{increments.size} fields" }
      script_argv = [retention_seconds, increments.to_json]
      redis.eval(INCR_AND_EXPIRE_SCRIPT, keys: [key], argv: script_argv)
    end
  end
rescue => e
  Rails.logger.error("CacheStache: Failed to increment stats: #{e.message}")
  Rails.logger.error(e.backtrace.join("\n"))
end

#store_config_metadata ⇒ Object



79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
# File 'lib/cache_stache/cache_client.rb', line 79

# Persists the current gem configuration to Redis so consumers can detect
# when bucket/retention settings change. (Method name and the `metadata`
# local restored from the #store_config_metadata heading and the
# `.to_json` call; the rendered source dropped both identifiers.)
#
# @return [void] errors are logged and swallowed
def store_config_metadata
  key = "cache_stache:v1:#{@config.rails_env}:config"
  metadata = {
    bucket_seconds: @config.bucket_seconds.to_i,
    retention_seconds: retention_seconds,
    updated_at: Time.current.to_i
  }

  without_instrumentation do
    @pool.with do |redis|
      # Use SETEX for atomic set-with-expiry (single command)
      Rails.logger.debug { "CacheStache: Redis SETEX #{key} #{retention_seconds}" }
      redis.setex(key, retention_seconds, metadata.to_json)
    end
  end
rescue => e
  Rails.logger.error("CacheStache: Failed to store config metadata: #{e.message}")
end