This is a technique for bulk-adding expirations (EXPIRE) when the number of Redis keys has grown too large.
I took care of the following points:
- Process many keys at a time using pipelining
- Sleep for a few seconds after each batch of records to avoid overloading Redis
- Randomize the expire times so that the load from key deletion is spread out
Save the output of the KEYS command to a file:
$ redis-cli -h {host} -n {database} keys "*" > keys
Grep the target keys out of the file and add an expire to each of them:
$ grep {expire_key_name} keys | python set_expires.py
set_expires.py
# -*- coding: utf-8 -*-
import redis
import time
import sys
import random
# TTL bounds: each key receives a random expiration in this range so that
# keys do not all expire (and get deleted) at the same moment.
EXPIRATION_SECONDS_MIN = 86400 * 10  # 10 days
EXPIRATION_SECONDS_MAX = 86400 * 20  # 20 days
def main() -> None:
    """Entry point: set a randomized TTL on every key name read from stdin."""
    set_expire_keys()
def set_expire_keys():
    """Read key names from stdin and set a randomized TTL on each via EXPIRE.

    EXPIRE commands are queued in a Redis pipeline and flushed every
    10,000 keys, followed by a short sleep to keep the load on Redis low.
    Reads from ``sys.stdin``; prints progress to stdout.
    """
    redis_cli = _create_client()
    pipe = redis_cli.pipeline()
    line_num = 0
    for line in sys.stdin:
        key_name = line.strip()
        # Skip blank lines so we never issue EXPIRE with an empty key name.
        if not key_name:
            continue
        # Randomize the TTL so a large batch of keys does not expire
        # (and get deleted by Redis) all at once.
        expiration_time = random.randint(EXPIRATION_SECONDS_MIN, EXPIRATION_SECONDS_MAX)
        pipe.expire(key_name, expiration_time)
        line_num += 1
        # Flush every 10,000 keys. Incrementing *before* this check fixes
        # the original off-by-one, where the pipeline was executed (and the
        # script slept for 3 seconds) right after the very first key,
        # because line_num was still 0 at that point.
        if line_num % 10000 == 0:
            print("{} lines proceeded.".format(line_num))
            pipe.execute()
            # Pause to limit the load on the Redis server.
            time.sleep(3)
            pipe = redis_cli.pipeline()
    # Flush whatever is left in the final, partially-filled batch.
    pipe.execute()
    print("{} lines proceeded.".format(line_num))
    print("complete!!")
def _create_client():
    """Build and return a Redis client bound to localhost:6379, database 1."""
    return redis.Redis(host="localhost", port=6379, db=1)
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()
Recommended Posts