add cache for certs to prevent hitting the rate limit

Joachim Lusiardi 2020-05-16 19:04:13 +02:00
parent 7f48055b47
commit 4aa0116f4d
1 changed file with 23 additions and 4 deletions
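Context for the caching change: Let's Encrypt rate-limits issuance, and in particular caps how often a certificate for the exact same set of domains can be re-issued, so requesting a fresh certificate on every container event can lock the proxy out. Stripped of the surrounding script, the pattern this commit introduces is a TTL cache keyed by the canonicalized domain set. A minimal sketch of that pattern, assuming a hypothetical issue_cert callable in place of the real ACME round trip (none of these names come from the repository):

    import time

    max_age = 24 * 60 * 60  # reuse cached cert data for up to one day

    def get_cert(domains, cache, issue_cert):
        # canonical, hashable cache key: discovery order must not matter
        key = tuple(sorted(domains))
        entry = cache.get(key)
        if entry is not None and time.time() - entry['time'] <= max_age:
            # fresh enough: skip the rate-limited issuance entirely
            return entry['data']
        data = issue_cert(key)
        cache[key] = {'time': time.time(), 'data': data}
        return data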


@@ -15,6 +15,7 @@ from docker import Client
 cert_path = '/data/haproxy'
 cert_file = cert_path + '/cert.pem'
 pid_file = '/haproxy.pid'
+max_age = 24 * 60 * 60
 delay = 10
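The new max_age constant works out to 24 * 60 * 60 = 86400 seconds, i.e. a cached certificate is treated as fresh for one day before a re-issue is attempted.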
@@ -128,8 +129,8 @@ def create_haproxy_cert():
     fullchain = read_file(youngest_directory + '/fullchain.pem')
     privkey = read_file(youngest_directory + '/privkey.pem')
     write_file(cert_file, fullchain + privkey)
-    logging.info('file written')
+    return fullchain + privkey

 def create_cert_data_standalone(domains):
     if len(domains) == 0:
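The change above turns create_haproxy_cert() from a write-only helper into one that also returns the concatenated fullchain + privkey, so the main loop below can store that data in the cache rather than only finding it on disk.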
@@ -215,14 +216,32 @@ if __name__ == '__main__':
     t = threading.Thread(target=cert_watcher)
     t.start()
+    cert_cache = {}
     for line in client.events():
         line_str = line.decode("utf-8")
         event = json.loads(line_str)
-        if event['Action'] in ['start', 'stop']:
+        if event['Action'] in ['create']:
             # check if there is any domain name configured
             container_id = event['id']
             if len(list_domains.handle_container(client, container_id)) > 0:
                 resolved_domains = list_domains.get_resolving_domains_from_containers(client)
+                # tuple so the sorted domain set is hashable and can key the cache dict
+                resolved_domains = tuple(sorted(resolved_domains))
+                if resolved_domains in cert_cache:
+                    cached_data = cert_cache[resolved_domains]
+                    if time.time() - cached_data['time'] <= max_age:
+                        logging.info('using cached cert: %s s old', time.time() - cached_data['time'])
+                        write_file(cert_file, cached_data['data'])
+                        logging.info('file written')
+                        continue
+                # no previous data or too old
+                logging.info('create new cert')
                 create_cert_data_standalone(resolved_domains)
-                create_haproxy_cert()
+                cert_data = create_haproxy_cert()
+                cert_cache[resolved_domains] = {
+                    'time': time.time(),
+                    'data': cert_data
+                }
+                write_file(cert_file, cert_data)
+                logging.info('file written')
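A note on the cache key: the resolved domains are sorted and turned into a tuple before the lookup, so the same set of domains always maps to the same cache entry regardless of the order in which containers were enumerated (a plain list would not work here, since lists are unhashable and cannot be dict keys). A hypothetical illustration:

    key_a = tuple(sorted(['b.example.org', 'a.example.org']))
    key_b = tuple(sorted(['a.example.org', 'b.example.org']))
    assert key_a == key_b  # both ('a.example.org', 'b.example.org')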