# I'm having some caching problems with different versions of build files; this is just to log the filenames to help debug. Every filename has a UUID in it, so I can see which versions are live and compare to on disk.
# f-strings only work in Python 3.6+, so if it blows up, it's probably because I'm in some obsolete pipenv.
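# Flow: purge the Cloudflare cache, compare the local dist/ filenames against what the live
# site is serving, then append one row to a CSV log.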
import datetime
from glob import glob
import os
import requests
import re
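# Timestamp for this run; it becomes the "date" column in the CSV log below.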
now = datetime.datetime.now().strftime("%m-%d-%Y %I:%M%p")
labels = ["css", "app_js", "manifest_js", "vendor_js"]
# CLEAR_THE_CLOUDFLARE_CACHE
email = os.environ["cloudflareemail"]
apikey = os.environ["cloudflareapi"]
zone = os.environ['cloudflarezone']
endpoint = "https://api.cloudflare.com/client/v4/zones/{}/purge_cache".format(zone)
headers = {"X-Auth-Email": email, "X-Auth-Key": apikey, "Content-Type": "application/json"}
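# Purge everything for the zone via the Cloudflare v4 API; the asserts abort the script if the
# response reports errors or a non-success result.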
def clear_cache():
    print("Trying to clear cache.")
    resp = requests.post(endpoint, headers=headers, json={"purge_everything": True})
    output = resp.json()
    print(output)
    assert output["errors"] == [], "ABORTING: cloudflare api call produced errors"
    assert output['success'], "ABORTING: cloudflare API call did not produce success"
    return output['success']
cache_api_cleared = clear_cache()
# GET THE CORRECT FILENAMES FROM CURRENT BUILD
jsfiles = sorted([os.path.basename(x) for x in glob("dist/static/js/*.js")])
cssfiles = sorted([os.path.basename(x) for x in glob("dist/static/css/*.css")])
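# cssfiles come first, then the js bundles; sorted() puts them in app, manifest, vendor order,
# which is what lines them up with `labels` in the zip below.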
local_files = dict(zip(labels, cssfiles + jsfiles))
# GET THE FILENAMES THAT SHOW UP ONLINE
livehtml = requests.get("https://gowder.io").text
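# Each pattern captures the versioned basename referenced in the live HTML as group 1.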
regex_patterns = [r'static\/css\/(app.*?\.css)',
                  r'static\/js\/(app.*?\.js)',
                  r'static\/js\/(manifest.*?\.js)',
                  r'static\/js\/(vendor.*?\.js)']
regexes = dict(zip(labels, regex_patterns))
def extract_remote_file(label):
    return label, re.search(regexes[label], livehtml).group(1)
remote_files = {k: v for (k, v) in map(extract_remote_file, labels)}
# ALERT IF SOMETHING IS WRONG
fuckups = []
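# Returns True (and complains) when the local build filename and the live one disagree.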
def check_for_fuckup(name):
    if local_files[name] != remote_files[name]:
        print(f'{name} DOES NOT MATCH REMOTE. Local: {local_files[name]}, Remote: {remote_files[name]}')
        return True
    return False
for x in labels:
    fucked_up = check_for_fuckup(x)
    if fucked_up:
        fuckups.append(x)
all_files_match = not bool(fuckups)
if all_files_match:
    print("All files successfully matched.")
else:
    print("Had a cache error: not all files matched.")
# LOG IT
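# CSV columns: date, cache_api_cleared, all_files_match, then a local/remote filename pair for
# each of the four labels.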
extended_labels = ['date', 'cache_api_cleared', 'all_files_match']
for x in labels:
    extended_labels.append('local_' + x)
    extended_labels.append('remote_' + x)
header = f'{",".join(extended_labels)}\n'
logfields = [now, cache_api_cleared, all_files_match]
for x in labels:
    logfields.append(local_files[x])
    logfields.append(remote_files[x])
logline = f'{",".join([str(x) for x in logfields])}\n'
logfilename = "cache_invalidation_log.csv"
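# Write the header only if the log doesn't exist yet, then append one row per run.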
if not os.path.exists(logfilename):
    with open(logfilename, "w") as logfile:
        logfile.write(header)
with open(logfilename, "a") as logfile:
    logfile.write(logline)
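# Typical usage (my assumption about the surrounding workflow, not something the script checks):
# run this from the project root right after the frontend build finishes, with cloudflareemail,
# cloudflareapi, and cloudflarezone exported in the environment, so the dist/ globs and the
# os.environ lookups both resolve.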