@@ -42,6 +42,10 @@ def read_pages() -> str:
42
42
# Page list loaded once at import time ("sayfalar" is Turkish for "pages").
# NOTE(review): presumably the set of site pages to upload/purge — confirm
# against read_pages(), which is defined above this visible span.
SAYFALAR = read_pages ()
43
43
44
44
45
def chunks(xs, n):
    """Lazily split the sequence *xs* into consecutive slices of at most *n* items.

    Returns a generator of slices; the final slice is shorter than *n*
    when ``len(xs)`` is not an exact multiple of *n*.
    """
    offsets = range(0, len(xs), n)
    return (xs[off:off + n] for off in offsets)
45
49
def get_existing (namespace_id : str ) -> set [str ]:
46
50
res = requests .get (
47
51
ACCOUNTS_URL + 'keys' , headers = {'authorization' : 'Bearer ' + CF_UPLOADER_TOKEN })
@@ -67,15 +71,17 @@ def batch_upload(names: list[str]):
67
71
68
72
69
73
def purge_cache(assets, batch_size=20):
    """Purge the given assets from the Cloudflare edge cache, in batches.

    The purge endpoint limits how many files one request may name, so the
    asset list is split into slices of *batch_size* and one POST is issued
    per slice.

    Args:
        assets: sequence of asset paths; each is appended to ROUTE to form
            the full URL to purge.
        batch_size: maximum number of files per purge request. Defaults to
            20, the value previously hard-coded here.

    Returns:
        list of requests.Response objects, one per batch, so callers can
        inspect purge results (previously the responses were discarded).
    """
    responses = []
    for i, batch in enumerate(chunks(assets, batch_size)):
        # Compact JSON (no whitespace) to keep the request body small.
        to_purge = json.dumps(
            {'files': [ROUTE + asset for asset in batch]},
            separators=(',', ':'))
        res = requests.post(ZONES_URL + 'purge_cache', data=to_purge, headers={
            'content-type': 'application/json',
            'authorization': 'Bearer ' + CF_UPLOADER_TOKEN
        })
        print(f"Batch {i} result: {res.status_code}")
        responses.append(res)
    return responses
79
85
80
86
81
87
def is_static_upload (name : str ) -> bool :
0 commit comments