2 files changed: +12 -5 lines changed

@@ -114,9 +114,14 @@ def cronhandler(event, context):
     pipeline_limit = settings.PIPELINE_LIMIT if 'pipeline_limit' not in event else event['pipeline_limit']
     realtime_limit = settings.REALTIME_LIMIT if 'realtime_limit' not in event else event['realtime_limit']
     metadata_limit = settings.METADATA_LIMIT if 'metadata_limit' not in event else event['metadata_limit']
+    fetchlogKey = event.get('fetchlogKey')

-    logger.info(f"Running cron job: {event['source']}, ascending: {ascending}")
+    # this is mostly for debugging and running the occasional file from the aws console
+    if fetchlogKey is not None:
+        limit = event.get('limit', 10)
+        return load_measurements_db(limit=limit, ascending=True, pattern=fetchlogKey)

+    logger.info(f"Running cron job: {event['source']}, ascending: {ascending}")

     # these exceptions are just a failsafe so that if something
     # unaccounted for happens we can still move on to the next
     # process. In case of this type of exception we will need to
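For reference, the new fetchlogKey branch lets a single fetchlog file be replayed from the AWS console. Below is a minimal sketch of a test event that would exercise it; the field names ('fetchlogKey', 'limit') are the ones the handler reads, the key value is taken from the test-script hunk further down, and everything else about the event shape is assumed:

    # hypothetical console test event (values illustrative, not a definitive payload)
    event = {
        "fetchlogKey": "lcs-etl-pipeline/measures/airgradient/2025-02-14/1739549254-h5b0m.json.gz",
        "limit": 10,  # optional; the handler defaults to 10
    }
    # with this event, cronhandler(event, None) short-circuits to
    #   load_measurements_db(limit=10, ascending=True, pattern=event["fetchlogKey"])
    # and returns its result instead of running the normal cron sources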
Second file:

@@ -41,20 +41,22 @@
 client = IngestClient()
 ## load all the data into the client
 client.load_keys([
-    [1, '~/Downloads/openaq-fetches/lcs-etl-pipeline/measures/lovemyair/2024-11-12/1731445632-1snpf.json', '2024-10-23']
+    # [1, '~/Downloads/openaq-fetches/lcs-etl-pipeline/measures/airgradient/2025-02-14/1739542053-5n5q.json', '2024-10-23']
+    # [1, '/home/christian/Downloads/1739444861-6bvu.json', '2025-02-13']
+    [7786652, 'lcs-etl-pipeline/measures/airgradient/2025-02-14/1739549254-h5b0m.json.gz', '2025-02-14']
 ])

 ## dump just the locations
-client.dump()
+# client.dump()

 # rollups and cached tables
 #client.process_hourly_data()
 #client.process_daily_data()
 #client.process_annual_data()
 #client.refresh_cached_tables()

-# client.dump_locations(False)
-# client.dump_measurements(load=False)
+client.dump_locations()
+client.dump_measurements(load=True)

 ## dump just the measurements
 # client.dump_measurements
 ## Dump both