11
11
from ..utilities .log import log_stuff
12
12
13
13
14
def _get_redis_client(redis_url):
    """
    Returns a Redis client instance using a Redis URL.

    :param redis_url: Redis connection URL (e.g., "redis://localhost:6379/0")
    :return: Redis client instance, or None if the connection could not be created
    """
    try:
        # decode_responses=True makes reads return str instead of bytes
        return redis.from_url(redis_url, decode_responses=True)
    except redis.RedisError as e:
        print(f"Redis connection error: {e}")
        return None
21
28
- def get_key_from_redis (redis_url , key ):
29
- """
30
- Retrieves a key's value from Redis. Returns None if the key does not exist.
31
-
32
- :param redis_url: Redis connection URL
33
- :param key: Key to retrieve from Redis
34
- :return: Value of the key or None if key does not exist
35
- """
36
- client = get_redis_client (redis_url )
22
+ def _get_key_from_redis (redis_url , key ):
23
+ client = _get_redis_client (redis_url )
37
24
if client :
38
25
try :
39
26
return client .get (key ) # Returns None if key does not exist
@@ -42,59 +29,39 @@ def get_key_from_redis(redis_url, key):
42
29
return None
43
30
44
31
45
def _set_key_in_redis(redis_url, key, value, expiry=None):
    """
    Sets a key-value pair in Redis with an optional expiry time.

    :param redis_url: Redis connection URL
    :param key: Key to store in Redis
    :param value: Value to store in Redis
    :param expiry: Expiry time in seconds (optional); when None the key never expires
    """
    client = _get_redis_client(redis_url)
    if client:
        try:
            if expiry:
                client.set(key, value, ex=expiry)  # set key with expiry
            else:
                client.set(key, value)  # set key without expiry
        except redis.RedisError as e:
            print(f"Redis error: {e}")
66
-
67
44
def _get_country_key (country_code ):
68
45
"""Returns the key name for the given country to be stored in redis cache"""
69
46
return "codegreen_generation_public_data_" + country_code
70
47
71
- def round_to_nearest_hour (dt ):
72
- """
73
- Rounds a given datetime to the nearest hour.
74
- """
48
+ def _round_to_nearest_hour (dt ):
49
+ """ Rounds a given datetime to the nearest hour."""
75
50
return dt .replace (minute = 0 , second = 0 , microsecond = 0 )
76
51
77
def _get_time_range(nHours):
    """Returns a (start_date, end_date) tuple where end_date is the current
    time truncated to the hour and start_date lies nHours before it."""
    now = datetime.now().replace(microsecond=0)
    end_date = _round_to_nearest_hour(now)
    return end_date - timedelta(hours=nHours), end_date
86
57
87
def _gather_energy_data(country, start_time, end_time):
    """Gets energy generation data from public energy sources (online) for the given country and time window."""
    # interval60=True requests hourly-resolution data
    response = et.get_actual_production_percentage(country, start_time, end_time, interval60=True)
    return response["data"]
93
62
94
- def get_filtered_data (dataframe , start_time , end_time ):
95
- """
96
- Function that returns a tuple (partial: True/False, data: DataFrame/None)
97
- indicating if the data is partially available and the corresponding data.
63
+ def _get_filtered_data (dataframe , start_time , end_time ):
64
+ """Function that returns a tuple (partial: True/False, data: DataFrame/None) indicating if the data is partially available and the corresponding data.
98
65
"""
99
66
if dataframe .empty :
100
67
return (False , None )
@@ -137,12 +104,12 @@ def _sync_offline_file(country):
137
104
138
105
current_time = datetime .now ()
139
106
# storing data from 5 hours from now.
140
- end_time = round_to_nearest_hour (current_time ) - timedelta (hours = 5 )
107
+ end_time = _round_to_nearest_hour (current_time ) - timedelta (hours = 5 )
141
108
142
109
if not (os .path .exists (json_file_path ) and os .path .exists (csv_file_path )):
143
110
print ("Files do not exist. Gathering new data." )
144
111
try :
145
- data = gather_energy_data (country , start_time , end_time )
112
+ data = _gather_energy_data (country , start_time , end_time )
146
113
147
114
data .to_csv (csv_file_path , index = False )
148
115
metadata = {
@@ -153,8 +120,8 @@ def _sync_offline_file(country):
153
120
"updated_on" : datetime .now ().strftime ("%Y-%m-%d %H:%M:%S" ),
154
121
}
155
122
with open (json_file_path , "w" ) as f :
156
- print (metadata )
157
123
json .dump (metadata , f , indent = 4 )
124
+ log_stuff ("Successfully created new offline file for " + country )
158
125
return data
159
126
except Exception as e :
160
127
print (e )
@@ -172,12 +139,12 @@ def _sync_offline_file(country):
172
139
update_required = False
173
140
if start_diff .total_seconds () > 0 :
174
141
print ("Gathering missing data before current start time." )
175
- new_data = gather_energy_data (country , start_time , current_start_time )
142
+ new_data = _gather_energy_data (country , start_time , current_start_time )
176
143
df = pd .concat ([new_data , df ], ignore_index = True )
177
144
update_required = True
178
145
if end_diff .total_seconds () > 0 :
179
146
print ("Gathering missing data after current end time." )
180
- new_data = gather_energy_data (country , current_end_time , end_time )
147
+ new_data = _gather_energy_data (country , current_end_time , end_time )
181
148
#print(new_data)
182
149
df = pd .concat ([df , new_data ], ignore_index = True )
183
150
update_required = True
@@ -190,39 +157,39 @@ def _sync_offline_file(country):
190
157
metadata ["updated_on" ] = datetime .now ().strftime ("%Y-%m-%d %H:%M:%S" )
191
158
with open (json_file_path , "w" ) as f :
192
159
json .dump (metadata , f , indent = 4 )
160
+ log_stuff ("Successfully synced offline file for " + country )
193
161
else :
194
162
print ("No update required" )
195
163
#last_72_hours = end_time - timedelta(hours=72)
196
164
#recent_data = df[pd.to_datetime(df["timestamp"]) >= last_72_hours]
197
- log_stuff ( "Successfully synced offline file for " + country )
165
+
198
166
199
167
def _sync_offline_cache (country ):
200
168
# print("syncs offline cache for the given country")
201
169
if not Config .get ("enable_energy_caching" ):
202
170
raise Exception ("This method cannot be used to get data since enable_energy_caching option is not enabled" )
203
171
204
172
c_key = _get_country_key (country )
205
-
206
- data = get_key_from_redis (Config .get ("energy_redis_path" ),c_key )
173
+ hour_count = int (Config .get ("generation_cache_hour" ))
174
+ quarter_time = hour_count / 4
175
+ data = _get_key_from_redis (Config .get ("energy_redis_path" ),c_key )
176
+ update_required = False
177
+ s ,e = _get_time_range (hour_count )
207
178
if data is not None :
208
- print ("check if updated to the latest" )
209
179
metadata = json .loads (data )
210
- # print(metadata)
211
180
dataframe = pd .DataFrame .from_dict (metadata ["dataframe" ])
212
- dataframe ["startTime" ] = pd .to_datetime (dataframe ["startTime" ]) # Converts to pandas.Timestamp
213
- print (dataframe )
214
- s ,e = get_time_range (72 )
181
+ dataframe ["startTime" ] = pd .to_datetime (dataframe ["startTime" ])
215
182
last_start_time = pd .to_datetime (dataframe .iloc [- 1 ]["startTime" ])
216
-
217
183
# Calculate the difference in hours
218
184
time_difference = abs ((e - last_start_time ).total_seconds ()) / 3600
219
- print (last_start_time )
220
- print (e )
221
- print (time_difference )
185
+ if quarter_time <= time_difference :
186
+ update_required = True
222
187
else :
223
- print ("new_data_to_add" )
224
- s ,e = get_time_range (72 )
225
- dataframe = gather_energy_data (country ,s ,e )
188
+ update_required = True
189
+
190
+ if update_required :
191
+ # todo : see if offline data have the required data
192
+ dataframe = _gather_energy_data (country ,s ,e )
226
193
dataframe ["startTime" ] = pd .to_datetime (dataframe ["startTime" ])
227
194
dataframe ["startTime" ] = dataframe ["startTime" ].dt .tz_localize (None )
228
195
metadata = {
@@ -233,8 +200,7 @@ def _sync_offline_cache(country):
233
200
"updated_on" : datetime .now ().strftime ("%Y-%m-%d %H:%M:%S" ),
234
201
"dataframe" :dataframe .to_dict ()
235
202
}
236
- set_key_in_redis (Config .get ("energy_redis_path" ),c_key ,json .dumps (metadata , default = str ))
237
-
203
+ _set_key_in_redis (Config .get ("energy_redis_path" ),c_key ,json .dumps (metadata , default = str ))
238
204
239
205
240
206
def _get_offline_file_data (country ,start_time , end_time ):
@@ -258,25 +224,24 @@ def _get_offline_file_data(country,start_time, end_time):
258
224
return (False , None )
259
225
260
226
local_data = pd .read_csv (csv_file_path )
261
- return get_filtered_data (local_data , start_time , end_time )
227
+ return _get_filtered_data (local_data , start_time , end_time )
262
228
263
229
264
230
def _get_offline_cache_data(country, start, end):
    """Reads cached generation data for *country* from redis and filters it to [start, end].

    Returns the (partial, data) tuple produced by _get_filtered_data, or
    (False, None) when no cache entry exists for the country.
    Raises an Exception when energy caching is disabled in the config.
    """
    print("offline cache data")
    if not Config.get("enable_energy_caching"):
        raise Exception("This method cannot be used to get data since enable_energy_caching option is not enabled")
    cached = _get_key_from_redis(Config.get("energy_redis_path"), _get_country_key(country))
    if cached is None:
        return False, None
    payload = json.loads(cached)
    frame = pd.DataFrame.from_dict(payload["dataframe"])
    frame["startTime"] = pd.to_datetime(frame["startTime"])  # converts to pandas.Timestamp
    return _get_filtered_data(frame, start, end)
278
244
279
-
280
245
281
246
def get_offline_data (country ,start ,end ,sync_first = False ):
282
247
"""
@@ -288,7 +253,7 @@ def get_offline_data(country,start,end,sync_first=False):
288
253
returns {available:True/False, data:dataframe}
289
254
Note that this method assumes that syncing of the sources is being handled separately
290
255
"""
291
- output = {"available" :False ,"data" :None , "partial" :False }
256
+ output = {"available" :False ,"data" :None , "partial" :False , "source" : "" }
292
257
offline = Config .get ("enable_offline_energy_generation" )
293
258
cache = Config .get ("enable_energy_caching" )
294
259
@@ -306,6 +271,7 @@ def get_offline_data(country,start,end,sync_first=False):
306
271
output ["partial" ] = partial
307
272
output ["data" ] = data
308
273
output ["available" ] = True
274
+ output ["source" ] = "cache"
309
275
print ("data from cache" )
310
276
return output
311
277
@@ -318,34 +284,31 @@ def get_offline_data(country,start,end,sync_first=False):
318
284
output ["partial" ] = partial
319
285
output ["data" ] = data
320
286
output ["available" ] = True
287
+ output ["source" ] = "offline_file"
321
288
print ("just got the data from offline file" )
322
289
323
290
return output
324
291
325
292
326
def sync_offline_data(file=False, cache=False):
    """
    This method syncs offline data for the offline sources enabled in the config.
    Data is synced for all available countries.
    You need to run this before getting offline data. You can even set up a CRON
    job to call this method on regular intervals.

    :param file: when True (and enable_offline_energy_generation is set), sync the offline files
    :param cache: when True (and enable_energy_caching is set), sync the redis cache
    """
    c_keys = meta.get_country_metadata()
    if Config.get("enable_offline_energy_generation") == True and file == True:
        for key in c_keys:
            try:
                _sync_offline_file(key)
            except Exception as e:
                # a failure for one country must not stop the sync of the others
                log_stuff("Error in syncing offline file for " + key + ". Message: " + str(e))
    if Config.get("enable_energy_caching") == True and cache == True:
        for key in c_keys:
            try:
                _sync_offline_cache(key)
            except Exception as e:
                # fixed: this branch syncs the cache, so log it as a cache error
                log_stuff("Error in syncing offline cache for " + key + ". Message: " + str(e))
347
-
348
-
349
-
350
-
351
314
0 commit comments