|
13 | 13 | from collections import defaultdict
|
14 | 14 | from redis.exceptions import ResponseError
|
15 | 15 |
|
def ptransform(nm):
    """Normalize a Redis key name into a ``prefix:suffix`` form for pattern grouping.

    Known key families (celery result keys, aggregation keys, queue/job keys,
    pidbox reply channels) are rewritten so that the stable part of the name and
    the variable part are separated by a single ``:``; any other name is
    returned unchanged.

    :param nm: raw Redis key name
    :return: transformed key name (or ``nm`` itself when no rule matches)
    """
    if nm.startswith('celery-task-meta'):
        # celery-task-meta-<task-id> -> celery-task-meta:<task-id>
        # maxsplit=3 keeps any further '-' inside the task id intact
        parts = nm.split('-', 3)
        return '-'.join(parts[:3]) + ':' + (parts[3] if len(parts) > 3 else '')
    if nm.startswith('qo_cli.aff_aggregations.aggregate_aff_aname_aname'):
        # split at the first '-': <aggregation-name>:<rest>
        head, _, tail = nm.partition('-')
        return head + ':' + tail
    if nm.endswith('_trigger_queue_user_job'):
        # <owner>_trigger_queue_user_job -> trigger_queue_user_job:<owner>
        head, _, tail = nm.partition('_')
        return tail + ':' + head
    if nm.endswith('.reply.celery.pidbox'):
        # <uuid>.reply.celery.pidbox -> reply.celery.pidbox:<uuid>
        head, _, tail = nm.partition('.')
        return tail + ':' + head
    if nm.endswith('_user_queue_user_job'):
        # <owner>_user_queue_user_job -> user_queue_user_job:<owner>
        head, _, tail = nm.partition('_')
        return tail + ':' + head
    return nm
| 35 | + |
16 | 36 |
|
17 | 37 | def connect_to_redis(host, port, db=0, password=None):
|
18 | 38 | """
|
@@ -152,14 +172,15 @@ def do_ram(self, res):
|
152 | 172 | total_keys = sum(len(values) for key, values in aggregate_patterns.items())
|
153 | 173 | ret += (rule.analyze(keys=aggregate_patterns, total=total_keys))
|
154 | 174 |
|
| 175 | + |
155 | 176 | return ret
|
156 | 177 |
|
157 | 178 | def get_pattern_aggregated_data(self, data):
|
158 |
| - split_patterns = self.splitter.split((obj["name"] for obj in data)) |
| 179 | + split_patterns = self.splitter.split((ptransform(obj["name"]) for obj in data)) |
159 | 180 | self.logger.debug(split_patterns)
|
160 | 181 |
|
161 | 182 | aggregate_patterns = {item: [] for item in split_patterns}
|
162 | 183 | for pattern in split_patterns:
|
163 |
| - aggregate_patterns[pattern] = list(filter(lambda obj: fnmatch.fnmatch(obj["name"], pattern), data)) |
| 184 | + aggregate_patterns[pattern] = list(filter(lambda obj: fnmatch.fnmatch(ptransform(obj["name"]), pattern), data)) |
164 | 185 |
|
165 | 186 | return aggregate_patterns
|
0 commit comments