32 | 32 | let(:event) { project_config.get_event_from_key('test_event') }
33 | 33 |
34 | 34 | before(:example) do
   | 35 | + spy_logger = spy('logger')
35 | 36 | @event_queue = SizedQueue.new(100)
36 | 37 | @event_dispatcher = Optimizely::EventDispatcher.new
37 | 38 | allow(@event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event))

46 | 47 | it 'should log warning when service is already started' do
47 | 48 | @event_processor = Optimizely::BatchEventProcessor.new(logger: spy_logger)
48 | 49 | @event_processor.start!
   | 50 | + @event_processor.start!
   | 51 | +
49 | 52 | expect(spy_logger).to have_received(:log).with(Logger::WARN, 'Service already started.').once
50 | 53 | end
51 | 54 |

61 | 64 | )
62 | 65 |
63 | 66 | @event_processor.process(conversion_event)
64 |    | - # flush interval is set to 100ms. Wait for 300ms and assert that event is dispatched.
65 |    | - sleep 1
   | 67 | + # flush interval is set to 100ms. Wait for 200ms and assert that event is dispatched.
   | 68 | + sleep 0.2
66 | 69 |
67 | 70 | expect(@event_dispatcher).to have_received(:dispatch_event).with(log_event).once
68 | 71 | expect(@notification_center).to have_received(:send_notifications).with(
69 | 72 | Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT],
70 | 73 | log_event
71 | 74 | ).once
72 | 75 | expect(spy_logger).to have_received(:log).with(Logger::INFO, 'Flushing Queue.').once
   | 76 | + expect(spy_logger).to have_received(:log).with(Logger::DEBUG, 'Deadline exceeded flushing current batch.').at_most(2).times
73 | 77 | end
74 | 78 |
75 | 79 | it 'should flush the current batch when max batch size met' do
76 | 80 | @event_processor = Optimizely::BatchEventProcessor.new(
77 | 81 | event_dispatcher: @event_dispatcher,
78 | 82 | batch_size: 11,
79 |    | - flush_interval: 100_000,
   | 83 | + flush_interval: 10_000,
80 | 84 | logger: spy_logger
81 | 85 | )
82 | 86 |

92 | 96 | end
93 | 97 |
94 | 98 | # Wait until other thread has processed the event.
95 |    | - until @event_processor.event_queue.empty?; end
96 |    | - until @event_processor.current_batch.empty?; end
   | 99 | + sleep 0.1 until @event_processor.event_queue.empty?
   | 100 | +
   | 101 | + sleep 0.1 until @event_processor.current_batch.empty?
97 | 102 |
98 | 103 | expect(Optimizely::EventFactory).to have_received(:create_log_event).with(expected_batch, spy_logger).once
99 | 104 | expect(@event_dispatcher).to have_received(:dispatch_event).with(

109 | 114 | @event_processor = Optimizely::BatchEventProcessor.new(
110 | 115 | event_queue: @event_queue,
111 | 116 | event_dispatcher: @event_dispatcher,
112 |     | - flush_interval: 100_000,
    | 117 | + flush_interval: 10_000,
113 | 118 | logger: spy_logger
114 | 119 | )
115 | 120 |

120 | 125 | @event_processor.flush
121 | 126 |
122 | 127 | # Wait until other thread has processed the event.
123 |     | - until @event_processor.event_queue.empty?; end
124 |     | - until @event_processor.current_batch.empty?; end
    | 128 | + sleep 0.1 until @event_processor.event_queue.empty?
    | 129 | +
    | 130 | + sleep 0.1 until @event_processor.current_batch.empty?
125 | 131 |
126 | 132 | expect(@event_dispatcher).to have_received(:dispatch_event).with(log_event).twice
127 | 133 | expect(@event_processor.event_queue.length).to eq(0)

143 | 149 | expect(user_event1.event_context[:revision]).to eq('1')
144 | 150 | @event_processor.process(user_event1)
145 | 151 | # Wait until other thread has processed the event.
146 |     | - while @event_processor.current_batch.length != 1; end
    | 152 | + sleep 0.1 while @event_processor.current_batch.length != 1
147 | 153 |
148 | 154 | expect(user_event2.event_context[:revision]).to eq('2')
149 | 155 | @event_processor.process(user_event2)
150 | 156 | @event_processor.process(user_event2)
151 | 157 | # Wait until other thread has processed the event.
152 |     | - while @event_processor.current_batch.length != 2; end
    | 158 | + sleep 0.1 while @event_processor.current_batch.length != 2
153 | 159 |
154 | 160 | expect(@event_dispatcher).to have_received(:dispatch_event).with(log_event).once
155 | 161 | expect(spy_logger).to have_received(:log).with(Logger::DEBUG, 'Revisions mismatched: Flushing current batch.').once

170 | 176 | expect(user_event1.event_context[:project_id]).to eq('X')
171 | 177 | @event_processor.process(user_event1)
172 | 178 | # Wait until other thread has processed the event.
173 |     | - while @event_processor.current_batch.length != 1; end
    | 179 | + sleep 0.1 while @event_processor.current_batch.length != 1
174 | 180 |
175 | 181 | expect(user_event2.event_context[:project_id]).to eq('Y')
176 | 182 | @event_processor.process(user_event2)
177 | 183 | @event_processor.process(user_event2)
178 | 184 | # Wait until other thread has processed the event.
179 |     | - while @event_processor.current_batch.length != 2; end
    | 185 | + sleep 0.1 while @event_processor.current_batch.length != 2
180 | 186 |
181 | 187 | expect(@event_dispatcher).to have_received(:dispatch_event).with(log_event).once
182 | 188 | expect(spy_logger).to have_received(:log).with(Logger::DEBUG, 'Project Ids mismatched: Flushing current batch.').once

252 | 258 | @event_processor.process(conversion_event)
253 | 259 |
254 | 260 | # Wait until other thread has processed the event.
255 |     | - while @event_processor.current_batch.length != 1; end
    | 261 | + sleep 0.1 while @event_processor.current_batch.length != 1
    | 262 | +
256 | 263 | @event_processor.flush
257 | 264 | # Wait until other thread has processed the event.
258 |     | - until @event_processor.current_batch.empty?; end
    | 265 | + sleep 0.1 until @event_processor.current_batch.empty?
259 | 266 |
260 | 267 | expect(@notification_center).to have_received(:send_notifications).with(
261 | 268 | Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT],

281 | 288 |
282 | 289 | @event_processor.process(conversion_event)
283 | 290 | # Wait until other thread has processed the event.
284 |     | - while @event_processor.current_batch.length != 1; end
    | 291 | + sleep 0.1 while @event_processor.current_batch.length != 1
    | 292 | +
285 | 293 | @event_processor.flush
286 | 294 | # Wait until other thread has processed the event.
287 |     | - until @event_processor.current_batch.empty?; end
    | 295 | + sleep 0.1 until @event_processor.current_batch.empty?
288 | 296 |
289 | 297 | expect(@notification_center).not_to have_received(:send_notifications)
290 | 298 | expect(spy_logger).to have_received(:log).once.with(

315 | 323 | end
316 | 324 |
317 | 325 | # Wait until other thread has processed the event.
318 |     | - while @event_processor.current_batch.length != 4; end
    | 326 | + sleep 0.1 while @event_processor.current_batch.length != 4
319 | 327 | expect(@event_dispatcher).not_to have_received(:dispatch_event)
320 | 328 |
321 | 329 | @event_processor.stop!

329 | 337 |
330 | 338 | it 'should log a warning when Queue gets full' do
331 | 339 | @event_processor = Optimizely::BatchEventProcessor.new(
332 |     | - event_queue: SizedQueue.new(10),
    | 340 | + event_queue: SizedQueue.new(5),
333 | 341 | event_dispatcher: @event_dispatcher,
334 |     | - batch_size: 100,
335 |     | - flush_interval: 100_000,
    | 342 | + batch_size: 1000,
    | 343 | + flush_interval: 10_000,
336 | 344 | logger: spy_logger
337 | 345 | )
338 | 346 |
339 | 347 | user_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, nil)
340 |     | - 11.times do
    | 348 | + 900.times do
341 | 349 | @event_processor.process(user_event)
342 | 350 | end
343 | 351 |
344 |     | - # Wait until other thread has processed the event.
345 |     | - while @event_processor.current_batch.length != 10; end
346 | 352 | expect(@event_dispatcher).not_to have_received(:dispatch_event)
347 |     | - expect(spy_logger).to have_received(:log).with(Logger::WARN, 'Payload not accepted by the queue.').once
    | 353 | + expect(spy_logger).to have_received(:log).with(Logger::WARN, 'Payload not accepted by the queue: queue full').at_least(:once)
348 | 354 | end
349 | 355 |
350 | 356 | it 'should not process and log when Executor is not running' do
351 | 357 | @event_processor = Optimizely::BatchEventProcessor.new(
352 | 358 | event_dispatcher: @event_dispatcher,
353 | 359 | batch_size: 100,
354 |     | - flush_interval: 100_000,
    | 360 | + flush_interval: 10_000,
355 | 361 | logger: spy_logger
356 | 362 | )
357 | 363 |
    | 364 | + @event_processor.start!
358 | 365 | @event_processor.stop!
359 | 366 |
360 | 367 | user_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, nil)