diff --git a/swift/stats/log_processor.py b/swift/stats/log_processor.py
index 6ac8e0501e..f8ac20ac39 100644
--- a/swift/stats/log_processor.py
+++ b/swift/stats/log_processor.py
@@ -468,7 +468,7 @@ class LogProcessorDaemon(Daemon):
         # map
         processor_args = (self.total_conf, self.logger)
         results = multiprocess_collate(processor_args, logs_to_process,
-                                       self.worker_count, self.logger)
+                                       self.worker_count)
 
         # reduce
         aggr_data = self.get_aggregate_data(processed_files, results)
@@ -527,8 +527,7 @@ class LogProcessorDaemon(Daemon):
                         ((time.time() - start) / 60))
 
 
-def multiprocess_collate(processor_args, logs_to_process, worker_count,
-                         logger):
+def multiprocess_collate(processor_args, logs_to_process, worker_count):
     '''
     yield hourly data from logs_to_process
     Every item that this function yields will be added to the processed files
@@ -554,11 +553,7 @@ def multiprocess_collate(processor_args, logs_to_process, worker_count,
         except Queue.Empty:
             time.sleep(.01)
         else:
-            if isinstance(data, Exception):
-                item_string = '/'.join(item[2:])
-                logger.exception("Problem processing file '%s'" %
-                                 (item_string))
-            else:
+            if not isinstance(data, Exception):
                 yield item, data
         if not any(r.is_alive() for r in results) and out_queue.empty():
             # all the workers are done and nothing is in the queue
@@ -576,5 +571,7 @@ def collate_worker(processor_args, in_queue, out_queue):
         try:
             ret = p.process_one_file(*item)
         except Exception, err:
+            item_string = '/'.join(item[2:])
+            p.logger.exception("Unable to process file '%s'" % (item_string))
             ret = err
         out_queue.put((item, ret))
diff --git a/test/unit/stats/test_log_processor.py b/test/unit/stats/test_log_processor.py
index 8a325310fb..1dfa4424b2 100644
--- a/test/unit/stats/test_log_processor.py
+++ b/test/unit/stats/test_log_processor.py
@@ -387,8 +387,7 @@ use = egg:swift#proxy
         logs_to_process = [item]
         results = log_processor.multiprocess_collate(processor_args,
                                                      logs_to_process,
-                                                     1,
-                                                     DumbLogger())
+                                                     1)
         results = list(results)
         expected = [(item, {('acct', '2010', '07', '09', '04'):
                     {('public', 'object', 'GET', '2xx'): 1,
@@ -422,8 +421,7 @@ use = egg:swift#proxy
         logs_to_process = [item]
         results = log_processor.multiprocess_collate(processor_args,
                                                      logs_to_process,
-                                                     1,
-                                                     DumbLogger())
+                                                     1)
         results = list(results)
         expected = []
         self.assertEquals(results, expected)
@@ -763,13 +761,12 @@ class TestLogProcessorDaemon(unittest.TestCase):
         d = MockLogProcessorDaemon(self)
 
         def mock_multiprocess_collate(processor_args, logs_to_process,
-            worker_count, logger):
+            worker_count):
             self.assertEquals(d.total_conf, processor_args[0])
             self.assertEquals(d.logger, processor_args[1])
 
             self.assertEquals(mock_logs_to_process, logs_to_process)
             self.assertEquals(d.worker_count, worker_count)
-            self.assertEquals(d.logger, logger)
 
             return multiprocess_collate_return
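
Note on the change: exception handling moves out of the consumer loop in multiprocess_collate and into collate_worker, which already holds a LogProcessor (and therefore a logger) for the files it processes, so the extra logger argument can be dropped from multiprocess_collate and from the tests. Below is a rough standalone sketch of the resulting division of labor, not Swift code: the names fake_process_one_file, worker and collate are illustrative only, the worker loop is simplified to a blocking get with a None sentinel, and it uses modern Python syntax rather than the Python 2 style of the patched file.

    # Standalone sketch (NOT Swift code): the worker owns the logger and
    # reports its own failures, so the collating loop only filters out
    # Exception results instead of logging them.
    import logging
    import multiprocessing

    logging.basicConfig(level=logging.INFO)


    def fake_process_one_file(account, year, month, day, hour):
        # stand-in for LogProcessor.process_one_file
        if account == 'bad':
            raise ValueError('corrupt log file')
        return {('public', 'object', 'GET', '2xx'): 1}


    def worker(in_queue, out_queue):
        logger = logging.getLogger('worker')
        while True:
            item = in_queue.get()
            if item is None:  # sentinel: no more work
                break
            try:
                ret = fake_process_one_file(*item)
            except Exception as err:
                # the worker logs the failure itself, as collate_worker now does
                logger.exception("Unable to process file '%s'", '/'.join(item))
                ret = err
            out_queue.put((item, ret))


    def collate(items, worker_count=2):
        in_queue = multiprocessing.Queue()
        out_queue = multiprocessing.Queue()
        workers = [multiprocessing.Process(target=worker,
                                           args=(in_queue, out_queue))
                   for _ in range(worker_count)]
        for w in workers:
            w.start()
        for item in items:
            in_queue.put(item)
        for _ in workers:
            in_queue.put(None)
        for _ in items:
            item, data = out_queue.get()
            # failures were already logged by the worker; just skip them here
            if not isinstance(data, Exception):
                yield item, data
        for w in workers:
            w.join()


    if __name__ == '__main__':
        work = [('acct', '2010', '07', '09', '04'),
                ('bad', '2010', '07', '09', '05')]
        print(list(collate(work)))

The design point of the patch is that the process that hits the failure is the one that logs it, so the traceback stays intact, and the caller only needs to recognize Exception results and skip them, which is why the logger no longer has to be threaded through multiprocess_collate.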