feat: dump log for each testcase

This commit is contained in:
debugtalk
2020-03-10 18:59:18 +08:00
parent ffbab95259
commit 856295a68d
4 changed files with 30 additions and 15 deletions

View File

@@ -2,6 +2,10 @@
## 3.0.0 (2020-03-10)
**Added**
- feat: dump log for each testcase
**Changed**
- replace logging with [loguru](https://github.com/Delgan/loguru)

View File

@@ -50,6 +50,7 @@ class HttpRunner(object):
self.test_loader = unittest.TestLoader()
self.save_tests = save_tests
self._summary = None
self.project_mapping = None
def _add_tests(self, testcases):
""" initialize testcase with Runner() and add to test suite.
@@ -134,7 +135,14 @@ class HttpRunner(object):
"""
tests_results = []
for testcase in test_suite:
for index, testcase in enumerate(test_suite):
log_handler = None
if self.save_tests:
logs_file_abs_path = utils.prepare_log_file_abs_path(
self.project_mapping, f"testcase_{index+1}.log"
)
log_handler = logger.add(logs_file_abs_path, level="DEBUG")
testcase_name = testcase.config.get("name")
logger.info(f"Start to run testcase: {testcase_name}")
@@ -144,6 +152,9 @@ class HttpRunner(object):
else:
tests_results.insert(0, (testcase, result))
if self.save_tests and log_handler:
logger.remove(log_handler)
return tests_results
def _aggregate(self, tests_results):
@@ -192,10 +203,10 @@ class HttpRunner(object):
""" run testcase/testsuite data
"""
capture_message("start to run tests")
project_mapping = tests_mapping.get("project_mapping", {})
self.project_mapping = tests_mapping.get("project_mapping", {})
if self.save_tests:
utils.dump_logs(tests_mapping, project_mapping, "loaded")
utils.dump_logs(tests_mapping, self.project_mapping, "loaded")
# parse tests
self.exception_stage = "parse tests"
@@ -203,14 +214,14 @@ class HttpRunner(object):
parse_failed_testfiles = parser.get_parse_failed_testfiles()
if parse_failed_testfiles:
logger.warning("parse failures occurred ...")
utils.dump_logs(parse_failed_testfiles, project_mapping, "parse_failed")
utils.dump_logs(parse_failed_testfiles, self.project_mapping, "parse_failed")
if len(parsed_testcases) == 0:
logger.error("failed to parse all cases, abort.")
raise exceptions.ParseTestsFailure
if self.save_tests:
utils.dump_logs(parsed_testcases, project_mapping, "parsed")
utils.dump_logs(parsed_testcases, self.project_mapping, "parsed")
# add tests to test suite
self.exception_stage = "add tests to test suite"
@@ -229,10 +240,10 @@ class HttpRunner(object):
report.stringify_summary(self._summary)
if self.save_tests:
utils.dump_logs(self._summary, project_mapping, "summary")
utils.dump_logs(self._summary, self.project_mapping, "summary")
# save variables and export data
vars_out = self.get_vars_out()
utils.dump_logs(vars_out, project_mapping, "io")
utils.dump_logs(vars_out, self.project_mapping, "io")
return self._summary

View File

@@ -593,7 +593,7 @@ def dump_json_file(json_data, json_file_abs_path):
logger.error(msg)
def prepare_dump_json_file_abs_path(project_mapping, tag_name):
def prepare_log_file_abs_path(project_mapping, file_name):
""" prepare log file absolute path (e.g. dumped json or testcase log).
"""
current_working_dir = os.getcwd()
@@ -601,7 +601,7 @@ def prepare_dump_json_file_abs_path(project_mapping, tag_name):
if not test_path:
# running passed in testcase/testsuite data structure
dump_file_name = f"tests_mapping.{tag_name}.json"
dump_file_name = f"tests_mapping.{file_name}"
dumped_json_file_abs_path = os.path.join(current_working_dir, "logs", dump_file_name)
return dumped_json_file_abs_path
@@ -610,12 +610,12 @@ def prepare_dump_json_file_abs_path(project_mapping, tag_name):
if os.path.isdir(test_path):
file_foder_path = os.path.join(logs_dir_path, test_path)
dump_file_name = f"all.{tag_name}.json"
dump_file_name = f"all.{file_name}"
else:
file_relative_folder_path, test_file = os.path.split(test_path)
file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path)
test_file_name, _file_suffix = os.path.splitext(test_file)
dump_file_name = f"{test_file_name}.{tag_name}.json"
dump_file_name = f"{test_file_name}.{file_name}"
dumped_json_file_abs_path = os.path.join(file_foder_path, dump_file_name)
return dumped_json_file_abs_path
@@ -631,5 +631,5 @@ def dump_logs(json_data, project_mapping, tag_name):
tag_name (str): tag name, loaded/parsed/summary
"""
json_file_abs_path = prepare_dump_json_file_abs_path(project_mapping, tag_name)
json_file_abs_path = prepare_log_file_abs_path(project_mapping, f"{tag_name}.json")
dump_json_file(json_data, json_file_abs_path)

View File

@@ -282,7 +282,7 @@ class TestUtils(unittest.TestCase):
"test_path": os.path.join("tests", "httpbin", "a.b.c")
}
self.assertEqual(
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
utils.prepare_log_file_abs_path(project_mapping, "loaded"),
os.path.join(os.getcwd(), "logs", "tests/httpbin/a.b.c/all.loaded.json")
)
@@ -292,7 +292,7 @@ class TestUtils(unittest.TestCase):
"test_path": os.path.join("tests", "httpbin", "a.b.c", "rpc.yml")
}
self.assertEqual(
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
utils.prepare_log_file_abs_path(project_mapping, "loaded"),
os.path.join(os.getcwd(), "logs", "tests/httpbin/a.b.c/rpc.loaded.json")
)
@@ -302,6 +302,6 @@ class TestUtils(unittest.TestCase):
"PWD": project_working_directory
}
self.assertEqual(
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
utils.prepare_log_file_abs_path(project_mapping, "loaded"),
os.path.join(os.getcwd(), "logs", "tests_mapping.loaded.json")
)