Mirror of https://github.com/httprunner/httprunner.git, synced 2026-05-11 18:11:21 +08:00.
Commit: "fix: compatibility with --save-tests".
This commit is contained in:
@@ -20,6 +20,9 @@ def init_parser_run(subparsers):
|
||||
|
||||
|
||||
def main_run(extra_args):
|
||||
# keep compatibility with v2
|
||||
extra_args = ensure_cli_args(extra_args)
|
||||
|
||||
tests_path_list = []
|
||||
extra_args_new = []
|
||||
for item in extra_args:
|
||||
@@ -41,7 +44,6 @@ def main_run(extra_args):
|
||||
sys.exit(1)
|
||||
|
||||
extra_args_new.extend(testcase_path_list)
|
||||
extra_args_new = ensure_cli_args(extra_args_new)
|
||||
pytest.main(extra_args_new)
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
"""
|
||||
This module handles compatibility issues between testcase format v2 and v3.
|
||||
"""
|
||||
|
||||
import os
|
||||
from typing import List, Dict, Text, Union
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from httprunner import exceptions
|
||||
from httprunner.loader import load_project_meta
|
||||
from httprunner.utils import sort_dict_by_custom_order
|
||||
|
||||
|
||||
@@ -169,8 +172,112 @@ def ensure_cli_args(args: List) -> List:
|
||||
args[index] = "--html"
|
||||
args.append("--self-contained-html")
|
||||
|
||||
# remove deprecated --save-tests
|
||||
# keep compatibility with --save-tests in v2
|
||||
if "--save-tests" in args:
|
||||
args.pop(args.index("--save-tests"))
|
||||
generate_conftest_for_summary(args)
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def generate_conftest_for_summary(args: List):
    """Generate a project-level conftest.py that dumps a task summary JSON.

    Keeps compatibility with the deprecated v2 ``--save-tests`` flag: the
    generated conftest registers a session-scoped fixture that aggregates each
    testcase summary after the pytest session and writes the result to a
    ``*.summary.json`` file under the project ``logs/`` directory.

    Args:
        args: pytest CLI argument list; the first argument that exists on the
            filesystem is taken as the test path.

    Raises:
        exceptions.FileNotFound: if no existing test path is found in args.
    """
    for arg in args:
        if os.path.exists(arg):
            test_path = arg
            # FIXME: several test paths maybe specified
            break
    else:
        # fixed: was an f-string with no placeholders (dead "f" prefix)
        raise exceptions.FileNotFound("No test path specified!")

    project_meta = load_project_meta(test_path)
    conftest_path = os.path.join(project_meta.PWD, "conftest.py")
    if os.path.isfile(conftest_path):
        # never overwrite an existing (possibly user-authored) conftest.py
        return

    # Template of the generated conftest; {{SUMMARY_PATH_PLACEHOLDER}} is
    # substituted below with the computed summary path.
    conftest_content = '''# NOTICE: Generated By HttpRunner.
import json
import os
import time

import pytest
from loguru import logger

from httprunner.utils import get_platform


@pytest.fixture(scope="session", autouse=True)
def session_fixture(request):
    """setup and teardown each task"""
    logger.info(f"start running testcases ...")

    start_at = time.time()

    yield

    logger.info(f"task finished, generate task summary for --save-tests")

    summary = {
        "success": True,
        "stat": {
            "testcases": {"total": 0, "success": 0, "fail": 0},
            "teststeps": {"total": 0, "failures": 0, "successes": 0},
        },
        "time": {"start_at": start_at, "duration": time.time() - start_at},
        "platform": get_platform(),
        "details": [],
    }

    for item in request.node.items:
        testcase_summary = item.instance.get_summary()
        summary["success"] &= testcase_summary.success

        summary["stat"]["testcases"]["total"] += 1
        summary["stat"]["teststeps"]["total"] += len(testcase_summary.step_datas)
        if testcase_summary.success:
            summary["stat"]["testcases"]["success"] += 1
            summary["stat"]["teststeps"]["successes"] += len(
                testcase_summary.step_datas
            )
        else:
            summary["stat"]["testcases"]["fail"] += 1
            summary["stat"]["teststeps"]["successes"] += (
                len(testcase_summary.step_datas) - 1
            )
            summary["stat"]["teststeps"]["failures"] += 1

        summary["details"].append(testcase_summary.dict())

    summary_path = "{{SUMMARY_PATH_PLACEHOLDER}}"
    summary_dir = os.path.dirname(summary_path)
    os.makedirs(summary_dir, exist_ok=True)

    with open(summary_path, "w", encoding="utf-8") as f:
        json.dump(summary, f, indent=4)

    logger.info(f"generated task summary: {summary_path}")

'''

    test_path = os.path.abspath(test_path)
    logs_dir_path = os.path.join(project_meta.PWD, "logs")
    # path of the test target relative to the project root
    test_path_relative_path = test_path[len(project_meta.PWD) + 1 :]

    # mirror the test path layout under logs/; fixed local typo
    # "file_foder_path" -> "file_folder_path"
    if os.path.isdir(test_path):
        file_folder_path = os.path.join(logs_dir_path, test_path_relative_path)
        dump_file_name = "all.summary.json"
    else:
        file_relative_folder_path, test_file = os.path.split(test_path_relative_path)
        file_folder_path = os.path.join(logs_dir_path, file_relative_folder_path)
        test_file_name, _ = os.path.splitext(test_file)
        dump_file_name = f"{test_file_name}.summary.json"

    summary_path = os.path.join(file_folder_path, dump_file_name)
    conftest_content = conftest_content.replace(
        "{{SUMMARY_PATH_PLACEHOLDER}}", summary_path
    )

    with open(conftest_path, "w", encoding="utf-8") as f:
        f.write(conftest_content)

    logger.info("generated conftest.py to generate summary.json")
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from httprunner import compat, exceptions
|
||||
@@ -141,20 +142,38 @@ class TestCompat(unittest.TestCase):
|
||||
)
|
||||
|
||||
def test_ensure_cli_args(self):
|
||||
args1 = ["/path/to/testcase.yml", "--failfast"]
|
||||
self.assertEqual(compat.ensure_cli_args(args1), ["/path/to/testcase.yml"])
|
||||
args1 = ["examples/postman_echo/request_methods/hardcode.yml", "--failfast"]
|
||||
self.assertEqual(
|
||||
compat.ensure_cli_args(args1),
|
||||
["examples/postman_echo/request_methods/hardcode.yml"],
|
||||
)
|
||||
|
||||
args2 = ["/path/to/testcase.yml", "--save-tests"]
|
||||
self.assertEqual(compat.ensure_cli_args(args2), ["/path/to/testcase.yml"])
|
||||
args2 = ["examples/postman_echo/request_methods/hardcode.yml", "--save-tests"]
|
||||
self.assertEqual(
|
||||
compat.ensure_cli_args(args2),
|
||||
["examples/postman_echo/request_methods/hardcode.yml"],
|
||||
)
|
||||
self.assertTrue(
|
||||
os.path.isfile("examples/postman_echo/request_methods/conftest.py")
|
||||
)
|
||||
|
||||
args3 = ["/path/to/testcase.yml", "--report-file", "report.html"]
|
||||
args3 = [
|
||||
"examples/postman_echo/request_methods/hardcode.yml",
|
||||
"--report-file",
|
||||
"report.html",
|
||||
]
|
||||
self.assertEqual(
|
||||
compat.ensure_cli_args(args3),
|
||||
["/path/to/testcase.yml", "--html", "report.html", "--self-contained-html"],
|
||||
[
|
||||
"examples/postman_echo/request_methods/hardcode.yml",
|
||||
"--html",
|
||||
"report.html",
|
||||
"--self-contained-html",
|
||||
],
|
||||
)
|
||||
|
||||
args4 = [
|
||||
"/path/to/testcase.yml",
|
||||
"examples/postman_echo/request_methods/hardcode.yml",
|
||||
"--failfast",
|
||||
"--save-tests",
|
||||
"--report-file",
|
||||
@@ -162,5 +181,10 @@ class TestCompat(unittest.TestCase):
|
||||
]
|
||||
self.assertEqual(
|
||||
compat.ensure_cli_args(args4),
|
||||
["/path/to/testcase.yml", "--html", "report.html", "--self-contained-html"],
|
||||
[
|
||||
"examples/postman_echo/request_methods/hardcode.yml",
|
||||
"--html",
|
||||
"report.html",
|
||||
"--self-contained-html",
|
||||
],
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user