mirror of
https://github.com/httprunner/httprunner.git
synced 2026-05-13 08:19:44 +08:00
@@ -1,5 +1,15 @@
|
||||
# Release History
|
||||
|
||||
## 2.3.3 (2019-12-04)
|
||||
|
||||
**Fixed**
|
||||
|
||||
- fix #768: dump json file path error when folder name contains dot, such as `a.b.c`
|
||||
|
||||
**Changed**
|
||||
|
||||
- change: rename builtin function, sleep_N_secs => sleep
|
||||
|
||||
## 2.3.2 (2019-11-01)
|
||||
|
||||
**Added**
|
||||
|
||||
13
docs/FAQ.md
13
docs/FAQ.md
@@ -1 +1,14 @@
|
||||
# 常见问题
|
||||
|
||||
## HTTPS SSLError
|
||||
|
||||
请求 HTTPS 接口时,若本地开启了代理软件(Charles/Fiddler),由于 HTTPS 证书的原因,会导致 SSLError 的报错。
|
||||
|
||||
解决的方式是,在 config 中增加 `verify: False`,原理见 requests 的 [`SSL Cert Verification`](https://requests.kennethreitz.org/en/master/user/advanced/#ssl-cert-verification) 部分。
|
||||
|
||||
```yaml
|
||||
config:
|
||||
name: XXX
|
||||
base_url: XXX
|
||||
verify: False
|
||||
```
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
__version__ = "2.3.2"
|
||||
__version__ = "2.3.3"
|
||||
__description__ = "One-stop solution for HTTP(S) testing."
|
||||
|
||||
__all__ = ["__version__", "__description__"]
|
||||
|
||||
@@ -258,7 +258,6 @@ class HttpRunner(object):
|
||||
# load tests
|
||||
self.exception_stage = "load tests"
|
||||
tests_mapping = loader.load_tests(path, dot_env_path)
|
||||
tests_mapping["project_mapping"]["test_path"] = path
|
||||
|
||||
if mapping:
|
||||
tests_mapping["project_mapping"]["variables"] = mapping
|
||||
|
||||
@@ -30,6 +30,7 @@ def gen_random_string(str_len):
|
||||
return ''.join(
|
||||
random.choice(string.ascii_letters + string.digits) for _ in range(str_len))
|
||||
|
||||
|
||||
def get_timestamp(str_len=13):
|
||||
""" get timestamp string, length can only between 0 and 16
|
||||
"""
|
||||
@@ -38,12 +39,19 @@ def get_timestamp(str_len=13):
|
||||
|
||||
raise ParamsError("timestamp length can only between 0 and 16.")
|
||||
|
||||
|
||||
def get_current_date(fmt="%Y-%m-%d"):
    """ get current date string.

    Args:
        fmt (str): strftime format, defaults to "%Y-%m-%d".

    Returns:
        str: current date formatted with fmt.
    """
    now = datetime.datetime.now()
    return now.strftime(fmt)
|
||||
|
||||
|
||||
def sleep(n_secs):
    """ suspend execution for the given number of seconds.

    Args:
        n_secs (int/float): seconds to sleep.
    """
    time.sleep(n_secs)
|
||||
|
||||
|
||||
###############################################################################
|
||||
## upload files with requests-toolbelt
|
||||
# e.g.
|
||||
@@ -66,6 +74,7 @@ def get_current_date(fmt="%Y-%m-%d"):
|
||||
def multipart_encoder(**kwargs):
|
||||
""" initialize MultipartEncoder with uploading fields.
|
||||
"""
|
||||
|
||||
def get_filetype(file_path):
|
||||
file_type = filetype.guess(file_path)
|
||||
if file_type:
|
||||
@@ -108,52 +117,66 @@ def multipart_content_type(multipart_encoder):
|
||||
def equals(check_value, expect_value):
    """ assert check_value equals expect_value.

    Raises:
        AssertionError: if the values differ.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not (check_value == expect_value):
        raise AssertionError("{} does not equal {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def less_than(check_value, expect_value):
    """ assert check_value is strictly less than expect_value.

    Raises:
        AssertionError: if check_value >= expect_value.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not (check_value < expect_value):
        raise AssertionError("{} is not less than {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def less_than_or_equals(check_value, expect_value):
    """ assert check_value is less than or equal to expect_value.

    Raises:
        AssertionError: if check_value > expect_value.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not (check_value <= expect_value):
        raise AssertionError("{} is not less than or equal to {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def greater_than(check_value, expect_value):
    """ assert check_value is strictly greater than expect_value.

    Raises:
        AssertionError: if check_value <= expect_value.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not (check_value > expect_value):
        raise AssertionError("{} is not greater than {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def greater_than_or_equals(check_value, expect_value):
    """ assert check_value is greater than or equal to expect_value.

    Raises:
        AssertionError: if check_value < expect_value.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not (check_value >= expect_value):
        raise AssertionError("{} is not greater than or equal to {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def not_equals(check_value, expect_value):
    """ assert check_value does not equal expect_value.

    Raises:
        AssertionError: if the values are equal.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not (check_value != expect_value):
        raise AssertionError("{} equals {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def string_equals(check_value, expect_value):
    """ assert string representations of the two values are equal.

    Raises:
        AssertionError: if str(check_value) != str(expect_value).
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not (builtin_str(check_value) == builtin_str(expect_value)):
        raise AssertionError("{} does not string-equal {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def length_equals(check_value, expect_value):
    """ assert len(check_value) equals expect_value.

    Raises:
        AssertionError: if expect_value is not an integer,
            or the length does not match.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(expect_value, integer_types):
        raise AssertionError("expect_value should be int: {}".format(expect_value))
    if not (len(check_value) == expect_value):
        raise AssertionError("length {} does not equal {}".format(len(check_value), expect_value))
|
||||
|
||||
|
||||
def length_greater_than(check_value, expect_value):
    """ assert len(check_value) is strictly greater than expect_value.

    Raises:
        AssertionError: if expect_value is not an integer,
            or the length is not greater.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(expect_value, integer_types):
        raise AssertionError("expect_value should be int: {}".format(expect_value))
    if not (len(check_value) > expect_value):
        raise AssertionError("length {} is not greater than {}".format(len(check_value), expect_value))
|
||||
|
||||
|
||||
def length_greater_than_or_equals(check_value, expect_value):
    """ assert len(check_value) is greater than or equal to expect_value.

    Raises:
        AssertionError: if expect_value is not an integer,
            or the length is smaller.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(expect_value, integer_types):
        raise AssertionError("expect_value should be int: {}".format(expect_value))
    if not (len(check_value) >= expect_value):
        raise AssertionError("length {} is less than {}".format(len(check_value), expect_value))
|
||||
|
||||
|
||||
def length_less_than(check_value, expect_value):
    """ assert len(check_value) is strictly less than expect_value.

    Raises:
        AssertionError: if expect_value is not an integer,
            or the length is not smaller.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(expect_value, integer_types):
        raise AssertionError("expect_value should be int: {}".format(expect_value))
    if not (len(check_value) < expect_value):
        raise AssertionError("length {} is not less than {}".format(len(check_value), expect_value))
|
||||
|
||||
|
||||
def length_less_than_or_equals(check_value, expect_value):
    """ assert len(check_value) is less than or equal to expect_value.

    Raises:
        AssertionError: if expect_value is not an integer,
            or the length is larger.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(expect_value, integer_types):
        raise AssertionError("expect_value should be int: {}".format(expect_value))
    if not (len(check_value) <= expect_value):
        raise AssertionError("length {} is greater than {}".format(len(check_value), expect_value))
|
||||
|
||||
|
||||
def contains(check_value, expect_value):
    """ assert check_value (a container) contains expect_value.

    Raises:
        AssertionError: if check_value is not a container type,
            or expect_value is not in it.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(check_value, (list, tuple, dict, basestring)):
        raise AssertionError("check_value should be container type: {}".format(check_value))
    if expect_value not in check_value:
        raise AssertionError("{} is not contained in {}".format(expect_value, check_value))
|
||||
|
||||
|
||||
def contained_by(check_value, expect_value):
    """ assert check_value is contained by expect_value (a container).

    Raises:
        AssertionError: if expect_value is not a container type,
            or check_value is not in it.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(expect_value, (list, tuple, dict, basestring)):
        raise AssertionError("expect_value should be container type: {}".format(expect_value))
    if check_value not in expect_value:
        raise AssertionError("{} is not contained by {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def type_match(check_value, expect_value):
|
||||
def get_type(name):
|
||||
if isinstance(name, type):
|
||||
@@ -168,20 +191,16 @@ def type_match(check_value, expect_value):
|
||||
|
||||
assert isinstance(check_value, get_type(expect_value))
|
||||
|
||||
|
||||
def regex_match(check_value, expect_value):
    """ assert check_value matches the regex pattern expect_value.

    Raises:
        AssertionError: if either value is not a string,
            or the pattern does not match at the start of check_value.
    """
    # explicit raises survive `python -O`, which strips bare asserts
    if not isinstance(expect_value, basestring):
        raise AssertionError("expect_value should be string: {}".format(expect_value))
    if not isinstance(check_value, basestring):
        raise AssertionError("check_value should be string: {}".format(check_value))
    # re.match anchors at the beginning of the string only
    if not re.match(expect_value, check_value):
        raise AssertionError("{} does not match pattern {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def startswith(check_value, expect_value):
    """ assert str(check_value) starts with str(expect_value).

    Raises:
        AssertionError: if the prefix does not match.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not builtin_str(check_value).startswith(builtin_str(expect_value)):
        raise AssertionError("{} does not start with {}".format(check_value, expect_value))
|
||||
|
||||
|
||||
def endswith(check_value, expect_value):
    """ assert str(check_value) ends with str(expect_value).

    Raises:
        AssertionError: if the suffix does not match.
    """
    # explicit raise survives `python -O`, which strips bare asserts
    if not builtin_str(check_value).endswith(builtin_str(expect_value)):
        raise AssertionError("{} does not end with {}".format(check_value, expect_value))
|
||||
|
||||
""" built-in hooks
|
||||
"""
|
||||
def sleep_N_secs(n_secs):
    """ suspend execution for n_secs seconds.

    NOTE(review): changelog 2.3.3 renames this builtin to `sleep` —
    presumably kept here for backward compatibility; verify against callers.

    Args:
        n_secs (int/float): seconds to sleep.
    """
    time.sleep(n_secs)
|
||||
|
||||
@@ -779,6 +779,19 @@ def load_project_tests(test_path, dot_env_path=None):
|
||||
environments and debugtalk.py functions.
|
||||
|
||||
"""
|
||||
|
||||
def prepare_path(path):
|
||||
if not os.path.exists(path):
|
||||
err_msg = "path not exist: {}".format(path)
|
||||
logger.log_error(err_msg)
|
||||
raise exceptions.FileNotFound(err_msg)
|
||||
|
||||
if not os.path.isabs(path):
|
||||
path = os.path.join(os.getcwd(), path)
|
||||
|
||||
return path
|
||||
|
||||
test_path = prepare_path(test_path)
|
||||
# locate debugtalk.py file
|
||||
debugtalk_path = locate_debugtalk_py(test_path)
|
||||
|
||||
@@ -810,6 +823,7 @@ def load_project_tests(test_path, dot_env_path=None):
|
||||
project_mapping["PWD"] = project_working_directory
|
||||
built_in.PWD = project_working_directory
|
||||
project_mapping["functions"] = debugtalk_functions
|
||||
project_mapping["test_path"] = test_path
|
||||
|
||||
# load api
|
||||
tests_def_mapping["api"] = load_api_folder(os.path.join(project_working_directory, "api"))
|
||||
@@ -869,14 +883,6 @@ def load_tests(path, dot_env_path=None):
|
||||
}
|
||||
|
||||
"""
|
||||
if not os.path.exists(path):
|
||||
err_msg = "path not exist: {}".format(path)
|
||||
logger.log_error(err_msg)
|
||||
raise exceptions.FileNotFound(err_msg)
|
||||
|
||||
if not os.path.isabs(path):
|
||||
path = os.path.join(os.getcwd(), path)
|
||||
|
||||
load_project_tests(path, dot_env_path)
|
||||
tests_mapping = {
|
||||
"project_mapping": project_mapping
|
||||
|
||||
@@ -607,7 +607,7 @@ def omit_long_data(body, omit_len=512):
|
||||
return omitted_body + appendix_str
|
||||
|
||||
|
||||
def dump_json_file(json_data, pwd_dir_path, dump_file_name):
|
||||
def dump_json_file(json_data, json_file_abs_path):
|
||||
""" dump json data to file
|
||||
"""
|
||||
class PythonObjectEncoder(json.JSONEncoder):
|
||||
@@ -617,14 +617,8 @@ def dump_json_file(json_data, pwd_dir_path, dump_file_name):
|
||||
except TypeError:
|
||||
return str(obj)
|
||||
|
||||
logs_dir_path = os.path.join(pwd_dir_path, "logs")
|
||||
if not os.path.isdir(logs_dir_path):
|
||||
os.makedirs(logs_dir_path)
|
||||
|
||||
dump_file_path = os.path.join(logs_dir_path, dump_file_name)
|
||||
|
||||
try:
|
||||
with io.open(dump_file_path, 'w', encoding='utf-8') as outfile:
|
||||
with io.open(json_file_abs_path, 'w', encoding='utf-8') as outfile:
|
||||
if is_py2:
|
||||
outfile.write(
|
||||
unicode(json.dumps(
|
||||
@@ -645,23 +639,44 @@ def dump_json_file(json_data, pwd_dir_path, dump_file_name):
|
||||
cls=PythonObjectEncoder
|
||||
)
|
||||
|
||||
msg = "dump file: {}".format(dump_file_path)
|
||||
msg = "dump file: {}".format(json_file_abs_path)
|
||||
logger.color_print(msg, "BLUE")
|
||||
|
||||
except TypeError as ex:
|
||||
msg = "Failed to dump json file: {}\nReason: {}".format(dump_file_path, ex)
|
||||
msg = "Failed to dump json file: {}\nReason: {}".format(json_file_abs_path, ex)
|
||||
logger.color_print(msg, "RED")
|
||||
|
||||
|
||||
def prepare_dump_json_file_abs_path(project_mapping, tag_name):
    """ prepare dump json file absolute path.

    Args:
        project_mapping (dict): project info mapping, may contain
            "PWD" (project working directory, absolute path) and
            "test_path" (absolute path of the running test file/folder).
        tag_name (str): dump tag, e.g. loaded/parsed/summary.

    Returns:
        str: absolute path of the json file to dump, located under
            <PWD>/logs/ and mirroring the test path folder structure.

    """
    pwd_dir_path = project_mapping.get("PWD") or os.getcwd()
    test_path = project_mapping.get("test_path")

    if not test_path:
        # running a passed-in testcase/testsuite data structure,
        # no file path exists to mirror under logs/
        dump_file_name = "tests_mapping.{}.json".format(tag_name)
        return os.path.join(pwd_dir_path, "logs", dump_file_name)

    # both test_path and pwd_dir_path are absolute paths
    logs_dir_path = os.path.join(pwd_dir_path, "logs")
    test_path_relative_path = test_path[len(pwd_dir_path) + 1:]

    if os.path.isdir(test_path):
        # fix: local variable typo "file_foder_path" -> "file_folder_path"
        file_folder_path = os.path.join(logs_dir_path, test_path_relative_path)
        dump_file_name = "all.{}.json".format(tag_name)
    else:
        # split the folder part off first, then apply splitext to the file
        # name only, so dots in folder names (e.g. a.b.c) do not truncate
        # the dump path (fix #768)
        file_relative_folder_path, test_file = os.path.split(test_path_relative_path)
        file_folder_path = os.path.join(logs_dir_path, file_relative_folder_path)
        test_file_name, _file_suffix = os.path.splitext(test_file)
        dump_file_name = "{}.{}.json".format(test_file_name, tag_name)

    if not os.path.isdir(file_folder_path):
        os.makedirs(file_folder_path)

    return os.path.join(file_folder_path, dump_file_name)
|
||||
|
||||
|
||||
def dump_logs(json_data, project_mapping, tag_name):
|
||||
@@ -674,8 +689,8 @@ def dump_logs(json_data, project_mapping, tag_name):
|
||||
tag_name (str): tag name, loaded/parsed/summary
|
||||
|
||||
"""
|
||||
pwd_dir_path, dump_file_name = _prepare_dump_info(project_mapping, tag_name)
|
||||
dump_json_file(json_data, pwd_dir_path, dump_file_name)
|
||||
json_file_abs_path = prepare_dump_json_file_abs_path(project_mapping, tag_name)
|
||||
dump_json_file(json_data, json_file_abs_path)
|
||||
|
||||
|
||||
def get_python2_retire_msg():
|
||||
|
||||
@@ -159,13 +159,13 @@ def get_uniform_comparator(comparator):
|
||||
return "length_equals"
|
||||
elif comparator in ["len_gt", "count_gt", "length_greater_than", "count_greater_than"]:
|
||||
return "length_greater_than"
|
||||
elif comparator in ["len_ge", "count_ge", "length_greater_than_or_equals", \
|
||||
"count_greater_than_or_equals"]:
|
||||
elif comparator in ["len_ge", "count_ge", "length_greater_than_or_equals",
|
||||
"count_greater_than_or_equals"]:
|
||||
return "length_greater_than_or_equals"
|
||||
elif comparator in ["len_lt", "count_lt", "length_less_than", "count_less_than"]:
|
||||
return "length_less_than"
|
||||
elif comparator in ["len_le", "count_le", "length_less_than_or_equals", \
|
||||
"count_less_than_or_equals"]:
|
||||
elif comparator in ["len_le", "count_le", "length_less_than_or_equals",
|
||||
"count_less_than_or_equals"]:
|
||||
return "length_less_than_or_equals"
|
||||
else:
|
||||
return comparator
|
||||
@@ -237,14 +237,14 @@ def _convert_validators_to_mapping(validators):
|
||||
|
||||
Examples:
|
||||
>>> validators = [
|
||||
{"check": "v1", "expect": 201, "comparator": "eq"},
|
||||
{"check": {"b": 1}, "expect": 200, "comparator": "eq"}
|
||||
]
|
||||
>>> _convert_validators_to_mapping(validators)
|
||||
{
|
||||
("v1", "eq"): {"check": "v1", "expect": 201, "comparator": "eq"},
|
||||
('{"b": 1}', "eq"): {"check": {"b": 1}, "expect": 200, "comparator": "eq"}
|
||||
}
|
||||
{"check": "v1", "expect": 201, "comparator": "eq"},
|
||||
{"check": {"b": 1}, "expect": 200, "comparator": "eq"}
|
||||
]
|
||||
>>> print(_convert_validators_to_mapping(validators))
|
||||
{
|
||||
("v1", "eq"): {"check": "v1", "expect": 201, "comparator": "eq"},
|
||||
('{"b": 1}', "eq"): {"check": {"b": 1}, "expect": 200, "comparator": "eq"}
|
||||
}
|
||||
|
||||
"""
|
||||
validators_mapping = {}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "httprunner"
|
||||
version = "2.3.2"
|
||||
version = "2.3.3"
|
||||
description = "One-stop solution for HTTP(S) testing."
|
||||
license = "Apache-2.0"
|
||||
readme = "README.md"
|
||||
|
||||
10
tests/httpbin/a.b.c/rpc.yml
Normal file
10
tests/httpbin/a.b.c/rpc.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
name: rpc api
|
||||
base_url: http://httpbin.org
|
||||
variables:
|
||||
expected_status_code: 200
|
||||
request:
|
||||
url: /headers
|
||||
method: GET
|
||||
validate:
|
||||
- eq: ["status_code", $expected_status_code]
|
||||
- eq: [content.headers.Host, "httpbin.org"]
|
||||
@@ -52,11 +52,11 @@ class TestRunner(ApiServerUnittest):
|
||||
"name": "basic test with httpbin",
|
||||
"base_url": HTTPBIN_SERVER,
|
||||
"setup_hooks": [
|
||||
"${sleep_N_secs(0.5)}",
|
||||
"${sleep(0.5)}",
|
||||
"${hook_print(setup)}"
|
||||
],
|
||||
"teardown_hooks": [
|
||||
"${sleep_N_secs(1)}",
|
||||
"${sleep(1)}",
|
||||
"${hook_print(teardown)}"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -275,3 +275,37 @@ class TestUtils(ApiServerUnittest):
|
||||
"d": [4, 5]
|
||||
}
|
||||
utils.print_info(info_mapping)
|
||||
|
||||
def test_prepare_dump_json_file_path_for_folder(self):
|
||||
# hrun tests/httpbin/a.b.c/ --save-tests
|
||||
project_working_directory = os.path.join(os.getcwd(), "tests")
|
||||
project_mapping = {
|
||||
"PWD": project_working_directory,
|
||||
"test_path": os.path.join(os.getcwd(), "tests", "httpbin", "a.b.c")
|
||||
}
|
||||
self.assertEqual(
|
||||
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
|
||||
os.path.join(project_working_directory, "logs", "httpbin/a.b.c/all.loaded.json")
|
||||
)
|
||||
|
||||
def test_prepare_dump_json_file_path_for_file(self):
|
||||
# hrun tests/httpbin/a.b.c/rpc.yml --save-tests
|
||||
project_working_directory = os.path.join(os.getcwd(), "tests")
|
||||
project_mapping = {
|
||||
"PWD": project_working_directory,
|
||||
"test_path": os.path.join(os.getcwd(), "tests", "httpbin", "a.b.c", "rpc.yml")
|
||||
}
|
||||
self.assertEqual(
|
||||
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
|
||||
os.path.join(project_working_directory, "logs", "httpbin/a.b.c/rpc.loaded.json")
|
||||
)
|
||||
|
||||
def test_prepare_dump_json_file_path_for_passed_testcase(self):
|
||||
project_working_directory = os.path.join(os.getcwd(), "tests")
|
||||
project_mapping = {
|
||||
"PWD": project_working_directory
|
||||
}
|
||||
self.assertEqual(
|
||||
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
|
||||
os.path.join(project_working_directory, "logs", "tests_mapping.loaded.json")
|
||||
)
|
||||
|
||||
@@ -117,7 +117,6 @@ class TestValidator(unittest.TestCase):
|
||||
{"check": "status_code", "comparator": "equals", "expect": 201}
|
||||
)
|
||||
|
||||
|
||||
def test_extend_validators(self):
|
||||
def_validators = [
|
||||
{'eq': ['v1', 200]},
|
||||
|
||||
Reference in New Issue
Block a user