mirror of
https://github.com/httprunner/httprunner.git
synced 2026-05-13 17:29:56 +08:00
refactor: make loader as submodule, split to check/locate/load/cases
This commit is contained in:
@@ -1,916 +1,13 @@
|
||||
import csv
|
||||
import importlib
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
import yaml
|
||||
|
||||
from httprunner import built_in, exceptions, logger, utils
|
||||
from httprunner.loader.check import is_testcase_path, is_testcases, is_function, validate_json_file
|
||||
|
||||
try:
    # PyYAML version >= 5.1
    # ref: https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
    # silence the "yaml.load() without Loader is deprecated" warning emitted
    # by PyYAML 5.1+ for plain yaml.load(stream) calls
    yaml.warnings({'YAMLLoadWarning': False})
except AttributeError:
    # older PyYAML versions have no yaml.warnings(); nothing to silence
    pass
|
||||
|
||||
|
||||
###############################################################################
|
||||
# file loader
|
||||
###############################################################################
|
||||
|
||||
|
||||
def _check_format(file_path, content):
    """ Validate loaded testcase file content.

    Args:
        file_path (str): file path, only used in error messages
        content: parsed file content

    Raises:
        exceptions.FileFormatError: if content is empty or is neither
            a list nor a dict.

    """
    # TODO: replace with JSON schema validation
    if not content:
        # testcase file content is empty
        err_msg = u"Testcase file content is empty: {}".format(file_path)
    elif not isinstance(content, (list, dict)):
        # testcase file content does not match testcase format
        err_msg = u"Testcase file content format invalid: {}".format(file_path)
    else:
        # content is acceptable
        return

    logger.log_error(err_msg)
    raise exceptions.FileFormatError(err_msg)
|
||||
|
||||
|
||||
def load_yaml_file(yaml_file):
    """ load yaml file and check file content format

    Args:
        yaml_file (str): yaml file path

    Returns:
        list/dict: parsed yaml content

    Raises:
        exceptions.FileFormatError: if content is empty or not list/dict

    """
    with io.open(yaml_file, 'r', encoding='utf-8') as stream:
        # Pass an explicit Loader: calling yaml.load() without one is
        # deprecated (and potentially unsafe) since PyYAML 5.1.
        # FullLoader matches the previous default behavior; fall back to
        # yaml.Loader on PyYAML < 5.1 which has no FullLoader.
        yaml_loader = getattr(yaml, "FullLoader", yaml.Loader)
        yaml_content = yaml.load(stream, Loader=yaml_loader)

    _check_format(yaml_file, yaml_content)
    return yaml_content
|
||||
|
||||
|
||||
def load_json_file(json_file):
    """ Load a JSON file and validate its content format.

    Args:
        json_file (str): json file path

    Returns:
        list/dict: parsed json content

    Raises:
        exceptions.FileFormatError: if the file is not valid JSON, or its
            content is empty or not a list/dict.

    """
    with io.open(json_file, encoding='utf-8') as fp:
        try:
            json_content = json.load(fp)
        except exceptions.JSONDecodeError:
            err_msg = u"JSONDecodeError: JSON file format error: {}".format(json_file)
            logger.log_error(err_msg)
            raise exceptions.FileFormatError(err_msg)

    _check_format(json_file, json_content)
    return json_content
|
||||
|
||||
|
||||
def load_csv_file(csv_file):
    """ Load a csv file as a list of parameter dicts.

    Args:
        csv_file (str): csv file path; a relative path is resolved against
            the project working directory.

    Returns:
        list: one dict per data row, keyed by the header row, e.g.
            [
                {'username': 'test1', 'password': '111111'},
                {'username': 'test2', 'password': '222222'}
            ]

    Raises:
        exceptions.CSVNotFound: if the csv file does not exist.

    """
    if not os.path.isabs(csv_file):
        # resolve relative path against project working directory
        pwd = tests_def_mapping["PWD"] or os.getcwd()
        # split/join keeps the path compatible with Windows/Linux separators
        csv_file = os.path.join(pwd, *csv_file.split("/"))

    if not os.path.isfile(csv_file):
        # file path not exist
        raise exceptions.CSVNotFound(csv_file)

    with io.open(csv_file, encoding='utf-8') as csv_fp:
        return list(csv.DictReader(csv_fp))
|
||||
|
||||
|
||||
def load_file(file_path):
    """ Load file content, dispatching on file suffix.

    Args:
        file_path (str): json/yaml/yml/csv file path

    Returns:
        list/dict: loaded content; an empty list for unsupported suffixes
            (a warning is logged).

    Raises:
        exceptions.FileNotFound: if file_path does not exist.

    """
    if not os.path.isfile(file_path):
        raise exceptions.FileNotFound("{} does not exist.".format(file_path))

    file_suffix = os.path.splitext(file_path)[1].lower()
    suffix_loaders = {
        ".json": load_json_file,
        ".yaml": load_yaml_file,
        ".yml": load_yaml_file,
        ".csv": load_csv_file
    }
    loader = suffix_loaders.get(file_suffix)
    if loader is None:
        # '' or other unsupported suffix
        err_msg = u"Unsupported file format: {}".format(file_path)
        logger.log_warning(err_msg)
        return []

    return loader(file_path)
|
||||
|
||||
|
||||
def load_folder_files(folder_path, recursive=True):
    """ Collect yml/yaml/json files under the given folder(s).

    Args:
        folder_path (str/list/set): folder path, or a collection of folder
            paths whose results are merged.
        recursive (bool): descend into sub folders when True.

    Returns:
        list: file paths ending with yml/yaml/json; empty list if the
            folder does not exist.

    """
    # a collection of folders: merge the results of each unique folder
    if isinstance(folder_path, (list, set)):
        merged_files = []
        for each_folder in set(folder_path):
            merged_files.extend(load_folder_files(each_folder, recursive))
        return merged_files

    if not os.path.exists(folder_path):
        return []

    file_list = []
    for dirpath, _, filenames in os.walk(folder_path):
        file_list.extend(
            os.path.join(dirpath, filename)
            for filename in filenames
            if filename.endswith(('.yml', '.yaml', '.json'))
        )
        if not recursive:
            # only the top folder was requested
            break

    return file_list
|
||||
|
||||
|
||||
def load_dot_env_file(dot_env_path):
    """ load .env file.

    Args:
        dot_env_path (str): .env file path

    Returns:
        dict: environment variables mapping, e.g.
            {
                "UserName": "debugtalk",
                "Password": "123456",
                "PROJECT_KEY": "ABCDEFGH"
            }
            Empty dict if the file does not exist.

    Raises:
        exceptions.FileFormatError: If .env file format is invalid.

    """
    if not os.path.isfile(dot_env_path):
        return {}

    logger.log_info("Loading environment variables from {}".format(dot_env_path))
    env_variables_mapping = {}

    with io.open(dot_env_path, 'r', encoding='utf-8') as fp:
        for line in fp:
            # blank lines and '#' comment lines are common in .env files;
            # skip them instead of raising / parsing them as variables
            line = line.strip()
            if not line or line.startswith("#"):
                continue

            # maxsplit=1: the value itself may contain the delimiter
            if "=" in line:
                variable, value = line.split("=", 1)
            elif ":" in line:
                variable, value = line.split(":", 1)
            else:
                raise exceptions.FileFormatError(".env format error")

            env_variables_mapping[variable.strip()] = value.strip()

    # also export the variables into os.environ for debugtalk.py etc.
    utils.set_os_environ(env_variables_mapping)
    return env_variables_mapping
|
||||
|
||||
|
||||
def locate_file(start_path, file_name):
    """ Locate file_name and return its absolute path.

    Searches recursively upward from start_path, stopping at the current
    working directory or the filesystem root.

    Args:
        start_path (str): start locating path, maybe file path or directory path
        file_name (str): file name to locate

    Returns:
        str: absolute path of the located file

    Raises:
        exceptions.FileNotFound: if start_path is invalid or the file
            cannot be located.

    """
    if os.path.isfile(start_path):
        # start from the directory containing the file
        search_dir = os.path.dirname(start_path)
    elif os.path.isdir(start_path):
        search_dir = start_path
    else:
        raise exceptions.FileNotFound("invalid path: {}".format(start_path))

    candidate = os.path.join(search_dir, file_name)
    if os.path.isfile(candidate):
        return os.path.abspath(candidate)

    # stop searching once the current working directory or the
    # filesystem root has been reached
    if os.path.abspath(search_dir) in [os.getcwd(), os.path.abspath(os.sep)]:
        raise exceptions.FileNotFound("{} not found in {}".format(file_name, start_path))

    # continue one directory level upward
    return locate_file(os.path.dirname(search_dir), file_name)
|
||||
|
||||
|
||||
###############################################################################
|
||||
# debugtalk.py module loader
|
||||
###############################################################################
|
||||
|
||||
|
||||
def load_module_functions(module):
    """ Extract all function objects defined in a python module.

    Args:
        module: imported python module

    Returns:
        dict: mapping of function name to function object, e.g.
            {
                "func1_name": func1,
                "func2_name": func2
            }

    """
    return {
        name: attribute
        for name, attribute in vars(module).items()
        if is_function(attribute)
    }
|
||||
|
||||
|
||||
def load_builtin_functions():
    """ load built_in module functions
    """
    # expose every function defined in the httprunner built_in module
    # as a name -> function mapping
    return load_module_functions(built_in)
|
||||
|
||||
|
||||
def load_debugtalk_functions():
    """ load project debugtalk.py module functions
        debugtalk.py should be located in project working directory.

    Returns:
        dict: debugtalk module functions mapping
            {
                "func1_name": func1,
                "func2_name": func2
            }

    """
    # load debugtalk.py module
    # NOTE: this only succeeds after the project working directory has been
    # inserted into sys.path (see load_project_tests); otherwise importing
    # "debugtalk" raises ImportError.
    imported_module = importlib.import_module("debugtalk")
    return load_module_functions(imported_module)
|
||||
|
||||
|
||||
###############################################################################
|
||||
# testcase loader
|
||||
###############################################################################
|
||||
|
||||
|
||||
# project-level mapping (PWD, env, functions, test_path);
# populated in place by load_project_tests()
project_mapping = {}
# cache of loaded api/testcase definitions, keyed by api id / testcase path;
# "PWD" holds the project working directory once resolved
tests_def_mapping = {
    "PWD": None,
    "api": {},
    "testcases": {}
}
|
||||
|
||||
|
||||
def __extend_with_api_ref(raw_testinfo):
    """ Extend a teststep with its referenced api definition
        (stored under the "api_def" key).

    Raises:
        exceptions.ApiNotFound: api not found

    """
    api_name = raw_testinfo["api"]

    # api maybe defined in two types:
    # 1, individual file: each file is corresponding to one api definition
    # 2, api sets file: one file contains a list of api definitions
    if not os.path.isabs(api_name):
        # make compatible with Windows/Linux path separators
        api_path = os.path.join(tests_def_mapping["PWD"], *api_name.split("/"))
        if os.path.isfile(api_path):
            # type 1: api is defined in individual file
            api_name = api_path

    if api_name not in tests_def_mapping["api"]:
        raise exceptions.ApiNotFound("{} not found!".format(api_name))

    # NOTICE: deepcopy so the cached definition is not mutated later
    raw_testinfo["api_def"] = utils.deepcopy_dict(tests_def_mapping["api"][api_name])
|
||||
|
||||
|
||||
def __extend_with_testcase_ref(raw_testinfo):
    """ Extend a teststep with its referenced testcase definition
        (stored under the "testcase_def" key); loaded testcases are cached.
    """
    testcase_path = raw_testinfo["testcase"]

    if testcase_path in tests_def_mapping["testcases"]:
        # already loaded and cached
        testcase_dict = tests_def_mapping["testcases"][testcase_path]
    else:
        # make compatible with Windows/Linux path separators
        testcase_path = os.path.join(
            project_mapping["PWD"],
            *testcase_path.split("/")
        )
        loaded_testcase = load_file(testcase_path)

        if isinstance(loaded_testcase, list):
            # make compatible with version < 2.2.0
            testcase_dict = load_testcase(loaded_testcase)
        elif isinstance(loaded_testcase, dict) and "teststeps" in loaded_testcase:
            # format version 2, implemented in 2.2.0
            testcase_dict = load_testcase_v2(loaded_testcase)
        else:
            raise exceptions.FileFormatError(
                "Invalid format testcase: {}".format(testcase_path))

        # cache for subsequent references
        tests_def_mapping["testcases"][testcase_path] = testcase_dict

    raw_testinfo["testcase_def"] = testcase_dict
|
||||
|
||||
|
||||
def load_teststep(raw_testinfo):
    """ load testcase step content.
        teststep maybe defined directly, or reference api/testcase.

    Args:
        raw_testinfo (dict): test data in one of 3 formats:
            - api reference: {"name": ..., "api": "/path/to/api",
              "variables": {}, "validate": [], "extract": {}}
            - testcase reference: {"name": ..., "testcase": "/path/to/testcase",
              "variables": {}}
            - direct definition: {"name": ..., "request": {},
              "variables": {}, "validate": [], "extract": {}}

    Returns:
        dict: the same dict, extended in place with "api_def" /
            "testcase_def" when the step references an api / testcase.

    """
    if "api" in raw_testinfo:
        # reference api
        __extend_with_api_ref(raw_testinfo)
    elif "testcase" in raw_testinfo:
        # reference testcase
        __extend_with_testcase_ref(raw_testinfo)
    # TODO: reference proc functions ("func" key)
    # otherwise the teststep is defined directly: nothing to extend

    return raw_testinfo
|
||||
|
||||
|
||||
def load_testcase(raw_testcase):
    """ load testcase with api/testcase references.

    Args:
        raw_testcase (list): raw testcase content loaded from JSON/YAML file;
            a list of single-key blocks, one optional {"config": {...}} and
            any number of {"test": {...}} items.

    Returns:
        dict: loaded testcase content
            {
                "config": {},
                "teststeps": [test11, test12]
            }

    """
    loaded = {"config": {}, "teststeps": []}

    for item in raw_testcase:
        # each block is a single-key dict: "config" or "test"
        key, block = item.popitem()
        if key == "config":
            loaded["config"].update(block)
        elif key == "test":
            loaded["teststeps"].append(load_teststep(block))
        else:
            logger.log_warning(
                "unexpected block key: {}. block key should only be 'config' or 'test'.".format(key)
            )

    return loaded
|
||||
|
||||
|
||||
def load_testcase_v2(raw_testcase):
    """ load testcase in format version 2 (implemented in 2.2.0).

    Args:
        raw_testcase (dict): raw testcase content loaded from JSON/YAML file:
            {
                "config": {"name": "xxx", "variables": {}},
                "teststeps": [{"name": "teststep 1", ...}, ...]
            }

    Returns:
        dict: the same mapping with every teststep loaded:
            {
                "config": {},
                "teststeps": [test11, test12]
            }

    """
    # replace the raw teststeps with their loaded counterparts, preserving order
    raw_testcase["teststeps"] = list(map(load_teststep, raw_testcase.pop("teststeps")))
    return raw_testcase
|
||||
|
||||
|
||||
def load_testsuite(raw_testsuite):
    """ load testsuite with testcase references.

    Supports two formats:
        - version 1 (< 2.2.0): "testcases" is a dict of name -> testcase
        - version 2 (2.2.0+): "testcases" is a list of testcases, each
          carrying its own "name" key

    Args:
        raw_testsuite (dict): raw testsuite content loaded from JSON/YAML file,
            containing "config" and "testcases" entries.

    Returns:
        dict: loaded testsuite content
            {
                "config": {},
                "testcases": {name: testcase, ...}
            }

    Raises:
        exceptions.FileFormatError: if "testcases" is neither dict nor list.

    """
    raw_testcases = raw_testsuite.pop("testcases")
    # normalized result is always a name -> testcase mapping
    loaded_testcases = raw_testsuite["testcases"] = {}

    if isinstance(raw_testcases, dict):
        # version 1: make compatible with version < 2.2.0
        for name, raw_testcase in raw_testcases.items():
            __extend_with_testcase_ref(raw_testcase)
            raw_testcase.setdefault("name", name)
            loaded_testcases[name] = raw_testcase
    elif isinstance(raw_testcases, list):
        # format version 2, implemented in 2.2.0
        for raw_testcase in raw_testcases:
            __extend_with_testcase_ref(raw_testcase)
            loaded_testcases[raw_testcase["name"]] = raw_testcase
    else:
        # invalid format
        raise exceptions.FileFormatError("Invalid testsuite format!")

    return raw_testsuite
|
||||
|
||||
|
||||
def load_test_file(path):
    """ load test file, which may be an api, a testcase or a testsuite.

    Args:
        path (str): test file path

    Returns:
        dict: loaded content with "path" and "type" keys added;
            "type" is one of "api" / "testcase" / "testsuite":
            {"path": path, "type": "api", "name": "", "request": {}}
            {"path": path, "type": "testcase", "config": {}, "teststeps": []}
            {"path": path, "type": "testsuite", "config": {}, "testcases": {}}

    Raises:
        exceptions.FileFormatError: if content matches no known format.

    """
    raw_content = load_file(path)

    if isinstance(raw_content, dict):
        if "testcases" in raw_content:
            # file_type: testsuite
            # TODO: add json schema validation for testsuite
            loaded_content = load_testsuite(raw_content)
            file_type = "testsuite"
        elif "teststeps" in raw_content:
            # file_type: testcase (format version 2)
            loaded_content = load_testcase_v2(raw_content)
            file_type = "testcase"
        elif "request" in raw_content:
            # file_type: api
            # TODO: add json schema validation for api
            loaded_content = raw_content
            file_type = "api"
        else:
            # invalid format
            raise exceptions.FileFormatError("Invalid test file format!")
    elif isinstance(raw_content, list) and len(raw_content) > 0:
        # file_type: testcase, make compatible with version < 2.2.0
        # TODO: add json schema validation for testcase
        loaded_content = load_testcase(raw_content)
        file_type = "testcase"
    else:
        # invalid format
        raise exceptions.FileFormatError("Invalid test file format!")

    loaded_content["path"] = path
    loaded_content["type"] = file_type

    return loaded_content
|
||||
|
||||
|
||||
def load_folder_content(folder_path):
    """ load api/testcases/testsuites definitions from folder.

    Args:
        folder_path (str): api/testcases/testsuites files folder.

    Returns:
        dict: mapping of file path -> loaded file content, e.g.
            {
                "tests/api/basic.yml": [
                    {"api": {"def": "api_login", "request": {}, "validate": []}}
                ]
            }

    """
    return {
        file_path: load_file(file_path)
        for file_path in load_folder_files(folder_path)
    }
|
||||
|
||||
|
||||
def load_api_folder(api_folder_path):
    """ load api definitions from api folder.

    Each api file is either a list of single-key blocks
    ([{"api": {"def": "api_login", "request": {}, "validate": []}}, ...]),
    keyed in the result by the api's "id"/"def"/"name", or a single api
    mapping, keyed by the file path.

    Args:
        api_folder_path (str): api files folder.

    Returns:
        dict: api definition mapping, e.g.
            {
                "api_login": {"function_meta": {...}, "request": {}},
                "api_logout": {"function_meta": {...}, "request": {}}
            }

    Raises:
        exceptions.ParamsError: on invalid or duplicated api definitions.

    """
    api_definition_mapping = {}

    api_items_mapping = load_folder_content(api_folder_path)

    for api_file_path, api_items in api_items_mapping.items():
        # TODO: add JSON schema validation
        if isinstance(api_items, list):
            # api sets file: one file contains a list of api definitions
            for api_item in api_items:
                key, api_dict = api_item.popitem()
                # identifier preference: id, then def, then name
                api_id = api_dict.get("id") or api_dict.get("def") \
                    or api_dict.get("name")
                if key != "api" or not api_id:
                    raise exceptions.ParamsError(
                        "Invalid API defined in {}".format(api_file_path))
                if api_id in api_definition_mapping:
                    raise exceptions.ParamsError(
                        "Duplicated API ({}) defined in {}".format(
                            api_id, api_file_path))
                api_definition_mapping[api_id] = api_dict
        elif isinstance(api_items, dict):
            # individual file: the whole file is one api definition
            if api_file_path in api_definition_mapping:
                raise exceptions.ParamsError(
                    "Duplicated API defined: {}".format(api_file_path))
            api_definition_mapping[api_file_path] = api_items

    return api_definition_mapping
|
||||
|
||||
|
||||
def locate_debugtalk_py(start_path):
    """ locate debugtalk.py file by searching upward from start_path.

    Args:
        start_path (str): start locating path,
            maybe testcase file path or directory path

    Returns:
        str: debugtalk.py file path, None if not found

    """
    try:
        return locate_file(start_path, "debugtalk.py")
    except exceptions.FileNotFound:
        # a missing debugtalk.py is not an error: the caller falls back
        # to the current working directory
        return None
|
||||
|
||||
|
||||
def load_project_tests(test_path, dot_env_path=None):
    """ load api, testcases, .env, debugtalk.py functions.
        api/testcases folder is relative to project_working_directory

        Side effects: inserts the project working directory into sys.path,
        and populates the module-level ``project_mapping``
        (PWD/env/functions/test_path) and ``tests_def_mapping`` (api/PWD)
        dicts in place; nothing is returned.

    Args:
        test_path (str): test file/folder path, locate pwd from this path.
        dot_env_path (str): specified .env file path

    """

    def prepare_path(path):
        # ensure the path exists and convert it to an absolute path
        if not os.path.exists(path):
            err_msg = "path not exist: {}".format(path)
            logger.log_error(err_msg)
            raise exceptions.FileNotFound(err_msg)

        if not os.path.isabs(path):
            path = os.path.join(os.getcwd(), path)

        return path

    test_path = prepare_path(test_path)
    # locate debugtalk.py file by searching upward from test_path
    debugtalk_path = locate_debugtalk_py(test_path)

    if debugtalk_path:
        # The folder contains debugtalk.py will be treated as PWD.
        project_working_directory = os.path.dirname(debugtalk_path)
    else:
        # debugtalk.py not found, use os.getcwd() as PWD.
        project_working_directory = os.getcwd()

    # add PWD to sys.path so that "import debugtalk" can succeed
    sys.path.insert(0, project_working_directory)

    # load .env file
    # NOTICE:
    # environment variable maybe loaded in debugtalk.py
    # thus .env file should be loaded before loading debugtalk.py
    dot_env_path = dot_env_path or os.path.join(project_working_directory, ".env")
    project_mapping["env"] = load_dot_env_file(dot_env_path)

    if debugtalk_path:
        # load debugtalk.py functions
        debugtalk_functions = load_debugtalk_functions()
    else:
        debugtalk_functions = {}

    # record PWD and debugtalk.py functions in the project mapping

    project_mapping["PWD"] = project_working_directory
    built_in.PWD = project_working_directory
    project_mapping["functions"] = debugtalk_functions
    project_mapping["test_path"] = test_path

    # load api definitions from the <PWD>/api folder
    tests_def_mapping["api"] = load_api_folder(os.path.join(project_working_directory, "api"))
    tests_def_mapping["PWD"] = project_working_directory
|
||||
|
||||
|
||||
def load_tests(path, dot_env_path=None):
    """ load testcases from file path, extend and merge with api/testcase definitions.

    Args:
        path (str): testcase/testsuite file/foler path.
            path could be in 2 types:
                - absolute/relative file path
                - absolute/relative folder path
        dot_env_path (str): specified .env file path

    Returns:
        dict: tests mapping, include project_mapping and testcases.
            each testcase is corresponding to a file.
            {
                "project_mapping": {
                    "PWD": "XXXXX",
                    "functions": {},
                    "env": {}
                },
                "testcases": [
                    {"config": {...}, "teststeps": [...]},
                    ...
                ],
                "testsuites": [
                    {"config": {...}, "testcases": {...}},
                    ...
                ],
                "apis": [...]
            }
            The "testcases"/"testsuites"/"apis" keys are only present
            when at least one file of that type was loaded.

    """
    # populate project_mapping / tests_def_mapping module globals first
    load_project_tests(path, dot_env_path)
    tests_mapping = {
        "project_mapping": project_mapping
    }

    def __load_file_content(path):
        # load a single file and bucket it by its detected type
        loaded_content = None
        try:
            loaded_content = load_test_file(path)
        except exceptions.FileFormatError:
            # skip invalid files instead of aborting the whole run
            logger.log_warning("Invalid test file format: {}".format(path))

        if not loaded_content:
            pass
        elif loaded_content["type"] == "testsuite":
            tests_mapping.setdefault("testsuites", []).append(loaded_content)
        elif loaded_content["type"] == "testcase":
            tests_mapping.setdefault("testcases", []).append(loaded_content)
        elif loaded_content["type"] == "api":
            tests_mapping.setdefault("apis", []).append(loaded_content)

    if os.path.isdir(path):
        # folder path: load every yml/yaml/json file under it
        files_list = load_folder_files(path)
        for path in files_list:
            __load_file_content(path)

    elif os.path.isfile(path):
        __load_file_content(path)

    return tests_mapping
|
||||
from httprunner.loader.cases import load_tests, load_project_tests
|
||||
from httprunner.loader.check import is_testcase_path, is_testcases, validate_json_file
|
||||
from httprunner.loader.load import load_csv_file, load_builtin_functions
|
||||
|
||||
# public API of the loader package
__all__ = [
    "is_testcase_path",
    "is_testcases",
    "validate_json_file",
    "load_csv_file",
    "load_builtin_functions",
    "load_project_tests",
    "load_tests"
]
|
||||
|
||||
576
httprunner/loader/cases.py
Normal file
576
httprunner/loader/cases.py
Normal file
@@ -0,0 +1,576 @@
|
||||
import importlib
|
||||
import os
|
||||
|
||||
from httprunner import built_in, exceptions, logger, utils
|
||||
from httprunner.loader.load import load_module_functions, load_folder_content, load_file, load_dot_env_file, \
|
||||
load_folder_files
|
||||
from httprunner.loader.locate import init_project_working_directory, get_project_working_directory
|
||||
|
||||
# cache of loaded api and testcase definitions,
# keyed by api id / testcase path
tests_def_mapping = {
    "api": {},
    "testcases": {}
}
|
||||
|
||||
|
||||
def load_debugtalk_functions():
    """ load project debugtalk.py module functions
        debugtalk.py should be located in project working directory.

    Returns:
        dict: debugtalk module functions mapping
            {
                "func1_name": func1,
                "func2_name": func2
            }

    """
    # load debugtalk.py module
    # NOTE: this only succeeds after the project working directory has been
    # inserted into sys.path (see init_project_working_directory); otherwise
    # importing "debugtalk" raises ImportError.
    imported_module = importlib.import_module("debugtalk")
    return load_module_functions(imported_module)
|
||||
|
||||
|
||||
def __extend_with_api_ref(raw_testinfo):
    """ Extend a teststep with its referenced api definition
        (stored under the "api_def" key).

    Raises:
        exceptions.ApiNotFound: api not found

    """
    api_name = raw_testinfo["api"]

    # api maybe defined in two types:
    # 1, individual file: each file is corresponding to one api definition
    # 2, api sets file: one file contains a list of api definitions
    if not os.path.isabs(api_name):
        # relative reference: resolve against the project working directory,
        # keeping the path compatible with Windows/Linux separators
        pwd = get_project_working_directory()
        api_path = os.path.join(pwd, *api_name.split("/"))
        if os.path.isfile(api_path):
            # type 1: api is defined in individual file
            api_name = api_path

    if api_name not in tests_def_mapping["api"]:
        raise exceptions.ApiNotFound("{} not found!".format(api_name))

    # NOTICE: deepcopy so the cached definition is not mutated later
    raw_testinfo["api_def"] = utils.deepcopy_dict(tests_def_mapping["api"][api_name])
|
||||
|
||||
|
||||
def __extend_with_testcase_ref(raw_testinfo):
    """ Extend a teststep with its referenced testcase definition
        (stored under the "testcase_def" key); loaded testcases are cached.
    """
    testcase_path = raw_testinfo["testcase"]

    if testcase_path in tests_def_mapping["testcases"]:
        # already loaded and cached
        testcase_dict = tests_def_mapping["testcases"][testcase_path]
    else:
        # relative reference: resolve against the project working directory,
        # keeping the path compatible with Windows/Linux separators
        pwd = get_project_working_directory()
        testcase_path = os.path.join(
            pwd,
            *testcase_path.split("/")
        )
        loaded_testcase = load_file(testcase_path)

        if isinstance(loaded_testcase, list):
            # make compatible with version < 2.2.0
            testcase_dict = load_testcase(loaded_testcase)
        elif isinstance(loaded_testcase, dict) and "teststeps" in loaded_testcase:
            # format version 2, implemented in 2.2.0
            testcase_dict = load_testcase_v2(loaded_testcase)
        else:
            raise exceptions.FileFormatError(
                "Invalid format testcase: {}".format(testcase_path))

        # cache for subsequent references
        tests_def_mapping["testcases"][testcase_path] = testcase_dict

    raw_testinfo["testcase_def"] = testcase_dict
|
||||
|
||||
|
||||
def load_teststep(raw_testinfo):
    """ load testcase step content.
        teststep maybe defined directly, or reference api/testcase.

    Args:
        raw_testinfo (dict): test data in one of 3 formats:
            - api reference: {"name": ..., "api": "/path/to/api",
              "variables": {}, "validate": [], "extract": {}}
            - testcase reference: {"name": ..., "testcase": "/path/to/testcase",
              "variables": {}}
            - direct definition: {"name": ..., "request": {},
              "variables": {}, "validate": [], "extract": {}}

    Returns:
        dict: the same dict, extended in place with "api_def" /
            "testcase_def" when the step references an api / testcase.

    """
    if "api" in raw_testinfo:
        # reference api
        __extend_with_api_ref(raw_testinfo)
    elif "testcase" in raw_testinfo:
        # reference testcase
        __extend_with_testcase_ref(raw_testinfo)
    # TODO: reference proc functions ("func" key)
    # otherwise the teststep is defined directly: nothing to extend

    return raw_testinfo
|
||||
|
||||
|
||||
def load_testcase(raw_testcase):
    """ load testcase with api/testcase references.

    Args:
        raw_testcase (list): raw testcase content loaded from JSON/YAML file;
            a list of single-key blocks, one optional {"config": {...}} and
            any number of {"test": {...}} items.

    Returns:
        dict: loaded testcase content
            {
                "config": {},
                "teststeps": [test11, test12]
            }

    """
    loaded = {"config": {}, "teststeps": []}

    for item in raw_testcase:
        # each block is a single-key dict: "config" or "test"
        key, block = item.popitem()
        if key == "config":
            loaded["config"].update(block)
        elif key == "test":
            loaded["teststeps"].append(load_teststep(block))
        else:
            logger.log_warning(
                "unexpected block key: {}. block key should only be 'config' or 'test'.".format(key)
            )

    return loaded
|
||||
|
||||
|
||||
def load_testcase_v2(raw_testcase):
    """ load testcase in format version 2.

    Args:
        raw_testcase (dict): raw testcase content loaded from JSON/YAML file:
            {
                "config": {"name": "xxx", "variables": {}},
                "teststeps": [
                    {"name": "teststep 1", "request": {...}},
                    {"name": "teststep 2", "request": {...}}
                ]
            }

    Returns:
        dict: the same mapping with each teststep loaded in place:
            {
                "config": {},
                "teststeps": [test11, test12]
            }

    """
    # replace the raw step list with loaded steps (references resolved)
    raw_testcase["teststeps"] = [
        load_teststep(raw_step) for raw_step in raw_testcase.pop("teststeps")
    ]
    return raw_testcase
|
||||
|
||||
|
||||
def load_testsuite(raw_testsuite):
    """ load testsuite with testcase references.
        support two different formats.

    Args:
        raw_testsuite (dict): raw testsuite content loaded from JSON/YAML file.
            version 1 (< 2.2.0): "testcases" is a dict keyed by testcase name.
            version 2 (>= 2.2.0): "testcases" is a list, each item carrying
            its own "name" key.

    Returns:
        dict: loaded testsuite content
            {
                "config": {},
                "testcases": [testcase1, testcase2]
            }

    """
    raw_testcases = raw_testsuite.pop("testcases")
    raw_testsuite["testcases"] = {}
    loaded_testcases = raw_testsuite["testcases"]

    if isinstance(raw_testcases, dict):
        # make compatible with version < 2.2.0
        for testcase_name, raw_testcase in raw_testcases.items():
            __extend_with_testcase_ref(raw_testcase)
            raw_testcase.setdefault("name", testcase_name)
            loaded_testcases[testcase_name] = raw_testcase

    elif isinstance(raw_testcases, list):
        # format version 2, implemented in 2.2.0
        for raw_testcase in raw_testcases:
            __extend_with_testcase_ref(raw_testcase)
            loaded_testcases[raw_testcase["name"]] = raw_testcase

    else:
        # invalid format
        raise exceptions.FileFormatError("Invalid testsuite format!")

    return raw_testsuite
|
||||
|
||||
|
||||
def load_test_file(path):
    """ load test file, which may be an api, a testcase or a testsuite.

    Args:
        path (str): test file path

    Returns:
        dict: loaded test content, always carrying "path" and "type" keys.
            "type" is one of "api" / "testcase" / "testsuite".

    Raises:
        exceptions.FileFormatError: if the file content matches none of the
            recognized structures.

    """
    raw_content = load_file(path)

    if isinstance(raw_content, dict):
        if "testcases" in raw_content:
            # TODO: add json schema validation for testsuite
            loaded_content = load_testsuite(raw_content)
            file_type = "testsuite"
        elif "teststeps" in raw_content:
            # testcase in format version 2
            loaded_content = load_testcase_v2(raw_content)
            file_type = "testcase"
        elif "request" in raw_content:
            # TODO: add json schema validation for api
            loaded_content = raw_content
            file_type = "api"
        else:
            raise exceptions.FileFormatError("Invalid test file format!")

    elif isinstance(raw_content, list) and len(raw_content) > 0:
        # legacy testcase list, compatible with version < 2.2.0
        # TODO: add json schema validation for testcase
        loaded_content = load_testcase(raw_content)
        file_type = "testcase"

    else:
        raise exceptions.FileFormatError("Invalid test file format!")

    loaded_content["path"] = path
    loaded_content["type"] = file_type
    return loaded_content
|
||||
|
||||
|
||||
def load_api_folder(api_folder_path):
    """ load api definitions from api folder.

    Args:
        api_folder_path (str): api files folder. Each file is either a list of
            {"api": {...}} blocks or a single api dict.

    Returns:
        dict: api definition mapping, keyed by the api's "id"/"def"/"name"
            (list format) or by the file path (single-dict format).

    Raises:
        exceptions.ParamsError: on an invalid or duplicated api definition.

    """
    api_definition_mapping = {}

    for api_file_path, api_items in load_folder_content(api_folder_path).items():
        # TODO: add JSON schema validation
        if isinstance(api_items, list):
            for api_item in api_items:
                key, api_dict = api_item.popitem()
                # identifier precedence: id > def > name
                api_id = api_dict.get("id") or api_dict.get("def") \
                    or api_dict.get("name")
                if key != "api" or not api_id:
                    raise exceptions.ParamsError(
                        "Invalid API defined in {}".format(api_file_path))
                if api_id in api_definition_mapping:
                    raise exceptions.ParamsError(
                        "Duplicated API ({}) defined in {}".format(
                            api_id, api_file_path))
                api_definition_mapping[api_id] = api_dict

        elif isinstance(api_items, dict):
            if api_file_path in api_definition_mapping:
                raise exceptions.ParamsError(
                    "Duplicated API defined: {}".format(api_file_path))
            api_definition_mapping[api_file_path] = api_items

    return api_definition_mapping
|
||||
|
||||
|
||||
def load_project_tests(test_path, dot_env_path=None):
    """ load api, .env and debugtalk.py functions for the project.
        api/testcases folder is relative to project_working_directory

    Args:
        test_path (str): test file/folder path, locate pwd from this path.
        dot_env_path (str): specified .env file path; defaults to
            <project_working_directory>/.env when not given.

    Returns:
        dict: project mapping with keys "env", "PWD", "functions", "test_path".

    """
    # locates debugtalk.py (or falls back to cwd) and sets module-level PWD
    debugtalk_path, project_working_directory = init_project_working_directory(test_path)

    project_mapping = {}

    # load .env file
    # NOTICE:
    # environment variable maybe loaded in debugtalk.py
    # thus .env file should be loaded before loading debugtalk.py
    dot_env_path = dot_env_path or os.path.join(project_working_directory, ".env")
    project_mapping["env"] = load_dot_env_file(dot_env_path)

    if debugtalk_path:
        # load debugtalk.py functions
        debugtalk_functions = load_debugtalk_functions()
    else:
        debugtalk_functions = {}

    # expose PWD both in the returned mapping and on the built_in module,
    # so built-in helper functions can resolve project-relative paths
    project_mapping["PWD"] = project_working_directory
    built_in.PWD = project_working_directory
    project_mapping["functions"] = debugtalk_functions
    project_mapping["test_path"] = test_path

    # load api definitions from the project's "api" folder
    # NOTE(review): tests_def_mapping is not defined in this function —
    # presumably a module-level registry; the loaded apis are NOT part of
    # the returned project_mapping. Verify this global exists.
    tests_def_mapping["api"] = load_api_folder(os.path.join(project_working_directory, "api"))

    return project_mapping
|
||||
|
||||
|
||||
def load_tests(path, dot_env_path=None):
    """ load testcases from file path, extend and merge with api/testcase definitions.

    Args:
        path (str): testcase/testsuite file/folder path.
            path could be in 2 types:
                - absolute/relative file path
                - absolute/relative folder path
        dot_env_path (str): specified .env file path

    Returns:
        dict: tests mapping. Always contains "project_mapping"; the
            "testcases" / "testsuites" / "apis" lists are only present when
            at least one file of that type was successfully loaded.

    """
    tests_mapping = {
        "project_mapping": load_project_tests(path, dot_env_path)
    }

    # map a loaded file's "type" field to its bucket in tests_mapping
    type_to_bucket = {
        "testsuite": "testsuites",
        "testcase": "testcases",
        "api": "apis"
    }

    def __load_file_content(file_path):
        # best-effort: an invalid file is logged and skipped, not fatal
        try:
            loaded_content = load_test_file(file_path)
        except exceptions.FileFormatError:
            logger.log_warning("Invalid test file format: {}".format(file_path))
            return

        if not loaded_content:
            return

        bucket = type_to_bucket.get(loaded_content["type"])
        if bucket:
            tests_mapping.setdefault(bucket, []).append(loaded_content)

    if os.path.isdir(path):
        # use a distinct loop variable to avoid shadowing the `path` parameter
        for file_path in load_folder_files(path):
            __load_file_content(file_path)

    elif os.path.isfile(path):
        __load_file_content(path)

    return tests_mapping
|
||||
@@ -3,7 +3,7 @@ import json
|
||||
import os
|
||||
import types
|
||||
|
||||
from httprunner import logger
|
||||
from httprunner import logger, exceptions
|
||||
|
||||
|
||||
# TODO: validate data format with JSON schema
|
||||
@@ -130,6 +130,23 @@ def is_testcase_path(path):
|
||||
return True
|
||||
|
||||
|
||||
def check_testcase_format(file_path, content):
    """ check testcase format if valid

    Args:
        file_path (str): file path, used only in error messages.
        content: parsed file content; must be a non-empty list or dict.

    Raises:
        exceptions.FileFormatError: if content is empty or mis-typed.

    """
    # TODO: replace with JSON schema validation
    if not content:
        # testcase file content is empty
        err_msg = u"Testcase file content is empty: {}".format(file_path)
    elif not isinstance(content, (list, dict)):
        # testcase file content does not match testcase format
        err_msg = u"Testcase file content format invalid: {}".format(file_path)
    else:
        return

    logger.log_error(err_msg)
    raise exceptions.FileFormatError(err_msg)
|
||||
|
||||
|
||||
def validate_json_file(file_list):
|
||||
""" validate JSON testcase format
|
||||
"""
|
||||
|
||||
241
httprunner/loader/load.py
Normal file
241
httprunner/loader/load.py
Normal file
@@ -0,0 +1,241 @@
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
|
||||
import yaml
|
||||
|
||||
from httprunner import built_in
|
||||
from httprunner import exceptions, logger, utils
|
||||
from httprunner.loader.check import check_testcase_format, is_function
|
||||
from httprunner.loader.locate import get_project_working_directory
|
||||
|
||||
try:
|
||||
# PyYAML version >= 5.1
|
||||
# ref: https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
|
||||
yaml.warnings({'YAMLLoadWarning': False})
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
def _load_yaml_file(yaml_file):
    """ load yaml file and check file content format

    Args:
        yaml_file (str): yaml file path

    Returns:
        list/dict: parsed yaml content

    Raises:
        exceptions.FileFormatError: if content is empty or not a list/dict.

    """
    with io.open(yaml_file, 'r', encoding='utf-8') as stream:
        # pass an explicit Loader: bare yaml.load() is deprecated (and unsafe
        # for untrusted input) since PyYAML 5.1; fall back to the legacy
        # Loader on older PyYAML versions that lack FullLoader.
        loader_cls = getattr(yaml, "FullLoader", yaml.Loader)
        yaml_content = yaml.load(stream, Loader=loader_cls)

    check_testcase_format(yaml_file, yaml_content)
    return yaml_content
|
||||
|
||||
|
||||
def _load_json_file(json_file):
    """ load a JSON file and validate its testcase format.

    Args:
        json_file (str): json file path

    Returns:
        list/dict: parsed json content

    Raises:
        exceptions.FileFormatError: on malformed JSON or invalid content.

    """
    with io.open(json_file, encoding='utf-8') as fp:
        try:
            content = json.load(fp)
        except exceptions.JSONDecodeError:
            err_msg = u"JSONDecodeError: JSON file format error: {}".format(json_file)
            logger.log_error(err_msg)
            raise exceptions.FileFormatError(err_msg)

    check_testcase_format(json_file, content)
    return content
|
||||
|
||||
|
||||
def load_csv_file(csv_file):
    """ load csv file and check file content format

    Args:
        csv_file (str): csv file path. A relative path is resolved against
            the project working directory.

    Returns:
        list: list of parameters, each parameter is in dict format

    Examples:
        >>> cat csv_file
        username,password
        test1,111111
        test2,222222
        test3,333333

        >>> load_csv_file(csv_file)
        [
            {'username': 'test1', 'password': '111111'},
            {'username': 'test2', 'password': '222222'},
            {'username': 'test3', 'password': '333333'}
        ]

    """
    if not os.path.isabs(csv_file):
        # make compatible with Windows/Linux
        csv_file = os.path.join(get_project_working_directory(), *csv_file.split("/"))

    if not os.path.isfile(csv_file):
        # file path not exist
        raise exceptions.CSVNotFound(csv_file)

    with io.open(csv_file, encoding='utf-8') as csvfile:
        return list(csv.DictReader(csvfile))
|
||||
|
||||
|
||||
def load_file(file_path):
    """ dispatch file loading by extension (.json / .yaml / .yml / .csv).

    Args:
        file_path (str): file to load

    Returns:
        parsed content, or [] for an unsupported suffix.

    Raises:
        exceptions.FileNotFound: if file_path does not exist.

    """
    if not os.path.isfile(file_path):
        raise exceptions.FileNotFound("{} does not exist.".format(file_path))

    suffix_loaders = {
        '.json': _load_json_file,
        '.yaml': _load_yaml_file,
        '.yml': _load_yaml_file,
        '.csv': load_csv_file
    }
    file_suffix = os.path.splitext(file_path)[1].lower()
    loader = suffix_loaders.get(file_suffix)
    if loader is None:
        # '' or other suffix
        err_msg = u"Unsupported file format: {}".format(file_path)
        logger.log_warning(err_msg)
        return []

    return loader(file_path)
|
||||
|
||||
|
||||
def load_folder_files(folder_path, recursive=True):
    """ load folder path, return all files endswith yml/yaml/json in list.

    Args:
        folder_path (str): specified folder path to load; may also be a
            list/set of folder paths, which are merged (deduplicated).
        recursive (bool): load files recursively if True

    Returns:
        list: files endswith yml/yaml/json

    """
    # a collection of paths: load each one and merge the results
    if isinstance(folder_path, (list, set)):
        merged = []
        for sub_path in set(folder_path):
            merged.extend(load_folder_files(sub_path, recursive))
        return merged

    if not os.path.exists(folder_path):
        return []

    file_list = []
    for dirpath, _dirnames, filenames in os.walk(folder_path):
        file_list.extend(
            os.path.join(dirpath, filename)
            for filename in filenames
            if filename.endswith(('.yml', '.yaml', '.json'))
        )
        if not recursive:
            # only the top-level directory was wanted
            break

    return file_list
|
||||
|
||||
|
||||
def load_dot_env_file(dot_env_path):
    """ load .env file.

    Args:
        dot_env_path (str): .env file path

    Returns:
        dict: environment variables mapping

            {
                "UserName": "debugtalk",
                "Password": "123456",
                "PROJECT_KEY": "ABCDEFGH"
            }

    Raises:
        exceptions.FileFormatError: If a non-empty, non-comment line contains
            neither '=' nor ':'.

    """
    if not os.path.isfile(dot_env_path):
        return {}

    logger.log_info("Loading environment variables from {}".format(dot_env_path))
    env_variables_mapping = {}

    with io.open(dot_env_path, 'r', encoding='utf-8') as fp:
        for line in fp:
            line = line.strip()
            # skip blank lines and comments; previously a trailing blank
            # line would incorrectly raise FileFormatError
            if not line or line.startswith("#"):
                continue

            # maxsplit=1
            if "=" in line:
                variable, value = line.split("=", 1)
            elif ":" in line:
                variable, value = line.split(":", 1)
            else:
                raise exceptions.FileFormatError(".env format error")

            env_variables_mapping[variable.strip()] = value.strip()

    utils.set_os_environ(env_variables_mapping)
    return env_variables_mapping
|
||||
|
||||
|
||||
def load_folder_content(folder_path):
    """ load api/testcases/testsuites definitions from folder.

    Args:
        folder_path (str): api/testcases/testsuites files folder.

    Returns:
        dict: mapping of file path to its parsed content, e.g.

            {
                "tests/api/basic.yml": [
                    {"api": {"def": "api_login", "request": {}, "validate": []}},
                    {"api": {"def": "api_logout", "request": {}, "validate": []}}
                ]
            }

    """
    return {
        file_path: load_file(file_path)
        for file_path in load_folder_files(folder_path)
    }
|
||||
|
||||
|
||||
def load_module_functions(module):
    """ load python module functions.

    Args:
        module: python module

    Returns:
        dict: functions mapping for specified python module

            {
                "func1_name": func1,
                "func2_name": func2
            }

    """
    # keep only attributes that is_function() recognizes as plain functions
    return {
        name: attr
        for name, attr in vars(module).items()
        if is_function(attr)
    }
|
||||
|
||||
|
||||
def load_builtin_functions():
    """ load built_in module functions.

    Returns:
        dict: mapping of function name to function object defined in
            httprunner.built_in.

    """
    return load_module_functions(built_in)
|
||||
|
||||
110
httprunner/loader/locate.py
Normal file
110
httprunner/loader/locate.py
Normal file
@@ -0,0 +1,110 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from httprunner import exceptions, logger
|
||||
|
||||
project_working_directory = None
|
||||
|
||||
|
||||
def locate_file(start_path, file_name):
    """ locate filename and return absolute file path.
        searching will be recursive upward until current working directory.

    Args:
        file_name (str): target locate file name
        start_path (str): start locating path, maybe file path or directory path

    Returns:
        str: located absolute file path.

    Raises:
        exceptions.FileNotFound: If failed to locate file.

    """
    if os.path.isfile(start_path):
        start_dir_path = os.path.dirname(start_path)
    elif os.path.isdir(start_path):
        start_dir_path = start_path
    else:
        raise exceptions.FileNotFound("invalid path: {}".format(start_path))

    candidate = os.path.join(start_dir_path, file_name)
    if os.path.isfile(candidate):
        return os.path.abspath(candidate)

    # stop when we reach the current working directory or the filesystem root
    if os.path.abspath(start_dir_path) in [os.getcwd(), os.path.abspath(os.sep)]:
        raise exceptions.FileNotFound("{} not found in {}".format(file_name, start_path))

    # otherwise recurse one directory upward
    return locate_file(os.path.dirname(start_dir_path), file_name)
|
||||
|
||||
|
||||
def locate_debugtalk_py(start_path):
    """ locate debugtalk.py file

    Args:
        start_path (str): start locating path,
            maybe testcase file path or directory path

    Returns:
        str: debugtalk.py file path, None if not found

    """
    try:
        # search upward from start_path; absence is expected, not an error
        debugtalk_path = locate_file(start_path, "debugtalk.py")
    except exceptions.FileNotFound:
        debugtalk_path = None

    return debugtalk_path
|
||||
|
||||
|
||||
def init_project_working_directory(test_path):
    """ this should be called at startup
        init_project_working_directory <- load_project_tests <- load_tests <- run

    Side effects: sets the module-level `project_working_directory` global
    and prepends it to sys.path so debugtalk.py becomes importable.

    Args:
        test_path: specified testfile path; relative paths are resolved
            against the current working directory.

    Returns:
        (str, str): debugtalk.py path (None if not found), project_working_directory

    Raises:
        exceptions.FileNotFound: if test_path does not exist.

    """
    def prepare_path(path):
        # validate existence and normalize to an absolute path
        if not os.path.exists(path):
            err_msg = "path not exist: {}".format(path)
            logger.log_error(err_msg)
            raise exceptions.FileNotFound(err_msg)

        if not os.path.isabs(path):
            path = os.path.join(os.getcwd(), path)

        return path

    test_path = prepare_path(test_path)

    # locate debugtalk.py file
    debugtalk_path = locate_debugtalk_py(test_path)

    global project_working_directory
    if debugtalk_path:
        # The folder contains debugtalk.py will be treated as PWD.
        project_working_directory = os.path.dirname(debugtalk_path)
    else:
        # debugtalk.py not found, use os.getcwd() as PWD.
        project_working_directory = os.getcwd()

    # add PWD to sys.path
    sys.path.insert(0, project_working_directory)

    return debugtalk_path, project_working_directory
|
||||
|
||||
|
||||
def get_project_working_directory():
    """ return the project working directory set by
        init_project_working_directory().

    Raises:
        exceptions.MyBaseFailure: if loader.load_tests() was never called,
            i.e. the directory has not been initialized yet.

    """
    # reading a module global needs no `global` declaration
    if project_working_directory is None:
        raise exceptions.MyBaseFailure("loader.load_tests() has not been called!")

    return project_working_directory
|
||||
@@ -6,7 +6,7 @@ import collections
|
||||
import json
|
||||
import re
|
||||
|
||||
from httprunner import exceptions, utils
|
||||
from httprunner import exceptions, utils, loader
|
||||
from httprunner.compat import basestring, numeric_types, str
|
||||
|
||||
# use $$ to escape $ notation
|
||||
@@ -423,7 +423,6 @@ def get_mapping_function(function_name, functions_mapping):
|
||||
return functions_mapping[function_name]
|
||||
|
||||
elif function_name in ["parameterize", "P"]:
|
||||
from httprunner import loader
|
||||
return loader.load_csv_file
|
||||
|
||||
elif function_name in ["environ", "ENV"]:
|
||||
@@ -431,7 +430,6 @@ def get_mapping_function(function_name, functions_mapping):
|
||||
|
||||
try:
|
||||
# check if HttpRunner builtin functions
|
||||
from httprunner import loader
|
||||
built_in_functions = loader.load_builtin_functions()
|
||||
return built_in_functions[function_name]
|
||||
except KeyError:
|
||||
|
||||
@@ -331,9 +331,9 @@ class TestHttpRunner(ApiServerUnittest):
|
||||
]
|
||||
}
|
||||
]
|
||||
loader.load_project_tests("tests")
|
||||
|
||||
tests_mapping = {
|
||||
"project_mapping": loader.project_mapping,
|
||||
"project_mapping": loader.load_project_tests("tests"),
|
||||
"testcases": testcases
|
||||
}
|
||||
summary = self.runner.run_tests(tests_mapping)
|
||||
@@ -363,9 +363,8 @@ class TestHttpRunner(ApiServerUnittest):
|
||||
]
|
||||
}
|
||||
]
|
||||
loader.load_project_tests("tests")
|
||||
tests_mapping = {
|
||||
"project_mapping": loader.project_mapping,
|
||||
"project_mapping": loader.load_project_tests("tests"),
|
||||
"testcases": testcases
|
||||
}
|
||||
summary = self.runner.run_tests(tests_mapping)
|
||||
@@ -393,9 +392,8 @@ class TestHttpRunner(ApiServerUnittest):
|
||||
]
|
||||
}
|
||||
]
|
||||
loader.load_project_tests("tests")
|
||||
tests_mapping = {
|
||||
"project_mapping": loader.project_mapping,
|
||||
"project_mapping": loader.load_project_tests("tests"),
|
||||
"testcases": testcases
|
||||
}
|
||||
summary = self.runner.run_tests(tests_mapping)
|
||||
|
||||
@@ -3,207 +3,21 @@ import os
|
||||
import unittest
|
||||
|
||||
from httprunner import exceptions, loader
|
||||
|
||||
|
||||
class TestFileLoader(unittest.TestCase):
|
||||
|
||||
def test_load_yaml_file_file_format_error(self):
|
||||
yaml_tmp_file = "tests/data/tmp.yml"
|
||||
# create empty yaml file
|
||||
with open(yaml_tmp_file, 'w') as f:
|
||||
f.write("")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
loader.load_yaml_file(yaml_tmp_file)
|
||||
|
||||
os.remove(yaml_tmp_file)
|
||||
|
||||
# create invalid format yaml file
|
||||
with open(yaml_tmp_file, 'w') as f:
|
||||
f.write("abc")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
loader.load_yaml_file(yaml_tmp_file)
|
||||
|
||||
os.remove(yaml_tmp_file)
|
||||
|
||||
def test_load_json_file_file_format_error(self):
|
||||
json_tmp_file = "tests/data/tmp.json"
|
||||
# create empty file
|
||||
with open(json_tmp_file, 'w') as f:
|
||||
f.write("")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
loader.load_json_file(json_tmp_file)
|
||||
|
||||
os.remove(json_tmp_file)
|
||||
|
||||
# create empty json file
|
||||
with open(json_tmp_file, 'w') as f:
|
||||
f.write("{}")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
loader.load_json_file(json_tmp_file)
|
||||
|
||||
os.remove(json_tmp_file)
|
||||
|
||||
# create invalid format json file
|
||||
with open(json_tmp_file, 'w') as f:
|
||||
f.write("abc")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
loader.load_json_file(json_tmp_file)
|
||||
|
||||
os.remove(json_tmp_file)
|
||||
|
||||
def test_load_testcases_bad_filepath(self):
|
||||
testcase_file_path = os.path.join(os.getcwd(), 'tests/data/demo')
|
||||
with self.assertRaises(exceptions.FileNotFound):
|
||||
loader.load_file(testcase_file_path)
|
||||
|
||||
def test_load_json_testcases(self):
|
||||
testcase_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/demo_testcase_hardcode.json')
|
||||
testcases = loader.load_file(testcase_file_path)
|
||||
self.assertEqual(len(testcases), 3)
|
||||
test = testcases[0]["test"]
|
||||
self.assertIn('name', test)
|
||||
self.assertIn('request', test)
|
||||
self.assertIn('url', test['request'])
|
||||
self.assertIn('method', test['request'])
|
||||
|
||||
def test_load_yaml_testcases(self):
|
||||
testcase_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/demo_testcase_hardcode.yml')
|
||||
testcases = loader.load_file(testcase_file_path)
|
||||
self.assertEqual(len(testcases), 3)
|
||||
test = testcases[0]["test"]
|
||||
self.assertIn('name', test)
|
||||
self.assertIn('request', test)
|
||||
self.assertIn('url', test['request'])
|
||||
self.assertIn('method', test['request'])
|
||||
|
||||
def test_load_csv_file_one_parameter(self):
|
||||
csv_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/user_agent.csv')
|
||||
csv_content = loader.load_file(csv_file_path)
|
||||
self.assertEqual(
|
||||
csv_content,
|
||||
[
|
||||
{'user_agent': 'iOS/10.1'},
|
||||
{'user_agent': 'iOS/10.2'},
|
||||
{'user_agent': 'iOS/10.3'}
|
||||
]
|
||||
)
|
||||
|
||||
def test_load_csv_file_multiple_parameters(self):
|
||||
csv_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/account.csv')
|
||||
csv_content = loader.load_file(csv_file_path)
|
||||
self.assertEqual(
|
||||
csv_content,
|
||||
[
|
||||
{'username': 'test1', 'password': '111111'},
|
||||
{'username': 'test2', 'password': '222222'},
|
||||
{'username': 'test3', 'password': '333333'}
|
||||
]
|
||||
)
|
||||
|
||||
def test_load_folder_files(self):
|
||||
folder = os.path.join(os.getcwd(), 'tests')
|
||||
file1 = os.path.join(os.getcwd(), 'tests', 'test_utils.py')
|
||||
file2 = os.path.join(os.getcwd(), 'tests', 'api', 'reset_all.yml')
|
||||
|
||||
files = loader.load_folder_files(folder, recursive=False)
|
||||
self.assertEqual(files, [])
|
||||
|
||||
files = loader.load_folder_files(folder)
|
||||
self.assertIn(file2, files)
|
||||
self.assertNotIn(file1, files)
|
||||
|
||||
files = loader.load_folder_files("not_existed_foulder", recursive=False)
|
||||
self.assertEqual([], files)
|
||||
|
||||
files = loader.load_folder_files(file2, recursive=False)
|
||||
self.assertEqual([], files)
|
||||
|
||||
def test_load_dot_env_file(self):
|
||||
dot_env_path = os.path.join(
|
||||
os.getcwd(), "tests", ".env"
|
||||
)
|
||||
env_variables_mapping = loader.load_dot_env_file(dot_env_path)
|
||||
self.assertIn("PROJECT_KEY", env_variables_mapping)
|
||||
self.assertEqual(env_variables_mapping["UserName"], "debugtalk")
|
||||
|
||||
def test_load_custom_dot_env_file(self):
|
||||
dot_env_path = os.path.join(
|
||||
os.getcwd(), "tests", "data", "test.env"
|
||||
)
|
||||
env_variables_mapping = loader.load_dot_env_file(dot_env_path)
|
||||
self.assertIn("PROJECT_KEY", env_variables_mapping)
|
||||
self.assertEqual(env_variables_mapping["UserName"], "test")
|
||||
self.assertEqual(env_variables_mapping["content_type"], "application/json; charset=UTF-8")
|
||||
|
||||
def test_load_env_path_not_exist(self):
|
||||
dot_env_path = os.path.join(
|
||||
os.getcwd(), "tests", "data",
|
||||
)
|
||||
env_variables_mapping = loader.load_dot_env_file(dot_env_path)
|
||||
self.assertEqual(env_variables_mapping, {})
|
||||
|
||||
def test_locate_file(self):
|
||||
with self.assertRaises(exceptions.FileNotFound):
|
||||
loader.locate_file(os.getcwd(), "debugtalk.py")
|
||||
|
||||
with self.assertRaises(exceptions.FileNotFound):
|
||||
loader.locate_file("", "debugtalk.py")
|
||||
|
||||
start_path = os.path.join(os.getcwd(), "tests")
|
||||
self.assertEqual(
|
||||
loader.locate_file(start_path, "debugtalk.py"),
|
||||
os.path.join(
|
||||
os.getcwd(), "tests/debugtalk.py"
|
||||
)
|
||||
)
|
||||
self.assertEqual(
|
||||
loader.locate_file("tests/", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
self.assertEqual(
|
||||
loader.locate_file("tests", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
self.assertEqual(
|
||||
loader.locate_file("tests/base.py", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
self.assertEqual(
|
||||
loader.locate_file("tests/data/demo_testcase.yml", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
|
||||
def test_load_folder_content(self):
|
||||
path = os.path.join(os.getcwd(), "tests", "api")
|
||||
items_mapping = loader.load_folder_content(path)
|
||||
file_path = os.path.join(os.getcwd(), "tests", "api", "reset_all.yml")
|
||||
self.assertIn(file_path, items_mapping)
|
||||
self.assertIsInstance(items_mapping[file_path], dict)
|
||||
from httprunner.loader import cases
|
||||
|
||||
|
||||
class TestModuleLoader(unittest.TestCase):
|
||||
|
||||
def test_filter_module_functions(self):
|
||||
module_functions = loader.load_module_functions(loader)
|
||||
module_functions = cases.load_module_functions(cases)
|
||||
self.assertIn("load_module_functions", module_functions)
|
||||
self.assertNotIn("is_py3", module_functions)
|
||||
|
||||
def test_load_debugtalk_module(self):
|
||||
loader.load_project_tests(os.path.join(os.getcwd(), "httprunner"))
|
||||
project_mapping = loader.project_mapping
|
||||
project_mapping = cases.load_project_tests(os.path.join(os.getcwd(), "httprunner"))
|
||||
self.assertNotIn("alter_response", project_mapping["functions"])
|
||||
|
||||
loader.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
project_mapping = loader.project_mapping
|
||||
project_mapping = cases.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
self.assertIn("alter_response", project_mapping["functions"])
|
||||
|
||||
is_status_code_200 = project_mapping["functions"]["is_status_code_200"]
|
||||
@@ -211,27 +25,27 @@ class TestModuleLoader(unittest.TestCase):
|
||||
self.assertFalse(is_status_code_200(500))
|
||||
|
||||
def test_load_debugtalk_py(self):
|
||||
loader.load_project_tests("tests/data/demo_testcase.yml")
|
||||
project_working_directory = loader.project_mapping["PWD"]
|
||||
debugtalk_functions = loader.project_mapping["functions"]
|
||||
project_mapping = cases.load_project_tests("tests/data/demo_testcase.yml")
|
||||
project_working_directory = project_mapping["PWD"]
|
||||
debugtalk_functions = project_mapping["functions"]
|
||||
self.assertEqual(
|
||||
project_working_directory,
|
||||
os.path.join(os.getcwd(), "tests")
|
||||
)
|
||||
self.assertIn("gen_md5", debugtalk_functions)
|
||||
|
||||
loader.load_project_tests("tests/base.py")
|
||||
project_working_directory = loader.project_mapping["PWD"]
|
||||
debugtalk_functions = loader.project_mapping["functions"]
|
||||
project_mapping = cases.load_project_tests("tests/base.py")
|
||||
project_working_directory = project_mapping["PWD"]
|
||||
debugtalk_functions = project_mapping["functions"]
|
||||
self.assertEqual(
|
||||
project_working_directory,
|
||||
os.path.join(os.getcwd(), "tests")
|
||||
)
|
||||
self.assertIn("gen_md5", debugtalk_functions)
|
||||
|
||||
loader.load_project_tests("httprunner/__init__.py")
|
||||
project_working_directory = loader.project_mapping["PWD"]
|
||||
debugtalk_functions = loader.project_mapping["functions"]
|
||||
project_mapping = cases.load_project_tests("httprunner/__init__.py")
|
||||
project_working_directory = project_mapping["PWD"]
|
||||
debugtalk_functions = project_mapping["functions"]
|
||||
self.assertEqual(
|
||||
project_working_directory,
|
||||
os.getcwd()
|
||||
@@ -243,9 +57,8 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
loader.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
cls.project_mapping = loader.project_mapping
|
||||
cls.tests_def_mapping = loader.tests_def_mapping
|
||||
cls.project_mapping = cases.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
cls.tests_def_mapping = cases.tests_def_mapping
|
||||
|
||||
def test_load_teststep_api(self):
|
||||
raw_test = {
|
||||
@@ -255,7 +68,7 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
{"uid": "999"}
|
||||
]
|
||||
}
|
||||
teststep = loader.load_teststep(raw_test)
|
||||
teststep = cases.load_teststep(raw_test)
|
||||
self.assertEqual(
|
||||
"create user (override).",
|
||||
teststep["name"]
|
||||
@@ -273,7 +86,7 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
{"device_sn": "$device_sn"}
|
||||
]
|
||||
}
|
||||
testcase = loader.load_teststep(raw_test)
|
||||
testcase = cases.load_teststep(raw_test)
|
||||
self.assertEqual(
|
||||
"setup and reset all (override).",
|
||||
testcase["name"]
|
||||
@@ -284,7 +97,7 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
self.assertEqual(tests[1]["name"], "reset all users")
|
||||
|
||||
def test_load_test_file_api(self):
|
||||
loaded_content = loader.load_test_file("tests/api/create_user.yml")
|
||||
loaded_content = cases.load_test_file("tests/api/create_user.yml")
|
||||
self.assertEqual(loaded_content["type"], "api")
|
||||
self.assertIn("path", loaded_content)
|
||||
self.assertIn("request", loaded_content)
|
||||
@@ -292,8 +105,8 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
|
||||
def test_load_test_file_testcase(self):
|
||||
for loaded_content in [
|
||||
loader.load_test_file("tests/testcases/setup.yml"),
|
||||
loader.load_test_file("tests/testcases/setup.json")
|
||||
cases.load_test_file("tests/testcases/setup.yml"),
|
||||
cases.load_test_file("tests/testcases/setup.json")
|
||||
]:
|
||||
self.assertEqual(loaded_content["type"], "testcase")
|
||||
self.assertIn("path", loaded_content)
|
||||
@@ -304,8 +117,8 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
|
||||
def test_load_test_file_testcase_v2(self):
|
||||
for loaded_content in [
|
||||
loader.load_test_file("tests/testcases/setup.v2.yml"),
|
||||
loader.load_test_file("tests/testcases/setup.v2.json")
|
||||
cases.load_test_file("tests/testcases/setup.v2.yml"),
|
||||
cases.load_test_file("tests/testcases/setup.v2.json")
|
||||
]:
|
||||
self.assertEqual(loaded_content["type"], "testcase")
|
||||
self.assertIn("path", loaded_content)
|
||||
@@ -316,8 +129,8 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
|
||||
def test_load_test_file_testsuite(self):
|
||||
for loaded_content in [
|
||||
loader.load_test_file("tests/testsuites/create_users.yml"),
|
||||
loader.load_test_file("tests/testsuites/create_users.json")
|
||||
cases.load_test_file("tests/testsuites/create_users.yml"),
|
||||
cases.load_test_file("tests/testsuites/create_users.json")
|
||||
]:
|
||||
self.assertEqual(loaded_content["type"], "testsuite")
|
||||
|
||||
@@ -332,8 +145,8 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
|
||||
def test_load_test_file_testsuite_v2(self):
|
||||
for loaded_content in [
|
||||
loader.load_test_file("tests/testsuites/create_users.v2.yml"),
|
||||
loader.load_test_file("tests/testsuites/create_users.v2.json")
|
||||
cases.load_test_file("tests/testsuites/create_users.v2.yml"),
|
||||
cases.load_test_file("tests/testsuites/create_users.v2.json")
|
||||
]:
|
||||
self.assertEqual(loaded_content["type"], "testsuite")
|
||||
|
||||
@@ -466,13 +279,13 @@ class TestSuiteLoader(unittest.TestCase):
|
||||
|
||||
def test_load_api_folder(self):
|
||||
path = os.path.join(os.getcwd(), "tests", "api")
|
||||
api_definition_mapping = loader.load_api_folder(path)
|
||||
api_definition_mapping = cases.load_api_folder(path)
|
||||
api_file_path = os.path.join(os.getcwd(), "tests", "api", "get_token.yml")
|
||||
self.assertIn(api_file_path, api_definition_mapping)
|
||||
self.assertIn("request", api_definition_mapping[api_file_path])
|
||||
|
||||
def test_load_project_tests(self):
|
||||
loader.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
cases.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
api_file_path = os.path.join(os.getcwd(), "tests", "api", "get_token.yml")
|
||||
self.assertIn(api_file_path, self.tests_def_mapping["api"])
|
||||
self.assertEqual(self.project_mapping["env"]["PROJECT_KEY"], "ABCDEFGH")
|
||||
159
tests/test_loader/test_load.py
Normal file
159
tests/test_loader/test_load.py
Normal file
@@ -0,0 +1,159 @@
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from httprunner import exceptions
|
||||
from httprunner.loader import load
|
||||
|
||||
|
||||
class TestFileLoader(unittest.TestCase):
|
||||
|
||||
def test_load_yaml_file_file_format_error(self):
|
||||
yaml_tmp_file = "tests/data/tmp.yml"
|
||||
# create empty yaml file
|
||||
with open(yaml_tmp_file, 'w') as f:
|
||||
f.write("")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
load._load_yaml_file(yaml_tmp_file)
|
||||
|
||||
os.remove(yaml_tmp_file)
|
||||
|
||||
# create invalid format yaml file
|
||||
with open(yaml_tmp_file, 'w') as f:
|
||||
f.write("abc")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
load._load_yaml_file(yaml_tmp_file)
|
||||
|
||||
os.remove(yaml_tmp_file)
|
||||
|
||||
def test_load_json_file_file_format_error(self):
|
||||
json_tmp_file = "tests/data/tmp.json"
|
||||
# create empty file
|
||||
with open(json_tmp_file, 'w') as f:
|
||||
f.write("")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
load._load_json_file(json_tmp_file)
|
||||
|
||||
os.remove(json_tmp_file)
|
||||
|
||||
# create empty json file
|
||||
with open(json_tmp_file, 'w') as f:
|
||||
f.write("{}")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
load._load_json_file(json_tmp_file)
|
||||
|
||||
os.remove(json_tmp_file)
|
||||
|
||||
# create invalid format json file
|
||||
with open(json_tmp_file, 'w') as f:
|
||||
f.write("abc")
|
||||
|
||||
with self.assertRaises(exceptions.FileFormatError):
|
||||
load._load_json_file(json_tmp_file)
|
||||
|
||||
os.remove(json_tmp_file)
|
||||
|
||||
def test_load_testcases_bad_filepath(self):
|
||||
testcase_file_path = os.path.join(os.getcwd(), 'tests/data/demo')
|
||||
with self.assertRaises(exceptions.FileNotFound):
|
||||
load.load_file(testcase_file_path)
|
||||
|
||||
def test_load_json_testcases(self):
|
||||
testcase_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/demo_testcase_hardcode.json')
|
||||
testcases = load.load_file(testcase_file_path)
|
||||
self.assertEqual(len(testcases), 3)
|
||||
test = testcases[0]["test"]
|
||||
self.assertIn('name', test)
|
||||
self.assertIn('request', test)
|
||||
self.assertIn('url', test['request'])
|
||||
self.assertIn('method', test['request'])
|
||||
|
||||
def test_load_yaml_testcases(self):
|
||||
testcase_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/demo_testcase_hardcode.yml')
|
||||
testcases = load.load_file(testcase_file_path)
|
||||
self.assertEqual(len(testcases), 3)
|
||||
test = testcases[0]["test"]
|
||||
self.assertIn('name', test)
|
||||
self.assertIn('request', test)
|
||||
self.assertIn('url', test['request'])
|
||||
self.assertIn('method', test['request'])
|
||||
|
||||
def test_load_csv_file_one_parameter(self):
|
||||
csv_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/user_agent.csv')
|
||||
csv_content = load.load_file(csv_file_path)
|
||||
self.assertEqual(
|
||||
csv_content,
|
||||
[
|
||||
{'user_agent': 'iOS/10.1'},
|
||||
{'user_agent': 'iOS/10.2'},
|
||||
{'user_agent': 'iOS/10.3'}
|
||||
]
|
||||
)
|
||||
|
||||
def test_load_csv_file_multiple_parameters(self):
|
||||
csv_file_path = os.path.join(
|
||||
os.getcwd(), 'tests/data/account.csv')
|
||||
csv_content = load.load_file(csv_file_path)
|
||||
self.assertEqual(
|
||||
csv_content,
|
||||
[
|
||||
{'username': 'test1', 'password': '111111'},
|
||||
{'username': 'test2', 'password': '222222'},
|
||||
{'username': 'test3', 'password': '333333'}
|
||||
]
|
||||
)
|
||||
|
||||
def test_load_folder_files(self):
|
||||
folder = os.path.join(os.getcwd(), 'tests')
|
||||
file1 = os.path.join(os.getcwd(), 'tests', 'test_utils.py')
|
||||
file2 = os.path.join(os.getcwd(), 'tests', 'api', 'reset_all.yml')
|
||||
|
||||
files = load.load_folder_files(folder, recursive=False)
|
||||
self.assertEqual(files, [])
|
||||
|
||||
files = load.load_folder_files(folder)
|
||||
self.assertIn(file2, files)
|
||||
self.assertNotIn(file1, files)
|
||||
|
||||
files = load.load_folder_files("not_existed_foulder", recursive=False)
|
||||
self.assertEqual([], files)
|
||||
|
||||
files = load.load_folder_files(file2, recursive=False)
|
||||
self.assertEqual([], files)
|
||||
|
||||
def test_load_dot_env_file(self):
|
||||
dot_env_path = os.path.join(
|
||||
os.getcwd(), "tests", ".env"
|
||||
)
|
||||
env_variables_mapping = load.load_dot_env_file(dot_env_path)
|
||||
self.assertIn("PROJECT_KEY", env_variables_mapping)
|
||||
self.assertEqual(env_variables_mapping["UserName"], "debugtalk")
|
||||
|
||||
def test_load_custom_dot_env_file(self):
|
||||
dot_env_path = os.path.join(
|
||||
os.getcwd(), "tests", "data", "test.env"
|
||||
)
|
||||
env_variables_mapping = load.load_dot_env_file(dot_env_path)
|
||||
self.assertIn("PROJECT_KEY", env_variables_mapping)
|
||||
self.assertEqual(env_variables_mapping["UserName"], "test")
|
||||
self.assertEqual(env_variables_mapping["content_type"], "application/json; charset=UTF-8")
|
||||
|
||||
def test_load_env_path_not_exist(self):
|
||||
dot_env_path = os.path.join(
|
||||
os.getcwd(), "tests", "data",
|
||||
)
|
||||
env_variables_mapping = load.load_dot_env_file(dot_env_path)
|
||||
self.assertEqual(env_variables_mapping, {})
|
||||
|
||||
def test_load_folder_content(self):
|
||||
path = os.path.join(os.getcwd(), "tests", "api")
|
||||
items_mapping = load.load_folder_content(path)
|
||||
file_path = os.path.join(os.getcwd(), "tests", "api", "reset_all.yml")
|
||||
self.assertIn(file_path, items_mapping)
|
||||
self.assertIsInstance(items_mapping[file_path], dict)
|
||||
40
tests/test_loader/test_locate.py
Normal file
40
tests/test_loader/test_locate.py
Normal file
@@ -0,0 +1,40 @@
|
||||
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from httprunner import exceptions
|
||||
from httprunner.loader import locate
|
||||
|
||||
|
||||
class TestLoaderLocate(unittest.TestCase):
|
||||
|
||||
def test_locate_file(self):
|
||||
with self.assertRaises(exceptions.FileNotFound):
|
||||
locate.locate_file(os.getcwd(), "debugtalk.py")
|
||||
|
||||
with self.assertRaises(exceptions.FileNotFound):
|
||||
locate.locate_file("", "debugtalk.py")
|
||||
|
||||
start_path = os.path.join(os.getcwd(), "tests")
|
||||
self.assertEqual(
|
||||
locate.locate_file(start_path, "debugtalk.py"),
|
||||
os.path.join(
|
||||
os.getcwd(), "tests/debugtalk.py"
|
||||
)
|
||||
)
|
||||
self.assertEqual(
|
||||
locate.locate_file("tests/", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
self.assertEqual(
|
||||
locate.locate_file("tests", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
self.assertEqual(
|
||||
locate.locate_file("tests/base.py", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
self.assertEqual(
|
||||
locate.locate_file("tests/data/demo_testcase.yml", "debugtalk.py"),
|
||||
os.path.join(os.getcwd(), "tests", "debugtalk.py")
|
||||
)
|
||||
@@ -3,6 +3,7 @@ import time
|
||||
import unittest
|
||||
|
||||
from httprunner import exceptions, loader, parser
|
||||
from httprunner.loader import load
|
||||
from tests.debugtalk import gen_random_string, sum_two
|
||||
|
||||
|
||||
@@ -931,11 +932,11 @@ class TestParser(unittest.TestCase):
|
||||
dot_env_path = os.path.join(
|
||||
os.getcwd(), "tests", ".env"
|
||||
)
|
||||
loader.load_dot_env_file(dot_env_path)
|
||||
load.load_dot_env_file(dot_env_path)
|
||||
from tests import debugtalk
|
||||
cartesian_product_parameters = parser.parse_parameters(
|
||||
parameters,
|
||||
functions_mapping=loader.load_module_functions(debugtalk)
|
||||
functions_mapping=load.load_module_functions(debugtalk)
|
||||
)
|
||||
self.assertIn(
|
||||
{
|
||||
@@ -966,8 +967,7 @@ class TestParser(unittest.TestCase):
|
||||
)
|
||||
|
||||
def test_parse_parameters_mix(self):
|
||||
loader.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
project_mapping = loader.project_mapping
|
||||
project_mapping = loader.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
|
||||
parameters = [
|
||||
{"user_agent": ["iOS/10.1", "iOS/10.2", "iOS/10.3"]},
|
||||
@@ -1376,7 +1376,7 @@ class TestParser(unittest.TestCase):
|
||||
"base_url": "https://github.com",
|
||||
"api": "api/get_token.yml",
|
||||
}
|
||||
api_def_dict = loader.load_teststep(raw_testinfo)
|
||||
api_def_dict = loader.cases.load_teststep(raw_testinfo)
|
||||
test_block = {
|
||||
"name": "override block",
|
||||
"times": 3,
|
||||
|
||||
@@ -9,7 +9,7 @@ from tests.base import ApiServerUnittest
|
||||
class TestResponse(ApiServerUnittest):
|
||||
|
||||
def setUp(self):
|
||||
self.functions_mapping = loader.load_module_functions(built_in)
|
||||
self.functions_mapping = loader.load.load_module_functions(built_in)
|
||||
|
||||
def test_parse_response_object_json(self):
|
||||
url = "http://127.0.0.1:5000/api/users"
|
||||
|
||||
@@ -2,6 +2,7 @@ import os
|
||||
import time
|
||||
|
||||
from httprunner import loader, parser, runner
|
||||
from httprunner.loader import cases
|
||||
from tests.api_server import HTTPBIN_SERVER
|
||||
from tests.base import ApiServerUnittest
|
||||
|
||||
@@ -9,8 +10,7 @@ from tests.base import ApiServerUnittest
|
||||
class TestRunner(ApiServerUnittest):
|
||||
|
||||
def setUp(self):
|
||||
loader.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
project_mapping = loader.project_mapping
|
||||
project_mapping = cases.load_project_tests(os.path.join(os.getcwd(), "tests"))
|
||||
self.debugtalk_functions = project_mapping["functions"]
|
||||
|
||||
config = {
|
||||
|
||||
@@ -63,7 +63,7 @@ class TestUtils(ApiServerUnittest):
|
||||
|
||||
def current_validators(self):
|
||||
from httprunner import built_in
|
||||
functions_mapping = loader.load_module_functions(built_in)
|
||||
functions_mapping = loader.load.load_module_functions(built_in)
|
||||
|
||||
functions_mapping["equals"](None, None)
|
||||
functions_mapping["equals"](1, 1)
|
||||
|
||||
Reference in New Issue
Block a user