Merge pull request #869 from httprunner/dev

3.0 dev

**Added**

- feat: dump log for each testcase

**Changed**

- replace logging with [loguru](https://github.com/Delgan/loguru)
- remove support for Python 2.7
- replace string format with f-string
- remove dependency colorama and colorlog
- generate reports/logs folder in current working directory
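
For reference, a minimal before/after sketch of the logging → loguru switch this release makes (the `run.log` path and `name` variable are illustrative, not from this PR):

```python
import sys
from loguru import logger

# pre-3.0 (stdlib logging + colorama/colorlog):
#   logging.info("Start to run testcase: %s", name)
# 3.0 (loguru + f-strings):
logger.remove()                       # drop loguru's default stderr sink
logger.add(sys.stdout, level="INFO")  # console sink
logger.add("run.log", level="DEBUG")  # optional file sink (illustrative path)

name = "demo testcase"
logger.info(f"Start to run testcase: {name}")
```
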
debugtalk committed on 2020-03-10 19:14:57 +08:00 (committed via GitHub)
50 changed files with 1015 additions and 642 deletions


@@ -10,7 +10,7 @@ jobs:
strategy:
max-parallel: 6
matrix:
python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
python-version: [3.6, 3.7, 3.8]
os: [ubuntu-latest, macos-latest] # TODO: windows-latest
steps:


@@ -10,7 +10,7 @@ jobs:
strategy:
max-parallel: 12
matrix:
python-version: [2.7, 3.5, 3.6, 3.7] # TODO: 3.8
python-version: [3.6, 3.7] # TODO: 3.8
os: [ubuntu-latest, macos-latest] # TODO: windows-latest
steps:


@@ -1,5 +1,19 @@
# Release History
## 3.0.0 (2020-03-10)
**Added**
- feat: dump log for each testcase
**Changed**
- replace logging with [loguru](https://github.com/Delgan/loguru)
- remove support for Python 2.7
- replace string format with f-string
- remove dependency colorama and colorlog
- generate reports/logs folder in current working directory
## 2.5.7 (2020-02-21)
**Changed**


@@ -1,4 +1,4 @@
__version__ = "2.5.7"
__version__ = "3.0.0-alpha"
__description__ = "One-stop solution for HTTP(S) testing."
__all__ = ["__version__", "__description__"]


@@ -1,9 +1,11 @@
import os
import sys
import unittest
from loguru import logger
from sentry_sdk import capture_message
from httprunner import (__version__, exceptions, loader, logger, parser,
from httprunner import (__version__, exceptions, loader, parser,
report, runner, utils)
@@ -32,18 +34,23 @@ class HttpRunner(object):
log_file (str): log file path.
"""
logger.setup_logger(log_level, log_file)
self.exception_stage = "initialize HttpRunner()"
kwargs = {
"failfast": failfast,
"resultclass": report.HtmlTestResult
}
logger.remove()
log_level = log_level.upper()
logger.add(sys.stdout, level=log_level)
if log_file:
logger.add(log_file, level=log_level)
self.unittest_runner = unittest.TextTestRunner(**kwargs)
self.test_loader = unittest.TestLoader()
self.save_tests = save_tests
self._summary = None
self.project_working_directory = None
self.project_mapping = None
def _add_tests(self, testcases):
""" initialize testcase with Runner() and add to test suite.
@@ -99,7 +106,7 @@ class HttpRunner(object):
times = int(times)
except ValueError:
raise exceptions.ParamsError(
"times should be digit, given: {}".format(times))
f"times should be digit, given: {times}")
for times_index in range(times):
# suppose one testcase should not have more than 9999 steps,
@@ -128,9 +135,16 @@ class HttpRunner(object):
"""
tests_results = []
for testcase in test_suite:
for index, testcase in enumerate(test_suite):
log_handler = None
if self.save_tests:
logs_file_abs_path = utils.prepare_log_file_abs_path(
self.project_mapping, f"testcase_{index+1}.log"
)
log_handler = logger.add(logs_file_abs_path, level="DEBUG")
testcase_name = testcase.config.get("name")
logger.log_info("Start to run testcase: {}".format(testcase_name))
logger.info(f"Start to run testcase: {testcase_name}")
result = self.unittest_runner.run(testcase)
if result.wasSuccessful():
@@ -138,6 +152,9 @@ class HttpRunner(object):
else:
tests_results.insert(0, (testcase, result))
if self.save_tests and log_handler:
logger.remove(log_handler)
return tests_results
def _aggregate(self, tests_results):
@@ -162,7 +179,7 @@ class HttpRunner(object):
"details": []
}
for tests_result in tests_results:
for index, tests_result in enumerate(tests_results):
testcase, result = tests_result
testcase_summary = report.get_summary(result)
@@ -178,6 +195,12 @@ class HttpRunner(object):
report.aggregate_stat(summary["stat"]["teststeps"], testcase_summary["stat"])
report.aggregate_stat(summary["time"], testcase_summary["time"])
if self.save_tests:
logs_file_abs_path = utils.prepare_log_file_abs_path(
self.project_mapping, f"testcase_{index + 1}.log"
)
testcase_summary["log"] = logs_file_abs_path
summary["details"].append(testcase_summary)
return summary
@@ -186,26 +209,25 @@ class HttpRunner(object):
""" run testcase/testsuite data
"""
capture_message("start to run tests")
project_mapping = tests_mapping.get("project_mapping", {})
self.project_working_directory = project_mapping.get("PWD", os.getcwd())
self.project_mapping = tests_mapping.get("project_mapping", {})
if self.save_tests:
utils.dump_logs(tests_mapping, project_mapping, "loaded")
utils.dump_logs(tests_mapping, self.project_mapping, "loaded")
# parse tests
self.exception_stage = "parse tests"
parsed_testcases = parser.parse_tests(tests_mapping)
parse_failed_testfiles = parser.get_parse_failed_testfiles()
if parse_failed_testfiles:
logger.log_warning("parse failures occurred ...")
utils.dump_logs(parse_failed_testfiles, project_mapping, "parse_failed")
logger.warning("parse failures occurred ...")
utils.dump_logs(parse_failed_testfiles, self.project_mapping, "parse_failed")
if len(parsed_testcases) == 0:
logger.log_error("failed to parse all cases, abort.")
logger.error("failed to parse all cases, abort.")
raise exceptions.ParseTestsFailure
if self.save_tests:
utils.dump_logs(parsed_testcases, project_mapping, "parsed")
utils.dump_logs(parsed_testcases, self.project_mapping, "parsed")
# add tests to test suite
self.exception_stage = "add tests to test suite"
@@ -224,10 +246,10 @@ class HttpRunner(object):
report.stringify_summary(self._summary)
if self.save_tests:
utils.dump_logs(self._summary, project_mapping, "summary")
utils.dump_logs(self._summary, self.project_mapping, "summary")
# save variables and export data
vars_out = self.get_vars_out()
utils.dump_logs(vars_out, project_mapping, "io")
utils.dump_logs(vars_out, self.project_mapping, "io")
return self._summary
@@ -295,7 +317,7 @@ class HttpRunner(object):
dict: result summary
"""
logger.log_info("HttpRunner version: {}".format(__version__))
logger.info(f"HttpRunner version: {__version__}")
if loader.is_test_path(path_or_tests):
return self.run_path(path_or_tests, dot_env_path, mapping)
elif loader.is_test_content(path_or_tests):
@@ -303,4 +325,4 @@ class HttpRunner(object):
loader.init_pwd(project_working_directory)
return self.run_tests(path_or_tests)
else:
raise exceptions.ParamsError("Invalid testcase path or testcases: {}".format(path_or_tests))
raise exceptions.ParamsError(f"Invalid testcase path or testcases: {path_or_tests}")

httprunner/app/main.py (new file)

@@ -0,0 +1,22 @@
from fastapi import FastAPI
from httprunner import __version__
from .routers import deps, debugtalk, debug
app = FastAPI()
@app.get("/hrun/version")
async def get_hrun_version():
return {
"code": 0,
"message": "success",
"result": {
"HttpRunner": __version__
}
}
app.include_router(deps.router)
app.include_router(debugtalk.router)
app.include_router(debug.router)
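
A quick smoke test of the new service, using the same starlette TestClient that the repo's own tests (further below) use:

```python
from starlette.testclient import TestClient
from httprunner.app.main import app

client = TestClient(app)
resp = client.get("/hrun/version")
assert resp.status_code == 200
assert resp.json()["code"] == 0
print(resp.json()["result"])  # e.g. {"HttpRunner": "3.0.0-alpha"}
```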


@@ -0,0 +1,64 @@
from fastapi import APIRouter
from httprunner.api import HttpRunner
from httprunner.schema import ProjectMeta, TestCase
router = APIRouter()
runner = HttpRunner()
@router.post("/hrun/debug/testcase", tags=["debug"])
async def debug_single_testcase(project_meta: ProjectMeta, testcase: TestCase):
resp = {
"code": 0,
"message": "success",
"result": {}
}
project_meta_json = project_meta.dict(by_alias=True)
if project_meta.debugtalk_py:
origin_local_keys = list(locals().keys()).copy()
exec(project_meta.debugtalk_py, {}, locals())
new_local_keys = list(locals().keys()).copy()
new_added_keys = set(new_local_keys) - set(origin_local_keys)
new_added_keys.remove("origin_local_keys")
project_meta_json["functions"] = {}
for func_name in new_added_keys:
project_meta_json["functions"][func_name] = locals()[func_name]
testcase_json = testcase.dict(by_alias=True)
tests_mapping = {
"project_mapping": project_meta_json,
"testcases": [testcase_json]
}
summary = runner.run_tests(tests_mapping)
if not summary["success"]:
resp["code"] = 1
resp["message"] = "fail"
resp["result"] = summary
return resp
# @router.post("/hrun/debug/api", tags=["debug"])
# async def debug_single_api():
# resp = {
# "code": 0,
# "message": "success",
# "result": {}
# }
#
# # tests_mapping
#
# # summary = runner.run_tests(tests_mapping)
#
# return resp
#
#
# @router.post("/hrun/debug/testcases", tags=["debug"])
# async def debug_multiple_testcases(project_meta: ProjectMeta, testcases: TestCases):
# tests_mapping = {
# "project_mapping": project_meta,
# "testcases": testcases
# }


@@ -0,0 +1,51 @@
import unittest
from starlette.testclient import TestClient
from httprunner.app.main import app
client = TestClient(app)
class TestDebug(unittest.TestCase):
def test_debug_single_testcase(self):
json_data = {
"project_meta": {
"debugtalk_py": "\ndef hello(name):\n print(f'hello, {name}')\n",
"variables": {},
"env": {}
},
"testcase": {
"config": {
"name": "test demo for debug service",
"verify": False,
"base_url": "",
"variables": {},
"setup_hooks": [],
"teardown_hooks": [],
"export": []
},
"teststeps": [
{
"name": "get index page",
"request": {
"method": "GET",
"url": "https://httpbin.org/",
"params": {},
"headers": {},
"json": {},
"cookies": {},
"timeout": 30,
"allow_redirects": True,
"verify": False
},
"extract": {},
"validate": []
}
]
}
}
response = client.post("/hrun/debug/testcase", json=json_data)
assert response.status_code == 200
assert response.json()["code"] == 0


@@ -0,0 +1,46 @@
import contextlib
import logging
import sys
from io import StringIO
from fastapi import APIRouter
from starlette.requests import Request
router = APIRouter()
@contextlib.contextmanager
def stdout_io(stdout=None):
old = sys.stdout
if stdout is None:
stdout = StringIO()
sys.stdout = stdout
yield stdout
sys.stdout = old
@router.post("/hrun/debug/debugtalk_py", tags=["debugtalk"])
async def debug_python(request: Request):
body = await request.body()
if request.headers.get('content-transfer-encoding') == "base64":
# TODO: decode base64
pass
resp = {
"code": 0,
"message": "success",
"result": ""
}
try:
with stdout_io() as s:
exec(body, globals())
output = s.getvalue()
resp["result"] = output
except Exception as ex:
resp["code"] = 1
resp["message"] = "fail"
resp["result"] = str(ex)
logging.error(resp)
return resp
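
The `stdout_io()` helper swaps `sys.stdout` for a `StringIO` so that `print()` output from `exec()`'d debugtalk code can be captured. One caveat worth noting: as written, `sys.stdout` is not restored if the body raises; a hedged variant with `try/finally`, plus a usage example:

```python
import contextlib
import sys
from io import StringIO

@contextlib.contextmanager
def stdout_io(stdout=None):
    old = sys.stdout
    sys.stdout = stdout = stdout or StringIO()
    try:
        yield stdout
    finally:
        sys.stdout = old  # restore even if the exec()'d code raises

with stdout_io() as s:
    exec("print('hello from debugtalk')", {})
print(s.getvalue(), end="")  # hello from debugtalk
```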


@@ -0,0 +1,42 @@
import logging
import subprocess
from typing import List
import pkg_resources
from fastapi import APIRouter
router = APIRouter()
@router.get("/hrun/deps", tags=["deps"])
async def get_installed_dependencies():
resp = {
"code": 0,
"message": "success",
"result": {}
}
for p in pkg_resources.working_set:
resp["result"][p.project_name] = p.version
return resp
@router.post("/hrun/deps", tags=["deps"])
async def install_dependencies(deps: List[str]):
resp = {
"code": 0,
"message": "success",
"result": {}
}
for dep in deps:
try:
p = subprocess.run(["pip", "install", dep])
assert p.returncode == 0
resp["result"][dep] = True
except (AssertionError, subprocess.SubprocessError):
resp["result"][dep] = False
resp["code"] = 1
resp["message"] = "fail"
logging.error(f"failed to install dependency: {dep}")
return resp
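
Illustrative calls against the two endpoints (the package names are examples only):

```python
from starlette.testclient import TestClient
from httprunner.app.main import app

client = TestClient(app)

# GET lists every installed distribution via pkg_resources.working_set
print(client.get("/hrun/deps").json()["result"])

# POST shells out to `pip install <dep>`; any failure flips code/message
resp = client.post("/hrun/deps", json=["requests", "no-such-package-xyz"])
print(resp.json())  # result maps each dep to True/False
```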


@@ -4,8 +4,6 @@ Built-in validate comparators.
import re
from httprunner.compat import basestring, builtin_str, integer_types
def equals(check_value, expect_value):
assert check_value == expect_value
@@ -32,46 +30,46 @@ def not_equals(check_value, expect_value):
def string_equals(check_value, expect_value):
assert builtin_str(check_value) == builtin_str(expect_value)
assert str(check_value) == str(expect_value)
def length_equals(check_value, expect_value):
assert isinstance(expect_value, integer_types)
assert isinstance(expect_value, int)
expect_len = _cast_to_int(expect_value)
assert len(check_value) == expect_len
def length_greater_than(check_value, expect_value):
assert isinstance(expect_value, integer_types)
assert isinstance(expect_value, int)
expect_len = _cast_to_int(expect_value)
assert len(check_value) > expect_len
def length_greater_than_or_equals(check_value, expect_value):
assert isinstance(expect_value, integer_types)
assert isinstance(expect_value, int)
expect_len = _cast_to_int(expect_value)
assert len(check_value) >= expect_len
def length_less_than(check_value, expect_value):
assert isinstance(expect_value, integer_types)
assert isinstance(expect_value, int)
expect_len = _cast_to_int(expect_value)
assert len(check_value) < expect_len
def length_less_than_or_equals(check_value, expect_value):
assert isinstance(expect_value, integer_types)
assert isinstance(expect_value, int)
expect_len = _cast_to_int(expect_value)
assert len(check_value) <= expect_len
def contains(check_value, expect_value):
assert isinstance(check_value, (list, tuple, dict, basestring))
assert isinstance(check_value, (list, tuple, dict, str, bytes))
assert expect_value in check_value
def contained_by(check_value, expect_value):
assert isinstance(expect_value, (list, tuple, dict, basestring))
assert isinstance(expect_value, (list, tuple, dict, str, bytes))
assert check_value in expect_value
@@ -79,7 +77,7 @@ def type_match(check_value, expect_value):
def get_type(name):
if isinstance(name, type):
return name
elif isinstance(name, basestring):
elif isinstance(name, str):
try:
return __builtins__[name]
except KeyError:
@@ -91,21 +89,21 @@ def type_match(check_value, expect_value):
def regex_match(check_value, expect_value):
assert isinstance(expect_value, basestring)
assert isinstance(check_value, basestring)
assert isinstance(expect_value, str)
assert isinstance(check_value, str)
assert re.match(expect_value, check_value)
def startswith(check_value, expect_value):
assert builtin_str(check_value).startswith(builtin_str(expect_value))
assert str(check_value).startswith(str(expect_value))
def endswith(check_value, expect_value):
assert builtin_str(check_value).endswith(builtin_str(expect_value))
assert str(check_value).endswith(str(expect_value))
def _cast_to_int(expect_value):
try:
return int(expect_value)
except Exception:
raise AssertionError("%r can't cast to int" % str(expect_value))
raise AssertionError("%r can't cast to int" % str(expect_value))


@@ -7,7 +7,6 @@ import random
import string
import time
from httprunner.compat import builtin_str, integer_types
from httprunner.exceptions import ParamsError
@@ -21,8 +20,8 @@ def gen_random_string(str_len):
def get_timestamp(str_len=13):
""" get timestamp string, length can only between 0 and 16
"""
if isinstance(str_len, integer_types) and 0 < str_len < 17:
return builtin_str(time.time()).replace(".", "")[:str_len]
if isinstance(str_len, int) and 0 < str_len < 17:
return str(time.time()).replace(".", "")[:str_len]
raise ParamsError("timestamp length can only between 0 and 16.")


@@ -3,14 +3,13 @@ import os
import sys
import sentry_sdk
from loguru import logger
from httprunner import __description__, __version__, exceptions
from httprunner.api import HttpRunner
from httprunner.compat import is_py2
from httprunner.loader import load_cases
from httprunner.logger import color_print, log_error
from httprunner.report import gen_html_report
from httprunner.utils import (create_scaffold, get_python2_retire_msg,
from httprunner.utils import (create_scaffold,
prettify_json_file, init_sentry_sdk)
init_sentry_sdk()
@@ -19,9 +18,6 @@ init_sentry_sdk()
def main():
""" API test: parse command line options and run commands.
"""
if is_py2:
color_print(get_python2_retire_msg(), "YELLOW")
parser = argparse.ArgumentParser(description=__description__)
parser.add_argument(
'-V', '--version', dest='version', action='store_true',
@@ -71,19 +67,19 @@ def main():
sys.exit(0)
if args.version:
color_print("{}".format(__version__), "GREEN")
print(f"{__version__}")
sys.exit(0)
if args.validate:
for validate_path in args.validate:
try:
color_print("validate test file: {}".format(validate_path), "GREEN")
logger.info(f"validate test file: {validate_path}")
load_cases(validate_path, args.dot_env_path)
except exceptions.MyBaseError as ex:
log_error(str(ex))
logger.error(str(ex))
continue
color_print("done!", "BLUE")
logger.info("done!")
sys.exit(0)
if args.prettify:
@@ -106,7 +102,7 @@ def main():
try:
for path in args.testfile_paths:
summary = runner.run(path, dot_env_path=args.dot_env_path)
report_dir = args.report_dir or os.path.join(runner.project_working_directory, "reports")
report_dir = args.report_dir or os.path.join(os.getcwd(), "reports")
gen_html_report(
summary,
report_template=args.report_template,
@@ -115,8 +111,7 @@ def main():
)
err_code |= (0 if summary and summary["success"] else 1)
except Exception as ex:
color_print("!!!!!!!!!! exception stage: {} !!!!!!!!!!".format(runner.exception_stage), "YELLOW")
color_print(str(ex), "RED")
logger.error(f"!!!!!!!!!! exception stage: {runner.exception_stage} !!!!!!!!!!\n{str(ex)}")
sentry_sdk.capture_exception(ex)
err_code = 1


@@ -1,8 +1,8 @@
import io
import sys
import unittest
from httprunner.cli import main
from httprunner.compat import io
class TestCli(unittest.TestCase):


@@ -4,11 +4,12 @@ import time
import requests
import urllib3
from loguru import logger
from requests import Request, Response
from requests.exceptions import (InvalidSchema, InvalidURL, MissingSchema,
RequestException)
from httprunner import logger, response
from httprunner import response
from httprunner.utils import lower_dict_keys, omit_long_data
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
@@ -18,10 +19,10 @@ def get_req_resp_record(resp_obj):
""" get request and response info from Response() object.
"""
def log_print(req_resp_dict, r_type):
msg = "\n================== {} details ==================\n".format(r_type)
msg = f"\n================== {r_type} details ==================\n"
for key, value in req_resp_dict[r_type].items():
msg += "{:<16} : {}\n".format(key, repr(value))
logger.log_debug(msg)
logger.debug(msg)
req_resp_dict = {
"request": {},
@@ -214,15 +215,13 @@ class HttpSession(requests.Session):
try:
response.raise_for_status()
except RequestException as e:
logger.log_error(u"{exception}".format(exception=str(e)))
except RequestException as ex:
logger.error(f"{str(ex)}")
else:
logger.log_info(
"""status_code: {}, response_time(ms): {} ms, response_length: {} bytes\n""".format(
response.status_code,
response_time_ms,
content_size
)
logger.info(
f"status_code: {response.status_code}, "
f"response_time(ms): {response_time_ms} ms, "
f"response_length: {content_size} bytes\n"
)
return response
@@ -234,9 +233,9 @@ class HttpSession(requests.Session):
"""
try:
msg = "processed request:\n"
msg += "> {method} {url}\n".format(method=method, url=url)
msg += "> kwargs: {kwargs}".format(kwargs=kwargs)
logger.log_debug(msg)
msg += f"> {method} {url}\n"
msg += f"> kwargs: {kwargs}"
logger.debug(msg)
return requests.Session.request(self, method, url, **kwargs)
except (MissingSchema, InvalidSchema, InvalidURL):
raise


@@ -1,61 +0,0 @@
# encoding: utf-8
"""
httprunner.compat
~~~~~~~~~~~~~~~~~
This module handles import compatibility issues between Python 2 and
Python 3.
"""
try:
import simplejson as json
except ImportError:
import json
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
# ---------
# Specifics
# ---------
try:
JSONDecodeError = json.JSONDecodeError
except AttributeError:
JSONDecodeError = ValueError
if is_py2:
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
integer_types = (int, long)
FileNotFoundError = IOError
import StringIO as io
elif is_py3:
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
integer_types = (int,)
FileNotFoundError = FileNotFoundError
import io as io
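
With `compat.py` deleted, the rest of this commit substitutes the Python-3 builtins directly; the mapping in short (a reference sketch, not code from the PR):

```python
import io    # was: from httprunner.compat import io
import json  # json.JSONDecodeError replaces compat.JSONDecodeError

# compat name        -> Python-3-only replacement used in this commit
# builtin_str, str   -> str
# basestring         -> (str, bytes)
# integer_types      -> int / (int,)
# numeric_types      -> (int, float)
# FileNotFoundError  -> builtin FileNotFoundError
assert isinstance("text", (str, bytes)) and isinstance(3, int)
```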


@@ -1,7 +1,3 @@
# encoding: utf-8
from httprunner.compat import JSONDecodeError, FileNotFoundError
""" failure type exceptions
these exceptions will mark test as failure
"""


@@ -18,8 +18,9 @@ import multiprocessing
import os
import sys
from loguru import logger
from httprunner import __version__
from httprunner import logger
from httprunner.utils import init_sentry_sdk
init_sentry_sdk()
@@ -31,7 +32,7 @@ def parse_locustfile(file_path):
if file_path is a YAML/JSON file, convert it to locustfile
"""
if not os.path.isfile(file_path):
logger.color_print("file path invalid, exit.", "RED")
logger.error("file path invalid, exit.")
sys.exit(1)
file_suffix = os.path.splitext(file_path)[1]
@@ -41,7 +42,7 @@ def parse_locustfile(file_path):
locustfile_path = gen_locustfile(file_path)
else:
# '' or other suffix
logger.color_print("file type should be YAML/JSON/Python, exit.", "RED")
logger.error("file type should be YAML/JSON/Python, exit.")
sys.exit(1)
return locustfile_path
@@ -105,7 +106,7 @@ def run_locusts_with_processes(sys_argv, processes_count):
def main():
""" Performance test with locust: parse command line options and run commands.
"""
print("HttpRunner version: {}".format(__version__))
print(f"HttpRunner version: {__version__}")
sys.argv[0] = 'locust'
if len(sys.argv) == 1:
sys.argv.extend(["-h"])
@@ -126,11 +127,13 @@ def main():
loglevel_index = get_arg_index("-L", "--loglevel")
if loglevel_index and loglevel_index < len(sys.argv):
loglevel = sys.argv[loglevel_index]
loglevel = loglevel.upper()
else:
# default
loglevel = "WARNING"
logger.setup_logger(loglevel)
logger.remove()
logger.add(sys.stdout, level=loglevel)
# get testcase file path
try:
@@ -147,7 +150,7 @@ def main():
""" locusts -f locustfile.py --processes 4
"""
if "--no-web" in sys.argv:
logger.log_error("conflict parameter args: --processes & --no-web. \nexit.")
logger.error("conflict parameter args: --processes & --no-web. \nexit.")
sys.exit(1)
processes_index = sys.argv.index('--processes')
@@ -157,7 +160,7 @@ def main():
locusts -f locustfile.py --processes
"""
processes_count = multiprocessing.cpu_count()
logger.log_warning("processes count not specified, use {} by default.".format(processes_count))
logger.warning(f"processes count not specified, use {processes_count} by default.")
else:
try:
""" locusts -f locustfile.py --processes 4 """
@@ -166,7 +169,7 @@ def main():
except ValueError:
""" locusts -f locustfile.py --processes -P 8888 """
processes_count = multiprocessing.cpu_count()
logger.log_warning("processes count not specified, use {} by default.".format(processes_count))
logger.warning(f"processes count not specified, use {processes_count} by default.")
sys.argv.pop(processes_index)
run_locusts_with_processes(sys.argv, processes_count)


@@ -85,12 +85,12 @@ def prepare_upload_test(test_dict):
"""
upload_json = test_dict["request"].pop("upload", {})
if not upload_json:
raise ParamsError("invalid upload info: {}".format(upload_json))
raise ParamsError(f"invalid upload info: {upload_json}")
params_list = []
for key, value in upload_json.items():
test_dict["variables"][key] = value
params_list.append("{}=${}".format(key, key))
params_list.append(f"{key}=${key}")
params_str = ", ".join(params_list)
test_dict["variables"]["m_encoder"] = "${multipart_encoder(" + params_str + ")}"


@@ -1,7 +1,9 @@
import importlib
import os
from httprunner import exceptions, logger, utils
from loguru import logger
from httprunner import exceptions, utils
from httprunner.loader.check import JsonSchemaChecker
from httprunner.loader.load import load_module_functions, load_file, load_dot_env_file, \
load_folder_files
@@ -53,7 +55,7 @@ def __extend_with_api_ref(raw_testinfo):
if api_name in tests_def_mapping["api"]:
block = tests_def_mapping["api"][api_name]
elif not os.path.isfile(api_name):
raise exceptions.ApiNotFound("{} not found!".format(api_name))
raise exceptions.ApiNotFound(f"{api_name} not found!")
else:
block = load_file(api_name)
@@ -84,7 +86,7 @@ def __extend_with_testcase_ref(raw_testinfo):
testcase_dict = load_testcase_v2(loaded_testcase)
else:
raise exceptions.FileFormatError(
"Invalid format testcase: {}".format(testcase_path))
f"Invalid format testcase: {testcase_path}")
tests_def_mapping["testcases"][testcase_path] = testcase_dict
else:
@@ -186,8 +188,8 @@ def load_testcase(raw_testcase):
elif key == "test":
tests.append(load_teststep(test_block))
else:
logger.log_warning(
"unexpected block key: {}. block key should only be 'config' or 'test'.".format(key)
logger.warning(
f"unexpected block key: {key}. block key should only be 'config' or 'test'."
)
return {
@@ -417,7 +419,7 @@ def load_project_data(test_path, dot_env_path=None):
# locate PWD and load debugtalk.py functions
project_mapping["PWD"] = project_working_directory
project_mapping["functions"] = debugtalk_functions
project_mapping["test_path"] = os.path.abspath(test_path)
project_mapping["test_path"] = os.path.abspath(test_path)[len(project_working_directory)+1:]
return project_mapping
@@ -485,9 +487,9 @@ def load_cases(path, dot_env_path=None):
try:
loaded_content = load_test_file(path)
except exceptions.ApiNotFound as ex:
logger.log_warning("Invalid api reference in {}: {}".format(path, ex))
logger.warning(f"Invalid api reference in {path}: {ex}")
except exceptions.FileFormatError:
logger.log_warning("Invalid test file format: {}".format(path))
logger.warning(f"Invalid test file format: {path}")
if not loaded_content:
pass


@@ -281,5 +281,11 @@ class TestSuiteLoader(unittest.TestCase):
buildup.load_project_data(os.path.join(os.getcwd(), "tests"))
self.assertIn("gen_md5", self.project_mapping["functions"])
self.assertEqual(self.project_mapping["env"]["PROJECT_KEY"], "ABCDEFGH")
self.assertEqual(self.project_mapping["PWD"], os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
self.assertEqual(self.project_mapping["test_path"], os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
self.assertEqual(
os.path.basename(self.project_mapping["PWD"]),
"tests"
)
self.assertEqual(
os.path.basename(self.project_mapping["test_path"]),
"tests"
)


@@ -4,8 +4,9 @@ import os
import platform
import jsonschema
from loguru import logger
from httprunner import exceptions, logger
from httprunner import exceptions
schemas_root_dir = os.path.join(os.path.dirname(__file__), "schemas")
common_schema_path = os.path.join(schemas_root_dir, "common.schema.json")
@@ -50,7 +51,7 @@ class JsonSchemaChecker(object):
try:
jsonschema.validate(content, scheme, resolver=resolver)
except jsonschema.exceptions.ValidationError as ex:
logger.log_error(str(ex))
logger.error(str(ex))
raise exceptions.FileFormatError
return True


@@ -5,9 +5,10 @@ import os
import types
import yaml
from loguru import logger
from httprunner import builtin
from httprunner import exceptions, logger, utils
from httprunner import exceptions, utils
from httprunner.loader.locate import get_project_working_directory
try:
@@ -25,7 +26,7 @@ def _load_yaml_file(yaml_file):
try:
yaml_content = yaml.load(stream)
except yaml.YAMLError as ex:
logger.log_error(str(ex))
logger.error(str(ex))
raise exceptions.FileFormatError
return yaml_content
@@ -37,9 +38,9 @@ def _load_json_file(json_file):
with io.open(json_file, encoding='utf-8') as data_file:
try:
json_content = json.load(data_file)
except exceptions.JSONDecodeError:
err_msg = u"JSONDecodeError: JSON file format error: {}".format(json_file)
logger.log_error(err_msg)
except json.JSONDecodeError:
err_msg = f"JSONDecodeError: JSON file format error: {json_file}"
logger.error(err_msg)
raise exceptions.FileFormatError(err_msg)
return json_content
@@ -90,7 +91,7 @@ def load_csv_file(csv_file):
def load_file(file_path):
if not os.path.isfile(file_path):
raise exceptions.FileNotFound("{} does not exist.".format(file_path))
raise exceptions.FileNotFound(f"{file_path} does not exist.")
file_suffix = os.path.splitext(file_path)[1].lower()
if file_suffix == '.json':
@@ -101,8 +102,7 @@ def load_file(file_path):
return load_csv_file(file_path)
else:
# '' or other suffix
err_msg = u"Unsupported file format: {}".format(file_path)
logger.log_warning(err_msg)
logger.warning(f"Unsupported file format: {file_path}")
return []
@@ -169,7 +169,7 @@ def load_dot_env_file(dot_env_path):
if not os.path.isfile(dot_env_path):
return {}
logger.log_info("Loading environment variables from {}".format(dot_env_path))
logger.info(f"Loading environment variables from {dot_env_path}")
env_variables_mapping = {}
with io.open(dot_env_path, 'r', encoding='utf-8') as fp:


@@ -1,7 +1,9 @@
import os
import sys
from httprunner import exceptions, logger
from loguru import logger
from httprunner import exceptions
project_working_directory = None
@@ -26,7 +28,7 @@ def locate_file(start_path, file_name):
elif os.path.isdir(start_path):
start_dir_path = start_path
else:
raise exceptions.FileNotFound("invalid path: {}".format(start_path))
raise exceptions.FileNotFound(f"invalid path: {start_path}")
file_path = os.path.join(start_dir_path, file_name)
if os.path.isfile(file_path):
@@ -34,14 +36,14 @@ def locate_file(start_path, file_name):
# current working directory
if os.path.abspath(start_dir_path) == os.getcwd():
raise exceptions.FileNotFound("{} not found in {}".format(file_name, start_path))
raise exceptions.FileNotFound(f"{file_name} not found in {start_path}")
# system root dir
# Windows, e.g. 'E:\\'
# Linux/Darwin, '/'
parent_dir = os.path.dirname(start_dir_path)
if parent_dir == start_dir_path:
raise exceptions.FileNotFound("{} not found in {}".format(file_name, start_path))
raise exceptions.FileNotFound(f"{file_name} not found in {start_path}")
# locate recursive upward
return locate_file(parent_dir, file_name)
@@ -85,8 +87,8 @@ def init_project_working_directory(test_path):
def prepare_path(path):
if not os.path.exists(path):
err_msg = "path not exist: {}".format(path)
logger.log_error(err_msg)
err_msg = f"path not exist: {path}"
logger.error(err_msg)
raise exceptions.FileNotFound(err_msg)
if not os.path.isabs(path):


@@ -1,98 +0,0 @@
import logging
import os
import sys
from colorama import Fore, init
from colorlog import ColoredFormatter
init(autoreset=True)
LOG_LEVEL = "INFO"
LOG_FILE_PATH = ""
log_colors_config = {
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
loggers = {}
def setup_logger(log_level, log_file=None):
global LOG_LEVEL
LOG_LEVEL = log_level
if log_file:
global LOG_FILE_PATH
LOG_FILE_PATH = log_file
def get_logger(name=None):
"""setup logger with ColoredFormatter."""
name = name or "httprunner"
logger_key = "".join([name, LOG_LEVEL, LOG_FILE_PATH])
if logger_key in loggers:
return loggers[logger_key]
_logger = logging.getLogger(name)
log_level = LOG_LEVEL
level = getattr(logging, log_level.upper(), None)
if not level:
color_print("Invalid log level: %s" % log_level, "RED")
sys.exit(1)
# hide traceback when log level is INFO/WARNING/ERROR/CRITICAL
if level >= logging.INFO:
sys.tracebacklimit = 0
_logger.setLevel(level)
if LOG_FILE_PATH:
log_dir = os.path.dirname(LOG_FILE_PATH)
if not os.path.isdir(log_dir):
os.makedirs(log_dir)
handler = logging.FileHandler(LOG_FILE_PATH, encoding="utf-8")
else:
handler = logging.StreamHandler(sys.stdout)
formatter = ColoredFormatter(
u"%(log_color)s%(bg_white)s%(levelname)-8s%(reset)s %(message)s",
datefmt=None,
reset=True,
log_colors=log_colors_config
)
handler.setFormatter(formatter)
_logger.addHandler(handler)
loggers[logger_key] = _logger
return _logger
def coloring(text, color="WHITE"):
fore_color = getattr(Fore, color.upper())
return fore_color + text
def color_print(msg, color="WHITE"):
fore_color = getattr(Fore, color.upper())
print(fore_color + msg)
def log_with_color(level):
""" log with color by different level
"""
def wrapper(text):
color = log_colors_config[level.upper()]
_logger = get_logger()
getattr(_logger, level.lower())(coloring(text, color))
return wrapper
log_debug = log_with_color("debug")
log_info = log_with_color("info")
log_warning = log_with_color("warning")
log_error = log_with_color("error")
log_critical = log_with_color("critical")


@@ -6,9 +6,9 @@ import collections
import json
import re
from loguru import logger
from httprunner import exceptions, utils, loader
from httprunner import logger
from httprunner.compat import basestring, numeric_types, str
# use $$ to escape $ notation
dolloar_regex_compile = re.compile(r"\$\$")
@@ -45,7 +45,7 @@ def parse_string_value(str_value):
def is_var_or_func_exist(content):
""" check if variable or function exist
"""
if not isinstance(content, basestring):
if not isinstance(content, str):
return False
try:
@@ -286,7 +286,7 @@ def uniform_validator(validator):
"""
if not isinstance(validator, dict):
raise exceptions.ParamsError("invalid validator: {}".format(validator))
raise exceptions.ParamsError(f"invalid validator: {validator}")
if "check" in validator and "expect" in validator:
# format1
@@ -300,12 +300,12 @@ def uniform_validator(validator):
compare_values = validator[comparator]
if not isinstance(compare_values, list) or len(compare_values) != 2:
raise exceptions.ParamsError("invalid validator: {}".format(validator))
raise exceptions.ParamsError(f"invalid validator: {validator}")
check_item, expect_value = compare_values
else:
raise exceptions.ParamsError("invalid validator: {}".format(validator))
raise exceptions.ParamsError(f"invalid validator: {validator}")
# uniform comparator, e.g. lt => less_than, eq => equals
comparator = get_uniform_comparator(comparator)
@@ -410,7 +410,7 @@ def get_mapping_variable(variable_name, variables_mapping):
try:
return variables_mapping[variable_name]
except KeyError:
raise exceptions.VariableNotFound("{} is not found.".format(variable_name))
raise exceptions.VariableNotFound(f"{variable_name} is not found.")
def get_mapping_function(function_name, functions_mapping):
@@ -455,7 +455,7 @@ def get_mapping_function(function_name, functions_mapping):
except AttributeError:
pass
raise exceptions.FunctionNotFound("{} is not found.".format(function_name))
raise exceptions.FunctionNotFound(f"{function_name} is not found.")
def parse_function_params(params):
@@ -578,12 +578,12 @@ class LazyFunction(object):
if self._kwargs:
args_string += ", "
str_kwargs = [
"{}={}".format(key, str(value))
f"{key}={str(value)}"
for key, value in self._kwargs.items()
]
args_string += ", ".join(str_kwargs)
return "LazyFunction({}({}))".format(self.func_name, args_string)
return f"LazyFunction({self.func_name}({args_string}))"
def __prepare_cache_key(self, args, kwargs):
return self.func_name, repr(args), repr(kwargs)
@@ -698,7 +698,7 @@ class LazyString(object):
self._string += escape_braces(remain_string)
def __repr__(self):
return "LazyString({})".format(self.raw_string)
return f"LazyString({self.raw_string})"
def to_value(self, variables_mapping=None):
""" parse lazy data with evaluated variables mapping.
@@ -734,7 +734,7 @@ def prepare_lazy_data(content, functions_mapping=None, check_variables_set=None,
"""
# TODO: refactor type check
if content is None or isinstance(content, (numeric_types, bool, type)):
if content is None or isinstance(content, (int, float, bool, type)):
return content
elif isinstance(content, (list, set, tuple)):
@@ -767,7 +767,7 @@ def prepare_lazy_data(content, functions_mapping=None, check_variables_set=None,
return parsed_content
elif isinstance(content, basestring):
elif isinstance(content, str):
# content is in string format here
if not is_var_or_func_exist(content):
# content is neither variable nor function
@@ -787,7 +787,7 @@ def parse_lazy_data(content, variables_mapping=None):
Notice: variables_mapping should not contain any variable or function.
"""
# TODO: refactor type check
if content is None or isinstance(content, (numeric_types, bool, type)):
if content is None or isinstance(content, (int, float, bool, type)):
return content
elif isinstance(content, LazyString):
@@ -1056,7 +1056,7 @@ def __prepare_config(config, project_mapping, session_variables_set=None):
override_variables = utils.deepcopy_dict(project_mapping.get("variables", {}))
functions = project_mapping.get("functions", {})
if isinstance(raw_config_variables, basestring) and function_regex_compile.match(
if isinstance(raw_config_variables, str) and function_regex_compile.match(
raw_config_variables):
# config variables are generated by calling function
# e.g.
@@ -1247,7 +1247,7 @@ def _parse_testcase(testcase, project_mapping, session_variables_set=None):
testcase_type = testcase["type"]
testcase_path = testcase.get("path")
logger.log_error("failed to parse testcase: {}, error: {}".format(testcase_path, ex))
logger.error(f"failed to parse testcase: {testcase_path}, error: {ex}")
global parse_failed_testfiles
if testcase_type not in parse_failed_testfiles:


@@ -3,8 +3,8 @@ import os
from datetime import datetime
from jinja2 import Template
from loguru import logger
from httprunner import logger
from httprunner.exceptions import SummaryEmpty
@@ -19,7 +19,7 @@ def gen_html_report(summary, report_template=None, report_dir=None, report_file=
"""
if not summary["time"] or summary["stat"]["testcases"]["total"] == 0:
logger.log_error("test result summary is empty ! {}".format(summary))
logger.error(f"test result summary is empty ! {summary}")
raise SummaryEmpty
if not report_template:
@@ -27,11 +27,11 @@ def gen_html_report(summary, report_template=None, report_dir=None, report_file=
os.path.abspath(os.path.dirname(__file__)),
"template.html"
)
logger.log_debug("No html report template specified, use default.")
logger.debug("No html report template specified, use default.")
else:
logger.log_info("render with html report template: {}".format(report_template))
logger.info(f"render with html report template: {report_template}")
logger.log_info("Start to render Html report ...")
logger.info("Start to render Html report ...")
start_at_timestamp = summary["time"]["start_at"]
utc_time_iso_8601_str = datetime.utcfromtimestamp(start_at_timestamp).isoformat()
@@ -58,7 +58,7 @@ def gen_html_report(summary, report_template=None, report_dir=None, report_file=
).render(summary)
fp_w.write(rendered_content)
logger.log_info("Generated Html report: {}".format(report_path))
logger.info(f"Generated Html report: {report_path}")
return report_path


@@ -1,7 +1,7 @@
import time
import unittest
from httprunner import logger
from loguru import logger
class HtmlTestResult(unittest.TextTestResult):
@@ -27,7 +27,7 @@ class HtmlTestResult(unittest.TextTestResult):
def startTest(self, test):
""" add start test time """
super(HtmlTestResult, self).startTest(test)
logger.color_print(test.shortDescription(), "yellow")
logger.info(test.shortDescription())
def addSuccess(self, test):
super(HtmlTestResult, self).addSuccess(test)


@@ -1,11 +1,10 @@
import json
from base64 import b64encode
from collections import Iterable
from jinja2 import escape
from requests.cookies import RequestsCookieJar
from httprunner.compat import basestring, bytes, json, numeric_types, JSONDecodeError
def dumps_json(value):
""" dumps json value to indented string
@@ -67,13 +66,13 @@ def __stringify_request(request_data):
# request body is in json format
value = json.loads(value)
value = dumps_json(value)
except JSONDecodeError:
except json.JSONDecodeError:
pass
value = escape(value)
except UnicodeDecodeError:
pass
elif not isinstance(value, (basestring, numeric_types, Iterable)):
elif not isinstance(value, (str, bytes, int, float, Iterable)):
# class instance, e.g. MultipartEncoder()
value = repr(value)
@@ -132,7 +131,7 @@ def __stringify_response(response_data):
except UnicodeDecodeError:
pass
elif not isinstance(value, (basestring, numeric_types, Iterable)):
elif not isinstance(value, (str, bytes, int, float, Iterable)):
# class instance, e.g. MultipartEncoder()
value = repr(value)
@@ -205,7 +204,7 @@ def stringify_summary(summary):
for index, suite_summary in enumerate(summary["details"]):
if not suite_summary.get("name"):
suite_summary["name"] = "testcase {}".format(index)
suite_summary["name"] = f"testcase {index}"
for record in suite_summary.get("records"):
meta_datas = record['meta_datas']


@@ -1,10 +1,11 @@
import json
import re
from collections import OrderedDict
import jsonpath
from loguru import logger
from httprunner import exceptions, logger, utils
from httprunner.compat import basestring, is_py2
from httprunner import exceptions, utils
text_extractor_regexp_compile = re.compile(r".*\(.*\).*")
@@ -32,39 +33,60 @@ class ResponseObject(object):
self.__dict__[key] = value
return value
except AttributeError:
err_msg = "ResponseObject does not have attribute: {}".format(key)
logger.log_error(err_msg)
err_msg = f"ResponseObject does not have attribute: {key}"
logger.error(err_msg)
raise exceptions.ParamsError(err_msg)
def _extract_field_with_jsonpath(self, field):
"""
def _extract_field_with_jsonpath(self, field: str) -> list:
""" extract field from response content with jsonpath expression.
JSONPath Docs: https://goessner.net/articles/JsonPath/
For example, response body like below:
{
"code": 200,
"data": {
"items": [{
"id": 1,
"name": "Bob"
},
{
"id": 2,
"name": "James"
}
]
},
"message": "success"
}
:param field: Jsonpath expression, e.g. 1)$.code 2) $..items.*.id
:return: A list that extracted from json repsonse example. 1) [200] 2) [1, 2]
Args:
field: jsonpath expression, e.g. $.code, $..items.*.id
Returns:
A list that extracted from json response example. 1) [200] 2) [1, 2]
Raises:
exceptions.ExtractFailure: If no content matched with jsonpath expression.
Examples:
For example, response body like below:
{
"code": 200,
"data": {
"items": [{
"id": 1,
"name": "Bob"
},
{
"id": 2,
"name": "James"
}
]
},
"message": "success"
}
>>> _extract_field_with_regex("$.code")
[200]
>>> _extract_field_with_regex("$..items.*.id")
[1, 2]
"""
result = jsonpath.jsonpath(self.parsed_body(), field)
if result:
try:
json_body = self.json
assert json_body
result = jsonpath.jsonpath(json_body, field)
assert result
return result
else:
raise exceptions.ExtractFailure("\tjsonpath {} get nothing\n".format(field))
except (AssertionError, exceptions.JSONDecodeError):
err_msg = f"Failed to extract data with jsonpath! => {field}\n"
err_msg += f"response body: {self.text}\n"
logger.error(err_msg)
raise exceptions.ExtractFailure(err_msg)
def _extract_field_with_regex(self, field):
""" extract field from response content with regex.
requests.Response body could be json or html text.
@@ -87,9 +109,9 @@ class ResponseObject(object):
"""
matched = re.search(field, self.text)
if not matched:
err_msg = u"Failed to extract data with regex! => {}\n".format(field)
err_msg += u"response body: {}\n".format(self.text)
logger.log_error(err_msg)
err_msg = f"Failed to extract data with regex! => {field}\n"
err_msg += f"response body: {self.text}\n"
logger.error(err_msg)
raise exceptions.ExtractFailure(err_msg)
return matched.group(1)
@@ -120,8 +142,8 @@ class ResponseObject(object):
if top_query in ["status_code", "encoding", "ok", "reason", "url"]:
if sub_query:
# status_code.XX
err_msg = u"Failed to extract: {}\n".format(field)
logger.log_error(err_msg)
err_msg = f"Failed to extract: {field}\n"
logger.error(err_msg)
raise exceptions.ParamsError(err_msg)
return getattr(self, top_query)
@@ -136,27 +158,27 @@ class ResponseObject(object):
try:
return cookies[sub_query]
except KeyError:
err_msg = u"Failed to extract cookie! => {}\n".format(field)
err_msg += u"response cookies: {}\n".format(cookies)
logger.log_error(err_msg)
err_msg = f"Failed to extract cookie! => {field}\n"
err_msg += f"response cookies: {cookies}\n"
logger.error(err_msg)
raise exceptions.ExtractFailure(err_msg)
# elapsed
elif top_query == "elapsed":
available_attributes = u"available attributes: days, seconds, microseconds, total_seconds"
if not sub_query:
err_msg = u"elapsed is datetime.timedelta instance, attribute should also be specified!\n"
err_msg = "elapsed is datetime.timedelta instance, attribute should also be specified!\n"
err_msg += available_attributes
logger.log_error(err_msg)
logger.error(err_msg)
raise exceptions.ParamsError(err_msg)
elif sub_query in ["days", "seconds", "microseconds"]:
return getattr(self.elapsed, sub_query)
elif sub_query == "total_seconds":
return self.elapsed.total_seconds()
else:
err_msg = "{} is not valid datetime.timedelta attribute.\n".format(sub_query)
err_msg = f"{sub_query} is not valid datetime.timedelta attribute.\n"
err_msg += available_attributes
logger.log_error(err_msg)
logger.error(err_msg)
raise exceptions.ParamsError(err_msg)
# headers
@@ -169,16 +191,16 @@ class ResponseObject(object):
try:
return headers[sub_query]
except KeyError:
err_msg = u"Failed to extract header! => {}\n".format(field)
err_msg += u"response headers: {}\n".format(headers)
logger.log_error(err_msg)
err_msg = f"Failed to extract header! => {field}\n"
err_msg += f"response headers: {headers}\n"
logger.error(err_msg)
raise exceptions.ExtractFailure(err_msg)
# response body
elif top_query in ["body", "content", "text", "json"]:
try:
body = self.json
except exceptions.JSONDecodeError:
except json.JSONDecodeError:
body = self.text
if not sub_query:
@@ -193,9 +215,9 @@ class ResponseObject(object):
return utils.query_json(body, sub_query)
else:
# content = "<html>abcdefg</html>", content.xxx
err_msg = u"Failed to extract attribute from response body! => {}\n".format(field)
err_msg += u"response body: {}\n".format(body)
logger.log_error(err_msg)
err_msg = f"Failed to extract attribute from response body! => {field}\n"
err_msg += f"response body: {body}\n"
logger.error(err_msg)
raise exceptions.ExtractFailure(err_msg)
# new set response attributes in teardown_hooks
@@ -214,30 +236,30 @@ class ResponseObject(object):
return utils.query_json(attributes, sub_query)
else:
# content = "attributes.new_attribute_not_exist"
err_msg = u"Failed to extract cumstom set attribute from teardown hooks! => {}\n".format(field)
err_msg += u"response set attributes: {}\n".format(attributes)
logger.log_error(err_msg)
err_msg = f"Failed to extract cumstom set attribute from teardown hooks! => {field}\n"
err_msg += f"response set attributes: {attributes}\n"
logger.error(err_msg)
raise exceptions.TeardownHooksFailure(err_msg)
# others
else:
err_msg = u"Failed to extract attribute from response! => {}\n".format(field)
err_msg += u"available response attributes: status_code, cookies, elapsed, headers, content, " \
u"text, json, encoding, ok, reason, url.\n\n"
err_msg += u"If you want to set attribute in teardown_hooks, take the following example as reference:\n"
err_msg += u"response.new_attribute = 'new_attribute_value'\n"
logger.log_error(err_msg)
err_msg = f"Failed to extract attribute from response! => {field}\n"
err_msg += "available response attributes: status_code, cookies, elapsed, headers, content, " \
"text, json, encoding, ok, reason, url.\n\n"
err_msg += "If you want to set attribute in teardown_hooks, take the following example as reference:\n"
err_msg += "response.new_attribute = 'new_attribute_value'\n"
logger.error(err_msg)
raise exceptions.ParamsError(err_msg)
def extract_field(self, field):
""" extract value from requests.Response.
"""
if not isinstance(field, basestring):
err_msg = u"Invalid extractor! => {}\n".format(field)
logger.log_error(err_msg)
if not isinstance(field, str):
err_msg = f"Invalid extractor! => {field}\n"
logger.error(err_msg)
raise exceptions.ParamsError(err_msg)
msg = "extract: {}".format(field)
msg = f"extract: {field}"
if field.startswith("$"):
value = self._extract_field_with_jsonpath(field)
@@ -246,11 +268,8 @@ class ResponseObject(object):
else:
value = self._extract_field_with_delimiter(field)
if is_py2 and isinstance(value, unicode):
value = value.encode("utf-8")
msg += "\t=> {}".format(value)
logger.log_debug(msg)
msg += f"\t=> {value}"
logger.debug(msg)
return value
@@ -274,7 +293,7 @@ class ResponseObject(object):
if not extractors:
return {}
logger.log_debug("start to extract from response object.")
logger.debug("start to extract from response object.")
extracted_variables_mapping = OrderedDict()
extract_binds_order_dict = utils.ensure_mapping_format(extractors)


@@ -3,7 +3,9 @@
from enum import Enum
from unittest.case import SkipTest
from httprunner import exceptions, logger, response, utils
from loguru import logger
from httprunner import exceptions, response, utils
from httprunner.client import HttpSession
from httprunner.context import SessionContext
from httprunner.validator import Validator
@@ -116,12 +118,12 @@ class Runner(object):
elif "skipIf" in test_dict:
skip_if_condition = test_dict["skipIf"]
if self.session_context.eval_content(skip_if_condition):
skip_reason = "{} evaluate to True".format(skip_if_condition)
skip_reason = f"{skip_if_condition} evaluate to True"
elif "skipUnless" in test_dict:
skip_unless_condition = test_dict["skipUnless"]
if not self.session_context.eval_content(skip_unless_condition):
skip_reason = "{} evaluate to False".format(skip_unless_condition)
skip_reason = f"{skip_unless_condition} evaluate to False"
if skip_reason:
raise SkipTest(skip_reason)
@@ -140,7 +142,7 @@ class Runner(object):
hook_type (HookTypeEnum): setup/teardown
"""
logger.log_debug("call {} hook actions.".format(hook_type.name))
logger.debug(f"call {hook_type.name} hook actions.")
for action in actions:
if isinstance(action, dict) and len(action) == 1:
@@ -148,17 +150,14 @@ class Runner(object):
# {"var": "${func()}"}
var_name, hook_content = list(action.items())[0]
hook_content_eval = self.session_context.eval_content(hook_content)
logger.log_debug(
"assignment with hook: {} = {} => {}".format(
var_name, hook_content, hook_content_eval
)
)
logger.debug(
f"assignment with hook: {var_name} = {hook_content} => {hook_content_eval}")
self.session_context.update_test_variables(
var_name, hook_content_eval
)
else:
# format 2
logger.log_debug("call hook function: {}".format(action))
logger.debug(f"call hook function: {action}")
# TODO: check hook function if valid
self.session_context.eval_content(action)
@@ -230,9 +229,8 @@ class Runner(object):
except KeyError:
raise exceptions.ParamsError("URL or METHOD missed!")
logger.log_info("{method} {url}".format(method=method, url=parsed_url))
logger.log_debug(
"request kwargs(raw): {kwargs}".format(kwargs=parsed_test_request))
logger.info(f"{method} {parsed_url}")
logger.debug(f"request kwargs(raw): {parsed_test_request}")
# request
resp = self.http_client_session.request(
@@ -248,21 +246,22 @@ class Runner(object):
# log request
err_msg += "====== request details ======\n"
err_msg += "url: {}\n".format(parsed_url)
err_msg += "method: {}\n".format(method)
err_msg += "headers: {}\n".format(parsed_test_request.pop("headers", {}))
err_msg += f"url: {parsed_url}\n"
err_msg += f"method: {method}\n"
headers = parsed_test_request.pop("headers", {})
err_msg += f"headers: {headers}\n"
for k, v in parsed_test_request.items():
v = utils.omit_long_data(v)
err_msg += "{}: {}\n".format(k, repr(v))
err_msg += f"{k}: {repr(v)}\n"
err_msg += "\n"
# log response
err_msg += "====== response details ======\n"
err_msg += "status_code: {}\n".format(resp_obj.status_code)
err_msg += "headers: {}\n".format(resp_obj.headers)
err_msg += "body: {}\n".format(repr(resp_obj.text))
logger.log_error(err_msg)
err_msg += f"status_code: {resp_obj.status_code}\n"
err_msg += f"headers: {resp_obj.headers}\n"
err_msg += f"body: {repr(resp_obj.text)}\n"
logger.error(err_msg)
# teardown hooks
teardown_hooks = test_dict.get("teardown_hooks", [])
@@ -395,9 +394,9 @@ class Runner(object):
output = {}
for variable in output_variables_list:
if variable not in variables_mapping:
logger.log_warning(
"variable '{}' can not be found in variables mapping, "
"failed to export!".format(variable)
logger.warning(
f"variable '{variable}' can not be found in variables mapping, "
"failed to export!"
)
continue


@@ -0,0 +1 @@
from .testcase import ProjectMeta, TestCase, TestCases

httprunner/schema/api.py (new file)

@@ -0,0 +1,14 @@
from pydantic import BaseModel
from httprunner.schema import common
class Api(BaseModel):
name: common.Name
request: common.Request
variables: common.Variables
base_url: common.BaseUrl
setup_hooks: common.Hook
teardown_hooks: common.Hook
extract: common.Extract
validate: common.Validate


@@ -0,0 +1,61 @@
from enum import Enum
from typing import Dict, List, Any, Tuple
from pydantic import BaseModel, HttpUrl, Field
Name = str
Url = HttpUrl
BaseUrl = str
Variables = Dict[str, Any]
Headers = Dict[str, str]
Verify = bool
Hook = List[str]
Export = List[str]
Extract = Dict[str, str]
Validate = List[Dict]
Env = Dict[str, Any]
class MethodEnum(str, Enum):
GET = 'GET'
POST = 'POST'
PUT = "PUT"
DELETE = "DELETE"
HEAD = "HEAD"
OPTIONS = "OPTIONS"
PATCH = "PATCH"
CONNECT = "CONNECT"
TRACE = "TRACE"
class TestsConfig(BaseModel):
name: Name
verify: Verify = False
base_url: BaseUrl = ""
variables: Variables = {}
setup_hooks: Hook = []
teardown_hooks: Hook = []
export: Export = []
class Config:
schema_extra = {
"examples": [
{
"name": "used in testcase/testsuite to configure common fields",
"verify": False,
"base_url": "https://httpbin.org"
}
]
}
class Request(BaseModel):
method: MethodEnum = MethodEnum.GET
url: Url
params: Dict[str, str] = {}
headers: Headers = {}
req_json: Dict = Field({}, alias="json")
cookies: Dict[str, str] = {}
timeout: int = 120
allow_redirects: bool = True
verify: Verify = False
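
A hedged example of the new request model (pydantic v1 style, matching the `Field({}, alias="json")` declaration above; input dicts use the wire name `json`):

```python
from httprunner.schema.common import MethodEnum, Request

req = Request(url="https://httpbin.org/get", json={"k": "v"})  # alias fills req_json
assert req.method == MethodEnum.GET         # enum default
assert req.req_json == {"k": "v"}
print(req.dict(by_alias=True)["json"])      # alias restored on export: {'k': 'v'}
```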


@@ -0,0 +1,85 @@
from typing import Dict, List, Text
from pydantic import BaseModel, Field
from httprunner.schema import common
class ProjectMeta(BaseModel):
debugtalk_py: Text = ""
variables: common.Variables = {}
env: common.Env = {}
class TestStep(BaseModel):
name: common.Name
request: common.Request
extract: Dict[str, str] = {}
validation: common.Validate = Field([], alias="validate")
class TestCase(BaseModel):
config: common.TestsConfig
teststeps: List[TestStep]
class Config:
schema_extra = {
"examples": [
{
"config": {
"name": "testcase name"
},
"teststeps": [
{
"name": "api 1",
"api": "/path/to/api1"
},
{
"name": "api 2",
"api": "/path/to/api2"
}
]
},
{
"config": {
"name": "demo testcase",
"variables": {
"device_sn": "ABC",
"username": "${ENV(USERNAME)}",
"password": "${ENV(PASSWORD)}"
},
"base_url": "http://127.0.0.1:5000"
},
"teststeps": [
{
"name": "demo step 1",
"api": "path/to/api1.yml",
"variables": {
"user_agent": "iOS/10.3",
"device_sn": "$device_sn"
},
"extract": [
{
"token": "content.token"
}
],
"validate": [
{
"eq": ["status_code", 200]
}
]
},
{
"name": "demo step 2",
"api": "path/to/api2.yml",
"variables": {
"token": "$token"
}
}
]
}
]
}
TestCases = List[TestCase]
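
And the testcase model end to end, populating the `validation` field through its `validate` alias (pydantic v1 `parse_obj`, matching the `.dict(by_alias=True)` calls in the debug router):

```python
from httprunner.schema import TestCase

tc = TestCase.parse_obj({
    "config": {"name": "demo testcase"},
    "teststeps": [{
        "name": "get index page",
        "request": {"url": "https://httpbin.org/"},
        "validate": [{"eq": ["status_code", 200]}],
    }],
})
print(tc.teststeps[0].validation)  # [{'eq': ['status_code', 200]}]
print(tc.config.verify)            # False (default)
```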


@@ -8,12 +8,11 @@ import json
import os.path
import re
import uuid
from datetime import datetime
import sentry_sdk
from loguru import logger
from httprunner import exceptions, logger, __version__
from httprunner.compat import basestring, bytes, is_py2
from httprunner import exceptions, __version__
from httprunner.exceptions import ParamsError
absolute_http_url_regexp = re.compile(r"^https?://", re.I)
@@ -22,7 +21,7 @@ absolute_http_url_regexp = re.compile(r"^https?://", re.I)
def init_sentry_sdk():
sentry_sdk.init(
dsn="https://cc6dd86fbe9f4e7fbd95248cfcff114d@sentry.io/1862849",
release="httprunner@{}".format(__version__)
release=f"httprunner@{__version__}"
)
with sentry_sdk.configure_scope() as scope:
@@ -34,7 +33,7 @@ def set_os_environ(variables_mapping):
"""
for variable in variables_mapping:
os.environ[variable] = variables_mapping[variable]
logger.log_debug("Set OS environment variable: {}".format(variable))
logger.debug(f"Set OS environment variable: {variable}")
def unset_os_environ(variables_mapping):
@@ -42,7 +41,7 @@ def unset_os_environ(variables_mapping):
"""
for variable in variables_mapping:
os.environ.pop(variable)
logger.log_debug("Unset OS environment variable: {}".format(variable))
logger.debug(f"Unset OS environment variable: {variable}")
def get_os_environ(variable_name):
@@ -109,24 +108,24 @@ def query_json(json_content, query, delimiter='.'):
"""
raise_flag = False
response_body = u"response body: {}\n".format(json_content)
response_body = f"response body: {json_content}\n"
try:
for key in query.split(delimiter):
if isinstance(json_content, (list, basestring)):
if isinstance(json_content, (list, str, bytes)):
json_content = json_content[int(key)]
elif isinstance(json_content, dict):
json_content = json_content[key]
else:
logger.log_error(
"invalid type value: {}({})".format(json_content, type(json_content)))
logger.error(
f"invalid type value: {json_content}({type(json_content)})")
raise_flag = True
except (KeyError, ValueError, IndexError):
raise_flag = True
if raise_flag:
err_msg = u"Failed to extract! => {}\n".format(query)
err_msg = f"Failed to extract! => {query}\n"
err_msg += response_body
logger.log_error(err_msg)
logger.error(err_msg)
raise exceptions.ExtractFailure(err_msg)
return json_content
@@ -355,38 +354,32 @@ def print_info(info_mapping):
elif value is None:
value = "None"
if is_py2:
if isinstance(key, unicode):
key = key.encode("utf-8")
if isinstance(value, unicode):
value = value.encode("utf-8")
content += content_format.format(key, value)
content += "-" * 48 + "\n"
logger.log_info(content)
logger.info(content)
def create_scaffold(project_name):
""" create scaffold with specified project name.
"""
if os.path.isdir(project_name):
logger.log_warning(u"Folder {} exists, please specify a new folder name.".format(project_name))
logger.warning(f"Folder {project_name} exists, please specify a new folder name.")
return
logger.color_print("Start to create new project: {}".format(project_name), "GREEN")
logger.color_print("CWD: {}\n".format(os.getcwd()), "BLUE")
logger.info(f"Start to create new project: {project_name}")
logger.info(f"CWD: {os.getcwd()}")
def create_folder(path):
os.makedirs(path)
msg = "created folder: {}".format(path)
logger.color_print(msg, "BLUE")
msg = f"created folder: {path}"
logger.info(msg)
def create_file(path, file_content=""):
with open(path, 'w') as f:
f.write(file_content)
msg = "created file: {}".format(path)
logger.color_print(msg, "BLUE")
msg = f"created file: {path}"
logger.info(msg)
demo_api_content = """
name: demo api
@@ -526,14 +519,14 @@ def prettify_json_file(file_list):
"""
for json_file in set(file_list):
if not json_file.endswith(".json"):
logger.log_warning("Only JSON file format can be prettified, skip: {}".format(json_file))
logger.warning(f"Only JSON file format can be prettified, skip: {json_file}")
continue
logger.color_print("Start to prettify JSON file: {}".format(json_file), "GREEN")
logger.info(f"Start to prettify JSON file: {json_file}")
dir_path = os.path.dirname(json_file)
file_name, file_suffix = os.path.splitext(os.path.basename(json_file))
outfile = os.path.join(dir_path, "{}.pretty.json".format(file_name))
outfile = os.path.join(dir_path, f"{file_name}.pretty.json")
with io.open(json_file, 'r', encoding='utf-8') as stream:
try:
@@ -545,13 +538,13 @@ def prettify_json_file(file_list):
json.dump(obj, out, indent=4, separators=(',', ': '))
out.write('\n')
print("success: {}".format(outfile))
print(f"success: {outfile}")
def omit_long_data(body, omit_len=512):
""" omit too long str/bytes
"""
if not isinstance(body, basestring):
if not isinstance(body, (str, bytes)):
return body
body_len = len(body)
@@ -560,7 +553,7 @@ def omit_long_data(body, omit_len=512):
omitted_body = body[0:omit_len]
appendix_str = " ... OMITTED {} CHARACTORS ...".format(body_len - omit_len)
appendix_str = f" ... OMITTED {body_len - omit_len} CHARACTORS ..."
if isinstance(body, bytes):
appendix_str = appendix_str.encode("utf-8")
@@ -583,59 +576,46 @@ def dump_json_file(json_data, json_file_abs_path):
try:
with io.open(json_file_abs_path, 'w', encoding='utf-8') as outfile:
if is_py2:
outfile.write(
unicode(json.dumps(
json_data,
indent=4,
separators=(',', ':'),
encoding="utf8",
ensure_ascii=False,
cls=PythonObjectEncoder
))
)
else:
json.dump(
json_data,
outfile,
indent=4,
separators=(',', ':'),
ensure_ascii=False,
cls=PythonObjectEncoder
)
json.dump(
json_data,
outfile,
indent=4,
separators=(',', ':'),
ensure_ascii=False,
cls=PythonObjectEncoder
)
msg = "dump file: {}".format(json_file_abs_path)
logger.color_print(msg, "BLUE")
msg = f"dump file: {json_file_abs_path}"
logger.info(msg)
except TypeError as ex:
msg = "Failed to dump json file: {}\nReason: {}".format(json_file_abs_path, ex)
logger.color_print(msg, "RED")
msg = f"Failed to dump json file: {json_file_abs_path}\nReason: {ex}"
logger.error(msg)
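PythonObjectEncoder is referenced above but not defined in this hunk. A plausible sketch of such an encoder (a hypothetical stand-in; the real class may differ) falls back to repr() for values the stock json module cannot serialize:

import json

class PythonObjectEncoderSketch(json.JSONEncoder):
    # hypothetical stand-in for httprunner's PythonObjectEncoder:
    # serialize normally where possible, fall back to repr() otherwise
    def default(self, obj):
        try:
            return super().default(obj)
        except TypeError:
            return repr(obj)

print(json.dumps({"func": len}, cls=PythonObjectEncoderSketch, indent=4))
# {"func": "<built-in function len>"}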
def prepare_dump_json_file_abs_path(project_mapping, tag_name):
def prepare_log_file_abs_path(project_mapping, file_name):
""" prepare dump json file absolute path.
"""
pwd_dir_path = project_mapping.get("PWD") or os.getcwd()
current_working_dir = os.getcwd()
test_path = project_mapping.get("test_path")
if not test_path:
# running passed in testcase/testsuite data structure
dump_file_name = "tests_mapping.{}.json".format(tag_name)
dumped_json_file_abs_path = os.path.join(pwd_dir_path, "logs", dump_file_name)
dump_file_name = f"tests_mapping.{file_name}"
dumped_json_file_abs_path = os.path.join(current_working_dir, "logs", dump_file_name)
return dumped_json_file_abs_path
# both test_path and pwd_dir_path are absolute path
logs_dir_path = os.path.join(pwd_dir_path, "logs")
test_path_relative_path = test_path[len(pwd_dir_path)+1:]
logs_dir_path = os.path.join(current_working_dir, "logs")
if os.path.isdir(test_path):
file_folder_path = os.path.join(logs_dir_path, test_path_relative_path)
dump_file_name = "all.{}.json".format(tag_name)
file_folder_path = os.path.join(logs_dir_path, test_path)
dump_file_name = f"all.{file_name}"
else:
file_relative_folder_path, test_file = os.path.split(test_path_relative_path)
file_relative_folder_path, test_file = os.path.split(test_path)
file_folder_path = os.path.join(logs_dir_path, file_relative_folder_path)
test_file_name, _file_suffix = os.path.splitext(test_file)
dump_file_name = "{}.{}.json".format(test_file_name, tag_name)
dump_file_name = f"{test_file_name}.{file_name}"
dumped_json_file_abs_path = os.path.join(file_folder_path, dump_file_name)
return dumped_json_file_abs_path
@@ -651,18 +631,5 @@ def dump_logs(json_data, project_mapping, tag_name):
tag_name (str): tag name, loaded/parsed/summary
"""
json_file_abs_path = prepare_dump_json_file_abs_path(project_mapping, tag_name)
json_file_abs_path = prepare_log_file_abs_path(project_mapping, f"{tag_name}.json")
dump_json_file(json_data, json_file_abs_path)
def get_python2_retire_msg():
retire_day = datetime(2020, 1, 1)
today = datetime.now()
left_days = (retire_day - today).days
if left_days > 0:
retire_msg = "Python 2 will retire in {} days, why not move to Python 3?".format(left_days)
else:
retire_msg = "Python 2 has been retired, you should move to Python 3."
return retire_msg

View File

@@ -1,12 +1,12 @@
import io
import os
import shutil
import unittest
from httprunner import exceptions, loader, utils
from tests.base import ApiServerUnittest
class TestUtils(ApiServerUnittest):
class TestUtils(unittest.TestCase):
def test_set_os_environ(self):
self.assertNotIn("abc", os.environ)
@@ -187,14 +187,18 @@ class TestUtils(ApiServerUnittest):
self.assertEqual(extended_variables_mapping["var1"], "val1")
def test_deepcopy_dict(self):
license_path = os.path.join(
os.path.dirname(os.path.dirname(__file__)),
"LICENSE"
)
data = {
'a': 1,
'b': [2, 4],
'c': lambda x: x+1,
'd': open('LICENSE'),
'd': open(license_path),
'f': {
'f1': {'a1': 2},
'f2': io.open('LICENSE', 'rb'),
'f2': io.open(license_path, 'rb'),
}
}
new_data = utils.deepcopy_dict(data)
@@ -274,26 +278,22 @@ class TestUtils(ApiServerUnittest):
def test_prepare_dump_json_file_path_for_folder(self):
# hrun tests/httpbin/a.b.c/ --save-tests
project_working_directory = os.path.join(os.getcwd(), "tests")
project_mapping = {
"PWD": project_working_directory,
"test_path": os.path.join(os.getcwd(), "tests", "httpbin", "a.b.c")
"test_path": os.path.join("tests", "httpbin", "a.b.c")
}
self.assertEqual(
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
os.path.join(project_working_directory, "logs", "httpbin/a.b.c/all.loaded.json")
utils.prepare_log_file_abs_path(project_mapping, "loaded"),
os.path.join(os.getcwd(), "logs", "tests/httpbin/a.b.c/all.loaded.json")
)
def test_prepare_dump_json_file_path_for_file(self):
# hrun tests/httpbin/a.b.c/rpc.yml --save-tests
project_working_directory = os.path.join(os.getcwd(), "tests")
project_mapping = {
"PWD": project_working_directory,
"test_path": os.path.join(os.getcwd(), "tests", "httpbin", "a.b.c", "rpc.yml")
"test_path": os.path.join("tests", "httpbin", "a.b.c", "rpc.yml")
}
self.assertEqual(
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
os.path.join(project_working_directory, "logs", "httpbin/a.b.c/rpc.loaded.json")
utils.prepare_log_file_abs_path(project_mapping, "loaded"),
os.path.join(os.getcwd(), "logs", "tests/httpbin/a.b.c/rpc.loaded.json")
)
def test_prepare_dump_json_file_path_for_passed_testcase(self):
@@ -302,6 +302,6 @@ class TestUtils(ApiServerUnittest):
"PWD": project_working_directory
}
self.assertEqual(
utils.prepare_dump_json_file_abs_path(project_mapping, "loaded"),
os.path.join(project_working_directory, "logs", "tests_mapping.loaded.json")
utils.prepare_log_file_abs_path(project_mapping, "loaded"),
os.path.join(os.getcwd(), "logs", "tests_mapping.loaded.json")
)

View File

@@ -3,7 +3,9 @@
import sys
import traceback
from httprunner import exceptions, logger, parser
from loguru import logger
from httprunner import exceptions, parser
class Validator(object):
@@ -70,12 +72,12 @@ class Validator(object):
}
script = "\n ".join(script)
code = """
code = f"""
# encoding: utf-8
def run_validate_script():
{}
""".format(script)
{script}
"""
variables = {
"status_code": self.resp_obj.status_code,
@@ -88,12 +90,12 @@ def run_validate_script():
try:
exec(code, variables)
except SyntaxError as ex:
logger.log_warning("SyntaxError in python validate script: {}".format(ex))
logger.warning(f"SyntaxError in python validate script: {ex}")
result["check_result"] = "fail"
result["output"] = "<br/>".join([
"ErrorMessage: {}".format(ex.msg),
"ErrorLine: {}".format(ex.lineno),
"ErrorText: {}".format(ex.text)
f"ErrorMessage: {ex.msg}",
f"ErrorLine: {ex.lineno}",
f"ErrorText: {ex.text}"
])
return result
@@ -101,7 +103,7 @@ def run_validate_script():
# run python validate script
variables["run_validate_script"]()
except Exception as ex:
logger.log_warning("run python validate script failed: {}".format(ex))
logger.warning(f"run python validate script failed: {ex}")
result["check_result"] = "fail"
_type, _value, _tb = sys.exc_info()
@@ -118,8 +120,8 @@ def run_validate_script():
line_no = "N/A"
result["output"] = "<br/>".join([
"ErrorType: {}".format(_type.__name__),
"ErrorLine: {}".format(line_no)
f"ErrorType: {_type.__name__}",
f"ErrorLine: {line_no}"
])
return result
@@ -131,7 +133,7 @@ def run_validate_script():
if not validators:
return
logger.log_debug("start to validate.")
logger.debug("start to validate.")
validate_pass = True
failures = []
@@ -154,7 +156,7 @@ def run_validate_script():
# validator should be LazyFunction object
if not isinstance(validator, parser.LazyFunction):
raise exceptions.ValidationFailure(
"validator should be parsed first: {}".format(validators))
f"validator should be parsed first: {validators}")
# evaluate validator args with context variable mapping.
validator_args = validator.get_args()
@@ -171,18 +173,13 @@ def run_validate_script():
"expect": expect_item,
"expect_value": expect_value
}
validate_msg = "\nvalidate: {} {} {}({})".format(
check_item,
comparator,
expect_value,
type(expect_value).__name__
)
validate_msg = f"\nvalidate: {check_item} {comparator} {expect_value}({type(expect_value).__name__})"
try:
validator.to_value(self.session_context.test_variables_mapping)
validator_dict["check_result"] = "pass"
validate_msg += "\t==> pass"
logger.log_debug(validate_msg)
logger.debug(validate_msg)
except (AssertionError, TypeError):
validate_pass = False
validator_dict["check_result"] = "fail"
@@ -194,7 +191,7 @@ def run_validate_script():
expect_value,
type(expect_value).__name__
)
logger.log_error(validate_msg)
logger.error(validate_msg)
failures.append(validate_msg)
self.validation_results["validate_extractor"].append(validator_dict)

poetry.lock generated
View File

@@ -1,3 +1,17 @@
[[package]]
category = "main"
description = "Asyncio support for PEP-567 contextvars backport."
marker = "python_version < \"3.7\""
name = "aiocontextvars"
optional = false
python-versions = ">=3.5"
version = "0.2.2"
[package.dependencies]
[package.dependencies.contextvars]
python = "<3.7"
version = "2.4"
[[package]]
category = "main"
description = "Classes Without Boilerplate"
@@ -39,6 +53,7 @@ version = "7.0"
[[package]]
category = "main"
description = "Cross-platform colored terminal text."
marker = "sys_platform == \"win32\""
name = "colorama"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
@@ -46,36 +61,15 @@ version = "0.4.3"
[[package]]
category = "main"
description = "Log formatting with colors!"
name = "colorlog"
description = "PEP 567 Backport"
marker = "python_version < \"3.7\""
name = "contextvars"
optional = false
python-versions = "*"
version = "4.0.2"
version = "2.4"
[package.dependencies]
colorama = "*"
[[package]]
category = "main"
description = "Updated configparser from Python 3.7 for Python 2.6+."
marker = "python_version < \"3\""
name = "configparser"
optional = false
python-versions = ">=2.6"
version = "4.0.2"
[package.extras]
docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2)", "pytest-flake8", "pytest-black-multipy"]
[[package]]
category = "main"
description = "Backports and enhancements for the contextlib module"
marker = "python_version < \"3\""
name = "contextlib2"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "0.6.0.post1"
immutables = ">=0.9"
[[package]]
category = "dev"
@@ -87,12 +81,30 @@ version = "4.5.4"
[[package]]
category = "main"
description = "Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4"
marker = "python_version >= \"2.7\" and python_version < \"2.8\""
name = "enum34"
description = "A backport of the dataclasses module for Python 3.6"
marker = "python_version < \"3.7\""
name = "dataclasses"
optional = false
python-versions = "*"
version = "1.1.6"
version = "0.6"
[[package]]
category = "dev"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
name = "fastapi"
optional = false
python-versions = ">=3.6"
version = "0.49.2"
[package.dependencies]
pydantic = ">=0.32.2,<2.0.0"
starlette = "0.12.9"
[package.extras]
all = ["requests", "aiofiles", "jinja2", "python-multipart", "itsdangerous", "pyyaml", "graphene", "ujson", "email-validator", "uvicorn", "async-exit-stack", "async-generator"]
dev = ["pyjwt", "passlib", "autoflake", "flake8", "uvicorn", "graphene"]
doc = ["mkdocs", "mkdocs-material", "markdown-include"]
test = ["pytest (>=4.0.0)", "pytest-cov", "mypy", "black", "isort", "requests", "email-validator", "sqlalchemy", "peewee", "databases", "orjson", "async-exit-stack", "async-generator", "python-multipart", "aiofiles", "ujson"]
[[package]]
category = "main"
@@ -117,22 +129,12 @@ click = ">=2.0"
itsdangerous = ">=0.21"
[[package]]
category = "main"
description = "Backport of the functools module from Python 3.2.3 for use on 2.7 and PyPy."
marker = "python_version < \"3\""
name = "functools32"
category = "dev"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
name = "h11"
optional = false
python-versions = "*"
version = "3.2.3-2"
[[package]]
category = "main"
description = "Clean single-source support for Python 3 and 2"
marker = "python_version >= \"2.7\" and python_version < \"2.8\""
name = "future"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
version = "0.18.2"
version = "0.9.0"
[[package]]
category = "main"
@@ -145,6 +147,18 @@ version = "0.3.1"
[package.dependencies]
PyYAML = "*"
[[package]]
category = "dev"
description = "A collection of framework independent HTTP protocol utils."
marker = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\""
name = "httptools"
optional = false
python-versions = "*"
version = "0.1.1"
[package.extras]
test = ["Cython (0.29.14)"]
[[package]]
category = "main"
description = "Internationalized Domain Names in Applications (IDNA)"
@@ -153,6 +167,15 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.8"
[[package]]
category = "main"
description = "Immutable Collections"
marker = "python_version < \"3.7\""
name = "immutables"
optional = false
python-versions = "*"
version = "0.11"
[[package]]
category = "main"
description = "Read metadata from Python packages"
@@ -165,18 +188,6 @@ version = "1.3.0"
[package.dependencies]
zipp = ">=0.5"
[package.dependencies.configparser]
python = "<3"
version = ">=3.5"
[package.dependencies.contextlib2]
python = "<3"
version = "*"
[package.dependencies.pathlib2]
python = "<3"
version = "*"
[package.extras]
docs = ["sphinx", "rst.linker"]
testing = ["packaging", "importlib-resources"]
@@ -225,10 +236,6 @@ pyrsistent = ">=0.14.0"
setuptools = "*"
six = ">=1.11.0"
[package.dependencies.functools32]
python = "<3"
version = "*"
[package.dependencies.importlib-metadata]
python = "<3.8"
version = "*"
@@ -237,6 +244,25 @@ version = "*"
format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"]
format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"]
[[package]]
category = "main"
description = "Python logging made (stupidly) simple"
name = "loguru"
optional = false
python-versions = ">=3.5"
version = "0.4.1"
[package.dependencies]
colorama = ">=0.3.4"
win32-setctime = ">=1.0.0"
[package.dependencies.aiocontextvars]
python = "<3.7"
version = ">=0.2.0"
[package.extras]
dev = ["codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "isort (>=4.3.20)", "tox (>=3.9.0)", "tox-travis (>=0.12)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "Sphinx (>=2.2.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "black (>=19.3b0)"]
[[package]]
category = "main"
description = "Safely add untrusted strings to HTML/XML markup."
@@ -259,19 +285,21 @@ six = ">=1.0.0,<2.0.0"
[[package]]
category = "main"
description = "Object-oriented filesystem paths"
marker = "python_version < \"3\""
name = "pathlib2"
description = "Data validation and settings management using python 3.6 type hinting"
name = "pydantic"
optional = false
python-versions = "*"
version = "2.3.5"
python-versions = ">=3.6"
version = "1.4"
[package.dependencies]
six = "*"
[package.dependencies.dataclasses]
python = "<3.7"
version = ">=0.6"
[package.dependencies.scandir]
python = "<3.5"
version = "*"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
typing_extensions = ["typing-extensions (>=3.7.2)"]
[[package]]
category = "main"
@@ -321,15 +349,6 @@ version = "0.9.1"
[package.dependencies]
requests = ">=2.0.1,<3.0.0"
[[package]]
category = "main"
description = "scandir, a better directory iterator and faster os.walk()"
marker = "python_version < \"3\""
name = "scandir"
optional = false
python-versions = "*"
version = "1.10.0"
[[package]]
category = "main"
description = "Python client for Sentry (https://getsentry.com)"
@@ -364,6 +383,17 @@ optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*"
version = "1.13.0"
[[package]]
category = "dev"
description = "The little ASGI library that shines."
name = "starlette"
optional = false
python-versions = ">=3.6"
version = "0.12.9"
[package.extras]
full = ["aiofiles", "graphene", "itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests", "ujson"]
[[package]]
category = "main"
description = "HTTP library with thread-safe connection pooling, file post, and more."
@@ -377,6 +407,38 @@ brotli = ["brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
[[package]]
category = "dev"
description = "The lightning-fast ASGI server."
name = "uvicorn"
optional = false
python-versions = "*"
version = "0.11.3"
[package.dependencies]
click = ">=7.0.0,<8.0.0"
h11 = ">=0.8,<0.10"
httptools = ">=0.1.0,<0.2.0"
uvloop = ">=0.14.0"
websockets = ">=8.0.0,<9.0.0"
[[package]]
category = "dev"
description = "Fast implementation of asyncio event loop on top of libuv"
marker = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\""
name = "uvloop"
optional = false
python-versions = "*"
version = "0.14.0"
[[package]]
category = "dev"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
name = "websockets"
optional = false
python-versions = ">=3.6"
version = "8.0.2"
[[package]]
category = "dev"
description = "The comprehensive WSGI web application library."
@@ -390,6 +452,18 @@ dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-i
termcolor = ["termcolor"]
watchdog = ["watchdog"]
[[package]]
category = "main"
description = "A small Python utility to set file creation time on Windows"
marker = "sys_platform == \"win32\""
name = "win32-setctime"
optional = false
python-versions = ">=3.5"
version = "1.0.1"
[package.extras]
dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
[[package]]
category = "main"
description = "Backport of pathlib-compatible object wrapper for zip files"
@@ -407,10 +481,14 @@ docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
testing = ["pathlib2", "contextlib2", "unittest2"]
[metadata]
content-hash = "843527171063a252e1b210f82037020d68f8aaed542c2d878e92d6c7e951a5f5"
python-versions = "~2.7 || ^3.5"
content-hash = "57ff78f24ca37a3421d5c64007bd71eba394d6751fdbb2d0b446f523cfed9c62"
python-versions = "^3.6"
[metadata.files]
aiocontextvars = [
{file = "aiocontextvars-0.2.2-py2.py3-none-any.whl", hash = "sha256:885daf8261818767d8f7cbd79f9d4482d118f024b6586ef6e67980236a27bfa3"},
{file = "aiocontextvars-0.2.2.tar.gz", hash = "sha256:f027372dc48641f683c559f247bd84962becaacdc9ba711d583c3871fb5652aa"},
]
attrs = [
{file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
{file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
@@ -431,17 +509,8 @@ colorama = [
{file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"},
{file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"},
]
colorlog = [
{file = "colorlog-4.0.2-py2.py3-none-any.whl", hash = "sha256:450f52ea2a2b6ebb308f034ea9a9b15cea51e65650593dca1da3eb792e4e4981"},
{file = "colorlog-4.0.2.tar.gz", hash = "sha256:3cf31b25cbc8f86ec01fef582ef3b840950dea414084ed19ab922c8b493f9b42"},
]
configparser = [
{file = "configparser-4.0.2-py2.py3-none-any.whl", hash = "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c"},
{file = "configparser-4.0.2.tar.gz", hash = "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"},
]
contextlib2 = [
{file = "contextlib2-0.6.0.post1-py2.py3-none-any.whl", hash = "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b"},
{file = "contextlib2-0.6.0.post1.tar.gz", hash = "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e"},
contextvars = [
{file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"},
]
coverage = [
{file = "coverage-4.5.4-cp26-cp26m-macosx_10_12_x86_64.whl", hash = "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28"},
@@ -477,11 +546,13 @@ coverage = [
{file = "coverage-4.5.4-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5"},
{file = "coverage-4.5.4.tar.gz", hash = "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c"},
]
enum34 = [
{file = "enum34-1.1.6-py2-none-any.whl", hash = "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79"},
{file = "enum34-1.1.6-py3-none-any.whl", hash = "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a"},
{file = "enum34-1.1.6.tar.gz", hash = "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"},
{file = "enum34-1.1.6.zip", hash = "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850"},
dataclasses = [
{file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"},
{file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"},
]
fastapi = [
{file = "fastapi-0.49.2-py3-none-any.whl", hash = "sha256:e3b479c61d8a02ec6c80ebbc2ee2d621a32855ffffedd55fd5f2993c6dbdcc1e"},
{file = "fastapi-0.49.2.tar.gz", hash = "sha256:68395725aac4342896b4f9aa335c7e7fb773b565df7f96e964e24bffb84dc5a3"},
]
filetype = [
{file = "filetype-1.0.5-py2.py3-none-any.whl", hash = "sha256:4967124d982a71700d94a08c49c4926423500e79382a92070f5ab248d44fe461"},
@@ -491,21 +562,50 @@ flask = [
{file = "Flask-0.12.4-py2.py3-none-any.whl", hash = "sha256:6c02dbaa5a9ef790d8219bdced392e2d549c10cd5a5ba4b6aa65126b2271af29"},
{file = "Flask-0.12.4.tar.gz", hash = "sha256:2ea22336f6d388b4b242bc3abf8a01244a8aa3e236e7407469ef78c16ba355dd"},
]
functools32 = [
{file = "functools32-3.2.3-2.tar.gz", hash = "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"},
{file = "functools32-3.2.3-2.zip", hash = "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0"},
]
future = [
{file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
h11 = [
{file = "h11-0.9.0-py2.py3-none-any.whl", hash = "sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"},
{file = "h11-0.9.0.tar.gz", hash = "sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1"},
]
har2case = [
{file = "har2case-0.3.1-py2.py3-none-any.whl", hash = "sha256:84d3a5cc9fbb16e45372e7e880a936c59bbe8e9b66bad81927769e64f608e2af"},
{file = "har2case-0.3.1.tar.gz", hash = "sha256:8f159ec7cba82ec4282f46af4a9dac89f65e62796521b2426d3c89c3c9fd8579"},
]
httptools = [
{file = "httptools-0.1.1-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce"},
{file = "httptools-0.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4"},
{file = "httptools-0.1.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:86c6acd66765a934e8730bf0e9dfaac6fdcf2a4334212bd4a0a1c78f16475ca6"},
{file = "httptools-0.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bc3114b9edbca5a1eb7ae7db698c669eb53eb8afbbebdde116c174925260849c"},
{file = "httptools-0.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:ac0aa11e99454b6a66989aa2d44bca41d4e0f968e395a0a8f164b401fefe359a"},
{file = "httptools-0.1.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:96da81e1992be8ac2fd5597bf0283d832287e20cb3cfde8996d2b00356d4e17f"},
{file = "httptools-0.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:56b6393c6ac7abe632f2294da53f30d279130a92e8ae39d8d14ee2e1b05ad1f2"},
{file = "httptools-0.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:96eb359252aeed57ea5c7b3d79839aaa0382c9d3149f7d24dd7172b1bcecb009"},
{file = "httptools-0.1.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:fea04e126014169384dee76a153d4573d90d0cbd1d12185da089f73c78390437"},
{file = "httptools-0.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3592e854424ec94bd17dc3e0c96a64e459ec4147e6d53c0a42d0ebcef9cb9c5d"},
{file = "httptools-0.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a4b1b2012b28e68306575ad14ad5e9120b34fccd02a81eb08838d7e3bbb48be"},
{file = "httptools-0.1.1.tar.gz", hash = "sha256:41b573cf33f64a8f8f3400d0a7faf48e1888582b6f6e02b82b9bd4f0bf7497ce"},
]
idna = [
{file = "idna-2.8-py2.py3-none-any.whl", hash = "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"},
{file = "idna-2.8.tar.gz", hash = "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"},
]
immutables = [
{file = "immutables-0.11-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:bce27277a2fe91509cca69181971ab509c2ee862e8b37b09f26b64f90e8fe8fb"},
{file = "immutables-0.11-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c7eb2d15c35c73bb168c002c6ea145b65f40131e10dede54b39db0b72849b280"},
{file = "immutables-0.11-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:2de2ec8dde1ca154f811776a8cbbeaea515c3b226c26036eab6484530eea28e0"},
{file = "immutables-0.11-cp35-cp35m-win32.whl", hash = "sha256:e87bd941cb4dfa35f16e1ff4b2d99a2931452dcc9cfd788dc8fe513f3d38551e"},
{file = "immutables-0.11-cp35-cp35m-win_amd64.whl", hash = "sha256:0aa055c745510238cbad2f1f709a37a1c9e30a38594de3b385e9876c48a25633"},
{file = "immutables-0.11-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:422c7d4c75c88057c625e32992248329507bca180b48cfb702b4ef608f581b50"},
{file = "immutables-0.11-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f5b93248552c9e7198558776da21c9157d3f70649905d7fdc083c2ab2fbc6088"},
{file = "immutables-0.11-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b268422a5802fbf934152b835329ac0d23b80b558eaee68034d45718edab4a11"},
{file = "immutables-0.11-cp36-cp36m-win32.whl", hash = "sha256:0f07c58122e1ce70a7165e68e18e795ac5fe94d7fee3e045ffcf6432602026df"},
{file = "immutables-0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:b8fed714f1c84a3242c7184838f5e9889139a22bbdd701a182b7fdc237ca3cbb"},
{file = "immutables-0.11-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:518f20945c1f600b618fb691922c2ab43b193f04dd2d4d2823220d0202014670"},
{file = "immutables-0.11-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2c536ff2bafeeff9a7865ea10a17a50f90b80b585e31396c349e8f57b0075bd4"},
{file = "immutables-0.11-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1c2e729aab250be0de0c13fa833241a778b51390ee2650e0457d1e45b318c441"},
{file = "immutables-0.11-cp37-cp37m-win32.whl", hash = "sha256:545186faab9237c102b8bcffd36d71f0b382174c93c501e061de239753cff694"},
{file = "immutables-0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:6b6d8d035e5888baad3db61dfb167476838a63afccecd927c365f228bb55754c"},
{file = "immutables-0.11.tar.gz", hash = "sha256:d6850578a0dc6530ac19113cfe4ddc13903df635212d498f176fe601a8a5a4a3"},
]
importlib-metadata = [
{file = "importlib_metadata-1.3.0-py2.py3-none-any.whl", hash = "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"},
{file = "importlib_metadata-1.3.0.tar.gz", hash = "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45"},
@@ -525,6 +625,10 @@ jsonschema = [
{file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"},
{file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"},
]
loguru = [
{file = "loguru-0.4.1-py3-none-any.whl", hash = "sha256:074b3caa6748452c1e4f2b302093c94b65d5a4c5a4d7743636b4121e06437b0e"},
{file = "loguru-0.4.1.tar.gz", hash = "sha256:a6101fd435ac89ba5205a105a26a6ede9e4ddbb4408a6e167852efca47806d11"},
]
markupsafe = [
{file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"},
{file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"},
@@ -560,9 +664,21 @@ more-itertools = [
{file = "more_itertools-5.0.0-py2-none-any.whl", hash = "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc"},
{file = "more_itertools-5.0.0-py3-none-any.whl", hash = "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"},
]
pathlib2 = [
{file = "pathlib2-2.3.5-py2.py3-none-any.whl", hash = "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db"},
{file = "pathlib2-2.3.5.tar.gz", hash = "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"},
pydantic = [
{file = "pydantic-1.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:07911aab70f3bc52bb845ce1748569c5e70478ac977e106a150dd9d0465ebf04"},
{file = "pydantic-1.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:012c422859bac2e03ab3151ea6624fecf0e249486be7eb8c6ee69c91740c6752"},
{file = "pydantic-1.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:61d22d36808087d3184ed6ac0d91dd71c533b66addb02e4a9930e1e30833202f"},
{file = "pydantic-1.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f863456d3d4bf817f2e5248553dee3974c5dc796f48e6ddb599383570f4215ac"},
{file = "pydantic-1.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:bbbed364376f4a0aebb9ea452ff7968b306499a9e74f4db69b28ff2cd4043a11"},
{file = "pydantic-1.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e27559cedbd7f59d2375bfd6eea29a330ea1a5b0589c34d6b4e0d7bec6027bbf"},
{file = "pydantic-1.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:50e4e948892a6815649ad5a9a9379ad1e5f090f17842ac206535dfaed75c6f2f"},
{file = "pydantic-1.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:8848b4eb458469739126e4c1a202d723dd092e087f8dbe3104371335f87ba5df"},
{file = "pydantic-1.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:831a0265a9e3933b3d0f04d1a81bba543bafbe4119c183ff2771871db70524ab"},
{file = "pydantic-1.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:47b8db7024ba3d46c3d4768535e1cf87b6c8cf92ccd81e76f4e1cb8ee47688b3"},
{file = "pydantic-1.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:51f11c8bbf794a68086540da099aae4a9107447c7a9d63151edbb7d50110cf21"},
{file = "pydantic-1.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:6100d7862371115c40be55cc4b8d766a74b1d0dbaf99dbfe72bb4bac0faf89ed"},
{file = "pydantic-1.4-py36.py37.py38-none-any.whl", hash = "sha256:72184c1421103cca128300120f8f1185fb42a9ea73a1c9845b1c53db8c026a7d"},
{file = "pydantic-1.4.tar.gz", hash = "sha256:f17ec336e64d4583311249fb179528e9a2c27c8a2eaf590ec6ec2c6dece7cb3f"},
]
pyrsistent = [
{file = "pyrsistent-0.15.6.tar.gz", hash = "sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b"},
@@ -588,19 +704,6 @@ requests-toolbelt = [
{file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"},
{file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
]
scandir = [
{file = "scandir-1.10.0-cp27-cp27m-win32.whl", hash = "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188"},
{file = "scandir-1.10.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"},
{file = "scandir-1.10.0-cp34-cp34m-win32.whl", hash = "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f"},
{file = "scandir-1.10.0-cp34-cp34m-win_amd64.whl", hash = "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e"},
{file = "scandir-1.10.0-cp35-cp35m-win32.whl", hash = "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f"},
{file = "scandir-1.10.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32"},
{file = "scandir-1.10.0-cp36-cp36m-win32.whl", hash = "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022"},
{file = "scandir-1.10.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4"},
{file = "scandir-1.10.0-cp37-cp37m-win32.whl", hash = "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173"},
{file = "scandir-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d"},
{file = "scandir-1.10.0.tar.gz", hash = "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae"},
]
sentry-sdk = [
{file = "sentry-sdk-0.13.5.tar.gz", hash = "sha256:c6b919623e488134a728f16326c6f0bcdab7e3f59e7f4c472a90eea4d6d8fe82"},
{file = "sentry_sdk-0.13.5-py2.py3-none-any.whl", hash = "sha256:05285942901d38c7ce2498aba50d8e87b361fc603281a5902dda98f3f8c5e145"},
@@ -609,14 +712,49 @@ six = [
{file = "six-1.13.0-py2.py3-none-any.whl", hash = "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd"},
{file = "six-1.13.0.tar.gz", hash = "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"},
]
starlette = [
{file = "starlette-0.12.9.tar.gz", hash = "sha256:c2ac9a42e0e0328ad20fe444115ac5e3760c1ee2ac1ff8cdb5ec915c4a453411"},
]
urllib3 = [
{file = "urllib3-1.25.7-py2.py3-none-any.whl", hash = "sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293"},
{file = "urllib3-1.25.7.tar.gz", hash = "sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745"},
]
uvicorn = [
{file = "uvicorn-0.11.3-py3-none-any.whl", hash = "sha256:0f58170165c4495f563d8224b2f415a0829af0412baa034d6f777904613087fd"},
{file = "uvicorn-0.11.3.tar.gz", hash = "sha256:6fdaf8e53bf1b2ddf0fe9ed06079b5348d7d1d87b3365fe2549e6de0d49e631c"},
]
uvloop = [
{file = "uvloop-0.14.0-cp35-cp35m-macosx_10_11_x86_64.whl", hash = "sha256:08b109f0213af392150e2fe6f81d33261bb5ce968a288eb698aad4f46eb711bd"},
{file = "uvloop-0.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:4544dcf77d74f3a84f03dd6278174575c44c67d7165d4c42c71db3fdc3860726"},
{file = "uvloop-0.14.0-cp36-cp36m-macosx_10_11_x86_64.whl", hash = "sha256:b4f591aa4b3fa7f32fb51e2ee9fea1b495eb75b0b3c8d0ca52514ad675ae63f7"},
{file = "uvloop-0.14.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f07909cd9fc08c52d294b1570bba92186181ca01fe3dc9ffba68955273dd7362"},
{file = "uvloop-0.14.0-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:afd5513c0ae414ec71d24f6f123614a80f3d27ca655a4fcf6cabe50994cc1891"},
{file = "uvloop-0.14.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e7514d7a48c063226b7d06617cbb12a14278d4323a065a8d46a7962686ce2e95"},
{file = "uvloop-0.14.0-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:bcac356d62edd330080aed082e78d4b580ff260a677508718f88016333e2c9c5"},
{file = "uvloop-0.14.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4315d2ec3ca393dd5bc0b0089d23101276778c304d42faff5dc4579cb6caef09"},
{file = "uvloop-0.14.0.tar.gz", hash = "sha256:123ac9c0c7dd71464f58f1b4ee0bbd81285d96cdda8bc3519281b8973e3a461e"},
]
websockets = [
{file = "websockets-8.0.2-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:e906128532a14b9d264a43eb48f9b3080d53a9bda819ab45bf56b8039dc606ac"},
{file = "websockets-8.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:83e63aa73331b9ca21af61df8f115fb5fbcba3f281bee650a4ad16a40cd1ef15"},
{file = "websockets-8.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e9102043a81cdc8b7c8032ff4bce39f6229e4ac39cb2010946c912eeb84e2cb6"},
{file = "websockets-8.0.2-cp36-cp36m-win32.whl", hash = "sha256:8d7a20a2f97f1e98c765651d9fb9437201a9ccc2c70e94b0270f1c5ef29667a3"},
{file = "websockets-8.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:c82e286555f839846ef4f0fdd6910769a577952e1e26aa8ee7a6f45f040e3c2b"},
{file = "websockets-8.0.2-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:73ce69217e4655783ec72ce11c151053fcbd5b837cc39de7999e19605182e28a"},
{file = "websockets-8.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8c77f7d182a6ea2a9d09c2612059f3ad859a90243e899617137ee3f6b7f2b584"},
{file = "websockets-8.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a7affaeffbc5d55681934c16bb6b8fc82bb75b175e7fd4dcca798c938bde8dda"},
{file = "websockets-8.0.2-cp37-cp37m-win32.whl", hash = "sha256:f5cb2683367e32da6a256b60929a3af9c29c212b5091cf5bace9358d03011bf5"},
{file = "websockets-8.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:049e694abe33f8a1d99969fee7bfc0ae6761f7fd5f297c58ea933b27dd6805f2"},
{file = "websockets-8.0.2.tar.gz", hash = "sha256:882a7266fa867a2ebb2c0baaa0f9159cabf131cf18c1b4270d79ad42f9208dc5"},
]
werkzeug = [
{file = "Werkzeug-0.16.0-py2.py3-none-any.whl", hash = "sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4"},
{file = "Werkzeug-0.16.0.tar.gz", hash = "sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7"},
]
win32-setctime = [
{file = "win32_setctime-1.0.1-py3-none-any.whl", hash = "sha256:568fd636c68350bcc54755213fe01966fe0a6c90b386c0776425944a0382abef"},
{file = "win32_setctime-1.0.1.tar.gz", hash = "sha256:b47e5023ec7f0b4962950902b15bc56464a380d869f59d27dbf9ab423b23e8f9"},
]
zipp = [
{file = "zipp-0.6.0-py2.py3-none-any.whl", hash = "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"},
{file = "zipp-0.6.0.tar.gz", hash = "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e"},

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "httprunner"
version = "2.5.7"
version = "3.0.0-alpha"
description = "One-stop solution for HTTP(S) testing."
license = "Apache-2.0"
readme = "README.md"
@@ -20,8 +20,6 @@ classifiers = [
"Operating System :: MacOS",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8"
@@ -30,24 +28,24 @@ classifiers = [
include = ["docs/CHANGELOG.md"]
[tool.poetry.dependencies]
python = "~2.7 || ^3.5"
python = "^3.6"
requests = "^2.22.0"
requests-toolbelt = "^0.9.1"
pyyaml = "^5.1.2"
jinja2 = "^2.10.3"
har2case = "^0.3.1"
colorama = "^0.4.1"
colorlog = "^4.0.2"
filetype = "^1.0.5"
jsonpath = "^0.82"
sentry-sdk = "^0.13.5"
future = { version = "^0.18.1", python = "~2.7" }
enum34 = { version = "^1.1.6", python = "~2.7" }
jsonschema = "^3.2.0"
pydantic = "^1.4"
loguru = "^0.4.1"
[tool.poetry.dev-dependencies]
flask = "<1.0.0"
coverage = "^4.5.4"
uvicorn = "^0.11.3"
fastapi = "^0.49.0"
[tool.poetry.scripts]
hrun = "httprunner.cli:main"

View File

@@ -470,7 +470,6 @@ class TestHttpRunner(ApiServerUnittest):
self.assertEqual(summary["details"][1]["stat"]["total"], 1)
self.assertEqual(summary["details"][2]["stat"]["total"], 1)
def test_run_testcase_hardcode(self):
for testcase_file_path in self.testcase_file_path_list:
summary = self.runner.run(testcase_file_path)
@@ -479,7 +478,6 @@ class TestHttpRunner(ApiServerUnittest):
self.assertEqual(summary["stat"]["teststeps"]["total"], 3)
self.assertEqual(summary["stat"]["teststeps"]["successes"], 3)
def test_run_testcase_template_variables(self):
testcase_file_path = os.path.join(
os.getcwd(), 'tests/data/demo_testcase_variables.yml')

View File

@@ -56,10 +56,6 @@ class TestHttpClient(ApiServerUnittest):
def test_request_with_cookies(self):
url = "{}/api/users/1000".format(self.host)
data = {
'name': 'user1',
'password': '123456'
}
cookies = {
"a": "1",
"b": "2"
@@ -70,7 +66,6 @@ class TestHttpClient(ApiServerUnittest):
def test_request_redirect(self):
url = "{}/redirect-to?url=https%3A%2F%2Fgithub.com&status_code=302".format(HTTPBIN_SERVER)
headers = {"accept: text/html"}
cookies = {
"a": "1",
"b": "2"

View File

@@ -1,7 +1,6 @@
import requests
from httprunner import exceptions, response
from httprunner.compat import basestring, bytes
from tests.api_server import HTTPBIN_SERVER
from tests.base import ApiServerUnittest
@@ -257,7 +256,7 @@ class TestResponse(ApiServerUnittest):
]
extract_binds_dict = resp_obj.extract_response(extract_binds_list)
self.assertIsInstance(extract_binds_dict["resp_content"], basestring)
self.assertIsInstance(extract_binds_dict["resp_content"], str)
self.assertIn("httpbin.org", extract_binds_dict["resp_content"])
extract_binds_list = [

tests/test_schema.py Normal file
View File