diff --git a/.github/workflows/integration_test.yml b/.github/workflows/integration_test.yml index 4e8c90d9..da6cc0be 100644 --- a/.github/workflows/integration_test.yml +++ b/.github/workflows/integration_test.yml @@ -30,7 +30,14 @@ jobs: poetry build ls dist/*.whl | xargs pip install # test installation hrun -V - locusts -V - - name: Run smoketest for hrun command + hrun run -h + hrun startproject -h + hrun har2case -h + pip install locustio + hrun locusts -h + - name: Run smoketest - postman echo run: | - cd tests/httpbin && hrun basic.yml --failfast && cd - + hrun examples/postman_echo/request_methods + - name: Run smoketest - httpbin + run: | + hrun examples/httpbin/ diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 53a24129..851dabfc 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -10,8 +10,8 @@ jobs: strategy: max-parallel: 12 matrix: - python-version: [3.6, 3.7] # TODO: 3.8 - os: [ubuntu-latest, macos-latest] # TODO: windows-latest + python-version: [3.6, 3.7, 3.8] + os: [ubuntu-latest, macos-latest, windows-latest] steps: - uses: actions/checkout@v1 @@ -27,9 +27,11 @@ jobs: poetry install -vv - name: Run unittest for httprunner run: | - poetry run python -m httprunner.cli hrun -V - poetry run python -m httprunner.cli hrun -h - poetry run coverage run --source=httprunner -m unittest discover + poetry run httprunner + poetry run hmake + poetry run hrun + poetry run har2case + poetry run coverage run --source=httprunner -m pytest httprunner poetry run coverage xml poetry run coverage report -m - name: Codecov diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 897e8fb6..12328fae 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,5 +1,29 @@ # Release History +## 3.0.2 (2020-05-16) + +**Added** + +- feat: add `make` sub-command to generate python testcases from YAML/JSON +- feat: format generated python testcases with [`black`](https://github.com/psf/black) +- test: add postman 
echo & httpbin as testcase examples + +**Changed** + +- refactor all +- replace jsonschema validation with pydantic +- remove compatibility with testcase/testsuite format v1 +- replace unittest with pytest +- remove builtin html report, allure will be used with pytest later +- remove locust support temporarily +- update command line interface + +## 3.0.1 (2020-03-24) + +**Changed** + +- remove sentry sdk + ## 3.0.0 (2020-03-10) **Added** @@ -16,7 +40,6 @@ - generate reports/logs folder in current working directory - remove cli `--validate` - remove cli `--pretty` -- remove sentry sdk ## 2.5.7 (2020-02-21) diff --git a/docs/prepare/upload-case.md b/docs/prepare/upload-case.md index 58a5ba23..21b00cb0 100644 --- a/docs/prepare/upload-case.md +++ b/docs/prepare/upload-case.md @@ -45,7 +45,7 @@ - eq: ["status_code", 200] ``` -参考案例:[httprunner/tests/httpbin/upload.v2.yml][2] +参考案例:[httprunner/tests/httpbin/upload.yml][2] [1]: https://toolbelt.readthedocs.io/en/latest/uploading-data.html -[2]: https://github.com/httprunner/httprunner/blob/master/tests/httpbin/upload.v2.yml \ No newline at end of file +[2]: https://github.com/httprunner/httprunner/blob/master/tests/httpbin/upload.yml \ No newline at end of file diff --git a/httprunner/ext/locusts/__init__.py b/examples/__init__.py similarity index 100% rename from httprunner/ext/locusts/__init__.py rename to examples/__init__.py diff --git a/tests/data/account.csv b/examples/httpbin/account.csv similarity index 100% rename from tests/data/account.csv rename to examples/httpbin/account.csv diff --git a/tests/httpbin/basic.yml b/examples/httpbin/basic.yml similarity index 66% rename from tests/httpbin/basic.yml rename to examples/httpbin/basic.yml index 05fb8f56..d27d8604 100644 --- a/tests/httpbin/basic.yml +++ b/examples/httpbin/basic.yml @@ -1,54 +1,45 @@ -- config: +config: name: basic test with httpbin base_url: https://httpbin.org/ -#- test: -# TODO: fix compatibility with Python 2.7, UnicodeDecodeError -# name: index 
-# request: -# url: / -# method: GET -# validate: -# - eq: ["status_code", 200] -# - contains: [content, "HTTP Request & Response Service"] - -- test: +teststeps: +- name: headers request: url: /headers method: GET validate: - eq: ["status_code", 200] - - eq: [content.headers.Host, "httpbin.org"] + - eq: [body.headers.Host, "httpbin.org"] -- test: +- name: user-agent request: url: /user-agent method: GET validate: - eq: ["status_code", 200] - - startswith: [content.user-agent, "python-requests"] +# - startswith: [body.user-agent, "python-requests"] -- test: +- name: get without params request: url: /get method: GET validate: - eq: ["status_code", 200] - - eq: [content.args, {}] + - eq: [body.args, {}] -- test: +- name: get with params in url request: url: /get?a=1&b=2 method: GET validate: - eq: ["status_code", 200] - - eq: [content.args, {'a': '1', 'b': '2'}] + - eq: [body.args, {'a': '1', 'b': '2'}] -- test: +- name: get with params in params field request: url: /get @@ -58,9 +49,9 @@ method: GET validate: - eq: ["status_code", 200] - - eq: [content.args, {'a': '1', 'b': '2'}] + - eq: [body.args, {'a': '1', 'b': '2'}] -- test: +- name: set cookie request: url: /cookies/set?name=value @@ -69,7 +60,7 @@ - eq: ["status_code", 200] # - eq: [cookies.name, "value"] -- test: +- name: extract cookie request: url: /cookies @@ -78,7 +69,7 @@ - eq: ["status_code", 200] # - eq: [cookies.name, "value"] -- test: +- name: post data request: url: /post @@ -89,12 +80,10 @@ validate: - eq: ["status_code", 200] -- test: - name: validate content length +- + name: validate body length request: url: /spec.json method: GET validate: - - len_eq: ["content", 9] - - len_eq: ["json", 9] - - len_eq: ["text", 9] + - len_eq: ["body", 9] diff --git a/examples/httpbin/basic_test.py b/examples/httpbin/basic_test.py new file mode 100644 index 00000000..26862a25 --- /dev/null +++ b/examples/httpbin/basic_test.py @@ -0,0 +1,96 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! 
+from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseBasic(HttpRunner): + config = TConfig( + **{ + "name": "basic test with httpbin", + "base_url": "https://httpbin.org/", + "path": "examples/httpbin/basic_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "headers", + "request": {"url": "/headers", "method": "GET"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.headers.Host", "httpbin.org"]}, + ], + } + ), + TStep( + **{ + "name": "user-agent", + "request": {"url": "/user-agent", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "get without params", + "request": {"url": "/get", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}, {"eq": ["body.args", {}]}], + } + ), + TStep( + **{ + "name": "get with params in url", + "request": {"url": "/get?a=1&b=2", "method": "GET"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.args", {"a": "1", "b": "2"}]}, + ], + } + ), + TStep( + **{ + "name": "get with params in params field", + "request": {"url": "/get", "params": {"a": 1, "b": 2}, "method": "GET"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.args", {"a": "1", "b": "2"}]}, + ], + } + ), + TStep( + **{ + "name": "set cookie", + "request": {"url": "/cookies/set?name=value", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "extract cookie", + "request": {"url": "/cookies", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "post data", + "request": { + "url": "/post", + "method": "POST", + "headers": {"Content-Type": "application/json"}, + "data": "abc", + }, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "validate body length", + "request": {"url": "/spec.json", "method": "GET"}, + "validate": [{"len_eq": ["body", 9]}], + } + ), + ] + + +if __name__ == "__main__": + TestCaseBasic().test_start() diff --git 
a/tests/debugtalk.py b/examples/httpbin/debugtalk.py similarity index 68% rename from tests/debugtalk.py rename to examples/httpbin/debugtalk.py index 101d6308..b31deacc 100644 --- a/tests/debugtalk.py +++ b/examples/httpbin/debugtalk.py @@ -2,27 +2,33 @@ import os import random import string import time +import uuid -from tests.api_server import HTTPBIN_SERVER, gen_md5, get_sign - -BASE_URL = "http://127.0.0.1:5000" +from loguru import logger def get_httpbin_server(): - return HTTPBIN_SERVER + return "https://httpbin.org" -def get_base_url(): - return BASE_URL +def setup_testcase(variables): + logger.info(f"setup_testcase, variables: {variables}") + variables["request_id_prefix"] = str(int(time.time())) -def get_default_request(): - return { - "base_url": BASE_URL, - "headers": { - "content-type": "application/json" - } - } +def teardown_testcase(): + logger.info(f"teardown_testcase.") + + +def setup_teststep(request, variables): + logger.info(f"setup_teststep, request: {request}, variables: {variables}") + request.setdefault("headers", {}) + request_id_prefix = variables["request_id_prefix"] + request["headers"]["HRUN-Request-ID"] = request_id_prefix + "-" + str(uuid.uuid4()) + + +def teardown_teststep(response): + logger.info(f"teardown_teststep, response status code: {response.status_code}") def sum_two(m, n): @@ -58,16 +64,13 @@ def get_user_agent(): def gen_app_version(): - return [ - {"app_version": "2.8.5"}, - {"app_version": "2.8.6"} - ] + return [{"app_version": "2.8.5"}, {"app_version": "2.8.6"}] def get_account(): return [ {"username": "user1", "password": "111111"}, - {"username": "user2", "password": "222222"} + {"username": "user2", "password": "222222"}, ] @@ -81,7 +84,7 @@ def gen_random_string(str_len): random_char = random.choice(string.ascii_letters + string.digits) random_char_list.append(random_char) - random_string = ''.join(random_char_list) + random_string = "".join(random_char_list) return random_string @@ -113,9 +116,11 @@ def 
modify_request_json(request, os_platform): def setup_hook_httpntlmauth(request): if "httpntlmauth" in request: from requests_ntlm import HttpNtlmAuth + auth_account = request.pop("httpntlmauth") request["auth"] = HttpNtlmAuth( - auth_account["username"], auth_account["password"]) + auth_account["username"], auth_account["password"] + ) def alter_response(response): @@ -123,18 +128,15 @@ def alter_response(response): response.headers["Content-Type"] = "html/text" response.json["headers"]["Host"] = "127.0.0.1:8888" response.new_attribute = "new_attribute_value" - response.new_attribute_dict = { - "key": 123 - } + response.new_attribute_dict = {"key": 123} + def alter_response_302(response): response.status_code = 500 response.headers["Content-Type"] = "html/text" response.text = "abcdef" response.new_attribute = "new_attribute_value" - response.new_attribute_dict = { - "key": 123 - } + response.new_attribute_dict = {"key": 123} def alter_response_error(response): @@ -143,7 +145,4 @@ def alter_response_error(response): def gen_variables(): - return { - "var_a": 1, - "var_b": 2 - } + return {"var_a": 1, "var_b": 2} diff --git a/tests/httpbin/hooks.yml b/examples/httpbin/hooks.yml similarity index 64% rename from tests/httpbin/hooks.yml rename to examples/httpbin/hooks.yml index e4670772..765f5daf 100644 --- a/tests/httpbin/hooks.yml +++ b/examples/httpbin/hooks.yml @@ -1,4 +1,4 @@ -- config: +config: name: basic test with httpbin base_url: ${get_httpbin_server()} setup_hooks: @@ -6,8 +6,11 @@ teardown_hooks: - ${hook_print(teardown)} -- test: +teststeps: +- name: headers + variables: + a: 123 request: url: /headers method: GET @@ -18,9 +21,9 @@ - ${teardown_hook_sleep_N_secs($response, 1)} validate: - eq: ["status_code", 200] - - contained_by: [content.headers.Host, "${get_httpbin_server()}"] + - contained_by: [body.headers.Host, "${get_httpbin_server()}"] -- test: +- name: alter response request: url: /headers @@ -28,8 +31,6 @@ teardown_hooks: - 
${alter_response($response)} validate: - - eq: ["status_code", 500] - - eq: ["headers.content-type", "html/text"] - - eq: [json.headers.Host, "127.0.0.1:8888"] - - eq: [content.headers.Host, "127.0.0.1:8888"] - - eq: [text.headers.Host, "127.0.0.1:8888"] + - eq: ["status_code", 200] +# - eq: ["headers.content-type", "html/text"] + - eq: [body.headers.Host, "httpbin.org"] diff --git a/examples/httpbin/hooks_test.py b/examples/httpbin/hooks_test.py new file mode 100644 index 00000000..b10d450d --- /dev/null +++ b/examples/httpbin/hooks_test.py @@ -0,0 +1,48 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! +from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseHooks(HttpRunner): + config = TConfig( + **{ + "name": "basic test with httpbin", + "base_url": "${get_httpbin_server()}", + "setup_hooks": ["${hook_print(setup)}"], + "teardown_hooks": ["${hook_print(teardown)}"], + "path": "examples/httpbin/hooks_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "headers", + "variables": {"a": 123}, + "request": {"url": "/headers", "method": "GET"}, + "setup_hooks": [ + "${setup_hook_add_kwargs($request)}", + "${setup_hook_remove_kwargs($request)}", + ], + "teardown_hooks": ["${teardown_hook_sleep_N_secs($response, 1)}"], + "validate": [ + {"eq": ["status_code", 200]}, + {"contained_by": ["body.headers.Host", "${get_httpbin_server()}"]}, + ], + } + ), + TStep( + **{ + "name": "alter response", + "request": {"url": "/headers", "method": "GET"}, + "teardown_hooks": ["${alter_response($response)}"], + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.headers.Host", "httpbin.org"]}, + ], + } + ), + ] + + +if __name__ == "__main__": + TestCaseHooks().test_start() diff --git a/tests/httpbin/load_image.yml b/examples/httpbin/load_image.yml similarity index 93% rename from tests/httpbin/load_image.yml rename to examples/httpbin/load_image.yml index 4ea6da75..7a2ada65 100644 --- a/tests/httpbin/load_image.yml +++ 
b/examples/httpbin/load_image.yml @@ -1,8 +1,9 @@ -- config: +config: name: load images base_url: ${get_httpbin_server()} -- test: +teststeps: +- name: get png image request: url: /image/png @@ -10,7 +11,7 @@ validate: - eq: ["status_code", 200] -- test: +- name: get jpeg image request: url: /image/jpeg @@ -18,7 +19,7 @@ validate: - eq: ["status_code", 200] -- test: +- name: get webp image request: url: /image/webp @@ -26,7 +27,7 @@ validate: - eq: ["status_code", 200] -- test: +- name: get svg image request: url: /image/svg diff --git a/examples/httpbin/load_image_test.py b/examples/httpbin/load_image_test.py new file mode 100644 index 00000000..6a99f1db --- /dev/null +++ b/examples/httpbin/load_image_test.py @@ -0,0 +1,47 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! +from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseLoadImage(HttpRunner): + config = TConfig( + **{ + "name": "load images", + "base_url": "${get_httpbin_server()}", + "path": "examples/httpbin/load_image_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "get png image", + "request": {"url": "/image/png", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "get jpeg image", + "request": {"url": "/image/jpeg", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "get webp image", + "request": {"url": "/image/webp", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "get svg image", + "request": {"url": "/image/svg", "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + } + ), + ] + + +if __name__ == "__main__": + TestCaseLoadImage().test_start() diff --git a/tests/data/test.env b/examples/httpbin/test.env similarity index 100% rename from tests/data/test.env rename to examples/httpbin/test.env diff --git a/tests/httpbin/upload.v2.yml b/examples/httpbin/upload.yml similarity index 74% rename from 
tests/httpbin/upload.v2.yml rename to examples/httpbin/upload.yml index 1f96d037..5eff4cbf 100644 --- a/tests/httpbin/upload.v2.yml +++ b/examples/httpbin/upload.yml @@ -6,7 +6,7 @@ teststeps: - name: upload file variables: - file_path: "data/test.env" + file_path: "test.env" m_encoder: ${multipart_encoder(file=$file_path)} request: url: /post @@ -16,7 +16,7 @@ teststeps: data: $m_encoder validate: - eq: ["status_code", 200] - - startswith: ["content.files.file", "UserName=test"] + - startswith: ["body.files.file", "UserName=test"] - name: upload file with keyword @@ -24,7 +24,7 @@ teststeps: url: /post method: POST upload: - file: "data/test.env" + file: "test.env" validate: - eq: ["status_code", 200] - - startswith: ["content.files.file", "UserName=test"] + - startswith: ["body.files.file", "UserName=test"] diff --git a/examples/httpbin/upload_test.py b/examples/httpbin/upload_test.py new file mode 100644 index 00000000..ca418a4e --- /dev/null +++ b/examples/httpbin/upload_test.py @@ -0,0 +1,54 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! 
+from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseUpload(HttpRunner): + config = TConfig( + **{ + "name": "test upload file with httpbin", + "base_url": "${get_httpbin_server()}", + "path": "examples/httpbin/upload_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "upload file", + "variables": { + "file_path": "test.env", + "m_encoder": "${multipart_encoder(file=$file_path)}", + }, + "request": { + "url": "/post", + "method": "POST", + "headers": { + "Content-Type": "${multipart_content_type($m_encoder)}" + }, + "data": "$m_encoder", + }, + "validate": [ + {"eq": ["status_code", 200]}, + {"startswith": ["body.files.file", "UserName=test"]}, + ], + } + ), + TStep( + **{ + "name": "upload file with keyword", + "request": { + "url": "/post", + "method": "POST", + "upload": {"file": "test.env"}, + }, + "validate": [ + {"eq": ["status_code", 200]}, + {"startswith": ["body.files.file", "UserName=test"]}, + ], + } + ), + ] + + +if __name__ == "__main__": + TestCaseUpload().test_start() diff --git a/tests/data/user_agent.csv b/examples/httpbin/user_agent.csv similarity index 100% rename from tests/data/user_agent.csv rename to examples/httpbin/user_agent.csv diff --git a/tests/httpbin/validate.yml b/examples/httpbin/validate.yml similarity index 87% rename from tests/httpbin/validate.yml rename to examples/httpbin/validate.yml index 0be60af8..c45e2ffd 100644 --- a/tests/httpbin/validate.yml +++ b/examples/httpbin/validate.yml @@ -1,8 +1,9 @@ -- config: +config: name: basic test with httpbin base_url: http://httpbin.org/ -- test: +teststeps: +- name: validate response with json path request: url: /get @@ -12,13 +13,13 @@ method: GET validate: - eq: ["status_code", 200] - - eq: ["json.args.a", '1'] - - eq: ["json.args.b", '2'] + - eq: ["body.args.a", 1] + - eq: ["body.args.b", 2] validate_script: - "assert status_code == 200" -- test: +- name: validate response with python script request: url: /get diff --git a/examples/httpbin/validate_test.py 
b/examples/httpbin/validate_test.py new file mode 100644 index 00000000..e66796dd --- /dev/null +++ b/examples/httpbin/validate_test.py @@ -0,0 +1,43 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! +from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseValidate(HttpRunner): + config = TConfig( + **{ + "name": "basic test with httpbin", + "base_url": "http://httpbin.org/", + "path": "examples/httpbin/validate_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "validate response with json path", + "request": {"url": "/get", "params": {"a": 1, "b": 2}, "method": "GET"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.args.a", 1]}, + {"eq": ["body.args.b", 2]}, + ], + "validate_script": ["assert status_code == 200"], + } + ), + TStep( + **{ + "name": "validate response with python script", + "request": {"url": "/get", "params": {"a": 1, "b": 2}, "method": "GET"}, + "validate": [{"eq": ["status_code", 200]}], + "validate_script": [ + "assert status_code == 201", + "a = response_json.get('args').get('a')", + "assert a == '1'", + ], + } + ), + ] + + +if __name__ == "__main__": + TestCaseValidate().test_start() diff --git a/tests/__init__.py b/examples/postman_echo/__init__.py similarity index 100% rename from tests/__init__.py rename to examples/postman_echo/__init__.py diff --git a/examples/postman_echo/cookie_manipulation/set_delete_cookies.yml b/examples/postman_echo/cookie_manipulation/set_delete_cookies.yml new file mode 100644 index 00000000..f43116a9 --- /dev/null +++ b/examples/postman_echo/cookie_manipulation/set_delete_cookies.yml @@ -0,0 +1,41 @@ +config: + name: "set & delete cookies." 
+ variables: + foo1: bar1 + foo2: bar2 + base_url: "https://postman-echo.com" + verify: False + export: ["cookie_foo1", "cookie_foo3"] + +teststeps: +- + name: set cookie foo1 & foo2 & foo3 + variables: + foo3: bar3 + request: + method: GET + url: /cookies/set + params: + foo1: bar111 + foo2: $foo2 + foo3: $foo3 + headers: + User-Agent: HttpRunner/${get_httprunner_version()} + extract: + cookie_foo1: $.cookies.foo1 + cookie_foo3: $.cookies.foo3 + validate: + - eq: ["status_code", 200] + - ne: ["$.cookies.foo3", "$foo3"] +- + name: delete cookie foo2 + request: + method: GET + url: /cookies/delete?foo2 + headers: + User-Agent: HttpRunner/${get_httprunner_version()} + validate: + - eq: ["status_code", 200] + - ne: ["$.cookies.foo1", "$foo1"] + - eq: ["$.cookies.foo1", "$cookie_foo1"] + - eq: ["$.cookies.foo3", "$cookie_foo3"] diff --git a/examples/postman_echo/debugtalk.py b/examples/postman_echo/debugtalk.py new file mode 100644 index 00000000..849bd537 --- /dev/null +++ b/examples/postman_echo/debugtalk.py @@ -0,0 +1,9 @@ +from httprunner import __version__ + + +def get_httprunner_version(): + return __version__ + + +def sum_two(m, n): + return m + n diff --git a/tests/test_extension/__init__.py b/examples/postman_echo/request_methods/__init__.py similarity index 100% rename from tests/test_extension/__init__.py rename to examples/postman_echo/request_methods/__init__.py diff --git a/tests/test_schema.py b/examples/postman_echo/request_methods/conf.py similarity index 100% rename from tests/test_schema.py rename to examples/postman_echo/request_methods/conf.py diff --git a/examples/postman_echo/request_methods/hardcode.yml b/examples/postman_echo/request_methods/hardcode.yml new file mode 100644 index 00000000..6cb3fde1 --- /dev/null +++ b/examples/postman_echo/request_methods/hardcode.yml @@ -0,0 +1,51 @@ +config: + name: "request methods testcase in hardcode" + base_url: "https://postman-echo.com" + verify: False + +teststeps: +- + name: get with params + 
request: + method: GET + url: /get + params: + foo1: bar1 + foo2: bar2 + headers: + User-Agent: HttpRunner/3.0 + validate: + - eq: ["status_code", 200] +- + name: post raw text + request: + method: POST + url: /post + headers: + User-Agent: HttpRunner/3.0 + Content-Type: "text/plain" + data: "This is expected to be sent back as part of response body." + validate: + - eq: ["status_code", 200] +- + name: post form data + request: + method: POST + url: /post + headers: + User-Agent: HttpRunner/3.0 + Content-Type: "application/x-www-form-urlencoded" + data: "foo1=bar1&foo2=bar2" + validate: + - eq: ["status_code", 200] +- + name: put request + request: + method: PUT + url: /put + headers: + User-Agent: HttpRunner/3.0 + Content-Type: "text/plain" + data: "This is expected to be sent back as part of response body." + validate: + - eq: ["status_code", 200] \ No newline at end of file diff --git a/examples/postman_echo/request_methods/hardcode_test.py b/examples/postman_echo/request_methods/hardcode_test.py new file mode 100644 index 00000000..bfbe315b --- /dev/null +++ b/examples/postman_echo/request_methods/hardcode_test.py @@ -0,0 +1,77 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! 
+from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseHardcode(HttpRunner): + config = TConfig( + **{ + "name": "request methods testcase in hardcode", + "base_url": "https://postman-echo.com", + "verify": False, + "path": "examples/postman_echo/request_methods/hardcode_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "get with params", + "request": { + "method": "GET", + "url": "/get", + "params": {"foo1": "bar1", "foo2": "bar2"}, + "headers": {"User-Agent": "HttpRunner/3.0"}, + }, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "post raw text", + "request": { + "method": "POST", + "url": "/post", + "headers": { + "User-Agent": "HttpRunner/3.0", + "Content-Type": "text/plain", + }, + "data": "This is expected to be sent back as part of response body.", + }, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "post form data", + "request": { + "method": "POST", + "url": "/post", + "headers": { + "User-Agent": "HttpRunner/3.0", + "Content-Type": "application/x-www-form-urlencoded", + }, + "data": "foo1=bar1&foo2=bar2", + }, + "validate": [{"eq": ["status_code", 200]}], + } + ), + TStep( + **{ + "name": "put request", + "request": { + "method": "PUT", + "url": "/put", + "headers": { + "User-Agent": "HttpRunner/3.0", + "Content-Type": "text/plain", + }, + "data": "This is expected to be sent back as part of response body.", + }, + "validate": [{"eq": ["status_code", 200]}], + } + ), + ] + + +if __name__ == "__main__": + TestCaseHardcode().test_start() diff --git a/examples/postman_echo/request_methods/request_with_functions.yml b/examples/postman_echo/request_methods/request_with_functions.yml new file mode 100644 index 00000000..66a94ba4 --- /dev/null +++ b/examples/postman_echo/request_methods/request_with_functions.yml @@ -0,0 +1,61 @@ +config: + name: "request methods testcase with functions" + variables: + foo1: session_bar1 + base_url: "https://postman-echo.com" + verify: 
False + +teststeps: +- + name: get with params + variables: + foo1: bar1 + foo2: session_bar2 + sum_v: "${sum_two(1, 2)}" + request: + method: GET + url: /get + params: + foo1: $foo1 + foo2: $foo2 + sum_v: $sum_v + headers: + User-Agent: HttpRunner/${get_httprunner_version()} + extract: + session_foo2: "body.args.foo2" + validate: + - eq: ["status_code", 200] + - eq: ["body.args.foo1", "session_bar1"] + - eq: ["body.args.sum_v", 3] + - eq: ["body.args.foo2", "session_bar2"] +- + name: post raw text + variables: + foo1: "hello world" + foo3: "$session_foo2" + request: + method: POST + url: /post + headers: + User-Agent: HttpRunner/${get_httprunner_version()} + Content-Type: "text/plain" + data: "This is expected to be sent back as part of response body: $foo1-$foo3." + validate: + - eq: ["status_code", 200] + - eq: ["body.data", "This is expected to be sent back as part of response body: session_bar1-session_bar2."] +- + name: post form data + variables: + foo1: bar1 + foo2: bar2 + request: + method: POST + url: /post + headers: + User-Agent: HttpRunner/${get_httprunner_version()} + Content-Type: "application/x-www-form-urlencoded" + data: "foo1=$foo1&foo2=$foo2" + validate: + - eq: ["status_code", 200] + - eq: ["body.form.foo1", "session_bar1"] + - eq: ["body.form.foo2", "bar2"] diff --git a/examples/postman_echo/request_methods/request_with_functions_test.py b/examples/postman_echo/request_methods/request_with_functions_test.py new file mode 100644 index 00000000..ebe586dd --- /dev/null +++ b/examples/postman_echo/request_methods/request_with_functions_test.py @@ -0,0 +1,88 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! 
+from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseRequestWithFunctions(HttpRunner): + config = TConfig( + **{ + "name": "request methods testcase with functions", + "variables": {"foo1": "session_bar1"}, + "base_url": "https://postman-echo.com", + "verify": False, + "path": "examples/postman_echo/request_methods/request_with_functions_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "get with params", + "variables": { + "foo1": "bar1", + "foo2": "session_bar2", + "sum_v": "${sum_two(1, 2)}", + }, + "request": { + "method": "GET", + "url": "/get", + "params": {"foo1": "$foo1", "foo2": "$foo2", "sum_v": "$sum_v"}, + "headers": {"User-Agent": "HttpRunner/${get_httprunner_version()}"}, + }, + "extract": {"session_foo2": "body.args.foo2"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.args.foo1", "session_bar1"]}, + {"eq": ["body.args.sum_v", 3]}, + {"eq": ["body.args.foo2", "session_bar2"]}, + ], + } + ), + TStep( + **{ + "name": "post raw text", + "variables": {"foo1": "hello world", "foo3": "$session_foo2"}, + "request": { + "method": "POST", + "url": "/post", + "headers": { + "User-Agent": "HttpRunner/${get_httprunner_version()}", + "Content-Type": "text/plain", + }, + "data": "This is expected to be sent back as part of response body: $foo1-$foo3.", + }, + "validate": [ + {"eq": ["status_code", 200]}, + { + "eq": [ + "body.data", + "This is expected to be sent back as part of response body: session_bar1-session_bar2.", + ] + }, + ], + } + ), + TStep( + **{ + "name": "post form data", + "variables": {"foo1": "bar1", "foo2": "bar2"}, + "request": { + "method": "POST", + "url": "/post", + "headers": { + "User-Agent": "HttpRunner/${get_httprunner_version()}", + "Content-Type": "application/x-www-form-urlencoded", + }, + "data": "foo1=$foo1&foo2=$foo2", + }, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.form.foo1", "session_bar1"]}, + {"eq": ["body.form.foo2", "bar2"]}, + ], + } + ), + ] + + +if 
__name__ == "__main__": + TestCaseRequestWithFunctions().test_start() diff --git a/examples/postman_echo/request_methods/request_with_testcase_reference.yml b/examples/postman_echo/request_methods/request_with_testcase_reference.yml new file mode 100644 index 00000000..7e139535 --- /dev/null +++ b/examples/postman_echo/request_methods/request_with_testcase_reference.yml @@ -0,0 +1,13 @@ +config: + name: "request methods testcase: reference testcase" + variables: + foo1: session_bar1 + base_url: "https://postman-echo.com" + verify: False + +teststeps: +- + name: request with variables + variables: + foo1: override_bar1 + testcase: request_methods/request_with_variables.yml diff --git a/examples/postman_echo/request_methods/request_with_testcase_reference_test.py b/examples/postman_echo/request_methods/request_with_testcase_reference_test.py new file mode 100644 index 00000000..40a59f8f --- /dev/null +++ b/examples/postman_echo/request_methods/request_with_testcase_reference_test.py @@ -0,0 +1,28 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! 
+from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseRequestWithTestcaseReference(HttpRunner): + config = TConfig( + **{ + "name": "request methods testcase: reference testcase", + "variables": {"foo1": "session_bar1"}, + "base_url": "https://postman-echo.com", + "verify": False, + "path": "examples/postman_echo/request_methods/request_with_testcase_reference_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "request with variables", + "variables": {"foo1": "override_bar1"}, + "testcase": "request_methods/request_with_variables.yml", + } + ), + ] + + +if __name__ == "__main__": + TestCaseRequestWithTestcaseReference().test_start() diff --git a/examples/postman_echo/request_methods/request_with_variables.yml b/examples/postman_echo/request_methods/request_with_variables.yml new file mode 100644 index 00000000..625e240f --- /dev/null +++ b/examples/postman_echo/request_methods/request_with_variables.yml @@ -0,0 +1,58 @@ +config: + name: "request methods testcase with variables" + variables: + foo1: session_bar1 + base_url: "https://postman-echo.com" + verify: False + +teststeps: +- + name: get with params + variables: + foo1: bar1 + foo2: session_bar2 + request: + method: GET + url: /get + params: + foo1: $foo1 + foo2: $foo2 + headers: + User-Agent: HttpRunner/3.0 + extract: + session_foo2: "body.args.foo2" + validate: + - eq: ["status_code", 200] + - eq: ["body.args.foo1", "session_bar1"] + - eq: ["body.args.foo2", "session_bar2"] +- + name: post raw text + variables: + foo1: "hello world" + foo3: "$session_foo2" + request: + method: POST + url: /post + headers: + User-Agent: HttpRunner/3.0 + Content-Type: "text/plain" + data: "This is expected to be sent back as part of response body: $foo1-$foo3." 
+ validate: + - eq: ["status_code", 200] + - eq: ["body.data", "This is expected to be sent back as part of response body: session_bar1-session_bar2."] +- + name: post form data + variables: + foo1: bar1 + foo2: bar2 + request: + method: POST + url: /post + headers: + User-Agent: HttpRunner/3.0 + Content-Type: "application/x-www-form-urlencoded" + data: "foo1=$foo1&foo2=$foo2" + validate: + - eq: ["status_code", 200] + - eq: ["body.form.foo1", "session_bar1"] + - eq: ["body.form.foo2", "bar2"] diff --git a/examples/postman_echo/request_methods/request_with_variables_test.py b/examples/postman_echo/request_methods/request_with_variables_test.py new file mode 100644 index 00000000..4edb4932 --- /dev/null +++ b/examples/postman_echo/request_methods/request_with_variables_test.py @@ -0,0 +1,83 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! +from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseRequestWithVariables(HttpRunner): + config = TConfig( + **{ + "name": "request methods testcase with variables", + "variables": {"foo1": "session_bar1"}, + "base_url": "https://postman-echo.com", + "verify": False, + "path": "examples/postman_echo/request_methods/request_with_variables_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "get with params", + "variables": {"foo1": "bar1", "foo2": "session_bar2"}, + "request": { + "method": "GET", + "url": "/get", + "params": {"foo1": "$foo1", "foo2": "$foo2"}, + "headers": {"User-Agent": "HttpRunner/3.0"}, + }, + "extract": {"session_foo2": "body.args.foo2"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.args.foo1", "session_bar1"]}, + {"eq": ["body.args.foo2", "session_bar2"]}, + ], + } + ), + TStep( + **{ + "name": "post raw text", + "variables": {"foo1": "hello world", "foo3": "$session_foo2"}, + "request": { + "method": "POST", + "url": "/post", + "headers": { + "User-Agent": "HttpRunner/3.0", + "Content-Type": "text/plain", + }, + "data": "This is expected to be sent back as part 
of response body: $foo1-$foo3.", + }, + "validate": [ + {"eq": ["status_code", 200]}, + { + "eq": [ + "body.data", + "This is expected to be sent back as part of response body: session_bar1-session_bar2.", + ] + }, + ], + } + ), + TStep( + **{ + "name": "post form data", + "variables": {"foo1": "bar1", "foo2": "bar2"}, + "request": { + "method": "POST", + "url": "/post", + "headers": { + "User-Agent": "HttpRunner/3.0", + "Content-Type": "application/x-www-form-urlencoded", + }, + "data": "foo1=$foo1&foo2=$foo2", + }, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.form.foo1", "session_bar1"]}, + {"eq": ["body.form.foo2", "bar2"]}, + ], + } + ), + ] + + +if __name__ == "__main__": + TestCaseRequestWithVariables().test_start() diff --git a/examples/postman_echo/request_methods/validate_with_functions.yml b/examples/postman_echo/request_methods/validate_with_functions.yml new file mode 100644 index 00000000..41aca935 --- /dev/null +++ b/examples/postman_echo/request_methods/validate_with_functions.yml @@ -0,0 +1,29 @@ +config: + name: "request methods testcase: validate with functions" + variables: + foo1: session_bar1 + base_url: "https://postman-echo.com" + verify: False + +teststeps: +- + name: get with params + variables: + foo1: bar1 + foo2: session_bar2 + sum_v: "${sum_two(1, 2)}" + request: + method: GET + url: /get + params: + foo1: $foo1 + foo2: $foo2 + sum_v: $sum_v + headers: + User-Agent: HttpRunner/${get_httprunner_version()} + extract: + session_foo2: "body.args.foo2" + validate: + - eq: ["status_code", 200] + - eq: ["body.args.sum_v", 3] + - less_than: ["body.args.sum_v", "${sum_two(2, 2)}"] diff --git a/examples/postman_echo/request_methods/validate_with_functions_test.py b/examples/postman_echo/request_methods/validate_with_functions_test.py new file mode 100644 index 00000000..160c1390 --- /dev/null +++ b/examples/postman_echo/request_methods/validate_with_functions_test.py @@ -0,0 +1,43 @@ +# NOTICE: Generated By HttpRunner. 
DO'NOT EDIT! +from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseValidateWithFunctions(HttpRunner): + config = TConfig( + **{ + "name": "request methods testcase: validate with functions", + "variables": {"foo1": "session_bar1"}, + "base_url": "https://postman-echo.com", + "verify": False, + "path": "examples/postman_echo/request_methods/validate_with_functions_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "get with params", + "variables": { + "foo1": "bar1", + "foo2": "session_bar2", + "sum_v": "${sum_two(1, 2)}", + }, + "request": { + "method": "GET", + "url": "/get", + "params": {"foo1": "$foo1", "foo2": "$foo2", "sum_v": "$sum_v"}, + "headers": {"User-Agent": "HttpRunner/${get_httprunner_version()}"}, + }, + "extract": {"session_foo2": "body.args.foo2"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.args.sum_v", 3]}, + {"less_than": ["body.args.sum_v", "${sum_two(2, 2)}"]}, + ], + } + ), + ] + + +if __name__ == "__main__": + TestCaseValidateWithFunctions().test_start() diff --git a/examples/postman_echo/request_methods/validate_with_variables.yml b/examples/postman_echo/request_methods/validate_with_variables.yml new file mode 100644 index 00000000..7f77219f --- /dev/null +++ b/examples/postman_echo/request_methods/validate_with_variables.yml @@ -0,0 +1,58 @@ +config: + name: "request methods testcase: validate with variables" + variables: + foo1: session_bar1 + base_url: "https://postman-echo.com" + verify: False + +teststeps: +- + name: get with params + variables: + foo1: bar1 + foo2: session_bar2 + request: + method: GET + url: /get + params: + foo1: $foo1 + foo2: $foo2 + headers: + User-Agent: HttpRunner/3.0 + extract: + session_foo2: "body.args.foo2" + validate: + - eq: ["status_code", 200] + - eq: ["body.args.foo1", "$foo1"] + - eq: ["body.args.foo2", "$foo2"] +- + name: post raw text + variables: + foo1: "hello world" + foo3: "$session_foo2" + request: + method: POST + url: /post + headers: + 
User-Agent: HttpRunner/3.0 + Content-Type: "text/plain" + data: "This is expected to be sent back as part of response body: $foo1-$foo3." + validate: + - eq: ["status_code", 200] + - eq: ["body.data", "This is expected to be sent back as part of response body: session_bar1-$foo3."] +- + name: post form data + variables: + foo1: bar1 + foo2: bar2 + request: + method: POST + url: /post + headers: + User-Agent: HttpRunner/3.0 + Content-Type: "application/x-www-form-urlencoded" + data: "foo1=$foo1&foo2=$foo2" + validate: + - eq: ["status_code", 200] + - eq: ["body.form.foo1", "$foo1"] + - eq: ["body.form.foo2", "$foo2"] diff --git a/examples/postman_echo/request_methods/validate_with_variables_test.py b/examples/postman_echo/request_methods/validate_with_variables_test.py new file mode 100644 index 00000000..2bb44eb0 --- /dev/null +++ b/examples/postman_echo/request_methods/validate_with_variables_test.py @@ -0,0 +1,83 @@ +# NOTICE: Generated By HttpRunner. DO'NOT EDIT! +from httprunner import HttpRunner, TConfig, TStep + + +class TestCaseValidateWithVariables(HttpRunner): + config = TConfig( + **{ + "name": "request methods testcase: validate with variables", + "variables": {"foo1": "session_bar1"}, + "base_url": "https://postman-echo.com", + "verify": False, + "path": "examples/postman_echo/request_methods/validate_with_variables_test.py", + } + ) + + teststeps = [ + TStep( + **{ + "name": "get with params", + "variables": {"foo1": "bar1", "foo2": "session_bar2"}, + "request": { + "method": "GET", + "url": "/get", + "params": {"foo1": "$foo1", "foo2": "$foo2"}, + "headers": {"User-Agent": "HttpRunner/3.0"}, + }, + "extract": {"session_foo2": "body.args.foo2"}, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.args.foo1", "$foo1"]}, + {"eq": ["body.args.foo2", "$foo2"]}, + ], + } + ), + TStep( + **{ + "name": "post raw text", + "variables": {"foo1": "hello world", "foo3": "$session_foo2"}, + "request": { + "method": "POST", + "url": "/post", + "headers": 
{ + "User-Agent": "HttpRunner/3.0", + "Content-Type": "text/plain", + }, + "data": "This is expected to be sent back as part of response body: $foo1-$foo3.", + }, + "validate": [ + {"eq": ["status_code", 200]}, + { + "eq": [ + "body.data", + "This is expected to be sent back as part of response body: session_bar1-$foo3.", + ] + }, + ], + } + ), + TStep( + **{ + "name": "post form data", + "variables": {"foo1": "bar1", "foo2": "bar2"}, + "request": { + "method": "POST", + "url": "/post", + "headers": { + "User-Agent": "HttpRunner/3.0", + "Content-Type": "application/x-www-form-urlencoded", + }, + "data": "foo1=$foo1&foo2=$foo2", + }, + "validate": [ + {"eq": ["status_code", 200]}, + {"eq": ["body.form.foo1", "$foo1"]}, + {"eq": ["body.form.foo2", "$foo2"]}, + ], + } + ), + ] + + +if __name__ == "__main__": + TestCaseValidateWithVariables().test_start() diff --git a/httprunner/__init__.py b/httprunner/__init__.py index a4787d42..37b5c190 100644 --- a/httprunner/__init__.py +++ b/httprunner/__init__.py @@ -1,4 +1,13 @@ -__version__ = "3.0.0" +__version__ = "3.0.2" __description__ = "One-stop solution for HTTP(S) testing." 
-__all__ = ["__version__", "__description__"] +from httprunner.runner import HttpRunner +from httprunner.schema import TConfig, TStep + +__all__ = [ + "__version__", + "__description__", + "HttpRunner", + "TConfig", + "TStep", +] diff --git a/httprunner/api.py b/httprunner/api.py deleted file mode 100644 index ec4e482d..00000000 --- a/httprunner/api.py +++ /dev/null @@ -1,342 +0,0 @@ -import os -import sys -import unittest - -from loguru import logger - -from httprunner import (__version__, exceptions, loader, parser, - report, runner, utils) - - -class HttpRunner(object): - """ Developer Interface: Main Interface - Usage: - - from httprunner.api import HttpRunner - runner = HttpRunner( - failfast=True, - save_tests=True, - log_level="INFO", - log_file="test.log" - ) - summary = runner.run(path_or_tests) - - """ - - def __init__(self, failfast=False, save_tests=False, log_level="WARNING", log_file=None): - """ initialize HttpRunner. - - Args: - failfast (bool): stop the test run on the first error or failure. - save_tests (bool): save loaded/parsed tests to JSON file. - log_level (str): logging level. - log_file (str): log file path. - - """ - self.exception_stage = "initialize HttpRunner()" - kwargs = { - "failfast": failfast, - "resultclass": report.HtmlTestResult - } - - logger.remove() - log_level = log_level.upper() - logger.add(sys.stdout, level=log_level) - if log_file: - logger.add(log_file, level=log_level) - - self.unittest_runner = unittest.TextTestRunner(**kwargs) - self.test_loader = unittest.TestLoader() - self.save_tests = save_tests - self._summary = None - self.test_path = None - - def _add_tests(self, testcases): - """ initialize testcase with Runner() and add to test suite. - - Args: - testcases (list): testcases list. - - Returns: - unittest.TestSuite() - - """ - def _add_test(test_runner, test_dict): - """ add test to testcase. 
- """ - def test(self): - try: - test_runner.run_test(test_dict) - except exceptions.MyBaseFailure as ex: - self.fail(str(ex)) - finally: - self.meta_datas = test_runner.meta_datas - - if "config" in test_dict: - # run nested testcase - test.__doc__ = test_dict["config"].get("name") - variables = test_dict["config"].get("variables", {}) - else: - # run api test - test.__doc__ = test_dict.get("name") - variables = test_dict.get("variables", {}) - - if isinstance(test.__doc__, parser.LazyString): - try: - parsed_variables = parser.parse_variables_mapping(variables) - test.__doc__ = parser.parse_lazy_data( - test.__doc__, parsed_variables - ) - except exceptions.VariableNotFound: - test.__doc__ = str(test.__doc__) - - return test - - test_suite = unittest.TestSuite() - for testcase in testcases: - config = testcase.get("config", {}) - test_runner = runner.Runner(config) - TestSequense = type('TestSequense', (unittest.TestCase,), {}) - - tests = testcase.get("teststeps", []) - for index, test_dict in enumerate(tests): - times = test_dict.get("times", 1) - try: - times = int(times) - except ValueError: - raise exceptions.ParamsError( - f"times should be digit, given: {times}") - - for times_index in range(times): - # suppose one testcase should not have more than 9999 steps, - # and one step should not run more than 999 times. 
- test_method_name = 'test_{:04}_{:03}'.format(index, times_index) - test_method = _add_test(test_runner, test_dict) - setattr(TestSequense, test_method_name, test_method) - - loaded_testcase = self.test_loader.loadTestsFromTestCase(TestSequense) - setattr(loaded_testcase, "config", config) - setattr(loaded_testcase, "teststeps", tests) - setattr(loaded_testcase, "runner", test_runner) - test_suite.addTest(loaded_testcase) - - return test_suite - - def _run_suite(self, test_suite): - """ run tests in test_suite - - Args: - test_suite: unittest.TestSuite() - - Returns: - list: tests_results - - """ - tests_results = [] - - for index, testcase in enumerate(test_suite): - log_handler = None - if self.save_tests: - logs_file_abs_path = utils.prepare_log_file_abs_path( - self.test_path, f"testcase_{index+1}.log" - ) - log_handler = logger.add(logs_file_abs_path, level="DEBUG") - - testcase_name = testcase.config.get("name") - logger.info(f"Start to run testcase: {testcase_name}") - - result = self.unittest_runner.run(testcase) - if result.wasSuccessful(): - tests_results.append((testcase, result)) - else: - tests_results.insert(0, (testcase, result)) - - if self.save_tests and log_handler: - logger.remove(log_handler) - - return tests_results - - def _aggregate(self, tests_results): - """ aggregate results - - Args: - tests_results (list): list of (testcase, result) - - """ - summary = { - "success": True, - "stat": { - "testcases": { - "total": len(tests_results), - "success": 0, - "fail": 0 - }, - "teststeps": {} - }, - "time": {}, - "platform": report.get_platform(), - "details": [] - } - - for index, tests_result in enumerate(tests_results): - testcase, result = tests_result - testcase_summary = report.get_summary(result) - - if testcase_summary["success"]: - summary["stat"]["testcases"]["success"] += 1 - else: - summary["stat"]["testcases"]["fail"] += 1 - - summary["success"] &= testcase_summary["success"] - testcase_summary["name"] = testcase.config.get("name") - 
testcase_summary["in_out"] = utils.get_testcase_io(testcase) - - report.aggregate_stat(summary["stat"]["teststeps"], testcase_summary["stat"]) - report.aggregate_stat(summary["time"], testcase_summary["time"]) - - if self.save_tests: - logs_file_abs_path = utils.prepare_log_file_abs_path( - self.test_path, f"testcase_{index+1}.log" - ) - testcase_summary["log"] = logs_file_abs_path - - testcase_summary["HRUN-Request-ID"] = testcase.runner.hrun_request_id - summary["details"].append(testcase_summary) - - return summary - - def run_tests(self, tests_mapping): - """ run testcase/testsuite data - """ - self.test_path = tests_mapping.get("project_mapping", {}).get("test_path", "") - - if self.save_tests: - utils.dump_json_file( - tests_mapping, - utils.prepare_log_file_abs_path(self.test_path, "loaded.json") - ) - - # parse tests - self.exception_stage = "parse tests" - parsed_testcases = parser.parse_tests(tests_mapping) - parse_failed_testfiles = parser.get_parse_failed_testfiles() - if parse_failed_testfiles: - logger.warning("parse failures occurred ...") - utils.dump_json_file( - parse_failed_testfiles, - utils.prepare_log_file_abs_path(self.test_path, "parse_failed.json") - ) - - if len(parsed_testcases) == 0: - logger.error("failed to parse all cases, abort.") - raise exceptions.ParseTestsFailure - - if self.save_tests: - utils.dump_json_file( - parsed_testcases, - utils.prepare_log_file_abs_path(self.test_path, "parsed.json") - ) - - # add tests to test suite - self.exception_stage = "add tests to test suite" - test_suite = self._add_tests(parsed_testcases) - - # run test suite - self.exception_stage = "run test suite" - results = self._run_suite(test_suite) - - # aggregate results - self.exception_stage = "aggregate results" - self._summary = self._aggregate(results) - - # generate html report - self.exception_stage = "generate html report" - report.stringify_summary(self._summary) - - if self.save_tests: - utils.dump_json_file( - self._summary, - 
utils.prepare_log_file_abs_path(self.test_path, "summary.json") - ) - # save variables and export data - vars_out = self.get_vars_out() - utils.dump_json_file( - vars_out, - utils.prepare_log_file_abs_path(self.test_path, "io.json") - ) - - return self._summary - - def get_vars_out(self): - """ get variables and output - Returns: - list: list of variables and output. - if tests are parameterized, list items are corresponded to parameters. - - [ - { - "in": { - "user1": "leo" - }, - "out": { - "out1": "out_value_1" - } - }, - {...} - ] - - None: returns None if tests not started or finished or corrupted. - - """ - if not self._summary: - return None - - return [ - summary["in_out"] - for summary in self._summary["details"] - ] - - def run_path(self, path, dot_env_path=None, mapping=None): - """ run testcase/testsuite file or folder. - - Args: - path (str): testcase/testsuite file/foler path. - dot_env_path (str): specified .env file path. - mapping (dict): if mapping is specified, it will override variables in config block. - - Returns: - dict: result summary - - """ - # load tests - self.exception_stage = "load tests" - tests_mapping = loader.load_cases(path, dot_env_path) - - if mapping: - tests_mapping["project_mapping"]["variables"] = mapping - - return self.run_tests(tests_mapping) - - def run(self, path_or_tests, dot_env_path=None, mapping=None): - """ main interface. - - Args: - path_or_tests: - str: testcase/testsuite file/foler path - dict: valid testcase/testsuite data - dot_env_path (str): specified .env file path. - mapping (dict): if mapping is specified, it will override variables in config block. 
- - Returns: - dict: result summary - - """ - logger.info(f"HttpRunner version: {__version__}") - if loader.is_test_path(path_or_tests): - return self.run_path(path_or_tests, dot_env_path, mapping) - elif loader.is_test_content(path_or_tests): - project_working_directory = path_or_tests.get("project_mapping", {}).get("PWD", os.getcwd()) - loader.init_pwd(project_working_directory) - return self.run_tests(path_or_tests) - else: - raise exceptions.ParamsError(f"Invalid testcase path or testcases: {path_or_tests}") diff --git a/httprunner/app/main.py b/httprunner/app/main.py index 1f1139bc..0084762d 100644 --- a/httprunner/app/main.py +++ b/httprunner/app/main.py @@ -8,13 +8,7 @@ app = FastAPI() @app.get("/hrun/version") async def get_hrun_version(): - return { - "code": 0, - "message": "success", - "result": { - "HttpRunner": __version__ - } - } + return {"code": 0, "message": "success", "result": {"HttpRunner": __version__}} app.include_router(deps.router) diff --git a/httprunner/app/routers/debug.py b/httprunner/app/routers/debug.py index 4c2d7726..b4d4fefb 100644 --- a/httprunner/app/routers/debug.py +++ b/httprunner/app/routers/debug.py @@ -1,6 +1,6 @@ from fastapi import APIRouter -from httprunner.api import HttpRunner +from httprunner.runner import HttpRunner from httprunner.schema import ProjectMeta, TestCase router = APIRouter() @@ -9,35 +9,25 @@ runner = HttpRunner() @router.post("/hrun/debug/testcase", tags=["debug"]) async def debug_single_testcase(project_meta: ProjectMeta, testcase: TestCase): - resp = { - "code": 0, - "message": "success", - "result": {} - } + resp = {"code": 0, "message": "success", "result": {}} - project_meta_json = project_meta.dict(by_alias=True) if project_meta.debugtalk_py: origin_local_keys = list(locals().keys()).copy() exec(project_meta.debugtalk_py, {}, locals()) new_local_keys = list(locals().keys()).copy() new_added_keys = set(new_local_keys) - set(origin_local_keys) new_added_keys.remove("origin_local_keys") - 
project_meta_json["functions"] = {} for func_name in new_added_keys: - project_meta_json["functions"][func_name] = locals()[func_name] + project_meta.functions[func_name] = locals()[func_name] - testcase_json = testcase.dict(by_alias=True) - tests_mapping = { - "project_mapping": project_meta_json, - "testcases": [testcase_json] - } + runner.with_project_meta(project_meta).run(testcase) + summary = runner.get_summary() - summary = runner.run_tests(tests_mapping) - if not summary["success"]: + if not summary.success: resp["code"] = 1 resp["message"] = "fail" - resp["result"] = summary + resp["result"] = summary.dict() return resp @@ -59,6 +49,6 @@ async def debug_single_testcase(project_meta: ProjectMeta, testcase: TestCase): # @router.post("/hrun/debug/testcases", tags=["debug"]) # async def debug_multiple_testcases(project_meta: ProjectMeta, testcases: TestCases): # tests_mapping = { -# "project_mapping": project_meta, +# "project_meta": project_meta, # "testcases": testcases # } diff --git a/httprunner/app/routers/debug_test.py b/httprunner/app/routers/debug_test.py index 2c5198ef..0ee0f80e 100644 --- a/httprunner/app/routers/debug_test.py +++ b/httprunner/app/routers/debug_test.py @@ -8,13 +8,12 @@ client = TestClient(app) class TestDebug(unittest.TestCase): - def test_debug_single_testcase(self): json_data = { "project_meta": { "debugtalk_py": "\ndef hello(name):\n print(f'hello, {name}')\n", "variables": {}, - "env": {} + "env": {}, }, "testcase": { "config": { @@ -24,7 +23,7 @@ class TestDebug(unittest.TestCase): "variables": {}, "setup_hooks": [], "teardown_hooks": [], - "export": [] + "export": [], }, "teststeps": [ { @@ -38,13 +37,13 @@ class TestDebug(unittest.TestCase): "cookies": {}, "timeout": 30, "allow_redirects": True, - "verify": False + "verify": False, }, "extract": {}, - "validate": [] + "validate": [], } - ] - } + ], + }, } response = client.post("/hrun/debug/testcase", json=json_data) assert response.status_code == 200 diff --git 
a/httprunner/app/routers/debugtalk.py b/httprunner/app/routers/debugtalk.py index c6d8f7b0..bdb9a6b9 100644 --- a/httprunner/app/routers/debugtalk.py +++ b/httprunner/app/routers/debugtalk.py @@ -23,15 +23,11 @@ def stdout_io(stdout=None): async def debug_python(request: Request): body = await request.body() - if request.headers.get('content-transfer-encoding') == "base64": + if request.headers.get("content-transfer-encoding") == "base64": # TODO: decode base64 pass - resp = { - "code": 0, - "message": "success", - "result": "" - } + resp = {"code": 0, "message": "success", "result": ""} try: with stdout_io() as s: exec(body, globals()) diff --git a/httprunner/app/routers/deps.py b/httprunner/app/routers/deps.py index a10ed423..70e0a017 100644 --- a/httprunner/app/routers/deps.py +++ b/httprunner/app/routers/deps.py @@ -10,11 +10,7 @@ router = APIRouter() @router.get("/hrun/deps", tags=["deps"]) async def get_installed_dependenies(): - resp = { - "code": 0, - "message": "success", - "result": {} - } + resp = {"code": 0, "message": "success", "result": {}} for p in pkg_resources.working_set: resp["result"][p.project_name] = p.version @@ -23,11 +19,7 @@ async def get_installed_dependenies(): @router.post("/hrun/deps", tags=["deps"]) async def install_dependenies(deps: List[str]): - resp = { - "code": 0, - "message": "success", - "result": {} - } + resp = {"code": 0, "message": "success", "result": {}} for dep in deps: try: p = subprocess.run(["pip", "install", dep]) diff --git a/httprunner/builtin/functions.py b/httprunner/builtin/functions.py index abb0b81d..2a7c68ca 100644 --- a/httprunner/builtin/functions.py +++ b/httprunner/builtin/functions.py @@ -13,8 +13,9 @@ from httprunner.exceptions import ParamsError def gen_random_string(str_len): """ generate random string with specified length """ - return ''.join( - random.choice(string.ascii_letters + string.digits) for _ in range(str_len)) + return "".join( + random.choice(string.ascii_letters + string.digits) for _ 
in range(str_len) + ) def get_timestamp(str_len=13): @@ -36,4 +37,3 @@ def sleep(n_secs): """ sleep n seconds """ time.sleep(n_secs) - diff --git a/httprunner/cli.py b/httprunner/cli.py index 2fe9fe41..f577e77a 100644 --- a/httprunner/cli.py +++ b/httprunner/cli.py @@ -2,12 +2,42 @@ import argparse import os import sys -from loguru import logger +import pytest -from httprunner import __description__, __version__ -from httprunner.api import HttpRunner -from httprunner.report import gen_html_report -from httprunner.utils import create_scaffold +from httprunner import __description__, __version__, exceptions +from httprunner.ext.har2case import init_har2case_parser, main_har2case +from httprunner.ext.make import init_make_parser, main_make, convert_testcase_path +from httprunner.ext.scaffold import init_parser_scaffold, main_scaffold + + +def init_parser_run(subparsers): + sub_parser_run = subparsers.add_parser( + "run", help="Make HttpRunner testcases and run with pytest." + ) + return sub_parser_run + + +def main_run(extra_args): + tests_path_list = [] + for index, item in enumerate(extra_args): + if not os.path.exists(item): + # item is not file/folder path + continue + elif os.path.isfile(item): + # replace YAML/JSON file path with generated python file + extra_args[index] = convert_testcase_path(item) + + tests_path_list.append(item) + + if len(tests_path_list) == 0: + # has not specified any testcase path + raise exceptions.ParamsError("Missed testcase path") + + main_make(tests_path_list) + + if "-s" not in extra_args: + extra_args.insert(0, "-s") + pytest.main(extra_args) def main(): @@ -15,80 +45,102 @@ def main(): """ parser = argparse.ArgumentParser(description=__description__) parser.add_argument( - '-V', '--version', dest='version', action='store_true', - help="show version") - parser.add_argument( - 'testfile_paths', nargs='*', - help="Specify api/testcase/testsuite file paths to run.") - parser.add_argument( - '--log-level', default='INFO', - 
help="Specify logging level, default is INFO.") - parser.add_argument( - '--log-file', - help="Write logs to specified file path.") - parser.add_argument( - '--dot-env-path', - help="Specify .env file path, which is useful for keeping sensitive data.") - parser.add_argument( - '--report-template', - help="Specify report template path.") - parser.add_argument( - '--report-dir', - help="Specify report save directory.") - parser.add_argument( - '--report-file', - help="Specify report file path, this has higher priority than specifying report dir.") - parser.add_argument( - '--save-tests', action='store_true', default=False, - help="Save loaded/parsed/vars_out/summary json data to JSON files.") - parser.add_argument( - '--failfast', action='store_true', default=False, - help="Stop the test run on the first error or failure.") - parser.add_argument( - '--startproject', - help="Specify new project name.") + "-V", "--version", dest="version", action="store_true", help="show version" + ) - args = parser.parse_args() + subparsers = parser.add_subparsers(help="sub-command help") + sub_parser_run = init_parser_run(subparsers) + sub_parser_scaffold = init_parser_scaffold(subparsers) + sub_parser_har2case = init_har2case_parser(subparsers) + sub_parser_make = init_make_parser(subparsers) if len(sys.argv) == 1: - # no argument passed + # httprunner parser.print_help() sys.exit(0) + elif len(sys.argv) == 2: + # print help for sub-commands + if sys.argv[1] in ["-V", "--version"]: + # httprunner -V + print(f"{__version__}") + elif sys.argv[1] in ["-h", "--help"]: + # httprunner -h + parser.print_help() + elif sys.argv[1] == "startproject": + # httprunner startproject + sub_parser_scaffold.print_help() + elif sys.argv[1] == "har2case": + # httprunner har2case + sub_parser_har2case.print_help() + elif sys.argv[1] == "run": + # httprunner run + pytest.main(["-h"]) + elif sys.argv[1] == "make": + # httprunner make + sub_parser_make.print_help() + sys.exit(0) + elif ( + len(sys.argv) == 
3 and sys.argv[1] == "run" and sys.argv[2] in ["-h", "--help"] + ): + # httprunner run -h + pytest.main(["-h"]) + sys.exit(0) + + extra_args = [] + if len(sys.argv) >= 2 and sys.argv[1] in ["run", "locusts"]: + args, extra_args = parser.parse_known_args() + else: + args = parser.parse_args() if args.version: print(f"{__version__}") sys.exit(0) - project_name = args.startproject - if project_name: - create_scaffold(project_name) - sys.exit(0) - - runner = HttpRunner( - failfast=args.failfast, - save_tests=args.save_tests, - log_level=args.log_level, - log_file=args.log_file - ) - - err_code = 0 - try: - for path in args.testfile_paths: - summary = runner.run(path, dot_env_path=args.dot_env_path) - report_dir = args.report_dir or os.path.join(os.getcwd(), "reports") - gen_html_report( - summary, - report_template=args.report_template, - report_dir=report_dir, - report_file=args.report_file - ) - err_code |= (0 if summary and summary["success"] else 1) - except Exception as ex: - logger.error(f"!!!!!!!!!! 
exception stage: {runner.exception_stage} !!!!!!!!!!\n{str(ex)}") - err_code = 1 - - sys.exit(err_code) + if sys.argv[1] == "run": + main_run(extra_args) + elif sys.argv[1] == "startproject": + main_scaffold(args) + elif sys.argv[1] == "har2case": + main_har2case(args) + elif sys.argv[1] == "make": + main_make(args.testcase_path) -if __name__ == '__main__': +def main_hrun_alias(): + """ command alias + hrun = httprunner run + """ + if len(sys.argv) == 2: + if sys.argv[1] in ["-V", "--version"]: + # hrun -V + sys.argv = ["httprunner", "-V"] + elif sys.argv[1] in ["-h", "--help"]: + pytest.main(["-h"]) + sys.exit(0) + else: + # hrun /path/to/testcase + sys.argv.insert(1, "run") + else: + sys.argv.insert(1, "run") + + main() + + +def main_make_alias(): + """ command alias + hmake = httprunner make + """ + sys.argv.insert(1, "make") + main() + + +def main_har2case_alias(): + """ command alias + har2case = httprunner har2case + """ + sys.argv.insert(1, "har2case") + main() + + +if __name__ == "__main__": main() diff --git a/httprunner/cli_test.py b/httprunner/cli_test.py index 33914844..db7f9651 100644 --- a/httprunner/cli_test.py +++ b/httprunner/cli_test.py @@ -6,7 +6,6 @@ from httprunner.cli import main class TestCli(unittest.TestCase): - def setUp(self): self.captured_output = io.StringIO() sys.stdout = self.captured_output @@ -23,6 +22,7 @@ class TestCli(unittest.TestCase): self.assertEqual(cm.exception.code, 0) from httprunner import __version__ + self.assertIn(__version__, self.captured_output.getvalue().strip()) def test_show_help(self): @@ -34,4 +34,5 @@ class TestCli(unittest.TestCase): self.assertEqual(cm.exception.code, 0) from httprunner import __description__ + self.assertIn(__description__, self.captured_output.getvalue().strip()) diff --git a/httprunner/client.py b/httprunner/client.py index 4577a40a..b9606e3f 100644 --- a/httprunner/client.py +++ b/httprunner/client.py @@ -1,94 +1,91 @@ -# encoding: utf-8 - import time import requests import urllib3 
from loguru import logger from requests import Request, Response -from requests.exceptions import (InvalidSchema, InvalidURL, MissingSchema, - RequestException) +from requests.exceptions import ( + InvalidSchema, + InvalidURL, + MissingSchema, + RequestException, +) -from httprunner import response +from httprunner.schema import RequestData, ResponseData +from httprunner.schema import SessionData, ReqRespData from httprunner.utils import lower_dict_keys, omit_long_data urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -def get_req_resp_record(resp_obj): +class ApiResponse(Response): + def raise_for_status(self): + if hasattr(self, "error") and self.error: + raise self.error + Response.raise_for_status(self) + + +def get_req_resp_record(resp_obj: Response) -> ReqRespData: """ get request and response info from Response() object. """ - def log_print(req_resp_dict, r_type): + + def log_print(req_or_resp, r_type): msg = f"\n================== {r_type} details ==================\n" - for key, value in req_resp_dict[r_type].items(): + for key, value in req_or_resp.dict().items(): msg += "{:<16} : {}\n".format(key, repr(value)) logger.debug(msg) - req_resp_dict = { - "request": {}, - "response": {} - } - # record actual request info - req_resp_dict["request"]["url"] = resp_obj.request.url - req_resp_dict["request"]["method"] = resp_obj.request.method - req_resp_dict["request"]["headers"] = dict(resp_obj.request.headers) - + request_headers = dict(resp_obj.request.headers) request_body = resp_obj.request.body + if request_body: - request_content_type = lower_dict_keys( - req_resp_dict["request"]["headers"] - ).get("content-type") + request_content_type = lower_dict_keys(request_headers).get("content-type") if request_content_type and "multipart/form-data" in request_content_type: # upload file type - req_resp_dict["request"]["body"] = "upload file stream (OMITTED)" - else: - req_resp_dict["request"]["body"] = request_body + request_body = "upload file 
stream (OMITTED)" + + request_data = RequestData( + method=resp_obj.request.method, + url=resp_obj.request.url, + headers=request_headers, + body=request_body, + ) # log request details in debug mode - log_print(req_resp_dict, "request") + log_print(request_data, "request") # record response info - req_resp_dict["response"]["ok"] = resp_obj.ok - req_resp_dict["response"]["url"] = resp_obj.url - req_resp_dict["response"]["status_code"] = resp_obj.status_code - req_resp_dict["response"]["reason"] = resp_obj.reason - req_resp_dict["response"]["cookies"] = resp_obj.cookies or {} - req_resp_dict["response"]["encoding"] = resp_obj.encoding resp_headers = dict(resp_obj.headers) - req_resp_dict["response"]["headers"] = resp_headers - lower_resp_headers = lower_dict_keys(resp_headers) content_type = lower_resp_headers.get("content-type", "") - req_resp_dict["response"]["content_type"] = content_type if "image" in content_type: # response is image type, record bytes content only - req_resp_dict["response"]["body"] = resp_obj.content + response_body = resp_obj.content else: try: # try to record json data - if isinstance(resp_obj, response.ResponseObject): - req_resp_dict["response"]["body"] = resp_obj.json - else: - req_resp_dict["response"]["body"] = resp_obj.json() + response_body = resp_obj.json() except ValueError: # only record at most 512 text charactors resp_text = resp_obj.text - req_resp_dict["response"]["body"] = omit_long_data(resp_text) + response_body = omit_long_data(resp_text) + + response_data = ResponseData( + status_code=resp_obj.status_code, + cookies=resp_obj.cookies or {}, + encoding=resp_obj.encoding, + headers=resp_headers, + content_type=content_type, + body=response_body, + ) # log response details in debug mode - log_print(req_resp_dict, "response") + log_print(response_data, "response") - return req_resp_dict - - -class ApiResponse(Response): - - def raise_for_status(self): - if hasattr(self, 'error') and self.error: - raise self.error - 
Response.raise_for_status(self) + req_resp_data = ReqRespData(request=request_data, response=response_data) + return req_resp_data class HttpSession(requests.Session): @@ -100,43 +97,18 @@ class HttpSession(requests.Session): This is a slightly extended version of `python-request `_'s :py:class:`requests.Session` class and mostly this class works exactly the same. """ + def __init__(self): super(HttpSession, self).__init__() - self.init_meta_data() - - def init_meta_data(self): - """ initialize meta_data, it will store detail data of request and response - """ - self.meta_data = { - "name": "", - "data": [ - { - "request": { - "url": "N/A", - "method": "N/A", - "headers": {} - }, - "response": { - "status_code": "N/A", - "headers": {}, - "encoding": None, - "content_type": "" - } - } - ], - "stat": { - "content_size": "N/A", - "response_time_ms": "N/A", - "elapsed_ms": "N/A", - } - } + self.data = SessionData() def update_last_req_resp_record(self, resp_obj): """ update request and response info from Response() object. """ - self.meta_data["data"].pop() - self.meta_data["data"].append(get_req_resp_record(resp_obj)) + # TODO: fix + self.data.req_resps.pop() + self.data.req_resps.append(get_req_resp_record(resp_obj)) def request(self, method, url, name=None, **kwargs): """ @@ -177,16 +149,10 @@ class HttpSession(requests.Session): :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 
""" - self.init_meta_data() + self.data = SessionData() - # record test name - self.meta_data["name"] = name - - # record original request info - self.meta_data["data"][0]["request"]["method"] = method - self.meta_data["data"][0]["request"]["url"] = url + # timeout default to 120 seconds kwargs.setdefault("timeout", 120) - self.meta_data["data"][0]["request"].update(kwargs) start_timestamp = time.time() response = self._send_request_safe_mode(method, url, **kwargs) @@ -200,17 +166,14 @@ class HttpSession(requests.Session): content_size = len(response.content or "") # record the consumed time - self.meta_data["stat"] = { - "response_time_ms": response_time_ms, - "elapsed_ms": response.elapsed.microseconds / 1000.0, - "content_size": content_size - } + self.data.stat.response_time_ms = response_time_ms + self.data.stat.elapsed_ms = response.elapsed.microseconds / 1000.0 + self.data.stat.content_size = content_size # record request and response histories, include 30X redirection response_list = response.history + [response] - self.meta_data["data"] = [ - get_req_resp_record(resp_obj) - for resp_obj in response_list + self.data.req_resps = [ + get_req_resp_record(resp_obj) for resp_obj in response_list ] try: diff --git a/httprunner/context.py b/httprunner/context.py deleted file mode 100644 index c08af242..00000000 --- a/httprunner/context.py +++ /dev/null @@ -1,64 +0,0 @@ -from httprunner import parser, utils - - -class SessionContext(object): - """ HttpRunner session, store runtime variables. 
- - Examples: - >>> variables = {"SECRET_KEY": "DebugTalk"} - >>> context = SessionContext(variables) - - Equivalent to: - >>> context = SessionContext() - >>> context.update_session_variables(variables) - - """ - - def __init__(self, variables=None): - variables_mapping = utils.ensure_mapping_format(variables or {}) - self.session_variables_mapping = parser.parse_variables_mapping(variables_mapping) - self.test_variables_mapping = {} - self.init_test_variables() - - def init_test_variables(self, variables_mapping=None): - """ init test variables, called when each test(api) starts. - variables_mapping will be evaluated first. - - Args: - variables_mapping (dict) - { - "random": "${gen_random_string(5)}", - "authorization": "${gen_md5($TOKEN, $data, $random)}", - "data": '{"name": "user", "password": "123456"}', - "TOKEN": "debugtalk", - } - - """ - variables_mapping = variables_mapping or {} - variables_mapping = utils.ensure_mapping_format(variables_mapping) - variables_mapping.update(self.session_variables_mapping) - parsed_variables_mapping = parser.parse_variables_mapping(variables_mapping) - - self.test_variables_mapping = {} - # priority: extracted variable > teststep variable - self.test_variables_mapping.update(parsed_variables_mapping) - self.test_variables_mapping.update(self.session_variables_mapping) - - def update_test_variables(self, variable_name, variable_value): - """ update test variables, these variables are only valid in the current test. - """ - self.test_variables_mapping[variable_name] = variable_value - - def update_session_variables(self, variables_mapping): - """ update session with extracted variables mapping. - these variables are valid in the whole running session. 
- """ - variables_mapping = utils.ensure_mapping_format(variables_mapping) - self.session_variables_mapping.update(variables_mapping) - self.test_variables_mapping.update(self.session_variables_mapping) - - def eval_content(self, content): - """ evaluate content recursively, take effect on each variable and function in content. - content may be in any data structure, include dict, list, tuple, number, string, etc. - """ - return parser.parse_lazy_data(content, self.test_variables_mapping) diff --git a/httprunner/exceptions.py b/httprunner/exceptions.py index 77d1be52..66e8f7a5 100644 --- a/httprunner/exceptions.py +++ b/httprunner/exceptions.py @@ -40,6 +40,10 @@ class FileFormatError(MyBaseError): pass +class TestCaseFormatError(MyBaseError): + pass + + class ParamsError(MyBaseError): pass diff --git a/httprunner/ext/har2case/__init__.py b/httprunner/ext/har2case/__init__.py index e69de29b..3847d7c3 100644 --- a/httprunner/ext/har2case/__init__.py +++ b/httprunner/ext/har2case/__init__.py @@ -0,0 +1,61 @@ +""" Convert HAR (HTTP Archive) to YAML/JSON testcase for HttpRunner. + +Usage: + # convert to JSON format testcase + $ hrun har2case demo.har + + # convert to YAML format testcase + $ hrun har2case demo.har -2y + +""" +import os +import sys + +from loguru import logger + +from httprunner.ext.har2case.core import HarParser + + +def init_har2case_parser(subparsers): + """ HAR converter: parse command line options and run commands. 
+ """ + parser = subparsers.add_parser( + "har2case", + help="Convert HAR(HTTP Archive) to YAML/JSON testcases for HttpRunner.", + ) + parser.add_argument("har_source_file", nargs="?", help="Specify HAR source file") + parser.add_argument( + "-2y", + "--to-yml", + "--to-yaml", + dest="to_yaml", + action="store_true", + help="Convert to YAML format, if not specified, convert to JSON format by default.", + ) + parser.add_argument( + "--filter", + help="Specify filter keyword, only url include filter string will be converted.", + ) + parser.add_argument( + "--exclude", + help="Specify exclude keyword, url that includes exclude string will be ignored, " + "multiple keywords can be joined with '|'", + ) + + return parser + + +def main_har2case(args): + har_source_file = args.har_source_file + if not har_source_file or not har_source_file.endswith(".har"): + logger.error("HAR file not specified.") + sys.exit(1) + + if not os.path.isfile(har_source_file): + logger.error(f"HAR file not exists: {har_source_file}") + sys.exit(1) + + output_file_type = "YML" if args.to_yaml else "JSON" + HarParser(har_source_file, args.filter, args.exclude).gen_testcase(output_file_type) + + return 0 diff --git a/httprunner/ext/har2case/core.py b/httprunner/ext/har2case/core.py new file mode 100644 index 00000000..51864d6e --- /dev/null +++ b/httprunner/ext/har2case/core.py @@ -0,0 +1,356 @@ +import base64 +import json +import os +import sys +import urllib.parse as urlparse + +from loguru import logger + +from httprunner.ext.har2case import utils + +try: + from json.decoder import JSONDecodeError +except ImportError: + JSONDecodeError = ValueError + + +IGNORE_REQUEST_HEADERS = [ + "host", + "accept", + "content-length", + "connection", + "accept-encoding", + "accept-language", + "origin", + "cache-control", + "pragma", + "upgrade-insecure-requests", + ":authority", + ":method", + ":scheme", + ":path", +] + + +class HarParser(object): + def __init__(self, har_file_path, filter_str=None, 
exclude_str=None): + self.har_file_path = har_file_path + self.filter_str = filter_str + self.exclude_str = exclude_str or "" + + def __make_request_url(self, teststep_dict, entry_json): + """ parse HAR entry request url and queryString, and make teststep url and params + + Args: + entry_json (dict): + { + "request": { + "url": "https://httprunner.top/home?v=1&w=2", + "queryString": [ + {"name": "v", "value": "1"}, + {"name": "w", "value": "2"} + ], + }, + "response": {} + } + + Returns: + { + "name: "/home", + "request": { + url: "https://httprunner.top/home", + params: {"v": "1", "w": "2"} + } + } + + """ + request_params = utils.convert_list_to_dict( + entry_json["request"].get("queryString", []) + ) + + url = entry_json["request"].get("url") + if not url: + logger.exception("url missed in request.") + sys.exit(1) + + parsed_object = urlparse.urlparse(url) + if request_params: + parsed_object = parsed_object._replace(query="") + teststep_dict["request"]["url"] = parsed_object.geturl() + teststep_dict["request"]["params"] = request_params + else: + teststep_dict["request"]["url"] = url + + teststep_dict["name"] = parsed_object.path + + def __make_request_method(self, teststep_dict, entry_json): + """ parse HAR entry request method, and make teststep method. + """ + method = entry_json["request"].get("method") + if not method: + logger.exception("method missed in request.") + sys.exit(1) + + teststep_dict["request"]["method"] = method + + def __make_request_headers(self, teststep_dict, entry_json): + """ parse HAR entry request headers, and make teststep headers. + header in IGNORE_REQUEST_HEADERS will be ignored. 
+ + Args: + entry_json (dict): + { + "request": { + "headers": [ + {"name": "Host", "value": "httprunner.top"}, + {"name": "Content-Type", "value": "application/json"}, + {"name": "User-Agent", "value": "iOS/10.3"} + ], + }, + "response": {} + } + + Returns: + { + "request": { + headers: {"Content-Type": "application/json"} + } + + """ + teststep_headers = {} + for header in entry_json["request"].get("headers", []): + if header["name"].lower() in IGNORE_REQUEST_HEADERS: + continue + + teststep_headers[header["name"]] = header["value"] + + if teststep_headers: + teststep_dict["request"]["headers"] = teststep_headers + + def _make_request_data(self, teststep_dict, entry_json): + """ parse HAR entry request data, and make teststep request data + + Args: + entry_json (dict): + { + "request": { + "method": "POST", + "postData": { + "mimeType": "application/x-www-form-urlencoded; charset=utf-8", + "params": [ + {"name": "a", "value": 1}, + {"name": "b", "value": "2"} + } + }, + }, + "response": {...} + } + + + Returns: + { + "request": { + "method": "POST", + "data": {"v": "1", "w": "2"} + } + } + + """ + method = entry_json["request"].get("method") + if method in ["POST", "PUT", "PATCH"]: + postData = entry_json["request"].get("postData", {}) + mimeType = postData.get("mimeType") + + # Note that text and params fields are mutually exclusive. 
+ if "text" in postData: + post_data = postData.get("text") + else: + params = postData.get("params", []) + post_data = utils.convert_list_to_dict(params) + + request_data_key = "data" + if not mimeType: + pass + elif mimeType.startswith("application/json"): + try: + post_data = json.loads(post_data) + request_data_key = "json" + except JSONDecodeError: + pass + elif mimeType.startswith("application/x-www-form-urlencoded"): + post_data = utils.convert_x_www_form_urlencoded_to_dict(post_data) + else: + # TODO: make compatible with more mimeType + pass + + teststep_dict["request"][request_data_key] = post_data + + def _make_validate(self, teststep_dict, entry_json): + """ parse HAR entry response and make teststep validate. + + Args: + entry_json (dict): + { + "request": {}, + "response": { + "status": 200, + "headers": [ + { + "name": "Content-Type", + "value": "application/json; charset=utf-8" + }, + ], + "content": { + "size": 71, + "mimeType": "application/json; charset=utf-8", + "text": "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0=", + "encoding": "base64" + } + } + } + + Returns: + { + "validate": [ + {"eq": ["status_code", 200]} + ] + } + + """ + teststep_dict["validate"].append( + {"eq": ["status_code", entry_json["response"].get("status")]} + ) + + resp_content_dict = entry_json["response"].get("content") + + headers_mapping = utils.convert_list_to_dict( + entry_json["response"].get("headers", []) + ) + if "Content-Type" in headers_mapping: + teststep_dict["validate"].append( + {"eq": ["headers.Content-Type", headers_mapping["Content-Type"]]} + ) + + text = resp_content_dict.get("text") + if not text: + return + + mime_type = resp_content_dict.get("mimeType") + if mime_type and mime_type.startswith("application/json"): + + encoding = resp_content_dict.get("encoding") + if encoding and encoding == "base64": + content = base64.b64decode(text).decode("utf-8") + else: + content = text + + try: + 
resp_content_json = json.loads(content) + except JSONDecodeError: + logger.warning( + "response content can not be loaded as json: {}".format( + content.encode("utf-8") + ) + ) + return + + if not isinstance(resp_content_json, dict): + return + + for key, value in resp_content_json.items(): + if isinstance(value, (dict, list)): + continue + + teststep_dict["validate"].append( + {"eq": ["content.{}".format(key), value]} + ) + + def _prepare_teststep(self, entry_json): + """ extract info from entry dict and make teststep + + Args: + entry_json (dict): + { + "request": { + "method": "POST", + "url": "https://httprunner.top/api/v1/Account/Login", + "headers": [], + "queryString": [], + "postData": {}, + }, + "response": { + "status": 200, + "headers": [], + "content": {} + } + } + + """ + teststep_dict = {"name": "", "request": {}, "validate": []} + + self.__make_request_url(teststep_dict, entry_json) + self.__make_request_method(teststep_dict, entry_json) + self.__make_request_headers(teststep_dict, entry_json) + self._make_request_data(teststep_dict, entry_json) + self._make_validate(teststep_dict, entry_json) + + return teststep_dict + + def _prepare_config(self): + """ prepare config block. + """ + return {"name": "testcase description", "variables": {}} + + def _prepare_teststeps(self): + """ make teststep list. + teststeps list are parsed from HAR log entries list. 
+ + """ + + def is_exclude(url, exclude_str): + exclude_str_list = exclude_str.split("|") + for exclude_str in exclude_str_list: + if exclude_str and exclude_str in url: + return True + + return False + + teststeps = [] + log_entries = utils.load_har_log_entries(self.har_file_path) + for entry_json in log_entries: + url = entry_json["request"].get("url") + if self.filter_str and self.filter_str not in url: + continue + + if is_exclude(url, self.exclude_str): + continue + + teststeps.append(self._prepare_teststep(entry_json)) + + return teststeps + + def _make_testcase(self): + """ Extract info from HAR file and prepare for testcase + """ + logger.info("Extract info from HAR file and prepare for testcase.") + + config = self._prepare_config() + teststeps = self._prepare_teststeps() + + testcase = {"config": config, "teststeps": teststeps} + return testcase + + def gen_testcase(self, file_type="JSON"): + logger.info(f"Start to generate testcase from {self.har_file_path}") + harfile = os.path.splitext(self.har_file_path)[0] + output_testcase_file = "{}.{}".format(harfile, file_type.lower()) + + testcase = self._make_testcase() + logger.debug("prepared testcase: {}".format(testcase)) + + if file_type == "JSON": + utils.dump_json(testcase, output_testcase_file) + else: + utils.dump_yaml(testcase, output_testcase_file) + + logger.info(f"generated testcase: {output_testcase_file}") diff --git a/httprunner/ext/har2case/core_test.py b/httprunner/ext/har2case/core_test.py new file mode 100644 index 00000000..25fa60c4 --- /dev/null +++ b/httprunner/ext/har2case/core_test.py @@ -0,0 +1,166 @@ +import os + +from httprunner.ext.har2case.core import HarParser +from httprunner.ext.har2case.utils import load_har_log_entries +from httprunner.ext.har2case.utils_test import TestUtils + + +class TestHar(TestUtils): + def setUp(self): + self.har_path = os.path.join(os.path.dirname(__file__), "data", "demo.har") + self.har_parser = HarParser(self.har_path) + + def 
test_prepare_teststep(self): + log_entries = load_har_log_entries(self.har_path) + teststep_dict = self.har_parser._prepare_teststep(log_entries[0]) + self.assertIn("name", teststep_dict) + self.assertIn("request", teststep_dict) + self.assertIn("validate", teststep_dict) + + validators_mapping = { + validator["eq"][0]: validator["eq"][1] + for validator in teststep_dict["validate"] + } + self.assertEqual(validators_mapping["status_code"], 200) + self.assertEqual(validators_mapping["content.IsSuccess"], True) + self.assertEqual(validators_mapping["content.Code"], 200) + self.assertEqual(validators_mapping["content.Message"], None) + + def test_prepare_teststeps(self): + teststeps = self.har_parser._prepare_teststeps() + self.assertIsInstance(teststeps, list) + self.assertIn("name", teststeps[0]) + self.assertIn("request", teststeps[0]) + self.assertIn("validate", teststeps[0]) + + def test_gen_testcase_yaml(self): + yaml_file = os.path.join(os.path.dirname(__file__), "data", "demo.yaml") + + self.har_parser.gen_testcase(file_type="YAML") + self.assertTrue(os.path.isfile(yaml_file)) + os.remove(yaml_file) + + def test_gen_testcase_json(self): + json_file = os.path.join(os.path.dirname(__file__), "data", "demo.json") + + self.har_parser.gen_testcase(file_type="JSON") + self.assertTrue(os.path.isfile(json_file)) + os.remove(json_file) + + def test_filter(self): + filter_str = "httprunner" + har_parser = HarParser(self.har_path, filter_str) + teststeps = har_parser._prepare_teststeps() + self.assertEqual( + teststeps[0]["request"]["url"], + "https://httprunner.top/api/v1/Account/Login", + ) + + filter_str = "debugtalk" + har_parser = HarParser(self.har_path, filter_str) + teststeps = har_parser._prepare_teststeps() + self.assertEqual(teststeps, []) + + def test_exclude(self): + exclude_str = "debugtalk" + har_parser = HarParser(self.har_path, exclude_str=exclude_str) + teststeps = har_parser._prepare_teststeps() + self.assertEqual( + teststeps[0]["request"]["url"], + 
"https://httprunner.top/api/v1/Account/Login", + ) + + exclude_str = "httprunner" + har_parser = HarParser(self.har_path, exclude_str=exclude_str) + teststeps = har_parser._prepare_teststeps() + self.assertEqual(teststeps, []) + + def test_exclude_multiple(self): + exclude_str = "httprunner|v2" + har_parser = HarParser(self.har_path, exclude_str=exclude_str) + teststeps = har_parser._prepare_teststeps() + self.assertEqual(teststeps, []) + + exclude_str = "http2|v1" + har_parser = HarParser(self.har_path, exclude_str=exclude_str) + teststeps = har_parser._prepare_teststeps() + self.assertEqual(teststeps, []) + + def test_make_request_data_params(self): + testcase_dict = {"name": "", "request": {}, "validate": []} + entry_json = { + "request": { + "method": "POST", + "postData": { + "mimeType": "application/x-www-form-urlencoded; charset=utf-8", + "params": [{"name": "a", "value": 1}, {"name": "b", "value": "2"}], + }, + } + } + self.har_parser._make_request_data(testcase_dict, entry_json) + self.assertEqual(testcase_dict["request"]["data"]["a"], 1) + self.assertEqual(testcase_dict["request"]["data"]["b"], "2") + + def test_make_request_data_json(self): + testcase_dict = {"name": "", "request": {}, "validate": []} + entry_json = { + "request": { + "method": "POST", + "postData": { + "mimeType": "application/json; charset=utf-8", + "text": '{"a":"1","b":"2"}', + }, + } + } + self.har_parser._make_request_data(testcase_dict, entry_json) + self.assertEqual(testcase_dict["request"]["json"], {"a": "1", "b": "2"}) + + def test_make_request_data_text_empty(self): + testcase_dict = {"name": "", "request": {}, "validate": []} + entry_json = { + "request": { + "method": "POST", + "postData": {"mimeType": "application/json; charset=utf-8", "text": ""}, + } + } + self.har_parser._make_request_data(testcase_dict, entry_json) + self.assertEqual(testcase_dict["request"]["data"], "") + + def test_make_validate(self): + testcase_dict = {"name": "", "request": {}, "validate": []} + 
entry_json = { + "request": {}, + "response": { + "status": 200, + "headers": [ + { + "name": "Content-Type", + "value": "application/json; charset=utf-8", + }, + ], + "content": { + "size": 71, + "mimeType": "application/json; charset=utf-8", + # raw response content text is application/jose type + "text": "ZXlKaGJHY2lPaUpTVTBFeFh6VWlMQ0psYm1NaU9pSkJNVEk0UTBKRExV", + "encoding": "base64", + }, + }, + } + self.har_parser._make_validate(testcase_dict, entry_json) + self.assertEqual(testcase_dict["validate"][0], {"eq": ["status_code", 200]}) + self.assertEqual( + testcase_dict["validate"][1], + {"eq": ["headers.Content-Type", "application/json; charset=utf-8"]}, + ) + + def test_make_testcase(self): + har_path = os.path.join( + os.path.dirname(__file__), "data", "demo-quickstart.har" + ) + har_parser = HarParser(har_path) + testcase = har_parser._make_testcase() + self.assertIsInstance(testcase, dict) + self.assertIn("config", testcase) + self.assertIn("teststeps", testcase) + self.assertEqual(len(testcase["teststeps"]), 2) diff --git a/httprunner/ext/har2case/data/demo-quickstart.har b/httprunner/ext/har2case/data/demo-quickstart.har new file mode 100644 index 00000000..f4de4473 --- /dev/null +++ b/httprunner/ext/har2case/data/demo-quickstart.har @@ -0,0 +1,223 @@ +{ + "log": { + "version": "1.2", + "creator": { + "name": "Charles Proxy", + "version": "4.2.1" + }, + "entries": [ + { + "startedDateTime": "2018-02-19T17:30:00.904+08:00", + "time": 3, + "request": { + "method": "POST", + "url": "http://127.0.0.1:5000/api/get-token", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { + "name": "Host", + "value": "127.0.0.1:5000" + }, + { + "name": "User-Agent", + "value": "python-requests/2.18.4" + }, + { + "name": "Accept-Encoding", + "value": "gzip, deflate" + }, + { + "name": "Accept", + "value": "*/*" + }, + { + "name": "Connection", + "value": "keep-alive" + }, + { + "name": "device_sn", + "value": "FwgRiO7CNA50DSU" + }, + { + "name": "user_agent", + 
"value": "iOS/10.3" + }, + { + "name": "os_platform", + "value": "ios" + }, + { + "name": "app_version", + "value": "2.8.6" + }, + { + "name": "Content-Length", + "value": "52" + }, + { + "name": "Content-Type", + "value": "application/json" + } + ], + "queryString": [], + "postData": { + "mimeType": "application/json", + "text": "{\"sign\": \"958a05393efef0ac7c0fb80a7eac45e24fd40c27\"}" + }, + "headersSize": 299, + "bodySize": 52 + }, + "response": { + "_charlesStatus": "COMPLETE", + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.0", + "cookies": [], + "headers": [ + { + "name": "Content-Type", + "value": "application/json" + }, + { + "name": "Content-Length", + "value": "46" + }, + { + "name": "Server", + "value": "Werkzeug/0.14.1 Python/3.6.4" + }, + { + "name": "Date", + "value": "Mon, 19 Feb 2018 09:30:00 GMT" + }, + { + "name": "Proxy-Connection", + "value": "Close" + } + ], + "content": { + "size": 46, + "mimeType": "application/json", + "text": "eyJzdWNjZXNzIjogdHJ1ZSwgInRva2VuIjogImJhTkxYMXpoRllQMTFTZWIifQ\u003d\u003d", + "encoding": "base64" + }, + "headersSize": 175, + "bodySize": 46 + }, + "serverIPAddress": "127.0.0.1", + "cache": {}, + "timings": { + "dns": 1, + "connect": 0, + "ssl": -1, + "send": 0, + "wait": 1, + "receive": 1 + } + }, + { + "startedDateTime": "2018-02-19T17:30:00.911+08:00", + "time": 3, + "request": { + "method": "POST", + "url": "http://127.0.0.1:5000/api/users/1000", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { + "name": "Host", + "value": "127.0.0.1:5000" + }, + { + "name": "User-Agent", + "value": "python-requests/2.18.4" + }, + { + "name": "Accept-Encoding", + "value": "gzip, deflate" + }, + { + "name": "Accept", + "value": "*/*" + }, + { + "name": "Connection", + "value": "keep-alive" + }, + { + "name": "device_sn", + "value": "FwgRiO7CNA50DSU" + }, + { + "name": "token", + "value": "baNLX1zhFYP11Seb" + }, + { + "name": "Content-Length", + "value": "39" + }, + { + "name": "Content-Type", + 
"value": "application/json" + } + ], + "queryString": [], + "postData": { + "mimeType": "application/json", + "text": "{\"name\": \"user1\", \"password\": \"123456\"}" + }, + "headersSize": 265, + "bodySize": 39 + }, + "response": { + "_charlesStatus": "COMPLETE", + "status": 201, + "statusText": "CREATED", + "httpVersion": "HTTP/1.0", + "cookies": [], + "headers": [ + { + "name": "Content-Type", + "value": "application/json" + }, + { + "name": "Content-Length", + "value": "54" + }, + { + "name": "Server", + "value": "Werkzeug/0.14.1 Python/3.6.4" + }, + { + "name": "Date", + "value": "Mon, 19 Feb 2018 09:30:00 GMT" + }, + { + "name": "Proxy-Connection", + "value": "Close" + } + ], + "content": { + "size": 54, + "mimeType": "application/json", + "text": "eyJzdWNjZXNzIjogdHJ1ZSwgIm1zZyI6ICJ1c2VyIGNyZWF0ZWQgc3VjY2Vzc2Z1bGx5LiJ9", + "encoding": "base64" + }, + "headersSize": 77, + "bodySize": 54 + }, + "serverIPAddress": "127.0.0.1", + "cache": {}, + "timings": { + "dns": 0, + "connect": 0, + "ssl": -1, + "send": 0, + "wait": 3, + "receive": 0 + } + } + ] + } +} \ No newline at end of file diff --git a/httprunner/ext/har2case/data/demo.har b/httprunner/ext/har2case/data/demo.har new file mode 100644 index 00000000..f56e7450 --- /dev/null +++ b/httprunner/ext/har2case/data/demo.har @@ -0,0 +1,148 @@ +{ + "log": { + "version": "1.2", + "creator": { + "name": "Charles Proxy", + "version": "4.2" + }, + "entries": [ + { + "startedDateTime": "2017-11-13T11:40:07.212+08:00", + "time": 35, + "request": { + "method": "POST", + "url": "https://httprunner.top/api/v1/Account/Login", + "httpVersion": "HTTP/1.1", + "cookies": [ + { + "name": "lang", + "value": "zh" + } + ], + "headers": [ + { + "name": "Host", + "value": "httprunner.top" + }, + { + "name": "Connection", + "value": "keep-alive" + }, + { + "name": "Content-Length", + "value": "50" + }, + { + "name": "Accept", + "value": "application/json" + }, + { + "name": "Origin", + "value": "https://httprunner.top" + }, + { + 
"name": "User-Agent", + "value": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36" + }, + { + "name": "Content-Type", + "value": "application/json" + }, + { + "name": "Referer", + "value": "https://httprunner.top/login" + }, + { + "name": "Accept-Encoding", + "value": "gzip, deflate, br" + }, + { + "name": "Accept-Language", + "value": "en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4" + } + ], + "queryString": [], + "postData": { + "mimeType": "application/json", + "text": "{\"UserName\":\"test001\",\"Pwd\":\"123\",\"VerCode\":\"\"}" + }, + "headersSize": 640, + "bodySize": 50 + }, + "response": { + "_charlesStatus": "COMPLETE", + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [ + { + "name": "lang", + "value": "zh", + "path": "/", + "domain": ".httprunner.top", + "expires": null, + "httpOnly": false, + "secure": false, + "comment": null, + "_maxAge": null + } + ], + "headers": [ + { + "name": "Date", + "value": "Mon, 13 Nov 2017 03:40:07 GMT" + }, + { + "name": "Content-Type", + "value": "application/json; charset=utf-8" + }, + { + "name": "Content-Length", + "value": "71" + }, + { + "name": "Cache-Control", + "value": "no-cache" + }, + { + "name": "Pragma", + "value": "no-cache" + }, + { + "name": "Expires", + "value": "-1" + }, + { + "name": "Server", + "value": "Microsoft-IIS/8.5" + }, + { + "name": "X-AspNet-Version", + "value": "4.0.30319" + } + ], + "content": { + "size": 71, + "mimeType": "application/json; charset=utf-8", + "text": "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0=", + "encoding": "base64" + }, + "redirectURL": null, + "headersSize": 0, + "bodySize": 71 + }, + "serverIPAddress": "192.168.1.169", + "cache": {}, + "timings": { + "dns": -1, + "connect": -1, + "ssl": -1, + "send": 6, + "wait": 28, + "receive": 1 + } + } + + ] + } +} \ No newline at end of file diff --git a/httprunner/ext/har2case/utils.py 
b/httprunner/ext/har2case/utils.py new file mode 100644 index 00000000..f2fdd852 --- /dev/null +++ b/httprunner/ext/har2case/utils.py @@ -0,0 +1,128 @@ +import io +import json +import logging +import sys +from json.decoder import JSONDecodeError +from urllib.parse import unquote + +import yaml + + +def load_har_log_entries(file_path): + """ load HAR file and return log entries list + + Args: + file_path (str) + + Returns: + list: entries + [ + { + "request": {}, + "response": {} + }, + { + "request": {}, + "response": {} + } + ] + + """ + with io.open(file_path, "r+", encoding="utf-8-sig") as f: + try: + content_json = json.loads(f.read()) + return content_json["log"]["entries"] + except (KeyError, TypeError, JSONDecodeError): + logging.error("HAR file content error: {}".format(file_path)) + sys.exit(1) + + +def x_www_form_urlencoded(post_data): + """ convert origin dict to x-www-form-urlencoded + + Args: + post_data (dict): + {"a": 1, "b":2} + + Returns: + str: + a=1&b=2 + + """ + if isinstance(post_data, dict): + return "&".join( + [u"{}={}".format(key, value) for key, value in post_data.items()] + ) + else: + return post_data + + +def convert_x_www_form_urlencoded_to_dict(post_data): + """ convert x_www_form_urlencoded data to dict + + Args: + post_data (str): a=1&b=2 + + Returns: + dict: {"a":1, "b":2} + + """ + if isinstance(post_data, str): + converted_dict = {} + for k_v in post_data.split("&"): + try: + key, value = k_v.split("=") + except ValueError: + raise Exception( + "Invalid x_www_form_urlencoded data format: {}".format(post_data) + ) + converted_dict[key] = unquote(value) + return converted_dict + else: + return post_data + + +def convert_list_to_dict(origin_list): + """ convert HAR data list to mapping + + Args: + origin_list (list) + [ + {"name": "v", "value": "1"}, + {"name": "w", "value": "2"} + ] + + Returns: + dict: + {"v": "1", "w": "2"} + + """ + return {item["name"]: item.get("value") for item in origin_list} + + +def dump_yaml(testcase, 
yaml_file): + """ dump HAR entries to yaml testcase + """ + logging.info("dump testcase to YAML format.") + + with io.open(yaml_file, "w", encoding="utf-8") as outfile: + yaml.dump( + testcase, outfile, allow_unicode=True, default_flow_style=False, indent=4 + ) + + logging.info("Generate YAML testcase successfully: {}".format(yaml_file)) + + +def dump_json(testcase, json_file): + """ dump HAR entries to json testcase + """ + logging.info("dump testcase to JSON format.") + + with io.open(json_file, "w", encoding="utf-8") as outfile: + my_json_str = json.dumps(testcase, ensure_ascii=False, indent=4) + if isinstance(my_json_str, bytes): + my_json_str = my_json_str.decode("utf-8") + + outfile.write(my_json_str) + + logging.info("Generate JSON testcase successfully: {}".format(json_file)) diff --git a/httprunner/ext/har2case/utils_test.py b/httprunner/ext/har2case/utils_test.py new file mode 100644 index 00000000..38e34dd5 --- /dev/null +++ b/httprunner/ext/har2case/utils_test.py @@ -0,0 +1,54 @@ +import json +import os +import unittest + +from httprunner.ext.har2case import utils + + +class TestUtils(unittest.TestCase): + @staticmethod + def create_har_file(file_name, content): + file_path = os.path.join( + os.path.dirname(__file__), "data", "{}.har".format(file_name) + ) + with open(file_path, "w") as f: + f.write(json.dumps(content)) + + return file_path + + def test_load_har_log_entries(self): + har_path = os.path.join(os.path.dirname(__file__), "data", "demo.har") + log_entries = utils.load_har_log_entries(har_path) + self.assertIsInstance(log_entries, list) + self.assertIn("request", log_entries[0]) + self.assertIn("response", log_entries[0]) + + def test_load_har_log_key_error(self): + empty_json_file_path = TestUtils.create_har_file( + file_name="empty_json", content={} + ) + with self.assertRaises(SystemExit): + utils.load_har_log_entries(empty_json_file_path) + os.remove(empty_json_file_path) + + def test_load_har_log_empty_error(self): + empty_file_path = 
TestUtils.create_har_file(file_name="empty", content="") + with self.assertRaises(SystemExit): + utils.load_har_log_entries(empty_file_path) + os.remove(empty_file_path) + + # def test_x_www_form_urlencoded(self): + # origin_dict = {"a":1, "b": "2"} + # self.assertIn("a=1", utils.x_www_form_urlencoded(origin_dict)) + # self.assertIn("b=2", utils.x_www_form_urlencoded(origin_dict)) + + def test_convert_list_to_dict(self): + origin_list = [{"name": "v", "value": "1"}, {"name": "w", "value": "2"}] + self.assertEqual(utils.convert_list_to_dict(origin_list), {"v": "1", "w": "2"}) + + def test_convert_x_www_form_urlencoded_to_dict(self): + origin_str = "a=1&b=2" + converted_dict = utils.convert_x_www_form_urlencoded_to_dict(origin_str) + self.assertIsInstance(converted_dict, dict) + self.assertEqual(converted_dict["a"], "1") + self.assertEqual(converted_dict["b"], "2") diff --git a/httprunner/ext/locusts/README.md b/httprunner/ext/locusts/README.md deleted file mode 100644 index 6b52620e..00000000 --- a/httprunner/ext/locusts/README.md +++ /dev/null @@ -1,104 +0,0 @@ -# locusts - -## Installation - -```shell script -$ pip install locustio -``` - -## Usage - -```shell script -$ locusts -f xxx.yml -``` - -```shell script -$ locusts -f xxx.yml --processes -``` - -```shell script -$ python3 -m httprunner.ext.locusts -h - -Usage: locust [options] [LocustClass [LocustClass2 ... ]] - -Options: - -h, --help show this help message and exit - -H HOST, --host=HOST Host to load test in the following format: - http://10.21.32.33 - --web-host=WEB_HOST Host to bind the web interface to. Defaults to '' (all - interfaces) - -P PORT, --port=PORT, --web-port=PORT - Port on which to run web host - -f LOCUSTFILE, --locustfile=LOCUSTFILE - Python module file to import, e.g. '../other.py'. - Default: locustfile - --csv=CSVFILEBASE, --csv-base-name=CSVFILEBASE - Store current request stats to files in CSV format. 
- --master Set locust to run in distributed mode with this - process as master - --slave Set locust to run in distributed mode with this - process as slave - --master-host=MASTER_HOST - Host or IP address of locust master for distributed - load testing. Only used when running with --slave. - Defaults to 127.0.0.1. - --master-port=MASTER_PORT - The port to connect to that is used by the locust - master for distributed load testing. Only used when - running with --slave. Defaults to 5557. Note that - slaves will also connect to the master node on this - port + 1. - --master-bind-host=MASTER_BIND_HOST - Interfaces (hostname, ip) that locust master should - bind to. Only used when running with --master. - Defaults to * (all available interfaces). - --master-bind-port=MASTER_BIND_PORT - Port that locust master should bind to. Only used when - running with --master. Defaults to 5557. Note that - Locust will also use this port + 1, so by default the - master node will bind to 5557 and 5558. - --heartbeat-liveness=HEARTBEAT_LIVENESS - set number of seconds before failed heartbeat from - slave - --heartbeat-interval=HEARTBEAT_INTERVAL - set number of seconds delay between slave heartbeats - to master - --expect-slaves=EXPECT_SLAVES - How many slaves master should expect to connect before - starting the test (only when --no-web used). - --no-web Disable the web interface, and instead start running - the test immediately. Requires -c and -r to be - specified. - -c NUM_CLIENTS, --clients=NUM_CLIENTS - Number of concurrent Locust users. Only used together - with --no-web - -r HATCH_RATE, --hatch-rate=HATCH_RATE - The rate per second in which clients are spawned. Only - used together with --no-web - -t RUN_TIME, --run-time=RUN_TIME - Stop after the specified amount of time, e.g. (300s, - 20m, 3h, 1h30m, etc.). Only used together with --no- - web - -L LOGLEVEL, --loglevel=LOGLEVEL - Choose between DEBUG/INFO/WARNING/ERROR/CRITICAL. - Default is INFO. 
- --logfile=LOGFILE Path to log file. If not set, log will go to - stdout/stderr - --print-stats Print stats in the console - --only-summary Only print the summary stats - --no-reset-stats [DEPRECATED] Do not reset statistics once hatching has - been completed. This is now the default behavior. See - --reset-stats to disable - --reset-stats Reset statistics once hatching has been completed. - Should be set on both master and slaves when running - in distributed mode - -l, --list Show list of possible locust classes and exit - --show-task-ratio print table of the locust classes' task execution - ratio - --show-task-ratio-json - print json data of the locust classes' task execution - ratio - -V, --version show program's version number and exit - --exit-code-on-error=EXIT_CODE_ON_ERROR - sets the exit code to post on error -``` diff --git a/httprunner/ext/locusts/__main__.py b/httprunner/ext/locusts/__main__.py deleted file mode 100644 index bc8d706f..00000000 --- a/httprunner/ext/locusts/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -from httprunner.ext.locusts.cli import main - -if __name__ == "__main__": - main() diff --git a/httprunner/ext/locusts/cli.py b/httprunner/ext/locusts/cli.py deleted file mode 100644 index 22e80c7c..00000000 --- a/httprunner/ext/locusts/cli.py +++ /dev/null @@ -1,174 +0,0 @@ -try: - # monkey patch ssl at beginning to avoid RecursionError when running locust. - from gevent import monkey - monkey.patch_ssl() - from locust import main as locust_main -except ImportError: - msg = """ -Locust is not installed, install first and try again. -install with pip: -$ pip install locustio -""" - print(msg) - import sys - sys.exit(0) - -import io -import multiprocessing -import os -import sys - -from loguru import logger - -from httprunner import __version__ - - -def parse_locustfile(file_path): - """ parse testcase file and return locustfile path. 
- if file_path is a Python file, assume it is a locustfile - if file_path is a YAML/JSON file, convert it to locustfile - """ - if not os.path.isfile(file_path): - logger.error("file path invalid, exit.") - sys.exit(1) - - file_suffix = os.path.splitext(file_path)[1] - if file_suffix == ".py": - locustfile_path = file_path - elif file_suffix in ['.yaml', '.yml', '.json']: - locustfile_path = gen_locustfile(file_path) - else: - # '' or other suffix - logger.error("file type should be YAML/JSON/Python, exit.") - sys.exit(1) - - return locustfile_path - - -def gen_locustfile(testcase_file_path): - """ generate locustfile from template. - """ - locustfile_path = 'locustfile.py' - template_path = os.path.join( - os.path.dirname(os.path.realpath(__file__)), - "locustfile_template.py" - ) - - with io.open(template_path, encoding='utf-8') as template: - with io.open(locustfile_path, 'w', encoding='utf-8') as locustfile: - template_content = template.read() - template_content = template_content.replace("$TESTCASE_FILE", testcase_file_path) - locustfile.write(template_content) - - return locustfile_path - - -def start_locust_main(): - locust_main.main() - - -def start_master(sys_argv): - sys_argv.append("--master") - sys.argv = sys_argv - start_locust_main() - - -def start_slave(sys_argv): - if "--slave" not in sys_argv: - sys_argv.extend(["--slave"]) - - sys.argv = sys_argv - start_locust_main() - - -def run_locusts_with_processes(sys_argv, processes_count): - processes = [] - manager = multiprocessing.Manager() - - for _ in range(processes_count): - p_slave = multiprocessing.Process(target=start_slave, args=(sys_argv,)) - p_slave.daemon = True - p_slave.start() - processes.append(p_slave) - - try: - if "--slave" in sys_argv: - [process.join() for process in processes] - else: - start_master(sys_argv) - except KeyboardInterrupt: - manager.shutdown() - - -def main(): - """ Performance test with locust: parse command line options and run commands. 
- """ - print(f"HttpRunner version: {__version__}") - sys.argv[0] = 'locust' - if len(sys.argv) == 1: - sys.argv.extend(["-h"]) - - if sys.argv[1] in ["-h", "--help", "-V", "--version"]: - start_locust_main() - - def get_arg_index(*target_args): - for arg in target_args: - if arg not in sys.argv: - continue - - return sys.argv.index(arg) + 1 - - return None - - # set logging level - loglevel_index = get_arg_index("-L", "--loglevel") - if loglevel_index and loglevel_index < len(sys.argv): - loglevel = sys.argv[loglevel_index] - loglevel = loglevel.upper() - else: - # default - loglevel = "WARNING" - - logger.remove() - logger.add(sys.stdout, level=loglevel) - - # get testcase file path - try: - testcase_index = get_arg_index("-f", "--locustfile") - assert testcase_index and testcase_index < len(sys.argv) - except AssertionError: - print("Testcase file is not specified, exit.") - sys.exit(1) - - testcase_file_path = sys.argv[testcase_index] - sys.argv[testcase_index] = parse_locustfile(testcase_file_path) - - if "--processes" in sys.argv: - """ locusts -f locustfile.py --processes 4 - """ - if "--no-web" in sys.argv: - logger.error("conflict parameter args: --processes & --no-web. 
\nexit.") - sys.exit(1) - - processes_index = sys.argv.index('--processes') - processes_count_index = processes_index + 1 - if processes_count_index >= len(sys.argv): - """ do not specify processes count explicitly - locusts -f locustfile.py --processes - """ - processes_count = multiprocessing.cpu_count() - logger.warning(f"processes count not specified, use {processes_count} by default.") - else: - try: - """ locusts -f locustfile.py --processes 4 """ - processes_count = int(sys.argv[processes_count_index]) - sys.argv.pop(processes_count_index) - except ValueError: - """ locusts -f locustfile.py --processes -P 8888 """ - processes_count = multiprocessing.cpu_count() - logger.warning(f"processes count not specified, use {processes_count} by default.") - - sys.argv.pop(processes_index) - run_locusts_with_processes(sys.argv, processes_count) - else: - start_locust_main() diff --git a/httprunner/ext/locusts/locustfile_template.py b/httprunner/ext/locusts/locustfile_template.py deleted file mode 100644 index 1ad06eb7..00000000 --- a/httprunner/ext/locusts/locustfile_template.py +++ /dev/null @@ -1,43 +0,0 @@ -import logging -import random - -from locust import HttpLocust, TaskSet, task -from locust.events import request_failure - -from httprunner.exceptions import MyBaseError, MyBaseFailure -from httprunner.ext.locusts.utils import prepare_locust_tests -from httprunner.runner import Runner - -logging.getLogger().setLevel(logging.CRITICAL) -logging.getLogger('locust.main').setLevel(logging.INFO) -logging.getLogger('locust.runners').setLevel(logging.INFO) - - -class WebPageTasks(TaskSet): - def on_start(self): - config = {} - self.test_runner = Runner(config, self.client) - - @task - def test_any(self): - test_dict = random.choice(self.locust.tests) - try: - self.test_runner.run_test(test_dict) - except (AssertionError, MyBaseError, MyBaseFailure) as ex: - request_failure.fire( - request_type=self.test_runner.exception_request_type, - 
name=self.test_runner.exception_name, - response_time=0, - exception=ex - ) - - -class WebPageUser(HttpLocust): - host = "" - task_set = WebPageTasks - min_wait = 10 - max_wait = 30 - - # file_path is generated on locusts startup - file_path = "$TESTCASE_FILE" - tests = prepare_locust_tests(file_path) diff --git a/httprunner/ext/locusts/utils.py b/httprunner/ext/locusts/utils.py deleted file mode 100644 index e1d5d881..00000000 --- a/httprunner/ext/locusts/utils.py +++ /dev/null @@ -1,29 +0,0 @@ -from httprunner import loader, parser - - -def prepare_locust_tests(path): - """ prepare locust testcases - - Args: - path (str): testcase file path. - - Returns: - list: locust tests data - - [ - testcase1_dict, - testcase2_dict - ] - - """ - tests_mapping = loader.load_cases(path) - testcases = parser.parse_tests(tests_mapping) - - locust_tests = [] - - for testcase in testcases: - testcase_weight = testcase.get("config", {}).pop("weight", 1) - for _ in range(testcase_weight): - locust_tests.append(testcase) - - return locust_tests diff --git a/httprunner/ext/make/__init__.py b/httprunner/ext/make/__init__.py new file mode 100644 index 00000000..bebbc762 --- /dev/null +++ b/httprunner/ext/make/__init__.py @@ -0,0 +1,128 @@ +import os +import subprocess +from typing import Union, Text, List + +import jinja2 +from loguru import logger + +from httprunner import exceptions +from httprunner.exceptions import TestCaseFormatError +from httprunner.loader import load_testcase_file, load_folder_files + +__TMPL__ = """# NOTICE: Generated By HttpRunner. DO'NOT EDIT! 
+from httprunner import HttpRunner, TConfig, TStep + + +class {{ class_name }}(HttpRunner): + config = TConfig(**{{ config }}) + + teststeps = [ + {% for teststep in teststeps %} + TStep(**{{ teststep }}), + {% endfor %} + ] + +if __name__ == "__main__": + {{ class_name }}().test_start() + +""" + + +def make_testcase(testcase_path: str) -> Union[str, None]: + logger.info(f"start to make testcase: {testcase_path}") + try: + testcase, _ = load_testcase_file(testcase_path) + except TestCaseFormatError: + return None + + template = jinja2.Template(__TMPL__) + + raw_file_name, _ = os.path.splitext(os.path.basename(testcase_path)) + # convert title case, e.g. request_with_variables => RequestWithVariables + name_in_title_case = raw_file_name.title().replace("_", "") + + testcase_dir = os.path.dirname(testcase_path) + testcase_python_path = os.path.join(testcase_dir, f"{raw_file_name}_test.py") + + config = testcase["config"] + config["path"] = testcase_python_path + data = { + "class_name": f"TestCase{name_in_title_case}", + "config": config, + "teststeps": testcase["teststeps"], + } + content = template.render(data) + + with open(testcase_python_path, "w") as f: + f.write(content) + + logger.info(f"generated testcase: {testcase_python_path}") + return testcase_python_path + + +def convert_testcase_path(testcase_path: Text) -> Text: + """convert single YAML/JSON testcase path to python file""" + if os.path.isdir(testcase_path): + # folder does not need to convert + return testcase_path + + file_suffix = os.path.splitext(testcase_path)[1].lower() + if file_suffix == ".json": + return testcase_path.replace(".json", "_test.py") + elif file_suffix == ".yaml": + return testcase_path.replace(".yaml", "_test.py") + elif file_suffix == ".yml": + return testcase_path.replace(".yml", "_test.py") + else: + raise exceptions.ParamsError("") + + +def format_with_black(tests_path: Text): + logger.info("format testcases with black ...") + tests_path = convert_testcase_path(tests_path) + 
try: + subprocess.run(["black", tests_path]) + except subprocess.CalledProcessError as ex: + logger.error(ex) + + +def make(tests_path: Text) -> List: + testcases = [] + if os.path.isdir(tests_path): + files_list = load_folder_files(tests_path) + testcases.extend(files_list) + elif os.path.isfile(tests_path): + testcases.append(tests_path) + else: + raise exceptions.TestcaseNotFound(f"Invalid tests path: {tests_path}") + + testcase_path_list = [] + for testcase_path in testcases: + testcase_path = make_testcase(testcase_path) + if not testcase_path: + continue + testcase_path_list.append(testcase_path) + + format_with_black(tests_path) + return testcase_path_list + + +def main_make(tests_paths: List[Text]) -> List: + testcase_path_list = [] + for tests_path in tests_paths: + testcase_path_list.extend(make(tests_path)) + + return testcase_path_list + + +def init_make_parser(subparsers): + """ make testcases: parse command line options and run commands. + """ + parser = subparsers.add_parser( + "make", help="Convert YAML/JSON testcases to Python unittests.", + ) + parser.add_argument( + "testcase_path", nargs="*", help="Specify YAML/JSON testcase file/folder path" + ) + + return parser diff --git a/httprunner/ext/make/make_test.py b/httprunner/ext/make/make_test.py new file mode 100644 index 00000000..14458bf0 --- /dev/null +++ b/httprunner/ext/make/make_test.py @@ -0,0 +1,20 @@ +import unittest +from httprunner.ext.make import make_testcase, main_make + + +class TestLoader(unittest.TestCase): + def test_make_testcase(self): + path = "examples/postman_echo/request_methods/request_with_variables.yml" + testcase_python_path = make_testcase(path) + self.assertEqual( + testcase_python_path, + "examples/postman_echo/request_methods/request_with_variables_test.py", + ) + + def test_make_testcase_folder(self): + path = ["examples/postman_echo/request_methods/"] + testcase_python_list = main_make(path) + self.assertIn( + 
"examples/postman_echo/request_methods/request_with_functions_test.py", + testcase_python_list, + ) diff --git a/httprunner/ext/scaffold/__init__.py b/httprunner/ext/scaffold/__init__.py new file mode 100644 index 00000000..5204156e --- /dev/null +++ b/httprunner/ext/scaffold/__init__.py @@ -0,0 +1,132 @@ +import os.path +import sys + +from loguru import logger + + +def init_parser_scaffold(subparsers): + sub_parser_scaffold = subparsers.add_parser( + "startproject", help="Create a new project with template structure." + ) + sub_parser_scaffold.add_argument( + "project_name", type=str, nargs="?", help="Specify new project name." + ) + return sub_parser_scaffold + + +def create_scaffold(project_name): + """ create scaffold with specified project name. + """ + if os.path.isdir(project_name): + logger.warning( + f"Folder {project_name} exists, please specify a new folder name." + ) + return + + logger.info(f"Start to create new project: {project_name}") + logger.info(f"CWD: {os.getcwd()}") + + def create_folder(path): + os.makedirs(path) + msg = f"created folder: {path}" + logger.info(msg) + + def create_file(path, file_content=""): + with open(path, "w") as f: + f.write(file_content) + msg = f"created file: {path}" + logger.info(msg) + + demo_api_content = """ +name: demo api +variables: + var1: value1 + var2: value2 +request: + url: /api/path/$var1 + method: POST + headers: + Content-Type: "application/json" + json: + key: $var2 +validate: + - eq: ["status_code", 200] +""" + demo_testcase_content = """ +config: + name: "demo testcase" + variables: + device_sn: "ABC" + username: ${ENV(USERNAME)} + password: ${ENV(PASSWORD)} + base_url: "http://127.0.0.1:5000" + +teststeps: +- + name: demo step 1 + api: path/to/api1.yml + variables: + user_agent: 'iOS/10.3' + device_sn: $device_sn + extract: + token: content.token + validate: + - eq: ["status_code", 200] +- + name: demo step 2 + api: path/to/api2.yml + variables: + token: $token +""" + demo_testsuite_content = """ 
+config: + name: "demo testsuite" + variables: + device_sn: "XYZ" + base_url: "http://127.0.0.1:5000" + +testcases: +- + name: call demo_testcase with data 1 + testcase: path/to/demo_testcase.yml + variables: + device_sn: $device_sn +- + name: call demo_testcase with data 2 + testcase: path/to/demo_testcase.yml + variables: + device_sn: $device_sn +""" + ignore_content = "\n".join( + [".env", "reports/*", "__pycache__/*", "*.pyc", ".python-version", "logs/*"] + ) + demo_debugtalk_content = """ +import time + +def sleep(n_secs): + time.sleep(n_secs) +""" + demo_env_content = "\n".join(["USERNAME=leolee", "PASSWORD=123456"]) + + create_folder(project_name) + create_folder(os.path.join(project_name, "api")) + create_folder(os.path.join(project_name, "testcases")) + create_folder(os.path.join(project_name, "testsuites")) + create_folder(os.path.join(project_name, "reports")) + create_file(os.path.join(project_name, "api", "demo_api.yml"), demo_api_content) + create_file( + os.path.join(project_name, "testcases", "demo_testcase.yml"), + demo_testcase_content, + ) + create_file( + os.path.join(project_name, "testsuites", "demo_testsuite.yml"), + demo_testsuite_content, + ) + create_file(os.path.join(project_name, "debugtalk.py"), demo_debugtalk_content) + create_file(os.path.join(project_name, ".env"), demo_env_content) + create_file(os.path.join(project_name, ".gitignore"), ignore_content) + + +def main_scaffold(args): + create_scaffold(args.project_name) + sys.exit(0) diff --git a/httprunner/ext/scaffold/scaffold_test.py b/httprunner/ext/scaffold/scaffold_test.py new file mode 100644 index 00000000..9c559ae1 --- /dev/null +++ b/httprunner/ext/scaffold/scaffold_test.py @@ -0,0 +1,18 @@ +import os +import shutil +import unittest + +from httprunner.ext.scaffold import create_scaffold + + +class TestUtils(unittest.TestCase): + def test_create_scaffold(self): + project_name = "projectABC" + create_scaffold(project_name) + 
self.assertTrue(os.path.isdir(os.path.join(project_name, "api"))) + self.assertTrue(os.path.isdir(os.path.join(project_name, "testcases"))) + self.assertTrue(os.path.isdir(os.path.join(project_name, "testsuites"))) + self.assertTrue(os.path.isdir(os.path.join(project_name, "reports"))) + self.assertTrue(os.path.isfile(os.path.join(project_name, "debugtalk.py"))) + self.assertTrue(os.path.isfile(os.path.join(project_name, ".env"))) + shutil.rmtree(project_name) diff --git a/httprunner/ext/uploader/__init__.py b/httprunner/ext/uploader/__init__.py index 284c10c3..2f989953 100644 --- a/httprunner/ext/uploader/__init__.py +++ b/httprunner/ext/uploader/__init__.py @@ -44,6 +44,10 @@ For compatibility, you can also write upload test script in old way: import os import sys +from typing import Text, NoReturn + +from httprunner.parser import parse_variables_mapping +from httprunner.schema import TStep, FunctionsMapping try: import filetype @@ -57,16 +61,13 @@ $ pip install requests_toolbelt filetype print(msg) sys.exit(0) -from httprunner.exceptions import ParamsError - -def prepare_upload_test(test_dict): +def prepare_upload_step(step: TStep, functions: FunctionsMapping) -> "NoReturn": """ preprocess for upload test replace `upload` info with MultipartEncoder Args: - test_dict (dict): - + step: teststep { "variables": {}, "request": { @@ -81,27 +82,29 @@ def prepare_upload_test(test_dict): } } } + functions: functions mapping """ - upload_json = test_dict["request"].pop("upload", {}) - if not upload_json: - raise ParamsError(f"invalid upload info: {upload_json}") + if not step.request.upload: + return params_list = [] - for key, value in upload_json.items(): - test_dict["variables"][key] = value + for key, value in step.request.upload.items(): + step.variables[key] = value params_list.append(f"{key}=${key}") params_str = ", ".join(params_list) - test_dict["variables"]["m_encoder"] = "${multipart_encoder(" + params_str + ")}" + step.variables["m_encoder"] = 
"${multipart_encoder(" + params_str + ")}" - test_dict["request"].setdefault("headers", {}) - test_dict["request"]["headers"]["Content-Type"] = "${multipart_content_type($m_encoder)}" + # parse variables + step.variables = parse_variables_mapping(step.variables, functions) - test_dict["request"]["data"] = "$m_encoder" + step.request.headers["Content-Type"] = "${multipart_content_type($m_encoder)}" + + step.request.data = "$m_encoder" -def multipart_encoder(**kwargs): +def multipart_encoder(**kwargs) -> MultipartEncoder: """ initialize MultipartEncoder with uploading fields. """ @@ -121,8 +124,9 @@ def multipart_encoder(**kwargs): is_exists_file = os.path.isfile(value) else: # value is not absolute file path, check if it is relative file path - from httprunner.loader import get_pwd - _file_path = os.path.join(get_pwd(), value) + from httprunner.loader import project_working_directory + + _file_path = os.path.join(project_working_directory, value) is_exists_file = os.path.isfile(_file_path) if is_exists_file: @@ -130,7 +134,7 @@ def multipart_encoder(**kwargs): filename = os.path.basename(_file_path) mime_type = get_filetype(_file_path) # TODO: fix ResourceWarning for unclosed file - file_handler = open(_file_path, 'rb') + file_handler = open(_file_path, "rb") fields_dict[key] = (filename, file_handler, mime_type) else: fields_dict[key] = value @@ -138,7 +142,7 @@ def multipart_encoder(**kwargs): return MultipartEncoder(fields=fields_dict) -def multipart_content_type(m_encoder): +def multipart_content_type(m_encoder: MultipartEncoder) -> Text: """ prepare Content-Type for request headers """ return m_encoder.content_type diff --git a/httprunner/loader.py b/httprunner/loader.py new file mode 100644 index 00000000..96de4c8f --- /dev/null +++ b/httprunner/loader.py @@ -0,0 +1,411 @@ +import csv +import importlib +import io +import json +import os +import sys +import types +from typing import Tuple, Dict, Union, Text, List, Callable + +import yaml +from loguru import 
logger +from pydantic import ValidationError + +from httprunner import builtin, utils +from httprunner import exceptions +from httprunner.schema import TestCase, ProjectMeta + +try: + # PyYAML version >= 5.1 + # ref: https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation + yaml.warnings({"YAMLLoadWarning": False}) +except AttributeError: + pass + + +project_meta_cached_mapping: Dict[Text, ProjectMeta] = {} +project_working_directory: Union[Text, None] = None + + +def _load_yaml_file(yaml_file: Text) -> Dict: + """ load yaml file and check file content format + """ + with io.open(yaml_file, "r", encoding="utf-8") as stream: + try: + yaml_content = yaml.load(stream) + except yaml.YAMLError as ex: + logger.error(str(ex)) + raise exceptions.FileFormatError + + return yaml_content + + +def _load_json_file(json_file: Text) -> Dict: + """ load json file and check file content format + """ + with io.open(json_file, encoding="utf-8") as data_file: + try: + json_content = json.load(data_file) + except json.JSONDecodeError: + err_msg = f"JSONDecodeError: JSON file format error: {json_file}" + logger.error(err_msg) + raise exceptions.FileFormatError(err_msg) + + return json_content + + +def load_testcase_file(testcase_file: Text) -> Tuple[Dict, TestCase]: + """load testcase file and validate with pydantic model""" + if not os.path.isfile(testcase_file): + raise exceptions.FileNotFound(f"testcase file not exists: {testcase_file}") + + file_suffix = os.path.splitext(testcase_file)[1].lower() + if file_suffix == ".json": + testcase_content = _load_json_file(testcase_file) + elif file_suffix in [".yaml", ".yml"]: + testcase_content = _load_yaml_file(testcase_file) + else: + # '' or other suffix + raise exceptions.FileFormatError( + f"testcase file should be YAML/JSON format, invalid testcase file: {testcase_file}" + ) + + try: + # validate with pydantic TestCase model + testcase_obj = TestCase.parse_obj(testcase_content) + except ValidationError as ex: + err_msg = 
f"Invalid testcase format: {testcase_file}" + logger.error(f"{err_msg}\n{ex}") + raise exceptions.TestCaseFormatError(err_msg) + + testcase_content["config"]["path"] = testcase_file + testcase_obj.config.path = testcase_file + + return testcase_content, testcase_obj + + +def load_dot_env_file(dot_env_path: Text) -> Dict: + """ load .env file. + + Args: + dot_env_path (str): .env file path + + Returns: + dict: environment variables mapping + + { + "UserName": "debugtalk", + "Password": "123456", + "PROJECT_KEY": "ABCDEFGH" + } + + Raises: + exceptions.FileFormatError: If .env file format is invalid. + + """ + if not os.path.isfile(dot_env_path): + return {} + + logger.info(f"Loading environment variables from {dot_env_path}") + env_variables_mapping = {} + + with io.open(dot_env_path, "r", encoding="utf-8") as fp: + for line in fp: + # maxsplit=1 + if "=" in line: + variable, value = line.split("=", 1) + elif ":" in line: + variable, value = line.split(":", 1) + else: + raise exceptions.FileFormatError(".env format error") + + env_variables_mapping[variable.strip()] = value.strip() + + utils.set_os_environ(env_variables_mapping) + return env_variables_mapping + + +def load_csv_file(csv_file: Text) -> List[Dict]: + """ load csv file and check file content format + + Args: + csv_file (str): csv file path, csv file content is like below: + + Returns: + list: list of parameters, each parameter is in dict format + + Examples: + >>> cat csv_file + username,password + test1,111111 + test2,222222 + test3,333333 + + >>> load_csv_file(csv_file) + [ + {'username': 'test1', 'password': '111111'}, + {'username': 'test2', 'password': '222222'}, + {'username': 'test3', 'password': '333333'} + ] + + """ + if not os.path.isabs(csv_file): + global project_working_directory + if project_working_directory is None: + raise exceptions.MyBaseFailure("load_project_meta() has not been called!") + + # make compatible with Windows/Linux + csv_file = os.path.join(project_working_directory, 
*csv_file.split("/")) + + if not os.path.isfile(csv_file): + # file path not exist + raise exceptions.CSVNotFound(csv_file) + + csv_content_list = [] + + with io.open(csv_file, encoding="utf-8") as csvfile: + reader = csv.DictReader(csvfile) + for row in reader: + csv_content_list.append(row) + + return csv_content_list + + +def load_folder_files(folder_path: Text, recursive: bool = True) -> List: + """ load folder path, return all files endswith yml/yaml/json in list. + + Args: + folder_path (str): specified folder path to load + recursive (bool): load files recursively if True + + Returns: + list: files endswith yml/yaml/json + """ + if isinstance(folder_path, (list, set)): + files = [] + for path in set(folder_path): + files.extend(load_folder_files(path, recursive)) + + return files + + if not os.path.exists(folder_path): + return [] + + file_list = [] + + for dirpath, dirnames, filenames in os.walk(folder_path): + filenames_list = [] + + for filename in filenames: + if not filename.endswith((".yml", ".yaml", ".json")): + continue + + filenames_list.append(filename) + + for filename in filenames_list: + file_path = os.path.join(dirpath, filename) + file_list.append(file_path) + + if not recursive: + break + + return file_list + + +def load_module_functions(module) -> Dict[Text, Callable]: + """ load python module functions. + + Args: + module: python module + + Returns: + dict: functions mapping for specified python module + + { + "func1_name": func1, + "func2_name": func2 + } + + """ + module_functions = {} + + for name, item in vars(module).items(): + if isinstance(item, types.FunctionType): + module_functions[name] = item + + return module_functions + + +def load_builtin_functions() -> Dict[Text, Callable]: + """ load builtin module functions + """ + return load_module_functions(builtin) + + +def locate_file(start_path: Text, file_name: Text) -> Text: + """ locate filename and return absolute file path. 
+ searching will be recursive upward until current working directory or system root dir. + + Args: + file_name (str): target locate file name + start_path (str): start locating path, maybe file path or directory path + + Returns: + str: located file path. + + Raises: + exceptions.FileNotFound: If failed to locate file. + + """ + if os.path.isfile(start_path): + start_dir_path = os.path.dirname(start_path) + elif os.path.isdir(start_path): + start_dir_path = start_path + else: + raise exceptions.FileNotFound(f"invalid path: {start_path}") + + file_path = os.path.join(start_dir_path, file_name) + if os.path.isfile(file_path): + return os.path.abspath(file_path) + + # current working directory + if os.path.abspath(start_dir_path) == os.getcwd(): + raise exceptions.FileNotFound(f"{file_name} not found in {start_path}") + + # system root dir + # Windows, e.g. 'E:\\' + # Linux/Darwin, '/' + parent_dir = os.path.dirname(start_dir_path) + if parent_dir == start_dir_path: + raise exceptions.FileNotFound(f"{file_name} not found in {start_path}") + + # locate recursive upward + return locate_file(parent_dir, file_name) + + +def locate_debugtalk_py(start_path: Text) -> Text: + """ locate debugtalk.py file + + Args: + start_path (str): start locating path, + maybe testcase file path or directory path + + Returns: + str: debugtalk.py file path, None if not found + + """ + try: + # locate debugtalk.py file. 
+ debugtalk_path = locate_file(start_path, "debugtalk.py") + except exceptions.FileNotFound: + debugtalk_path = None + + return debugtalk_path + + +def init_project_working_directory(test_path: Text) -> Tuple[Text, Text]: + """ this should be called at startup + + run test file: + run_path -> load_cases -> load_project_data -> init_project_working_directory + or run passed in data structure: + run -> init_project_working_directory + + Args: + test_path: specified testfile path + + Returns: + (str, str): debugtalk.py path, project_working_directory + + """ + + def prepare_path(path): + if not os.path.exists(path): + err_msg = f"path not exist: {path}" + logger.error(err_msg) + raise exceptions.FileNotFound(err_msg) + + if not os.path.isabs(path): + path = os.path.join(os.getcwd(), path) + + return path + + test_path = prepare_path(test_path) + + # locate debugtalk.py file + debugtalk_path = locate_debugtalk_py(test_path) + + global project_working_directory + if debugtalk_path: + # The folder contains debugtalk.py will be treated as PWD. + project_working_directory = os.path.dirname(debugtalk_path) + else: + # debugtalk.py not found, use os.getcwd() as PWD. + project_working_directory = os.getcwd() + + # add PWD to sys.path + sys.path.insert(0, project_working_directory) + + return debugtalk_path, project_working_directory + + +def load_debugtalk_functions() -> Dict[Text, Callable]: + """ load project debugtalk.py module functions + debugtalk.py should be located in project working directory. + + Returns: + dict: debugtalk module functions mapping + { + "func1_name": func1, + "func2_name": func2 + } + + """ + # load debugtalk.py module + imported_module = importlib.import_module("debugtalk") + return load_module_functions(imported_module) + + +def load_project_meta(test_path: Text) -> ProjectMeta: + """ load api, testcases, .env, debugtalk.py functions. 
+ api/testcases folder is relative to project_working_directory + + Args: + test_path (str): test file/folder path, locate pwd from this path. + + Returns: + project loaded api/testcases definitions, + environments and debugtalk.py functions. + + """ + if test_path in project_meta_cached_mapping: + return project_meta_cached_mapping[test_path] + + debugtalk_path, project_working_directory = init_project_working_directory( + test_path + ) + + project_meta = ProjectMeta() + + # load .env file + # NOTICE: + # environment variable maybe loaded in debugtalk.py + # thus .env file should be loaded before loading debugtalk.py + dot_env_path = os.path.join(project_working_directory, ".env") + project_meta.env = load_dot_env_file(dot_env_path) + + if debugtalk_path: + # load debugtalk.py functions + debugtalk_functions = load_debugtalk_functions() + else: + debugtalk_functions = {} + + # locate PWD and load debugtalk.py functions + project_meta.PWD = project_working_directory + project_meta.functions = debugtalk_functions + project_meta.test_path = os.path.abspath(test_path)[ + len(project_working_directory) + 1 : + ] + + project_meta_cached_mapping[test_path] = project_meta + return project_meta diff --git a/httprunner/loader/__init__.py b/httprunner/loader/__init__.py deleted file mode 100644 index 022cf410..00000000 --- a/httprunner/loader/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -HttpRunner loader - -- check: validate api/testcase/testsuite data structure with JSON schema -- locate: locate debugtalk.py, make it's dir as project root path -- load: load testcase files and relevant data, including debugtalk.py, .env, yaml/json api/testcases, csv, etc. 
-- buildup: assemble loaded content to httprunner testcase/testsuite data structure - -""" - -from httprunner.loader.check import is_test_path, is_test_content, JsonSchemaChecker -from httprunner.loader.locate import get_project_working_directory as get_pwd, \ - init_project_working_directory as init_pwd -from httprunner.loader.load import load_csv_file, load_builtin_functions -from httprunner.loader.buildup import load_cases, load_project_data - -__all__ = [ - "is_test_path", - "is_test_content", - "JsonSchemaChecker", - "get_pwd", - "init_pwd", - "load_csv_file", - "load_builtin_functions", - "load_project_data", - "load_cases" -] diff --git a/httprunner/loader/buildup.py b/httprunner/loader/buildup.py deleted file mode 100644 index 12f2bc7f..00000000 --- a/httprunner/loader/buildup.py +++ /dev/null @@ -1,511 +0,0 @@ -import importlib -import os - -from loguru import logger - -from httprunner import exceptions, utils -from httprunner.loader.check import JsonSchemaChecker -from httprunner.loader.load import load_module_functions, load_file, load_dot_env_file, \ - load_folder_files -from httprunner.loader.locate import init_project_working_directory, get_project_working_directory - -tests_def_mapping = { - "api": {}, - "testcases": {} -} - - -def load_debugtalk_functions(): - """ load project debugtalk.py module functions - debugtalk.py should be located in project working directory. 
- - Returns: - dict: debugtalk module functions mapping - { - "func1_name": func1, - "func2_name": func2 - } - - """ - # load debugtalk.py module - imported_module = importlib.import_module("debugtalk") - return load_module_functions(imported_module) - - -def __extend_with_api_ref(raw_testinfo): - """ extend with api reference - - Raises: - exceptions.ApiNotFound: api not found - - """ - api_name = raw_testinfo["api"] - - # api maybe defined in two types: - # 1, individual file: each file is corresponding to one api definition - # 2, api sets file: one file contains a list of api definitions - if not os.path.isabs(api_name): - # make compatible with Windows/Linux - pwd = get_project_working_directory() - api_path = os.path.join(pwd, *api_name.split("/")) - if os.path.isfile(api_path): - # type 1: api is defined in individual file - api_name = api_path - - if api_name in tests_def_mapping["api"]: - block = tests_def_mapping["api"][api_name] - elif not os.path.isfile(api_name): - raise exceptions.ApiNotFound(f"{api_name} not found!") - else: - block = load_file(api_name) - - # NOTICE: avoid project_mapping been changed during iteration. 
- raw_testinfo["api_def"] = utils.deepcopy_dict(block) - tests_def_mapping["api"][api_name] = block - - -def __extend_with_testcase_ref(raw_testinfo): - """ extend with testcase reference - """ - testcase_path = raw_testinfo["testcase"] - - if testcase_path not in tests_def_mapping["testcases"]: - # make compatible with Windows/Linux - pwd = get_project_working_directory() - testcase_path = os.path.join( - pwd, - *testcase_path.split("/") - ) - loaded_testcase = load_file(testcase_path) - - if isinstance(loaded_testcase, list): - # make compatible with version < 2.2.0 - testcase_dict = load_testcase(loaded_testcase) - elif isinstance(loaded_testcase, dict) and "teststeps" in loaded_testcase: - # format version 2, implemented in 2.2.0 - testcase_dict = load_testcase_v2(loaded_testcase) - else: - raise exceptions.FileFormatError( - f"Invalid format testcase: {testcase_path}") - - tests_def_mapping["testcases"][testcase_path] = testcase_dict - else: - testcase_dict = tests_def_mapping["testcases"][testcase_path] - - raw_testinfo["testcase_def"] = testcase_dict - - -def load_teststep(raw_testinfo): - """ load testcase step content. - teststep maybe defined directly, or reference api/testcase. - - Args: - raw_testinfo (dict): test data, maybe in 3 formats. 
- # api reference - { - "name": "add product to cart", - "api": "/path/to/api", - "variables": {}, - "validate": [], - "extract": {} - } - # testcase reference - { - "name": "add product to cart", - "testcase": "/path/to/testcase", - "variables": {} - } - # define directly - { - "name": "checkout cart", - "request": {}, - "variables": {}, - "validate": [], - "extract": {} - } - - Returns: - dict: loaded teststep content - - """ - # reference api - if "api" in raw_testinfo: - __extend_with_api_ref(raw_testinfo) - - # TODO: reference proc functions - # elif "func" in raw_testinfo: - # pass - - # reference testcase - elif "testcase" in raw_testinfo: - __extend_with_testcase_ref(raw_testinfo) - - # define directly - else: - pass - - return raw_testinfo - - -def load_testcase(raw_testcase): - """ load testcase with api/testcase references. - - Args: - raw_testcase (list): raw testcase content loaded from JSON/YAML file: - [ - # config part - { - "config": { - "name": "XXXX", - "base_url": "https://debugtalk.com" - } - }, - # teststeps part - { - "test": {...} - }, - { - "test": {...} - } - ] - - Returns: - dict: loaded testcase content - { - "config": {}, - "teststeps": [test11, test12] - } - - """ - JsonSchemaChecker.validate_testcase_v1_format(raw_testcase) - config = {} - tests = [] - - for item in raw_testcase: - key, test_block = item.popitem() - if key == "config": - config.update(test_block) - elif key == "test": - tests.append(load_teststep(test_block)) - else: - logger.warning( - f"unexpected block key: {key}. block key should only be 'config' or 'test'." - ) - - return { - "config": config, - "teststeps": tests - } - - -def load_testcase_v2(raw_testcase): - """ load testcase in format version 2. 
- - Args: - raw_testcase (dict): raw testcase content loaded from JSON/YAML file: - { - "config": { - "name": "xxx", - "variables": {} - } - "teststeps": [ - { - "name": "teststep 1", - "request" {...} - }, - { - "name": "teststep 2", - "request" {...} - }, - ] - } - - Returns: - dict: loaded testcase content - { - "config": {}, - "teststeps": [test11, test12] - } - - """ - JsonSchemaChecker.validate_testcase_v2_format(raw_testcase) - raw_teststeps = raw_testcase.pop("teststeps") - raw_testcase["teststeps"] = [ - load_teststep(teststep) - for teststep in raw_teststeps - ] - return raw_testcase - - -def load_testsuite(raw_testsuite): - """ load testsuite with testcase references. - support two different formats. - - Args: - raw_testsuite (dict): raw testsuite content loaded from JSON/YAML file: - # version 1, compatible with version < 2.2.0 - { - "config": { - "name": "xxx", - "variables": {} - } - "testcases": { - "testcase1": { - "testcase": "/path/to/testcase", - "variables": {...}, - "parameters": {...} - }, - "testcase2": {} - } - } - - # version 2, implemented in 2.2.0 - { - "config": { - "name": "xxx", - "variables": {} - } - "testcases": [ - { - "name": "testcase1", - "testcase": "/path/to/testcase", - "variables": {...}, - "parameters": {...} - }, - {} - ] - } - - Returns: - dict: loaded testsuite content - { - "config": {}, - "testcases": [testcase1, testcase2] - } - - """ - raw_testcases = raw_testsuite["testcases"] - - if isinstance(raw_testcases, dict): - # format version 1, make compatible with version < 2.2.0 - JsonSchemaChecker.validate_testsuite_v1_format(raw_testsuite) - raw_testsuite["testcases"] = {} - for name, raw_testcase in raw_testcases.items(): - __extend_with_testcase_ref(raw_testcase) - raw_testcase.setdefault("name", name) - raw_testsuite["testcases"][name] = raw_testcase - - elif isinstance(raw_testcases, list): - # format version 2, implemented in 2.2.0 - JsonSchemaChecker.validate_testsuite_v2_format(raw_testsuite) - 
raw_testsuite["testcases"] = {} - for raw_testcase in raw_testcases: - __extend_with_testcase_ref(raw_testcase) - testcase_name = raw_testcase["name"] - raw_testsuite["testcases"][testcase_name] = raw_testcase - - else: - # invalid format - raise exceptions.FileFormatError("Invalid testsuite format!") - - return raw_testsuite - - -def load_test_file(path): - """ load test file, file maybe testcase/testsuite/api - - Args: - path (str): test file path - - Returns: - dict: loaded test content - - # api - { - "path": path, - "type": "api", - "name": "", - "request": {} - } - - # testcase - { - "path": path, - "type": "testcase", - "config": {}, - "teststeps": [] - } - - # testsuite - { - "path": path, - "type": "testsuite", - "config": {}, - "testcases": {} - } - - """ - raw_content = load_file(path) - - if isinstance(raw_content, dict): - - if "testcases" in raw_content: - # file_type: testsuite - loaded_content = load_testsuite(raw_content) - loaded_content["path"] = path - loaded_content["type"] = "testsuite" - - elif "teststeps" in raw_content: - # file_type: testcase (format version 2) - loaded_content = load_testcase_v2(raw_content) - loaded_content["path"] = path - loaded_content["type"] = "testcase" - - elif "request" in raw_content: - # file_type: api - JsonSchemaChecker.validate_api_format(raw_content) - loaded_content = raw_content - loaded_content["path"] = path - loaded_content["type"] = "api" - - else: - # invalid format - raise exceptions.FileFormatError("Invalid test file format!") - - elif isinstance(raw_content, list) and len(raw_content) > 0: - # file_type: testcase - # make compatible with version < 2.2.0 - loaded_content = load_testcase(raw_content) - loaded_content["path"] = path - loaded_content["type"] = "testcase" - - else: - # invalid format - raise exceptions.FileFormatError("Invalid test file format!") - - return loaded_content - - -def load_project_data(test_path, dot_env_path=None): - """ load api, testcases, .env, debugtalk.py functions. 
- api/testcases folder is relative to project_working_directory - - Args: - test_path (str): test file/folder path, locate pwd from this path. - dot_env_path (str): specified .env file path - - Returns: - dict: project loaded api/testcases definitions, - environments and debugtalk.py functions. - - """ - debugtalk_path, project_working_directory = init_project_working_directory(test_path) - - project_mapping = {} - - # load .env file - # NOTICE: - # environment variable maybe loaded in debugtalk.py - # thus .env file should be loaded before loading debugtalk.py - dot_env_path = dot_env_path or os.path.join(project_working_directory, ".env") - project_mapping["env"] = load_dot_env_file(dot_env_path) - - if debugtalk_path: - # load debugtalk.py functions - debugtalk_functions = load_debugtalk_functions() - else: - debugtalk_functions = {} - - # locate PWD and load debugtalk.py functions - project_mapping["PWD"] = project_working_directory - project_mapping["functions"] = debugtalk_functions - project_mapping["test_path"] = os.path.abspath(test_path)[len(project_working_directory)+1:] - - return project_mapping - - -def load_cases(path, dot_env_path=None): - """ load testcases from file path, extend and merge with api/testcase definitions. - - Args: - path (str): testcase/testsuite file/foler path. - path could be in 2 types: - - absolute/relative file path - - absolute/relative folder path - dot_env_path (str): specified .env file path - - Returns: - dict: tests mapping, include project_mapping and testcases. - each testcase is corresponding to a file. 
- { - "project_mapping": { - "PWD": "XXXXX", - "functions": {}, - "env": {} - }, - "testcases": [ - { # testcase data structure - "config": { - "name": "desc1", - "path": "testcase1_path", - "variables": [], # optional - }, - "teststeps": [ - # test data structure - { - 'name': 'test desc1', - 'variables': [], # optional - 'extract': [], # optional - 'validate': [], - 'request': {} - }, - test_dict_2 # another test dict - ] - }, - testcase_2_dict # another testcase dict - ], - "testsuites": [ - { # testsuite data structure - "config": {}, - "testcases": { - "testcase1": {}, - "testcase2": {}, - } - }, - testsuite_2_dict - ] - } - - """ - - tests_mapping = { - "project_mapping": load_project_data(path, dot_env_path) - } - - def __load_file_content(path): - loaded_content = None - try: - loaded_content = load_test_file(path) - except exceptions.ApiNotFound as ex: - logger.warning(f"Invalid api reference in {path}: {ex}") - except exceptions.FileFormatError: - logger.warning(f"Invalid test file format: {path}") - - if not loaded_content: - pass - elif loaded_content["type"] == "testsuite": - tests_mapping.setdefault("testsuites", []).append(loaded_content) - elif loaded_content["type"] == "testcase": - tests_mapping.setdefault("testcases", []).append(loaded_content) - elif loaded_content["type"] == "api": - tests_mapping.setdefault("apis", []).append(loaded_content) - - if os.path.isdir(path): - files_list = load_folder_files(path) - for path in files_list: - __load_file_content(path) - - elif os.path.isfile(path): - __load_file_content(path) - - return tests_mapping diff --git a/httprunner/loader/buildup_test.py b/httprunner/loader/buildup_test.py deleted file mode 100644 index 1d1b78e8..00000000 --- a/httprunner/loader/buildup_test.py +++ /dev/null @@ -1,291 +0,0 @@ - -import os -import unittest - -from httprunner import exceptions, loader -from httprunner.loader import buildup - - -class TestModuleLoader(unittest.TestCase): - - def 
test_filter_module_functions(self): - module_functions = buildup.load_module_functions(buildup) - self.assertIn("load_module_functions", module_functions) - self.assertNotIn("is_py3", module_functions) - - def test_load_debugtalk_module(self): - project_mapping = buildup.load_project_data(os.path.join(os.getcwd(), "httprunner")) - self.assertNotIn("alter_response", project_mapping["functions"]) - - project_mapping = buildup.load_project_data(os.path.join(os.getcwd(), "tests")) - self.assertIn("alter_response", project_mapping["functions"]) - - is_status_code_200 = project_mapping["functions"]["is_status_code_200"] - self.assertTrue(is_status_code_200(200)) - self.assertFalse(is_status_code_200(500)) - - def test_load_debugtalk_py(self): - project_mapping = buildup.load_project_data("tests/data/demo_testcase.yml") - project_working_directory = project_mapping["PWD"] - debugtalk_functions = project_mapping["functions"] - self.assertEqual( - project_working_directory, - os.path.join(os.getcwd(), "tests") - ) - self.assertIn("gen_md5", debugtalk_functions) - - project_mapping = buildup.load_project_data("tests/base.py") - project_working_directory = project_mapping["PWD"] - debugtalk_functions = project_mapping["functions"] - self.assertEqual( - project_working_directory, - os.path.join(os.getcwd(), "tests") - ) - self.assertIn("gen_md5", debugtalk_functions) - - project_mapping = buildup.load_project_data("httprunner/__init__.py") - project_working_directory = project_mapping["PWD"] - debugtalk_functions = project_mapping["functions"] - self.assertEqual( - project_working_directory, - os.getcwd() - ) - self.assertEqual(debugtalk_functions, {}) - - -class TestSuiteLoader(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.project_mapping = buildup.load_project_data(os.path.join(os.getcwd(), "tests")) - cls.tests_def_mapping = buildup.tests_def_mapping - - def test_load_teststep_api(self): - raw_test = { - "name": "create user (override).", - "api": 
"api/create_user.yml", - "variables": [ - {"uid": "999"} - ] - } - teststep = buildup.load_teststep(raw_test) - self.assertEqual( - "create user (override).", - teststep["name"] - ) - self.assertIn("api_def", teststep) - api_def = teststep["api_def"] - self.assertEqual(api_def["name"], "create user") - self.assertEqual(api_def["request"]["url"], "/api/users/$uid") - - def test_load_teststep_testcase(self): - raw_test = { - "name": "setup and reset all (override).", - "testcase": "testcases/setup.yml", - "variables": [ - {"device_sn": "$device_sn"} - ] - } - testcase = buildup.load_teststep(raw_test) - self.assertEqual( - "setup and reset all (override).", - testcase["name"] - ) - tests = testcase["testcase_def"]["teststeps"] - self.assertEqual(len(tests), 2) - self.assertEqual(tests[0]["name"], "get token (setup)") - self.assertEqual(tests[1]["name"], "reset all users") - - def test_load_test_file_api(self): - loaded_content = buildup.load_test_file("tests/api/create_user.yml") - self.assertEqual(loaded_content["type"], "api") - self.assertIn("path", loaded_content) - self.assertIn("request", loaded_content) - self.assertEqual(loaded_content["request"]["url"], "/api/users/$uid") - - def test_load_test_file_testcase(self): - for loaded_content in [ - buildup.load_test_file("tests/testcases/setup.yml"), - buildup.load_test_file("tests/testcases/setup.json") - ]: - self.assertEqual(loaded_content["type"], "testcase") - self.assertIn("path", loaded_content) - self.assertIn("config", loaded_content) - self.assertEqual(loaded_content["config"]["name"], "setup and reset all.") - self.assertIn("teststeps", loaded_content) - self.assertEqual(len(loaded_content["teststeps"]), 2) - - def test_load_test_file_testcase_v2(self): - for loaded_content in [ - buildup.load_test_file("tests/testcases/setup.v2.yml"), - buildup.load_test_file("tests/testcases/setup.v2.json") - ]: - self.assertEqual(loaded_content["type"], "testcase") - self.assertIn("path", loaded_content) - 
self.assertIn("config", loaded_content) - self.assertEqual(loaded_content["config"]["name"], "setup and reset all.") - self.assertIn("teststeps", loaded_content) - self.assertEqual(len(loaded_content["teststeps"]), 2) - - def test_load_test_file_testsuite(self): - for loaded_content in [ - buildup.load_test_file("tests/testsuites/create_users.yml"), - buildup.load_test_file("tests/testsuites/create_users.json") - ]: - self.assertEqual(loaded_content["type"], "testsuite") - - testcases = loaded_content["testcases"] - self.assertEqual(len(testcases), 2) - self.assertIn('create user 1000 and check result.', testcases) - self.assertIn('testcase_def', testcases["create user 1000 and check result."]) - self.assertEqual( - testcases["create user 1000 and check result."]["testcase_def"]["config"]["name"], - "create user and check result." - ) - - def test_load_test_file_testsuite_v2(self): - for loaded_content in [ - buildup.load_test_file("tests/testsuites/create_users.v2.yml"), - buildup.load_test_file("tests/testsuites/create_users.v2.json") - ]: - self.assertEqual(loaded_content["type"], "testsuite") - - testcases = loaded_content["testcases"] - self.assertEqual(len(testcases), 2) - self.assertIn('create user 1000 and check result.', testcases) - self.assertIn('testcase_def', testcases["create user 1000 and check result."]) - self.assertEqual( - testcases["create user 1000 and check result."]["testcase_def"]["config"]["name"], - "create user and check result." 
- ) - - def test_load_tests_api_file(self): - path = os.path.join( - os.getcwd(), 'tests/api/create_user.yml') - tests_mapping = loader.load_cases(path) - project_mapping = tests_mapping["project_mapping"] - api_list = tests_mapping["apis"] - self.assertEqual(len(api_list), 1) - self.assertEqual(api_list[0]["request"]["url"], "/api/users/$uid") - - def test_load_tests_testcase_file(self): - # absolute file path - path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_hardcode.json') - tests_mapping = loader.load_cases(path) - project_mapping = tests_mapping["project_mapping"] - testcases_list = tests_mapping["testcases"] - self.assertEqual(len(testcases_list), 1) - self.assertEqual(len(testcases_list[0]["teststeps"]), 3) - self.assertIn("get_sign", project_mapping["functions"]) - - # relative file path - path = 'tests/data/demo_testcase_hardcode.yml' - tests_mapping = loader.load_cases(path) - project_mapping = tests_mapping["project_mapping"] - testcases_list = tests_mapping["testcases"] - self.assertEqual(len(testcases_list), 1) - self.assertEqual(len(testcases_list[0]["teststeps"]), 3) - self.assertIn("get_sign", project_mapping["functions"]) - - def test_load_tests_testcase_file_2(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase.yml') - tests_mapping = loader.load_cases(testcase_file_path) - testcases = tests_mapping["testcases"] - self.assertIsInstance(testcases, list) - self.assertEqual(testcases[0]["config"]["name"], '123t$var_a') - self.assertIn( - "sum_two", - tests_mapping["project_mapping"]["functions"] - ) - self.assertEqual( - testcases[0]["config"]["variables"]["var_c"], - "${sum_two($var_a, $var_b)}" - ) - self.assertEqual( - testcases[0]["config"]["variables"]["PROJECT_KEY"], - "${ENV(PROJECT_KEY)}" - ) - - def test_load_tests_testcase_file_with_api_ref(self): - path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_layer.yml') - tests_mapping = loader.load_cases(path) - project_mapping = 
tests_mapping["project_mapping"] - testcases_list = tests_mapping["testcases"] - self.assertIn('device_sn', testcases_list[0]["config"]["variables"]) - self.assertIn("gen_md5", project_mapping["functions"]) - self.assertIn("base_url", testcases_list[0]["config"]) - test_dict0 = testcases_list[0]["teststeps"][0] - self.assertEqual( - "get token with $user_agent, $app_version", - test_dict0["name"] - ) - self.assertIn("/api/get-token", test_dict0["api_def"]["request"]["url"]) - self.assertIn( - {'eq': ['status_code', 200]}, - test_dict0["validate"] - ) - - def test_load_tests_testsuite_file_with_testcase_ref(self): - path = os.path.join( - os.getcwd(), 'tests/testsuites/create_users.yml') - tests_mapping = loader.load_cases(path) - project_mapping = tests_mapping["project_mapping"] - testsuites_list = tests_mapping["testsuites"] - - self.assertEqual( - "create users with uid", - testsuites_list[0]["config"]["name"] - ) - self.assertEqual( - '${gen_random_string(15)}', - testsuites_list[0]["config"]["variables"]['device_sn'] - ) - self.assertIn( - "create user 1000 and check result.", - testsuites_list[0]["testcases"] - ) - - self.assertEqual( - testsuites_list[0]["testcases"]["create user 1000 and check result."]["testcase_def"]["config"]["name"], - "create user and check result." 
- ) - - def test_load_tests_folder_path(self): - # absolute folder path - path = os.path.join(os.getcwd(), 'tests/data') - tests_mapping = loader.load_cases(path) - testcase_list_1 = tests_mapping["testcases"] - self.assertGreater(len(testcase_list_1), 4) - - # relative folder path - path = 'tests/data/' - tests_mapping = loader.load_cases(path) - testcase_list_2 = tests_mapping["testcases"] - self.assertEqual(len(testcase_list_1), len(testcase_list_2)) - - def test_load_tests_path_not_exist(self): - # absolute folder path - path = os.path.join(os.getcwd(), 'tests/data_not_exist') - with self.assertRaises(exceptions.FileNotFound): - loader.load_cases(path) - - # relative folder path - path = 'tests/data_not_exist' - with self.assertRaises(exceptions.FileNotFound): - loader.load_cases(path) - - def test_load_project_tests(self): - buildup.load_project_data(os.path.join(os.getcwd(), "tests")) - self.assertIn("gen_md5", self.project_mapping["functions"]) - self.assertEqual(self.project_mapping["env"]["PROJECT_KEY"], "ABCDEFGH") - self.assertEqual( - os.path.basename(self.project_mapping["PWD"]), - "tests" - ) - self.assertEqual( - os.path.basename(self.project_mapping["test_path"]), - "tests" - ) diff --git a/httprunner/loader/check.py b/httprunner/loader/check.py deleted file mode 100644 index ba248ec7..00000000 --- a/httprunner/loader/check.py +++ /dev/null @@ -1,215 +0,0 @@ -import io -import json -import os -import platform - -import jsonschema -from loguru import logger - -from httprunner import exceptions - -schemas_root_dir = os.path.join(os.path.dirname(__file__), "schemas") -common_schema_path = os.path.join(schemas_root_dir, "common.schema.json") -api_schema_path = os.path.join(schemas_root_dir, "api.schema.json") -testcase_schema_v1_path = os.path.join(schemas_root_dir, "testcase.schema.v1.json") -testcase_schema_v2_path = os.path.join(schemas_root_dir, "testcase.schema.v2.json") -testsuite_schema_v1_path = os.path.join(schemas_root_dir, 
"testsuite.schema.v1.json") -testsuite_schema_v2_path = os.path.join(schemas_root_dir, "testsuite.schema.v2.json") - -with io.open(api_schema_path, encoding='utf-8') as f: - api_schema = json.load(f) - -with io.open(common_schema_path, encoding='utf-8') as f: - if platform.system() == "Windows": - absolute_base_path = 'file:///' + os.path.abspath(schemas_root_dir).replace("\\", "/") + '/' - else: - # Linux, Darwin - absolute_base_path = "file://" + os.path.abspath(schemas_root_dir) + "/" - - common_schema = json.load(f) - resolver = jsonschema.RefResolver(absolute_base_path, common_schema) - -with io.open(testcase_schema_v1_path, encoding='utf-8') as f: - testcase_schema_v1 = json.load(f) - -with io.open(testcase_schema_v2_path, encoding='utf-8') as f: - testcase_schema_v2 = json.load(f) - -with io.open(testsuite_schema_v1_path, encoding='utf-8') as f: - testsuite_schema_v1 = json.load(f) - -with io.open(testsuite_schema_v2_path, encoding='utf-8') as f: - testsuite_schema_v2 = json.load(f) - - -class JsonSchemaChecker(object): - - @staticmethod - def validate_format(content, scheme): - """ check api/testcase/testsuite format if valid - """ - try: - jsonschema.validate(content, scheme, resolver=resolver) - except jsonschema.exceptions.ValidationError as ex: - logger.error(str(ex)) - raise exceptions.FileFormatError - - return True - - @staticmethod - def validate_api_format(content): - """ check api format if valid - """ - return JsonSchemaChecker.validate_format(content, api_schema) - - @staticmethod - def validate_testcase_v1_format(content): - """ check testcase format v1 if valid - """ - return JsonSchemaChecker.validate_format(content, testcase_schema_v1) - - @staticmethod - def validate_testcase_v2_format(content): - """ check testcase format v2 if valid - """ - return JsonSchemaChecker.validate_format(content, testcase_schema_v2) - - @staticmethod - def validate_testsuite_v1_format(content): - """ check testsuite format v1 if valid - """ - return 
JsonSchemaChecker.validate_format(content, testsuite_schema_v1) - - @staticmethod - def validate_testsuite_v2_format(content): - """ check testsuite format v2 if valid - """ - return JsonSchemaChecker.validate_format(content, testsuite_schema_v2) - - -def is_test_path(path): - """ check if path is valid json/yaml file path or a existed directory. - - Args: - path (str/list/tuple): file path/directory or file path list. - - Returns: - bool: True if path is valid file path or path list, otherwise False. - - """ - if not isinstance(path, (str, list, tuple)): - return False - - elif isinstance(path, (list, tuple)): - for p in path: - if not is_test_path(p): - return False - - return True - - else: - # path is string - if not os.path.exists(path): - return False - - # path exists - if os.path.isfile(path): - # path is a file - file_suffix = os.path.splitext(path)[1].lower() - if file_suffix not in ['.json', '.yaml', '.yml']: - # path is not json/yaml file - return False - else: - return True - elif os.path.isdir(path): - # path is a directory - return True - else: - # path is neither a folder nor a file, maybe a symbol link or something else - return False - - -def is_test_content(data_structure): - """ check if data_structure is apis/testcases/testsuites. - - Args: - data_structure (dict): should include keys, apis or testcases or testsuites - - Returns: - bool: True if data_structure is valid apis/testcases/testsuites, otherwise False. 
- - """ - if not isinstance(data_structure, dict): - return False - - if "apis" in data_structure: - # maybe a group of api content - apis = data_structure["apis"] - if not isinstance(apis, list): - return False - - for item in apis: - is_testcase = False - try: - JsonSchemaChecker.validate_api_format(item) - is_testcase = True - except exceptions.FileFormatError: - pass - - if not is_testcase: - return False - - return True - - elif "testcases" in data_structure: - # maybe a testsuite, containing a group of testcases - testcases = data_structure["testcases"] - if not isinstance(testcases, list): - return False - - for item in testcases: - is_testcase = False - try: - JsonSchemaChecker.validate_testcase_v2_format(item) - is_testcase = True - except exceptions.FileFormatError: - pass - - try: - JsonSchemaChecker.validate_testcase_v2_format(item) - is_testcase = True - except exceptions.FileFormatError: - pass - - if not is_testcase: - return False - - return True - - elif "testsuites" in data_structure: - # maybe a group of testsuites - testsuites = data_structure["testsuites"] - if not isinstance(testsuites, list): - return False - - for item in testsuites: - is_testcase = False - try: - JsonSchemaChecker.validate_testsuite_v1_format(item) - is_testcase = True - except exceptions.FileFormatError: - pass - - try: - JsonSchemaChecker.validate_testsuite_v2_format(item) - is_testcase = True - except exceptions.FileFormatError: - pass - - if not is_testcase: - return False - - return True - - else: - return False diff --git a/httprunner/loader/check_test.py b/httprunner/loader/check_test.py deleted file mode 100644 index 62de19b1..00000000 --- a/httprunner/loader/check_test.py +++ /dev/null @@ -1,45 +0,0 @@ -import unittest - -from httprunner.loader import check - - -class TestLoaderCheck(unittest.TestCase): - - def test_is_testcases(self): - data_structure = "path/to/file" - self.assertFalse(check.is_test_content(data_structure)) - data_structure = ["path/to/file1", 
"path/to/file2"] - self.assertFalse(check.is_test_content(data_structure)) - - data_structure = { - "project_mapping": { - "PWD": "XXXXX", - "functions": {}, - "env": {} - }, - "testcases": [ - { # testcase data structure - "config": { - "name": "desc1", - "path": "testcase1_path", - "variables": [], # optional - }, - "teststeps": [ - # test data structure - { - 'name': 'test step desc1', - 'variables': [], # optional - 'extract': {}, # optional - 'validate': [], - 'request': { - "method": "GET", - "url": "https://docs.httprunner.org" - } - }, - # test_dict2 # another test dict - ] - }, - # testcase_dict_2 # another testcase dict - ] - } - self.assertTrue(check.is_test_content(data_structure)) diff --git a/httprunner/loader/load.py b/httprunner/loader/load.py deleted file mode 100644 index fcee9ca3..00000000 --- a/httprunner/loader/load.py +++ /dev/null @@ -1,219 +0,0 @@ -import csv -import io -import json -import os -import types - -import yaml -from loguru import logger - -from httprunner import builtin -from httprunner import exceptions, utils -from httprunner.loader.locate import get_project_working_directory - -try: - # PyYAML version >= 5.1 - # ref: https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation - yaml.warnings({'YAMLLoadWarning': False}) -except AttributeError: - pass - - -def _load_yaml_file(yaml_file): - """ load yaml file and check file content format - """ - with io.open(yaml_file, 'r', encoding='utf-8') as stream: - try: - yaml_content = yaml.load(stream) - except yaml.YAMLError as ex: - logger.error(str(ex)) - raise exceptions.FileFormatError - - return yaml_content - - -def _load_json_file(json_file): - """ load json file and check file content format - """ - with io.open(json_file, encoding='utf-8') as data_file: - try: - json_content = json.load(data_file) - except json.JSONDecodeError: - err_msg = f"JSONDecodeError: JSON file format error: {json_file}" - logger.error(err_msg) - raise exceptions.FileFormatError(err_msg) - - 
return json_content - - -def load_csv_file(csv_file): - """ load csv file and check file content format - - Args: - csv_file (str): csv file path, csv file content is like below: - - Returns: - list: list of parameters, each parameter is in dict format - - Examples: - >>> cat csv_file - username,password - test1,111111 - test2,222222 - test3,333333 - - >>> load_csv_file(csv_file) - [ - {'username': 'test1', 'password': '111111'}, - {'username': 'test2', 'password': '222222'}, - {'username': 'test3', 'password': '333333'} - ] - - """ - if not os.path.isabs(csv_file): - pwd = get_project_working_directory() - # make compatible with Windows/Linux - csv_file = os.path.join(pwd, *csv_file.split("/")) - - if not os.path.isfile(csv_file): - # file path not exist - raise exceptions.CSVNotFound(csv_file) - - csv_content_list = [] - - with io.open(csv_file, encoding='utf-8') as csvfile: - reader = csv.DictReader(csvfile) - for row in reader: - csv_content_list.append(row) - - return csv_content_list - - -def load_file(file_path): - if not os.path.isfile(file_path): - raise exceptions.FileNotFound(f"{file_path} does not exist.") - - file_suffix = os.path.splitext(file_path)[1].lower() - if file_suffix == '.json': - return _load_json_file(file_path) - elif file_suffix in ['.yaml', '.yml']: - return _load_yaml_file(file_path) - elif file_suffix == ".csv": - return load_csv_file(file_path) - else: - # '' or other suffix - logger.warning(f"Unsupported file format: {file_path}") - return [] - - -def load_folder_files(folder_path, recursive=True): - """ load folder path, return all files endswith yml/yaml/json in list. 
- - Args: - folder_path (str): specified folder path to load - recursive (bool): load files recursively if True - - Returns: - list: files endswith yml/yaml/json - """ - if isinstance(folder_path, (list, set)): - files = [] - for path in set(folder_path): - files.extend(load_folder_files(path, recursive)) - - return files - - if not os.path.exists(folder_path): - return [] - - file_list = [] - - for dirpath, dirnames, filenames in os.walk(folder_path): - filenames_list = [] - - for filename in filenames: - if not filename.endswith(('.yml', '.yaml', '.json')): - continue - - filenames_list.append(filename) - - for filename in filenames_list: - file_path = os.path.join(dirpath, filename) - file_list.append(file_path) - - if not recursive: - break - - return file_list - - -def load_dot_env_file(dot_env_path): - """ load .env file. - - Args: - dot_env_path (str): .env file path - - Returns: - dict: environment variables mapping - - { - "UserName": "debugtalk", - "Password": "123456", - "PROJECT_KEY": "ABCDEFGH" - } - - Raises: - exceptions.FileFormatError: If .env file format is invalid. - - """ - if not os.path.isfile(dot_env_path): - return {} - - logger.info(f"Loading environment variables from {dot_env_path}") - env_variables_mapping = {} - - with io.open(dot_env_path, 'r', encoding='utf-8') as fp: - for line in fp: - # maxsplit=1 - if "=" in line: - variable, value = line.split("=", 1) - elif ":" in line: - variable, value = line.split(":", 1) - else: - raise exceptions.FileFormatError(".env format error") - - env_variables_mapping[variable.strip()] = value.strip() - - utils.set_os_environ(env_variables_mapping) - return env_variables_mapping - - -def load_module_functions(module): - """ load python module functions. 
- - Args: - module: python module - - Returns: - dict: functions mapping for specified python module - - { - "func1_name": func1, - "func2_name": func2 - } - - """ - module_functions = {} - - for name, item in vars(module).items(): - if isinstance(item, types.FunctionType): - module_functions[name] = item - - return module_functions - - -def load_builtin_functions(): - """ load builtin module functions - """ - return load_module_functions(builtin) - diff --git a/httprunner/loader/load_test.py b/httprunner/loader/load_test.py deleted file mode 100644 index 4a2e0f91..00000000 --- a/httprunner/loader/load_test.py +++ /dev/null @@ -1,153 +0,0 @@ -import os -import unittest - -from httprunner import exceptions -from httprunner.loader import load -from httprunner.loader.buildup import load_test_file - - -class TestFileLoader(unittest.TestCase): - - def test_load_yaml_file_file_format_error(self): - yaml_tmp_file = "tests/data/tmp.yml" - # create empty yaml file - with open(yaml_tmp_file, 'w') as f: - f.write("") - - with self.assertRaises(exceptions.FileFormatError): - load_test_file(yaml_tmp_file) - - os.remove(yaml_tmp_file) - - # create invalid format yaml file - with open(yaml_tmp_file, 'w') as f: - f.write("abc") - - with self.assertRaises(exceptions.FileFormatError): - load_test_file(yaml_tmp_file) - - os.remove(yaml_tmp_file) - - def test_load_json_file_file_format_error(self): - json_tmp_file = "tests/data/tmp.json" - # create empty file - with open(json_tmp_file, 'w') as f: - f.write("") - - with self.assertRaises(exceptions.FileFormatError): - load_test_file(json_tmp_file) - - os.remove(json_tmp_file) - - # create empty json file - with open(json_tmp_file, 'w') as f: - f.write("{}") - - with self.assertRaises(exceptions.FileFormatError): - load_test_file(json_tmp_file) - - os.remove(json_tmp_file) - - # create invalid format json file - with open(json_tmp_file, 'w') as f: - f.write("abc") - - with self.assertRaises(exceptions.FileFormatError): - 
load_test_file(json_tmp_file) - - os.remove(json_tmp_file) - - def test_load_testcases_bad_filepath(self): - testcase_file_path = os.path.join(os.getcwd(), 'tests/data/demo') - with self.assertRaises(exceptions.FileNotFound): - load.load_file(testcase_file_path) - - def test_load_json_testcases(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_hardcode.json') - testcases = load.load_file(testcase_file_path) - self.assertEqual(len(testcases), 3) - test = testcases[0]["test"] - self.assertIn('name', test) - self.assertIn('request', test) - self.assertIn('url', test['request']) - self.assertIn('method', test['request']) - - def test_load_yaml_testcases(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_hardcode.yml') - testcases = load.load_file(testcase_file_path) - self.assertEqual(len(testcases), 3) - test = testcases[0]["test"] - self.assertIn('name', test) - self.assertIn('request', test) - self.assertIn('url', test['request']) - self.assertIn('method', test['request']) - - def test_load_csv_file_one_parameter(self): - csv_file_path = os.path.join( - os.getcwd(), 'tests/data/user_agent.csv') - csv_content = load.load_file(csv_file_path) - self.assertEqual( - csv_content, - [ - {'user_agent': 'iOS/10.1'}, - {'user_agent': 'iOS/10.2'}, - {'user_agent': 'iOS/10.3'} - ] - ) - - def test_load_csv_file_multiple_parameters(self): - csv_file_path = os.path.join( - os.getcwd(), 'tests/data/account.csv') - csv_content = load.load_file(csv_file_path) - self.assertEqual( - csv_content, - [ - {'username': 'test1', 'password': '111111'}, - {'username': 'test2', 'password': '222222'}, - {'username': 'test3', 'password': '333333'} - ] - ) - - def test_load_folder_files(self): - folder = os.path.join(os.getcwd(), 'tests') - file1 = os.path.join(os.getcwd(), 'tests', 'test_utils.py') - file2 = os.path.join(os.getcwd(), 'tests', 'api', 'reset_all.yml') - - files = load.load_folder_files(folder, recursive=False) - 
self.assertEqual(files, []) - - files = load.load_folder_files(folder) - self.assertIn(file2, files) - self.assertNotIn(file1, files) - - files = load.load_folder_files("not_existed_foulder", recursive=False) - self.assertEqual([], files) - - files = load.load_folder_files(file2, recursive=False) - self.assertEqual([], files) - - def test_load_dot_env_file(self): - dot_env_path = os.path.join( - os.getcwd(), "tests", ".env" - ) - env_variables_mapping = load.load_dot_env_file(dot_env_path) - self.assertIn("PROJECT_KEY", env_variables_mapping) - self.assertEqual(env_variables_mapping["UserName"], "debugtalk") - - def test_load_custom_dot_env_file(self): - dot_env_path = os.path.join( - os.getcwd(), "tests", "data", "test.env" - ) - env_variables_mapping = load.load_dot_env_file(dot_env_path) - self.assertIn("PROJECT_KEY", env_variables_mapping) - self.assertEqual(env_variables_mapping["UserName"], "test") - self.assertEqual(env_variables_mapping["content_type"], "application/json; charset=UTF-8") - - def test_load_env_path_not_exist(self): - dot_env_path = os.path.join( - os.getcwd(), "tests", "data", - ) - env_variables_mapping = load.load_dot_env_file(dot_env_path) - self.assertEqual(env_variables_mapping, {}) diff --git a/httprunner/loader/locate.py b/httprunner/loader/locate.py deleted file mode 100644 index 0d4dd056..00000000 --- a/httprunner/loader/locate.py +++ /dev/null @@ -1,123 +0,0 @@ -import os -import sys - -from loguru import logger - -from httprunner import exceptions - -project_working_directory = None - - -def locate_file(start_path, file_name): - """ locate filename and return absolute file path. - searching will be recursive upward until current working directory or system root dir. - - Args: - file_name (str): target locate file name - start_path (str): start locating path, maybe file path or directory path - - Returns: - str: located file path. None if file not found. - - Raises: - exceptions.FileNotFound: If failed to locate file. 
- - """ - if os.path.isfile(start_path): - start_dir_path = os.path.dirname(start_path) - elif os.path.isdir(start_path): - start_dir_path = start_path - else: - raise exceptions.FileNotFound(f"invalid path: {start_path}") - - file_path = os.path.join(start_dir_path, file_name) - if os.path.isfile(file_path): - return os.path.abspath(file_path) - - # current working directory - if os.path.abspath(start_dir_path) == os.getcwd(): - raise exceptions.FileNotFound(f"{file_name} not found in {start_path}") - - # system root dir - # Windows, e.g. 'E:\\' - # Linux/Darwin, '/' - parent_dir = os.path.dirname(start_dir_path) - if parent_dir == start_dir_path: - raise exceptions.FileNotFound(f"{file_name} not found in {start_path}") - - # locate recursive upward - return locate_file(parent_dir, file_name) - - -def locate_debugtalk_py(start_path): - """ locate debugtalk.py file - - Args: - start_path (str): start locating path, - maybe testcase file path or directory path - - Returns: - str: debugtalk.py file path, None if not found - - """ - try: - # locate debugtalk.py file. 
- debugtalk_path = locate_file(start_path, "debugtalk.py") - except exceptions.FileNotFound: - debugtalk_path = None - - return debugtalk_path - - -def init_project_working_directory(test_path): - """ this should be called at startup - - run test file: - run_path -> load_cases -> load_project_data -> init_project_working_directory - or run passed in data structure: - run -> init_project_working_directory - - Args: - test_path: specified testfile path - - Returns: - (str, str): debugtalk.py path, project_working_directory - - """ - - def prepare_path(path): - if not os.path.exists(path): - err_msg = f"path not exist: {path}" - logger.error(err_msg) - raise exceptions.FileNotFound(err_msg) - - if not os.path.isabs(path): - path = os.path.join(os.getcwd(), path) - - return path - - test_path = prepare_path(test_path) - - # locate debugtalk.py file - debugtalk_path = locate_debugtalk_py(test_path) - - global project_working_directory - if debugtalk_path: - # The folder contains debugtalk.py will be treated as PWD. - project_working_directory = os.path.dirname(debugtalk_path) - else: - # debugtalk.py not found, use os.getcwd() as PWD. 
- project_working_directory = os.getcwd() - - # add PWD to sys.path - sys.path.insert(0, project_working_directory) - - return debugtalk_path, project_working_directory - - -def get_project_working_directory(): - global project_working_directory - if project_working_directory is None: - raise exceptions.MyBaseFailure("loader.load_cases() has not been called!") - - return project_working_directory diff --git a/httprunner/loader/locate_test.py b/httprunner/loader/locate_test.py deleted file mode 100644 index 54a2d15c..00000000 --- a/httprunner/loader/locate_test.py +++ /dev/null @@ -1,40 +0,0 @@ - -import os -import unittest - -from httprunner import exceptions -from httprunner.loader import locate - - -class TestLoaderLocate(unittest.TestCase): - - def test_locate_file(self): - with self.assertRaises(exceptions.FileNotFound): - locate.locate_file(os.getcwd(), "debugtalk.py") - - with self.assertRaises(exceptions.FileNotFound): - locate.locate_file("", "debugtalk.py") - - start_path = os.path.join(os.getcwd(), "tests") - self.assertEqual( - locate.locate_file(start_path, "debugtalk.py"), - os.path.join( - os.getcwd(), "tests/debugtalk.py" - ) - ) - self.assertEqual( - locate.locate_file("tests/", "debugtalk.py"), - os.path.join(os.getcwd(), "tests", "debugtalk.py") - ) - self.assertEqual( - locate.locate_file("tests", "debugtalk.py"), - os.path.join(os.getcwd(), "tests", "debugtalk.py") - ) - self.assertEqual( - locate.locate_file("tests/base.py", "debugtalk.py"), - os.path.join(os.getcwd(), "tests", "debugtalk.py") - ) - self.assertEqual( - locate.locate_file("tests/data/demo_testcase.yml", "debugtalk.py"), - os.path.join(os.getcwd(), "tests", "debugtalk.py") - ) diff --git a/httprunner/loader/schemas/api.schema.json b/httprunner/loader/schemas/api.schema.json deleted file mode 100644 index f59b924f..00000000 --- a/httprunner/loader/schemas/api.schema.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "description": 
"httprunner api schema definition", - "type": "object", - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "base_url": { - "$ref": "common.schema.json#/definitions/base_url" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "request": { - "$ref": "common.schema.json#/definitions/request" - }, - "setup_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - "teardown_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - "extract": { - "$ref": "common.schema.json#/definitions/extract" - }, - "validate": { - "$ref": "common.schema.json#/definitions/validate" - } - }, - "required": [ - "name", - "request" - ], - "examples": [ - { - "name": "demo api", - "variables": { - "var1": "value1", - "var2": "value2" - }, - "request": { - "url": "/api/path/$var1", - "method": "POST", - "headers": { - "Content-Type": "application/json" - }, - "json": { - "key": "$var2" - }, - "validate": [ - { - "eq": ["status_code", 200] - } - ] - } - } - ] -} \ No newline at end of file diff --git a/httprunner/loader/schemas/common.schema.json b/httprunner/loader/schemas/common.schema.json deleted file mode 100644 index 8986240c..00000000 --- a/httprunner/loader/schemas/common.schema.json +++ /dev/null @@ -1,392 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "description": "common json schema definitions for httprunner api/testcase/testsuite", - "definitions": { - "name": { - "description": "used as api/teststep/testcase/testsuite identification", - "type": "string", - "examples": [ - "basic test for httpbin" - ] - }, - "base_url": { - "description": "The base_url will be used with relative URI", - "type": "string", - "examples": [ - "https://httpbin.org" - ] - }, - "variables": { - "description": "define variables for api/teststep/testcase/testsuite", - "oneOf": [ - { - "type": "object", - "examples": [ - { - "var1": "value1", - "var2": "value2" - } - ] - }, - { - "type": "array", - 
"items": { - "type": "object", - "maxProperties": 1, - "minProperties": 1 - }, - "examples": [ - [ - { - "var1": "value1" - }, - { - "var2": "value2" - } - ] - ] - }, - { - "type": "string", - "pattern": "^\\$.*", - "examples": [ - "$prepared_variables", - "${prepare_variables()}", - "${prepare_variables($a, $b)}" - ] - } - ] - }, - "verify": { - "description": "whether to verify the server’s TLS certificate", - "type": "boolean", - "examples": [ - true, - false - ] - }, - "hook": { - "description": "used to define setup_hooks/teardown_hooks for api/teststep/testcase", - "type": "array", - "items": { - "oneOf": [ - { - "description": "call setup/teardown hook functions, return nothing", - "type": "string", - "examples": [ - [ - "${sleep(2)}", - "${hook_print(setup)}", - "${modify_request_json($request, android)}", - "${alter_response($response)}" - ] - ] - }, - { - "description": "call setup/teardown hook functions, return value and assign to variable", - "type": "object", - "examples": [ - { - "total": "${sum_two(1, 5)}" - }, - { - "filed_name": "get_decoded_response_field($response)" - } - ] - } - ] - } - }, - "config": { - "description": "used in testcase/testsuite to configure common fields", - "type": "object", - "properties": { - "name": { - "$ref": "#/definitions/name" - }, - "base_url": { - "$ref": "#/definitions/base_url" - }, - "variables": { - "$ref": "#/definitions/variables" - }, - "setup_hooks": { - "$ref": "#/definitions/hook" - }, - "teardown_hooks": { - "$ref": "#/definitions/hook" - }, - "verify": { - "$ref": "#/definitions/verify" - } - }, - "required": ["name"] - }, - "request": { - "description": "used to define a api request. 
properties is the same as python package `requests.request`", - "type": "object", - "properties": { - "method": { - "type": "string", - "description": "request method", - "enum": [ - "GET", - "POST", - "OPTIONS", - "HEAD", - "PUT", - "PATCH", - "DELETE", - "CONNECT", - "TRACE" - ] - }, - "url": { - "description": "request url, may be absolute or relative URI", - "type": "string", - "examples": [ - "http://httpbin.org/get?a=1&b=2", - "/get?a=1&b=2", - "get?a=1&b=2" - ] - }, - "params": { - "description": "query string for request url", - "type": "object", - "examples": [ - { - "a": 1, - "b": 2 - } - ] - }, - "data": { - "anyOf": [ - { - "description": "request body in json format", - "type": "object", - "examples": [ - { - "a": 1, - "b": 2 - } - ] - }, - { - "description": "request body in application/x-www-form-urlencoded format", - "type": "string", - "examples": [ - "a=1&b=2" - ] - }, - { - "description": "request body prepared with function, or reference a variable", - "type": "string", - "examples": [ - "$post_data", - "${prepare_data($a, $b)}" - ] - } - ] - }, - "json": { - "oneOf": [ - { - "description": "request body in json format", - "type": "object" - }, - { - "description": "request body prepared with function, or reference a variable", - "type": "string", - "pattern": "^\\$.*", - "examples": [ - "$post_data", - "${prepare_post_data($a, $b)}" - ] - } - ] - }, - "headers": { - "description": "request headers", - "oneOf": [ - { - "description": "request headers in json format", - "type": "object", - "examples": [ - { - "User-Agent": "python-requests/2.18.4", - "Content-Type": "application/json" - } - ] - }, - { - "description": "request headers prepared with function, or reference a variable", - "type": "string", - "examples": [ - "$prepared_headers", - "${prepare_headers($a, $b)}" - ] - } - ] - }, - "cookies": { - "description": "request cookies", - "type": "object" - }, - "files": { - "description": "request files, used to upload files", - "type": 
"object" - }, - "auth": { - "description": "Auth tuple to enable Basic/Digest/Custom HTTP Auth.", - "type": "array" - }, - "timeout": { - "description": "How many seconds to wait for the server to send data before giving up", - "type": "number", - "examples": [ - 120 - ] - }, - "allow_redirects": { - "description": "Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to True", - "type": "boolean" - }, - "proxies": { - "description": "Dictionary mapping protocol to the URL of the proxy", - "type": "object" - }, - "verify": { - "description": "configure verify for current api/teststep", - "$ref": "#/definitions/verify" - }, - "stream": { - "description": "if False, the response content will be immediately downloaded.", - "type": "boolean" - }, - "upload": { - "description": "upload files", - "type": "object", - "examples": [ - { - "file": "data/file_to_upload", - "md5": "123" - } - ] - } - }, - "required": [ - "method", - "url" - ] - }, - "extract": { - "description": "used to extract session variables for later requests", - "oneOf": [ - { - "type": "object", - "patternProperties": { - "^[A-Za-z_][A-Za-z0-9_]*$": { - "description": "extraction rule for session variable, maybe in jsonpath/regex/jmespath", - "type": "string" - } - }, - "examples": [ - { - "code__by_jsonpath": "$.code", - "item_id__by_jsonpath": "$..items.*.id", - "var_name__by_regex": "\"LB[\\d]*(.*)RB[\\d]*\"", - "content_type": "headers.content-type", - "first_name": "content.person.name.first_name" - } - ] - }, - { - "type": "array", - "items": { - "type": "object", - "patternProperties": { - "^[A-Za-z_][A-Za-z0-9_]*$": { - "description": "extraction rule for session variable, maybe in jsonpath/regex/jmespath", - "type": "string" - } - }, - "minProperties": 1, - "maxProperties": 1 - }, - "examples": [ - { - "code__by_jsonpath": "$.code" - }, - { - "item_id__by_jsonpath": "$..items.*.id" - }, - { - "var_name__by_regex": "\"LB[\\d]*(.*)RB[\\d]*\"" - }, - { - "content_type": 
"headers.content-type" - }, - { - "first_name": "content.person.name.first_name" - } - ] - } - ] - }, - "validate": { - "description": "used to validate response fields", - "type": "array", - "items": { - "description": "one validator definition", - "oneOf": [ - { - "type": "object", - "properties": { - "check": { - "type": "string" - }, - "comparator": { - "type": "string" - }, - "expect": { - "description": "expected value" - } - }, - "required": ["check", "expect"], - "examples": [ - { - "check": "body.code", - "comparator": "gt", - "expect": 0 - }, - { - "check": "status_code", - "expect": 200 - } - ] - }, - { - "type": "object", - "patternProperties": { - "^[A-Za-z_][A-Za-z0-9_]*$": { - "description": "validate_func_name: [check_value, expect_value]", - "type": "array", - "minItems": 2, - "maxItems": 2 - } - }, - "examples": [ - { - "eq": ["status_code", 200] - }, - { - "gt": ["body.code", 0] - } - ] - } - ] - } - } - } -} \ No newline at end of file diff --git a/httprunner/loader/schemas/testcase.schema.v1.json b/httprunner/loader/schemas/testcase.schema.v1.json deleted file mode 100644 index 823399cc..00000000 --- a/httprunner/loader/schemas/testcase.schema.v1.json +++ /dev/null @@ -1,138 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "description": "httprunner testcase schema v1 definition", - "type": "array", - "definitions": { - "test": { - "type": "object", - "oneOf": [ - { - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "request": { - "description": "define api request directly", - "$ref": "common.schema.json#/definitions/request" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "extract": { - "$ref": "common.schema.json#/definitions/extract" - }, - "validate": { - "$ref": "common.schema.json#/definitions/validate" - }, - "setup_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - "teardown_hooks": { - "$ref": "common.schema.json#/definitions/hook" 
- } - }, - "required": [ - "name", - "request" - ] - }, - { - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "api": { - "description": "api reference, value is api file relative path", - "type": "string" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "extract": { - "oneOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "$ref": "common.schema.json#/definitions/extract" - } - ] - }, - "validate": { - "$ref": "common.schema.json#/definitions/validate" - }, - "setup_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - "teardown_hooks": { - "$ref": "common.schema.json#/definitions/hook" - } - }, - "required": [ - "name", - "api" - ] - }, - { - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "testcase": { - "description": "testcase reference, value is testcase file relative path", - "type": "string" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "extract": { - "type": "array", - "items": { - "type": "string" - } - }, - "setup_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - "teardown_hooks": { - "$ref": "common.schema.json#/definitions/hook" - } - }, - "required": [ - "name", - "testcase" - ] - } - ] - } - }, - "items": { - "type": "object", - "oneOf": [ - { - "type": "object", - "properties": { - "config": { - "$ref": "common.schema.json#/definitions/config" - } - }, - "additionalProperties": false - }, - { - "type": "object", - "properties": { - "test": { - "$ref": "testcase.schema.v1.json#/definitions/test" - } - }, - "additionalProperties": false - } - ], - "minProperties": 1, - "maxProperties": 1 - }, - "minItems": 2 -} \ No newline at end of file diff --git a/httprunner/loader/schemas/testcase.schema.v2.json b/httprunner/loader/schemas/testcase.schema.v2.json deleted file mode 100644 index 665b1419..00000000 --- a/httprunner/loader/schemas/testcase.schema.v2.json +++ 
/dev/null @@ -1,184 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "description": "httprunner testcase schema v2 definition", - "type": "object", - "definitions": { - "teststep": { - "type": "object", - "oneOf": [ - { - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "request": { - "description": "define api request directly", - "$ref": "common.schema.json#/definitions/request" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "extract": { - "$ref": "common.schema.json#/definitions/extract" - }, - "validate": { - "$ref": "common.schema.json#/definitions/validate" - }, - "setup_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - "teardown_hooks": { - "$ref": "common.schema.json#/definitions/hook" - } - }, - "required": [ - "name", - "request" - ] - }, - { - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "api": { - "description": "api reference, value is api file relative path", - "type": "string" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "extract": { - "oneOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "$ref": "common.schema.json#/definitions/extract" - } - ] - }, - "validate": { - "$ref": "common.schema.json#/definitions/validate" - }, - "setup_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - "teardown_hooks": { - "$ref": "common.schema.json#/definitions/hook" - } - }, - "required": [ - "name", - "api" - ] - }, - { - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "testcase": { - "description": "testcase reference, value is testcase file relative path", - "type": "string" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "extract": { - "type": "array", - "items": { - "type": "string" - } - }, - "setup_hooks": { - "$ref": "common.schema.json#/definitions/hook" - }, - 
"teardown_hooks": { - "$ref": "common.schema.json#/definitions/hook" - } - }, - "required": [ - "name", - "testcase" - ] - } - ] - } - }, - "properties": { - "config": { - "$ref": "common.schema.json#/definitions/config" - }, - "teststeps": { - "description": "teststep of a testcase", - "type": "array", - "minItems": 1, - "items": { - "$ref": "testcase.schema.v2.json#/definitions/teststep" - } - } - }, - "required": [ - "config", - "teststeps" - ], - "examples": [ - { - "config": { - "name": "testcase name" - }, - "teststeps": [ - { - "name": "api 1", - "api": "/path/to/api1" - }, - { - "name": "api 2", - "api": "/path/to/api2" - } - ] - }, - { - "config": { - "name": "demo testcase", - "variables": { - "device_sn": "ABC", - "username": "${ENV(USERNAME)}", - "password": "${ENV(PASSWORD)}" - }, - "base_url": "http://127.0.0.1:5000" - }, - "teststeps": [ - { - "name": "demo step 1", - "api": "path/to/api1.yml", - "variables": { - "user_agent": "iOS/10.3", - "device_sn": "$device_sn" - }, - "extract": [ - { - "token": "content.token" - } - ], - "validate": [ - { - "eq": ["status_code", 200] - } - ] - }, - { - "name": "demo step 2", - "api": "path/to/api2.yml", - "variables": { - "token": "$token" - } - } - ] - } - ] -} \ No newline at end of file diff --git a/httprunner/loader/schemas/testsuite.schema.v1.json b/httprunner/loader/schemas/testsuite.schema.v1.json deleted file mode 100644 index 85a8a72a..00000000 --- a/httprunner/loader/schemas/testsuite.schema.v1.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "description": "httprunner testsuite schema v1 definition", - "type": "object", - "definitions": { - "testcase": { - "type": "object", - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "parameters": { - "description": "generate cartesian product variables with parameters, each group of variables will be run 
once", - "type": "object" - }, - "testcase": { - "description": "testcase reference, value is testcase file relative path", - "type": "string" - } - }, - "required": [ - "testcase" - ] - } - }, - "properties": { - "config": { - "$ref": "common.schema.json#/definitions/config" - }, - "testcases": { - "description": "testcase of a testsuite", - "type": "object", - "minProperties": 1, - "patternProperties": { - ".*": { - "description": "testcase definition", - "$ref": "testsuite.schema.v1.json#/definitions/testcase" - } - } - } - }, - "required": [ - "config", - "testcases" - ], - "examples": [ - { - "config": { - "name": "testsuite name" - }, - "testcases": { - "testcase 1": { - "name": "testcase 1", - "testcase": "/path/to/testcase1" - }, - "testcase 2": { - "name": "testcase 2", - "testcase": "/path/to/testcase2" - } - } - } - ] -} \ No newline at end of file diff --git a/httprunner/loader/schemas/testsuite.schema.v2.json b/httprunner/loader/schemas/testsuite.schema.v2.json deleted file mode 100644 index 5eb7eff2..00000000 --- a/httprunner/loader/schemas/testsuite.schema.v2.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "description": "httprunner testsuite schema v2 definition", - "type": "object", - "definitions": { - "testcase": { - "type": "object", - "properties": { - "name": { - "$ref": "common.schema.json#/definitions/name" - }, - "variables": { - "$ref": "common.schema.json#/definitions/variables" - }, - "parameters": { - "description": "generate cartesian product variables with parameters, each group of variables will be run once", - "type": "object" - }, - "testcase": { - "description": "testcase reference, value is testcase file relative path", - "type": "string" - } - }, - "required": [ - "testcase" - ] - } - }, - "properties": { - "config": { - "$ref": "common.schema.json#/definitions/config" - }, - "testcases": { - "description": "testcase of a testsuite", - "type": "array", - "minItems": 1, - "items": { 
- "$ref": "testsuite.schema.v2.json#/definitions/testcase" - } - } - }, - "required": [ - "config", - "testcases" - ], - "examples": [ - { - "config": { - "name": "testsuite name" - }, - "testcases": [ - { - "name": "testcase 1", - "testcase": "/path/to/testcase1" - }, - { - "name": "testcase 2", - "testcase": "/path/to/testcase2" - } - ] - }, - { - "config": { - "name": "demo testsuite", - "variables": { - "device_sn": "XYZ" - }, - "base_url": "http://127.0.0.1:5000" - }, - "testcases": [ - { - "name": "call demo_testcase with data 1", - "testcase": "path/to/demo_testcase.yml", - "variables": { - "device_sn": "$device_sn" - } - }, - { - "name": "call demo_testcase with data 2", - "testcase": "path/to/demo_testcase.yml", - "variables": { - "device_sn": "$device_sn" - } - } - ] - } - ] -} \ No newline at end of file diff --git a/httprunner/loader_test.py b/httprunner/loader_test.py new file mode 100644 index 00000000..dac77804 --- /dev/null +++ b/httprunner/loader_test.py @@ -0,0 +1,127 @@ +import os +import unittest + +from httprunner import exceptions, loader + + +class TestLoader(unittest.TestCase): + def test_load_testcase_file(self): + path = "examples/postman_echo/request_methods/request_with_variables.yml" + testcase_json, testcase_obj = loader.load_testcase_file(path) + self.assertEqual( + testcase_json["config"]["name"], "request methods testcase with variables" + ) + self.assertEqual( + testcase_obj.config.name, "request methods testcase with variables" + ) + self.assertEqual(len(testcase_json["teststeps"]), 3) + self.assertEqual(len(testcase_obj.teststeps), 3) + + def test_load_json_file_file_format_error(self): + json_tmp_file = "/tmp/tmp.json" + # create empty file + with open(json_tmp_file, "w") as f: + f.write("") + + with self.assertRaises(exceptions.FileFormatError): + loader._load_json_file(json_tmp_file) + + os.remove(json_tmp_file) + + # create empty json file + with open(json_tmp_file, "w") as f: + f.write("{}") + + 
loader._load_json_file(json_tmp_file) + os.remove(json_tmp_file) + + # create invalid format json file + with open(json_tmp_file, "w") as f: + f.write("abc") + + with self.assertRaises(exceptions.FileFormatError): + loader._load_json_file(json_tmp_file) + + os.remove(json_tmp_file) + + def test_load_testcases_bad_filepath(self): + testcase_file_path = os.path.join(os.getcwd(), "tests/data/demo") + with self.assertRaises(exceptions.FileNotFound): + loader.load_testcase_file(testcase_file_path) + + def test_load_csv_file_one_parameter(self): + csv_file_path = os.path.join(os.getcwd(), "examples/httpbin/user_agent.csv") + csv_content = loader.load_csv_file(csv_file_path) + self.assertEqual( + csv_content, + [ + {"user_agent": "iOS/10.1"}, + {"user_agent": "iOS/10.2"}, + {"user_agent": "iOS/10.3"}, + ], + ) + + def test_load_csv_file_multiple_parameters(self): + csv_file_path = os.path.join(os.getcwd(), "examples/httpbin/account.csv") + csv_content = loader.load_csv_file(csv_file_path) + self.assertEqual( + csv_content, + [ + {"username": "test1", "password": "111111"}, + {"username": "test2", "password": "222222"}, + {"username": "test3", "password": "333333"}, + ], + ) + + def test_load_folder_files(self): + folder = os.path.join(os.getcwd(), "examples") + file1 = os.path.join(os.getcwd(), "examples", "test_utils.py") + file2 = os.path.join(os.getcwd(), "examples", "httpbin", "hooks.yml") + + files = loader.load_folder_files(folder, recursive=False) + self.assertEqual(files, []) + + files = loader.load_folder_files(folder) + self.assertIn(file2, files) + self.assertNotIn(file1, files) + + files = loader.load_folder_files("not_existed_foulder", recursive=False) + self.assertEqual([], files) + + files = loader.load_folder_files(file2, recursive=False) + self.assertEqual([], files) + + def test_load_custom_dot_env_file(self): + dot_env_path = os.path.join(os.getcwd(), "examples", "httpbin", "test.env") + env_variables_mapping = loader.load_dot_env_file(dot_env_path) + 
self.assertIn("PROJECT_KEY", env_variables_mapping) + self.assertEqual(env_variables_mapping["UserName"], "test") + self.assertEqual( + env_variables_mapping["content_type"], "application/json; charset=UTF-8" + ) + + def test_load_env_path_not_exist(self): + dot_env_path = os.path.join(os.getcwd(), "tests", "data",) + env_variables_mapping = loader.load_dot_env_file(dot_env_path) + self.assertEqual(env_variables_mapping, {}) + + def test_locate_file(self): + with self.assertRaises(exceptions.FileNotFound): + loader.locate_file(os.getcwd(), "debugtalk.py") + + with self.assertRaises(exceptions.FileNotFound): + loader.locate_file("", "debugtalk.py") + + start_path = os.path.join(os.getcwd(), "examples", "httpbin") + self.assertEqual( + loader.locate_file(start_path, "debugtalk.py"), + os.path.join(os.getcwd(), "examples/httpbin/debugtalk.py"), + ) + self.assertEqual( + loader.locate_file("examples/httpbin/", "debugtalk.py"), + os.path.join(os.getcwd(), "examples", "httpbin", "debugtalk.py"), + ) + self.assertEqual( + loader.locate_file("examples/httpbin/", "debugtalk.py"), + os.path.join(os.getcwd(), "examples/httpbin/debugtalk.py"), + ) diff --git a/httprunner/parser.py b/httprunner/parser.py index b61e6adc..a43023e6 100644 --- a/httprunner/parser.py +++ b/httprunner/parser.py @@ -1,14 +1,12 @@ -# encoding: utf-8 - import ast import builtins -import collections -import json import re +from typing import Any, Set, Text, Callable, List, Dict -from loguru import logger +from httprunner import loader, utils, exceptions +from httprunner.schema import VariablesMapping, FunctionsMapping -from httprunner import exceptions, utils, loader +absolute_http_url_regexp = re.compile(r"^https?://", re.I) # use $$ to escape $ notation dolloar_regex_compile = re.compile(r"\$\$") @@ -17,16 +15,8 @@ variable_regex_compile = re.compile(r"\$\{(\w+)\}|\$(\w+)") # function notation, e.g. 
${func1($var_1, $var_3)} function_regex_compile = re.compile(r"\$\{(\w+)\(([\$\w\.\-/\s=,]*)\)\}") -""" Store parse failed api/testcase/testsuite file path -""" -parse_failed_testfiles = {} - -def get_parse_failed_testfiles(): - return parse_failed_testfiles - - -def parse_string_value(str_value): +def parse_string_value(str_value: Text) -> Any: """ parse string to number if possible e.g. "123" => 123 "12.2" => 12.3 @@ -42,35 +32,17 @@ def parse_string_value(str_value): return str_value -def is_var_or_func_exist(content): - """ check if variable or function exist - """ - if not isinstance(content, str): - return False - - try: - match_start_position = content.index("$", 0) - except ValueError: - return False - - while match_start_position < len(content): - dollar_match = dolloar_regex_compile.match(content, match_start_position) - if dollar_match: - match_start_position = dollar_match.end() - continue - - func_match = function_regex_compile.match(content, match_start_position) - if func_match: - return True - - var_match = variable_regex_compile.match(content, match_start_position) - if var_match: - return True - - return False +def build_url(base_url, path): + """ prepend url with base_url unless it's already an absolute URL """ + if absolute_http_url_regexp.match(path): + return path + elif base_url: + return "{}/{}".format(base_url.rstrip("/"), path.lstrip("/")) + else: + raise exceptions.ParamsError("base url missed!") -def regex_findall_variables(content): +def regex_findall_variables(content: Text) -> List[Text]: """ extract all variable names from content, which is in format $variable Args: @@ -96,15 +68,13 @@ def regex_findall_variables(content): try: vars_list = [] for var_tuple in variable_regex_compile.findall(content): - vars_list.append( - var_tuple[0] or var_tuple[1] - ) + vars_list.append(var_tuple[0] or var_tuple[1]) return vars_list except TypeError: return [] -def regex_findall_functions(content): +def regex_findall_functions(content: Text) -> 
List[Text]: """ extract all functions from string content, which are in format ${fun()} Args: @@ -136,329 +106,28 @@ def regex_findall_functions(content): return [] -def parse_parameters(parameters, variables_mapping=None, functions_mapping=None): - """ parse parameters and generate cartesian product. - - Args: - parameters (list) parameters: parameter name and value in list - parameter value may be in three types: - (1) data list, e.g. ["iOS/10.1", "iOS/10.2", "iOS/10.3"] - (2) call built-in parameterize function, "${parameterize(account.csv)}" - (3) call custom function in debugtalk.py, "${gen_app_version()}" - - variables_mapping (dict): variables mapping loaded from testcase config - functions_mapping (dict): functions mapping loaded from debugtalk.py - - Returns: - list: cartesian product list - - Examples: - >>> parameters = [ - {"user_agent": ["iOS/10.1", "iOS/10.2", "iOS/10.3"]}, - {"username-password": "${parameterize(account.csv)}"}, - {"app_version": "${gen_app_version()}"} - ] - >>> parse_parameters(parameters) - +def extract_variables(content: Any) -> Set: + """ extract all variables in content recursively. """ - variables_mapping = variables_mapping or {} - functions_mapping = functions_mapping or {} - parsed_parameters_list = [] + if isinstance(content, (list, set, tuple)): + variables = set() + for item in content: + variables = variables | extract_variables(item) + return variables - parameters = utils.ensure_mapping_format(parameters) - for parameter_name, parameter_content in parameters.items(): - parameter_name_list = parameter_name.split("-") + elif isinstance(content, dict): + variables = set() + for key, value in content.items(): + variables = variables | extract_variables(value) + return variables - if isinstance(parameter_content, list): - # (1) data list - # e.g. {"app_version": ["2.8.5", "2.8.6"]} - # => [{"app_version": "2.8.5", "app_version": "2.8.6"}] - # e.g. 
{"username-password": [["user1", "111111"], ["test2", "222222"]} - # => [{"username": "user1", "password": "111111"}, {"username": "user2", "password": "222222"}] - parameter_content_list = [] - for parameter_item in parameter_content: - if not isinstance(parameter_item, (list, tuple)): - # "2.8.5" => ["2.8.5"] - parameter_item = [parameter_item] + elif isinstance(content, str): + return set(regex_findall_variables(content)) - # ["app_version"], ["2.8.5"] => {"app_version": "2.8.5"} - # ["username", "password"], ["user1", "111111"] => {"username": "user1", "password": "111111"} - parameter_content_dict = dict(zip(parameter_name_list, parameter_item)) - - parameter_content_list.append(parameter_content_dict) - else: - # (2) & (3) - parsed_variables_mapping = parse_variables_mapping( - variables_mapping - ) - parsed_parameter_content = eval_lazy_data( - parameter_content, - parsed_variables_mapping, - functions_mapping - ) - if not isinstance(parsed_parameter_content, list): - raise exceptions.ParamsError("parameters syntax error!") - - parameter_content_list = [] - for parameter_item in parsed_parameter_content: - if isinstance(parameter_item, dict): - # get subset by parameter name - # {"app_version": "${gen_app_version()}"} - # gen_app_version() => [{'app_version': '2.8.5'}, {'app_version': '2.8.6'}] - # {"username-password": "${get_account()}"} - # get_account() => [ - # {"username": "user1", "password": "111111"}, - # {"username": "user2", "password": "222222"} - # ] - parameter_dict = {key: parameter_item[key] for key in parameter_name_list} - elif isinstance(parameter_item, (list, tuple)): - # {"username-password": "${get_account()}"} - # get_account() => [("user1", "111111"), ("user2", "222222")] - parameter_dict = dict(zip(parameter_name_list, parameter_item)) - elif len(parameter_name_list) == 1: - # {"user_agent": "${get_user_agent()}"} - # get_user_agent() => ["iOS/10.1", "iOS/10.2"] - parameter_dict = { - parameter_name_list[0]: parameter_item - } - - 
parameter_content_list.append(parameter_dict) - - parsed_parameters_list.append(parameter_content_list) - - return utils.gen_cartesian_product(*parsed_parameters_list) + return set() -def get_uniform_comparator(comparator): - """ convert comparator alias to uniform name - """ - if comparator in ["eq", "equals", "==", "is"]: - return "equals" - elif comparator in ["lt", "less_than"]: - return "less_than" - elif comparator in ["le", "less_than_or_equals"]: - return "less_than_or_equals" - elif comparator in ["gt", "greater_than"]: - return "greater_than" - elif comparator in ["ge", "greater_than_or_equals"]: - return "greater_than_or_equals" - elif comparator in ["ne", "not_equals"]: - return "not_equals" - elif comparator in ["str_eq", "string_equals"]: - return "string_equals" - elif comparator in ["len_eq", "length_equals", "count_eq"]: - return "length_equals" - elif comparator in ["len_gt", "count_gt", "length_greater_than", "count_greater_than"]: - return "length_greater_than" - elif comparator in ["len_ge", "count_ge", "length_greater_than_or_equals", - "count_greater_than_or_equals"]: - return "length_greater_than_or_equals" - elif comparator in ["len_lt", "count_lt", "length_less_than", "count_less_than"]: - return "length_less_than" - elif comparator in ["len_le", "count_le", "length_less_than_or_equals", - "count_less_than_or_equals"]: - return "length_less_than_or_equals" - else: - return comparator - - -def uniform_validator(validator): - """ unify validator - - Args: - validator (dict): validator maybe in two formats: - - format1: this is kept for compatiblity with the previous versions. 
- {"check": "status_code", "comparator": "eq", "expect": 201} - {"check": "$resp_body_success", "comparator": "eq", "expect": True} - format2: recommended new version, {comparator: [check_item, expected_value]} - {'eq': ['status_code', 201]} - {'eq': ['$resp_body_success', True]} - - Returns - dict: validator info - - { - "check": "status_code", - "expect": 201, - "comparator": "equals" - } - - """ - if not isinstance(validator, dict): - raise exceptions.ParamsError(f"invalid validator: {validator}") - - if "check" in validator and "expect" in validator: - # format1 - check_item = validator["check"] - expect_value = validator["expect"] - comparator = validator.get("comparator", "eq") - - elif len(validator) == 1: - # format2 - comparator = list(validator.keys())[0] - compare_values = validator[comparator] - - if not isinstance(compare_values, list) or len(compare_values) != 2: - raise exceptions.ParamsError(f"invalid validator: {validator}") - - check_item, expect_value = compare_values - - else: - raise exceptions.ParamsError(f"invalid validator: {validator}") - - # uniform comparator, e.g. lt => less_than, eq => equals - comparator = get_uniform_comparator(comparator) - - return { - "check": check_item, - "expect": expect_value, - "comparator": comparator - } - - -def _convert_validators_to_mapping(validators): - """ convert validators list to mapping. - - Args: - validators (list): validators in list - - Returns: - dict: validators mapping, use (check, comparator) as key. 
- - Examples: - >>> validators = [ - {"check": "v1", "expect": 201, "comparator": "eq"}, - {"check": {"b": 1}, "expect": 200, "comparator": "eq"} - ] - >>> print(_convert_validators_to_mapping(validators)) - { - ("v1", "eq"): {"check": "v1", "expect": 201, "comparator": "eq"}, - ('{"b": 1}', "eq"): {"check": {"b": 1}, "expect": 200, "comparator": "eq"} - } - - """ - validators_mapping = {} - - for validator in validators: - if not isinstance(validator["check"], collections.Hashable): - check = json.dumps(validator["check"]) - else: - check = validator["check"] - - key = (check, validator["comparator"]) - validators_mapping[key] = validator - - return validators_mapping - - -def extend_validators(raw_validators, override_validators): - """ extend raw_validators with override_validators. - override_validators will merge and override raw_validators. - - Args: - raw_validators (dict): - override_validators (dict): - - Returns: - list: extended validators - - Examples: - >>> raw_validators = [{'eq': ['v1', 200]}, {"check": "s2", "expect": 16, "comparator": "len_eq"}] - >>> override_validators = [{"check": "v1", "expect": 201}, {'len_eq': ['s3', 12]}] - >>> extend_validators(raw_validators, override_validators) - [ - {"check": "v1", "expect": 201, "comparator": "eq"}, - {"check": "s2", "expect": 16, "comparator": "len_eq"}, - {"check": "s3", "expect": 12, "comparator": "len_eq"} - ] - - """ - - if not raw_validators: - return override_validators - - elif not override_validators: - return raw_validators - - else: - def_validators_mapping = _convert_validators_to_mapping(raw_validators) - ref_validators_mapping = _convert_validators_to_mapping(override_validators) - - def_validators_mapping.update(ref_validators_mapping) - return list(def_validators_mapping.values()) - - -############################################################################### -## parse content with variables and functions mapping 
-############################################################################### - -def get_mapping_variable(variable_name, variables_mapping): - """ get variable from variables_mapping. - - Args: - variable_name (str): variable name - variables_mapping (dict): variables mapping - - Returns: - mapping variable value. - - Raises: - exceptions.VariableNotFound: variable is not found. - - """ - try: - return variables_mapping[variable_name] - except KeyError: - raise exceptions.VariableNotFound(f"{variable_name} is not found.") - - -def get_mapping_function(function_name, functions_mapping): - """ get function from functions_mapping, - if not found, then try to check if builtin function. - - Args: - function_name (str): function name - functions_mapping (dict): functions mapping - - Returns: - mapping function object. - - Raises: - exceptions.FunctionNotFound: function is neither defined in debugtalk.py nor builtin. - - """ - if function_name in functions_mapping: - return functions_mapping[function_name] - - elif function_name in ["parameterize", "P"]: - return loader.load_csv_file - - elif function_name in ["environ", "ENV"]: - return utils.get_os_environ - - elif function_name in ["multipart_encoder", "multipart_content_type"]: - # extension for upload test - from httprunner.ext import uploader - return getattr(uploader, function_name) - - try: - # check if HttpRunner builtin functions - built_in_functions = loader.load_builtin_functions() - return built_in_functions[function_name] - except KeyError: - pass - - try: - # check if Python builtin functions - return getattr(builtins, function_name) - except AttributeError: - pass - - raise exceptions.FunctionNotFound(f"{function_name} is not found.") - - -def parse_function_params(params): +def parse_function_params(params: Text) -> Dict: """ parse function params to args and kwargs. 
Args: @@ -489,20 +158,17 @@ def parse_function_params(params): {'args': [1, 2], 'kwargs': {'a':3, 'b':4}} """ - function_meta = { - "args": [], - "kwargs": {} - } + function_meta = {"args": [], "kwargs": {}} params_str = params.strip() if params_str == "": return function_meta - args_list = params_str.split(',') + args_list = params_str.split(",") for arg in args_list: arg = arg.strip() - if '=' in arg: - key, value = arg.split('=') + if "=" in arg: + key, value = arg.split("=") function_meta["kwargs"][key.strip()] = parse_string_value(value.strip()) else: function_meta["args"].append(parse_string_value(arg)) @@ -510,981 +176,248 @@ def parse_function_params(params): return function_meta -class LazyFunction(object): - """ call function lazily. - """ - - def __init__(self, function_meta, functions_mapping=None, check_variables_set=None): - """ init LazyFunction object with function_meta - - Args: - function_meta (dict): function name, args and kwargs. - { - "func_name": "func", - "args": [1, 2] - "kwargs": {"a": 3, "b": 4} - } - - """ - self.functions_mapping = functions_mapping or {} - self.check_variables_set = check_variables_set or set() - self.cache_key = None - self.__parse(function_meta) - - def __parse(self, function_meta): - """ init func as lazy functon instance - - Args: - function_meta (dict): function meta including name, args and kwargs - """ - self._func = get_mapping_function( - function_meta["func_name"], - self.functions_mapping - ) - self.func_name = self._func.__name__ - self._args = prepare_lazy_data( - function_meta.get("args", []), - self.functions_mapping, - self.check_variables_set - ) - self._kwargs = prepare_lazy_data( - function_meta.get("kwargs", {}), - self.functions_mapping, - self.check_variables_set - ) - - if self.func_name == "load_csv_file": - if len(self._args) != 1 or self._kwargs: - raise exceptions.ParamsError("P() should only pass in one argument!") - self._args = [self._args[0]] - elif self.func_name == "get_os_environ": - 
if len(self._args) != 1 or self._kwargs: - raise exceptions.ParamsError("ENV() should only pass in one argument!") - self._args = [self._args[0]] - - def get_args(self): - return self._args - - def update_args(self, args): - self._args = args - - def __repr__(self): - args_string = "" - - if self._args: - str_args = [str(arg) for arg in self._args] - args_string += ", ".join(str_args) - - if self._kwargs: - args_string += ", " - str_kwargs = [ - f"{key}={str(value)}" - for key, value in self._kwargs.items() - ] - args_string += ", ".join(str_kwargs) - - return f"LazyFunction({self.func_name}({args_string}))" - - def __prepare_cache_key(self, args, kwargs): - return self.func_name, repr(args), repr(kwargs) - - def to_value(self, variables_mapping=None): - """ parse lazy data with evaluated variables mapping. - Notice: variables_mapping should not contain any variable or function. - """ - variables_mapping = variables_mapping or {} - args = parse_lazy_data(self._args, variables_mapping) - kwargs = parse_lazy_data(self._kwargs, variables_mapping) - self.cache_key = self.__prepare_cache_key(args, kwargs) - return self._func(*args, **kwargs) - - -cached_functions_mapping = {} -""" cached function calling results. -""" - - -class LazyString(object): - """ evaluate string lazily. - """ - - def __init__(self, raw_string, functions_mapping=None, check_variables_set=None, cached=False): - """ make raw_string as lazy object with functions_mapping - check if any variable undefined in check_variables_set - """ - self.raw_string = raw_string - self.functions_mapping = functions_mapping or {} - self.check_variables_set = check_variables_set or set() - self.cached = cached - self.__parse(raw_string) - - def __parse(self, raw_string): - """ parse raw string, replace function and variable with {} - - Args: - raw_string(str): string with functions or varialbes - e.g. 
"ABC${func2($a, $b)}DE$c" - - Returns: - string: "ABC{}DE{}" - args: ["${func2($a, $b)}", "$c"] - - """ - self._args = [] - - def escape_braces(origin_string): - return origin_string.replace("{", "{{").replace("}", "}}") - - try: - match_start_position = raw_string.index("$", 0) - begin_string = raw_string[0:match_start_position] - self._string = escape_braces(begin_string) - except ValueError: - self._string = escape_braces(raw_string) - return - - while match_start_position < len(raw_string): - - # Notice: notation priority - # $$ > ${func($a, $b)} > $var - - # search $$ - dollar_match = dolloar_regex_compile.match(raw_string, match_start_position) - if dollar_match: - match_start_position = dollar_match.end() - self._string += "$" - continue - - # search function like ${func($a, $b)} - func_match = function_regex_compile.match(raw_string, match_start_position) - if func_match: - function_meta = { - "func_name": func_match.group(1) - } - function_meta.update(parse_function_params(func_match.group(2))) - lazy_func = LazyFunction( - function_meta, - self.functions_mapping, - self.check_variables_set - ) - self._args.append(lazy_func) - match_start_position = func_match.end() - self._string += "{}" - continue - - # search variable like ${var} or $var - var_match = variable_regex_compile.match(raw_string, match_start_position) - if var_match: - var_name = var_match.group(1) or var_match.group(2) - # check if any variable undefined in check_variables_set - if var_name not in self.check_variables_set: - raise exceptions.VariableNotFound(var_name) - - self._args.append(var_name) - match_start_position = var_match.end() - self._string += "{}" - continue - - curr_position = match_start_position - try: - # find next $ location - match_start_position = raw_string.index("$", curr_position + 1) - remain_string = raw_string[curr_position:match_start_position] - except ValueError: - remain_string = raw_string[curr_position:] - # break while loop - match_start_position = 
len(raw_string) - - self._string += escape_braces(remain_string) - - def __repr__(self): - return f"LazyString({self.raw_string})" - - def to_value(self, variables_mapping=None): - """ parse lazy data with evaluated variables mapping. - Notice: variables_mapping should not contain any variable or function. - """ - variables_mapping = variables_mapping or {} - - args = [] - for arg in self._args: - if isinstance(arg, LazyFunction): - if self.cached and arg.cache_key and arg.cache_key in cached_functions_mapping: - value = cached_functions_mapping[arg.cache_key] - else: - value = arg.to_value(variables_mapping) - cached_functions_mapping[arg.cache_key] = value - args.append(value) - else: - # variable - var_value = get_mapping_variable(arg, variables_mapping) - args.append(var_value) - - if self._string == "{}": - return args[0] - else: - return self._string.format(*args) - - -def prepare_lazy_data(content, functions_mapping=None, check_variables_set=None, cached=False): - """ make string in content as lazy object with functions_mapping - - Raises: - exceptions.VariableNotFound: if any variable undefined in check_variables_set - - """ - # TODO: refactor type check - if content is None or isinstance(content, (int, float, bool, type)): - return content - - elif isinstance(content, (list, set, tuple)): - return [ - prepare_lazy_data( - item, - functions_mapping, - check_variables_set, - cached - ) - for item in content - ] - - elif isinstance(content, dict): - parsed_content = {} - for key, value in content.items(): - parsed_key = prepare_lazy_data( - key, - functions_mapping, - check_variables_set, - cached - ) - parsed_value = prepare_lazy_data( - value, - functions_mapping, - check_variables_set, - cached - ) - parsed_content[parsed_key] = parsed_value - - return parsed_content - - elif isinstance(content, str): - # content is in string format here - if not is_var_or_func_exist(content): - # content is neither variable nor function - # replace $$ notation with $ and 
consider it as normal char. - # e.g. abc => abc, abc$$def => abc$def, abc$$$$def$$h => abc$$def$h - return content.replace("$$", "$") - - functions_mapping = functions_mapping or {} - check_variables_set = check_variables_set or set() - content = LazyString(content, functions_mapping, check_variables_set, cached) - - return content - - -def parse_lazy_data(content, variables_mapping=None): - """ parse lazy data with evaluated variables mapping. - Notice: variables_mapping should not contain any variable or function. - """ - # TODO: refactor type check - if content is None or isinstance(content, (int, float, bool, type)): - return content - - elif isinstance(content, LazyString): - variables_mapping = utils.ensure_mapping_format(variables_mapping or {}) - return content.to_value(variables_mapping) - - elif isinstance(content, (list, set, tuple)): - return [ - parse_lazy_data(item, variables_mapping) - for item in content - ] - - elif isinstance(content, dict): - parsed_content = {} - for key, value in content.items(): - parsed_key = parse_lazy_data(key, variables_mapping) - parsed_value = parse_lazy_data(value, variables_mapping) - parsed_content[parsed_key] = parsed_value - - return parsed_content - - return content - - -def eval_lazy_data(content, variables_mapping=None, functions_mapping=None): - """ evaluate data instantly. - Notice: variables_mapping should not contain any variable or function. - """ - variables_mapping = variables_mapping or {} - check_variables_set = set(variables_mapping.keys()) - return parse_lazy_data( - prepare_lazy_data( - content, - functions_mapping, - check_variables_set - ), - variables_mapping - ) - - -def extract_variables(content): - """ extract all variables in content recursively. 
- """ - if isinstance(content, (list, set, tuple)): - variables = set() - for item in content: - variables = variables | extract_variables(item) - return variables - - elif isinstance(content, dict): - variables = set() - for key, value in content.items(): - variables = variables | extract_variables(value) - return variables - - elif isinstance(content, LazyString): - return set(regex_findall_variables(content.raw_string)) - - return set() - - -def parse_variables_mapping(variables_mapping): - """ eval each prepared variable and function in variables_mapping. +def get_mapping_variable( + variable_name: Text, variables_mapping: VariablesMapping +) -> Any: + """ get variable from variables_mapping. Args: - variables_mapping (dict): - { - "varA": LazyString(123$varB), - "varB": LazyString(456$varC), - "varC": LazyString(${sum_two($a, $b)}), - "a": 1, - "b": 2, - "c": {"key": LazyString($b)}, - "d": [LazyString($a), 3] - } + variable_name (str): variable name + variables_mapping (dict): variables mapping Returns: - dict: parsed variables_mapping should not contain any variable or function. - { - "varA": "1234563", - "varB": "4563", - "varC": "3", - "a": 1, - "b": 2, - "c": {"key": 2}, - "d": [1, 3] - } + mapping variable value. + + Raises: + exceptions.VariableNotFound: variable is not found. """ - run_times = 0 - parsed_variables_mapping = {} + # TODO: get variable from debugtalk module and environ + try: + return variables_mapping[variable_name] + except KeyError: + raise exceptions.VariableNotFound( + f"{variable_name} not found in {variables_mapping}" + ) - while len(parsed_variables_mapping) != len(variables_mapping): + +def get_mapping_function( + function_name: Text, functions_mapping: FunctionsMapping +) -> Callable: + """ get function from functions_mapping, + if not found, then try to check if builtin function. + + Args: + function_name (str): function name + functions_mapping (dict): functions mapping + + Returns: + mapping function object. 
+ + Raises: + exceptions.FunctionNotFound: function is neither defined in debugtalk.py nor builtin. + + """ + if function_name in functions_mapping: + return functions_mapping[function_name] + + elif function_name in ["parameterize", "P"]: + return loader.load_csv_file + + elif function_name in ["environ", "ENV"]: + return utils.get_os_environ + + elif function_name in ["multipart_encoder", "multipart_content_type"]: + # extension for upload test + from httprunner.ext import uploader + + return getattr(uploader, function_name) + + try: + # check if HttpRunner builtin functions + built_in_functions = loader.load_builtin_functions() + return built_in_functions[function_name] + except KeyError: + pass + + try: + # check if Python builtin functions + return getattr(builtins, function_name) + except AttributeError: + pass + + raise exceptions.FunctionNotFound(f"{function_name} is not found.") + + +def parse_string( + raw_string: Text, + variables_mapping: VariablesMapping, + functions_mapping: FunctionsMapping, +) -> Any: + """ parse string content with variables and functions mapping. + + Args: + raw_string: raw string content to be parsed. + variables_mapping: variables mapping. + functions_mapping: functions mapping. + + Returns: + str: parsed string content. 
+ + Examples: + >>> raw_string = "abc${add_one($num)}def" + >>> variables_mapping = {"num": 3} + >>> functions_mapping = {"add_one": lambda x: x + 1} + >>> parse_string(raw_string, variables_mapping, functions_mapping) + "abc4def" + + """ + try: + match_start_position = raw_string.index("$", 0) + parsed_string = raw_string[0:match_start_position] + except ValueError: + parsed_string = raw_string + return parsed_string + + while match_start_position < len(raw_string): + + # Notice: notation priority + # $$ > ${func($a, $b)} > $var + + # search $$ + dollar_match = dolloar_regex_compile.match(raw_string, match_start_position) + if dollar_match: + match_start_position = dollar_match.end() + parsed_string += "$" + continue + + # search function like ${func($a, $b)} + func_match = function_regex_compile.match(raw_string, match_start_position) + if func_match: + func_name = func_match.group(1) + func = get_mapping_function(func_name, functions_mapping) + + func_params_str = func_match.group(2) + function_meta = parse_function_params(func_params_str) + args = function_meta["args"] + kwargs = function_meta["kwargs"] + + parsed_args = parse_data(args, variables_mapping, functions_mapping) + parsed_kwargs = parse_data(kwargs, variables_mapping, functions_mapping) + func_eval_value = func(*parsed_args, **parsed_kwargs) + + func_raw_str = "${" + func_name + f"({func_params_str})" + "}" + if func_raw_str == raw_string: + # raw_string is a function, e.g. "${add_one(3)}", return its eval value directly + return func_eval_value + + # raw_string contains one or many functions, e.g. 
"abc${add_one(3)}def" + parsed_string += str(func_eval_value) + match_start_position = func_match.end() + continue + + # search variable like ${var} or $var + var_match = variable_regex_compile.match(raw_string, match_start_position) + if var_match: + var_name = var_match.group(1) or var_match.group(2) + var_value = get_mapping_variable(var_name, variables_mapping) + + if f"${var_name}" == raw_string or "${" + var_name + "}" == raw_string: + # raw_string is a variable, $var or ${var}, return its value directly + return var_value + + # raw_string contains one or many variables, e.g. "abc${var}def" + parsed_string += str(var_value) + match_start_position = var_match.end() + continue + + curr_position = match_start_position + try: + # find next $ location + match_start_position = raw_string.index("$", curr_position + 1) + remain_string = raw_string[curr_position:match_start_position] + except ValueError: + remain_string = raw_string[curr_position:] + # break while loop + match_start_position = len(raw_string) + + parsed_string += remain_string + + return parsed_string + + +def parse_data( + raw_data: Any, + variables_mapping: VariablesMapping = None, + functions_mapping: FunctionsMapping = None, +) -> Any: + """ parse raw data with evaluated variables mapping. + Notice: variables_mapping should not contain any variable or function. 
+ """ + if isinstance(raw_data, str): + # content in string format may contains variables and functions + variables_mapping = variables_mapping or {} + functions_mapping = functions_mapping or {} + raw_data = raw_data.strip() + return parse_string(raw_data, variables_mapping, functions_mapping) + + elif isinstance(raw_data, (list, set, tuple)): + return [ + parse_data(item, variables_mapping, functions_mapping) for item in raw_data + ] + + elif isinstance(raw_data, dict): + parsed_data = {} + for key, value in raw_data.items(): + parsed_key = parse_data(key, variables_mapping, functions_mapping) + parsed_value = parse_data(value, variables_mapping, functions_mapping) + parsed_data[parsed_key] = parsed_value + + return parsed_data + + else: + # other types, e.g. None, int, float, bool + return raw_data + + +def parse_variables_mapping( + variables_mapping: VariablesMapping, functions_mapping: FunctionsMapping = None +) -> VariablesMapping: + + parsed_variables: VariablesMapping = {} + + while len(parsed_variables) != len(variables_mapping): for var_name in variables_mapping: - run_times += 1 - if run_times > len(variables_mapping) * 4: - not_found_variables = { - key: variables_mapping[key] - for key in variables_mapping - if key not in parsed_variables_mapping - } - raise exceptions.VariableNotFound(not_found_variables) - - if var_name in parsed_variables_mapping: + if var_name in parsed_variables: continue - value = variables_mapping[var_name] - variables = extract_variables(value) + var_value = variables_mapping[var_name] + variables = extract_variables(var_value) # check if reference variable itself if var_name in variables: # e.g. 
- # var_name = "token" - # variables_mapping = {"token": LazyString($token)} - # var_name = "key" - # variables_mapping = {"key": [LazyString($key), 2]} + # variables_mapping = {"token": "abc$token"} + # variables_mapping = {"key": ["$key", 2]} raise exceptions.VariableNotFound(var_name) - if variables: - # reference other variable, or function call with other variable + # check if reference variable not in variables_mapping + not_defined_variables = [ + v_name for v_name in variables if v_name not in variables_mapping + ] + if not_defined_variables: # e.g. {"varA": "123$varB", "varB": "456$varC"} # e.g. {"varC": "${sum_two($a, $b)}"} - if any([_var_name not in parsed_variables_mapping for _var_name in variables]): - # reference variable not parsed - continue + raise exceptions.VariableNotFound(not_defined_variables) - parsed_value = parse_lazy_data(value, parsed_variables_mapping) - parsed_variables_mapping[var_name] = parsed_value - - return parsed_variables_mapping - - -def _extend_with_api(test_dict, api_def_dict): - """ extend test with api definition, test will merge and override api definition. 
- - Args: - test_dict (dict): test block, this will override api_def_dict - api_def_dict (dict): api definition - - Examples: - >>> api_def_dict = { - "name": "get token 1", - "request": {...}, - "validate": [{'eq': ['status_code', 200]}] - } - >>> test_dict = { - "name": "get token 2", - "extract": {"token": "content.token"}, - "validate": [{'eq': ['status_code', 201]}, {'len_eq': ['content.token', 16]}] - } - >>> _extend_with_api(test_dict, api_def_dict) - >>> print(test_dict) - { - "name": "get token 2", - "request": {...}, - "extract": {"token": "content.token"}, - "validate": [{'eq': ['status_code', 201]}, {'len_eq': ['content.token', 16]}] - } - - """ - # override api name - test_dict.setdefault("name", api_def_dict.pop("name", "api name undefined")) - - # override variables - def_variables = api_def_dict.pop("variables", []) - test_dict["variables"] = utils.extend_variables( - def_variables, - test_dict.get("variables", {}) - ) - - # merge & override validators TODO: relocate - def_raw_validators = api_def_dict.pop("validate", []) - def_validators = [ - uniform_validator(_validator) - for _validator in def_raw_validators - ] - ref_validators = test_dict.pop("validate", []) - test_dict["validate"] = extend_validators( - def_validators, - ref_validators - ) - - # merge & override extractors - def_extrators = api_def_dict.pop("extract", {}) - test_dict["extract"] = utils.extend_variables( - def_extrators, - test_dict.get("extract", {}) - ) - - # merge & override request - test_dict["request"] = api_def_dict.pop("request", {}) - - # base_url & verify: priority api_def_dict > test_dict - if api_def_dict.get("base_url"): - test_dict["base_url"] = api_def_dict["base_url"] - - if "verify" in api_def_dict: - test_dict["request"]["verify"] = api_def_dict["verify"] - - # merge & override setup_hooks - def_setup_hooks = api_def_dict.pop("setup_hooks", []) - ref_setup_hooks = test_dict.get("setup_hooks", []) - extended_setup_hooks_tmp = def_setup_hooks + ref_setup_hooks 
- extended_setup_hooks = list(set(extended_setup_hooks_tmp)) - extended_setup_hooks.sort(key=extended_setup_hooks_tmp.index) - if extended_setup_hooks: - test_dict["setup_hooks"] = extended_setup_hooks - # merge & override teardown_hooks - def_teardown_hooks = api_def_dict.pop("teardown_hooks", []) - ref_teardown_hooks = test_dict.get("teardown_hooks", []) - extended_teardown_hooks_tmp = def_teardown_hooks + ref_teardown_hooks - extended_teardown_hooks = list(set(extended_teardown_hooks_tmp)) - extended_teardown_hooks.sort(key=extended_teardown_hooks_tmp.index) - if extended_teardown_hooks: - test_dict["teardown_hooks"] = extended_teardown_hooks - - # TODO: extend with other api definition items, e.g. times - test_dict.update(api_def_dict) - - -def _extend_with_testcase(test_dict, testcase_def_dict): - """ extend test with testcase definition - test will merge and override testcase config definition. - - Args: - test_dict (dict): test block - testcase_def_dict (dict): testcase definition - - Returns: - dict: extended test dict. - - """ - # override testcase config variables - testcase_def_dict["config"].setdefault("variables", {}) - testcase_def_variables = utils.ensure_mapping_format( - testcase_def_dict["config"].get("variables", {})) - testcase_def_variables.update(test_dict.pop("variables", {})) - testcase_def_dict["config"]["variables"] = testcase_def_variables - - # override base_url, verify - # priority: testcase config > testsuite tests - test_base_url = test_dict.pop("base_url", "") - if not testcase_def_dict["config"].get("base_url"): - testcase_def_dict["config"]["base_url"] = test_base_url - - # override name - test_name = test_dict.pop("name", None) \ - or testcase_def_dict["config"].pop("name", None) \ - or "testcase name undefined" - - # override testcase config name, output, etc. 
- testcase_def_dict["config"].update(test_dict) - testcase_def_dict["config"]["name"] = test_name - - test_dict.clear() - test_dict.update(testcase_def_dict) - - -def __prepare_config(config, project_mapping, session_variables_set=None): - """ parse testcase/testsuite config. - """ - # get config variables - raw_config_variables = config.pop("variables", {}) - - override_variables = utils.deepcopy_dict(project_mapping.get("variables", {})) - functions = project_mapping.get("functions", {}) - - if isinstance(raw_config_variables, str) and function_regex_compile.match( - raw_config_variables): - # config variables are generated by calling function - # e.g. - # "config": { - # "name": "basic test with httpbin", - # "variables": "${gen_variables()}" - # } - raw_config_variables_mapping = parse_lazy_data( - prepare_lazy_data(raw_config_variables, functions_mapping=functions) - ) - else: - raw_config_variables_mapping = utils.ensure_mapping_format(raw_config_variables) - - # override config variables with passed in variables - raw_config_variables_mapping.update(override_variables) - - if raw_config_variables_mapping: - config["variables"] = raw_config_variables_mapping - - check_variables_set = set(raw_config_variables_mapping.keys()) - check_variables_set |= (session_variables_set or set()) - prepared_config = prepare_lazy_data(config, functions, check_variables_set, cached=True) - return prepared_config - - -def __prepare_testcase_tests(tests, config, project_mapping, session_variables_set=None): - """ override tests with testcase config variables, base_url and verify. - test maybe nested testcase. 
- - variables priority: - testcase config > testcase test > testcase_def config > testcase_def test > api - - base_url priority: - testcase test > testcase config > testsuite test > testsuite config > api - - verify priority: - testcase teststep (api) > testcase config > testsuite config - - Args: - tests (list): - config (dict): - project_mapping (dict): - - """ - config_variables = config.get("variables", {}) - config_base_url = config.get("base_url", "") - config_verify = config.get("verify", True) - functions = project_mapping.get("functions", {}) - - prepared_testcase_tests = [] - session_variables_set = set(config_variables.keys()) | (session_variables_set or set()) - for test_dict in tests: - - teststep_variables_set = {"request", "response"} - - # 1, testcase config => testcase tests - # override test_dict variables - test_dict_variables = utils.extend_variables( - test_dict.pop("variables", {}), - config_variables - ) - test_dict["variables"] = test_dict_variables - - # base_url & verify: priority test_dict > config - if (not test_dict.get("base_url")) and config_base_url: - test_dict["base_url"] = config_base_url - - # unify validators' format - if "validate" in test_dict: - ref_raw_validators = test_dict.pop("validate", []) - test_dict["validate"] = [ - uniform_validator(_validator) - for _validator in ref_raw_validators - ] - - if "testcase_def" in test_dict: - # test_dict is nested testcase - - # pass former teststep's (as a testcase) export value to next teststep - # Since V2.2.2, `extract` is used to replace `output`, - # `output` is also kept for compatibility - if "extract" in test_dict: - session_variables_set |= set(test_dict["extract"]) - elif "output" in test_dict: - # kept for compatibility - session_variables_set |= set(test_dict["output"]) - - # 2, testcase test_dict => testcase_def config - testcase_def = test_dict.pop("testcase_def") - _extend_with_testcase(test_dict, testcase_def) - - # verify priority: nested testcase config > testcase 
config - test_dict["config"].setdefault("verify", config_verify) - - # 3, testcase_def config => testcase_def test_dict - test_dict = _parse_testcase(test_dict, project_mapping, session_variables_set) - if not test_dict: + try: + parsed_value = parse_data( + var_value, parsed_variables, functions_mapping + ) + except exceptions.VariableNotFound: continue - elif "api_def" in test_dict: - # test_dict has API reference - # 2, test_dict => api - api_def_dict = test_dict.pop("api_def") - _extend_with_api(test_dict, api_def_dict) + parsed_variables[var_name] = parsed_value - # verify priority: testcase teststep > testcase config - if "request" in test_dict: - if "verify" not in test_dict["request"]: - test_dict["request"]["verify"] = config_verify - - if "upload" in test_dict["request"]: - from httprunner.ext.uploader import prepare_upload_test - prepare_upload_test(test_dict) - - # current teststep variables - teststep_variables_set |= set(test_dict.get("variables", {}).keys()) - - # move extracted variable to session variables - if "extract" in test_dict: - extract_mapping = utils.ensure_mapping_format(test_dict["extract"]) - session_variables_set |= set(extract_mapping.keys()) - - teststep_variables_set |= session_variables_set - - # convert validators to lazy function - validators = test_dict.pop("validate", []) - prepared_validators = [] - for _validator in validators: - function_meta = { - "func_name": _validator["comparator"], - "args": [ - _validator["check"], - _validator["expect"] - ], - "kwargs": {} - } - prepared_validators.append( - LazyFunction( - function_meta, - functions, - teststep_variables_set - ) - ) - test_dict["validate"] = prepared_validators - - # convert variables and functions to lazy object. 
- # raises VariableNotFound if undefined variable exists in test_dict - prepared_test_dict = prepare_lazy_data( - test_dict, - functions, - teststep_variables_set - ) - prepared_testcase_tests.append(prepared_test_dict) - - return prepared_testcase_tests - - -def _parse_testcase(testcase, project_mapping, session_variables_set=None): - """ parse testcase - - Args: - testcase (dict): - { - "config": {}, - "teststeps": [] - } - - """ - testcase.setdefault("config", {}) - - try: - prepared_config = __prepare_config( - testcase["config"], - project_mapping, - session_variables_set - ) - prepared_testcase_tests = __prepare_testcase_tests( - testcase["teststeps"], - prepared_config, - project_mapping, - session_variables_set - ) - return { - "config": prepared_config, - "teststeps": prepared_testcase_tests - } - except (exceptions.MyBaseFailure, exceptions.MyBaseError) as ex: - testcase_type = testcase["type"] - testcase_path = testcase.get("path") - - logger.error(f"failed to parse testcase: {testcase_path}, error: {ex}") - - global parse_failed_testfiles - if testcase_type not in parse_failed_testfiles: - parse_failed_testfiles[testcase_type] = [] - - parse_failed_testfiles[testcase_type].append(testcase_path) - - return None - - -def __get_parsed_testsuite_testcases(testcases, testsuite_config, project_mapping): - """ override testscases with testsuite config variables, base_url and verify. 
- - variables priority: - parameters > testsuite config > testcase config > testcase_def config > testcase_def tests > api - - base_url priority: - testcase_def tests > testcase_def config > testcase config > testsuite config - - Args: - testcases (dict): - { - "testcase1 name": { - "testcase": "testcases/create_user.yml", - "weight": 2, - "variables": { - "uid": 1000 - }, - "parameters": { - "uid": [100, 101, 102] - }, - "testcase_def": { - "config": {}, - "teststeps": [] - } - }, - "testcase2 name": {} - } - testsuite_config (dict): - { - "name": "testsuite name", - "variables": { - "device_sn": "${gen_random_string(15)}" - }, - "base_url": "http://127.0.0.1:5000" - } - project_mapping (dict): - { - "env": {}, - "functions": {} - } - - """ - testsuite_base_url = testsuite_config.get("base_url") - testsuite_config_variables = testsuite_config.get("variables", {}) - functions = project_mapping.get("functions", {}) - parsed_testcase_list = [] - - for testcase_name, testcase in testcases.items(): - - parsed_testcase = testcase.pop("testcase_def") - parsed_testcase.setdefault("config", {}) - parsed_testcase["path"] = testcase["testcase"] - parsed_testcase["type"] = "testcase" - parsed_testcase["config"]["name"] = testcase_name - - if "weight" in testcase: - parsed_testcase["config"]["weight"] = testcase["weight"] - - # base_url priority: testcase config > testsuite config - parsed_testcase["config"].setdefault("base_url", testsuite_base_url) - - # 1, testsuite config => testcase config - # override test_dict variables - testcase_config_variables = utils.extend_variables( - testcase.pop("variables", {}), - testsuite_config_variables - ) - - # 2, testcase config > testcase_def config - # override testcase_def config variables - overrided_testcase_config_variables = utils.extend_variables( - parsed_testcase["config"].pop("variables", {}), - testcase_config_variables - ) - - if overrided_testcase_config_variables: - parsed_testcase["config"]["variables"] = 
overrided_testcase_config_variables - - # parse config variables - parsed_config_variables = parse_variables_mapping(overrided_testcase_config_variables) - - # parse parameters - if "parameters" in testcase and testcase["parameters"]: - cartesian_product_parameters = parse_parameters( - testcase["parameters"], - parsed_config_variables, - functions - ) - - for parameter_variables in cartesian_product_parameters: - # deepcopy to avoid influence between parameters - testcase_copied = utils.deepcopy_dict(parsed_testcase) - parsed_config_variables_copied = utils.deepcopy_dict(parsed_config_variables) - testcase_copied["config"]["variables"] = utils.extend_variables( - parsed_config_variables_copied, - parameter_variables - ) - parsed_testcase_copied = _parse_testcase(testcase_copied, project_mapping) - if not parsed_testcase_copied: - continue - parsed_testcase_copied["config"]["name"] = parse_lazy_data( - parsed_testcase_copied["config"]["name"], - testcase_copied["config"]["variables"] - ) - parsed_testcase_list.append(parsed_testcase_copied) - - else: - parsed_testcase = _parse_testcase(parsed_testcase, project_mapping) - if not parsed_testcase: - continue - parsed_testcase_list.append(parsed_testcase) - - return parsed_testcase_list - - -def _parse_testsuite(testsuite, project_mapping): - testsuite.setdefault("config", {}) - prepared_config = __prepare_config(testsuite["config"], project_mapping) - parsed_testcase_list = __get_parsed_testsuite_testcases( - testsuite["testcases"], - prepared_config, - project_mapping - ) - return parsed_testcase_list - - -def parse_tests(tests_mapping): - """ parse tests and load to parsed testcases - tests include api, testcases and testsuites. - - Args: - tests_mapping (dict): project info and testcases list. 
- - { - "project_mapping": { - "PWD": "XXXXX", - "functions": {}, - "variables": {}, # optional, priority 1 - "env": {} - }, - "testsuites": [ - { # testsuite data structure - "config": {}, - "testcases": { - "testcase1 name": { - "variables": { - "uid": 1000 - }, - "parameters": { - "uid": [100, 101, 102] - }, - "testcase_def": { - "config": {}, - "teststeps": [] - } - }, - "testcase2 name": {} - } - } - ], - "testcases": [ - { # testcase data structure - "config": { - "name": "desc1", - "path": "testcase1_path", - "variables": {}, # optional, priority 2 - }, - "teststeps": [ - # test data structure - { - 'name': 'test step desc1', - 'variables': [], # optional, priority 3 - 'extract': [], - 'validate': [], - 'api_def': { - "variables": {} # optional, priority 4 - 'request': {}, - } - }, - test_dict_2 # another test dict - ] - }, - testcase_dict_2 # another testcase dict - ], - "api": { - "variables": {}, - "request": {} - } - } - - """ - project_mapping = tests_mapping.get("project_mapping", {}) - testcases = [] - - for test_type in tests_mapping: - - if test_type == "testsuites": - # load testcases of testsuite - testsuites = tests_mapping["testsuites"] - for testsuite in testsuites: - parsed_testcases = _parse_testsuite(testsuite, project_mapping) - for parsed_testcase in parsed_testcases: - testcases.append(parsed_testcase) - - elif test_type == "testcases": - for testcase in tests_mapping["testcases"]: - testcase["type"] = "testcase" - parsed_testcase = _parse_testcase(testcase, project_mapping) - if not parsed_testcase: - continue - testcases.append(parsed_testcase) - - elif test_type == "apis": - # encapsulate api as a testcase - for api_content in tests_mapping["apis"]: - testcase = { - "config": { - "name": api_content.get("name") - }, - "teststeps": [api_content], - "path": api_content.pop("path", None), - "type": api_content.pop("type", "api") - } - parsed_testcase = _parse_testcase(testcase, project_mapping) - if not parsed_testcase: - continue - 
testcases.append(parsed_testcase) - - return testcases + return parsed_variables diff --git a/httprunner/parser_test.py b/httprunner/parser_test.py index 67356d6f..bfd95c92 100644 --- a/httprunner/parser_test.py +++ b/httprunner/parser_test.py @@ -1,13 +1,22 @@ -import os import time import unittest -from httprunner import exceptions, loader, parser -from httprunner.loader import load -from tests.debugtalk import gen_random_string, sum_two +from httprunner import parser +from httprunner.exceptions import VariableNotFound, FunctionNotFound class TestParserBasic(unittest.TestCase): + def test_parse_variables_mapping(self): + variables = {"varA": "$varB", "varB": "$varC", "varC": "123", "a": 1, "b": 2} + parsed_variables = parser.parse_variables_mapping(variables) + print(parsed_variables) + self.assertEqual(parsed_variables["varA"], "123") + self.assertEqual(parsed_variables["varB"], "123") + + def test_parse_variables_mapping_exception(self): + variables = {"varA": "$varB", "varB": "$varC", "a": 1, "b": 2} + with self.assertRaises(VariableNotFound): + parser.parse_variables_mapping(variables) def test_parse_string_value(self): self.assertEqual(parser.parse_string_value("123"), 123) @@ -16,200 +25,377 @@ class TestParserBasic(unittest.TestCase): self.assertEqual(parser.parse_string_value("$var"), "$var") self.assertEqual(parser.parse_string_value("${func}"), "${func}") - def test_regex_findall_variables(self): + def test_extract_variables(self): + self.assertEqual(parser.extract_variables("$var"), {"var"}) + self.assertEqual(parser.extract_variables("$var123"), {"var123"}) + self.assertEqual(parser.extract_variables("$var_name"), {"var_name"}) + self.assertEqual(parser.extract_variables("var"), set()) + self.assertEqual(parser.extract_variables("a$var"), {"var"}) + self.assertEqual(parser.extract_variables("$v ar"), {"v"}) + self.assertEqual(parser.extract_variables(" "), set()) + self.assertEqual(parser.extract_variables("$abc*"), {"abc"}) + 
self.assertEqual(parser.extract_variables("${func()}"), set()) + self.assertEqual(parser.extract_variables("${func(1,2)}"), set()) self.assertEqual( - parser.regex_findall_variables("$var"), - ["var"] - ) - self.assertEqual( - parser.regex_findall_variables("$var123"), - ["var123"] - ) - self.assertEqual( - parser.regex_findall_variables("$var_name"), - ["var_name"] - ) - self.assertEqual( - parser.regex_findall_variables("var"), - [] - ) - self.assertEqual( - parser.regex_findall_variables("a$var"), - ["var"] - ) - self.assertEqual( - parser.regex_findall_variables("a$var${var2}$var3${var4}"), - ["var", "var2", "var3", "var4"] - ) - self.assertEqual( - parser.regex_findall_variables("$v ar"), - ["v"] - ) - self.assertEqual( - parser.regex_findall_variables(" "), - [] - ) - self.assertEqual( - parser.regex_findall_variables("$abc*"), - ["abc"] - ) - self.assertEqual( - parser.regex_findall_variables("${func()}"), - [] - ) - self.assertEqual( - parser.regex_findall_variables("${func(1,2)}"), - [] - ) - self.assertEqual( - parser.regex_findall_variables("${gen_md5($TOKEN, $data, $random)}"), - ["TOKEN", "data", "random"] + parser.extract_variables("${gen_md5($TOKEN, $data, $random)}"), + {"TOKEN", "data", "random"}, ) def test_parse_function_params(self): + self.assertEqual(parser.parse_function_params(""), {"args": [], "kwargs": {}}) + self.assertEqual(parser.parse_function_params("5"), {"args": [5], "kwargs": {}}) self.assertEqual( - parser.parse_function_params(""), - {'args': [], 'kwargs': {}} - ) - self.assertEqual( - parser.parse_function_params("5"), - {'args': [5], 'kwargs': {}} - ) - self.assertEqual( - parser.parse_function_params("1, 2"), - {'args': [1, 2], 'kwargs': {}} + parser.parse_function_params("1, 2"), {"args": [1, 2], "kwargs": {}} ) self.assertEqual( parser.parse_function_params("a=1, b=2"), - {'args': [], 'kwargs': {'a': 1, 'b': 2}} + {"args": [], "kwargs": {"a": 1, "b": 2}}, ) self.assertEqual( parser.parse_function_params("a= 1, b =2"), - 
{'args': [], 'kwargs': {'a': 1, 'b': 2}} + {"args": [], "kwargs": {"a": 1, "b": 2}}, ) self.assertEqual( parser.parse_function_params("1, 2, a=3, b=4"), - {'args': [1, 2], 'kwargs': {'a': 3, 'b': 4}} + {"args": [1, 2], "kwargs": {"a": 3, "b": 4}}, ) self.assertEqual( parser.parse_function_params("$request, 123"), - {'args': ["$request", 123], 'kwargs': {}} - ) - self.assertEqual( - parser.parse_function_params(" "), - {'args': [], 'kwargs': {}} + {"args": ["$request", 123], "kwargs": {}}, ) + self.assertEqual(parser.parse_function_params(" "), {"args": [], "kwargs": {}}) self.assertEqual( parser.parse_function_params("hello world, a=3, b=4"), - {'args': ["hello world"], 'kwargs': {'a': 3, 'b': 4}} + {"args": ["hello world"], "kwargs": {"a": 3, "b": 4}}, ) self.assertEqual( parser.parse_function_params("$request, 12 3"), - {'args': ["$request", '12 3'], 'kwargs': {}} - ) - - def test_extract_variables(self): - prepared_content = parser.prepare_lazy_data("123$a", {}, {"a"}) - self.assertEqual( - parser.extract_variables(prepared_content), - {"a"} - ) - prepared_content = parser.prepare_lazy_data("$a$b", {}, {"a", "b"}) - self.assertEqual( - parser.extract_variables(prepared_content), - {"a", "b"} - ) - prepared_content = parser.prepare_lazy_data(["$a$b", "$c", "d"], {}, {"a", "b", "c", "d"}) - self.assertEqual( - parser.extract_variables(prepared_content), - {"a", "b", "c"} - ) - prepared_content = parser.prepare_lazy_data( - {"a": 1, "b": {"c": "$d", "e": 3}}, - {}, - {"d"} - ) - self.assertEqual( - parser.extract_variables(prepared_content), - {"d"} - ) - prepared_content = parser.prepare_lazy_data( - {"a": ["$b"], "b": {"c": "$d", "e": 3}}, - {}, - {"b", "d"} - ) - self.assertEqual( - parser.extract_variables(prepared_content), - {"b", "d"} - ) - prepared_content = parser.prepare_lazy_data( - ["$a$b", "$c", {"c": "$d"}], - {}, - {"a", "b", "c", "d"} - ) - self.assertEqual( - parser.extract_variables(prepared_content), - {"a", "b", "c", "d"} + {"args": ["$request", 
"12 3"], "kwargs": {}}, ) def test_extract_functions(self): + self.assertEqual(parser.regex_findall_functions("${func()}"), [("func", "")]) + self.assertEqual(parser.regex_findall_functions("${func(5)}"), [("func", "5")]) self.assertEqual( - parser.regex_findall_functions("${func()}"), - [('func', '')] - ) - self.assertEqual( - parser.regex_findall_functions("${func(5)}"), - [('func', '5')] - ) - self.assertEqual( - parser.regex_findall_functions("${func(a=1, b=2)}"), - [('func', 'a=1, b=2')] + parser.regex_findall_functions("${func(a=1, b=2)}"), [("func", "a=1, b=2")] ) self.assertEqual( parser.regex_findall_functions("${func(1, $b, c=$x, d=4)}"), - [('func', '1, $b, c=$x, d=4')] + [("func", "1, $b, c=$x, d=4")], ) self.assertEqual( parser.regex_findall_functions("/api/1000?_t=${get_timestamp()}"), - [('get_timestamp', '')] + [("get_timestamp", "")], ) self.assertEqual( - parser.regex_findall_functions("/api/${add(1, 2)}"), - [('add', '1, 2')] + parser.regex_findall_functions("/api/${add(1, 2)}"), [("add", "1, 2")] ) self.assertEqual( parser.regex_findall_functions("/api/${add(1, 2)}?_t=${get_timestamp()}"), - [('add', '1, 2'), ('get_timestamp', '')] + [("add", "1, 2"), ("get_timestamp", "")], ) self.assertEqual( parser.regex_findall_functions("abc${func(1, 2, a=3, b=4)}def"), - [('func', '1, 2, a=3, b=4')] + [("func", "1, 2, a=3, b=4")], ) - def test_parse_data(self): + def test_parse_data_string_with_variables(self): + variables_mapping = { + "var_1": "abc", + "var_2": "def", + "var_3": 123, + "var_4": {"a": 1}, + "var_5": True, + "var_6": None, + } + self.assertEqual(parser.parse_data("$var_1", variables_mapping), "abc") + self.assertEqual(parser.parse_data("${var_1}", variables_mapping), "abc") + self.assertEqual(parser.parse_data("var_1", variables_mapping), "var_1") + self.assertEqual(parser.parse_data("$var_1#XYZ", variables_mapping), "abc#XYZ") + self.assertEqual( + parser.parse_data("${var_1}#XYZ", variables_mapping), "abc#XYZ" + ) + self.assertEqual( + 
parser.parse_data("/$var_1/$var_2/var3", variables_mapping), "/abc/def/var3" + ) + self.assertEqual(parser.parse_data("$var_3", variables_mapping), 123) + self.assertEqual(parser.parse_data("$var_4", variables_mapping), {"a": 1}) + self.assertEqual(parser.parse_data("$var_5", variables_mapping), True) + self.assertEqual(parser.parse_data("abc$var_5", variables_mapping), "abcTrue") + self.assertEqual( + parser.parse_data("abc$var_4", variables_mapping), "abc{'a': 1}" + ) + self.assertEqual(parser.parse_data("$var_6", variables_mapping), None) + + with self.assertRaises(VariableNotFound): + parser.parse_data("/api/$SECRET_KEY", variables_mapping) + + self.assertEqual( + parser.parse_data(["$var_1", "$var_2"], variables_mapping), ["abc", "def"] + ) + self.assertEqual( + parser.parse_data({"$var_1": "$var_2"}, variables_mapping), {"abc": "def"} + ) + + # format: $var + value = parser.parse_data("ABC$var_1", variables_mapping) + self.assertEqual(value, "ABCabc") + + value = parser.parse_data("ABC$var_1$var_3", variables_mapping) + self.assertEqual(value, "ABCabc123") + + value = parser.parse_data("ABC$var_1/$var_3", variables_mapping) + self.assertEqual(value, "ABCabc/123") + + value = parser.parse_data("ABC$var_1/", variables_mapping) + self.assertEqual(value, "ABCabc/") + + value = parser.parse_data("ABC$var_1$", variables_mapping) + self.assertEqual(value, "ABCabc$") + + value = parser.parse_data("ABC$var_1/123$var_1/456", variables_mapping) + self.assertEqual(value, "ABCabc/123abc/456") + + value = parser.parse_data("ABC$var_1/$var_2/$var_1", variables_mapping) + self.assertEqual(value, "ABCabc/def/abc") + + value = parser.parse_data("func1($var_1, $var_3)", variables_mapping) + self.assertEqual(value, "func1(abc, 123)") + + # format: ${var} + value = parser.parse_data("ABC${var_1}", variables_mapping) + self.assertEqual(value, "ABCabc") + + value = parser.parse_data("ABC${var_1}${var_3}", variables_mapping) + self.assertEqual(value, "ABCabc123") + + value = 
parser.parse_data("ABC${var_1}/${var_3}", variables_mapping) + self.assertEqual(value, "ABCabc/123") + + value = parser.parse_data("ABC${var_1}/", variables_mapping) + self.assertEqual(value, "ABCabc/") + + value = parser.parse_data("ABC${var_1}123", variables_mapping) + self.assertEqual(value, "ABCabc123") + + value = parser.parse_data("ABC${var_1}/123${var_1}/456", variables_mapping) + self.assertEqual(value, "ABCabc/123abc/456") + + value = parser.parse_data("ABC${var_1}/${var_2}/${var_1}", variables_mapping) + self.assertEqual(value, "ABCabc/def/abc") + + value = parser.parse_data("func1(${var_1}, ${var_3})", variables_mapping) + self.assertEqual(value, "func1(abc, 123)") + + def test_parse_data_multiple_identical_variables(self): + variables_mapping = { + "var_1": "abc", + "var_2": "def", + } + self.assertEqual( + parser.parse_data("/$var_1/$var_2/$var_1", variables_mapping), + "/abc/def/abc", + ) + + variables_mapping = {"userid": 100, "data": 1498} + content = "/users/$userid/training/$data?userId=$userid&data=$data" + self.assertEqual( + parser.parse_data(content, variables_mapping), + "/users/100/training/1498?userId=100&data=1498", + ) + + variables_mapping = {"user": 100, "userid": 1000, "data": 1498} + content = "/users/$user/$userid/$data?userId=$userid&data=$data" + self.assertEqual( + parser.parse_data(content, variables_mapping), + "/users/100/1000/1498?userId=1000&data=1498", + ) + + def test_parse_data_string_with_functions(self): + import random, string + + functions_mapping = { + "gen_random_string": lambda str_len: "".join( + random.choice(string.ascii_letters + string.digits) + for _ in range(str_len) + ) + } + result = parser.parse_data( + "${gen_random_string(5)}", functions_mapping=functions_mapping + ) + self.assertEqual(len(result), 5) + + add_two_nums = lambda a, b=1: a + b + functions_mapping["add_two_nums"] = add_two_nums + self.assertEqual( + parser.parse_data( + "${add_two_nums(1)}", functions_mapping=functions_mapping + ), + 2, + ) 
+ self.assertEqual( + parser.parse_data( + "${add_two_nums(1, 2)}", functions_mapping=functions_mapping + ), + 3, + ) + self.assertEqual( + parser.parse_data( + "/api/${add_two_nums(1, 2)}", functions_mapping=functions_mapping + ), + "/api/3", + ) + + with self.assertRaises(FunctionNotFound): + parser.parse_data("/api/${gen_md5(abc)}") + + variables_mapping = { + "var_1": "abc", + "var_2": "def", + "var_3": 123, + "var_4": {"a": 1}, + "var_5": True, + "var_6": None, + } + functions_mapping = {"func1": lambda x, y: str(x) + str(y)} + + value = parser.parse_data( + "${func1($var_1, $var_3)}", variables_mapping, functions_mapping + ) + self.assertEqual(value, "abc123") + + value = parser.parse_data( + "ABC${func1($var_1, $var_3)}DE", variables_mapping, functions_mapping + ) + self.assertEqual(value, "ABCabc123DE") + + value = parser.parse_data( + "ABC${func1($var_1, $var_3)}$var_5", variables_mapping, functions_mapping + ) + self.assertEqual(value, "ABCabc123True") + + value = parser.parse_data( + "ABC${func1($var_1, $var_3)}DE$var_4", variables_mapping, functions_mapping + ) + self.assertEqual(value, "ABCabc123DE{'a': 1}") + + value = parser.parse_data( + "ABC$var_5${func1($var_1, $var_3)}", variables_mapping, functions_mapping + ) + self.assertEqual(value, "ABCTrueabc123") + + value = parser.parse_data( + "ABC${ord(a)}DEF${len(abcd)}", variables_mapping, functions_mapping + ) + self.assertEqual(value, "ABC97DEF4") + + def test_parse_data_func_var_duplicate(self): + variables_mapping = { + "var_1": "abc", + "var_2": "def", + "var_3": 123, + "var_4": {"a": 1}, + "var_5": True, + "var_6": None, + } + functions_mapping = {"func1": lambda x, y: str(x) + str(y)} + value = parser.parse_data( + "ABC${func1($var_1, $var_3)}--${func1($var_1, $var_3)}", + variables_mapping, + functions_mapping, + ) + self.assertEqual(value, "ABCabc123--abc123") + + value = parser.parse_data( + "ABC${func1($var_1, $var_3)}$var_1", variables_mapping, functions_mapping + ) + 
self.assertEqual(value, "ABCabc123abc") + + value = parser.parse_data( + "ABC${func1($var_1, $var_3)}$var_1--${func1($var_1, $var_3)}$var_1", + variables_mapping, + functions_mapping, + ) + self.assertEqual(value, "ABCabc123abc--abc123abc") + + def test_parse_data_func_abnormal(self): + variables_mapping = { + "var_1": "abc", + "var_2": "def", + "var_3": 123, + "var_4": {"a": 1}, + "var_5": True, + "var_6": None, + } + functions_mapping = {"func1": lambda x, y: str(x) + str(y)} + + # { + value = parser.parse_data("ABC$var_1{", variables_mapping, functions_mapping) + self.assertEqual(value, "ABCabc{") + + value = parser.parse_data( + "{ABC$var_1{}a}", variables_mapping, functions_mapping + ) + self.assertEqual(value, "{ABCabc{}a}") + + value = parser.parse_data( + "AB{C$var_1{}a}", variables_mapping, functions_mapping + ) + self.assertEqual(value, "AB{Cabc{}a}") + + # } + value = parser.parse_data("ABC$var_1}", variables_mapping, functions_mapping) + self.assertEqual(value, "ABCabc}") + + # $$ + value = parser.parse_data("ABC$$var_1{", variables_mapping, functions_mapping) + self.assertEqual(value, "ABC$var_1{") + + # $$$ + value = parser.parse_data("ABC$$$var_1{", variables_mapping, functions_mapping) + self.assertEqual(value, "ABC$abc{") + + # $$$$ + value = parser.parse_data("ABC$$$$var_1{", variables_mapping, functions_mapping) + self.assertEqual(value, "ABC$$var_1{") + + # ${ + value = parser.parse_data("ABC$var_1${", variables_mapping, functions_mapping) + self.assertEqual(value, "ABCabc${") + + value = parser.parse_data("ABC$var_1${a", variables_mapping, functions_mapping) + self.assertEqual(value, "ABCabc${a") + + # $} + value = parser.parse_data("ABC$var_1$}a", variables_mapping, functions_mapping) + self.assertEqual(value, "ABCabc$}a") + + # }{ + value = parser.parse_data("ABC$var_1}{a", variables_mapping, functions_mapping) + self.assertEqual(value, "ABCabc}{a") + + # {} + value = parser.parse_data("ABC$var_1{}a", variables_mapping, functions_mapping) + 
self.assertEqual(value, "ABCabc{}a") + + def test_parse_data_request(self): content = { - 'request': { - 'url': '/api/users/$uid', - 'method': "$method", - 'headers': {'token': '$token'}, - 'data': { + "request": { + "url": "/api/users/$uid", + "method": "$method", + "headers": {"token": "$token"}, + "data": { "null": None, "true": True, "false": False, "empty_str": "", - "value": "abc${add_one(3)}def" - } + "value": "abc${add_one(3)}def", + }, } } - variables_mapping = { - "uid": 1000, - "method": "POST", - "token": "abc123" - } - functions_mapping = { - "add_one": lambda x: x + 1 - } - result = parser.eval_lazy_data(content, variables_mapping, functions_mapping) + variables_mapping = {"uid": 1000, "method": "POST", "token": "abc123"} + functions_mapping = {"add_one": lambda x: x + 1} + result = parser.parse_data(content, variables_mapping, functions_mapping) self.assertEqual("/api/users/1000", result["request"]["url"]) self.assertEqual("abc123", result["request"]["headers"]["token"]) self.assertEqual("POST", result["request"]["method"]) @@ -219,382 +405,16 @@ class TestParserBasic(unittest.TestCase): self.assertEqual("", result["request"]["data"]["empty_str"]) self.assertEqual("abc4def", result["request"]["data"]["value"]) - def test_eval_lazy_data(self): - variables_mapping = { - "var_1": "abc", - "var_2": "def", - "var_3": 123, - "var_4": {"a": 1}, - "var_5": True, - "var_6": None - } - self.assertEqual( - parser.eval_lazy_data("$var_1", variables_mapping=variables_mapping), - "abc" - ) - self.assertEqual( - parser.eval_lazy_data("var_1", variables_mapping=variables_mapping), - "var_1" - ) - self.assertEqual( - parser.eval_lazy_data("$var_1#XYZ", variables_mapping=variables_mapping), - "abc#XYZ" - ) - self.assertEqual( - parser.eval_lazy_data("/$var_1/$var_2/var3", variables_mapping=variables_mapping), - "/abc/def/var3" - ) - self.assertEqual( - parser.eval_lazy_data("/$var_1/$var_2/$var_1", variables_mapping=variables_mapping), - "/abc/def/abc" - ) - 
self.assertEqual( - parser.eval_lazy_data("$var_3", variables_mapping=variables_mapping), - 123 - ) - self.assertEqual( - parser.eval_lazy_data("$var_4", variables_mapping=variables_mapping), - {"a": 1} - ) - self.assertEqual( - parser.eval_lazy_data("$var_5", variables_mapping=variables_mapping), - True - ) - self.assertEqual( - parser.eval_lazy_data("abc$var_5", variables_mapping=variables_mapping), - "abcTrue" - ) - self.assertEqual( - parser.eval_lazy_data("abc$var_4", variables_mapping=variables_mapping), - "abc{'a': 1}" - ) - self.assertEqual( - parser.eval_lazy_data("$var_6", variables_mapping=variables_mapping), - None - ) - - with self.assertRaises(exceptions.VariableNotFound): - parser.eval_lazy_data("/api/$SECRET_KEY", variables_mapping=variables_mapping) - - self.assertEqual( - parser.eval_lazy_data(["$var_1", "$var_2"], variables_mapping=variables_mapping), - ["abc", "def"] - ) - self.assertEqual( - parser.eval_lazy_data({"$var_1": "$var_2"}, variables_mapping=variables_mapping), - {"abc": "def"} - ) - - def test_parse_func_var_abnormal(self): - variables_mapping = { - "var_1": "abc", - "var_2": "def", - "var_3": 123, - "var_4": {"a": 1}, - "var_5": True, - "var_6": None - } - check_variables_set = variables_mapping.keys() - functions_mapping = { - "func1": lambda x,y: str(x) + str(y) - } - - # { - var = parser.LazyString("ABC$var_1{", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}{{") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc{") - - var = parser.LazyString("{ABC$var_1{}a}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "{{ABC{}{{}}a}}") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "{ABCabc{}a}") - - var = parser.LazyString("AB{C$var_1{}a}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "AB{{C{}{{}}a}}") - self.assertEqual(var._args, ["var_1"]) - 
self.assertEqual(var.to_value(variables_mapping), "AB{Cabc{}a}") - - # } - var = parser.LazyString("ABC$var_1}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}}}") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc}") - - # $$ - var = parser.LazyString("ABC$$var_1{", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC$var_1{{") - self.assertEqual(var._args, []) - self.assertEqual(var.to_value(variables_mapping), "ABC$var_1{") - - # $$$ - var = parser.LazyString("ABC$$$var_1{", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC${}{{") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABC$abc{") - - # $$$$ - var = parser.LazyString("ABC$$$$var_1{", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC$$var_1{{") - self.assertEqual(var._args, []) - self.assertEqual(var.to_value(variables_mapping), "ABC$$var_1{") - - # ${ - var = parser.LazyString("ABC$var_1${", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}${{") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc${") - - var = parser.LazyString("ABC$var_1${a", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}${{a") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc${a") - - # $} - var = parser.LazyString("ABC$var_1$}a", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}$}}a") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc$}a") - - # }{ - var = parser.LazyString("ABC$var_1}{a", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}}}{{a") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc}{a") - - # {} - var = 
parser.LazyString("ABC$var_1{}a", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}{{}}a") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc{}a") - - def test_parse_func_var_duplicate(self): - variables_mapping = { - "var_1": "abc", - "var_2": "def", - "var_3": 123, - "var_4": {"a": 1}, - "var_5": True, - "var_6": None - } - check_variables_set = variables_mapping.keys() - functions_mapping = { - "func1": lambda x,y: str(x) + str(y) - } - var = parser.LazyString( - "ABC${func1($var_1, $var_3)}--${func1($var_1, $var_3)}", - functions_mapping, - check_variables_set - ) - self.assertEqual(var._string, "ABC{}--{}") - self.assertEqual(var.to_value(variables_mapping), "ABCabc123--abc123") - - var = parser.LazyString("ABC${func1($var_1, $var_3)}$var_1", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}{}") - self.assertEqual(var.to_value(variables_mapping), "ABCabc123abc") - - var = parser.LazyString( - "ABC${func1($var_1, $var_3)}$var_1--${func1($var_1, $var_3)}$var_1", - functions_mapping, - check_variables_set - ) - self.assertEqual(var._string, "ABC{}{}--{}{}") - self.assertEqual(var.to_value(variables_mapping), "ABCabc123abc--abc123abc") - - def test_parse_function(self): - variables_mapping = { - "var_1": "abc", - "var_2": "def", - "var_3": 123, - "var_4": {"a": 1}, - "var_5": True, - "var_6": None - } - check_variables_set = variables_mapping.keys() - functions_mapping = { - "func1": lambda x,y: str(x) + str(y) - } - - var = parser.LazyString("${func1($var_1, $var_3)}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "{}") - self.assertIsInstance(var._args[0], parser.LazyFunction) - self.assertEqual(var.to_value(variables_mapping), "abc123") - - var = parser.LazyString("ABC${func1($var_1, $var_3)}DE", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}DE") - self.assertIsInstance(var._args[0], 
parser.LazyFunction) - self.assertEqual(var.to_value(variables_mapping), "ABCabc123DE") - - var = parser.LazyString("ABC${func1($var_1, $var_3)}$var_5", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}{}") - self.assertEqual(var.to_value(variables_mapping), "ABCabc123True") - - var = parser.LazyString("ABC${func1($var_1, $var_3)}DE$var_4", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}DE{}") - self.assertEqual(var.to_value(variables_mapping), "ABCabc123DE{'a': 1}") - - var = parser.LazyString("ABC$var_5${func1($var_1, $var_3)}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}{}") - self.assertEqual(var.to_value(variables_mapping), "ABCTrueabc123") - - # Python builtin functions - var = parser.LazyString("ABC${ord(a)}DEF${len(abcd)}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}DEF{}") - self.assertEqual(var.to_value(variables_mapping), "ABC97DEF4") - - def test_parse_variable(self): - """ variable format ${var} and $var - """ - variables_mapping = { - "var_1": "abc", - "var_2": "def", - "var_3": 123, - "var_4": {"a": 1}, - "var_5": True, - "var_6": None - } - check_variables_set = variables_mapping.keys() - functions_mapping = {} - - # format: $var - var = parser.LazyString("ABC$var_1", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc") - - var = parser.LazyString("ABC$var_1$var_3", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}{}") - self.assertEqual(var._args, ["var_1", "var_3"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc123") - - var = parser.LazyString("ABC$var_1/$var_3", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/{}") - self.assertEqual(var._args, ["var_1", "var_3"]) - self.assertEqual(var.to_value(variables_mapping), 
"ABCabc/123") - - var = parser.LazyString("ABC$var_1/", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc/") - - var = parser.LazyString("ABC$var_1$", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}$") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc$") - - var = parser.LazyString("ABC$var_1/123$var_1/456", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/123{}/456") - self.assertEqual(var._args, ["var_1", "var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc/123abc/456") - - var = parser.LazyString("ABC$var_1/$var_2/$var_1", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/{}/{}") - self.assertEqual(var._args, ["var_1", "var_2", "var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc/def/abc") - - var = parser.LazyString("func1($var_1, $var_3)", functions_mapping, check_variables_set) - self.assertEqual(var._string, "func1({}, {})") - self.assertEqual(var._args, ["var_1", "var_3"]) - self.assertEqual(var.to_value(variables_mapping), "func1(abc, 123)") - - # format: ${var} - var = parser.LazyString("ABC${var_1}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc") - - var = parser.LazyString("ABC${var_1}${var_3}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}{}") - self.assertEqual(var._args, ["var_1", "var_3"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc123") - - var = parser.LazyString("ABC${var_1}/${var_3}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/{}") - self.assertEqual(var._args, ["var_1", "var_3"]) - 
self.assertEqual(var.to_value(variables_mapping), "ABCabc/123") - - var = parser.LazyString("ABC${var_1}/", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc/") - - var = parser.LazyString("ABC${var_1}123", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}123") - self.assertEqual(var._args, ["var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc123") - - var = parser.LazyString("ABC${var_1}/123${var_1}/456", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/123{}/456") - self.assertEqual(var._args, ["var_1", "var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc/123abc/456") - - var = parser.LazyString("ABC${var_1}/${var_2}/${var_1}", functions_mapping, check_variables_set) - self.assertEqual(var._string, "ABC{}/{}/{}") - self.assertEqual(var._args, ["var_1", "var_2", "var_1"]) - self.assertEqual(var.to_value(variables_mapping), "ABCabc/def/abc") - - var = parser.LazyString("func1(${var_1}, ${var_3})", functions_mapping, check_variables_set) - self.assertEqual(var._string, "func1({}, {})") - self.assertEqual(var._args, ["var_1", "var_3"]) - self.assertEqual(var.to_value(variables_mapping), "func1(abc, 123)") - - def test_parse_data_multiple_identical_variables(self): - variables_mapping = { - "userid": 100, - "data": 1498 - } - content = "/users/$userid/training/$data?userId=$userid&data=$data" - self.assertEqual( - parser.eval_lazy_data(content, variables_mapping=variables_mapping), - "/users/100/training/1498?userId=100&data=1498" - ) - - variables_mapping = { - "user": 100, - "userid": 1000, - "data": 1498 - } - content = "/users/$user/$userid/$data?userId=$userid&data=$data" - self.assertEqual( - parser.eval_lazy_data(content, variables_mapping=variables_mapping), - "/users/100/1000/1498?userId=1000&data=1498" - ) - - def 
test_parse_data_functions(self): - functions_mapping = { - "gen_random_string": gen_random_string - } - result = parser.eval_lazy_data("${gen_random_string(5)}", functions_mapping=functions_mapping) - self.assertEqual(len(result), 5) - - add_two_nums = lambda a, b=1: a + b - functions_mapping["add_two_nums"] = add_two_nums - self.assertEqual( - parser.eval_lazy_data("${add_two_nums(1)}", functions_mapping=functions_mapping), - 2 - ) - self.assertEqual( - parser.eval_lazy_data("${add_two_nums(1, 2)}", functions_mapping=functions_mapping), - 3 - ) - self.assertEqual( - parser.eval_lazy_data("/api/${add_two_nums(1, 2)}", functions_mapping=functions_mapping), - "/api/3" - ) - - with self.assertRaises(exceptions.FunctionNotFound): - parser.eval_lazy_data("/api/${gen_md5(abc)}", functions_mapping=functions_mapping) - def test_parse_data_testcase(self): variables = { "uid": "1000", "random": "A2dEx", "authorization": "a83de0ff8d2e896dbd8efb81ba14e17d", - "data": {"name": "user", "password": "123456"} + "data": {"name": "user", "password": "123456"}, } functions = { "add_two_nums": lambda a, b=1: a + b, - "get_timestamp": lambda: int(time.time() * 1000) + "get_timestamp": lambda: int(time.time() * 1000), } testcase_template = { "url": "http://127.0.0.1:5000/api/users/$uid/${add_two_nums(1,2)}", @@ -603,804 +423,17 @@ class TestParserBasic(unittest.TestCase): "Content-Type": "application/json", "authorization": "$authorization", "random": "$random", - "sum": "${add_two_nums(1, 2)}" + "sum": "${add_two_nums(1, 2)}", }, - "body": "$data" + "body": "$data", } - parsed_testcase = parser.eval_lazy_data( - testcase_template, - variables_mapping=variables, - functions_mapping=functions + parsed_testcase = parser.parse_data(testcase_template, variables, functions) + self.assertEqual( + parsed_testcase["url"], "http://127.0.0.1:5000/api/users/1000/3" ) self.assertEqual( - parsed_testcase["url"], - "http://127.0.0.1:5000/api/users/1000/3" + 
parsed_testcase["headers"]["authorization"], variables["authorization"] ) - self.assertEqual( - parsed_testcase["headers"]["authorization"], - variables["authorization"] - ) - self.assertEqual( - parsed_testcase["headers"]["random"], - variables["random"] - ) - self.assertEqual( - parsed_testcase["body"], - variables["data"] - ) - self.assertEqual( - parsed_testcase["headers"]["sum"], - 3 - ) - - def test_parse_variables_mapping(self): - variables = { - "varA": "123$varB", - "varB": "456$varC", - "varC": "${sum_two($a, $b)}", - "a": 1, - "b": 2 - } - functions = { - "sum_two": sum_two - } - prepared_variables = parser.prepare_lazy_data(variables, functions, variables.keys()) - parsed_variables = parser.parse_variables_mapping(prepared_variables) - self.assertEqual(parsed_variables["varA"], "1234563") - self.assertEqual(parsed_variables["varB"], "4563") - self.assertEqual(parsed_variables["varC"], 3) - - def test_parse_variables_mapping_fix_duplicate_function_call(self): - # fix duplicate function calling - variables = { - "varA": "$varB", - "varB": "${gen_random_string(5)}" - } - functions = { - "gen_random_string": gen_random_string - } - prepared_variables = parser.prepare_lazy_data(variables, functions, variables.keys()) - parsed_variables = parser.parse_variables_mapping(prepared_variables) - self.assertEqual(parsed_variables["varA"], parsed_variables["varB"]) - - def test_parse_variables_mapping_dead_circle(self): - variables = { - "varA": "$varB", - "varB": "123$varC" - } - check_variables_set = {"varA", "varB", "varC"} - prepared_variables = parser.prepare_lazy_data(variables, {}, check_variables_set) - with self.assertRaises(exceptions.VariableNotFound): - parser.parse_variables_mapping(prepared_variables) - - def test_parse_variables_mapping_not_found(self): - variables = { - "varA": "123$varB", - "varB": "456$varC", - "varC": "${sum_two($a, $b)}", - "b": 2 - } - functions = { - "sum_two": sum_two - } - with self.assertRaises(exceptions.VariableNotFound): 
- parser.prepare_lazy_data(variables, functions, variables.keys()) - - def test_parse_variables_mapping_ref_self(self): - variables = { - "varC": "${sum_two($a, $b)}", - "a": 1, - "b": 2, - "token": "$token" - } - functions = { - "sum_two": sum_two - } - prepared_variables = parser.prepare_lazy_data(variables, functions, variables.keys()) - with self.assertRaises(exceptions.VariableNotFound): - parser.parse_variables_mapping(prepared_variables) - - def test_parse_variables_mapping_2(self): - variables = { - "host2": "https://httprunner.org", - "num3": "${sum_two($num2, 4)}", - "num2": "${sum_two($num1, 3)}", - "num1": "${sum_two(1, 2)}" - } - functions = { - "sum_two": sum_two - } - prepared_variables = parser.prepare_lazy_data(variables, functions, variables.keys()) - parsed_testcase = parser.parse_variables_mapping(prepared_variables) - self.assertEqual(parsed_testcase["num3"], 10) - self.assertEqual(parsed_testcase["num2"], 6) - self.assertEqual(parsed_testcase["num1"], 3) - - def test_is_var_or_func_exist(self): - self.assertTrue(parser.is_var_or_func_exist("$var")) - self.assertTrue(parser.is_var_or_func_exist("${var}")) - self.assertTrue(parser.is_var_or_func_exist("$var${var}")) - self.assertFalse(parser.is_var_or_func_exist("${var")) - self.assertFalse(parser.is_var_or_func_exist("$$var")) - self.assertFalse(parser.is_var_or_func_exist("var$$0")) - self.assertTrue(parser.is_var_or_func_exist("var$$$0")) - self.assertFalse(parser.is_var_or_func_exist("var$$$$0")) - self.assertTrue(parser.is_var_or_func_exist("${func()}")) - self.assertTrue(parser.is_var_or_func_exist("${func($a)}")) - self.assertTrue(parser.is_var_or_func_exist("${func($a)}$b")) - - def test_parse_variables_mapping_dollar_notation(self): - variables = { - "varA": "123$varB", - "varB": "456$$0", - "varC": "${sum_two($a, $b)}", - "a": 1, - "b": 2, - "c": "abc" - } - functions = { - "sum_two": sum_two - } - prepared_variables = parser.prepare_lazy_data(variables, functions, variables.keys()) - 
parsed_testcase = parser.parse_variables_mapping(prepared_variables) - self.assertEqual(parsed_testcase["varA"], "123456$0") - self.assertEqual(parsed_testcase["varB"], "456$0") - self.assertEqual(parsed_testcase["varC"], 3) - - def test_prepare_lazy_data(self): - variables = { - "host": "https://httprunner.org", - "num4": "${sum_two($num0, 5)}", - "num3": "${sum_two($num2, 4)}", - "num2": "${sum_two($num1, 3)}", - "num1": "${sum_two(1, 2)}", - "num0": 0 - } - functions = { - "sum_two": sum_two - } - parser.prepare_lazy_data( - variables, - functions, - variables.keys() - ) - - def test_prepare_lazy_data_not_found(self): - variables = { - "host": "https://httprunner.org", - "num4": "${sum_two($num0, 5)}", - "num3": "${sum_two($num2, 4)}", - "num2": "${sum_two($num1, 3)}", - "num1": "${sum_two(1, 2)}" - } - functions = { - "sum_two": sum_two - } - with self.assertRaises(exceptions.VariableNotFound): - parser.prepare_lazy_data( - variables, - functions, - variables.keys() - ) - - def test_prepare_lazy_data_dual_dollar(self): - variables = { - "num0": 123, - "var1": "abc$$num0", - "var2": "abc$$$num0", - "var3": "abc$$$$num0", - } - functions = { - "sum_two": sum_two - } - prepared_variables = parser.prepare_lazy_data( - variables, - functions, - variables.keys() - ) - self.assertEqual(prepared_variables["var1"], "abc$num0") - self.assertIsInstance(prepared_variables["var2"], parser.LazyString) - self.assertEqual(prepared_variables["var3"], "abc$$num0") - - parsed_variables = parser.parse_variables_mapping(prepared_variables) - self.assertEqual(parsed_variables["var1"], "abc$num0") - self.assertEqual(parsed_variables["var2"], "abc$123") - self.assertEqual(parsed_variables["var3"], "abc$$num0") - - def test_get_uniform_comparator(self): - self.assertEqual(parser.get_uniform_comparator("eq"), "equals") - self.assertEqual(parser.get_uniform_comparator("=="), "equals") - self.assertEqual(parser.get_uniform_comparator("lt"), "less_than") - 
self.assertEqual(parser.get_uniform_comparator("le"), "less_than_or_equals") - self.assertEqual(parser.get_uniform_comparator("gt"), "greater_than") - self.assertEqual(parser.get_uniform_comparator("ge"), "greater_than_or_equals") - self.assertEqual(parser.get_uniform_comparator("ne"), "not_equals") - - self.assertEqual(parser.get_uniform_comparator("str_eq"), "string_equals") - self.assertEqual(parser.get_uniform_comparator("len_eq"), "length_equals") - self.assertEqual(parser.get_uniform_comparator("count_eq"), "length_equals") - - self.assertEqual(parser.get_uniform_comparator("len_gt"), "length_greater_than") - self.assertEqual(parser.get_uniform_comparator("count_gt"), "length_greater_than") - self.assertEqual(parser.get_uniform_comparator("count_greater_than"), "length_greater_than") - - self.assertEqual(parser.get_uniform_comparator("len_ge"), "length_greater_than_or_equals") - self.assertEqual(parser.get_uniform_comparator("count_ge"), "length_greater_than_or_equals") - self.assertEqual(parser.get_uniform_comparator("count_greater_than_or_equals"), "length_greater_than_or_equals") - - self.assertEqual(parser.get_uniform_comparator("len_lt"), "length_less_than") - self.assertEqual(parser.get_uniform_comparator("count_lt"), "length_less_than") - self.assertEqual(parser.get_uniform_comparator("count_less_than"), "length_less_than") - - self.assertEqual(parser.get_uniform_comparator("len_le"), "length_less_than_or_equals") - self.assertEqual(parser.get_uniform_comparator("count_le"), "length_less_than_or_equals") - self.assertEqual(parser.get_uniform_comparator("count_less_than_or_equals"), "length_less_than_or_equals") - - def test_parse_validator(self): - _validator = {"check": "status_code", "comparator": "eq", "expect": 201} - self.assertEqual( - parser.uniform_validator(_validator), - {"check": "status_code", "comparator": "equals", "expect": 201} - ) - - _validator = {'eq': ['status_code', 201]} - self.assertEqual( - parser.uniform_validator(_validator), 
- {"check": "status_code", "comparator": "equals", "expect": 201} - ) - - def test_extend_validators(self): - def_validators = [ - {'eq': ['v1', 200]}, - {"check": "s2", "expect": 16, "comparator": "len_eq"} - ] - current_validators = [ - {"check": "v1", "expect": 201}, - {'len_eq': ['s3', 12]} - ] - def_validators = [ - parser.uniform_validator(_validator) - for _validator in def_validators - ] - ref_validators = [ - parser.uniform_validator(_validator) - for _validator in current_validators - ] - - extended_validators = parser.extend_validators(def_validators, ref_validators) - self.assertIn( - {"check": "v1", "expect": 201, "comparator": "equals"}, - extended_validators - ) - self.assertIn( - {"check": "s2", "expect": 16, "comparator": "length_equals"}, - extended_validators - ) - self.assertIn( - {"check": "s3", "expect": 12, "comparator": "length_equals"}, - extended_validators - ) - - def test_extend_validators_with_dict(self): - def_validators = [ - {'eq': ["a", {"v": 1}]}, - {'eq': [{"b": 1}, 200]} - ] - current_validators = [ - {'len_eq': ['s3', 12]}, - {'eq': [{"b": 1}, 201]} - ] - def_validators = [ - parser.uniform_validator(_validator) - for _validator in def_validators - ] - ref_validators = [ - parser.uniform_validator(_validator) - for _validator in current_validators - ] - - extended_validators = parser.extend_validators(def_validators, ref_validators) - self.assertEqual(len(extended_validators), 3) - self.assertIn({'check': {'b': 1}, 'expect': 201, 'comparator': 'equals'}, extended_validators) - self.assertNotIn({'check': {'b': 1}, 'expect': 200, 'comparator': 'equals'}, extended_validators) - - -class TestParser(unittest.TestCase): - - def test_parse_parameters_raw_list(self): - parameters = [ - {"user_agent": ["iOS/10.1", "iOS/10.2", "iOS/10.3"]}, - {"username-password": [("user1", "111111"), ["test2", "222222"]]} - ] - cartesian_product_parameters = parser.parse_parameters(parameters) - self.assertEqual( - len(cartesian_product_parameters), - 3 
* 2 - ) - self.assertEqual( - cartesian_product_parameters[0], - {'user_agent': 'iOS/10.1', 'username': 'user1', 'password': '111111'} - ) - - def test_parse_parameters_custom_function(self): - parameters = [ - {"user_agent": "${get_user_agent()}"}, - {"app_version": "${gen_app_version()}"}, - {"username-password": "${get_account()}"}, - {"username2-password2": "${get_account_in_tuple()}"} - ] - dot_env_path = os.path.join( - os.getcwd(), "tests", ".env" - ) - load.load_dot_env_file(dot_env_path) - from tests import debugtalk - cartesian_product_parameters = parser.parse_parameters( - parameters, - functions_mapping=load.load_module_functions(debugtalk) - ) - self.assertIn( - { - 'user_agent': 'iOS/10.1', - 'app_version': '2.8.5', - 'username': 'user1', - 'password': '111111', - 'username2': 'user1', - 'password2': '111111' - }, - cartesian_product_parameters - ) - self.assertEqual( - len(cartesian_product_parameters), - 2 * 2 * 2 * 2 - ) - - def test_parse_parameters_parameterize(self): - loader.load_project_data(os.path.join(os.getcwd(), "tests")) - parameters = [ - {"app_version": "${parameterize(data/app_version.csv)}"}, - {"username-password": "${parameterize(data/account.csv)}"} - ] - cartesian_product_parameters = parser.parse_parameters(parameters) - self.assertEqual( - len(cartesian_product_parameters), - 2 * 3 - ) - - def test_parse_parameters_mix(self): - project_mapping = loader.load_project_data(os.path.join(os.getcwd(), "tests")) - - parameters = [ - {"user_agent": ["iOS/10.1", "iOS/10.2", "iOS/10.3"]}, - {"app_version": "${gen_app_version()}"}, - {"username-password": "${parameterize(data/account.csv)}"} - ] - cartesian_product_parameters = parser.parse_parameters( - parameters, functions_mapping=project_mapping["functions"]) - self.assertEqual( - len(cartesian_product_parameters), - 3 * 2 * 3 - ) - - def test_parse_tests_testcase(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase.yml') - tests_mapping = 
loader.load_cases(testcase_file_path) - testcases = tests_mapping["testcases"] - self.assertEqual( - testcases[0]["config"]["variables"]["var_c"], - "${sum_two($var_a, $var_b)}" - ) - self.assertEqual( - testcases[0]["config"]["variables"]["PROJECT_KEY"], - "${ENV(PROJECT_KEY)}" - ) - parsed_testcases = parser.parse_tests(tests_mapping) - self.assertIsInstance(parsed_testcases, list) - test_dict1 = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict1["variables"]["var_c"].raw_string, "${sum_two($var_a, $var_b)}") - self.assertEqual(test_dict1["variables"]["PROJECT_KEY"].raw_string, "${ENV(PROJECT_KEY)}") - self.assertIsInstance(parsed_testcases[0]["config"]["name"], parser.LazyString) - - def test_parse_tests_override_variables(self): - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': '', - 'variables': [ - {"password": "123456"}, - {"creator": "user_test_001"} - ] - }, - "teststeps": [ - { - 'name': 'testcase1', - "variables": [ - {"creator": "user_test_002"}, - {"username": "$creator"} - ], - 'request': {'url': '/api1', 'method': 'GET'} - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict1_variables = parsed_testcases[0]["teststeps"][0]["variables"] - self.assertEqual(test_dict1_variables["creator"], "user_test_001") - self.assertEqual(test_dict1_variables["username"].raw_string, "$creator") - - def test_parse_tests_base_url_priority(self): - """ base_url & verify: priority test_dict > config - """ - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': '', - "base_url": "$host", - 'variables': { - "host": "https://github.com" - }, - "verify": False - }, - "teststeps": [ - { - 'name': 'testcase1', - "base_url": "https://httprunner.org", - 'request': {'url': '/api1', 'method': 'GET', "verify": True} - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict["request"]["url"], "/api1") - 
self.assertEqual(test_dict["request"]["verify"], True) - - def test_parse_tests_base_url_path_with_variable(self): - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': '', - "base_url": "$host1", - 'variables': { - "host1": "https://github.com" - } - }, - "teststeps": [ - { - 'name': 'testcase1', - "variables": { - "host2": "https://httprunner.org" - }, - 'request': {'url': '$host2/api1', 'method': 'GET'} - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict["variables"]["host2"], "https://httprunner.org") - parsed_test_dict = parser.parse_lazy_data(test_dict, test_dict["variables"]) - self.assertEqual(parsed_test_dict["request"]["url"], "https://httprunner.org/api1") - - def test_parse_tests_base_url_test_dict(self): - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': '', - "base_url": "$host1", - 'variables': { - "host1": "https://github.com" - } - }, - "teststeps": [ - { - 'name': 'testcase1', - "base_url": "$host2", - "variables": { - "host2": "https://httprunner.org" - }, - 'request': {'url': '/api1', 'method': 'GET'} - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - parsed_test_dict = parser.parse_lazy_data(test_dict, test_dict["variables"]) - self.assertEqual(parsed_test_dict["base_url"], "https://httprunner.org") - - def test_parse_tests_variable_with_function(self): - tests_mapping = { - "project_mapping": { - "functions": { - "sum_two": sum_two, - "gen_random_string": gen_random_string - } - }, - 'testcases': [ - { - "config": { - 'name': '', - "base_url": "$host1", - 'variables': { - "host1": "https://github.com", - "var_a": "${gen_random_string(5)}", - "var_b": "$var_a" - } - }, - "teststeps": [ - { - 'name': 'testcase1', - "base_url": "$host2", - "variables": { - "host2": "https://httprunner.org", - "num3": "${sum_two($num2, 4)}", - "num2": 
"${sum_two($num1, 3)}", - "num1": "${sum_two(1, 2)}", - "str1": "${gen_random_string(5)}", - "str2": "$str1" - }, - 'request': { - 'url': '/api1/?num1=$num1&num2=$num2&num3=$num3', - 'method': 'GET' - } - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - variables = parser.parse_variables_mapping(test_dict["variables"]) - self.assertEqual(variables["num3"], 10) - self.assertEqual(variables["num2"], 6) - parsed_test_dict = parser.parse_lazy_data(test_dict, variables) - self.assertEqual(parsed_test_dict["base_url"], "https://httprunner.org") - self.assertEqual( - parsed_test_dict["request"]["url"], - "/api1/?num1=3&num2=6&num3=10" - ) - self.assertEqual(variables["str1"], variables["str2"]) - - def test_parse_tests_variable_not_found(self): - tests_mapping = { - "project_mapping": { - "functions": { - "sum_two": sum_two - } - }, - 'testcases': [ - { - "config": { - 'name': '', - "base_url": "$host1", - 'variables': { - "host1": "https://github.com" - } - }, - "teststeps": [ - { - 'name': 'testcase1', - "base_url": "$host2", - "variables": { - "host2": "https://httprunner.org", - "num4": "${sum_two($num0, 5)}", - "num3": "${sum_two($num2, 4)}", - "num2": "${sum_two($num1, 3)}", - "num1": "${sum_two(1, 2)}" - }, - 'request': { - 'url': '/api1/?num1=$num1&num2=$num2&num3=$num3&num4=$num4', - 'method': 'GET' - } - } - ] - } - ] - } - parser.parse_tests(tests_mapping) - parse_failed_testfiles = parser.get_parse_failed_testfiles() - self.assertIn("testcase", parse_failed_testfiles) - - def test_parse_tests_base_url_teststep_empty(self): - """ base_url & verify: priority test_dict > config - """ - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': '', - "base_url": "$host", - 'variables': { - "host": "https://github.com" - }, - "verify": False - }, - "teststeps": [ - { - 'name': 'testcase1', - "base_url": "", - 'request': {'url': '/api1', 'method': 'GET', "verify": True} - } - ] - } - ] - 
} - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - self.assertEqual(str(test_dict["base_url"]), 'LazyString($host)') - self.assertEqual(test_dict["request"]["verify"], True) - - def test_parse_tests_verify_config_set(self): - """ verify priority: test_dict > config - """ - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': 'bugfix verify', - "base_url": "https://httpbin.org/", - "verify": False - }, - "teststeps": [ - { - 'name': 'testcase1', - 'request': {'url': '/headers', 'method': 'GET'} - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict["request"]["verify"], False) - - def test_parse_tests_verify_config_unset(self): - """ verify priority: test_dict > config - """ - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': 'bugfix verify', - "base_url": "https://httpbin.org/", - }, - "teststeps": [ - { - 'name': 'testcase1', - 'request': {'url': '/headers', 'method': 'GET'} - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict["request"]["verify"], True) - - def test_parse_tests_verify_step_set_false(self): - """ verify priority: test_dict > config - """ - tests_mapping = { - 'testcases': [ - { - "config": { - 'name': 'bugfix verify', - "base_url": "https://httpbin.org/", - "verify": True - }, - "teststeps": [ - { - 'name': 'testcase1', - 'request': {'url': '/headers', 'method': 'GET', "verify": False} - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict["request"]["verify"], False) - - def test_parse_tests_verify_nested_testcase_unset(self): - tests_mapping = { - 'testcases': [ - { - 'config': { - 'name': 'inquiry price', - 'verify': False - }, - 'teststeps': [ - { - 'name': 'login 
system', - 'testcase': 'testcases/deps/login.yml', - 'testcase_def': { - 'config': { - 'name': 'login system' - }, - 'teststeps': [ - { - 'name': '/', - 'request': { - 'method': 'GET', - 'url': 'https://httpbin.org/' - } - } - ] - } - } - ] - } - ] - } - parsed_testcases = parser.parse_tests(tests_mapping) - test_dict = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict["teststeps"][0]["request"]["verify"], False) - - def test_parse_environ(self): - os.environ["PROJECT_KEY"] = "ABCDEFGH" - content = { - "variables": [ - {"PROJECT_KEY": "${ENV(PROJECT_KEY)}"} - ] - } - result = parser.eval_lazy_data(content) - - content = { - "variables": [ - {"PROJECT_KEY": "${ENV(PROJECT_KEY, abc)}"} - ] - } - with self.assertRaises(exceptions.ParamsError): - parser.eval_lazy_data(content) - - content = { - "variables": [ - {"PROJECT_KEY": "${ENV(abc=123)}"} - ] - } - with self.assertRaises(exceptions.ParamsError): - parser.eval_lazy_data(content) - - def test_extend_with_api(self): - loader.load_project_data(os.path.join(os.getcwd(), "tests")) - raw_testinfo = { - "name": "get token", - "base_url": "https://github.com", - "api": "api/get_token.yml", - } - api_def_dict = loader.buildup.load_teststep(raw_testinfo) - test_block = { - "name": "override block", - "times": 3, - "variables": [ - {"var": 123} - ], - "base_url": "https://httprunner.org", - 'request': { - 'url': '/api/get-token', - 'method': 'POST', - 'headers': {'user_agent': '$user_agent', 'device_sn': '$device_sn', 'os_platform': '$os_platform', 'app_version': '$app_version'}, - 'json': {'sign': '${get_sign($device_sn, $os_platform, $app_version)}'} - }, - 'validate': [ - {"check": "status_code", "comparator": "equals", "expect": 201}, - {"check": "content.token", "comparator": "length_equals", "expect": 32} - ] - } - - parser._extend_with_api(test_block, api_def_dict) - self.assertEqual(test_block["base_url"], "https://github.com") - self.assertEqual(test_block["name"], "override block") - 
self.assertEqual({'var': 123}, test_block["variables"]) - self.assertIn({'check': 'status_code', 'expect': 201, 'comparator': 'equals'}, test_block["validate"]) - self.assertIn({'check': 'content.token', 'comparator': 'length_equals', 'expect': 32}, test_block["validate"]) - self.assertEqual(test_block["times"], 3) + self.assertEqual(parsed_testcase["headers"]["random"], variables["random"]) + self.assertEqual(parsed_testcase["body"], variables["data"]) + self.assertEqual(parsed_testcase["headers"]["sum"], 3) diff --git a/httprunner/report/__init__.py b/httprunner/report/__init__.py deleted file mode 100644 index eefd839f..00000000 --- a/httprunner/report/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -HttpRunner report - -- summarize: aggregate test stat data to summary -- stringify: stringify summary, in order to dump json file and generate html report. -- html: render html report -""" - -from httprunner.report.summarize import get_platform, aggregate_stat, get_summary -from httprunner.report.stringify import stringify_summary -from httprunner.report.html import HtmlTestResult, gen_html_report - -__all__ = [ - "get_platform", - "aggregate_stat", - "get_summary", - "stringify_summary", - "HtmlTestResult", - "gen_html_report" -] diff --git a/httprunner/report/html/__init__.py b/httprunner/report/html/__init__.py deleted file mode 100644 index a1b4f12f..00000000 --- a/httprunner/report/html/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -HttpRunner html report - -- result: define resultclass for unittest TextTestRunner -- gen_report: render html report with jinja2 template - -""" - -from httprunner.report.html.result import HtmlTestResult -from httprunner.report.html.gen_report import gen_html_report - -__all__ = [ - "HtmlTestResult", - "gen_html_report" -] \ No newline at end of file diff --git a/httprunner/report/html/gen_report.py b/httprunner/report/html/gen_report.py deleted file mode 100644 index c7791183..00000000 --- a/httprunner/report/html/gen_report.py 
+++ /dev/null @@ -1,64 +0,0 @@ -import io -import os -from datetime import datetime - -from jinja2 import Template -from loguru import logger - -from httprunner.exceptions import SummaryEmpty - - -def gen_html_report(summary, report_template=None, report_dir=None, report_file=None): - """ render html report with specified report name and template - - Args: - summary (dict): test result summary data - report_template (str): specify html report template path, template should be in Jinja2 format. - report_dir (str): specify html report save directory - report_file (str): specify html report file path, this has higher priority than specifying report dir. - - """ - if not summary["time"] or summary["stat"]["testcases"]["total"] == 0: - logger.error(f"test result summary is empty ! {summary}") - raise SummaryEmpty - - if not report_template: - report_template = os.path.join( - os.path.abspath(os.path.dirname(__file__)), - "template.html" - ) - logger.debug("No html report template specified, use default.") - else: - logger.info(f"render with html report template: {report_template}") - - logger.info("Start to render Html report ...") - - start_at_timestamp = summary["time"]["start_at"] - utc_time_iso_8601_str = datetime.utcfromtimestamp(start_at_timestamp).isoformat() - summary["time"]["start_datetime"] = utc_time_iso_8601_str - - if report_file: - report_dir = os.path.dirname(report_file) - report_file_name = os.path.basename(report_file) - else: - report_dir = report_dir or os.path.join(os.getcwd(), "reports") - # fix #826: Windows does not support file name include ":" - report_file_name = "{}.html".format(utc_time_iso_8601_str.replace(":", "").replace("-", "")) - - if not os.path.isdir(report_dir): - os.makedirs(report_dir) - - report_path = os.path.join(report_dir, report_file_name) - with io.open(report_template, "r", encoding='utf-8') as fp_r: - template_content = fp_r.read() - with io.open(report_path, 'w', encoding='utf-8') as fp_w: - rendered_content = Template( 
- template_content, - extensions=["jinja2.ext.loopcontrols"] - ).render(summary) - fp_w.write(rendered_content) - - logger.info(f"Generated Html report: {report_path}") - - return report_path - diff --git a/httprunner/report/html/result.py b/httprunner/report/html/result.py deleted file mode 100644 index 762d0bb1..00000000 --- a/httprunner/report/html/result.py +++ /dev/null @@ -1,64 +0,0 @@ -import time -import unittest - -from loguru import logger - - -class HtmlTestResult(unittest.TextTestResult): - """ A html result class that can generate formatted html results. - Used by TextTestRunner. - """ - def __init__(self, stream, descriptions, verbosity): - super(HtmlTestResult, self).__init__(stream, descriptions, verbosity) - self.records = [] - - def _record_test(self, test, status, attachment=''): - data = { - 'name': test.shortDescription(), - 'status': status, - 'attachment': attachment, - "meta_datas": test.meta_datas - } - self.records.append(data) - - def startTestRun(self): - self.start_at = time.time() - - def startTest(self, test): - """ add start test time """ - super(HtmlTestResult, self).startTest(test) - logger.info(test.shortDescription()) - - def addSuccess(self, test): - super(HtmlTestResult, self).addSuccess(test) - self._record_test(test, 'success') - print("") - - def addError(self, test, err): - super(HtmlTestResult, self).addError(test, err) - self._record_test(test, 'error', self._exc_info_to_string(err, test)) - print("") - - def addFailure(self, test, err): - super(HtmlTestResult, self).addFailure(test, err) - self._record_test(test, 'failure', self._exc_info_to_string(err, test)) - print("") - - def addSkip(self, test, reason): - super(HtmlTestResult, self).addSkip(test, reason) - self._record_test(test, 'skipped', reason) - print("") - - def addExpectedFailure(self, test, err): - super(HtmlTestResult, self).addExpectedFailure(test, err) - self._record_test(test, 'ExpectedFailure', self._exc_info_to_string(err, test)) - print("") - - def 
addUnexpectedSuccess(self, test): - super(HtmlTestResult, self).addUnexpectedSuccess(test) - self._record_test(test, 'UnexpectedSuccess') - print("") - - @property - def duration(self): - return time.time() - self.start_at diff --git a/httprunner/report/html/template.html b/httprunner/report/html/template.html deleted file mode 100644 index 8bbfc1bf..00000000 --- a/httprunner/report/html/template.html +++ /dev/null @@ -1,360 +0,0 @@ - - - - {{html_report_name}} - TestReport - - - - -

Test Report: {{html_report_name}}

- -

Summary

- - - - - - - - - - - - - - - - - - - - - - - - - -
START AT{{time.start_datetime}}
DURATION{{ '%0.3f'| format(time.duration|float) }} seconds
PLATFORMHttpRunner {{ platform.httprunner_version }} {{ platform.python_version }} {{ platform.platform }}
STATTESTCASES (success/fail)TESTSTEPS (success/fail/error/skip)
total (details) =>{{stat.testcases.total}} ({{stat.testcases.success}}/{{stat.testcases.fail}}){{stat.teststeps.total}} ({{stat.teststeps.successes}}/{{stat.teststeps.failures}}/{{stat.teststeps.errors}}/{{stat.teststeps.skipped}})
- -

Details

- - {% for test_suite_summary in details %} - {% set suite_index = loop.index %} -

{{test_suite_summary.name}}

- - - - - - - - - - - - - - - - {% for record in test_suite_summary.records %} - {% set record_index = "{}_{}".format(suite_index, loop.index) %} - {% set record_meta_datas = record.meta_datas_expanded %} - - - - - - - {% endfor %} -
TOTAL: {{test_suite_summary.stat.total}}SUCCESS: {{test_suite_summary.stat.successes}}FAILED: {{test_suite_summary.stat.failures}}ERROR: {{test_suite_summary.stat.errors}}SKIPPED: {{test_suite_summary.stat.skipped}}
StatusNameResponse TimeDetail
{{record.status}}{{record.name}}{{ record.response_time }} ms - - {% for meta_data in record_meta_datas %} - {% set meta_data_index = "{}_{}".format(record_index, loop.index) %} - log-{{loop.index}} - - {% endfor %} - - {% if record.attachment %} - traceback - - {% endif %} - -
- {% endfor %} - \ No newline at end of file diff --git a/httprunner/report/report.py b/httprunner/report/report.py deleted file mode 100644 index 29824cc2..00000000 --- a/httprunner/report/report.py +++ /dev/null @@ -1,51 +0,0 @@ -import json -import platform -import time -import uuid - -import requests - -from httprunner import __version__ - - -def prepare_event_kwargs(event_name, params): - """ prepare report event kwargs""" - - kwargs = { - "headers": { - 'content-type': 'application/json' - }, - "json": { - "user": { - "user_unique_id": str(uuid.getnode()) - }, - "header": { - "app_id": 173519, - "os_name": platform.system(), - "os_version": platform.release(), - "app_version": __version__ # HttpRunner version - }, - "events": [ - { - "event": event_name, - "params": json.dumps(params), - "time": int(time.time()) - } - ], - "verbose": 1 - } - } - return kwargs - - -def report_event(event_name, success=True): - params = { - "success": 1 if success else 0 - } - kwargs = prepare_event_kwargs(event_name, params) - resp = requests.post("http://mcs.snssdk.com/v1/json", **kwargs) - print("resp---", resp.json()) - - -if __name__ == '__main__': - report_event("loader") diff --git a/httprunner/report/stringify.py b/httprunner/report/stringify.py deleted file mode 100644 index c6b9cf11..00000000 --- a/httprunner/report/stringify.py +++ /dev/null @@ -1,215 +0,0 @@ -import json -from base64 import b64encode -from collections import Iterable - -from jinja2 import escape -from requests.cookies import RequestsCookieJar - - -def dumps_json(value): - """ dumps json value to indented string - - Args: - value (dict): raw json data - - Returns: - str: indented json dump string - - """ - return json.dumps(value, indent=2, ensure_ascii=False) - - -def detect_encoding(value): - try: - return json.detect_encoding(value) - except AttributeError: - return "utf-8" - - -def __stringify_request(request_data): - """ stringfy HTTP request data - - Args: - request_data (dict): HTTP request 
data in dict. - - { - "url": "http://127.0.0.1:5000/api/get-token", - "method": "POST", - "headers": { - "User-Agent": "python-requests/2.20.0", - "Accept-Encoding": "gzip, deflate", - "Accept": "*/*", - "Connection": "keep-alive", - "user_agent": "iOS/10.3", - "device_sn": "TESTCASE_CREATE_XXX", - "os_platform": "ios", - "app_version": "2.8.6", - "Content-Type": "application/json", - "Content-Length": "52" - }, - "body": b'{"sign": "cb9d60acd09080ea66c8e63a1c78c6459ea00168"}', - "verify": false - } - - """ - for key, value in request_data.items(): - - if isinstance(value, (list, dict)): - value = dumps_json(value) - - elif isinstance(value, bytes): - try: - encoding = detect_encoding(value) - value = value.decode(encoding) - if key == "body": - try: - # request body is in json format - value = json.loads(value) - value = dumps_json(value) - except json.JSONDecodeError: - pass - value = escape(value) - except UnicodeDecodeError: - pass - - elif not isinstance(value, (str, bytes, int, float, Iterable)): - # class instance, e.g. 
MultipartEncoder() - value = repr(value) - - elif isinstance(value, RequestsCookieJar): - value = value.get_dict() - - request_data[key] = value - - -def __stringify_response(response_data): - """ stringfy HTTP response data - - Args: - response_data (dict): - - { - "status_code": 404, - "headers": { - "Content-Type": "application/json", - "Content-Length": "30", - "Server": "Werkzeug/0.14.1 Python/3.7.0", - "Date": "Tue, 27 Nov 2018 06:19:27 GMT" - }, - "encoding": "None", - "content_type": "application/json", - "ok": false, - "url": "http://127.0.0.1:5000/api/users/9001", - "reason": "NOT FOUND", - "cookies": {}, - "body": { - "success": false, - "data": {} - } - } - - """ - for key, value in response_data.items(): - - if isinstance(value, (list, dict)): - value = dumps_json(value) - - elif isinstance(value, bytes): - try: - encoding = response_data.get("encoding") - if not encoding or encoding == "None": - encoding = detect_encoding(value) - - if key == "body" and "image" in response_data["content_type"]: - # display image - value = "data:{};base64,{}".format( - response_data["content_type"], - b64encode(value).decode(encoding) - ) - else: - value = escape(value.decode(encoding)) - except UnicodeDecodeError: - pass - - elif not isinstance(value, (str, bytes, int, float, Iterable)): - # class instance, e.g. 
MultipartEncoder() - value = repr(value) - - elif isinstance(value, RequestsCookieJar): - value = value.get_dict() - - response_data[key] = value - - -def __expand_meta_datas(meta_datas, meta_datas_expanded): - """ expand meta_datas to one level - - Args: - meta_datas (dict/list): maybe in nested format - - Returns: - list: expanded list in one level - - Examples: - >>> meta_datas = [ - [ - dict1, - dict2 - ], - dict3 - ] - >>> meta_datas_expanded = [] - >>> __expand_meta_datas(meta_datas, meta_datas_expanded) - >>> print(meta_datas_expanded) - [dict1, dict2, dict3] - - """ - if isinstance(meta_datas, dict): - meta_datas_expanded.append(meta_datas) - elif isinstance(meta_datas, list): - for meta_data in meta_datas: - __expand_meta_datas(meta_data, meta_datas_expanded) - - -def __get_total_response_time(meta_datas_expanded): - """ caculate total response time of all meta_datas - """ - try: - response_time = 0 - for meta_data in meta_datas_expanded: - response_time += meta_data["stat"]["response_time_ms"] - - return "{:.2f}".format(response_time) - - except TypeError: - # failure exists - return "N/A" - - -def __stringify_meta_datas(meta_datas): - - if isinstance(meta_datas, list): - for _meta_data in meta_datas: - __stringify_meta_datas(_meta_data) - elif isinstance(meta_datas, dict): - data_list = meta_datas["data"] - for data in data_list: - __stringify_request(data["request"]) - __stringify_response(data["response"]) - - -def stringify_summary(summary): - """ stringify summary, in order to dump json file and generate html report. 
- """ - for index, suite_summary in enumerate(summary["details"]): - - if not suite_summary.get("name"): - suite_summary["name"] = f"testcase {index}" - - for record in suite_summary.get("records"): - meta_datas = record['meta_datas'] - __stringify_meta_datas(meta_datas) - meta_datas_expanded = [] - __expand_meta_datas(meta_datas, meta_datas_expanded) - record["meta_datas_expanded"] = meta_datas_expanded - record["response_time"] = __get_total_response_time(meta_datas_expanded) diff --git a/httprunner/report/summarize.py b/httprunner/report/summarize.py deleted file mode 100644 index 93c7145f..00000000 --- a/httprunner/report/summarize.py +++ /dev/null @@ -1,82 +0,0 @@ -import platform - -from httprunner import __version__ - - -def get_platform(): - return { - "httprunner_version": __version__, - "python_version": "{} {}".format( - platform.python_implementation(), - platform.python_version() - ), - "platform": platform.platform() - } - - -def aggregate_stat(origin_stat, new_stat): - """ aggregate new_stat to origin_stat. - - Args: - origin_stat (dict): origin stat dict, will be updated with new_stat dict. - new_stat (dict): new stat dict. - - """ - for key in new_stat: - if key not in origin_stat: - origin_stat[key] = new_stat[key] - elif key == "start_at": - # start datetime - origin_stat["start_at"] = min(origin_stat["start_at"], new_stat["start_at"]) - elif key == "duration": - # duration = max_end_time - min_start_time - max_end_time = max(origin_stat["start_at"] + origin_stat["duration"], - new_stat["start_at"] + new_stat["duration"]) - min_start_time = min(origin_stat["start_at"], new_stat["start_at"]) - origin_stat["duration"] = max_end_time - min_start_time - else: - origin_stat[key] += new_stat[key] - - -def get_summary(result): - """ get summary from test result - - Args: - result (instance): HtmlTestResult() instance - - Returns: - dict: summary extracted from result. 
- - { - "success": True, - "stat": {}, - "time": {}, - "records": [] - } - - """ - summary = { - "success": result.wasSuccessful(), - "stat": { - 'total': result.testsRun, - 'failures': len(result.failures), - 'errors': len(result.errors), - 'skipped': len(result.skipped), - 'expectedFailures': len(result.expectedFailures), - 'unexpectedSuccesses': len(result.unexpectedSuccesses) - } - } - summary["stat"]["successes"] = summary["stat"]["total"] \ - - summary["stat"]["failures"] \ - - summary["stat"]["errors"] \ - - summary["stat"]["skipped"] \ - - summary["stat"]["expectedFailures"] \ - - summary["stat"]["unexpectedSuccesses"] - - summary["time"] = { - 'start_at': result.start_at, - 'duration': result.duration - } - summary["records"] = result.records - - return summary diff --git a/httprunner/response.py b/httprunner/response.py index d4d815fb..b268d4b5 100644 --- a/httprunner/response.py +++ b/httprunner/response.py @@ -1,18 +1,113 @@ -import json -import re -from collections import OrderedDict +from typing import Dict, Text, Any, NoReturn -import jsonpath +import jmespath +import requests from loguru import logger -from httprunner import exceptions, utils +from httprunner.exceptions import ValidationFailure, ParamsError +from httprunner.parser import parse_data, parse_string_value, get_mapping_function +from httprunner.schema import VariablesMapping, Validators, FunctionsMapping -text_extractor_regexp_compile = re.compile(r".*\(.*\).*") + +def get_uniform_comparator(comparator: Text): + """ convert comparator alias to uniform name + """ + if comparator in ["eq", "equals", "==", "is"]: + return "equals" + elif comparator in ["lt", "less_than"]: + return "less_than" + elif comparator in ["le", "less_than_or_equals"]: + return "less_than_or_equals" + elif comparator in ["gt", "greater_than"]: + return "greater_than" + elif comparator in ["ge", "greater_than_or_equals"]: + return "greater_than_or_equals" + elif comparator in ["ne", "not_equals"]: + return 
"not_equals" + elif comparator in ["str_eq", "string_equals"]: + return "string_equals" + elif comparator in ["len_eq", "length_equals", "count_eq"]: + return "length_equals" + elif comparator in [ + "len_gt", + "count_gt", + "length_greater_than", + "count_greater_than", + ]: + return "length_greater_than" + elif comparator in [ + "len_ge", + "count_ge", + "length_greater_than_or_equals", + "count_greater_than_or_equals", + ]: + return "length_greater_than_or_equals" + elif comparator in ["len_lt", "count_lt", "length_less_than", "count_less_than"]: + return "length_less_than" + elif comparator in [ + "len_le", + "count_le", + "length_less_than_or_equals", + "count_less_than_or_equals", + ]: + return "length_less_than_or_equals" + else: + return comparator + + +def uniform_validator(validator): + """ unify validator + + Args: + validator (dict): validator maybe in two formats: + + format1: this is kept for compatiblity with the previous versions. + {"check": "status_code", "assert": "eq", "expect": 201} + {"check": "$resp_body_success", "assert": "eq", "expect": True} + format2: recommended new version, {assert: [check_item, expected_value]} + {'eq': ['status_code', 201]} + {'eq': ['$resp_body_success', True]} + + Returns + dict: validator info + + { + "check": "status_code", + "expect": 201, + "assert": "equals" + } + + """ + if not isinstance(validator, dict): + raise ParamsError(f"invalid validator: {validator}") + + if "check" in validator and "expect" in validator: + # format1 + check_item = validator["check"] + expect_value = validator["expect"] + comparator = validator.get("comparator", "eq") + + elif len(validator) == 1: + # format2 + comparator = list(validator.keys())[0] + compare_values = validator[comparator] + + if not isinstance(compare_values, list) or len(compare_values) != 2: + raise ParamsError(f"invalid validator: {validator}") + + check_item, expect_value = compare_values + + else: + raise ParamsError(f"invalid validator: {validator}") + + # 
uniform comparator, e.g. lt => less_than, eq => equals + assert_method = get_uniform_comparator(comparator) + + return {"check": check_item, "expect": expect_value, "assert": assert_method} class ResponseObject(object): - - def __init__(self, resp_obj): + def __init__(self, resp_obj: requests.Response): """ initialize with a requests.Response object Args: @@ -21,283 +116,96 @@ class ResponseObject(object): """ self.resp_obj = resp_obj - def __getattr__(self, key): try: - if key == "json": - value = self.resp_obj.json() - elif key == "cookies": - value = self.resp_obj.cookies.get_dict() - else: - value = getattr(self.resp_obj, key) - - self.__dict__[key] = value - return value - except AttributeError: - err_msg = f"ResponseObject does not have attribute: {key}" - logger.error(err_msg) - raise exceptions.ParamsError(err_msg) - - def _extract_field_with_jsonpath(self, field: str) -> list: - """ extract field from response content with jsonpath expression. - JSONPath Docs: https://goessner.net/articles/JsonPath/ - - Args: - field: jsonpath expression, e.g. $.code, $..items.*.id - - Returns: - A list that extracted from json response example. 1) [200] 2) [1, 2] - - Raises: - exceptions.ExtractFailure: If no content matched with jsonpath expression. - - Examples: - For example, response body like below: - { - "code": 200, - "data": { - "items": [{ - "id": 1, - "name": "Bob" - }, - { - "id": 2, - "name": "James" - } - ] - }, - "message": "success" - } - - >>> _extract_field_with_regex("$.code") - [200] - >>> _extract_field_with_regex("$..items.*.id") - [1, 2] - - """ - try: - json_body = self.json - assert json_body - - result = jsonpath.jsonpath(json_body, field) - assert result - return result - except (AssertionError, exceptions.JSONDecodeError): - err_msg = f"Failed to extract data with jsonpath! 
=> {field}\n" - err_msg += f"response body: {self.text}\n" - logger.error(err_msg) - raise exceptions.ExtractFailure(err_msg) - - def _extract_field_with_regex(self, field): - """ extract field from response content with regex. - requests.Response body could be json or html text. - - Args: - field (str): regex string that matched r".*\(.*\).*" - - Returns: - str: matched content. - - Raises: - exceptions.ExtractFailure: If no content matched with regex. - - Examples: - >>> # self.text: "LB123abcRB789" - >>> filed = "LB[\d]*(.*)RB[\d]*" - >>> _extract_field_with_regex(field) - abc - - """ - matched = re.search(field, self.text) - if not matched: - err_msg = f"Failed to extract data with regex! => {field}\n" - err_msg += f"response body: {self.text}\n" - logger.error(err_msg) - raise exceptions.ExtractFailure(err_msg) - - return matched.group(1) - - def _extract_field_with_delimiter(self, field): - """ response content could be json or html text. - - Args: - field (str): string joined by delimiter. - e.g. - "status_code" - "headers" - "cookies" - "content" - "headers.content-type" - "content.person.name.first_name" - - """ - # string.split(sep=None, maxsplit=1) -> list of strings - # e.g. 
"content.person.name" => ["content", "person.name"] - try: - top_query, sub_query = field.split('.', 1) + body = resp_obj.json() except ValueError: - top_query = field - sub_query = None + body = resp_obj.content - # status_code - if top_query in ["status_code", "encoding", "ok", "reason", "url"]: - if sub_query: - # status_code.XX - err_msg = f"Failed to extract: {field}\n" - logger.error(err_msg) - raise exceptions.ParamsError(err_msg) + self.resp_obj_meta = { + "status_code": resp_obj.status_code, + "headers": resp_obj.headers, + "body": body, + } + self.validation_results: Dict = {} - return getattr(self, top_query) - - # cookies - elif top_query == "cookies": - cookies = self.cookies - if not sub_query: - # extract cookies - return cookies - - try: - return cookies[sub_query] - except KeyError: - err_msg = f"Failed to extract cookie! => {field}\n" - err_msg += f"response cookies: {cookies}\n" - logger.error(err_msg) - raise exceptions.ExtractFailure(err_msg) - - # elapsed - elif top_query == "elapsed": - available_attributes = u"available attributes: days, seconds, microseconds, total_seconds" - if not sub_query: - err_msg = "elapsed is datetime.timedelta instance, attribute should also be specified!\n" - err_msg += available_attributes - logger.error(err_msg) - raise exceptions.ParamsError(err_msg) - elif sub_query in ["days", "seconds", "microseconds"]: - return getattr(self.elapsed, sub_query) - elif sub_query == "total_seconds": - return self.elapsed.total_seconds() - else: - err_msg = f"{sub_query} is not valid datetime.timedelta attribute.\n" - err_msg += available_attributes - logger.error(err_msg) - raise exceptions.ParamsError(err_msg) - - # headers - elif top_query == "headers": - headers = self.headers - if not sub_query: - # extract headers - return headers - - try: - return headers[sub_query] - except KeyError: - err_msg = f"Failed to extract header! 
=> {field}\n" - err_msg += f"response headers: {headers}\n" - logger.error(err_msg) - raise exceptions.ExtractFailure(err_msg) - - # response body - elif top_query in ["body", "content", "text", "json"]: - try: - body = self.json - except json.JSONDecodeError: - body = self.text - - if not sub_query: - # extract response body - return body - - if isinstance(body, (dict, list)): - # content = {"xxx": 123}, content.xxx - return utils.query_json(body, sub_query) - elif sub_query.isdigit(): - # content = "abcdefg", content.3 => d - return utils.query_json(body, sub_query) - else: - # content = "abcdefg", content.xxx - err_msg = f"Failed to extract attribute from response body! => {field}\n" - err_msg += f"response body: {body}\n" - logger.error(err_msg) - raise exceptions.ExtractFailure(err_msg) - - # new set response attributes in teardown_hooks - elif top_query in self.__dict__: - attributes = self.__dict__[top_query] - - if not sub_query: - # extract response attributes - return attributes - - if isinstance(attributes, (dict, list)): - # attributes = {"xxx": 123}, content.xxx - return utils.query_json(attributes, sub_query) - elif sub_query.isdigit(): - # attributes = "abcdefg", attributes.3 => d - return utils.query_json(attributes, sub_query) - else: - # content = "attributes.new_attribute_not_exist" - err_msg = f"Failed to extract cumstom set attribute from teardown hooks! => {field}\n" - err_msg += f"response set attributes: {attributes}\n" - logger.error(err_msg) - raise exceptions.TeardownHooksFailure(err_msg) - - # others - else: - err_msg = f"Failed to extract attribute from response! 
=> {field}\n" - err_msg += "available response attributes: status_code, cookies, elapsed, headers, content, " \ - "text, json, encoding, ok, reason, url.\n\n" - err_msg += "If you want to set attribute in teardown_hooks, take the following example as reference:\n" - err_msg += "response.new_attribute = 'new_attribute_value'\n" - logger.error(err_msg) - raise exceptions.ParamsError(err_msg) - - def extract_field(self, field): - """ extract value from requests.Response. - """ - if not isinstance(field, str): - err_msg = f"Invalid extractor! => {field}\n" - logger.error(err_msg) - raise exceptions.ParamsError(err_msg) - - msg = f"extract: {field}" - - if field.startswith("$"): - value = self._extract_field_with_jsonpath(field) - elif text_extractor_regexp_compile.match(field): - value = self._extract_field_with_regex(field) - else: - value = self._extract_field_with_delimiter(field) - - msg += f"\t=> {value}" - logger.debug(msg) - - return value - - def extract_response(self, extractors): - """ extract value from requests.Response and store in OrderedDict. 
- - Args: - extractors (list): - - [ - {"resp_status_code": "status_code"}, - {"resp_headers_content_type": "headers.content-type"}, - {"resp_content": "content"}, - {"resp_content_person_first_name": "content.person.name.first_name"} - ] - - Returns: - OrderDict: variable binds ordered dict - - """ + def extract(self, extractors: Dict[Text, Text]) -> Dict[Text, Any]: if not extractors: return {} - logger.debug("start to extract from response object.") - extracted_variables_mapping = OrderedDict() - extract_binds_order_dict = utils.ensure_mapping_format(extractors) + extract_mapping = {} + for key, field in extractors.items(): + field_value = jmespath.search(field, self.resp_obj_meta) + extract_mapping[key] = field_value - for key, field in extract_binds_order_dict.items(): - extracted_variables_mapping[key] = self.extract_field(field) + logger.info(f"extract mapping: {extract_mapping}") + return extract_mapping - return extracted_variables_mapping + def validate( + self, + validators: Validators, + variables_mapping: VariablesMapping = None, + functions_mapping: FunctionsMapping = None, + ) -> NoReturn: + + self.validation_results = {} + if not validators: + return + + validate_pass = True + failures = [] + + for v in validators: + + if "validate_extractor" not in self.validation_results: + self.validation_results["validate_extractor"] = [] + + u_validator = uniform_validator(v) + + # check item + check_item = u_validator["check"] + check_value = jmespath.search(check_item, self.resp_obj_meta) + check_value = parse_string_value(check_value) + + # comparator + assert_method = u_validator["assert"] + assert_func = get_mapping_function(assert_method, functions_mapping) + + # expect item + expect_item = u_validator["expect"] + # parse expected value with config/teststep/extracted variables + expect_value = parse_data(expect_item, variables_mapping, functions_mapping) + + validate_msg = f"assert {check_item} {assert_method} 
{expect_value}({type(expect_value).__name__})" + + validator_dict = { + "comparator": assert_method, + "check": check_item, + "check_value": check_value, + "expect": expect_item, + "expect_value": expect_value, + } + + try: + assert_func(check_value, expect_value) + validate_msg += "\t==> pass" + logger.info(validate_msg) + validator_dict["check_result"] = "pass" + except AssertionError: + validate_pass = False + validator_dict["check_result"] = "fail" + validate_msg += "\t==> fail" + validate_msg += ( + f"\n" + f"check_item: {check_item}\n" + f"check_value: {check_value}({type(check_value).__name__})\n" + f"assert_method: {assert_method}\n" + f"expect_value: {expect_value}({type(expect_value).__name__})" + ) + logger.error(validate_msg) + failures.append(validate_msg) + + self.validation_results["validate_extractor"].append(validator_dict) + + if not validate_pass: + failures_string = "\n".join([failure for failure in failures]) + raise ValidationFailure(failures_string) diff --git a/httprunner/runner.py b/httprunner/runner.py index 71e2f723..ab833ead 100644 --- a/httprunner/runner.py +++ b/httprunner/runner.py @@ -1,266 +1,91 @@ -import uuid -from enum import Enum -from unittest.case import SkipTest +import os +import time +from datetime import datetime +from typing import List, Dict, Text from loguru import logger -from httprunner import exceptions, response, utils +from httprunner import utils, exceptions from httprunner.client import HttpSession -from httprunner.context import SessionContext -from httprunner.validator import Validator +from httprunner.exceptions import ValidationFailure, ParamsError +from httprunner.ext.uploader import prepare_upload_step +from httprunner.loader import load_project_meta, load_testcase_file +from httprunner.parser import build_url, parse_data, parse_variables_mapping +from httprunner.response import ResponseObject +from httprunner.schema import ( + TConfig, + TStep, + VariablesMapping, + StepData, + TestCaseSummary, + 
TestCaseTime, + TestCaseInOut, + ProjectMeta, + TestCase, +) -class HookTypeEnum(Enum): - SETUP = 1 - TEARDOWN = 2 +class HttpRunner(object): + config: TConfig + teststeps: List[TStep] + success: bool = True # indicate testcase execution result + __project_meta: ProjectMeta = None + __step_datas: List[StepData] = None + __session: HttpSession = None + __session_variables: VariablesMapping = {} + __start_at = 0 + __duration = 0 -class Runner(object): - """ Running testcases. + def with_project_meta(self, project_meta: ProjectMeta) -> "HttpRunner": + self.__project_meta = project_meta + return self - Examples: - >>> tests_mapping = { - "project_mapping": { - "functions": {} - }, - "testcases": [ - { - "config": { - "name": "XXXX", - "base_url": "http://127.0.0.1", - "verify": False - }, - "teststeps": [ - { - "name": "test description", - "variables": [], # optional - "request": { - "url": "http://127.0.0.1:5000/api/users/1000", - "method": "GET" - } - } - ] - } - ] - } + def with_session(self, session: HttpSession) -> "HttpRunner": + self.__session = session + return self - >>> testcases = parser.parse_tests(tests_mapping) - >>> parsed_testcase = testcases[0] + def with_variables(self, variables: VariablesMapping) -> "HttpRunner": + self.__session_variables = variables + return self - >>> test_runner = runner.Runner(parsed_testcase["config"]) - >>> test_runner.run_test(parsed_testcase["teststeps"][0]) + def __run_step_request(self, step: TStep): + """run teststep: request""" + step_data = StepData(name=step.name) - """ + # parse + prepare_upload_step(step, self.__project_meta.functions) + request_dict = step.request.dict() + request_dict.pop("upload", None) + parsed_request_dict = parse_data( + request_dict, step.variables, self.__project_meta.functions + ) - def __init__(self, config, http_client_session=None): - """ run testcase or testsuite. 
+ # prepare arguments + method = parsed_request_dict.pop("method") + url_path = parsed_request_dict.pop("url") + url = build_url(self.config.base_url, url_path) - Args: - config (dict): testcase/testsuite config dict + parsed_request_dict["json"] = parsed_request_dict.pop("req_json", {}) - { - "name": "ABC", - "variables": {}, - "setup_hooks", [], - "teardown_hooks", [] - } - - http_client_session (instance): requests.Session(), or locust.client.Session() instance. - - """ - self.verify = config.get("verify", True) - self.export = config.get("export") or config.get("output", []) - config_variables = config.get("variables", {}) - - self.hrun_request_id = str(uuid.uuid4()) - if "HRUN-Request-ID" not in config_variables: - config_variables["HRUN-Request-ID"] = self.hrun_request_id - else: - self.hrun_request_id = config_variables["HRUN-Request-ID"] - - # testcase setup hooks - testcase_setup_hooks = config.get("setup_hooks", []) - # testcase teardown hooks - self.testcase_teardown_hooks = config.get("teardown_hooks", []) - - self.http_client_session = http_client_session or HttpSession() - self.session_context = SessionContext(config_variables) - - if testcase_setup_hooks: - self.do_hook_actions(testcase_setup_hooks, HookTypeEnum.SETUP) - - def __del__(self): - if self.testcase_teardown_hooks: - self.do_hook_actions(self.testcase_teardown_hooks, HookTypeEnum.TEARDOWN) - - def __clear_test_data(self): - """ clear request and response data - """ - if not isinstance(self.http_client_session, HttpSession): - return - - self.http_client_session.init_meta_data() - - def _handle_skip_feature(self, test_dict): - """ handle skip feature for test - - skip: skip current test unconditionally - - skipIf: skip current test if condition is true - - skipUnless: skip current test unless condition is true - - Args: - test_dict (dict): test info - - Raises: - SkipTest: skip test - - """ - # TODO: move skip to initialize - skip_reason = None - - if "skip" in test_dict: - skip_reason = 
test_dict["skip"] - - elif "skipIf" in test_dict: - skip_if_condition = test_dict["skipIf"] - if self.session_context.eval_content(skip_if_condition): - skip_reason = f"{skip_if_condition} evaluate to True" - - elif "skipUnless" in test_dict: - skip_unless_condition = test_dict["skipUnless"] - if not self.session_context.eval_content(skip_unless_condition): - skip_reason = f"{skip_unless_condition} evaluate to False" - - if skip_reason: - raise SkipTest(skip_reason) - - def do_hook_actions(self, actions, hook_type): - """ call hook actions. - - Args: - actions (list): each action in actions list maybe in two format. - - format1 (dict): assignment, the value returned by hook function will be assigned to variable. - {"var": "${func()}"} - format2 (str): only call hook functions. - ${func()} - - hook_type (HookTypeEnum): setup/teardown - - """ - logger.debug(f"call {hook_type.name} hook actions.") - for action in actions: - - if isinstance(action, dict) and len(action) == 1: - # format 1 - # {"var": "${func()}"} - var_name, hook_content = list(action.items())[0] - hook_content_eval = self.session_context.eval_content(hook_content) - logger.debug( - f"assignment with hook: {var_name} = {hook_content} => {hook_content_eval}") - self.session_context.update_test_variables( - var_name, hook_content_eval - ) - else: - # format 2 - logger.debug(f"call hook function: {action}") - # TODO: check hook function if valid - self.session_context.eval_content(action) - - def _run_test(self, test_dict): - """ run single teststep. 
- - Args: - test_dict (dict): teststep info - { - "name": "teststep description", - "skip": "skip this test unconditionally", - "times": 3, - "variables": [], # optional, override - "request": { - "url": "http://127.0.0.1:5000/api/users/1000", - "method": "POST", - "headers": { - "Content-Type": "application/json", - "authorization": "$authorization", - "random": "$random" - }, - "json": {"name": "user", "password": "123456"} - }, - "extract": {}, # optional - "validate": [], # optional - "setup_hooks": [], # optional - "teardown_hooks": [] # optional - } - - Raises: - exceptions.ParamsError - exceptions.ValidationFailure - exceptions.ExtractFailure - - """ - # clear meta data first to ensure independence for each test - self.__clear_test_data() - - # check skip - self._handle_skip_feature(test_dict) - - # prepare - test_dict = utils.lower_test_dict_keys(test_dict) - test_variables = test_dict.get("variables", {}) - self.session_context.init_test_variables(test_variables) - - # teststep name - test_name = self.session_context.eval_content(test_dict.get("name", "")) - - # parse test request - raw_request = test_dict.get('request', {}) - parsed_test_request = self.session_context.eval_content(raw_request) - self.session_context.update_test_variables("request", parsed_test_request) - - # setup hooks - setup_hooks = test_dict.get("setup_hooks", []) - if setup_hooks: - self.do_hook_actions(setup_hooks, HookTypeEnum.SETUP) - - # prepend url with base_url unless it's already an absolute URL - url = parsed_test_request.pop('url') - base_url = self.session_context.eval_content(test_dict.get("base_url", "")) - parsed_url = utils.build_url(base_url, url) - - request_headers = parsed_test_request.setdefault("headers", {}) - if "HRUN-Request-ID" not in request_headers: - parsed_test_request["headers"]["HRUN-Request-ID"] = \ - self.session_context.session_variables_mapping["HRUN-Request-ID"] - - try: - method = parsed_test_request.pop('method') - 
parsed_test_request.setdefault("verify", self.verify) - group_name = parsed_test_request.pop("group", None) - except KeyError: - raise exceptions.ParamsError("URL or METHOD missed!") - - logger.info(f"{method} {parsed_url}") - logger.debug(f"request kwargs(raw): {parsed_test_request}") + logger.info(f"{method} {url}") + logger.debug(f"request kwargs(raw): {parsed_request_dict}") # request - resp = self.http_client_session.request( - method, - parsed_url, - name=(group_name or test_name), - **parsed_test_request - ) - resp_obj = response.ResponseObject(resp) + self.__session = self.__session or HttpSession() + resp = self.__session.request(method, url, **parsed_request_dict) + resp_obj = ResponseObject(resp) def log_req_resp_details(): - err_msg = "{} DETAILED REQUEST & RESPONSE {}\n".format("*" * 32, "*" * 32) + err_msg = "\n{} DETAILED REQUEST & RESPONSE {}\n".format("*" * 32, "*" * 32) # log request err_msg += "====== request details ======\n" - err_msg += f"url: {parsed_url}\n" + err_msg += f"url: {url}\n" err_msg += f"method: {method}\n" - headers = parsed_test_request.pop("headers", {}) + headers = parsed_request_dict.pop("headers", {}) err_msg += f"headers: {headers}\n" - for k, v in parsed_test_request.items(): + for k, v in parsed_request_dict.items(): v = utils.omit_long_data(v) err_msg += f"{k}: {repr(v)}\n" @@ -268,149 +93,158 @@ class Runner(object): # log response err_msg += "====== response details ======\n" - err_msg += f"status_code: {resp_obj.status_code}\n" - err_msg += f"headers: {resp_obj.headers}\n" - err_msg += f"body: {repr(resp_obj.text)}\n" + err_msg += f"status_code: {resp.status_code}\n" + err_msg += f"headers: {resp.headers}\n" + err_msg += f"body: {repr(resp.text)}\n" logger.error(err_msg) - # teardown hooks - teardown_hooks = test_dict.get("teardown_hooks", []) - if teardown_hooks: - self.session_context.update_test_variables("response", resp_obj) - self.do_hook_actions(teardown_hooks, HookTypeEnum.TEARDOWN) - 
self.http_client_session.update_last_req_resp_record(resp_obj) - # extract - extractors = test_dict.get("extract", {}) - try: - extracted_variables_mapping = resp_obj.extract_response(extractors) - self.session_context.update_session_variables(extracted_variables_mapping) - except (exceptions.ParamsError, exceptions.ExtractFailure): - log_req_resp_details() - raise + extractors = step.extract + extract_mapping = resp_obj.extract(extractors) + step_data.export = extract_mapping + + variables_mapping = step.variables + variables_mapping.update(extract_mapping) # validate - validators = test_dict.get("validate") or test_dict.get("validators") or [] - validate_script = test_dict.get("validate_script", []) - if validate_script: - validators.append({ - "type": "python_script", - "script": validate_script - }) - - validator = Validator(self.session_context, resp_obj) + validators = step.validators try: - validator.validate(validators) - except exceptions.ValidationFailure: + resp_obj.validate( + validators, variables_mapping, self.__project_meta.functions + ) + self.__session.data.success = True + except ValidationFailure: + self.__session.data.success = False log_req_resp_details() raise finally: - self.validation_results = validator.validation_results + # save request & response meta data + self.__session.data.validators = resp_obj.validation_results + self.success &= self.__session.data.success + # save step data + step_data.success = self.__session.data.success + step_data.data = self.__session.data - def _run_testcase(self, testcase_dict): - """ run single testcase. - """ - self.meta_datas = [] - config = testcase_dict.get("config", {}) + return step_data - # each teststeps in one testcase (YAML/JSON) share the same session. 
- test_runner = Runner(config, self.http_client_session) + def __run_step_testcase(self, step): + """run teststep: referenced testcase""" + step_data = StepData(name=step.name) + step_variables = step.variables - tests = testcase_dict.get("teststeps", []) + ref_testcase_path = os.path.join(self.__project_meta.PWD, step.testcase) + case_result = ( + HttpRunner() + .with_session(self.__session) + .with_variables(step_variables) + .run_path(ref_testcase_path) + ) + step_data.data = case_result.get_step_datas() # list of step data + step_data.export = case_result.get_export_variables() + step_data.success = case_result.success + self.success &= case_result.success - for index, test_dict in enumerate(tests): + return step_data - # override current teststep variables with former testcase output variables - former_output_variables = self.session_context.test_variables_mapping - if former_output_variables: - test_dict.setdefault("variables", {}) - test_dict["variables"].update(former_output_variables) + def __run_step(self, step: TStep): + """run teststep, teststep maybe a request or referenced testcase""" + logger.info(f"run step: {step.name}") - try: - test_runner.run_test(test_dict) - except Exception: - # log exception request_type and name for locust stat - self.exception_request_type = test_runner.exception_request_type - self.exception_name = test_runner.exception_name - raise - finally: - _meta_datas = test_runner.meta_datas - self.meta_datas.append(_meta_datas) + if step.request: + step_data = self.__run_step_request(step) + elif step.testcase: + step_data = self.__run_step_testcase(step) + else: + raise ParamsError( + f"teststep is neither a request nor a referenced testcase: {step.dict()}" + ) - self.session_context.update_session_variables( - test_runner.export_variables(test_runner.export) + self.__step_datas.append(step_data) + return step_data.export + + def run(self, testcase: TestCase): + """main entrance""" + self.config = testcase.config + self.teststeps 
= testcase.teststeps + self.config.variables.update(self.__session_variables) + + if self.config.path: + self.__project_meta = load_project_meta(self.config.path) + elif not self.__project_meta: + self.__project_meta = ProjectMeta() + + def parse_config(config: TConfig): + config.variables = parse_variables_mapping( + config.variables, self.__project_meta.functions + ) + config.name = parse_data( + config.name, config.variables, self.__project_meta.functions + ) + config.base_url = parse_data( + config.base_url, config.variables, self.__project_meta.functions + ) + + parse_config(self.config) + self.__start_at = time.time() + self.__step_datas: List[StepData] = [] + self.__session_variables = {} + for step in self.teststeps: + # update with config variables + step.variables.update(self.config.variables) + # update with session variables extracted from pre step + step.variables.update(self.__session_variables) + # parse variables + step.variables = parse_variables_mapping( + step.variables, self.__project_meta.functions + ) + # run step + extract_mapping = self.__run_step(step) + # save extracted variables to session variables + self.__session_variables.update(extract_mapping) + + self.__duration = time.time() - self.__start_at + return self + + def run_path(self, path: Text) -> "HttpRunner": + if not os.path.isfile(path): + raise exceptions.ParamsError(f"Invalid testcase path: {path}") + + _, testcase_obj = load_testcase_file(path) + return self.run(testcase_obj) + + def get_step_datas(self) -> List[StepData]: + return self.__step_datas + + def get_export_variables(self) -> Dict: + export_vars_mapping = {} + for var_name in self.config.export: + if var_name not in self.__session_variables: + raise ParamsError( + f"failed to export variable {var_name} from session variables {self.__session_variables}" + ) + + export_vars_mapping[var_name] = self.__session_variables[var_name] + + return export_vars_mapping + + def get_summary(self) -> TestCaseSummary: + """get 
testcase result summary""" + start_at_timestamp = self.__start_at + start_at_iso_format = datetime.utcfromtimestamp(start_at_timestamp).isoformat() + return TestCaseSummary( + name=self.config.name, + success=self.success, + time=TestCaseTime( + start_at=self.__start_at, + start_at_iso_format=start_at_iso_format, + duration=self.__duration, + ), + in_out=TestCaseInOut( + vars=self.config.variables, export=self.get_export_variables() + ), + step_datas=self.__step_datas, ) - def run_test(self, test_dict): - """ run single teststep of testcase. - test_dict may be in 3 types. - - Args: - test_dict (dict): - - # teststep - { - "name": "teststep description", - "variables": [], # optional - "request": { - "url": "http://127.0.0.1:5000/api/users/1000", - "method": "GET" - } - } - - # nested testcase - { - "config": {...}, - "teststeps": [ - {...}, - {...} - ] - } - - # TODO: function - { - "name": "exec function", - "function": "${func()}" - } - - """ - self.meta_datas = None - if "teststeps" in test_dict: - # nested testcase - test_dict.setdefault("config", {}).setdefault("variables", {}) - test_dict["config"]["variables"].update( - self.session_context.session_variables_mapping) - self._run_testcase(test_dict) - else: - # api - self.validation_results = {} - try: - self._run_test(test_dict) - except Exception: - # log exception request_type and name for locust stat - self.exception_request_type = test_dict["request"]["method"] - self.exception_name = test_dict.get("name") - raise - finally: - # get request/response data and validate results - self.meta_datas = getattr(self.http_client_session, "meta_data", {}) - self.meta_datas["validators"] = self.validation_results - - def export_variables(self, output_variables_list): - """ export current testcase variables - """ - variables_mapping = self.session_context.session_variables_mapping - - output = {} - for variable in output_variables_list: - if variable not in variables_mapping: - logger.warning( - f"variable 
'{variable}' can not be found in variables mapping, " - "failed to export!" - ) - continue - - output[variable] = variables_mapping[variable] - - utils.print_info(output) - return output + def test_start(self): + """discovered by pytest""" + return self.run(TestCase(config=self.config, teststeps=self.teststeps)) diff --git a/httprunner/runner_test.py b/httprunner/runner_test.py new file mode 100644 index 00000000..ca84584f --- /dev/null +++ b/httprunner/runner_test.py @@ -0,0 +1,28 @@ +import unittest + +from httprunner.runner import HttpRunner + + +class TestHttpRunner(unittest.TestCase): + def setUp(self): + self.runner = HttpRunner() + + def test_run_testcase_by_path_request_only(self): + self.runner.run_path( + "examples/postman_echo/request_methods/request_with_variables.yml" + ) + result = self.runner.get_summary() + self.assertTrue(result.success) + self.assertEqual(result.name, "request methods testcase with variables") + self.assertEqual(result.step_datas[0].name, "get with params") + self.assertEqual(len(result.step_datas), 3) + + def test_run_testcase_by_path_ref_testcase(self): + self.runner.run_path( + "examples/postman_echo/request_methods/request_with_testcase_reference.yml" + ) + result = self.runner.get_summary() + self.assertTrue(result.success) + self.assertEqual(result.name, "request methods testcase: reference testcase") + self.assertEqual(result.step_datas[0].name, "request with variables") + self.assertEqual(len(result.step_datas), 1) diff --git a/httprunner/schema.py b/httprunner/schema.py new file mode 100644 index 00000000..4ea1e797 --- /dev/null +++ b/httprunner/schema.py @@ -0,0 +1,174 @@ +import os +from enum import Enum +from typing import Any +from typing import Dict, Text, Union, Callable +from typing import List + +from pydantic import BaseModel, Field +from pydantic import HttpUrl + +Name = Text +Url = Text +BaseUrl = Union[HttpUrl, Text] +VariablesMapping = Dict[Text, Any] +FunctionsMapping = Dict[Text, Callable] +Headers = 
Dict[Text, Text] +Verify = bool +Hook = List[Text] +Export = List[Text] +Validators = List[Dict] +Env = Dict[Text, Any] + + +class MethodEnum(Text, Enum): + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + HEAD = "HEAD" + OPTIONS = "OPTIONS" + PATCH = "PATCH" + CONNECT = "CONNECT" + TRACE = "TRACE" + + +class TConfig(BaseModel): + name: Name + verify: Verify = False + base_url: BaseUrl = "" + variables: VariablesMapping = {} + setup_hooks: Hook = [] + teardown_hooks: Hook = [] + export: Export = [] + path: Text = None + + +class Request(BaseModel): + """requests.Request model""" + + method: MethodEnum = MethodEnum.GET + url: Url + params: Dict[Text, Text] = {} + headers: Headers = {} + req_json: Dict = Field({}, alias="json") + data: Union[Text, Dict[Text, Any]] = "" + cookies: Dict[Text, Text] = {} + timeout: int = 120 + allow_redirects: bool = True + verify: Verify = False + upload: Dict = {} # used for upload files + + +class TStep(BaseModel): + name: Name + request: Request = None + testcase: Text = "" + variables: VariablesMapping = {} + extract: Dict[Text, Text] = {} + validators: Validators = Field([], alias="validate") + + +class TestCase(BaseModel): + config: TConfig + teststeps: List[TStep] + + +class ProjectMeta(BaseModel): + debugtalk_py: Text = "" # debugtalk.py file content + functions: FunctionsMapping = {} + env: Env = {} + PWD: Text = os.getcwd() + test_path: Text = None # run with specified test path + + +class TestsMapping(BaseModel): + project_meta: ProjectMeta + testcases: List[TestCase] + + +class TestCaseTime(BaseModel): + start_at: float = 0 + start_at_iso_format: Text = "" + duration: float = 0 + + +class TestCaseInOut(BaseModel): + vars: VariablesMapping = {} + export: Dict = {} + + +class RequestStat(BaseModel): + content_size: float = 0 + response_time_ms: float = 0 + elapsed_ms: float = 0 + + +class RequestData(BaseModel): + method: MethodEnum = MethodEnum.GET + url: Url + headers: Headers = {} + # TODO: add cookies + 
body: Union[Text, bytes, Dict, None] = {} + + +class ResponseData(BaseModel): + status_code: int + cookies: Dict + encoding: Union[Text, None] = None + headers: Dict + content_type: Text + body: Union[Text, bytes, Dict] + + +class ReqRespData(BaseModel): + request: RequestData + response: ResponseData + + +class SessionData(BaseModel): + """request session data, including request, response, validators and stat data""" + + success: bool = False + # in most cases, req_resps only contains one request & response + # while when 30X redirect occurs, req_resps will contain multiple request & response + req_resps: List[ReqRespData] = [] + stat: RequestStat = RequestStat() + validators: Dict = {} + + +class StepData(BaseModel): + """teststep data, each step maybe corresponding to one request or one testcase""" + + success: bool = False + name: Text = "" # teststep name + data: Union[SessionData, List[SessionData]] = None + export: Dict = {} + + +class TestCaseSummary(BaseModel): + name: Text = "" + success: bool = False + time: TestCaseTime + in_out: TestCaseInOut = {} + log: Text = "" + step_datas: List[StepData] = [] + + +class PlatformInfo(BaseModel): + httprunner_version: Text + python_version: Text + platform: Text + + +class Stat(BaseModel): + total: int = 0 + success: int = 0 + fail: int = 0 + + +class TestSuiteSummary(BaseModel): + success: bool = False + stat: Stat = Stat() + time: TestCaseTime = TestCaseTime() + platform: PlatformInfo + testcases: List[TestCaseSummary] diff --git a/httprunner/schema/__init__.py b/httprunner/schema/__init__.py deleted file mode 100644 index 9009366e..00000000 --- a/httprunner/schema/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .testcase import ProjectMeta, TestCase, TestCases diff --git a/httprunner/schema/api.py b/httprunner/schema/api.py deleted file mode 100644 index 855d2487..00000000 --- a/httprunner/schema/api.py +++ /dev/null @@ -1,14 +0,0 @@ -from pydantic import BaseModel - -from httprunner.schema import common - - -class 
Api(BaseModel): - name: common.Name - request: common.Request - variables: common.Variables - base_url: common.BaseUrl - setup_hooks: common.Hook - teardown_hooks: common.Hook - extract: common.Extract - validate: common.Validate diff --git a/httprunner/schema/common.py b/httprunner/schema/common.py deleted file mode 100644 index 9acd2cf0..00000000 --- a/httprunner/schema/common.py +++ /dev/null @@ -1,61 +0,0 @@ -from enum import Enum -from typing import Dict, List, Any, Tuple - -from pydantic import BaseModel, HttpUrl, Field - -Name = str -Url = HttpUrl -BaseUrl = str -Variables = Dict[str, Any] -Headers = Dict[str, str] -Verify = bool -Hook = List[str] -Export = List[str] -Extract = Dict[str, str] -Validate = List[Dict] -Env = Dict[str, Any] - - -class MethodEnum(str, Enum): - GET = 'GET' - POST = 'POST' - PUT = "PUT" - DELETE = "DELETE" - HEAD = "HEAD" - OPTIONS = "OPTIONS" - PATCH = "PATCH" - CONNECT = "CONNECT" - TRACE = "TRACE" - - -class TestsConfig(BaseModel): - name: Name - verify: Verify = False - base_url: BaseUrl = "" - variables: Variables = {} - setup_hooks: Hook = [] - teardown_hooks: Hook = [] - export: Export = [] - - class Config: - schema_extra = { - "examples": [ - { - "name": "used in testcase/testsuite to configure common fields", - "verify": False, - "base_url": "https://httpbin.org" - } - ] - } - - -class Request(BaseModel): - method: MethodEnum = MethodEnum.GET - url: Url - params: Dict[str, str] = {} - headers: Headers = {} - req_json: Dict = Field({}, alias="json") - cookies: Dict[str, str] = {} - timeout: int = 120 - allow_redirects: bool = True - verify: Verify = False diff --git a/httprunner/schema/testcase.py b/httprunner/schema/testcase.py deleted file mode 100644 index 4ead072e..00000000 --- a/httprunner/schema/testcase.py +++ /dev/null @@ -1,85 +0,0 @@ -from typing import Dict, List, Text - -from pydantic import BaseModel, Field - -from httprunner.schema import common - - -class ProjectMeta(BaseModel): - debugtalk_py: Text = "" - 
variables: common.Variables = {} - env: common.Env = {} - - -class TestStep(BaseModel): - name: common.Name - request: common.Request - extract: Dict[str, str] = {} - validation: common.Validate = Field([], alias="validate") - - -class TestCase(BaseModel): - config: common.TestsConfig - teststeps: List[TestStep] - - class Config: - schema_extra = { - "examples": [ - { - "config": { - "name": "testcase name" - }, - "teststeps": [ - { - "name": "api 1", - "api": "/path/to/api1" - }, - { - "name": "api 2", - "api": "/path/to/api2" - } - ] - }, - { - "config": { - "name": "demo testcase", - "variables": { - "device_sn": "ABC", - "username": "${ENV(USERNAME)}", - "password": "${ENV(PASSWORD)}" - }, - "base_url": "http://127.0.0.1:5000" - }, - "teststeps": [ - { - "name": "demo step 1", - "api": "path/to/api1.yml", - "variables": { - "user_agent": "iOS/10.3", - "device_sn": "$device_sn" - }, - "extract": [ - { - "token": "content.token" - } - ], - "validate": [ - { - "eq": ["status_code", 200] - } - ] - }, - { - "name": "demo step 2", - "api": "path/to/api2.yml", - "variables": { - "token": "$token" - } - } - ] - } - ] - } - - -TestCases = List[TestCase] diff --git a/httprunner/utils.py b/httprunner/utils.py index e9c4cafa..a3c97651 100644 --- a/httprunner/utils.py +++ b/httprunner/utils.py @@ -1,20 +1,12 @@ -# encoding: utf-8 - import collections -import copy -import io -import itertools import json import os.path -import re -from typing import Union +import platform from loguru import logger +from httprunner import __version__ from httprunner import exceptions -from httprunner.exceptions import ParamsError - -absolute_http_url_regexp = re.compile(r"^https?://", re.I) def set_os_environ(variables_mapping): @@ -52,74 +44,6 @@ def get_os_environ(variable_name): raise exceptions.EnvNotFound(variable_name) -def build_url(base_url, path): - """ prepend url with base_url unless it's already an absolute URL """ - if absolute_http_url_regexp.match(path): - return path - elif 
base_url: - return "{}/{}".format(base_url.rstrip("/"), path.lstrip("/")) - else: - raise ParamsError("base url missed!") - - -def query_json(json_content, query, delimiter='.'): - """ Do an xpath-like query with json_content. - - Args: - json_content (dict/list/string): content to be queried. - query (str): query string. - delimiter (str): delimiter symbol. - - Returns: - str: queried result. - - Examples: - >>> json_content = { - "ids": [1, 2, 3, 4], - "person": { - "name": { - "first_name": "Leo", - "last_name": "Lee", - }, - "age": 29, - "cities": ["Guangzhou", "Shenzhen"] - } - } - >>> - >>> query_json(json_content, "person.name.first_name") - >>> Leo - >>> - >>> query_json(json_content, "person.name.first_name.0") - >>> L - >>> - >>> query_json(json_content, "person.cities.0") - >>> Guangzhou - - """ - raise_flag = False - response_body = f"response body: {json_content}\n" - try: - for key in query.split(delimiter): - if isinstance(json_content, (list, str, bytes)): - json_content = json_content[int(key)] - elif isinstance(json_content, dict): - json_content = json_content[key] - else: - logger.error( - f"invalid type value: {json_content}({type(json_content)})") - raise_flag = True - except (KeyError, ValueError, IndexError): - raise_flag = True - - if raise_flag: - err_msg = f"Failed to extract! 
=> {query}\n" - err_msg += response_body - logger.error(err_msg) - raise exceptions.ExtractFailure(err_msg) - - return json_content - - def lower_dict_keys(origin_dict): """ convert keys in dict to lower case @@ -152,156 +76,7 @@ def lower_dict_keys(origin_dict): if not origin_dict or not isinstance(origin_dict, dict): return origin_dict - return { - key.lower(): value - for key, value in origin_dict.items() - } - - -def lower_test_dict_keys(test_dict): - """ convert keys in test_dict to lower case, convertion will occur in two places: - 1, all keys in test_dict; - 2, all keys in test_dict["request"] - """ - # convert keys in test_dict - test_dict = lower_dict_keys(test_dict) - - if "request" in test_dict: - # convert keys in test_dict["request"] - test_dict["request"] = lower_dict_keys(test_dict["request"]) - - return test_dict - - -def deepcopy_dict(data): - """ deepcopy dict data, ignore file object (_io.BufferedReader) - - Args: - data (dict): dict data structure - { - 'a': 1, - 'b': [2, 4], - 'c': lambda x: x+1, - 'd': open('LICENSE'), - 'f': { - 'f1': {'a1': 2}, - 'f2': io.open('LICENSE', 'rb'), - } - } - - Returns: - dict: deep copied dict data, with file object unchanged. - - """ - try: - return copy.deepcopy(data) - except TypeError: - copied_data = {} - for key, value in data.items(): - if isinstance(value, dict): - copied_data[key] = deepcopy_dict(value) - else: - try: - copied_data[key] = copy.deepcopy(value) - except TypeError: - copied_data[key] = value - - return copied_data - - -def ensure_mapping_format(variables): - """ ensure variables are in mapping format. 
- - Args: - variables (list/dict): original variables - - Returns: - dict: ensured variables in dict format - - Examples: - >>> variables = [ - {"a": 1}, - {"b": 2} - ] - >>> print(ensure_mapping_format(variables)) - { - "a": 1, - "b": 2 - } - - """ - if isinstance(variables, list): - variables_dict = {} - for map_dict in variables: - variables_dict.update(map_dict) - - return variables_dict - - elif isinstance(variables, dict): - return variables - - else: - raise exceptions.ParamsError("variables format error!") - - -def extend_variables(raw_variables, override_variables): - """ extend raw_variables with override_variables. - override_variables will merge and override raw_variables. - - Args: - raw_variables (list): - override_variables (list): - - Returns: - dict: extended variables mapping - - Examples: - >>> raw_variables = [{"var1": "val1"}, {"var2": "val2"}] - >>> override_variables = [{"var1": "val111"}, {"var3": "val3"}] - >>> extend_variables(raw_variables, override_variables) - { - 'var1', 'val111', - 'var2', 'val2', - 'var3', 'val3' - } - - """ - if not raw_variables: - override_variables_mapping = ensure_mapping_format(override_variables) - return override_variables_mapping - - elif not override_variables: - raw_variables_mapping = ensure_mapping_format(raw_variables) - return raw_variables_mapping - - else: - raw_variables_mapping = ensure_mapping_format(raw_variables) - override_variables_mapping = ensure_mapping_format(override_variables) - raw_variables_mapping.update(override_variables_mapping) - return raw_variables_mapping - - -def get_testcase_io(testcase): - """ get and print testcase input(variables) and output(export). - - Args: - testcase (unittest.suite.TestSuite): corresponding to one YAML/JSON file, it has been set two attributes: - config: parsed config block - runner: initialized runner.Runner() with config - Returns: - dict: input(variables) and output mapping. 
- - """ - test_runner = testcase.runner - variables = testcase.config.get("variables", {}) - output_list = testcase.config.get("export") \ - or testcase.config.get("output", []) - export_mapping = test_runner.export_variables(output_list) - - return { - "in": variables, - "out": export_mapping - } + return {key.lower(): value for key, value in origin_dict.items()} def print_info(info_mapping): @@ -349,160 +124,6 @@ def print_info(info_mapping): logger.info(content) -def create_scaffold(project_name): - """ create scaffold with specified project name. - """ - if os.path.isdir(project_name): - logger.warning(f"Folder {project_name} exists, please specify a new folder name.") - return - - logger.info(f"Start to create new project: {project_name}") - logger.info(f"CWD: {os.getcwd()}") - - def create_folder(path): - os.makedirs(path) - msg = f"created folder: {path}" - logger.info(msg) - - def create_file(path, file_content=""): - with open(path, 'w') as f: - f.write(file_content) - msg = f"created file: {path}" - logger.info(msg) - - demo_api_content = """ -name: demo api -variables: - var1: value1 - var2: value2 -request: - url: /api/path/$var1 - method: POST - headers: - Content-Type: "application/json" - json: - key: $var2 -validate: - - eq: ["status_code", 200] -""" - demo_testcase_content = """ -config: - name: "demo testcase" - variables: - device_sn: "ABC" - username: ${ENV(USERNAME)} - password: ${ENV(PASSWORD)} - base_url: "http://127.0.0.1:5000" - -teststeps: -- - name: demo step 1 - api: path/to/api1.yml - variables: - user_agent: 'iOS/10.3' - device_sn: $device_sn - extract: - - token: content.token - validate: - - eq: ["status_code", 200] -- - name: demo step 2 - api: path/to/api2.yml - variables: - token: $token -""" - demo_testsuite_content = """ -config: - name: "demo testsuite" - variables: - device_sn: "XYZ" - base_url: "http://127.0.0.1:5000" - -testcases: -- - name: call demo_testcase with data 1 - testcase: path/to/demo_testcase.yml - variables: - 
device_sn: $device_sn -- - name: call demo_testcase with data 2 - testcase: path/to/demo_testcase.yml - variables: - device_sn: $device_sn -""" - ignore_content = "\n".join([ - ".env", - "reports/*", - "__pycache__/*", - "*.pyc", - ".python-version", - "logs/*" - ]) - demo_debugtalk_content = """ -import time - -def sleep(n_secs): - time.sleep(n_secs) -""" - demo_env_content = "\n".join([ - "USERNAME=leolee", - "PASSWORD=123456" - ]) - - create_folder(project_name) - create_folder(os.path.join(project_name, "api")) - create_folder(os.path.join(project_name, "testcases")) - create_folder(os.path.join(project_name, "testsuites")) - create_folder(os.path.join(project_name, "reports")) - create_file(os.path.join(project_name, "api", "demo_api.yml"), demo_api_content) - create_file(os.path.join(project_name, "testcases", "demo_testcase.yml"), demo_testcase_content) - create_file(os.path.join(project_name, "testsuites", "demo_testsuite.yml"), demo_testsuite_content) - create_file(os.path.join(project_name, "debugtalk.py"), demo_debugtalk_content) - create_file(os.path.join(project_name, ".env"), demo_env_content) - create_file(os.path.join(project_name, ".gitignore"), ignore_content) - - -def gen_cartesian_product(*args): - """ generate cartesian product for lists - - Args: - args (list of list): lists to be generated with cartesian product - - Returns: - list: cartesian product in list - - Examples: - - >>> arg1 = [{"a": 1}, {"a": 2}] - >>> arg2 = [{"x": 111, "y": 112}, {"x": 121, "y": 122}] - >>> args = [arg1, arg2] - >>> gen_cartesian_product(*args) - >>> # same as below - >>> gen_cartesian_product(arg1, arg2) - [ - {'a': 1, 'x': 111, 'y': 112}, - {'a': 1, 'x': 121, 'y': 122}, - {'a': 2, 'x': 111, 'y': 112}, - {'a': 2, 'x': 121, 'y': 122} - ] - - """ - if not args: - return [] - elif len(args) == 1: - return args[0] - - product_list = [] - for product_item_tuple in itertools.product(*args): - product_item_dict = {} - for item in product_item_tuple: - 
product_item_dict.update(item) - - product_list.append(product_item_dict) - - return product_list - - def omit_long_data(body, omit_len=512): """ omit too long str/bytes """ @@ -522,61 +143,11 @@ def omit_long_data(body, omit_len=512): return omitted_body + appendix_str -def dump_json_file(json_data: Union[dict, list], json_file_abs_path: str) -> None: - """ dump json data to file - """ - class PythonObjectEncoder(json.JSONEncoder): - def default(self, obj): - try: - return super().default(self, obj) - except TypeError: - return str(obj) - - file_foder_path = os.path.dirname(json_file_abs_path) - if not os.path.isdir(file_foder_path): - os.makedirs(file_foder_path) - - try: - with io.open(json_file_abs_path, 'w', encoding='utf-8') as outfile: - json.dump( - json_data, - outfile, - indent=4, - separators=(',', ':'), - ensure_ascii=False, - cls=PythonObjectEncoder - ) - - msg = f"dump file: {json_file_abs_path}" - logger.info(msg) - - except TypeError as ex: - msg = f"Failed to dump json file: {json_file_abs_path}\nReason: {ex}" - logger.error(msg) - - -def prepare_log_file_abs_path(test_path: str, file_name: str) -> str: - """ prepare dump json file absolute path. 
- """ - current_working_dir = os.getcwd() - - if not test_path: - # running passed in testcase/testsuite data structure - dump_file_name = f"tests_mapping.{file_name}" - dumped_json_file_abs_path = os.path.join(current_working_dir, "logs", dump_file_name) - return dumped_json_file_abs_path - - # both test_path and pwd_dir_path are absolute path - logs_dir_path = os.path.join(current_working_dir, "logs") - - if os.path.isdir(test_path): - file_foder_path = os.path.join(logs_dir_path, test_path) - dump_file_name = f"all.{file_name}" - else: - file_relative_folder_path, test_file = os.path.split(test_path) - file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path) - test_file_name, _file_suffix = os.path.splitext(test_file) - dump_file_name = f"{test_file_name}.{file_name}" - - dumped_json_file_abs_path = os.path.join(file_foder_path, dump_file_name) - return dumped_json_file_abs_path +def get_platform(): + return { + "httprunner_version": __version__, + "python_version": "{} {}".format( + platform.python_implementation(), platform.python_version() + ), + "platform": platform.platform(), + } diff --git a/httprunner/utils_test.py b/httprunner/utils_test.py index e41761d7..252b33f0 100644 --- a/httprunner/utils_test.py +++ b/httprunner/utils_test.py @@ -1,69 +1,22 @@ import io import os -import shutil import unittest -from httprunner import exceptions, loader, utils +from httprunner import loader, utils class TestUtils(unittest.TestCase): - def test_set_os_environ(self): self.assertNotIn("abc", os.environ) - variables_mapping = { - "abc": "123" - } + variables_mapping = {"abc": "123"} utils.set_os_environ(variables_mapping) self.assertIn("abc", os.environ) self.assertEqual(os.environ["abc"], "123") - def test_query_json(self): - json_content = { - "ids": [1, 2, 3, 4], - "person": { - "name": { - "first_name": "Leo", - "last_name": "Lee", - }, - "age": 29, - "cities": ["Guangzhou", "Shenzhen"] - } - } - query = "ids.2" - result = 
utils.query_json(json_content, query) - self.assertEqual(result, 3) - - query = "ids.str_key" - with self.assertRaises(exceptions.ExtractFailure): - utils.query_json(json_content, query) - - query = "ids.5" - with self.assertRaises(exceptions.ExtractFailure): - utils.query_json(json_content, query) - - query = "person.age" - result = utils.query_json(json_content, query) - self.assertEqual(result, 29) - - query = "person.not_exist_key" - with self.assertRaises(exceptions.ExtractFailure): - utils.query_json(json_content, query) - - query = "person.cities.0" - result = utils.query_json(json_content, query) - self.assertEqual(result, "Guangzhou") - - query = "person.name.first_name" - result = utils.query_json(json_content, query) - self.assertEqual(result, "Leo") - - query = "person.name.first_name.0" - result = utils.query_json(json_content, query) - self.assertEqual(result, "L") - def current_validators(self): from httprunner.builtin import comparators - functions_mapping = loader.load.load_module_functions(comparators) + + functions_mapping = loader.load_module_functions(comparators) functions_mapping["equals"](None, None) functions_mapping["equals"](1, 1) @@ -80,17 +33,17 @@ class TestUtils(unittest.TestCase): functions_mapping["not_equals"](123, "123") functions_mapping["length_equals"]("123", 3) - # Because the Numbers in a CSV file are by default treated as strings, + # Because the Numbers in a CSV file are by default treated as strings, # you need to convert them to Numbers, and we'll test that out here. 
- functions_mapping["length_equals"]("123", '3') + functions_mapping["length_equals"]("123", "3") with self.assertRaises(AssertionError): - functions_mapping["length_equals"]("123", 'abc') + functions_mapping["length_equals"]("123", "abc") functions_mapping["length_greater_than"]("123", 2) functions_mapping["length_greater_than_or_equals"]("123", 3) functions_mapping["contains"]("123abc456", "3ab") - functions_mapping["contains"](['1', '2'], "1") - functions_mapping["contains"]({'a':1, 'b':2}, "a") + functions_mapping["contains"](["1", "2"], "1") + functions_mapping["contains"]({"a": 1, "b": 2}, "a") functions_mapping["contained_by"]("3ab", "123abc456") functions_mapping["regex_match"]("123abc456", "^123\w+456$") @@ -113,41 +66,11 @@ class TestUtils(unittest.TestCase): functions_mapping["type_match"]({}, "dict") functions_mapping["type_match"]({"a": 1}, "dict") - def test_handle_config_key_case(self): - origin_dict = { - "Name": "test", - "Request": { - "url": "http://127.0.0.1:5000", - "METHOD": "POST", - "Headers": { - "Accept": "application/json", - "User-Agent": "ios/9.3" - } - } - } - new_dict = utils.lower_test_dict_keys(origin_dict) - self.assertIn("name", new_dict) - self.assertIn("request", new_dict) - self.assertIn("method", new_dict["request"]) - self.assertIn("headers", new_dict["request"]) - self.assertIn("Accept", new_dict["request"]["headers"]) - self.assertIn("User-Agent", new_dict["request"]["headers"]) - - origin_dict = { - "Name": "test", - "Request": "$default_request" - } - new_dict = utils.lower_test_dict_keys(origin_dict) - self.assertIn("$default_request", new_dict["request"]) - def test_lower_dict_keys(self): request_dict = { "url": "http://127.0.0.1:5000", "METHOD": "POST", - "Headers": { - "Accept": "application/json", - "User-Agent": "ios/9.3" - } + "Headers": {"Accept": "application/json", "User-Agent": "ios/9.3"}, } new_request_dict = utils.lower_dict_keys(request_dict) self.assertIn("method", new_request_dict) @@ -163,138 +86,6 @@ 
class TestUtils(unittest.TestCase): new_request_dict = utils.lower_dict_keys(request_dict) self.assertEqual(None, request_dict) - def test_ensure_mapping_format(self): - map_list = [ - {"a": 1}, - {"b": 2} - ] - ordered_dict = utils.ensure_mapping_format(map_list) - self.assertIsInstance(ordered_dict, dict) - self.assertIn("a", ordered_dict) - - def test_extend_variables(self): - raw_variables = [{"var1": "val1"}, {"var2": "val2"}] - override_variables = [{"var1": "val111"}, {"var3": "val3"}] - extended_variables_mapping = utils.extend_variables(raw_variables, override_variables) - self.assertEqual(extended_variables_mapping["var1"], "val111") - self.assertEqual(extended_variables_mapping["var2"], "val2") - self.assertEqual(extended_variables_mapping["var3"], "val3") - - def test_extend_variables_fix(self): - raw_variables = [{"var1": "val1"}, {"var2": "val2"}] - override_variables = {} - extended_variables_mapping = utils.extend_variables(raw_variables, override_variables) - self.assertEqual(extended_variables_mapping["var1"], "val1") - - def test_deepcopy_dict(self): - license_path = os.path.join( - os.path.dirname(os.path.dirname(__file__)), - "LICENSE" - ) - data = { - 'a': 1, - 'b': [2, 4], - 'c': lambda x: x+1, - 'd': open(license_path), - 'f': { - 'f1': {'a1': 2}, - 'f2': io.open(license_path, 'rb'), - } - } - new_data = utils.deepcopy_dict(data) - data["a"] = 0 - self.assertEqual(new_data["a"], 1) - data["f"]["f1"] = 123 - self.assertEqual(new_data["f"]["f1"], {'a1': 2}) - self.assertNotEqual(id(new_data["b"]), id(data["b"])) - self.assertEqual(id(new_data["c"]), id(data["c"])) - # self.assertEqual(id(new_data["d"]), id(data["d"])) - - def test_create_scaffold(self): - project_name = "projectABC" - utils.create_scaffold(project_name) - self.assertTrue(os.path.isdir(os.path.join(project_name, "api"))) - self.assertTrue(os.path.isdir(os.path.join(project_name, "testcases"))) - self.assertTrue(os.path.isdir(os.path.join(project_name, "testsuites"))) - 
self.assertTrue(os.path.isdir(os.path.join(project_name, "reports"))) - self.assertTrue(os.path.isfile(os.path.join(project_name, "debugtalk.py"))) - self.assertTrue(os.path.isfile(os.path.join(project_name, ".env"))) - shutil.rmtree(project_name) - - def test_cartesian_product_one(self): - parameters_content_list = [ - [ - {"a": 1}, - {"a": 2} - ] - ] - product_list = utils.gen_cartesian_product(*parameters_content_list) - self.assertEqual( - product_list, - [ - {"a": 1}, - {"a": 2} - ] - ) - - def test_cartesian_product_multiple(self): - parameters_content_list = [ - [ - {"a": 1}, - {"a": 2} - ], - [ - {"x": 111, "y": 112}, - {"x": 121, "y": 122} - ] - ] - product_list = utils.gen_cartesian_product(*parameters_content_list) - self.assertEqual( - product_list, - [ - {'a': 1, 'x': 111, 'y': 112}, - {'a': 1, 'x': 121, 'y': 122}, - {'a': 2, 'x': 111, 'y': 112}, - {'a': 2, 'x': 121, 'y': 122} - ] - ) - - def test_cartesian_product_empty(self): - parameters_content_list = [] - product_list = utils.gen_cartesian_product(*parameters_content_list) - self.assertEqual(product_list, []) - def test_print_info(self): - info_mapping = { - "a": 1, - "t": (1, 2), - "b": { - "b1": 123 - }, - "c": None, - "d": [4, 5] - } + info_mapping = {"a": 1, "t": (1, 2), "b": {"b1": 123}, "c": None, "d": [4, 5]} utils.print_info(info_mapping) - - def test_prepare_dump_json_file_path_for_folder(self): - # hrun tests/httpbin/a.b.c/ --save-tests - test_path = os.path.join("tests", "httpbin", "a.b.c") - self.assertEqual( - utils.prepare_log_file_abs_path(test_path, "loaded.json"), - os.path.join(os.getcwd(), "logs", "tests/httpbin/a.b.c/all.loaded.json") - ) - - def test_prepare_dump_json_file_path_for_file(self): - # hrun tests/httpbin/a.b.c/rpc.yml --save-tests - test_path = os.path.join("tests", "httpbin", "a.b.c", "rpc.yml") - self.assertEqual( - utils.prepare_log_file_abs_path(test_path, "loaded.json"), - os.path.join(os.getcwd(), "logs", "tests/httpbin/a.b.c/rpc.loaded.json") - ) - - def 
test_prepare_dump_json_file_path_for_passed_testcase(self): - test_path = "" - self.assertEqual( - utils.prepare_log_file_abs_path(test_path, "loaded.json"), - os.path.join(os.getcwd(), "logs", "tests_mapping.loaded.json") - ) diff --git a/httprunner/validator.py b/httprunner/validator.py deleted file mode 100644 index 9abe5699..00000000 --- a/httprunner/validator.py +++ /dev/null @@ -1,204 +0,0 @@ -# encoding: utf-8 - -import sys -import traceback - -from loguru import logger - -from httprunner import exceptions, parser - - -class Validator(object): - """Validate tests - - Attributes: - validation_results (dict): store validation results, - including validate_extractor and validate_script. - - """ - - def __init__(self, session_context, resp_obj): - """ initialize a Validator for each teststep (API request) - - Args: - session_context: HttpRunner session context - resp_obj: ResponseObject instance - """ - self.session_context = session_context - self.resp_obj = resp_obj - self.validation_results = {} - - def __eval_validator_check(self, check_item): - """ evaluate check item in validator. - - Args: - check_item: check_item should only be the following 5 formats: - 1, variable reference, e.g. $token - 2, function reference, e.g. ${is_status_code_200($status_code)} - 3, dict or list, maybe containing variable/function reference, e.g. {"var": "$abc"} - 4, string joined by delimiter. e.g. "status_code", "headers.content-type" - 5, regex string, e.g. "LB[\d]*(.*)RB[\d]*" - - """ - if isinstance(check_item, (dict, list)) \ - or isinstance(check_item, parser.LazyString): - # format 1/2/3 - check_value = self.session_context.eval_content(check_item) - else: - # format 4/5 - check_value = self.resp_obj.extract_field(check_item) - - return check_value - - def __eval_validator_expect(self, expect_item): - """ evaluate expect item in validator. - - Args: - expect_item: expect_item should only be in 2 types: - 1, variable reference, e.g. 
$expect_status_code - 2, actual value, e.g. 200 - - """ - expect_value = self.session_context.eval_content(expect_item) - return expect_value - - def validate_script(self, script): - """ make validation with python script - """ - result = { - "validate_script": "
".join(script), - "check_result": "pass", - "output": "" - } - - script = "\n ".join(script) - code = f""" -# encoding: utf-8 - -def run_validate_script(): - {script} -""" - - variables = { - "status_code": self.resp_obj.status_code, - "response_json": self.resp_obj.json, - "response": self.resp_obj - } - variables.update(self.session_context.test_variables_mapping) - variables.update(globals()) - - try: - exec(code, variables) - except SyntaxError as ex: - logger.warning(f"SyntaxError in python validate script: {ex}") - result["check_result"] = "fail" - result["output"] = "
".join([ - f"ErrorMessage: {ex.msg}", - f"ErrorLine: {ex.lineno}", - f"ErrorText: {ex.text}" - ]) - return result - - try: - # run python validate script - variables["run_validate_script"]() - except Exception as ex: - logger.warning(f"run python validate script failed: {ex}") - result["check_result"] = "fail" - - _type, _value, _tb = sys.exc_info() - - _lineno = -1 - if _tb.tb_next: - _lineno = _tb.tb_next.tb_lineno - line_no = _lineno - 4 - elif len(traceback.extract_tb(_tb)) > 0: - # filename, lineno, name, line - _, _lineno, _, _ = traceback.extract_tb(_tb)[-1] - line_no = _lineno - 4 - else: - line_no = "N/A" - - result["output"] = "
".join([ - f"ErrorType: {_type.__name__}", - f"ErrorLine: {line_no}" - ]) - - return result - - def validate(self, validators): - """ make validation with comparators - """ - self.validation_results = {} - if not validators: - return - - logger.debug("start to validate.") - - validate_pass = True - failures = [] - - for validator in validators: - - if isinstance(validator, dict) and validator.get("type") == "python_script": - script = self.session_context.eval_content(validator["script"]) - result = self.validate_script(script) - if result["check_result"] == "fail": - validate_pass = False - failures.append(result["output"]) - - self.validation_results["validate_script"] = result - continue - - if "validate_extractor" not in self.validation_results: - self.validation_results["validate_extractor"] = [] - - # validator should be LazyFunction object - if not isinstance(validator, parser.LazyFunction): - raise exceptions.ValidationFailure( - f"validator should be parsed first: {validators}") - - # evaluate validator args with context variable mapping. 
- validator_args = validator.get_args() - check_item, expect_item = validator_args - check_value = self.__eval_validator_check(check_item) - expect_value = self.__eval_validator_expect(expect_item) - validator.update_args([check_value, expect_value]) - - comparator = validator.func_name - validator_dict = { - "comparator": comparator, - "check": check_item, - "check_value": check_value, - "expect": expect_item, - "expect_value": expect_value - } - validate_msg = f"\nvalidate: {check_item} {comparator} {expect_value}({type(expect_value).__name__})" - - try: - validator.to_value(self.session_context.test_variables_mapping) - validator_dict["check_result"] = "pass" - validate_msg += "\t==> pass" - logger.debug(validate_msg) - except (AssertionError, TypeError): - validate_pass = False - validator_dict["check_result"] = "fail" - validate_msg += "\t==> fail" - validate_msg += "\n{}({}) {} {}({})".format( - check_value, - type(check_value).__name__, - comparator, - expect_value, - type(expect_value).__name__ - ) - logger.error(validate_msg) - failures.append(validate_msg) - - self.validation_results["validate_extractor"].append(validator_dict) - - # restore validator args, in case of running multiple times - validator.update_args(validator_args) - - if not validate_pass: - failures_string = "\n".join([failure for failure in failures]) - raise exceptions.ValidationFailure(failures_string) diff --git a/poetry.lock b/poetry.lock index de982f13..8cc0bcd1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -12,6 +12,23 @@ version = "0.2.2" python = "<3.7" version = "2.4" +[[package]] +category = "main" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +name = "appdirs" +optional = false +python-versions = "*" +version = "1.4.4" + +[[package]] +category = "main" +description = "Atomic file writes." 
+marker = "sys_platform == \"win32\"" +name = "atomicwrites" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.4.0" + [[package]] category = "main" description = "Classes Without Boilerplate" @@ -26,6 +43,26 @@ dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.int docs = ["sphinx", "zope.interface"] tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +[[package]] +category = "main" +description = "The uncompromising code formatter." +name = "black" +optional = false +python-versions = ">=3.6" +version = "19.10b0" + +[package.dependencies] +appdirs = "*" +attrs = ">=18.1.0" +click = ">=6.5" +pathspec = ">=0.6,<1" +regex = "*" +toml = ">=0.9.4" +typed-ast = ">=1.4.0" + +[package.extras] +d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] + [[package]] category = "main" description = "Python package for providing Mozilla's CA Bundle." @@ -43,7 +80,7 @@ python-versions = "*" version = "3.0.4" [[package]] -category = "dev" +category = "main" description = "Composable command line interface toolkit" name = "click" optional = false @@ -114,20 +151,6 @@ optional = false python-versions = "*" version = "1.0.5" -[[package]] -category = "dev" -description = "A microframework based on Werkzeug, Jinja2 and good intentions" -name = "flask" -optional = false -python-versions = "*" -version = "0.12.4" - -[package.dependencies] -Jinja2 = ">=2.4" -Werkzeug = ">=0.7" -click = ">=2.0" -itsdangerous = ">=0.21" - [[package]] category = "dev" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" @@ -136,17 +159,6 @@ optional = false python-versions = "*" version = "0.9.0" -[[package]] -category = "main" -description = "Convert HAR(HTTP Archive) to YAML/JSON testcases for HttpRunner." 
-name = "har2case" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -version = "0.3.1" - -[package.dependencies] -PyYAML = "*" - [[package]] category = "dev" description = "A collection of framework independent HTTP protocol utils." @@ -183,7 +195,7 @@ marker = "python_version < \"3.8\"" name = "importlib-metadata" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "1.3.0" +version = "1.6.0" [package.dependencies] zipp = ">=0.5" @@ -192,14 +204,6 @@ zipp = ">=0.5" docs = ["sphinx", "rst.linker"] testing = ["packaging", "importlib-resources"] -[[package]] -category = "dev" -description = "Various helpers to pass data to untrusted environments and back." -name = "itsdangerous" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.1.0" - [[package]] category = "main" description = "A very fast and expressive template engine." @@ -216,33 +220,11 @@ i18n = ["Babel (>=0.8)"] [[package]] category = "main" -description = "An XPath for JSON" -name = "jsonpath" +description = "JSON Matching Expressions" +name = "jmespath" optional = false python-versions = "*" -version = "0.82" - -[[package]] -category = "main" -description = "An implementation of JSON Schema validation for Python" -name = "jsonschema" -optional = false -python-versions = "*" -version = "3.2.0" - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" - -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" - -[package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +version = "0.9.5" [[package]] category = "main" @@ -274,14 +256,54 @@ version = "1.1.1" [[package]] category = "main" description = "More routines for operating on iterables, beyond itertools" -marker = 
"python_version < \"3.8\"" name = "more-itertools" optional = false -python-versions = "*" -version = "5.0.0" +python-versions = ">=3.5" +version = "8.2.0" + +[[package]] +category = "main" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.3" [package.dependencies] -six = ">=1.0.0,<2.0.0" +pyparsing = ">=2.0.2" +six = "*" + +[[package]] +category = "main" +description = "Utility library for gitignore style pattern matching of file paths." +name = "pathspec" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.8.0" + +[[package]] +category = "main" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.13.1" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[package.extras] +dev = ["pre-commit", "tox"] + +[[package]] +category = "main" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.8.1" [[package]] category = "main" @@ -303,14 +325,37 @@ typing_extensions = ["typing-extensions (>=3.7.2)"] [[package]] category = "main" -description = "Persistent/Functional/Immutable data structures" -name = "pyrsistent" +description = "Python parsing module" +name = "pyparsing" optional = false -python-versions = "*" -version = "0.15.6" +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.7" + +[[package]] +category = "main" +description = "pytest: simple powerful testing with Python" +name = "pytest" +optional = false +python-versions = ">=3.5" +version = "5.4.2" [package.dependencies] -six = "*" +atomicwrites = ">=1.0" +attrs = ">=17.4.0" +colorama = "*" +more-itertools = 
">=4.0.0" +packaging = "*" +pluggy = ">=0.12,<1.0" +py = ">=1.5.0" +wcwidth = "*" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[package.extras] +checkqa-mypy = ["mypy (v0.761)"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] category = "main" @@ -320,6 +365,14 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" version = "5.2" +[[package]] +category = "main" +description = "Alternative regular expression module, to replace re." +name = "regex" +optional = false +python-versions = "*" +version = "2020.5.14" + [[package]] category = "main" description = "Python HTTP for Humans." @@ -349,39 +402,13 @@ version = "0.9.1" [package.dependencies] requests = ">=2.0.1,<3.0.0" -[[package]] -category = "main" -description = "Python client for Sentry (https://getsentry.com)" -name = "sentry-sdk" -optional = false -python-versions = "*" -version = "0.13.5" - -[package.dependencies] -certifi = "*" -urllib3 = ">=1.10.0" - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -beam = ["beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -flask = ["flask (>=0.11)", "blinker (>=1.1)"] -pyspark = ["pyspark (>=2.4.4)"] -rq = ["0.6"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -tornado = ["tornado (>=5)"] - [[package]] category = "main" description = "Python 2 and 3 compatibility utilities" name = "six" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*" -version = "1.13.0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.14.0" [[package]] category = "dev" @@ -394,6 +421,22 @@ version = "0.12.9" [package.extras] full = ["aiofiles", "graphene", "itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests", "ujson"] +[[package]] +category = "main" +description = "Python Library for Tom's Obvious, Minimal Language" +name = "toml" 
+optional = false +python-versions = "*" +version = "0.10.1" + +[[package]] +category = "main" +description = "a fork of Python 2 and 3 ast modules with type comment support" +name = "typed-ast" +optional = false +python-versions = "*" +version = "1.4.1" + [[package]] category = "main" description = "HTTP library with thread-safe connection pooling, file post, and more." @@ -431,6 +474,14 @@ optional = false python-versions = "*" version = "0.14.0" +[[package]] +category = "main" +description = "Measures number of Terminal column cells of wide-character codes" +name = "wcwidth" +optional = false +python-versions = "*" +version = "0.1.9" + [[package]] category = "dev" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" @@ -439,19 +490,6 @@ optional = false python-versions = ">=3.6" version = "8.0.2" -[[package]] -category = "dev" -description = "The comprehensive WSGI web application library." -name = "werkzeug" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.16.0" - -[package.extras] -dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-issues"] -termcolor = ["termcolor"] -watchdog = ["watchdog"] - [[package]] category = "main" description = "A small Python utility to set file creation time on Windows" @@ -470,18 +508,15 @@ description = "Backport of pathlib-compatible object wrapper for zip files" marker = "python_version < \"3.8\"" name = "zipp" optional = false -python-versions = ">=2.7" -version = "0.6.0" - -[package.dependencies] -more-itertools = "*" +python-versions = ">=3.6" +version = "3.1.0" [package.extras] docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["pathlib2", "contextlib2", "unittest2"] +testing = ["jaraco.itertools", "func-timeout"] [metadata] -content-hash = "57ff78f24ca37a3421d5c64007bd71eba394d6751fdbb2d0b446f523cfed9c62" +content-hash = "be53fb0cd423bac9dda129a958a58026009a99a455081333d7af51c22a4df8cf" 
python-versions = "^3.6" [metadata.files] @@ -489,10 +524,22 @@ aiocontextvars = [ {file = "aiocontextvars-0.2.2-py2.py3-none-any.whl", hash = "sha256:885daf8261818767d8f7cbd79f9d4482d118f024b6586ef6e67980236a27bfa3"}, {file = "aiocontextvars-0.2.2.tar.gz", hash = "sha256:f027372dc48641f683c559f247bd84962becaacdc9ba711d583c3871fb5652aa"}, ] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] attrs = [ {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, ] +black = [ + {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, + {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, +] certifi = [ {file = "certifi-2019.11.28-py2.py3-none-any.whl", hash = "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3"}, {file = "certifi-2019.11.28.tar.gz", hash = "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"}, @@ -558,18 +605,10 @@ filetype = [ {file = "filetype-1.0.5-py2.py3-none-any.whl", hash = "sha256:4967124d982a71700d94a08c49c4926423500e79382a92070f5ab248d44fe461"}, {file = "filetype-1.0.5.tar.gz", hash = "sha256:17a3b885f19034da29640b083d767e0f13c2dcb5dcc267945c8b6e5a5a9013c7"}, ] -flask = [ - {file = 
"Flask-0.12.4-py2.py3-none-any.whl", hash = "sha256:6c02dbaa5a9ef790d8219bdced392e2d549c10cd5a5ba4b6aa65126b2271af29"}, - {file = "Flask-0.12.4.tar.gz", hash = "sha256:2ea22336f6d388b4b242bc3abf8a01244a8aa3e236e7407469ef78c16ba355dd"}, -] h11 = [ {file = "h11-0.9.0-py2.py3-none-any.whl", hash = "sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"}, {file = "h11-0.9.0.tar.gz", hash = "sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1"}, ] -har2case = [ - {file = "har2case-0.3.1-py2.py3-none-any.whl", hash = "sha256:84d3a5cc9fbb16e45372e7e880a936c59bbe8e9b66bad81927769e64f608e2af"}, - {file = "har2case-0.3.1.tar.gz", hash = "sha256:8f159ec7cba82ec4282f46af4a9dac89f65e62796521b2426d3c89c3c9fd8579"}, -] httptools = [ {file = "httptools-0.1.1-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce"}, {file = "httptools-0.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4"}, @@ -607,23 +646,16 @@ immutables = [ {file = "immutables-0.11.tar.gz", hash = "sha256:d6850578a0dc6530ac19113cfe4ddc13903df635212d498f176fe601a8a5a4a3"}, ] importlib-metadata = [ - {file = "importlib_metadata-1.3.0-py2.py3-none-any.whl", hash = "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"}, - {file = "importlib_metadata-1.3.0.tar.gz", hash = "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45"}, -] -itsdangerous = [ - {file = "itsdangerous-1.1.0-py2.py3-none-any.whl", hash = "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"}, - {file = "itsdangerous-1.1.0.tar.gz", hash = "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19"}, + {file = "importlib_metadata-1.6.0-py2.py3-none-any.whl", hash = "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f"}, + {file = "importlib_metadata-1.6.0.tar.gz", hash = 
"sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e"}, ] jinja2 = [ {file = "Jinja2-2.10.3-py2.py3-none-any.whl", hash = "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f"}, {file = "Jinja2-2.10.3.tar.gz", hash = "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"}, ] -jsonpath = [ - {file = "jsonpath-0.82.tar.gz", hash = "sha256:46d3fd2016cd5b842283d547877a02c418a0fe9aa7a6b0ae344115a2c990fef4"}, -] -jsonschema = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +jmespath = [ + {file = "jmespath-0.9.5-py2.py3-none-any.whl", hash = "sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec"}, + {file = "jmespath-0.9.5.tar.gz", hash = "sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9"}, ] loguru = [ {file = "loguru-0.4.1-py3-none-any.whl", hash = "sha256:074b3caa6748452c1e4f2b302093c94b65d5a4c5a4d7743636b4121e06437b0e"}, @@ -660,9 +692,24 @@ markupsafe = [ {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] more-itertools = [ - {file = "more-itertools-5.0.0.tar.gz", hash = "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4"}, - {file = "more_itertools-5.0.0-py2-none-any.whl", hash = "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc"}, - {file = "more_itertools-5.0.0-py3-none-any.whl", hash = "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"}, + {file = "more-itertools-8.2.0.tar.gz", hash = "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"}, + {file = "more_itertools-8.2.0-py3-none-any.whl", hash = "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c"}, +] +packaging = [ + {file = 
"packaging-20.3-py2.py3-none-any.whl", hash = "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"}, + {file = "packaging-20.3.tar.gz", hash = "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3"}, +] +pathspec = [ + {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, + {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, +] +pluggy = [ + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, +] +py = [ + {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"}, + {file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"}, ] pydantic = [ {file = "pydantic-1.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:07911aab70f3bc52bb845ce1748569c5e70478ac977e106a150dd9d0465ebf04"}, @@ -680,8 +727,13 @@ pydantic = [ {file = "pydantic-1.4-py36.py37.py38-none-any.whl", hash = "sha256:72184c1421103cca128300120f8f1185fb42a9ea73a1c9845b1c53db8c026a7d"}, {file = "pydantic-1.4.tar.gz", hash = "sha256:f17ec336e64d4583311249fb179528e9a2c27c8a2eaf590ec6ec2c6dece7cb3f"}, ] -pyrsistent = [ - {file = "pyrsistent-0.15.6.tar.gz", hash = "sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b"}, +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pytest = [ + {file = "pytest-5.4.2-py3-none-any.whl", hash = 
"sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3"}, + {file = "pytest-5.4.2.tar.gz", hash = "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698"}, ] pyyaml = [ {file = "PyYAML-5.2-cp27-cp27m-win32.whl", hash = "sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc"}, @@ -696,6 +748,29 @@ pyyaml = [ {file = "PyYAML-5.2-cp38-cp38-win_amd64.whl", hash = "sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803"}, {file = "PyYAML-5.2.tar.gz", hash = "sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c"}, ] +regex = [ + {file = "regex-2020.5.14-cp27-cp27m-win32.whl", hash = "sha256:e565569fc28e3ba3e475ec344d87ed3cd8ba2d575335359749298a0899fe122e"}, + {file = "regex-2020.5.14-cp27-cp27m-win_amd64.whl", hash = "sha256:d466967ac8e45244b9dfe302bbe5e3337f8dc4dec8d7d10f5e950d83b140d33a"}, + {file = "regex-2020.5.14-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:27ff7325b297fb6e5ebb70d10437592433601c423f5acf86e5bc1ee2919b9561"}, + {file = "regex-2020.5.14-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ea55b80eb0d1c3f1d8d784264a6764f931e172480a2f1868f2536444c5f01e01"}, + {file = "regex-2020.5.14-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:c9bce6e006fbe771a02bda468ec40ffccbf954803b470a0345ad39c603402577"}, + {file = "regex-2020.5.14-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:d881c2e657c51d89f02ae4c21d9adbef76b8325fe4d5cf0e9ad62f850f3a98fd"}, + {file = "regex-2020.5.14-cp36-cp36m-win32.whl", hash = "sha256:99568f00f7bf820c620f01721485cad230f3fb28f57d8fbf4a7967ec2e446994"}, + {file = "regex-2020.5.14-cp36-cp36m-win_amd64.whl", hash = "sha256:70c14743320a68c5dac7fc5a0f685be63bc2024b062fe2aaccc4acc3d01b14a1"}, + {file = "regex-2020.5.14-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a7c37f048ec3920783abab99f8f4036561a174f1314302ccfa4e9ad31cb00eb4"}, + {file = "regex-2020.5.14-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:89d76ce33d3266173f5be80bd4efcbd5196cafc34100fdab814f9b228dee0fa4"}, + {file = "regex-2020.5.14-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:51f17abbe973c7673a61863516bdc9c0ef467407a940f39501e786a07406699c"}, + {file = "regex-2020.5.14-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:ce5cc53aa9fbbf6712e92c7cf268274eaff30f6bd12a0754e8133d85a8fb0f5f"}, + {file = "regex-2020.5.14-cp37-cp37m-win32.whl", hash = "sha256:8044d1c085d49673aadb3d7dc20ef5cb5b030c7a4fa253a593dda2eab3059929"}, + {file = "regex-2020.5.14-cp37-cp37m-win_amd64.whl", hash = "sha256:c2062c7d470751b648f1cacc3f54460aebfc261285f14bc6da49c6943bd48bdd"}, + {file = "regex-2020.5.14-cp38-cp38-manylinux1_i686.whl", hash = "sha256:329ba35d711e3428db6b45a53b1b13a0a8ba07cbbcf10bbed291a7da45f106c3"}, + {file = "regex-2020.5.14-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:579ea215c81d18da550b62ff97ee187b99f1b135fd894a13451e00986a080cad"}, + {file = "regex-2020.5.14-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:3a9394197664e35566242686d84dfd264c07b20f93514e2e09d3c2b3ffdf78fe"}, + {file = "regex-2020.5.14-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ce367d21f33e23a84fb83a641b3834dd7dd8e9318ad8ff677fbfae5915a239f7"}, + {file = "regex-2020.5.14-cp38-cp38-win32.whl", hash = "sha256:1386e75c9d1574f6aa2e4eb5355374c8e55f9aac97e224a8a5a6abded0f9c927"}, + {file = "regex-2020.5.14-cp38-cp38-win_amd64.whl", hash = "sha256:7e61be8a2900897803c293247ef87366d5df86bf701083b6c43119c7c6c99108"}, + {file = "regex-2020.5.14.tar.gz", hash = "sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5"}, +] requests = [ {file = "requests-2.22.0-py2.py3-none-any.whl", hash = "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"}, {file = "requests-2.22.0.tar.gz", hash = "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"}, @@ -704,17 +779,40 @@ requests-toolbelt = [ {file = "requests-toolbelt-0.9.1.tar.gz", hash = 
"sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, ] -sentry-sdk = [ - {file = "sentry-sdk-0.13.5.tar.gz", hash = "sha256:c6b919623e488134a728f16326c6f0bcdab7e3f59e7f4c472a90eea4d6d8fe82"}, - {file = "sentry_sdk-0.13.5-py2.py3-none-any.whl", hash = "sha256:05285942901d38c7ce2498aba50d8e87b361fc603281a5902dda98f3f8c5e145"}, -] six = [ - {file = "six-1.13.0-py2.py3-none-any.whl", hash = "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd"}, - {file = "six-1.13.0.tar.gz", hash = "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"}, + {file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"}, + {file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"}, ] starlette = [ {file = "starlette-0.12.9.tar.gz", hash = "sha256:c2ac9a42e0e0328ad20fe444115ac5e3760c1ee2ac1ff8cdb5ec915c4a453411"}, ] +toml = [ + {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, + {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, +] +typed-ast = [ + {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, + {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, + {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, + {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, + {file = 
"typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, + {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, + {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, + {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, + {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, + {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, + {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, + {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, + {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, + {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, + {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, + {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, + {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, + {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, + {file = 
"typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, + {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, + {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, +] urllib3 = [ {file = "urllib3-1.25.7-py2.py3-none-any.whl", hash = "sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293"}, {file = "urllib3-1.25.7.tar.gz", hash = "sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745"}, @@ -734,6 +832,10 @@ uvloop = [ {file = "uvloop-0.14.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4315d2ec3ca393dd5bc0b0089d23101276778c304d42faff5dc4579cb6caef09"}, {file = "uvloop-0.14.0.tar.gz", hash = "sha256:123ac9c0c7dd71464f58f1b4ee0bbd81285d96cdda8bc3519281b8973e3a461e"}, ] +wcwidth = [ + {file = "wcwidth-0.1.9-py2.py3-none-any.whl", hash = "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1"}, + {file = "wcwidth-0.1.9.tar.gz", hash = "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1"}, +] websockets = [ {file = "websockets-8.0.2-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:e906128532a14b9d264a43eb48f9b3080d53a9bda819ab45bf56b8039dc606ac"}, {file = "websockets-8.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:83e63aa73331b9ca21af61df8f115fb5fbcba3f281bee650a4ad16a40cd1ef15"}, @@ -747,15 +849,11 @@ websockets = [ {file = "websockets-8.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:049e694abe33f8a1d99969fee7bfc0ae6761f7fd5f297c58ea933b27dd6805f2"}, {file = "websockets-8.0.2.tar.gz", hash = "sha256:882a7266fa867a2ebb2c0baaa0f9159cabf131cf18c1b4270d79ad42f9208dc5"}, ] -werkzeug = [ - {file = "Werkzeug-0.16.0-py2.py3-none-any.whl", hash = "sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4"}, - {file = "Werkzeug-0.16.0.tar.gz", hash = 
"sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7"}, -] win32-setctime = [ {file = "win32_setctime-1.0.1-py3-none-any.whl", hash = "sha256:568fd636c68350bcc54755213fe01966fe0a6c90b386c0776425944a0382abef"}, {file = "win32_setctime-1.0.1.tar.gz", hash = "sha256:b47e5023ec7f0b4962950902b15bc56464a380d869f59d27dbf9ab423b23e8f9"}, ] zipp = [ - {file = "zipp-0.6.0-py2.py3-none-any.whl", hash = "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"}, - {file = "zipp-0.6.0.tar.gz", hash = "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e"}, + {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"}, + {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"}, ] diff --git a/pyproject.toml b/pyproject.toml index 4ad9d170..bc4657d9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "httprunner" -version = "3.0.0" +version = "3.0.2" description = "One-stop solution for HTTP(S) testing." 
license = "Apache-2.0" readme = "README.md" @@ -33,25 +33,23 @@ requests = "^2.22.0" requests-toolbelt = "^0.9.1" pyyaml = "^5.1.2" jinja2 = "^2.10.3" -har2case = "^0.3.1" filetype = "^1.0.5" -jsonpath = "^0.82" -sentry-sdk = "^0.13.5" -jsonschema = "^3.2.0" pydantic = "^1.4" loguru = "^0.4.1" +jmespath = "^0.9.5" +black = "^19.10b0" +pytest = "^5.4.2" [tool.poetry.dev-dependencies] -flask = "<1.0.0" coverage = "^4.5.4" uvicorn = "^0.11.3" fastapi = "^0.49.0" [tool.poetry.scripts] -hrun = "httprunner.cli:main" -ate = "httprunner.cli:main" httprunner = "httprunner.cli:main" -locusts = "httprunner.ext.locusts.cli:main" +hrun = "httprunner.cli:main_hrun_alias" +hmake = "httprunner.cli:main_make_alias" +har2case = "httprunner.cli:main_har2case_alias" [build-system] requires = ["poetry>=1.0.0"] diff --git a/tests/.env b/tests/.env deleted file mode 100644 index 6bb8d3c3..00000000 --- a/tests/.env +++ /dev/null @@ -1,3 +0,0 @@ -UserName=debugtalk -Password=123456 -PROJECT_KEY=ABCDEFGH \ No newline at end of file diff --git a/tests/api/create_user.yml b/tests/api/create_user.yml deleted file mode 100644 index 6e6ceebc..00000000 --- a/tests/api/create_user.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: create user -variables: - user_name: user0 - user_password: "000000" - uid: 9000 - token: XXX -request: - url: /api/users/$uid - method: POST - headers: - Content-Type: "application/json" - device_sn: $device_sn - token: $token - json: - name: $user_name - password: $user_password -validate: - - eq: ["status_code", 201] diff --git a/tests/api/delete_user.yml b/tests/api/delete_user.yml deleted file mode 100644 index 6fb8ac86..00000000 --- a/tests/api/delete_user.yml +++ /dev/null @@ -1,12 +0,0 @@ -variables: - uid: 9000 - token: XXX -request: - url: /api/users/$uid - method: DELETE - headers: - Content-Type: "application/json" - device_sn: $device_sn - token: $token -validate: - - eq: ["status_code", 200] \ No newline at end of file diff --git a/tests/api/get_headers.yml 
b/tests/api/get_headers.yml deleted file mode 100644 index 271a0cfe..00000000 --- a/tests/api/get_headers.yml +++ /dev/null @@ -1,16 +0,0 @@ -variables: - n_secs: 1 -request: - url: /headers - headers: - Content-Type: "application/json" - device_sn: $device_sn - method: GET -setup_hooks: - - ${setup_hook_add_kwargs($request)} - - ${setup_hook_remove_kwargs($request)} -teardown_hooks: - - ${teardown_hook_sleep_N_secs($response, $n_secs)} -validate: - - eq: ["status_code", 200] - - contained_by: [content.headers.Host, "${get_httpbin_server()}"] diff --git a/tests/api/get_token.yml b/tests/api/get_token.yml deleted file mode 100644 index 24a028bc..00000000 --- a/tests/api/get_token.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: get token -variables: - user_agent: XXX - device_sn: API_XXX - os_platform: XXX - app_version: XXX -request: - url: /api/get-token - method: POST - headers: - user_agent: $user_agent - device_sn: $device_sn - os_platform: $os_platform - app_version: $app_version - Content-Type: "application/json" - device_sn: $device_sn - json: - sign: ${get_sign($device_sn, $os_platform, $app_version)} -validate: - - eq: ["status_code", 0] - - len_eq: ["content.token", 12] - - contains: [{"a": 1, "b": 2}, "a"] diff --git a/tests/api/get_user.yml b/tests/api/get_user.yml deleted file mode 100644 index 3f1ad96a..00000000 --- a/tests/api/get_user.yml +++ /dev/null @@ -1,12 +0,0 @@ -variables: - uid: 9000 - token: XXX -request: - url: /api/users/$uid - method: GET - headers: - Content-Type: "application/json" - device_sn: $device_sn - token: $token -validate: - - eq: ["status_code", 200] \ No newline at end of file diff --git a/tests/api/get_users.yml b/tests/api/get_users.yml deleted file mode 100644 index 37285bff..00000000 --- a/tests/api/get_users.yml +++ /dev/null @@ -1,11 +0,0 @@ -variables: - token: XXX -request: - url: /api/users - method: GET - headers: - Content-Type: "application/json" - device_sn: $device_sn - token: $token -validate: - - eq: 
["status_code", 200] \ No newline at end of file diff --git a/tests/api/reset_all.yml b/tests/api/reset_all.yml deleted file mode 100644 index ca357975..00000000 --- a/tests/api/reset_all.yml +++ /dev/null @@ -1,12 +0,0 @@ -variables: - token: XXX -request: - url: /api/reset-all - method: GET - headers: - Content-Type: "application/json" - device_sn: $device_sn - token: $token -validate: - - eq: ["status_code", 200] - - eq: ["content.success", true] \ No newline at end of file diff --git a/tests/api/update_user.yml b/tests/api/update_user.yml deleted file mode 100644 index 9069f0de..00000000 --- a/tests/api/update_user.yml +++ /dev/null @@ -1,17 +0,0 @@ -variables: - user_name: user0 - user_password: "000000" - uid: 9000 - token: XXX -request: - url: /api/users/$uid - method: PUT - headers: - Content-Type: "application/json" - device_sn: $device_sn - token: $token - json: - name: $user_name - password: $user_password -validate: - - eq: ["status_code", 200] \ No newline at end of file diff --git a/tests/api_server.py b/tests/api_server.py deleted file mode 100644 index 6919b274..00000000 --- a/tests/api_server.py +++ /dev/null @@ -1,236 +0,0 @@ -import hashlib -import hmac -import json -from functools import wraps - -from flask import Flask, make_response, request - -from httprunner.builtin.functions import gen_random_string - -try: - from httpbin import app as httpbin_app - HTTPBIN_HOST = "127.0.0.1" - HTTPBIN_PORT = 3458 - HTTPBIN_SERVER = "http://{}:{}".format(HTTPBIN_HOST, HTTPBIN_PORT) -except ImportError: - httpbin_app = None - HTTPBIN_HOST = "httpbin.org" - HTTPBIN_PORT = 80 - HTTPBIN_SERVER = "http://{}:{}".format(HTTPBIN_HOST, HTTPBIN_PORT) - -FLASK_APP_PORT = 5000 -SECRET_KEY = "DebugTalk" - -app = Flask(__name__) - -""" storage all users' data -data structure: - users_dict = { - 'uid1': { - 'name': 'name1', - 'password': 'pwd1' - }, - 'uid2': { - 'name': 'name2', - 'password': 'pwd2' - } - } -""" -users_dict = {} - -""" storage all token data -data 
structure: - token_dict = { - 'device_sn1': 'token1', - 'device_sn2': 'token1' - } -""" -token_dict = {} - - -def get_sign(*args): - content = ''.join(args).encode('ascii') - sign_key = SECRET_KEY.encode('ascii') - sign = hmac.new(sign_key, content, hashlib.sha1).hexdigest() - return sign - - -def gen_md5(*args): - return hashlib.md5("".join(args).encode('utf-8')).hexdigest() - - -def validate_request(func): - - @wraps(func) - def wrapper(*args, **kwargs): - device_sn = request.headers.get('device_sn', "") - token = request.headers.get('token', "") - - if not device_sn or not token: - result = { - 'success': False, - 'msg': "device_sn or token is null." - } - response = make_response(json.dumps(result), 401) - response.headers["Content-Type"] = "application/json" - return response - - if token_dict[device_sn] != token: - result = { - 'success': False, - 'msg': "Authorization failed!" - } - response = make_response(json.dumps(result), 403) - response.headers["Content-Type"] = "application/json" - return response - - return func(*args, **kwargs) - - return wrapper - - -@app.route('/') -def index(): - return "Hello World!" - - -@app.route('/api/get-token', methods=['POST']) -def get_token(): - device_sn = request.headers.get('device_sn', "") - os_platform = request.headers.get('os_platform', "") - app_version = request.headers.get('app_version', "") - data = request.get_json() - sign = data.get('sign', "") - - expected_sign = get_sign(device_sn, os_platform, app_version) - if expected_sign != sign: - result = { - 'success': False, - 'msg': "Authorization failed!" 
- } - response = make_response(json.dumps(result), 403) - else: - token = gen_random_string(16) - token_dict[device_sn] = token - - result = { - 'success': True, - 'token': token - } - response = make_response(json.dumps(result)) - - response.headers["Content-Type"] = "application/json" - return response - - -@app.route('/api/users') -@validate_request -def get_users(): - users_list = [user for uid, user in users_dict.items()] - users = { - 'success': True, - 'count': len(users_list), - 'items': users_list - } - response = make_response(json.dumps(users)) - response.headers["Content-Type"] = "application/json" - return response - - -@app.route('/api/reset-all') -@validate_request -def clear_users(): - users_dict.clear() - result = { - 'success': True - } - response = make_response(json.dumps(result)) - response.headers["Content-Type"] = "application/json" - return response - - -@app.route('/api/users/', methods=['POST']) -@validate_request -def create_user(uid): - user = request.get_json() - if uid not in users_dict: - result = { - 'success': True, - 'msg': "user created successfully." - } - status_code = 201 - users_dict[uid] = user - else: - result = { - 'success': False, - 'msg': "user already existed." 
- } - status_code = 500 - - response = make_response(json.dumps(result), status_code) - response.headers["Content-Type"] = "application/json" - return response - - -@app.route('/api/users/') -@validate_request -def get_user(uid): - user = users_dict.get(uid, {}) - if user: - result = { - 'success': True, - 'data': user - } - status_code = 200 - else: - result = { - 'success': False, - 'data': user - } - status_code = 404 - - response = make_response(json.dumps(result), status_code) - response.headers["Content-Type"] = "application/json" - return response - - -@app.route('/api/users/', methods=['PUT']) -@validate_request -def update_user(uid): - user = users_dict.get(uid, {}) - if user: - user = request.get_json() - success = True - status_code = 200 - users_dict[uid] = user - else: - success = False - status_code = 404 - - result = { - 'success': success, - 'data': user - } - response = make_response(json.dumps(result), status_code) - response.headers["Content-Type"] = "application/json" - return response - - -@app.route('/api/users/', methods=['DELETE']) -@validate_request -def delete_user(uid): - user = users_dict.pop(uid, {}) - if user: - success = True - status_code = 200 - else: - success = False - status_code = 404 - - result = { - 'success': success, - 'data': user - } - response = make_response(json.dumps(result), status_code) - response.headers["Content-Type"] = "application/json" - return response diff --git a/tests/base.py b/tests/base.py deleted file mode 100644 index 86da90fb..00000000 --- a/tests/base.py +++ /dev/null @@ -1,75 +0,0 @@ -import multiprocessing -import time -import unittest - -import requests - -from tests.api_server import FLASK_APP_PORT, HTTPBIN_HOST, HTTPBIN_PORT -from tests.api_server import app as flask_app -from tests.api_server import gen_random_string, get_sign, httpbin_app - - -def run_flask(): - flask_app.run(port=FLASK_APP_PORT) - - -def run_httpbin(): - if httpbin_app: - httpbin_app.run(host=HTTPBIN_HOST, port=HTTPBIN_PORT) - 
- -class ApiServerUnittest(unittest.TestCase): - """ Test case class that sets up an HTTP server which can be used within the tests - """ - - @classmethod - def setUpClass(cls): - cls.host = "http://127.0.0.1:5000" - cls.flask_process = multiprocessing.Process( - target=run_flask - ) - cls.httpbin_process = multiprocessing.Process( - target=run_httpbin - ) - cls.flask_process.start() - cls.httpbin_process.start() - time.sleep(1) - cls.api_client = requests.Session() - - @classmethod - def tearDownClass(cls): - cls.flask_process.terminate() - cls.httpbin_process.terminate() - - def get_token(self, user_agent, device_sn, os_platform, app_version): - url = "%s/api/get-token" % self.host - headers = { - 'Content-Type': 'application/json', - 'User-Agent': user_agent, - 'device_sn': device_sn, - 'os_platform': os_platform, - 'app_version': app_version - } - data = { - 'sign': get_sign(device_sn, os_platform, app_version) - } - - resp = self.api_client.post(url, json=data, headers=headers) - resp_json = resp.json() - self.assertTrue(resp_json["success"]) - self.assertIn("token", resp_json) - self.assertEqual(len(resp_json["token"]), 16) - return resp_json["token"] - - def get_authenticated_headers(self): - user_agent = 'iOS/10.3' - device_sn = gen_random_string(15) - os_platform = 'ios' - app_version = '2.8.6' - - token = self.get_token(user_agent, device_sn, os_platform, app_version) - headers = { - 'device_sn': device_sn, - 'token': token - } - return headers diff --git a/tests/data/app_version.csv b/tests/data/app_version.csv deleted file mode 100644 index 3c1c5e2e..00000000 --- a/tests/data/app_version.csv +++ /dev/null @@ -1,4 +0,0 @@ -app_version -2.8.5 -2.8.6 - diff --git a/tests/data/bugfix_type_match.yml b/tests/data/bugfix_type_match.yml deleted file mode 100644 index 5d3bd3d2..00000000 --- a/tests/data/bugfix_type_match.yml +++ /dev/null @@ -1,12 +0,0 @@ -- config: - name: "bugfix testcases." 
- base_url: http://127.0.0.1:5000 - -- test: - name: bugfix type_match #84 - request: - url: http://127.0.0.1:5000/api/users/1000 - method: GET - validate: - - eq: [status_code, 401] - - type_match: [status_code, int] diff --git a/tests/data/bugfix_verify.yml b/tests/data/bugfix_verify.yml deleted file mode 100644 index d3cd0c47..00000000 --- a/tests/data/bugfix_verify.yml +++ /dev/null @@ -1,13 +0,0 @@ -- config: - name: basic test with httpbin - base_url: https://httpbin.org/ - verify: False - -- test: - name: headers - request: - url: /headers - method: GET - validate: - - eq: ["status_code", 200] - - eq: [content.headers.Host, "httpbin.org"] diff --git a/tests/data/demo_testcase.yml b/tests/data/demo_testcase.yml deleted file mode 100644 index 2a495b98..00000000 --- a/tests/data/demo_testcase.yml +++ /dev/null @@ -1,21 +0,0 @@ -- config: - name: "123t$var_a" - variables: - var_a: 1 - var_b: 2 - var_c: "${sum_two($var_a, $var_b)}" - var_d: "${gen_random_string(5)}" - var_e: $var_d - PROJECT_KEY: ${ENV(PROJECT_KEY)} - -- test: - name: testcase1-$var_a - request: - url: /api1 - method: GET - headers: - var_a: $var_a - var_b: $var_b - var_c: $var_c - validate: - - {"check": "status_code", "comparator": "eq", "expect": 200} diff --git a/tests/data/demo_testcase_cli.yml b/tests/data/demo_testcase_cli.yml deleted file mode 100644 index 27db9390..00000000 --- a/tests/data/demo_testcase_cli.yml +++ /dev/null @@ -1,131 +0,0 @@ -- test: - name: get token - request: - url: http://127.0.0.1:5000/api/get-token - method: POST - headers: - Content-Type: application/json - user_agent: 'iOS/10.3' - device_sn: 'HZfFBh6tU59EdXJ' - os_platform: 'ios' - app_version: '2.8.6' - json: - sign: 5188962c489d1a35effa99e9346dd5efd4fdabad - variables: - expect_status_code: 200 - token_len: 16 - extract: - - token: content.token - validate: - - {"check": "status_code", "comparator": "eq", "expect": 200} - - eq: ["status_code", $expect_status_code] - - {"check": "$token", "comparator": 
"len_eq", "expect": 16} - - len_eq: ["$token", $token_len] - - len_eq: ["content.token", 16] - - {"check": "status_code", "comparator": "sum_status_code", "expect": 2} - - sum_status_code: ["status_code", 2] - -- test: - name: create user which does not exist - request: - url: http://127.0.0.1:5000/api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: 'HZfFBh6tU59EdXJ' - token: $token - json: - name: "user1" - password: "123456" - extract: - - success: content.success - validate: - - eq: ["status_code", 201] - - sum_status_code: ["status_code", 3] - - eq: ["$success", True] - - eq: ["abc$success", "abcTrue"] - - {"check": "status_code", "comparator": "eq", "expect": 201} - - {"check": "content.success", "comparator": "eq", "expect": true} - -- test: - name: create user which existed - times: 2 - request: - url: http://127.0.0.1:5000/api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: 'HZfFBh6tU59EdXJ' - token: $token - json: - name: "user1" - password: "123456" - validate: - - "eq": ["status_code", 500] - - sum_status_code: ["status_code", 5] - - "eq": ["content.success", false] - - {"check": "status_code", "comparator": "eq", "expect": 500} - - {"check": "content.success", "comparator": "eq", "expect": false} - -- test: - name: create user which existed (skip unconditionally) - skip: skip this test unconditionally - times: 2 - request: - url: http://127.0.0.1:5000/api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: 'HZfFBh6tU59EdXJ' - token: $token - json: - name: "user1" - password: "123456" - validate: - - "eq": ["status_code", 500] - - sum_status_code: ["status_code", 5] - - "eq": ["content.success", false] - - {"check": "status_code", "comparator": "eq", "expect": 500} - - {"check": "content.success", "comparator": "eq", "expect": false} - -- test: - name: create user which existed (skip if condition) - skipIf: ${skip_test_in_production_env()} - times: 2 - 
request: - url: http://127.0.0.1:5000/api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: 'HZfFBh6tU59EdXJ' - token: $token - json: - name: "user1" - password: "123456" - validate: - - "eq": ["status_code", 500] - - sum_status_code: ["status_code", 5] - - "eq": ["content.success", false] - - {"check": "status_code", "comparator": "eq", "expect": 500} - - {"check": "content.success", "comparator": "eq", "expect": false} - -- test: - name: create user which existed (skip unless condition) - skipUnless: ${skip_test_in_production_env()} - times: 2 - request: - url: http://127.0.0.1:5000/api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: 'HZfFBh6tU59EdXJ' - token: $token - json: - name: "user1" - password: "123456" - validate: - - "eq": ["status_code", 500] - - sum_status_code: ["status_code", 5] - - "eq": ["content.success", false] - - {"check": "status_code", "comparator": "eq", "expect": 500} - - {"check": "content.success", "comparator": "eq", "expect": false} diff --git a/tests/data/demo_testcase_functions.yml b/tests/data/demo_testcase_functions.yml deleted file mode 100644 index 801b618b..00000000 --- a/tests/data/demo_testcase_functions.yml +++ /dev/null @@ -1,61 +0,0 @@ -- config: - name: "create user testcases." 
- variables: - user_agent: 'iOS/10.3' - device_sn: ${gen_random_string(15)} - os_platform: 'ios' - app_version: '2.8.6' - base_url: ${get_base_url()} - -- test: - name: get token - request: - url: /api/get-token - method: POST - headers: - user_agent: $user_agent - device_sn: $device_sn - os_platform: $os_platform - app_version: $app_version - json: - sign: ${get_sign($device_sn, $os_platform, $app_version)} - extract: - - token: content.token - validate: - - {"check": "status_code", "comparator": "eq", "expect": 200} - - {"check": "content.token", "comparator": "len_eq", "expect": 16} - -- test: - name: create user which does not exist - variables: - user_name: "user1" - user_password: "123456" - request: - url: /api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: $device_sn - token: $token - json: - name: $user_name - password: $user_password - validate: - - {"check": "status_code", "comparator": "eq", "expect": 201} - - {"check": "content.success", "comparator": "eq", "expect": true} - -- test: - name: create user which does not exist - request: - url: /api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: $device_sn - token: $token - json: - name: "user1" - password: "123456" - validate: - - {"check": "status_code", "comparator": "eq", "expect": 500} - - {"check": "content.success", "comparator": "eq", "expect": false} diff --git a/tests/data/demo_testcase_hardcode.json b/tests/data/demo_testcase_hardcode.json deleted file mode 100644 index efe42738..00000000 --- a/tests/data/demo_testcase_hardcode.json +++ /dev/null @@ -1,87 +0,0 @@ -[ - { - "test": { - "name": "get token", - "request": { - "url": "http://127.0.0.1:5000/api/get-token", - "method": "POST", - "headers": { - "content-type": "application/json", - "user_agent": "iOS/10.3", - "device_sn": "HZfFBh6tU59EdXJ", - "os_platform": "ios", - "app_version": "2.8.6" - }, - "json": { - "sign": "5188962c489d1a35effa99e9346dd5efd4fdabad" - } - }, 
- "variables": [ - {"expect_status_code": 200}, - {"token_len": 16} - ], - "extract": { - "token": "content.token" - }, - "validate": [ - {"check": "status_code", "comparator": "eq", "expect": 200}, - {"eq": ["status_code", "$expect_status_code"]}, - {"check": "$token", "comparator": "len_eq", "expect": 16}, - {"len_eq": ["$token", "$token_len"]}, - {"len_eq": ["content.token", 16]}, - {"check": "status_code", "comparator": "sum_status_code", "expect": 2}, - {"sum_status_code": ["status_code", 2]} - ] - } - }, - { - "test": { - "name": "create user which does not exist", - "request": { - "url": "http://127.0.0.1:5000/api/users/2000", - "method": "POST", - "headers": { - "content-type": "application/json", - "device_sn": "HZfFBh6tU59EdXJ", - "token": "$token" - }, - "json": { - "name": "user1", - "password": "123456" - } - }, - "validate": [ - {"eq": ["status_code", 201]}, - {"eq": ["content.success", true]}, - {"check": "status_code", "comparator": "eq", "expect": 201}, - {"sum_status_code": ["status_code", 3]}, - {"check": "content.success", "comparator": "eq", "expect": true} - ] - } - }, - { - "test": { - "name": "create user which existed", - "request": { - "url": "http://127.0.0.1:5000/api/users/2000", - "method": "POST", - "headers": { - "content-type": "application/json", - "device_sn": "HZfFBh6tU59EdXJ", - "token": "$token" - }, - "json": { - "name": "user1", - "password": "123456" - } - }, - "validate": [ - {"eq": ["status_code", 500]}, - {"eq": ["content.success", false]}, - {"check": "status_code", "comparator": "eq", "expect": 500}, - {"sum_status_code": ["status_code", 5]}, - {"check": "content.success", "comparator": "eq", "expect": false} - ] - } - } -] \ No newline at end of file diff --git a/tests/data/demo_testcase_hardcode.yml b/tests/data/demo_testcase_hardcode.yml deleted file mode 100644 index 05df6b4a..00000000 --- a/tests/data/demo_testcase_hardcode.yml +++ /dev/null @@ -1,67 +0,0 @@ -- test: - name: get token - request: - url: 
http://127.0.0.1:5000/api/get-token - method: POST - headers: - Content-Type: application/json - user_agent: 'iOS/10.3' - device_sn: 'HZfFBh6tU59EdXJ' - os_platform: 'ios' - app_version: '2.8.6' - json: - sign: 5188962c489d1a35effa99e9346dd5efd4fdabad - variables: - expect_status_code: 200 - token_len: 16 - extract: - token: content.token - validate: - - {"check": "status_code", "comparator": "eq", "expect": 200} - - eq: ["status_code", $expect_status_code] - - {"check": "$token", "comparator": "len_eq", "expect": 16} - - len_eq: ["$token", $token_len] - - len_eq: ["content.token", 16] - - {"check": "status_code", "comparator": "sum_status_code", "expect": 2} - - sum_status_code: ["status_code", 2] - -- test: - name: create user which does not exist - request: - url: http://127.0.0.1:5000/api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: 'HZfFBh6tU59EdXJ' - token: $token - json: - name: "user1" - password: "123456" - extract: - success: content.success - validate: - - eq: ["status_code", 201] - - sum_status_code: ["status_code", 3] - - eq: ["$success", True] - - eq: ["abc$success", "abcTrue"] - - {"check": "status_code", "comparator": "eq", "expect": 201} - - {"check": "content.success", "comparator": "eq", "expect": true} - -- test: - name: create user which existed - request: - url: http://127.0.0.1:5000/api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: 'HZfFBh6tU59EdXJ' - token: $token - json: - name: "user1" - password: "123456" - validate: - - "eq": ["status_code", 500] - - sum_status_code: ["status_code", 5] - - "eq": ["content.success", false] - - {"check": "status_code", "comparator": "eq", "expect": 500} - - {"check": "content.success", "comparator": "eq", "expect": false} \ No newline at end of file diff --git a/tests/data/demo_testcase_layer.yml b/tests/data/demo_testcase_layer.yml deleted file mode 100644 index 48145514..00000000 --- a/tests/data/demo_testcase_layer.yml +++ 
/dev/null @@ -1,146 +0,0 @@ -- config: - name: "user management testcase." - variables: - user_agent: 'iOS/10.3' - device_sn: ${gen_random_string(15)} - os_platform: 'ios' - app_version: '2.8.6' - base_url: ${get_base_url()} - export: - - token - -- test: - name: get token with $user_agent, $app_version - api: api/get_token.yml - extract: - - token: content.token - validate: - - "eq": ["status_code", 200] - - "len_eq": ["content.token", 16] - - "contains": [{"a": 1, "b": 2}, "b"] - -- test: - name: reset all users - api: api/reset_all.yml - variables: - token: $token - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.success", "expect": true} - -- test: - name: get user that does not exist - api: api/get_user.yml - variables: - uid: 1000 - token: $token - validate: - - {"check": "status_code", "expect": 404} - - {"check": "content.success", "expect": false} - -- test: - name: create user which does not exist - variables: - uid: 1000 - user_name: "user1" - user_password: "123456" - token: $token - api: api/create_user.yml - validate: - - {"check": "status_code", "expect": 201} - - {"check": "content.success", "expect": true} - -- test: - name: get user that has been created - api: api/get_user.yml - variables: - uid: 1000 - token: $token - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.success", "expect": true} - - {"check": "content.data.password", "expect": "123456"} - -- test: - name: create user which exists - variables: - uid: 1000 - user_name: "user1" - user_password: "123456" - token: $token - api: api/create_user.yml - validate: - - {"check": "status_code", "expect": 500} - - {"check": "content.success", "expect": false} - -- test: - name: update user which exists - variables: - uid: 1000 - user_name: "user1" - user_password: "654321" - token: $token - api: api/update_user.yml - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.success", "expect": true} - -- test: - name: 
get user that has been updated - api: api/get_user.yml - variables: - uid: 1000 - token: $token - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.success", "expect": true} - - {"check": "content.data.password", "expect": "654321"} - -- test: - name: get users - api: api/get_users.yml - variables: - token: $token - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.count", "expect": 1} - -- test: - name: delete user that exists - api: api/delete_user.yml - variables: - uid: 1000 - token: $token - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.success", "expect": true} - -- test: - name: get users - api: api/get_users.yml - variables: - token: $token - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.count", "expect": 0} - -- test: - name: create user which has been deleted - variables: - uid: 1000 - user_name: "user1" - user_password: "123456" - token: $token - api: api/create_user.yml - validate: - - {"check": "status_code", "expect": 201} - - {"check": "content.success", "expect": true} - -- test: - name: get users - api: api/get_users.yml - variables: - token: $token - validate: - - {"check": "status_code", "expect": 200} - - {"check": "content.count", "expect": 1} diff --git a/tests/data/demo_testcase_variables.yml b/tests/data/demo_testcase_variables.yml deleted file mode 100644 index 2510d514..00000000 --- a/tests/data/demo_testcase_variables.yml +++ /dev/null @@ -1,64 +0,0 @@ -- config: - name: "create user testcases." 
- variables: - device_sn: 'HZfFBh6tU59EdXJ' - base_url: ${get_base_url()} - -- test: - name: get token - variables: - user_agent: 'iOS/10.3' - os_platform: 'ios' - app_version: '2.8.6' - sign: 5188962c489d1a35effa99e9346dd5efd4fdabad - request: - url: /api/get-token - method: POST - headers: - Content-Type: application/json - user_agent: $user_agent - device_sn: $device_sn - os_platform: $os_platform - app_version: $app_version - json: - sign: $sign - extract: - - token: content.token - validate: - - {"check": "status_code", "comparator": "eq", "expect": 200} - - {"check": "content.token", "comparator": "len_eq", "expect": 16} - -- test: - name: create user which does not exist - variables: - user_name: "user1" - user_password: "123456" - request: - url: /api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: $device_sn - token: $token - json: - name: $user_name - password: $user_password - validate: - - {"check": "status_code", "comparator": "eq", "expect": 201} - - {"check": "content.success", "comparator": "eq", "expect": true} - -- test: - name: create user which does not exist - request: - url: /api/users/1000 - method: POST - headers: - Content-Type: application/json - device_sn: $device_sn - token: $token - json: - name: "user1" - password: "123456" - validate: - - {"check": "status_code", "comparator": "eq", "expect": 500} - - {"check": "content.success", "comparator": "eq", "expect": false} diff --git a/tests/httpbin/a.b.c/rpc.yml b/tests/httpbin/a.b.c/rpc.yml deleted file mode 100644 index ef31c546..00000000 --- a/tests/httpbin/a.b.c/rpc.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: rpc api -base_url: http://httpbin.org -variables: - expected_status_code: 200 -request: - url: /headers - method: GET -validate: - - eq: ["status_code", $expected_status_code] - - eq: [content.headers.Host, "httpbin.org"] diff --git a/tests/httpbin/api/302_redirect.yml b/tests/httpbin/api/302_redirect.yml deleted file mode 100644 index 
5678b158..00000000 --- a/tests/httpbin/api/302_redirect.yml +++ /dev/null @@ -1,11 +0,0 @@ - -name: 302 redirect -request: - url: https://httpbin.org/redirect-to - params: - url: https://github.com - status_code: 302 - method: GET - verify: False -validate: - - eq: ["status_code", 200] diff --git a/tests/httpbin/api/302_redirect_teardown_hook.yml b/tests/httpbin/api/302_redirect_teardown_hook.yml deleted file mode 100644 index 329d6d48..00000000 --- a/tests/httpbin/api/302_redirect_teardown_hook.yml +++ /dev/null @@ -1,13 +0,0 @@ -name: 302 redirect -request: - url: https://httpbin.org/redirect-to - params: - url: https://github.com - status_code: 302 - method: GET - verify: False -teardown_hooks: - - ${alter_response_302($response)} -validate: - - eq: ["status_code", 500] - - eq: ["text","abcdef"] \ No newline at end of file diff --git a/tests/httpbin/api/get_headers.yml b/tests/httpbin/api/get_headers.yml deleted file mode 100644 index a05e5bd5..00000000 --- a/tests/httpbin/api/get_headers.yml +++ /dev/null @@ -1,11 +0,0 @@ - -name: get headers -base_url: http://httpbin.org -variables: - expected_status_code: 200 -request: - url: /headers - method: GET -validate: - - eq: ["status_code", $expected_status_code] - - eq: [content.headers.Host, "httpbin.org"] diff --git a/tests/httpbin/upload.yml b/tests/httpbin/upload.yml deleted file mode 100644 index a858cb05..00000000 --- a/tests/httpbin/upload.yml +++ /dev/null @@ -1,29 +0,0 @@ -- config: - name: test upload file with httpbin - base_url: ${get_httpbin_server()} - -- test: - name: upload file - variables: - file_path: "data/test.env" - m_encoder: ${multipart_encoder(file=$file_path)} - request: - url: /post - method: POST - headers: - Content-Type: ${multipart_content_type($m_encoder)} - data: $m_encoder - validate: - - eq: ["status_code", 200] - - startswith: ["content.files.file", "UserName=test"] - -- test: - name: upload file with keyword - request: - url: /post - method: POST - upload: - file: "data/test.env" 
- validate: - - eq: ["status_code", 200] - - startswith: ["content.files.file", "UserName=test"] diff --git a/tests/locust_tests/demo_locusts.yml b/tests/locust_tests/demo_locusts.yml deleted file mode 100644 index 4ae14cde..00000000 --- a/tests/locust_tests/demo_locusts.yml +++ /dev/null @@ -1,18 +0,0 @@ -config: - name: create users with uid - variables: - - device_sn: ${gen_random_string(15)} - base_url: "http://127.0.0.1:5000" - -testcases: - create user 1000 and check result.: - testcase: testcases/create_user.yml - weight: 2 - variables: - uid: 1000 - - create user 1001 and check result.: - testcase: testcases/create_user.yml - weight: 3 - variables: - uid: 1001 diff --git a/tests/test_api.py b/tests/test_api.py deleted file mode 100644 index 948f629d..00000000 --- a/tests/test_api.py +++ /dev/null @@ -1,799 +0,0 @@ -import json -import os -import re -import shutil -import time - -from httprunner import exceptions, loader, parser, report -from httprunner.api import HttpRunner -from tests.api_server import HTTPBIN_SERVER -from tests.base import ApiServerUnittest - - -class TestHttpRunner(ApiServerUnittest): - - def setUp(self): - self.testcase_cli_path = "tests/data/demo_testcase_cli.yml" - self.testcase_file_path_list = [ - os.path.join( - os.getcwd(), 'tests/data/demo_testcase_hardcode.yml'), - os.path.join( - os.getcwd(), 'tests/data/demo_testcase_hardcode.json') - ] - testcases = [{ - 'config': { - 'name': 'testcase description', - 'request': { - 'base_url': '', - 'headers': {'User-Agent': 'python-requests/2.18.4'} - }, - 'variables': [] - }, - "teststeps": [ - { - 'name': '/api/get-token', - 'request': { - 'url': 'http://127.0.0.1:5000/api/get-token', - 'method': 'POST', - 'headers': {'Content-Type': 'application/json', 'app_version': '2.8.6', - 'device_sn': 'FwgRiO7CNA50DSU', 'os_platform': 'ios', 'user_agent': 'iOS/10.3'}, - 'json': {'sign': '9c0c7e51c91ae963c833a4ccbab8d683c4a90c98'} - }, - 'extract': [ - {'token': 'content.token'} - ], - 'validate': [ 
- {'eq': ['status_code', 200]}, - {'eq': ['headers.Content-Type', 'application/json']}, - {'eq': ['content.success', True]} - ] - }, - { - 'name': '/api/users/1000', - 'request': { - 'url': 'http://127.0.0.1:5000/api/users/1000', - 'method': 'POST', - 'headers': {'Content-Type': 'application/json', - 'device_sn': 'FwgRiO7CNA50DSU','token': '$token'}, - 'json': {'name': 'user1', 'password': '123456'} - }, - 'validate': [ - {'eq': ['status_code', 201]}, - {'eq': ['headers.Content-Type', 'application/json']}, - {'eq': ['content.success', True]}, - {'eq': ['content.msg', 'user created successfully.']} - ] - } - ] - }] - self.tests_mapping = { - "testcases": testcases - } - self.runner = HttpRunner(failfast=True) - self.reset_all() - - def reset_all(self): - url = "%s/api/reset-all" % self.host - headers = self.get_authenticated_headers() - return self.api_client.get(url, headers=headers) - - def test_text_run_times(self): - summary = self.runner.run(self.testcase_cli_path) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 10) - - def test_text_run_times_invalid(self): - testcases = [ - { - "config": { - 'name': "post data", - 'variables': [] - }, - "teststeps": [ - { - "name": "post data", - "times": "1.5", - "request": { - "url": "{}/post".format(HTTPBIN_SERVER), - "method": "POST", - "headers": { - "User-Agent": "python-requests/2.18.4", - "Content-Type": "application/json" - }, - "data": "abc" - }, - "validate": [ - {"eq": ["status_code", 200]} - ] - } - ] - } - ] - tests_mapping = { - "testcases": testcases - } - with self.assertRaises(exceptions.ParamsError): - self.runner.run_tests(tests_mapping) - - def test_text_skip(self): - summary = self.runner.run(self.testcase_cli_path) - self.assertEqual(summary["stat"]["teststeps"]["skipped"], 4) - - def test_save_variables_output(self): - testcases = [ - { - "config": { - 'name': "post data", - 'variables': { - "var1": "abc", - "var2": "def" - }, - 
"export": ["status_code", "req_data"] - }, - "teststeps": [ - { - "name": "post data", - "request": { - "url": "{}/post".format(HTTPBIN_SERVER), - "method": "POST", - "headers": { - "User-Agent": "python-requests/2.18.4", - "Content-Type": "application/json" - }, - "data": "$var1" - }, - "extract": { - "status_code": "status_code", - "req_data": "content.data" - }, - "validate": [ - {"eq": ["status_code", 200]} - ] - } - ] - } - ] - tests_mapping = { - "testcases": testcases - } - self.runner.run_tests(tests_mapping) - vars_out = self.runner.get_vars_out() - self.assertIsInstance(vars_out, list) - self.assertEqual(vars_out[0]["in"]["var1"], "abc") - self.assertEqual(vars_out[0]["in"]["var2"], "def") - self.assertEqual(vars_out[0]["out"]["status_code"], 200) - self.assertEqual(vars_out[0]["out"]["req_data"], "abc") - - def test_save_variables_output_with_parameters(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/testsuites/create_users_with_parameters.yml') - self.runner.run(testcase_file_path) - vars_out = self.runner.get_vars_out() - self.assertIsInstance(vars_out, list) - self.assertEqual(len(vars_out), 6) - self.assertEqual(vars_out[0]["in"]["uid"], 101) - self.assertEqual(vars_out[0]["in"]["device_sn"], "TESTSUITE_X1") - token1 = vars_out[0]["out"]["session_token"] - self.assertEqual(len(token1), 16) - self.assertEqual(vars_out[5]["in"]["uid"], 103) - self.assertEqual(vars_out[5]["in"]["device_sn"], "TESTSUITE_X2") - token2 = vars_out[0]["out"]["session_token"] - self.assertEqual(len(token2), 16) - self.assertEqual(token1, token2) - - def test_html_report(self): - runner = HttpRunner(failfast=True) - summary = runner.run(self.testcase_cli_path) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 10) - self.assertEqual(summary["stat"]["teststeps"]["skipped"], 4) - - report_save_dir = os.path.join(os.getcwd(), 'reports', "demo") - report.gen_html_report(summary, 
report_dir=report_save_dir) - self.assertGreater(len(os.listdir(report_save_dir)), 0) - shutil.rmtree(report_save_dir) - - def test_html_report_with_fixed_report_file(self): - runner = HttpRunner(failfast=True) - summary = runner.run(self.testcase_cli_path) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 10) - self.assertEqual(summary["stat"]["teststeps"]["skipped"], 4) - - report_file = os.path.join(os.getcwd(), 'reports', "demo", "test.html") - report.gen_html_report(summary, report_file=report_file) - report_save_dir = os.path.dirname(report_file) - self.assertEqual(len(os.listdir(report_save_dir)), 1) - self.assertTrue(os.path.isfile(report_file)) - shutil.rmtree(report_save_dir) - - def test_log_file(self): - log_file_path = os.path.join(os.getcwd(), 'reports', "test_log_file.log") - runner = HttpRunner(failfast=True, log_file=log_file_path) - runner.run(self.testcase_cli_path) - self.assertTrue(os.path.isfile(log_file_path)) - os.remove(log_file_path) - - def test_run_testcases(self): - summary = self.runner.run_tests(self.tests_mapping) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 2) - self.assertIn("details", summary) - self.assertIn("records", summary["details"][0]) - - def test_run_yaml_upload(self): - upload_cases_list = [ - "tests/httpbin/upload.yml", - "tests/httpbin/upload.v2.yml" - ] - for upload_case in upload_cases_list: - summary = self.runner.run(upload_case) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 2) - self.assertIn("details", summary) - self.assertIn("records", summary["details"][0]) - - def test_run_post_data(self): - testcases = [ - { - "config": { - 'name': "post data", - 'variables': [] - }, - "teststeps": [ - { - "name": "post data", - 
"request": { - "url": "{}/post".format(HTTPBIN_SERVER), - "method": "POST", - "headers": { - "User-Agent": "python-requests/2.18.4", - "Content-Type": "application/json" - }, - "data": "abc" - }, - "validate": [ - {"eq": ["status_code", 200]} - ] - } - ] - } - ] - tests_mapping = { - "testcases": testcases - } - summary = self.runner.run_tests(tests_mapping) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 1) - resp_json = json.loads(summary["details"][0]["records"][0]["meta_datas"]["data"][0]["response"]["body"]) - self.assertEqual( - resp_json["data"], - "abc" - ) - - def test_html_report_repsonse_image(self): - runner = HttpRunner(failfast=True) - summary = runner.run("tests/httpbin/load_image.yml") - - report_save_dir = os.path.join(os.getcwd(), 'reports', "demo") - report_path = report.gen_html_report(summary, report_dir=report_save_dir) - self.assertTrue(os.path.isfile(report_path)) - shutil.rmtree(report_save_dir) - - def test_testcase_layer_with_api(self): - summary = self.runner.run("tests/testcases/setup.yml") - self.assertTrue(summary["success"]) - self.assertEqual(summary["details"][0]["records"][0]["name"], "get token (setup)") - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 2) - - def test_testcase_layer_with_testcase(self): - summary = self.runner.run("tests/testsuites/create_users.yml") - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 2) - self.assertEqual(summary["stat"]["teststeps"]["total"], 4) - - def test_validate_script(self): - summary = self.runner.run("tests/httpbin/validate.yml") - self.assertFalse(summary["success"]) - - def test_run_httprunner_with_hooks(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/httpbin/hooks.yml') - start_time = time.time() - summary = self.runner.run(testcase_file_path) - 
end_time = time.time() - self.assertTrue(summary["success"]) - self.assertLess(end_time - start_time, 60) - - def test_run_httprunner_with_teardown_hooks_alter_response(self): - testcases = [ - { - "config": {"name": "test teardown hooks"}, - "teststeps": [ - { - "name": "test teardown hooks", - "request": { - "url": "{}/headers".format(HTTPBIN_SERVER), - "method": "GET", - "data": "abc" - }, - "teardown_hooks": [ - "${alter_response($response)}" - ], - "validate": [ - {"eq": ["status_code", 500]}, - {"eq": ["headers.content-type", "html/text"]}, - {"eq": ["json.headers.Host", "127.0.0.1:8888"]}, - {"eq": ["content.headers.Host", "127.0.0.1:8888"]}, - {"eq": ["text.headers.Host", "127.0.0.1:8888"]}, - {"eq": ["new_attribute", "new_attribute_value"]}, - {"eq": ["new_attribute_dict", {"key": 123}]}, - {"eq": ["new_attribute_dict.key", 123]} - ] - } - ] - } - ] - - tests_mapping = { - "project_mapping": loader.load_project_data("tests"), - "testcases": testcases - } - summary = self.runner.run_tests(tests_mapping) - self.assertTrue(summary["success"]) - - def test_run_httprunner_with_teardown_hooks_not_exist_attribute(self): - testcases = [ - { - "config": { - "name": "test teardown hooks" - }, - "teststeps": [ - { - "name": "test teardown hooks", - "request": { - "url": "{}/headers".format(HTTPBIN_SERVER), - "method": "GET", - "data": "abc" - }, - "teardown_hooks": [ - "${alter_response($response)}" - ], - "validate": [ - {"eq": ["attribute_not_exist", "new_attribute"]} - ] - } - ] - } - ] - tests_mapping = { - "project_mapping": loader.load_project_data("tests"), - "testcases": testcases - } - summary = self.runner.run_tests(tests_mapping) - self.assertFalse(summary["success"]) - self.assertEqual(summary["stat"]["teststeps"]["errors"], 1) - - def test_run_httprunner_with_teardown_hooks_error(self): - testcases = [ - { - "config": { - "name": "test teardown hooks" - }, - "teststeps": [ - { - "name": "test teardown hooks", - "request": { - "url": 
"{}/headers".format(HTTPBIN_SERVER), - "method": "GET", - "data": "abc" - }, - "teardown_hooks": [ - "${alter_response_error($response)}" - ] - } - ] - } - ] - tests_mapping = { - "project_mapping": loader.load_project_data("tests"), - "testcases": testcases - } - summary = self.runner.run_tests(tests_mapping) - self.assertFalse(summary["success"]) - self.assertEqual(summary["stat"]["teststeps"]["errors"], 1) - - def test_run_api(self): - path = "tests/httpbin/api/get_headers.yml" - summary = self.runner.run(path) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["successes"], 1) - - def test_request_302_logs(self): - path = "tests/httpbin/api/302_redirect.yml" - summary = self.runner.run(path) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["successes"], 1) - - req_resp_data = summary["details"][0]["records"][0]["meta_datas"]["data"] - self.assertEqual(len(req_resp_data), 2) - self.assertEqual(req_resp_data[0]["response"]["status_code"], 302) - self.assertEqual(req_resp_data[1]["response"]["status_code"], 200) - - def test_request_302_logs_teardown_hook(self): - path = "tests/httpbin/api/302_redirect_teardown_hook.yml" - summary = self.runner.run(path) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["successes"], 1) - - req_resp_data = summary["details"][0]["records"][0]["meta_datas"]["data"] - self.assertEqual(len(req_resp_data), 2) - self.assertEqual(req_resp_data[0]["response"]["status_code"], 302) - self.assertEqual(req_resp_data[1]["response"]["status_code"], 500) - - def 
test_request_with_params(self): - path = "tests/httpbin/api/302_redirect.yml" - summary = self.runner.run(path) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["successes"], 1) - - req_resp_data = summary["details"][0]["records"][0]["meta_datas"]["data"] - self.assertEqual(len(req_resp_data), 2) - self.assertIn( - "url=https%3A%2F%2Fgithub.com", - req_resp_data[0]["request"]["url"] - ) - - def test_run_api_folder(self): - api_folder = "tests/httpbin/api/" - summary = self.runner.run(api_folder) - print(summary["stat"]["testcases"]["total"]) - print(len(summary["details"])) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 3) - self.assertEqual(summary["stat"]["teststeps"]["total"], 3) - self.assertEqual(summary["stat"]["teststeps"]["successes"], 3) - self.assertEqual(len(summary["details"]), 3) - self.assertEqual(summary["details"][0]["stat"]["total"], 1) - self.assertEqual(summary["details"][1]["stat"]["total"], 1) - self.assertEqual(summary["details"][2]["stat"]["total"], 1) - - def test_run_testcase_hardcode(self): - for testcase_file_path in self.testcase_file_path_list: - summary = self.runner.run(testcase_file_path) - self.assertTrue(summary["success"]) - self.assertEqual(summary["stat"]["testcases"]["total"], 1) - self.assertEqual(summary["stat"]["teststeps"]["total"], 3) - self.assertEqual(summary["stat"]["teststeps"]["successes"], 3) - - def test_run_testcase_template_variables(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_variables.yml') - summary = self.runner.run(testcase_file_path) - self.assertTrue(summary["success"]) - - def test_run_testcase_template_import_functions(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_functions.yml') - summary = 
self.runner.run(testcase_file_path) - self.assertTrue(summary["success"]) - - def test_run_testcase_layered(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_layer.yml') - summary = self.runner.run(testcase_file_path) - self.assertTrue(summary["success"]) - self.assertEqual(len(summary["details"]), 1) - - def test_run_testcase_output(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_layer.yml') - summary = self.runner.run(testcase_file_path) - self.assertTrue(summary["success"]) - self.assertIn("token", summary["details"][0]["in_out"]["out"]) - # TODO: add - # self.assertIn("user_agent", summary["details"][0]["in_out"]["in"]) - - def test_run_testcase_with_variables_mapping(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/demo_testcase_layer.yml') - variables_mapping = { - "app_version": '2.9.7' - } - summary = self.runner.run(testcase_file_path, mapping=variables_mapping) - self.assertTrue(summary["success"]) - self.assertIn("token", summary["details"][0]["in_out"]["out"]) - # TODO: add - # self.assertGreater(len(summary["details"][0]["in_out"]["in"]), 3) - - def test_run_testcase_with_parameters(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/testsuites/create_users_with_parameters.yml') - summary = self.runner.run(testcase_file_path) - self.assertTrue(summary["success"]) - self.assertEqual(len(summary["details"]), 3 * 2) - - self.assertEqual(summary["stat"]["testcases"]["total"], 6) - self.assertEqual(summary["stat"]["teststeps"]["total"], 3 * 2 * 2) - self.assertEqual( - summary["details"][0]["name"], - "create user 101 and check result for TESTSUITE_X1." - ) - self.assertEqual( - summary["details"][5]["name"], - "create user 103 and check result for TESTSUITE_X2." 
- ) - self.assertEqual( - summary["details"][0]["stat"]["total"], - 2 - ) - records_name_list = [ - summary["details"][i]["records"][1]["meta_datas"][1]["name"] - for i in range(6) - ] - self.assertEqual( - set(records_name_list), - { - "create user 101 for TESTSUITE_X1", - "create user 101 for TESTSUITE_X2", - "create user 102 for TESTSUITE_X1", - "create user 102 for TESTSUITE_X2", - "create user 103 for TESTSUITE_X1", - "create user 103 for TESTSUITE_X2" - } - ) - - def test_validate_response_content(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/httpbin/basic.yml') - summary = self.runner.run(testcase_file_path) - self.assertTrue(summary["success"]) - - def test_html_report_xss(self): - testcases = [ - { - "config": { - 'name': "post data" - }, - "teststeps": [ - { - "name": "post data", - "request": { - "url": "{}/anything".format(HTTPBIN_SERVER), - "method": "POST", - "headers": { - "Content-Type": "application/json" - }, - "json": { - 'success': False, - "person": "" - } - }, - "validate": [ - {"eq": ["status_code", 200]} - ] - } - ] - } - ] - tests_mapping = { - "testcases": testcases - } - summary = self.runner.run(tests_mapping) - report_path = report.gen_html_report(summary) - with open(report_path) as f: - content = f.read() - m = re.findall( - re.escape(""person": "<img src=x onerror=alert(1)>""), - content - ) - self.assertEqual(len(m), 2) - - -class TestApi(ApiServerUnittest): - - def test_testcase_loader(self): - testcase_path = "tests/testcases/setup.yml" - tests_mapping = loader.load_cases(testcase_path) - - project_mapping = tests_mapping["project_mapping"] - self.assertIsInstance(project_mapping, dict) - self.assertIn("PWD", project_mapping) - self.assertIn("functions", project_mapping) - self.assertIn("env", project_mapping) - - testcases = tests_mapping["testcases"] - self.assertIsInstance(testcases, list) - self.assertEqual(len(testcases), 1) - testcase_config = testcases[0]["config"] - 
self.assertEqual(testcase_config["name"], "setup and reset all.") - self.assertIn("path", testcases[0]) - - testcase_tests = testcases[0]["teststeps"] - self.assertEqual(len(testcase_tests), 2) - self.assertIn("api", testcase_tests[0]) - self.assertEqual(testcase_tests[0]["name"], "get token (setup)") - self.assertIsInstance(testcase_tests[0]["variables"], dict) - self.assertIn("api_def", testcase_tests[0]) - self.assertEqual(testcase_tests[0]["api_def"]["request"]["url"], "/api/get-token") - - def test_testcase_parser(self): - testcase_path = "tests/testcases/setup.yml" - tests_mapping = loader.load_cases(testcase_path) - - parsed_testcases = parser.parse_tests(tests_mapping) - - self.assertEqual(len(parsed_testcases), 1) - - self.assertIn("variables", parsed_testcases[0]["config"]) - self.assertEqual(len(parsed_testcases[0]["teststeps"]), 2) - - test_dict1 = parsed_testcases[0]["teststeps"][0] - self.assertEqual(test_dict1["name"], "get token (setup)") - self.assertNotIn("api_def", test_dict1) - self.assertEqual(test_dict1["variables"]["device_sn"], "TESTCASE_SETUP_XXX") - self.assertEqual(test_dict1["request"]["url"], "/api/get-token") - self.assertEqual(test_dict1["request"]["verify"], False) - - test_dict2 = parsed_testcases[0]["teststeps"][1] - self.assertEqual(test_dict2["request"]["verify"], False) - - def test_testcase_add_tests(self): - testcase_path = "tests/testcases/setup.yml" - tests_mapping = loader.load_cases(testcase_path) - - testcases = parser.parse_tests(tests_mapping) - runner = HttpRunner() - test_suite = runner._add_tests(testcases) - - self.assertEqual(len(test_suite._tests), 1) - teststeps = test_suite._tests[0].teststeps - self.assertEqual(teststeps[0]["name"], "get token (setup)") - self.assertEqual(teststeps[0]["variables"]["device_sn"], "TESTCASE_SETUP_XXX") - self.assertIn("api", teststeps[0]) - - def test_testcase_complex_verify(self): - testcase_path = "tests/testcases/create_user.yml" - tests_mapping = 
loader.load_cases(testcase_path) - testcases = parser.parse_tests(tests_mapping) - teststeps = testcases[0]["teststeps"] - - # testcases/setup.yml - teststep0 = teststeps[0] - self.assertEqual(teststep0["teststeps"][0]["request"]["verify"], False) - self.assertEqual(teststep0["teststeps"][1]["request"]["verify"], False) - - # testcases/create_user.yml - teststep1 = teststeps[1] - self.assertEqual(teststep1["teststeps"][0]["request"]["verify"], True) - self.assertEqual(teststep1["teststeps"][1]["request"]["verify"], True) - self.assertEqual(teststep1["teststeps"][2]["request"]["verify"], True) - - def test_testcase_simple_run_suite(self): - testcase_path = "tests/testcases/setup.yml" - tests_mapping = loader.load_cases(testcase_path) - testcases = parser.parse_tests(tests_mapping) - runner = HttpRunner() - test_suite = runner._add_tests(testcases) - tests_results = runner._run_suite(test_suite) - self.assertEqual(len(tests_results[0][1].records), 2) - - def test_testcase_complex_run_suite(self): - for testcase_path in [ - "tests/testcases/create_user.yml", - "tests/testcases/create_user.v2.yml", - "tests/testcases/create_user.json", - "tests/testcases/create_user.v2.json" - ]: - tests_mapping = loader.load_cases(testcase_path) - testcases = parser.parse_tests(tests_mapping) - runner = HttpRunner() - test_suite = runner._add_tests(testcases) - tests_results = runner._run_suite(test_suite) - self.assertEqual(len(tests_results[0][1].records), 2) - - results = tests_results[0][1] - self.assertEqual( - results.records[0]["name"], - "setup and reset all (override) for TESTCASE_CREATE_XXX." - ) - self.assertEqual( - results.records[1]["name"], - "create user and check result." 
- ) - - def test_testsuite_loader(self): - testcase_path = "tests/testsuites/create_users.yml" - tests_mapping = loader.load_cases(testcase_path) - - project_mapping = tests_mapping["project_mapping"] - self.assertIsInstance(project_mapping, dict) - self.assertIn("PWD", project_mapping) - self.assertIn("functions", project_mapping) - self.assertIn("env", project_mapping) - - testsuites = tests_mapping["testsuites"] - self.assertIsInstance(testsuites, list) - self.assertEqual(len(testsuites), 1) - - self.assertIn("path", testsuites[0]) - testsuite_config = testsuites[0]["config"] - self.assertEqual(testsuite_config["name"], "create users with uid") - - testcases = testsuites[0]["testcases"] - self.assertEqual(len(testcases), 2) - self.assertIn("create user 1000 and check result.", testcases) - testcase_tests = testcases["create user 1000 and check result."] - self.assertIn("testcase_def", testcase_tests) - self.assertEqual(testcase_tests["name"], "create user 1000 and check result.") - self.assertIsInstance(testcase_tests["testcase_def"], dict) - self.assertEqual(testcase_tests["testcase_def"]["config"]["name"], "create user and check result.") - self.assertEqual(len(testcase_tests["testcase_def"]["teststeps"]), 2) - self.assertEqual( - testcase_tests["testcase_def"]["teststeps"][0]["name"], - "setup and reset all (override) for $device_sn." 
- ) - - def test_testsuite_parser(self): - testcase_path = "tests/testsuites/create_users.yml" - tests_mapping = loader.load_cases(testcase_path) - - parsed_testcases = parser.parse_tests(tests_mapping) - self.assertEqual(len(parsed_testcases), 2) - self.assertEqual(len(parsed_testcases[0]["teststeps"]), 2) - - testcase1 = parsed_testcases[0]["teststeps"][0] - self.assertIn("setup and reset all (override)", testcase1["config"]["name"].raw_string) - teststeps = testcase1["teststeps"] - self.assertNotIn("testcase_def", testcase1) - self.assertEqual(len(teststeps), 2) - self.assertEqual( - teststeps[0]["request"]["url"], - "/api/get-token" - ) - - def test_testsuite_add_tests(self): - testcase_path = "tests/testsuites/create_users.yml" - tests_mapping = loader.load_cases(testcase_path) - - testcases = parser.parse_tests(tests_mapping) - runner = HttpRunner() - test_suite = runner._add_tests(testcases) - - self.assertEqual(len(test_suite._tests), 2) - tests = test_suite._tests[0].teststeps - self.assertIn("setup and reset all (override)", tests[0]["config"]["name"].raw_string) - - def test_testsuite_run_suite(self): - testcase_path = "tests/testsuites/create_users.yml" - tests_mapping = loader.load_cases(testcase_path) - - testcases = parser.parse_tests(tests_mapping) - - runner = HttpRunner() - test_suite = runner._add_tests(testcases) - tests_results = runner._run_suite(test_suite) - - self.assertEqual(len(tests_results[0][1].records), 2) - - results = tests_results[0][1] - self.assertIn( - "setup and reset all (override)", - results.records[0]["name"] - ) - self.assertEqual( - results.records[1]["name"], - "create user and check result." 
- ) diff --git a/tests/test_apiserver.py b/tests/test_apiserver.py deleted file mode 100644 index 5d144349..00000000 --- a/tests/test_apiserver.py +++ /dev/null @@ -1,112 +0,0 @@ -from tests.base import ApiServerUnittest - - -class TestApiServer(ApiServerUnittest): - - def setUp(self): - super(TestApiServer, self).setUp() - self.headers = self.get_authenticated_headers() - self.reset_all() - - def tearDown(self): - super(TestApiServer, self).tearDown() - - def test_index(self): - resp = self.api_client.get(self.host) - self.assertEqual(200, resp.status_code) - - def reset_all(self): - url = "%s/api/reset-all" % self.host - return self.api_client.get(url, headers=self.headers) - - def get_users(self): - url = "%s/api/users" % self.host - return self.api_client.get(url, headers=self.headers) - - def create_user(self, uid, name, password): - url = "%s/api/users/%d" % (self.host, uid) - data = { - 'name': name, - 'password': password - } - return self.api_client.post(url, headers=self.headers, json=data) - - def get_user(self, uid): - url = "%s/api/users/%d" % (self.host, uid) - return self.api_client.get(url, headers=self.headers) - - def update_user(self, uid, name, password): - url = "%s/api/users/%d" % (self.host, uid) - data = { - 'name': name, - 'password': password - } - return self.api_client.put(url, headers=self.headers, json=data) - - def delete_user(self, uid): - url = "%s/api/users/%d" % (self.host, uid) - return self.api_client.delete(url, headers=self.headers) - - def test_reset_all(self): - resp = self.reset_all() - self.assertEqual(200, resp.status_code) - self.assertEqual(True, resp.json()['success']) - - def test_create_user_not_existed(self): - resp = self.create_user(1000, 'user1', '123456') - self.assertEqual(201, resp.status_code) - self.assertEqual(True, resp.json()['success']) - - def test_create_user_existed(self): - resp = self.create_user(1000, 'user1', '123456') - resp = self.create_user(1000, 'user1', '123456') - self.assertEqual(500, 
resp.status_code) - - def test_get_users_empty(self): - resp = self.get_users() - self.assertEqual(200, resp.status_code) - self.assertEqual(resp.json()['count'], 0) - - def test_get_users_not_empty(self): - resp = self.create_user(1000, 'user1', '123456') - resp = self.get_users() - self.assertEqual(200, resp.status_code) - self.assertEqual(resp.json()['count'], 1) - - resp = self.create_user(1001, 'user2', '123456') - resp = self.get_users() - self.assertEqual(200, resp.status_code) - self.assertEqual(resp.json()['count'], 2) - - def test_get_user_not_existed(self): - resp = self.get_user(1000) - self.assertEqual(404, resp.status_code) - self.assertEqual(resp.json()['success'], False) - - def test_get_user_existed(self): - self.create_user(1000, 'user1', '123456') - resp = self.get_user(1000) - self.assertEqual(200, resp.status_code) - self.assertEqual(resp.json()['success'], True) - - def test_update_user_not_existed(self): - resp = self.update_user(1000, 'user1', '123456') - self.assertEqual(404, resp.status_code) - self.assertEqual(resp.json()['success'], False) - - def test_update_user_existed(self): - self.create_user(1000, 'user1', '123456') - resp = self.update_user(1000, 'user2', '123456') - self.assertEqual(200, resp.status_code) - self.assertEqual(resp.json()['data']['name'], 'user2') - - def test_delete_user_not_existed(self): - resp = self.delete_user(1000) - self.assertEqual(404, resp.status_code) - self.assertEqual(resp.json()['success'], False) - - def test_delete_user_existed(self): - self.create_user(1000, 'leo', '123456') - resp = self.delete_user(1000) - self.assertEqual(200, resp.status_code) - self.assertEqual(resp.json()['success'], True) diff --git a/tests/test_client.py b/tests/test_client.py deleted file mode 100644 index 3cca18ac..00000000 --- a/tests/test_client.py +++ /dev/null @@ -1,80 +0,0 @@ -from httprunner.client import HttpSession -from tests.api_server import HTTPBIN_SERVER -from tests.base import ApiServerUnittest - - -class 
TestHttpClient(ApiServerUnittest): - def setUp(self): - super(TestHttpClient, self).setUp() - self.api_client = HttpSession() - self.headers = self.get_authenticated_headers() - self.reset_all() - - def tearDown(self): - super(TestHttpClient, self).tearDown() - - def reset_all(self): - url = "%s/api/reset-all" % self.host - headers = self.get_authenticated_headers() - return self.api_client.get(url, headers=headers) - - def test_request_with_full_url(self): - url = "%s/api/users/1000" % self.host - data = { - 'name': 'user1', - 'password': '123456' - } - resp = self.api_client.post(url, json=data, headers=self.headers) - self.assertEqual(201, resp.status_code) - self.assertEqual(True, resp.json()['success']) - - def test_request_without_base_url(self): - url = "{}/api/users/1000".format(self.host) - data = { - 'name': 'user1', - 'password': '123456' - } - resp = self.api_client.post(url, json=data, headers=self.headers) - self.assertEqual(201, resp.status_code) - self.assertEqual(True, resp.json()['success']) - - def test_request_post_data(self): - url = "{}/api/users/1000".format(self.host) - data = { - 'name': 'user1', - 'password': '123456' - } - resp = self.api_client.post(url, json=data, headers=self.headers) - # b'{"name": "user1", "password": "123456"}' - self.assertIn(b'"name": "user1"', resp.request.body) - self.assertIn(b'"password": "123456"', resp.request.body) - resp = self.api_client.post(url, data=data, headers=self.headers) - # name=user1&password=123456 - self.assertIn("name=user1", resp.request.body) - self.assertIn("&", resp.request.body) - self.assertIn("password=123456", resp.request.body) - - def test_request_with_cookies(self): - url = "{}/api/users/1000".format(self.host) - cookies = { - "a": "1", - "b": "2" - } - resp = self.api_client.get(url, cookies=cookies, headers=self.headers) - self.assertEqual(resp.request._cookies["a"], "1") - self.assertEqual(resp.request._cookies["b"], "2") - - def test_request_redirect(self): - url = 
"{}/redirect-to?url=https%3A%2F%2Fgithub.com&status_code=302".format(HTTPBIN_SERVER) - cookies = { - "a": "1", - "b": "2" - } - resp = self.api_client.get(url, cookies=cookies, headers=self.headers, verify=False) - raw_request = resp.history[0].request - self.assertEqual(raw_request._cookies["a"], "1") - self.assertEqual(raw_request._cookies["b"], "2") - redirect_request = resp.request - self.assertEqual(redirect_request.url, "https://github.com") - self.assertEqual(redirect_request._cookies["a"], "1") - self.assertEqual(redirect_request._cookies["b"], "2") diff --git a/tests/test_context.py b/tests/test_context.py deleted file mode 100644 index 46bbf343..00000000 --- a/tests/test_context.py +++ /dev/null @@ -1,188 +0,0 @@ -import os - -from httprunner import context, exceptions, loader, parser, runner -from tests.api_server import gen_md5 -from tests.base import ApiServerUnittest, gen_random_string - - -class TestContext(ApiServerUnittest): - - def setUp(self): - loader.load_project_data(os.path.join(os.getcwd(), "tests")) - self.context = context.SessionContext( - variables={"SECRET_KEY": "DebugTalk"} - ) - - def test_init_test_variables_initialize(self): - self.assertEqual( - self.context.test_variables_mapping, - {'SECRET_KEY': 'DebugTalk'} - ) - - def test_init_test_variables(self): - variables = { - "random": "${gen_random_string($num)}", - "authorization": "${gen_md5($TOKEN, $data, $random)}", - "data": "$username", - # TODO: escape '{' and '}' - # "data": '{"name": "$username", "password": "123456"}', - "TOKEN": "debugtalk", - "username": "user1", - "num": 6 - } - functions = { - "gen_random_string": gen_random_string, - "gen_md5": gen_md5 - } - variables = parser.prepare_lazy_data(variables, functions, variables.keys()) - variables = parser.parse_variables_mapping(variables) - self.context.init_test_variables(variables) - variables_mapping = self.context.test_variables_mapping - self.assertEqual(len(variables_mapping["random"]), 6) - 
self.assertEqual(len(variables_mapping["authorization"]), 32) - self.assertEqual(variables_mapping["data"], 'user1') - - def test_update_seesion_variables(self): - self.context.update_session_variables({"TOKEN": "debugtalk"}) - self.assertEqual( - self.context.session_variables_mapping["TOKEN"], - "debugtalk" - ) - - def test_eval_content_variables(self): - variables = { - "SECRET_KEY": "DebugTalk" - } - content = parser.prepare_lazy_data("abc$SECRET_KEY", {}, variables.keys()) - self.assertEqual( - self.context.eval_content(content), - "abcDebugTalk" - ) - - # TODO: fix variable extraction - # content = "abc$SECRET_KEYdef" - # self.assertEqual( - # self.context.eval_content(content), - # "abcDebugTalkdef" - # ) - - def test_get_parsed_request(self): - variables = { - "random": "${gen_random_string(5)}", - "data": '{"name": "user", "password": "123456"}', - "authorization": "${gen_md5($TOKEN, $data, $random)}", - "TOKEN": "debugtalk" - } - functions = { - "gen_random_string": gen_random_string, - "gen_md5": gen_md5 - } - variables = parser.prepare_lazy_data(variables, functions, variables.keys()) - variables = parser.parse_variables_mapping(variables) - self.context.init_test_variables(variables) - - request = { - "url": "http://127.0.0.1:5000/api/users/1000", - "method": "POST", - "headers": { - "Content-Type": "application/json", - "authorization": "$authorization", - "random": "$random", - "secret_key": "$SECRET_KEY" - }, - "data": "$data" - } - prepared_request = parser.prepare_lazy_data( - request, - functions, - {"authorization", "random", "SECRET_KEY", "data"} - ) - parsed_request = self.context.eval_content(prepared_request) - self.assertIn("authorization", parsed_request["headers"]) - self.assertEqual(len(parsed_request["headers"]["authorization"]), 32) - self.assertIn("random", parsed_request["headers"]) - self.assertEqual(len(parsed_request["headers"]["random"]), 5) - self.assertIn("data", parsed_request) - self.assertEqual( - parsed_request["data"], - 
'{"name": "user", "password": "123456"}' - ) - self.assertEqual(parsed_request["headers"]["secret_key"], "DebugTalk") - - def test_validate(self): - testcases = [ - { - "config": { - 'name': "test validation" - }, - "teststeps": [ - { - "name": "test validation", - "request": { - "url": "http://127.0.0.1:5000/", - "method": "GET", - }, - "variables": { - "resp_status_code": 200, - "resp_body_success": True - }, - "validate": [ - {"eq": ["$resp_status_code", 200]}, - {"check": "$resp_status_code", "comparator": "eq", "expect": 200}, - {"check": "$resp_body_success", "expect": True}, - {"check": "${is_status_code_200($resp_status_code)}", "expect": True} - ] - } - ] - } - ] - from tests.debugtalk import is_status_code_200 - tests_mapping = { - "project_mapping": { - "functions": { - "is_status_code_200": is_status_code_200 - } - }, - "testcases": testcases - } - testcases = parser.parse_tests(tests_mapping) - parsed_testcase = testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - teststep = parsed_testcase["teststeps"][0] - test_runner.run_test(teststep) - - def test_validate_exception(self): - testcases = [ - { - "config": { - 'name': "test validation" - }, - "teststeps": [ - { - "name": "test validation", - "request": { - "url": "http://127.0.0.1:5000/", - "method": "GET", - }, - "variables": { - "resp_status_code": 200, - "resp_body_success": True - }, - "validate": [ - {"eq": ["$resp_status_code", 201]}, - {"check": "$resp_status_code", "expect": 201}, - {"check": "$resp_body_success", "comparator": "eq", "expect": True} - ] - } - ] - } - ] - tests_mapping = { - "testcases": testcases - } - testcases = parser.parse_tests(tests_mapping) - parsed_testcase = testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - teststep = parsed_testcase["teststeps"][0] - with self.assertRaises(exceptions.ValidationFailure): - test_runner.run_test(teststep) diff --git a/tests/test_extension/test_locusts.py b/tests/test_extension/test_locusts.py 
deleted file mode 100644 index 59d957ff..00000000 --- a/tests/test_extension/test_locusts.py +++ /dev/null @@ -1,19 +0,0 @@ -import os -import unittest - -from httprunner.ext.locusts.utils import prepare_locust_tests - - -class TestLocust(unittest.TestCase): - - def test_prepare_locust_tests(self): - path = os.path.join( - os.getcwd(), 'tests/locust_tests/demo_locusts.yml') - locust_tests = prepare_locust_tests(path) - self.assertEqual(len(locust_tests), 2 + 3) - name_list = [ - "create user 1000 and check result.", - "create user 1001 and check result." - ] - self.assertIn(locust_tests[0]["config"]["name"], name_list) - self.assertIn(locust_tests[4]["config"]["name"], name_list) diff --git a/tests/test_response.py b/tests/test_response.py deleted file mode 100644 index a11cd204..00000000 --- a/tests/test_response.py +++ /dev/null @@ -1,388 +0,0 @@ -import requests - -from httprunner import exceptions, response -from tests.api_server import HTTPBIN_SERVER -from tests.base import ApiServerUnittest - - -class TestResponse(ApiServerUnittest): - - def test_parse_response_object_json(self): - url = "http://127.0.0.1:5000/api/users" - resp = requests.get(url) - resp_obj = response.ResponseObject(resp) - self.assertTrue(hasattr(resp_obj, 'status_code')) - self.assertTrue(hasattr(resp_obj, 'headers')) - self.assertTrue(hasattr(resp_obj, 'content')) - self.assertIn('Content-Type', resp_obj.headers) - self.assertIn('Content-Length', resp_obj.headers) - self.assertIn('success', resp_obj.json) - - def test_parse_response_object_content(self): - url = "http://127.0.0.1:5000/" - resp = requests.get(url) - resp_obj = response.ResponseObject(resp) - self.assertEqual(bytes, type(resp_obj.content)) - - def test_extract_response_status_code(self): - resp = requests.get(url="{}/status/200".format(HTTPBIN_SERVER)) - resp_obj = response.ResponseObject(resp) - - extract_binds_list = [ - {"resp_status_code": "status_code"} - ] - extract_binds_dict = 
resp_obj.extract_response(extract_binds_list) - - self.assertEqual( - extract_binds_dict["resp_status_code"], - 200 - ) - - extract_binds_list = [ - {"resp_status_code": "status_code.xx"} - ] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_encoding_ok_reason_url(self): - resp = requests.get(url="{}/status/200".format(HTTPBIN_SERVER)) - resp_obj = response.ResponseObject(resp) - - extract_binds_list = [ - {"resp_encoding": "encoding"}, - {"resp_ok": "ok"}, - {"resp_reason": "reason"}, - {"resp_url": "url"} - ] - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - - self.assertEqual(extract_binds_dict["resp_encoding"], "utf-8") - self.assertEqual(extract_binds_dict["resp_ok"], True) - self.assertEqual(extract_binds_dict["resp_reason"], "OK") - self.assertEqual(extract_binds_dict["resp_url"], "{}/status/200".format(HTTPBIN_SERVER)) - - extract_binds_list = [{"resp_encoding": "encoding.xx"}] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - extract_binds_list = [{"resp_ok": "ok.xx"}] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - extract_binds_list = [{"resp_reason": "reason.xx"}] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - extract_binds_list = [{"resp_url": "url.xx"}] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_cookies(self): - resp = requests.get( - url="{}/cookies".format(HTTPBIN_SERVER), - headers={ - "accept": "application/json" - } - ) - resp_obj = response.ResponseObject(resp) - - extract_binds_list = [ - {"resp_cookies": "cookies"} - ] - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - self.assertEqual( - extract_binds_dict["resp_cookies"], - {} - ) - - extract_binds_list = [ - {"resp_cookies": 
"cookies.xx"} - ] - with self.assertRaises(exceptions.ExtractFailure): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_elapsed(self): - resp = requests.post( - url="{}/anything".format(HTTPBIN_SERVER), - json={ - 'success': False, - "person": { - "name": { - "first_name": "Leo", - "last_name": "Lee", - }, - "age": 29, - "cities": ["Guangzhou", "Shenzhen"] - } - } - ) - resp_obj = response.ResponseObject(resp) - - extract_binds_list = [ - {"resp_elapsed": "elapsed"} - ] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - extract_binds_list = [ - {"resp_elapsed_microseconds": "elapsed.microseconds"}, - {"resp_elapsed_seconds": "elapsed.seconds"}, - {"resp_elapsed_days": "elapsed.days"}, - {"resp_elapsed_total_seconds": "elapsed.total_seconds"} - ] - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - self.assertGreater(extract_binds_dict["resp_elapsed_microseconds"], 1000) - self.assertLess(extract_binds_dict["resp_elapsed_seconds"], 60) - self.assertEqual(extract_binds_dict["resp_elapsed_days"], 0) - self.assertGreater(extract_binds_dict["resp_elapsed_total_seconds"], 0) - - extract_binds_list = [ - {"resp_elapsed": "elapsed.years"} - ] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_headers(self): - resp = requests.get(url="{}/status/200".format(HTTPBIN_SERVER)) - resp_obj = response.ResponseObject(resp) - - extract_binds_list = [ - {"resp_headers": "headers"}, - {"resp_headers_content_type": "headers.Content-Type"}, - {"resp_headers_content_type_lowercase": "headers.content-type"} - ] - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - self.assertIn("Content-Type", extract_binds_dict["resp_headers"]) - self.assertIn("text/html", extract_binds_dict["resp_headers_content_type"]) - self.assertIn("text/html", extract_binds_dict["resp_headers_content_type_lowercase"]) - - 
extract_binds_list = [ - {"resp_headers_xxx": "headers.xxx"} - ] - with self.assertRaises(exceptions.ExtractFailure): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_body_json(self): - resp = requests.post( - url="{}/anything".format(HTTPBIN_SERVER), - json={ - 'success': False, - "person": { - "name": { - "first_name": "Leo", - "last_name": "Lee", - }, - "age": 29, - "cities": ["Guangzhou", "Shenzhen"] - } - } - ) - # resp.json() - # { - # "args": {}, - # "data": "{\"success\": false, \"person\": {\"name\": {\"first_name\": \"Leo\", \"last_name\": \"Lee\"}, \"age\": 29, \"cities\": [\"Guangzhou\", \"Shenzhen\"]}}", - # "files": {}, - # "form": {}, - # "headers": { - # "Accept": "*/*", - # "Accept-Encoding": "gzip, deflate", - # "Connection": "keep-alive", - # "Content-Length": "129", - # "Content-Type": "application/json", - # "Host": HTTPBIN_SERVER, - # "User-Agent": "python-requests/2.18.4" - # }, - # "json": { - # "person": { - # "age": 29, - # "cities": [ - # "Guangzhou", - # "Shenzhen" - # ], - # "name": { - # "first_name": "Leo", - # "last_name": "Lee" - # } - # }, - # "success": false - # }, - # "method": "POST", - # "origin": "127.0.0.1", - # "url": "{}/anything".format(HTTPBIN_SERVER) - # } - - extract_binds_list = [ - {"resp_headers_content_type": "headers.content-type"}, - {"resp_content_body_success": "json.json.success"}, - {"resp_content_content_success": "content.json.success"}, - {"resp_content_text_success": "text.json.success"}, - {"resp_content_person_first_name": "content.json.person.name.first_name"}, - {"resp_content_cities_1": "content.json.person.cities.1"} - ] - resp_obj = response.ResponseObject(resp) - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - - self.assertEqual( - extract_binds_dict["resp_headers_content_type"], - "application/json" - ) - self.assertEqual( - extract_binds_dict["resp_content_body_success"], - False - ) - self.assertEqual( - 
extract_binds_dict["resp_content_content_success"], - False - ) - self.assertEqual( - extract_binds_dict["resp_content_text_success"], - False - ) - self.assertEqual( - extract_binds_dict["resp_content_person_first_name"], - "Leo" - ) - self.assertEqual( - extract_binds_dict["resp_content_cities_1"], - "Shenzhen" - ) - - def test_extract_response_body_html(self): - resp = requests.get(url=HTTPBIN_SERVER) - resp_obj = response.ResponseObject(resp) - - extract_binds_list = [ - {"resp_content": "content"} - ] - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - - self.assertIsInstance(extract_binds_dict["resp_content"], str) - self.assertIn("httpbin.org", extract_binds_dict["resp_content"]) - - extract_binds_list = [ - {"resp_content": "content.xxx"} - ] - with self.assertRaises(exceptions.ExtractFailure): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_others(self): - resp = requests.get(url="{}/status/200".format(HTTPBIN_SERVER)) - resp_obj = response.ResponseObject(resp) - - extract_binds_list = [ - {"resp_others_encoding": "encoding"}, - {"resp_others_history": "history"} - ] - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_fail(self): - resp = requests.post( - url="{}/anything".format(HTTPBIN_SERVER), - json={ - 'success': False, - "person": { - "name": { - "first_name": "Leo", - "last_name": "Lee", - }, - "age": 29, - "cities": ["Guangzhou", "Shenzhen"] - } - } - ) - - extract_binds_list = [ - {"resp_content_dict_key_error": "content.not_exist"} - ] - resp_obj = response.ResponseObject(resp) - - with self.assertRaises(exceptions.ExtractFailure): - resp_obj.extract_response(extract_binds_list) - - extract_binds_list = [ - {"resp_content_list_index_error": "content.person.cities.3"} - ] - resp_obj = response.ResponseObject(resp) - - with self.assertRaises(exceptions.ExtractFailure): - resp_obj.extract_response(extract_binds_list) - - def 
test_extract_response_json_string(self): - resp = requests.post( - url="{}/anything".format(HTTPBIN_SERVER), - data="abc" - ) - - extract_binds_list = [ - {"resp_content_body": "content.data"} - ] - resp_obj = response.ResponseObject(resp) - - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - self.assertEqual( - extract_binds_dict["resp_content_body"], - "abc" - ) - - def test_extract_text_response(self): - resp = requests.post( - url="{}/anything".format(HTTPBIN_SERVER), - data="LB123abcRB789" - ) - - extract_binds_list = [ - {"resp_content_key1": "LB123(.*)RB789"}, - {"resp_content_key2": "LB[\d]*(.*)RB[\d]*"}, - {"resp_content_key3": "LB[\d]*(.*)9"} - ] - resp_obj = response.ResponseObject(resp) - - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - self.assertEqual( - extract_binds_dict["resp_content_key1"], - "abc" - ) - self.assertEqual( - extract_binds_dict["resp_content_key2"], - "abc" - ) - self.assertEqual( - extract_binds_dict["resp_content_key3"], - "abcRB78" - ) - - def test_extract_text_response_exception(self): - resp = requests.post( - url="{}/anything".format(HTTPBIN_SERVER), - data="LB123abcRB789" - ) - extract_binds_list = [ - {"resp_content_key1": "LB123.*RB789"} - ] - resp_obj = response.ResponseObject(resp) - with self.assertRaises(exceptions.ParamsError): - resp_obj.extract_response(extract_binds_list) - - def test_extract_response_empty(self): - resp = requests.post( - url="{}/anything".format(HTTPBIN_SERVER), - data="abc" - ) - - extract_binds_list = [ - {"resp_content_body": "content.data"} - ] - resp_obj = response.ResponseObject(resp) - extract_binds_dict = resp_obj.extract_response(extract_binds_list) - self.assertEqual( - extract_binds_dict["resp_content_body"], - 'abc' - ) - - extract_binds_list = [ - {"resp_content_body": "content.data.def"} - ] - resp_obj = response.ResponseObject(resp) - with self.assertRaises(exceptions.ExtractFailure): - resp_obj.extract_response(extract_binds_list) diff --git 
a/tests/test_runner.py b/tests/test_runner.py deleted file mode 100644 index c724ab60..00000000 --- a/tests/test_runner.py +++ /dev/null @@ -1,381 +0,0 @@ -import os -import time - -from httprunner import loader, parser, runner -from tests.api_server import HTTPBIN_SERVER -from tests.base import ApiServerUnittest - - -class TestRunner(ApiServerUnittest): - - def setUp(self): - project_mapping = loader.load_project_data(os.path.join(os.getcwd(), "tests")) - self.debugtalk_functions = project_mapping["functions"] - - config = { - "name": "XXX", - "base_url": "http://127.0.0.1", - "verify": False - } - self.test_runner = runner.Runner(config) - self.reset_all() - - def reset_all(self): - url = "%s/api/reset-all" % self.host - headers = self.get_authenticated_headers() - return self.api_client.get(url, headers=headers) - - def test_run_single_testcase(self): - testcase_file_path_list = [ - os.path.join( - os.getcwd(), 'tests/data/demo_testcase_hardcode.yml'), - os.path.join( - os.getcwd(), 'tests/data/demo_testcase_hardcode.json') - ] - - for testcase_file_path in testcase_file_path_list: - tests_mapping = loader.load_cases(testcase_file_path) - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - test_runner.run_test(parsed_testcase["teststeps"][0]) - test_runner.run_test(parsed_testcase["teststeps"][1]) - test_runner.run_test(parsed_testcase["teststeps"][2]) - - def test_run_testcase_with_hooks(self): - start_time = time.time() - - testcases = [ - { - "config": { - "name": "basic test with httpbin", - "base_url": HTTPBIN_SERVER, - "setup_hooks": [ - "${sleep(0.5)}", - "${hook_print(setup)}" - ], - "teardown_hooks": [ - "${sleep(1)}", - "${hook_print(teardown)}" - ] - }, - "teststeps": [ - { - "name": "get token", - "request": { - "url": "http://127.0.0.1:5000/api/get-token", - "method": "POST", - "headers": { - "content-type": "application/json", - "user_agent": 
"iOS/10.3", - "device_sn": "HZfFBh6tU59EdXJ", - "os_platform": "ios", - "app_version": "2.8.6" - }, - "json": { - "sign": "5188962c489d1a35effa99e9346dd5efd4fdabad" - } - }, - "validate": [ - {"check": "status_code", "expect": 200} - ] - } - ] - } - ] - tests_mapping = { - "project_mapping": { - "functions": self.debugtalk_functions - }, - "testcases": testcases - } - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - end_time = time.time() - # check if testcase setup hook executed - self.assertGreater(end_time - start_time, 0.5) - - start_time = time.time() - test_runner.run_test(parsed_testcase["teststeps"][0]) - end_time = time.time() - # testcase teardown hook has not been executed now - self.assertLess(end_time - start_time, 1) - - def test_run_testcase_with_hooks_assignment(self): - testcases = [ - { - "config": { - "name": "basic test with httpbin", - "base_url": HTTPBIN_SERVER - }, - "teststeps": [ - { - "name": "modify request headers", - "base_url": HTTPBIN_SERVER, - "request": { - "url": "/anything", - "method": "POST", - "headers": { - "user_agent": "iOS/10.3", - "os_platform": "ios" - }, - "data": "a=1&b=2" - }, - "setup_hooks": [ - {"total": "${sum_two(1, 5)}"} - ], - "validate": [ - {"check": "status_code", "expect": 200} - ] - } - ] - } - ] - tests_mapping = { - "project_mapping": { - "functions": self.debugtalk_functions - }, - "testcases": testcases - } - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - test_runner.run_test(parsed_testcase["teststeps"][0]) - test_variables_mapping = test_runner.session_context.test_variables_mapping - self.assertEqual(test_variables_mapping["total"], 6) - self.assertEqual(test_variables_mapping["request"]["data"], "a=1&b=2") - - def test_run_testcase_with_hooks_modify_request(self): - testcases = [ - { - "config": 
{ - "name": "basic test with httpbin", - "base_url": HTTPBIN_SERVER - }, - "teststeps": [ - { - "name": "modify request headers", - "base_url": HTTPBIN_SERVER, - "request": { - "url": "/anything", - "method": "POST", - "headers": { - "content-type": "application/json", - "user_agent": "iOS/10.3" - }, - "json": { - "os_platform": "ios", - "sign": "5188962c489d1a35effa99e9346dd5efd4fdabad" - } - }, - "setup_hooks": [ - "${modify_request_json($request, android)}" - ], - "validate": [ - {"check": "status_code", "expect": 200}, - {"check": "content.json.os_platform", "expect": "android"} - ] - } - ] - } - ] - tests_mapping = { - "project_mapping": { - "functions": self.debugtalk_functions - }, - "testcases": testcases - } - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - test_runner.run_test(parsed_testcase["teststeps"][0]) - - def test_run_testcase_with_teardown_hooks_success(self): - testcases = [ - { - "config": { - "name": "basic test with httpbin" - }, - "teststeps": [ - { - "name": "get token", - "request": { - "url": "http://127.0.0.1:5000/api/get-token", - "method": "POST", - "headers": { - "content-type": "application/json", - "user_agent": "iOS/10.3", - "device_sn": "HZfFBh6tU59EdXJ", - "os_platform": "ios", - "app_version": "2.8.6" - }, - "json": { - "sign": "5188962c489d1a35effa99e9346dd5efd4fdabad" - } - }, - "validate": [ - {"check": "status_code", "expect": 200} - ], - "teardown_hooks": ["${teardown_hook_sleep_N_secs($response, 2)}"] - } - ] - } - ] - tests_mapping = { - "project_mapping": { - "functions": self.debugtalk_functions - }, - "testcases": testcases - } - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - - start_time = time.time() - test_runner.run_test(parsed_testcase["teststeps"][0]) - end_time = time.time() - # check if teardown function 
executed - self.assertLess(end_time - start_time, 0.5) - - def test_run_testcase_with_teardown_hooks_fail(self): - testcases = [ - { - "config": { - "name": "basic test with httpbin" - }, - "teststeps": [ - { - "name": "get token", - "request": { - "url": "http://127.0.0.1:5000/api/get-token2", - "method": "POST", - "headers": { - "content-type": "application/json", - "user_agent": "iOS/10.3", - "device_sn": "HZfFBh6tU59EdXJ", - "os_platform": "ios", - "app_version": "2.8.6" - }, - "json": { - "sign": "5188962c489d1a35effa99e9346dd5efd4fdabad" - } - }, - "validate": [ - {"check": "status_code", "expect": 404} - ], - "teardown_hooks": ["${teardown_hook_sleep_N_secs($response, 2)}"] - } - ] - } - ] - tests_mapping = { - "project_mapping": { - "functions": self.debugtalk_functions - }, - "testcases": testcases - } - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - - start_time = time.time() - test_runner.run_test(parsed_testcase["teststeps"][0]) - end_time = time.time() - # check if teardown function executed - self.assertGreater(end_time - start_time, 2) - - def test_bugfix_type_match(self): - testcase_file_path = os.path.join( - os.getcwd(), 'tests/data/bugfix_type_match.yml') - tests_mapping = loader.load_cases(testcase_file_path) - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - test_runner.run_test(parsed_testcase["teststeps"][0]) - - def test_run_validate_elapsed(self): - testcases = [ - { - "config": {}, - "teststeps": [ - { - "name": "get token", - "request": { - "url": "http://127.0.0.1:5000/api/get-token", - "method": "POST", - "headers": { - "content-type": "application/json", - "user_agent": "iOS/10.3", - "device_sn": "HZfFBh6tU59EdXJ", - "os_platform": "ios", - "app_version": "2.8.6" - }, - "json": { - "sign": "5188962c489d1a35effa99e9346dd5efd4fdabad" 
- } - }, - "validate": [ - {"check": "status_code", "expect": 200}, - {"check": "elapsed.seconds", "comparator": "lt", "expect": 1}, - {"check": "elapsed.days", "comparator": "eq", "expect": 0}, - {"check": "elapsed.microseconds", "comparator": "gt", "expect": 1000}, - {"check": "elapsed.total_seconds", "comparator": "lt", "expect": 1} - ] - } - ] - } - ] - tests_mapping = { - "project_mapping": { - "functions": self.debugtalk_functions - }, - "testcases": testcases - } - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - test_runner.run_test(parsed_testcase["teststeps"][0]) - - def test_run_testcase_config_variables_parsed_from_function(self): - testcases = [ - { - "config": { - "name": "basic test with httpbin", - "base_url": HTTPBIN_SERVER, - "variables": "${gen_variables()}" - }, - "teststeps": [ - { - "name": "modify request headers", - "base_url": HTTPBIN_SERVER, - "request": { - "url": "/anything", - "method": "POST", - "headers": { - "user_agent": "iOS/10.3", - "os_platform": "ios" - }, - "data": "a=1&b=2" - }, - "validate": [ - {"check": "status_code", "expect": 200} - ] - } - ] - } - ] - tests_mapping = { - "project_mapping": { - "functions": self.debugtalk_functions - }, - "testcases": testcases - } - parsed_testcases = parser.parse_tests(tests_mapping) - parsed_testcase = parsed_testcases[0] - test_runner = runner.Runner(parsed_testcase["config"]) - test_runner.run_test(parsed_testcase["teststeps"][0]) - test_variables_mapping = test_runner.session_context.test_variables_mapping - self.assertEqual(test_variables_mapping["var_a"], 1) - self.assertEqual(test_variables_mapping["var_b"], 2) - self.assertEqual(test_variables_mapping["request"]["data"], "a=1&b=2") diff --git a/tests/test_validator.py b/tests/test_validator.py deleted file mode 100644 index 039c3960..00000000 --- a/tests/test_validator.py +++ /dev/null @@ -1,7 +0,0 @@ -import unittest - -from 
httprunner import validator - - -class TestValidator(unittest.TestCase): - pass diff --git a/tests/testcases/create_user.json b/tests/testcases/create_user.json deleted file mode 100644 index 30af7ce8..00000000 --- a/tests/testcases/create_user.json +++ /dev/null @@ -1,34 +0,0 @@ -[ - { - "config": { - "id": "create_user", - "variables": { - "device_sn": "TESTCASE_CREATE_XXX", - "uid": 9001 - }, - "output": [ - "session_token" - ], - "base_url": "http://127.0.0.1:5000", - "name": "create user and check result." - } - }, - { - "test": { - "testcase": "testcases/setup.yml", - "extract": [ - "session_token" - ], - "name": "setup and reset all (override) for $device_sn." - } - }, - { - "test": { - "testcase": "testcases/deps/check_and_create.yml", - "variables": { - "token": "$session_token" - }, - "name": "create user and check result." - } - } -] \ No newline at end of file diff --git a/tests/testcases/create_user.v2.json b/tests/testcases/create_user.v2.json deleted file mode 100644 index 12f537e9..00000000 --- a/tests/testcases/create_user.v2.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "config": { - "id": "create_user", - "variables": { - "device_sn": "TESTCASE_CREATE_XXX", - "uid": 9001 - }, - "output": [ - "session_token" - ], - "base_url": "http://127.0.0.1:5000", - "name": "create user and check result." - }, - "teststeps": [ - { - "testcase": "testcases/setup.yml", - "extract": [ - "session_token" - ], - "name": "setup and reset all (override) for $device_sn." - }, - { - "testcase": "testcases/deps/check_and_create.yml", - "variables": { - "token": "$session_token" - }, - "name": "create user and check result." - } - ] -} \ No newline at end of file diff --git a/tests/testcases/create_user.v2.yml b/tests/testcases/create_user.v2.yml deleted file mode 100644 index f88dbd8a..00000000 --- a/tests/testcases/create_user.v2.yml +++ /dev/null @@ -1,21 +0,0 @@ -config: - name: "create user and check result." 
- id: create_user - base_url: "http://127.0.0.1:5000" - variables: - uid: 9001 - device_sn: "TESTCASE_CREATE_XXX" - export: - - session_token - -teststeps: -- - name: setup and reset all (override) for $device_sn. - testcase: testcases/setup.yml - extract: - - session_token -- - name: create user and check result. - variables: - token: $session_token - testcase: testcases/deps/check_and_create.yml diff --git a/tests/testcases/create_user.yml b/tests/testcases/create_user.yml deleted file mode 100644 index 9a392438..00000000 --- a/tests/testcases/create_user.yml +++ /dev/null @@ -1,22 +0,0 @@ - -- config: - name: "create user and check result." - id: create_user - base_url: "http://127.0.0.1:5000" - variables: - uid: 9001 - device_sn: "TESTCASE_CREATE_XXX" - export: - - session_token - -- test: - name: setup and reset all (override) for $device_sn. - testcase: testcases/setup.yml - extract: - - session_token - -- test: - name: create user and check result. - variables: - token: $session_token - testcase: testcases/deps/check_and_create.yml diff --git a/tests/testcases/deps/check_and_create.yml b/tests/testcases/deps/check_and_create.yml deleted file mode 100644 index 8a6b2d0a..00000000 --- a/tests/testcases/deps/check_and_create.yml +++ /dev/null @@ -1,39 +0,0 @@ -- config: - name: "create user and check result." 
- id: create_and_check - base_url: "http://127.0.0.1:5000" - variables: - uid: 9001 - device_sn: "TESTCASE_CREATE_XXX" - -- test: - name: make sure user $uid does not exist - api: api/get_user.yml - variables: - uid: $uid - token: $token - validate: - - eq: ["status_code", 404] - - eq: ["content.success", false] - -- test: - name: create user $uid for $device_sn - api: api/create_user.yml - variables: - user_name: "user1" - user_password: "123456" - uid: $uid - token: $token - validate: - - eq: ["status_code", 201] - - eq: ["content.success", true] - -- test: - name: check if user $uid exists - api: api/get_user.yml - variables: - uid: $uid - token: $token - validate: - - eq: ["status_code", 200] - - eq: ["content.success", true] diff --git a/tests/testcases/setup.json b/tests/testcases/setup.json deleted file mode 100644 index d8690447..00000000 --- a/tests/testcases/setup.json +++ /dev/null @@ -1,59 +0,0 @@ -[ - { - "config": { - "name": "setup and reset all.", - "output": [ - "session_token" - ], - "verify": false, - "variables": { - "device_sn": "TESTCASE_SETUP_XXX", - "app_version": "2.8.6", - "os_platform": "ios", - "user_agent": "iOS/10.3" - }, - "base_url": "http://127.0.0.1:5000", - "id": "setup_and_reset" - } - }, - { - "test": { - "validate": [ - { - "eq": [ - "status_code", - 200 - ] - }, - { - "len_eq": [ - "content.token", - 16 - ] - } - ], - "api": "api/get_token.yml", - "extract": [ - { - "session_token": "content.token" - } - ], - "variables": { - "device_sn": "$device_sn", - "app_version": "2.8.6", - "os_platform": "ios", - "user_agent": "iOS/10.3" - }, - "name": "get token (setup)" - } - }, - { - "test": { - "variables": { - "token": "$session_token" - }, - "api": "api/reset_all.yml", - "name": "reset all users" - } - } -] \ No newline at end of file diff --git a/tests/testcases/setup.v2.json b/tests/testcases/setup.v2.json deleted file mode 100644 index bbc2d3ed..00000000 --- a/tests/testcases/setup.v2.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - 
"config": { - "name": "setup and reset all.", - "base_url": "http://127.0.0.1:5000", - "variables": { - "device_sn": "TESTCASE_SETUP_XXX", - "app_version": "2.8.6", - "os_platform": "ios", - "user_agent": "iOS/10.3" - }, - "id": "setup_and_reset", - "verify": false, - "output": [ - "session_token" - ] - }, - "teststeps": [ - { - "name": "get token (setup)", - "api": "api/get_token.yml", - "variables": { - "device_sn": "$device_sn", - "app_version": "2.8.6", - "os_platform": "ios", - "user_agent": "iOS/10.3" - }, - "extract": [ - {"session_token": "content.token"} - ], - "validate": [ - {"eq": ["status_code", 200]}, - {"len_eq": ["content.token", 16]} - ] - }, - { - "name": "reset all users", - "api": "api/reset_all.yml", - "variables": { - "token": "$session_token" - } - } - ] -} \ No newline at end of file diff --git a/tests/testcases/setup.v2.yml b/tests/testcases/setup.v2.yml deleted file mode 100644 index d9945928..00000000 --- a/tests/testcases/setup.v2.yml +++ /dev/null @@ -1,32 +0,0 @@ -config: - name: "setup and reset all." - id: setup_and_reset - variables: - user_agent: 'iOS/10.3' - device_sn: "TESTCASE_SETUP_XXX" - os_platform: 'ios' - app_version: '2.8.6' - base_url: "http://127.0.0.1:5000" - verify: False - export: - - session_token - -teststeps: -- - name: get token (setup) - api: api/get_token.yml - variables: - user_agent: 'iOS/10.3' - device_sn: $device_sn - os_platform: 'ios' - app_version: '2.8.6' - extract: - - session_token: content.token - validate: - - eq: ["status_code", 200] - - len_eq: ["content.token", 16] -- - name: reset all users - api: api/reset_all.yml - variables: - token: $session_token diff --git a/tests/testcases/setup.yml b/tests/testcases/setup.yml deleted file mode 100644 index ade11425..00000000 --- a/tests/testcases/setup.yml +++ /dev/null @@ -1,32 +0,0 @@ -- config: - name: "setup and reset all." 
- id: setup_and_reset - variables: - user_agent: 'iOS/10.3' - device_sn: "TESTCASE_SETUP_XXX" - os_platform: 'ios' - app_version: '2.8.6' - base_url: "http://127.0.0.1:5000" - verify: False - export: - - session_token - -- test: - name: get token (setup) - api: api/get_token.yml - variables: - user_agent: 'iOS/10.3' - device_sn: $device_sn - os_platform: 'ios' - app_version: '2.8.6' - extract: - - session_token: content.token - validate: - - eq: ["status_code", 200] - - len_eq: ["content.token", 16] - -- test: - name: reset all users - api: api/reset_all.yml - variables: - token: $session_token diff --git a/tests/testsuites/create_users.json b/tests/testsuites/create_users.json deleted file mode 100644 index d504c99f..00000000 --- a/tests/testsuites/create_users.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "testcases": { - "create user 1001 and check result.": { - "testcase": "testcases/create_user.yml", - "variables": { - "var_d": "$var_c", - "var_c": "${gen_random_string(5)}", - "uid": 1001 - } - }, - "create user 1000 and check result.": { - "testcase": "testcases/create_user.yml", - "variables": { - "var_d": "$var_c", - "var_c": "${gen_random_string(5)}", - "uid": 1000 - } - } - }, - "config": { - "variables": { - "device_sn": "${gen_random_string(15)}", - "var_b": "$var_a", - "var_a": "${gen_random_string(5)}" - }, - "name": "create users with uid", - "base_url": "http://127.0.0.1:5000" - } -} \ No newline at end of file diff --git a/tests/testsuites/create_users.v2.json b/tests/testsuites/create_users.v2.json deleted file mode 100644 index 450a27aa..00000000 --- a/tests/testsuites/create_users.v2.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "config": { - "variables": { - "device_sn": "${gen_random_string(15)}", - "var_b": "$var_a", - "var_a": "${gen_random_string(5)}" - }, - "name": "create users with uid", - "base_url": "http://127.0.0.1:5000" - }, - "testcases": [ - { - "name": "create user 1000 and check result.", - "testcase": "testcases/create_user.v2.yml", - 
"variables": { - "var_d": "$var_c", - "var_c": "${gen_random_string(5)}", - "uid": 1000 - } - }, - { - "name": "create user 1001 and check result.", - "testcase": "testcases/create_user.v2.yml", - "variables": { - "var_d": "$var_c", - "var_c": "${gen_random_string(5)}", - "uid": 1001 - } - } - ] -} \ No newline at end of file diff --git a/tests/testsuites/create_users.v2.yml b/tests/testsuites/create_users.v2.yml deleted file mode 100644 index 8fcdd930..00000000 --- a/tests/testsuites/create_users.v2.yml +++ /dev/null @@ -1,24 +0,0 @@ -config: - name: create users with uid - variables: - device_sn: ${gen_random_string(15)} - var_a: ${gen_random_string(5)} - var_b: $var_a - base_url: "http://127.0.0.1:5000" - -testcases: -- - name: create user 1000 and check result. - testcase: testcases/create_user.v2.yml - variables: - uid: 1000 - var_c: ${gen_random_string(5)} - var_d: $var_c - -- - name: create user 1001 and check result. - testcase: testcases/create_user.v2.yml - variables: - uid: 1001 - var_c: ${gen_random_string(5)} - var_d: $var_c diff --git a/tests/testsuites/create_users.yml b/tests/testsuites/create_users.yml deleted file mode 100644 index 25c567a5..00000000 --- a/tests/testsuites/create_users.yml +++ /dev/null @@ -1,22 +0,0 @@ -config: - name: create users with uid - variables: - device_sn: ${gen_random_string(15)} - var_a: ${gen_random_string(5)} - var_b: $var_a - base_url: "http://127.0.0.1:5000" - -testcases: - create user 1000 and check result.: - testcase: testcases/create_user.yml - variables: - uid: 1000 - var_c: ${gen_random_string(5)} - var_d: $var_c - - create user 1001 and check result.: - testcase: testcases/create_user.yml - variables: - uid: 1001 - var_c: ${gen_random_string(5)} - var_d: $var_c diff --git a/tests/testsuites/create_users_with_parameters.yml b/tests/testsuites/create_users_with_parameters.yml deleted file mode 100644 index a134e956..00000000 --- a/tests/testsuites/create_users_with_parameters.yml +++ /dev/null @@ -1,15 +0,0 
@@ -config: - name: create users with parameters - variables: - device_sn: ${gen_random_string(15)} - base_url: "http://127.0.0.1:5000" - -testcases: - create user $uid and check result for $device_sn.: - testcase: testcases/create_user.yml - variables: - uid: 1000 - device_sn: TESTSUITE_XXX - parameters: - uid: [101, 102, 103] - device_sn: [TESTSUITE_X1, TESTSUITE_X2]