change: make har2case as hrun sub-command, usage: hrun har2case -h

This commit is contained in:
debugtalk
2020-04-12 17:41:29 +08:00
parent ac4122198f
commit 7a0b1b69e4
11 changed files with 1205 additions and 17 deletions

View File

@@ -7,6 +7,7 @@
- replace jsonschema validation with pydantic
- remove compatibility with testcase/testsuite format v1
- make `startproject` as hrun sub-command, usage: `hrun startproject <project_name>`
- make `har2case` as hrun sub-command, usage: `hrun har2case -h`
## 3.0.1 (2020-03-24)

View File

@@ -6,6 +6,7 @@ from loguru import logger
from httprunner import __description__, __version__
from httprunner.api import HttpRunner
from httprunner.ext.har2case import init_har2case_parser, main_har2case
from httprunner.ext.scaffold import init_parser_scaffold, main_scaffold
from httprunner.report import gen_html_report
@@ -83,6 +84,7 @@ def main():
subparsers = parser.add_subparsers(help='sub-command help')
sub_parser_run = init_parser_run(subparsers)
sub_parser_scaffold = init_parser_scaffold(subparsers)
sub_parser_har2case = init_har2case_parser(subparsers)
args = parser.parse_args()
@@ -111,6 +113,14 @@ def main():
main_scaffold(args)
elif sys.argv[1] == "har2case":
# hrun har2case
if len(sys.argv) == 2:
sub_parser_har2case.print_help()
sys.exit(0)
main_har2case(args)
if __name__ == '__main__':
main()

View File

@@ -0,0 +1,55 @@
""" Convert HAR (HTTP Archive) to YAML/JSON testcase for HttpRunner.
Usage:
# convert to JSON format testcase
$ hrun har2case demo.har
# convert to YAML format testcase
$ hrun har2case demo.har -2y
"""
import os
import sys
from loguru import logger
from httprunner.ext.har2case.core import HarParser
def init_har2case_parser(subparsers):
    """ HAR converter: parse command line options and run commands.
    """
    sub_parser = subparsers.add_parser(
        "har2case",
        help="Convert HAR(HTTP Archive) to YAML/JSON testcases for HttpRunner.",
    )

    # positional HAR file path; optional (nargs="?") so that a bare
    # `hrun har2case` can fall through to printing help in the CLI
    sub_parser.add_argument(
        "har_source_file",
        nargs="?",
        help="Specify HAR source file",
    )

    # output format switch: YAML when given, JSON otherwise
    sub_parser.add_argument(
        "-2y",
        "--to-yml",
        "--to-yaml",
        dest="to_yaml",
        action="store_true",
        help="Convert to YAML format, if not specified, convert to JSON format by default.",
    )

    # keep only entries whose url contains this keyword
    sub_parser.add_argument(
        "--filter",
        help="Specify filter keyword, only url include filter string will be converted.",
    )

    # drop entries whose url contains any of the "|"-joined keywords
    sub_parser.add_argument(
        "--exclude",
        help="Specify exclude keyword, url that includes exclude string will be ignored, "
             "multiple keywords can be joined with '|'",
    )

    return sub_parser
def main_har2case(args):
    """ Entry point of `hrun har2case`: convert a HAR file to a testcase.

    Args:
        args: parsed CLI namespace with attributes
            har_source_file, to_yaml, filter, exclude.

    Returns:
        int: 0 on success; the process exits with code 1 on invalid input.
    """
    har_source_file = args.har_source_file

    # validate in two steps so the error message matches the actual problem
    # (previously a file with a wrong suffix was reported as "not specified")
    if not har_source_file:
        logger.error("HAR file not specified.")
        sys.exit(1)
    if not har_source_file.endswith(".har"):
        logger.error(f"HAR file does not have .har suffix: {har_source_file}")
        sys.exit(1)
    if not os.path.isfile(har_source_file):
        logger.error(f"HAR file not exists: {har_source_file}")
        sys.exit(1)

    # gen_testcase treats "JSON" as JSON output and any other value as YAML,
    # using the lower-cased value as the output file extension
    output_file_type = "YML" if args.to_yaml else "JSON"
    HarParser(
        har_source_file, args.filter, args.exclude
    ).gen_testcase(output_file_type)

    return 0

View File

@@ -0,0 +1,366 @@
import base64
import json
import os
import sys
import urllib.parse as urlparse
from loguru import logger
from httprunner.ext.har2case import utils
# json.decoder.JSONDecodeError exists on Python 3.5+; fall back to its base
# class ValueError on older interpreters so except-clauses keep working.
try:
    from json.decoder import JSONDecodeError
except ImportError:
    JSONDecodeError = ValueError

# Request headers (compared lower-cased) that are transport/session specific
# and therefore dropped when generating testcases; the ":"-prefixed names are
# HTTP/2 pseudo-headers.
IGNORE_REQUEST_HEADERS = [
    "host",
    "accept",
    "content-length",
    "connection",
    "accept-encoding",
    "accept-language",
    "origin",
    "cache-control",
    "pragma",
    "upgrade-insecure-requests",
    ":authority",
    ":method",
    ":scheme",
    ":path"
]
class HarParser(object):
    """ Convert a HAR (HTTP Archive) file into an HttpRunner testcase file.

    Each HAR log entry becomes one teststep (request + validate); the result
    is dumped as JSON or YAML next to the source HAR file.
    """

    def __init__(self, har_file_path, filter_str=None, exclude_str=None):
        # path of the .har source file
        self.har_file_path = har_file_path
        # only urls containing this substring are converted; None disables filtering
        self.filter_str = filter_str
        # "|"-joined keywords; a url containing any of them is skipped
        self.exclude_str = exclude_str or ""

    def __make_request_url(self, teststep_dict, entry_json):
        """ parse HAR entry request url and queryString, and make teststep url and params

        Args:
            entry_json (dict):
                {
                    "request": {
                        "url": "https://httprunner.top/home?v=1&w=2",
                        "queryString": [
                            {"name": "v", "value": "1"},
                            {"name": "w", "value": "2"}
                        ],
                    },
                    "response": {}
                }

        Returns:
            {
                "name": "/home",
                "request": {
                    url: "https://httprunner.top/home",
                    params: {"v": "1", "w": "2"}
                }
            }

        """
        request_params = utils.convert_list_to_dict(
            entry_json["request"].get("queryString", [])
        )
        url = entry_json["request"].get("url")
        if not url:
            # NOTE(review): logger.exception outside an except block logs
            # without a traceback; logger.error would express intent better.
            logger.exception("url missed in request.")
            sys.exit(1)
        parsed_object = urlparse.urlparse(url)
        if request_params:
            # move the query string into a separate "params" mapping and
            # strip it from the url itself
            parsed_object = parsed_object._replace(query='')
            teststep_dict["request"]["url"] = parsed_object.geturl()
            teststep_dict["request"]["params"] = request_params
        else:
            teststep_dict["request"]["url"] = url
        # the url path doubles as the teststep name
        teststep_dict["name"] = parsed_object.path

    def __make_request_method(self, teststep_dict, entry_json):
        """ parse HAR entry request method, and make teststep method.
        """
        method = entry_json["request"].get("method")
        if not method:
            logger.exception("method missed in request.")
            sys.exit(1)
        teststep_dict["request"]["method"] = method

    def __make_request_headers(self, teststep_dict, entry_json):
        """ parse HAR entry request headers, and make teststep headers.
            header in IGNORE_REQUEST_HEADERS will be ignored.

        Args:
            entry_json (dict):
                {
                    "request": {
                        "headers": [
                            {"name": "Host", "value": "httprunner.top"},
                            {"name": "Content-Type", "value": "application/json"},
                            {"name": "User-Agent", "value": "iOS/10.3"}
                        ],
                    },
                    "response": {}
                }

        Returns:
            {
                "request": {
                    headers: {"Content-Type": "application/json"}
                }
            }

        """
        teststep_headers = {}
        for header in entry_json["request"].get("headers", []):
            # drop transport/session-specific headers, matched case-insensitively
            if header["name"].lower() in IGNORE_REQUEST_HEADERS:
                continue
            teststep_headers[header["name"]] = header["value"]
        if teststep_headers:
            teststep_dict["request"]["headers"] = teststep_headers

    def _make_request_data(self, teststep_dict, entry_json):
        """ parse HAR entry request data, and make teststep request data

        Args:
            entry_json (dict):
                {
                    "request": {
                        "method": "POST",
                        "postData": {
                            "mimeType": "application/x-www-form-urlencoded; charset=utf-8",
                            "params": [
                                {"name": "a", "value": 1},
                                {"name": "b", "value": "2"}
                            ]
                        },
                    },
                    "response": {...}
                }

        Returns:
            {
                "request": {
                    "method": "POST",
                    "data": {"a": 1, "b": "2"}
                }
            }

        """
        method = entry_json["request"].get("method")
        # only methods that carry a body are inspected
        if method in ["POST", "PUT", "PATCH"]:
            postData = entry_json["request"].get("postData", {})
            mimeType = postData.get("mimeType")
            # Note that text and params fields are mutually exclusive.
            if "text" in postData:
                post_data = postData.get("text")
            else:
                params = postData.get("params", [])
                post_data = utils.convert_list_to_dict(params)
            request_data_key = "data"
            if not mimeType:
                pass
            elif mimeType.startswith("application/json"):
                try:
                    post_data = json.loads(post_data)
                    request_data_key = "json"
                except JSONDecodeError:
                    # undecodable JSON text (e.g. empty string) stays
                    # verbatim under the "data" key
                    pass
            elif mimeType.startswith("application/x-www-form-urlencoded"):
                post_data = utils.convert_x_www_form_urlencoded_to_dict(post_data)
            else:
                # TODO: make compatible with more mimeType
                pass
            teststep_dict["request"][request_data_key] = post_data

    def _make_validate(self, teststep_dict, entry_json):
        """ parse HAR entry response and make teststep validate.

        Args:
            entry_json (dict):
                {
                    "request": {},
                    "response": {
                        "status": 200,
                        "headers": [
                            {
                                "name": "Content-Type",
                                "value": "application/json; charset=utf-8"
                            },
                        ],
                        "content": {
                            "size": 71,
                            "mimeType": "application/json; charset=utf-8",
                            "text": "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0=",
                            "encoding": "base64"
                        }
                    }
                }

        Returns:
            {
                "validate": [
                    {"eq": ["status_code", 200]}
                ]
            }

        """
        # the response status code is always asserted
        teststep_dict["validate"].append(
            {"eq": ["status_code", entry_json["response"].get("status")]}
        )
        # NOTE(review): assumes "content" is present in the response; a HAR
        # entry without it would raise AttributeError below — confirm.
        resp_content_dict = entry_json["response"].get("content")
        headers_mapping = utils.convert_list_to_dict(
            entry_json["response"].get("headers", [])
        )
        if "Content-Type" in headers_mapping:
            teststep_dict["validate"].append(
                {"eq": ["headers.Content-Type", headers_mapping["Content-Type"]]}
            )
        text = resp_content_dict.get("text")
        if not text:
            return
        mime_type = resp_content_dict.get("mimeType")
        if mime_type and mime_type.startswith("application/json"):
            encoding = resp_content_dict.get("encoding")
            if encoding and encoding == "base64":
                # HAR stores binary-safe response bodies base64-encoded
                content = base64.b64decode(text).decode('utf-8')
            else:
                content = text
            try:
                resp_content_json = json.loads(content)
            except JSONDecodeError:
                logger.warning(
                    "response content can not be loaded as json: {}".format(content.encode("utf-8"))
                )
                return
            if not isinstance(resp_content_json, dict):
                return
            # only top-level scalar fields become validators; nested
            # structures (dict/list values) are skipped
            for key, value in resp_content_json.items():
                if isinstance(value, (dict, list)):
                    continue
                teststep_dict["validate"].append(
                    {"eq": ["content.{}".format(key), value]}
                )

    def _prepare_teststep(self, entry_json):
        """ extract info from entry dict and make teststep

        Args:
            entry_json (dict):
                {
                    "request": {
                        "method": "POST",
                        "url": "https://httprunner.top/api/v1/Account/Login",
                        "headers": [],
                        "queryString": [],
                        "postData": {},
                    },
                    "response": {
                        "status": 200,
                        "headers": [],
                        "content": {}
                    }
                }

        """
        teststep_dict = {
            "name": "",
            "request": {},
            "validate": []
        }
        self.__make_request_url(teststep_dict, entry_json)
        self.__make_request_method(teststep_dict, entry_json)
        self.__make_request_headers(teststep_dict, entry_json)
        self._make_request_data(teststep_dict, entry_json)
        self._make_validate(teststep_dict, entry_json)
        return teststep_dict

    def _prepare_config(self):
        """ prepare config block.
        """
        return {
            "name": "testcase description",
            "variables": {}
        }

    def _prepare_teststeps(self):
        """ make teststep list.
            teststeps list are parsed from HAR log entries list.
        """

        def is_exclude(url, exclude_str):
            # exclude_str may hold several keywords joined with "|"; the url
            # is excluded when it contains any non-empty keyword
            exclude_str_list = exclude_str.split("|")
            for exclude_str in exclude_str_list:
                if exclude_str and exclude_str in url:
                    return True
            return False

        teststeps = []
        log_entries = utils.load_har_log_entries(self.har_file_path)
        for entry_json in log_entries:
            url = entry_json["request"].get("url")
            if self.filter_str and self.filter_str not in url:
                continue
            if is_exclude(url, self.exclude_str):
                continue
            teststeps.append(
                self._prepare_teststep(entry_json)
            )
        return teststeps

    def _make_testcase(self):
        """ Extract info from HAR file and prepare for testcase
        """
        logger.info("Extract info from HAR file and prepare for testcase.")
        config = self._prepare_config()
        teststeps = self._prepare_teststeps()
        testcase = {
            "config": config,
            "teststeps": teststeps
        }
        return testcase

    def gen_testcase(self, file_type="JSON"):
        """ Generate a testcase file next to the source HAR file.

        Args:
            file_type (str): "JSON" dumps a .json file; any other value
                (e.g. "YML", "YAML") dumps YAML, with the lower-cased
                file_type used as the output file extension.
        """
        logger.info(f"Start to generate testcase from {self.har_file_path}")
        # output file shares the HAR file's base name
        harfile = os.path.splitext(self.har_file_path)[0]
        output_testcase_file = "{}.{}".format(harfile, file_type.lower())
        testcase = self._make_testcase()
        logger.debug("prepared testcase: {}".format(testcase))
        if file_type == "JSON":
            utils.dump_json(testcase, output_testcase_file)
        else:
            utils.dump_yaml(testcase, output_testcase_file)
        logger.info(f"generated testcase: {output_testcase_file}")

View File

@@ -0,0 +1,223 @@
{
"log": {
"version": "1.2",
"creator": {
"name": "Charles Proxy",
"version": "4.2.1"
},
"entries": [
{
"startedDateTime": "2018-02-19T17:30:00.904+08:00",
"time": 3,
"request": {
"method": "POST",
"url": "http://127.0.0.1:5000/api/get-token",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [
{
"name": "Host",
"value": "127.0.0.1:5000"
},
{
"name": "User-Agent",
"value": "python-requests/2.18.4"
},
{
"name": "Accept-Encoding",
"value": "gzip, deflate"
},
{
"name": "Accept",
"value": "*/*"
},
{
"name": "Connection",
"value": "keep-alive"
},
{
"name": "device_sn",
"value": "FwgRiO7CNA50DSU"
},
{
"name": "user_agent",
"value": "iOS/10.3"
},
{
"name": "os_platform",
"value": "ios"
},
{
"name": "app_version",
"value": "2.8.6"
},
{
"name": "Content-Length",
"value": "52"
},
{
"name": "Content-Type",
"value": "application/json"
}
],
"queryString": [],
"postData": {
"mimeType": "application/json",
"text": "{\"sign\": \"958a05393efef0ac7c0fb80a7eac45e24fd40c27\"}"
},
"headersSize": 299,
"bodySize": 52
},
"response": {
"_charlesStatus": "COMPLETE",
"status": 200,
"statusText": "OK",
"httpVersion": "HTTP/1.0",
"cookies": [],
"headers": [
{
"name": "Content-Type",
"value": "application/json"
},
{
"name": "Content-Length",
"value": "46"
},
{
"name": "Server",
"value": "Werkzeug/0.14.1 Python/3.6.4"
},
{
"name": "Date",
"value": "Mon, 19 Feb 2018 09:30:00 GMT"
},
{
"name": "Proxy-Connection",
"value": "Close"
}
],
"content": {
"size": 46,
"mimeType": "application/json",
"text": "eyJzdWNjZXNzIjogdHJ1ZSwgInRva2VuIjogImJhTkxYMXpoRllQMTFTZWIifQ\u003d\u003d",
"encoding": "base64"
},
"headersSize": 175,
"bodySize": 46
},
"serverIPAddress": "127.0.0.1",
"cache": {},
"timings": {
"dns": 1,
"connect": 0,
"ssl": -1,
"send": 0,
"wait": 1,
"receive": 1
}
},
{
"startedDateTime": "2018-02-19T17:30:00.911+08:00",
"time": 3,
"request": {
"method": "POST",
"url": "http://127.0.0.1:5000/api/users/1000",
"httpVersion": "HTTP/1.1",
"cookies": [],
"headers": [
{
"name": "Host",
"value": "127.0.0.1:5000"
},
{
"name": "User-Agent",
"value": "python-requests/2.18.4"
},
{
"name": "Accept-Encoding",
"value": "gzip, deflate"
},
{
"name": "Accept",
"value": "*/*"
},
{
"name": "Connection",
"value": "keep-alive"
},
{
"name": "device_sn",
"value": "FwgRiO7CNA50DSU"
},
{
"name": "token",
"value": "baNLX1zhFYP11Seb"
},
{
"name": "Content-Length",
"value": "39"
},
{
"name": "Content-Type",
"value": "application/json"
}
],
"queryString": [],
"postData": {
"mimeType": "application/json",
"text": "{\"name\": \"user1\", \"password\": \"123456\"}"
},
"headersSize": 265,
"bodySize": 39
},
"response": {
"_charlesStatus": "COMPLETE",
"status": 201,
"statusText": "CREATED",
"httpVersion": "HTTP/1.0",
"cookies": [],
"headers": [
{
"name": "Content-Type",
"value": "application/json"
},
{
"name": "Content-Length",
"value": "54"
},
{
"name": "Server",
"value": "Werkzeug/0.14.1 Python/3.6.4"
},
{
"name": "Date",
"value": "Mon, 19 Feb 2018 09:30:00 GMT"
},
{
"name": "Proxy-Connection",
"value": "Close"
}
],
"content": {
"size": 54,
"mimeType": "application/json",
"text": "eyJzdWNjZXNzIjogdHJ1ZSwgIm1zZyI6ICJ1c2VyIGNyZWF0ZWQgc3VjY2Vzc2Z1bGx5LiJ9",
"encoding": "base64"
},
"headersSize": 77,
"bodySize": 54
},
"serverIPAddress": "127.0.0.1",
"cache": {},
"timings": {
"dns": 0,
"connect": 0,
"ssl": -1,
"send": 0,
"wait": 3,
"receive": 0
}
}
]
}
}

View File

@@ -0,0 +1,148 @@
{
"log": {
"version": "1.2",
"creator": {
"name": "Charles Proxy",
"version": "4.2"
},
"entries": [
{
"startedDateTime": "2017-11-13T11:40:07.212+08:00",
"time": 35,
"request": {
"method": "POST",
"url": "https://httprunner.top/api/v1/Account/Login",
"httpVersion": "HTTP/1.1",
"cookies": [
{
"name": "lang",
"value": "zh"
}
],
"headers": [
{
"name": "Host",
"value": "httprunner.top"
},
{
"name": "Connection",
"value": "keep-alive"
},
{
"name": "Content-Length",
"value": "50"
},
{
"name": "Accept",
"value": "application/json"
},
{
"name": "Origin",
"value": "https://httprunner.top"
},
{
"name": "User-Agent",
"value": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36"
},
{
"name": "Content-Type",
"value": "application/json"
},
{
"name": "Referer",
"value": "https://httprunner.top/login"
},
{
"name": "Accept-Encoding",
"value": "gzip, deflate, br"
},
{
"name": "Accept-Language",
"value": "en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4"
}
],
"queryString": [],
"postData": {
"mimeType": "application/json",
"text": "{\"UserName\":\"test001\",\"Pwd\":\"123\",\"VerCode\":\"\"}"
},
"headersSize": 640,
"bodySize": 50
},
"response": {
"_charlesStatus": "COMPLETE",
"status": 200,
"statusText": "OK",
"httpVersion": "HTTP/1.1",
"cookies": [
{
"name": "lang",
"value": "zh",
"path": "/",
"domain": ".httprunner.top",
"expires": null,
"httpOnly": false,
"secure": false,
"comment": null,
"_maxAge": null
}
],
"headers": [
{
"name": "Date",
"value": "Mon, 13 Nov 2017 03:40:07 GMT"
},
{
"name": "Content-Type",
"value": "application/json; charset=utf-8"
},
{
"name": "Content-Length",
"value": "71"
},
{
"name": "Cache-Control",
"value": "no-cache"
},
{
"name": "Pragma",
"value": "no-cache"
},
{
"name": "Expires",
"value": "-1"
},
{
"name": "Server",
"value": "Microsoft-IIS/8.5"
},
{
"name": "X-AspNet-Version",
"value": "4.0.30319"
}
],
"content": {
"size": 71,
"mimeType": "application/json; charset=utf-8",
"text": "eyJJc1N1Y2Nlc3MiOnRydWUsIkNvZGUiOjIwMCwiTWVzc2FnZSI6bnVsbCwiVmFsdWUiOnsiQmxuUmVzdWx0Ijp0cnVlfX0=",
"encoding": "base64"
},
"redirectURL": null,
"headersSize": 0,
"bodySize": 71
},
"serverIPAddress": "192.168.1.169",
"cache": {},
"timings": {
"dns": -1,
"connect": -1,
"ssl": -1,
"send": 6,
"wait": 28,
"receive": 1
}
}
]
}
}

View File

@@ -0,0 +1,206 @@
import os
from httprunner.ext.har2case.utils import load_har_log_entries
from httprunner.ext.har2case.core import HarParser
from httprunner.ext.har2case.test_utils import TestUtils
class TestHar(TestUtils):
    """ Tests for HarParser, driven by the data/demo.har fixture that
    TestUtils.setUpClass prepares.
    """

    def setUp(self):
        # a fresh parser per test, with no filter/exclude configured
        self.har_parser = HarParser(self.har_path)

    def test_prepare_teststep(self):
        log_entries = load_har_log_entries(self.har_path)
        teststep_dict = self.har_parser._prepare_teststep(log_entries[0])
        self.assertIn("name", teststep_dict)
        self.assertIn("request", teststep_dict)
        self.assertIn("validate", teststep_dict)
        # map check_item -> expected value for direct lookups below
        validators_mapping = {
            validator["eq"][0]: validator["eq"][1]
            for validator in teststep_dict["validate"]
        }
        self.assertEqual(
            validators_mapping["status_code"], 200
        )
        self.assertEqual(
            validators_mapping["content.IsSuccess"], True
        )
        self.assertEqual(
            validators_mapping["content.Code"], 200
        )
        self.assertEqual(
            validators_mapping["content.Message"], None
        )

    def test_prepare_teststeps(self):
        teststeps = self.har_parser._prepare_teststeps()
        self.assertIsInstance(teststeps, list)
        self.assertIn("name", teststeps[0])
        self.assertIn("request", teststeps[0])
        self.assertIn("validate", teststeps[0])

    def test_gen_testcase_yaml(self):
        # output file extension is the lower-cased file_type
        yaml_file = os.path.join(
            os.path.dirname(__file__), "data", "demo.yaml")
        self.har_parser.gen_testcase(file_type="YAML")
        self.assertTrue(os.path.isfile(yaml_file))
        os.remove(yaml_file)

    def test_gen_testcase_json(self):
        json_file = os.path.join(
            os.path.dirname(__file__), "data", "demo.json")
        self.har_parser.gen_testcase(file_type="JSON")
        self.assertTrue(os.path.isfile(json_file))
        os.remove(json_file)

    def test_filter(self):
        # keyword present in the fixture url: entry is kept
        filter_str = "httprunner"
        har_parser = HarParser(self.har_path, filter_str)
        teststeps = har_parser._prepare_teststeps()
        self.assertEqual(
            teststeps[0]["request"]["url"],
            "https://httprunner.top/api/v1/Account/Login"
        )
        # keyword absent from the fixture url: nothing is converted
        filter_str = "debugtalk"
        har_parser = HarParser(self.har_path, filter_str)
        teststeps = har_parser._prepare_teststeps()
        self.assertEqual(teststeps, [])

    def test_exclude(self):
        # keyword absent from the fixture url: entry is kept
        exclude_str = "debugtalk"
        har_parser = HarParser(self.har_path, exclude_str=exclude_str)
        teststeps = har_parser._prepare_teststeps()
        self.assertEqual(
            teststeps[0]["request"]["url"],
            "https://httprunner.top/api/v1/Account/Login"
        )
        # keyword present in the fixture url: entry is excluded
        exclude_str = "httprunner"
        har_parser = HarParser(self.har_path, exclude_str=exclude_str)
        teststeps = har_parser._prepare_teststeps()
        self.assertEqual(teststeps, [])

    def test_exclude_multiple(self):
        # matching ANY of the "|"-joined keywords excludes the entry
        exclude_str = "httprunner|v2"
        har_parser = HarParser(self.har_path, exclude_str=exclude_str)
        teststeps = har_parser._prepare_teststeps()
        self.assertEqual(teststeps, [])
        exclude_str = "http2|v1"
        har_parser = HarParser(self.har_path, exclude_str=exclude_str)
        teststeps = har_parser._prepare_teststeps()
        self.assertEqual(teststeps, [])

    def test_make_request_data_params(self):
        # x-www-form-urlencoded params list becomes a "data" dict
        testcase_dict = {
            "name": "",
            "request": {},
            "validate": []
        }
        entry_json = {
            "request": {
                "method": "POST",
                "postData": {
                    "mimeType": "application/x-www-form-urlencoded; charset=utf-8",
                    "params": [
                        {"name": "a", "value": 1},
                        {"name": "b", "value": "2"}
                    ]
                },
            }
        }
        self.har_parser._make_request_data(testcase_dict, entry_json)
        self.assertEqual(testcase_dict["request"]["data"]["a"], 1)
        self.assertEqual(testcase_dict["request"]["data"]["b"], "2")

    def test_make_request_data_json(self):
        # application/json text is decoded and stored under "json"
        testcase_dict = {
            "name": "",
            "request": {},
            "validate": []
        }
        entry_json = {
            "request": {
                "method": "POST",
                "postData": {
                    "mimeType": "application/json; charset=utf-8",
                    "text": "{\"a\":\"1\",\"b\":\"2\"}"
                },
            }
        }
        self.har_parser._make_request_data(testcase_dict, entry_json)
        self.assertEqual(
            testcase_dict["request"]["json"],
            {'a': '1', 'b': '2'}
        )

    def test_make_request_data_text_empty(self):
        # empty JSON text cannot be decoded, so it stays under "data"
        testcase_dict = {
            "name": "",
            "request": {},
            "validate": []
        }
        entry_json = {
            "request": {
                "method": "POST",
                "postData": {
                    "mimeType": "application/json; charset=utf-8",
                    "text": ""
                },
            }
        }
        self.har_parser._make_request_data(testcase_dict, entry_json)
        self.assertEqual(
            testcase_dict["request"]["data"],
            ""
        )

    def test_make_validate(self):
        # non-JSON body (despite JSON mimeType) yields only the status_code
        # and Content-Type validators
        testcase_dict = {
            "name": "",
            "request": {},
            "validate": []
        }
        entry_json = {
            "request": {},
            "response": {
                "status": 200,
                "headers": [
                    {
                        "name": "Content-Type",
                        "value": "application/json; charset=utf-8"
                    },
                ],
                "content": {
                    "size": 71,
                    "mimeType": "application/json; charset=utf-8",
                    # raw response content text is application/jose type
                    "text": "ZXlKaGJHY2lPaUpTVTBFeFh6VWlMQ0psYm1NaU9pSkJNVEk0UTBKRExV",
                    "encoding": "base64"
                }
            }
        }
        self.har_parser._make_validate(testcase_dict, entry_json)
        self.assertEqual(
            testcase_dict["validate"][0],
            {"eq": ["status_code", 200]}
        )
        self.assertEqual(
            testcase_dict["validate"][1],
            {"eq": ["headers.Content-Type", "application/json; charset=utf-8"]}
        )

    def test_make_testcase(self):
        # demo-quickstart.har holds two entries -> two teststeps
        har_path = os.path.join(
            os.path.dirname(__file__), "data", "demo-quickstart.har")
        har_parser = HarParser(har_path)
        testcase = har_parser._make_testcase()
        self.assertIsInstance(testcase, dict)
        self.assertIn("config", testcase)
        self.assertIn("teststeps", testcase)
        self.assertEqual(len(testcase["teststeps"]), 2)

View File

@@ -0,0 +1,65 @@
import json
import os
import unittest
from httprunner.ext.har2case import utils
class TestUtils(unittest.TestCase):
    """ Tests for har2case.utils; also prepares shared fixture paths
    (data/demo.har plus generated empty / empty_json files) reused by
    subclasses such as TestHar.
    """

    @staticmethod
    def create_har_file(file_name, content):
        # write content as JSON into data/<file_name>.har and return its path
        file_path = os.path.join(
            os.path.dirname(__file__), "data", "{}.har".format(file_name))
        with open(file_path, "w") as f:
            f.write(json.dumps(content))
        return file_path

    @classmethod
    def setUpClass(cls):
        cls.har_path = os.path.join(
            os.path.dirname(__file__), "data", "demo.har")
        # invalid fixtures used to exercise error handling
        cls.empty_file_path = TestUtils.create_har_file(file_name="empty", content="")
        cls.empty_json_file_path = TestUtils.create_har_file(file_name="empty_json", content={})

    @classmethod
    def tearDownClass(cls):
        os.remove(cls.empty_file_path)
        os.remove(cls.empty_json_file_path)

    def test_load_har_log_entries(self):
        log_entries = utils.load_har_log_entries(self.har_path)
        self.assertIsInstance(log_entries, list)
        self.assertIn("request", log_entries[0])
        self.assertIn("response", log_entries[0])

    def test_load_har_log_key_error(self):
        # "{}" parses as JSON but lacks log/entries -> sys.exit(1)
        with self.assertRaises(SystemExit):
            utils.load_har_log_entries(self.empty_json_file_path)

    def test_load_har_log_empty_error(self):
        # '""' is valid JSON but not a dict -> sys.exit(1)
        with self.assertRaises(SystemExit):
            utils.load_har_log_entries(self.empty_file_path)

    # def test_x_www_form_urlencoded(self):
    #     origin_dict = {"a":1, "b": "2"}
    #     self.assertIn("a=1", utils.x_www_form_urlencoded(origin_dict))
    #     self.assertIn("b=2", utils.x_www_form_urlencoded(origin_dict))

    def test_convert_list_to_dict(self):
        origin_list = [
            {"name": "v", "value": "1"},
            {"name": "w", "value": "2"}
        ]
        self.assertEqual(
            utils.convert_list_to_dict(origin_list),
            {"v": "1", "w": "2"}
        )

    def test_convert_x_www_form_urlencoded_to_dict(self):
        origin_str = "a=1&b=2"
        converted_dict = utils.convert_x_www_form_urlencoded_to_dict(origin_str)
        self.assertIsInstance(converted_dict, dict)
        self.assertEqual(converted_dict["a"], "1")
        self.assertEqual(converted_dict["b"], "2")

View File

@@ -0,0 +1,130 @@
import io
import json
import logging
import sys
from json.decoder import JSONDecodeError
from urllib.parse import unquote
import yaml
def load_har_log_entries(file_path):
    """ Load a HAR file and return its log entries list.

    Args:
        file_path (str): path to a HAR (JSON) file.

    Returns:
        list: entries, e.g.
            [
                {"request": {}, "response": {}},
                {"request": {}, "response": {}}
            ]

    Note:
        Exits the process with code 1 when the file is not valid HAR JSON.
    """
    # "utf-8-sig" transparently strips a UTF-8 BOM that some proxy tools
    # prepend to exported HAR files.
    # Opened read-only: the previous "r+" mode required write permission
    # just to read the file and failed on read-only HAR files.
    with io.open(file_path, "r", encoding="utf-8-sig") as f:
        try:
            content_json = json.loads(f.read())
            return content_json["log"]["entries"]
        except (KeyError, TypeError, JSONDecodeError):
            logging.error("HAR file content error: {}".format(file_path))
            sys.exit(1)
def x_www_form_urlencoded(post_data):
    """ Join a params dict into an x-www-form-urlencoded style string.

    Args:
        post_data (dict): e.g. {"a": 1, "b": 2}

    Returns:
        str: e.g. "a=1&b=2"; non-dict input is returned unchanged.
    """
    # pass anything that is not a dict straight through
    if not isinstance(post_data, dict):
        return post_data

    pairs = []
    for key, value in post_data.items():
        pairs.append(u"{}={}".format(key, value))
    return "&".join(pairs)
def convert_x_www_form_urlencoded_to_dict(post_data):
    """ convert x_www_form_urlencoded data to dict

    Args:
        post_data (str): a=1&b=2

    Returns:
        dict: {"a": "1", "b": "2"}; non-str input is returned unchanged.

    Raises:
        Exception: when a pair contains no "=" separator.
    """
    if isinstance(post_data, str):
        converted_dict = {}
        for k_v in post_data.split("&"):
            try:
                # maxsplit=1 keeps "=" characters inside the value intact,
                # e.g. "token=abc=def" -> {"token": "abc=def"}
                # (a bare split("=") raised ValueError on such values)
                key, value = k_v.split("=", 1)
            except ValueError:
                raise Exception(
                    "Invalid x_www_form_urlencoded data format: {}".format(post_data)
                )
            # percent-decode the value (e.g. "%20" -> " ")
            converted_dict[key] = unquote(value)
        return converted_dict
    else:
        return post_data
def convert_list_to_dict(origin_list):
    """ Flatten a HAR name/value pair list into a plain mapping.

    Args:
        origin_list (list): e.g.
            [
                {"name": "v", "value": "1"},
                {"name": "w", "value": "2"}
            ]

    Returns:
        dict: e.g. {"v": "1", "w": "2"}
    """
    mapping = {}
    for pair in origin_list:
        # a pair without "value" maps to None
        mapping[pair["name"]] = pair.get("value")
    return mapping
def dump_yaml(testcase, yaml_file):
    """ Dump the prepared testcase dict to a YAML file.

    Args:
        testcase (dict): testcase with "config" and "teststeps".
        yaml_file (str): output file path.
    """
    logging.info("dump testcase to YAML format.")

    with io.open(yaml_file, 'w', encoding="utf-8") as out:
        yaml.dump(
            testcase,
            out,
            allow_unicode=True,
            default_flow_style=False,
            indent=4,
        )

    logging.info("Generate YAML testcase successfully: {}".format(yaml_file))
def dump_json(testcase, json_file):
    """ Dump the prepared testcase dict to a JSON file.

    Args:
        testcase (dict): testcase with "config" and "teststeps".
        json_file (str): output file path.
    """
    logging.info("dump testcase to JSON format.")
    with io.open(json_file, 'w', encoding="utf-8") as outfile:
        # ensure_ascii=False keeps non-ASCII characters readable in the output.
        # json.dumps always returns str on Python 3, so the former
        # bytes-decoding fallback was dead code and has been removed.
        outfile.write(json.dumps(testcase, ensure_ascii=False, indent=4))
    logging.info("Generate JSON testcase successfully: {}".format(json_file))

17
poetry.lock generated
View File

@@ -122,17 +122,6 @@ optional = false
python-versions = "*"
version = "0.9.0"
[[package]]
category = "main"
description = "Convert HAR(HTTP Archive) to YAML/JSON testcases for HttpRunner."
name = "har2case"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4"
version = "0.3.1"
[package.dependencies]
PyYAML = "*"
[[package]]
category = "dev"
description = "A collection of framework independent HTTP protocol utils."
@@ -356,7 +345,7 @@ version = "1.0.1"
dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
[metadata]
content-hash = "ff30b34e5d7a0934029a6ea20ebd43fc9ea769c4bbb2e63890de6d399d756da0"
content-hash = "e1204ede1ab227bc33783b362d866c2a0b1fb8faba283216b2973e2261b0b966"
python-versions = "^3.6"
[metadata.files]
@@ -437,10 +426,6 @@ h11 = [
{file = "h11-0.9.0-py2.py3-none-any.whl", hash = "sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"},
{file = "h11-0.9.0.tar.gz", hash = "sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1"},
]
har2case = [
{file = "har2case-0.3.1-py2.py3-none-any.whl", hash = "sha256:84d3a5cc9fbb16e45372e7e880a936c59bbe8e9b66bad81927769e64f608e2af"},
{file = "har2case-0.3.1.tar.gz", hash = "sha256:8f159ec7cba82ec4282f46af4a9dac89f65e62796521b2426d3c89c3c9fd8579"},
]
httptools = [
{file = "httptools-0.1.1-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce"},
{file = "httptools-0.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4"},

View File

@@ -33,7 +33,6 @@ requests = "^2.22.0"
requests-toolbelt = "^0.9.1"
pyyaml = "^5.1.2"
jinja2 = "^2.10.3"
har2case = "^0.3.1"
filetype = "^1.0.5"
jsonpath = "^0.82"
pydantic = "^1.4"