mirror of
https://github.com/httprunner/httprunner.git
synced 2026-05-14 15:27:35 +08:00
omit request and response data when its length is greater than 512
This commit is contained in:
@@ -5,7 +5,7 @@ import time
|
||||
import requests
|
||||
import urllib3
|
||||
from httprunner import logger
|
||||
from httprunner.utils import build_url, lower_dict_keys
|
||||
from httprunner.utils import build_url, lower_dict_keys, omit_long_data
|
||||
from requests import Request, Response
|
||||
from requests.exceptions import (InvalidSchema, InvalidURL, MissingSchema,
|
||||
RequestException)
|
||||
@@ -115,6 +115,10 @@ class HttpSession(requests.Session):
|
||||
self.meta_data["request"].update(kwargs)
|
||||
self.meta_data["request"]["start_timestamp"] = time.time()
|
||||
|
||||
request_data = self.meta_data["request"].get("data")
|
||||
if request_data:
|
||||
self.meta_data["request"]["data"] = omit_long_data(request_data)
|
||||
|
||||
# prepend url with hostname unless it's already an absolute URL
|
||||
url = build_url(self.base_url, url)
|
||||
|
||||
@@ -155,14 +159,9 @@ class HttpSession(requests.Session):
|
||||
# try to record json data
|
||||
self.meta_data["response"]["json"] = response.json()
|
||||
except ValueError:
|
||||
# only record at most 1000 text characters
|
||||
# only record at most 512 text characters
|
||||
resp_text = response.text
|
||||
resp_text_length = len(resp_text)
|
||||
if resp_text_length > 1000:
|
||||
resp_text = resp_text[0:1000] \
|
||||
+ " ... OMITTED {} CHARACTORS ...".format(resp_text_length-1000)
|
||||
|
||||
self.meta_data["response"]["text"] = resp_text
|
||||
self.meta_data["response"]["text"] = omit_long_data(resp_text)
|
||||
|
||||
# get the length of the content, but if the argument stream is set to True, we take
|
||||
# the size from the content-length header, in order to not trigger fetching of the body
|
||||
|
||||
@@ -31,6 +31,9 @@ class ResponseObject(object):
|
||||
else:
|
||||
value = getattr(self.resp_obj, key)
|
||||
|
||||
if key in ["text", "content"]:
|
||||
value = utils.omit_long_data(value)
|
||||
|
||||
self.__dict__[key] = value
|
||||
return value
|
||||
except AttributeError:
|
||||
|
||||
@@ -252,19 +252,23 @@ class Runner(object):
|
||||
self.session_context.validate(validators, resp_obj)
|
||||
|
||||
except (exceptions.ParamsError, exceptions.ValidationFailure, exceptions.ExtractFailure):
|
||||
err_msg = "{} DETAILED REQUEST & RESPONSE {}\n".format("*" * 32, "*" * 32)
|
||||
|
||||
# log request
|
||||
err_req_msg = "request: \n"
|
||||
err_req_msg += "headers: {}\n".format(parsed_test_request.pop("headers", {}))
|
||||
err_msg += "====== request details ======\n"
|
||||
err_msg += "headers: {}\n".format(parsed_test_request.pop("headers", {}))
|
||||
for k, v in parsed_test_request.items():
|
||||
err_req_msg += "{}: {}\n".format(k, repr(v))
|
||||
logger.log_error(err_req_msg)
|
||||
v = utils.omit_long_data(v)
|
||||
err_msg += "{}: {}\n".format(k, repr(v))
|
||||
|
||||
err_msg += "\n"
|
||||
|
||||
# log response
|
||||
err_resp_msg = "response: \n"
|
||||
err_resp_msg += "status_code: {}\n".format(resp_obj.status_code)
|
||||
err_resp_msg += "headers: {}\n".format(resp_obj.headers)
|
||||
err_resp_msg += "body: {}\n".format(repr(resp_obj.text))
|
||||
logger.log_error(err_resp_msg)
|
||||
err_msg += "====== response details ======\n"
|
||||
err_msg += "status_code: {}\n".format(resp_obj.status_code)
|
||||
err_msg += "headers: {}\n".format(resp_obj.headers)
|
||||
err_msg += "body: {}\n".format(repr(resp_obj.text))
|
||||
logger.log_error(err_msg)
|
||||
|
||||
raise
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ import string
|
||||
from datetime import datetime
|
||||
|
||||
from httprunner import exceptions, logger
|
||||
from httprunner.compat import basestring, is_py2
|
||||
from httprunner.compat import basestring, bytes, is_py2
|
||||
from httprunner.exceptions import ParamsError
|
||||
|
||||
absolute_http_url_regexp = re.compile(r"^https?://", re.I)
|
||||
@@ -631,6 +631,25 @@ def prettify_json_file(file_list):
|
||||
print("success: {}".format(outfile))
|
||||
|
||||
|
||||
def omit_long_data(body, omit_len=512):
    """ Omit str/bytes data that is longer than omit_len.

    Args:
        body (str or bytes): data to truncate if it is too long; any
            non-text value is returned unchanged.
        omit_len (int): maximum number of leading characters/bytes to keep.

    Returns:
        str/bytes: the original body if its length is within omit_len,
            otherwise the first omit_len characters followed by a note
            stating how many characters were omitted. The return type
            matches the input type (bytes in, bytes out).

    """
    if not isinstance(body, basestring):
        # non-text payloads (dict, list, None, ...) pass through untouched
        return body

    body_len = len(body)
    if body_len <= omit_len:
        return body

    omitted_body = body[0:omit_len]

    # fix: misspelled "CHARACTORS" -> "CHARACTERS" in the user-visible note
    appendix_str = " ... OMITTED {} CHARACTERS ...".format(body_len - omit_len)
    if isinstance(body, bytes):
        # keep bytes + bytes concatenation valid on Python 3
        appendix_str = appendix_str.encode("utf-8")

    return omitted_body + appendix_str
|
||||
|
||||
|
||||
def dump_json_file(json_data, pwd_dir_path, dump_file_name):
|
||||
""" dump json data to file
|
||||
"""
|
||||
@@ -640,21 +659,26 @@ def dump_json_file(json_data, pwd_dir_path, dump_file_name):
|
||||
|
||||
dump_file_path = os.path.join(logs_dir_path, dump_file_name)
|
||||
|
||||
with io.open(dump_file_path, 'w', encoding='utf-8') as outfile:
|
||||
if is_py2:
|
||||
outfile.write(
|
||||
unicode(json.dumps(
|
||||
json_data,
|
||||
indent=4,
|
||||
separators=(',', ': '),
|
||||
ensure_ascii=False
|
||||
))
|
||||
)
|
||||
else:
|
||||
json.dump(json_data, outfile, indent=4, separators=(',', ': '))
|
||||
try:
|
||||
with io.open(dump_file_path, 'w', encoding='utf-8') as outfile:
|
||||
if is_py2:
|
||||
outfile.write(
|
||||
unicode(json.dumps(
|
||||
json_data,
|
||||
indent=4,
|
||||
separators=(',', ':'),
|
||||
ensure_ascii=False
|
||||
))
|
||||
)
|
||||
else:
|
||||
json.dump(json_data, outfile, indent=4, separators=(',', ':'))
|
||||
|
||||
msg = "dump file: {}".format(dump_file_path)
|
||||
logger.color_print(msg, "BLUE")
|
||||
msg = "dump file: {}".format(dump_file_path)
|
||||
logger.color_print(msg, "BLUE")
|
||||
|
||||
except TypeError:
|
||||
msg = "Failed to dump json file: {}".format(dump_file_path)
|
||||
logger.color_print(msg, "RED")
|
||||
|
||||
|
||||
def _prepare_dump_info(project_mapping, tag_name):
|
||||
|
||||
Reference in New Issue
Block a user