mirror of
https://github.com/httprunner/httprunner.git
synced 2026-05-11 18:11:21 +08:00
Merge pull request #1285 from billduan/master
pytest engine support new step types: - sql handler - thrift protocol
This commit is contained in:
@@ -1,12 +1,23 @@
|
||||
__version__ = "v4.0.0"
|
||||
__description__ = "One-stop solution for HTTP(S) testing."
|
||||
|
||||
|
||||
from httprunner.config import Config
|
||||
from httprunner.parser import parse_parameters as Parameters
|
||||
from httprunner.runner import HttpRunner
|
||||
from httprunner.step import Step
|
||||
from httprunner.step_request import RunRequest
|
||||
from httprunner.step_sql_request import (
|
||||
RunSqlRequest,
|
||||
StepSqlRequestExtraction,
|
||||
StepSqlRequestValidation,
|
||||
)
|
||||
from httprunner.step_testcase import RunTestCase
|
||||
from httprunner.step_thrift_request import (
|
||||
RunThriftRequest,
|
||||
StepThriftRequestExtraction,
|
||||
StepThriftRequestValidation,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"__version__",
|
||||
@@ -15,6 +26,12 @@ __all__ = [
|
||||
"Config",
|
||||
"Step",
|
||||
"RunRequest",
|
||||
"RunSqlRequest",
|
||||
"StepSqlRequestValidation",
|
||||
"StepSqlRequestExtraction",
|
||||
"RunTestCase",
|
||||
"Parameters",
|
||||
"RunThriftRequest",
|
||||
"StepThriftRequestValidation",
|
||||
"StepThriftRequestExtraction",
|
||||
]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import inspect
|
||||
from typing import Text
|
||||
|
||||
from httprunner.models import TConfig, TConfigThrift
|
||||
from httprunner.models import TConfig, TConfigThrift, TConfigDB, ProtoType
|
||||
|
||||
|
||||
class ConfigThrift(object):
|
||||
@@ -21,8 +21,65 @@ class ConfigThrift(object):
|
||||
self.__config.thrift.cluster = cluster
|
||||
return self
|
||||
|
||||
def target(self, target: Text) -> "ConfigThrift":
    """Set the thrift target address (e.g. tcp://{ip}:{port}); fluent.

    Bug fix: this method was missing ``return self``, so fluent chains
    like ``config.thrift().target(...).method(...)`` raised
    AttributeError on None.
    """
    self.__config.thrift.target = target
    return self
|
||||
def service_name(self, service_name: Text) -> "ConfigThrift":
    """Set the thrift service name; fluent."""
    self.__config.thrift.service_name = service_name
    return self
|
||||
|
||||
def method(self, method: Text) -> "ConfigThrift":
    """Set the default thrift method name; fluent."""
    self.__config.thrift.method = method
    return self
|
||||
|
||||
def ip(self, ip: Text) -> "ConfigThrift":
    """Set the thrift server ip; fluent.

    Bug fix: this method previously assigned its argument to
    ``thrift.service_name`` (copy/paste from ``service_name()``), so the
    configured ip was silently ignored and the service name clobbered.
    """
    self.__config.thrift.ip = ip
    return self
|
||||
|
||||
def port(self, port: int) -> "ConfigThrift":
    """Set the thrift server port; fluent."""
    self.__config.thrift.port = port
    return self
|
||||
|
||||
def timeout(self, timeout: int) -> "ConfigThrift":
    """Set the thrift request timeout in seconds; fluent."""
    self.__config.thrift.timeout = timeout
    return self
|
||||
|
||||
def proto_type(self, proto_type: ProtoType) -> "ConfigThrift":
    """Set the thrift protocol type; fluent."""
    self.__config.thrift.proto_type = proto_type
    return self
|
||||
|
||||
def trans_type(self, trans_type: ProtoType) -> "ConfigThrift":
    """Set the thrift transport type; fluent.

    NOTE(review): the annotation says ProtoType, but the value stored in
    ``thrift.trans_type`` is declared as models.TransType — looks like a
    copy/paste from proto_type(); fixing it requires importing TransType
    in this module. Confirm and correct.
    """
    self.__config.thrift.trans_type = trans_type
    return self
|
||||
|
||||
def struct(self) -> TConfig:
    """Return the underlying TConfig being built."""
    return self.__config
|
||||
|
||||
|
||||
class ConfigDB(object):
    """Fluent builder for the db section (TConfigDB) of a TConfig."""

    def __init__(self, config: TConfig):
        self.__config = config
        # start from a fresh TConfigDB: any db settings previously present
        # on this TConfig are discarded
        self.__config.db = TConfigDB()

    def psm(self, psm) -> "ConfigDB":
        """Set the db service psm; fluent."""
        self.__config.db.psm = psm
        return self

    def user(self, user) -> "ConfigDB":
        """Set the db user name; fluent."""
        self.__config.db.user = user
        return self

    def password(self, password) -> "ConfigDB":
        """Set the db password; fluent."""
        self.__config.db.password = password
        return self

    def ip(self, ip) -> "ConfigDB":
        """Set the db server ip; fluent."""
        self.__config.db.ip = ip
        return self

    def port(self, port: int) -> "ConfigDB":
        """Set the db server port; fluent."""
        self.__config.db.port = port
        return self

    def database(self, database: Text) -> "ConfigDB":
        """Set the database name; fluent."""
        self.__config.db.database = database
        return self
|
||||
|
||||
def struct(self) -> TConfig:
|
||||
@@ -64,3 +121,6 @@ class Config(object):
|
||||
|
||||
def thrift(self) -> ConfigThrift:
    """Enter the thrift sub-config builder for this testcase config."""
    return ConfigThrift(self.__config)
|
||||
|
||||
def db(self) -> ConfigDB:
    """Enter the db sub-config builder for this testcase config."""
    return ConfigDB(self.__config)
|
||||
|
||||
78
httprunner/database/engine.py
Normal file
78
httprunner/database/engine.py
Normal file
@@ -0,0 +1,78 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import json
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
|
||||
class DBEngine(object):
    """Thin SQLAlchemy wrapper that executes raw SQL strings and returns
    plain dicts (rows) or a rowcount mapping (for write statements)."""

    def __init__(self, db_uri):
        """Create an engine and bind a single session.

        db_uri example:
            mysql+pymysql://{username}:{password}@{host}:{port}/{database}?charset=utf8mb4
        """
        engine = create_engine(db_uri)
        self.session = sessionmaker(bind=engine)()

    @staticmethod
    def value_decode(row: dict):
        """Decode the values of a result row in place.

        datetime.datetime -> "YYYY-MM-DD HH:MM:SS" string
        datetime.date     -> "YYYY-MM-DD" string
        JSON string       -> dict/list (left unchanged when not valid JSON)

        :param row: one result row as a mutable dict
        :return: None (mutates *row*)
        """
        for k, v in row.items():
            # check datetime before date: datetime is a subclass of date
            if isinstance(v, datetime.datetime):
                row[k] = v.strftime("%Y-%m-%d %H:%M:%S")
            elif isinstance(v, datetime.date):
                row[k] = v.strftime("%Y-%m-%d")
            elif isinstance(v, str):
                try:
                    row[k] = json.loads(v)
                except ValueError:
                    # not JSON — keep the raw string
                    pass

    def _fetch(self, query, size=-1, commit=True):
        """Execute *query* and shape the result.

        :param query: raw SQL string
        :param size: -1 -> fetchall, 1 -> fetchone, n > 1 -> fetchmany(n)
        :param commit: commit the session after executing
        :return: list[dict] / dict / {"rowcount": int}; None when no rows;
            None for statements that are neither SELECT nor write ops
        """
        result = self.session.execute(query)
        if commit:
            self.session.commit()
        # lstrip so queries with leading whitespace/newlines are classified too
        op = query.lstrip().upper()[:6]
        if op == "SELECT":
            if size < 0:
                rows = [dict(el) for el in result.fetchall()]
                for row in rows:
                    # decode every row — consistent with the fetchone path
                    self.value_decode(row)
                return rows or None
            elif size == 1:
                one = result.fetchone()
                # bug fix: dict(None) raised TypeError when no row matched
                if one is None:
                    return None
                one = dict(one)
                self.value_decode(one)
                return one or None
            else:
                rows = [dict(el) for el in result.fetchmany(size)]
                for row in rows:
                    self.value_decode(row)
                return rows or None
        elif op in ("UPDATE", "DELETE", "INSERT"):
            return {"rowcount": result.rowcount}

    def fetchone(self, query, commit=True):
        """Run a SELECT and return the first row as a dict (or None)."""
        return self._fetch(query, size=1, commit=commit)

    def fetchmany(self, query, size, commit=True):
        """Run a SELECT and return up to *size* rows as dicts (or None)."""
        return self._fetch(query=query, size=size, commit=commit)

    def fetchall(self, query, commit=True):
        """Run a SELECT and return all rows as dicts (or None)."""
        return self._fetch(query=query, size=-1, commit=commit)

    def insert(self, query, commit=True):
        """Run an INSERT and return {"rowcount": n}."""
        return self._fetch(query=query, commit=commit)

    def delete(self, query, commit=True):
        """Run a DELETE and return {"rowcount": n}."""
        return self._fetch(query=query, commit=commit)

    def update(self, query, commit=True):
        """Run an UPDATE and return {"rowcount": n}."""
        return self._fetch(query=query, commit=commit)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # local smoke test only — placeholder credentials; never runs on import
    db = DBEngine(f"mysql+pymysql://xxxxx:xxxxx@10.0.0.1:3306/dbname?charset=utf8mb4")
|
||||
@@ -86,3 +86,7 @@ class TestcaseNotFound(NotFoundError):
|
||||
|
||||
class SummaryEmpty(MyBaseError):
|
||||
"""test result summary data is empty"""
|
||||
|
||||
|
||||
class SqlMethodNotSupport(MyBaseError):
    """raised when a sql step uses a method outside SqlMethodEnum"""

    pass
|
||||
|
||||
@@ -28,6 +28,20 @@ class MethodEnum(Text, Enum):
|
||||
PATCH = "PATCH"
|
||||
|
||||
|
||||
class ProtoType(Enum):
    """thrift protocol type choices for the thrift step.

    NOTE(review): values are opaque ids here; presumably mapped to
    thriftpy2 protocol factories inside ThriftClient — confirm there.
    """

    Binary = 1
    CyBinary = 2
    Compact = 3
    Json = 4
|
||||
|
||||
|
||||
class TransType(Enum):
    """thrift transport type choices for the thrift step.

    NOTE(review): values are opaque ids here; presumably mapped to
    thriftpy2 transport factories inside ThriftClient — confirm there.
    """

    Buffered = 1
    CyBuffered = 2
    Framed = 3
    CyFramed = 4
|
||||
|
||||
|
||||
# configs for thrift rpc
|
||||
class TConfigThrift(BaseModel):
|
||||
psm: Text = None
|
||||
@@ -36,6 +50,68 @@ class TConfigThrift(BaseModel):
|
||||
target: Text = None
|
||||
include_dirs: List[Text] = None
|
||||
thrift_client: Any = None
|
||||
timeout: int = 10
|
||||
idl_path: Text = None
|
||||
method: Text = None
|
||||
ip: Text = "127.0.0.1"
|
||||
port: int = 9000
|
||||
service_name: Text = None
|
||||
proto_type: ProtoType = ProtoType.Binary
|
||||
trans_type: TransType = TransType.Buffered
|
||||
|
||||
|
||||
# configs for db
|
||||
class TConfigDB(BaseModel):
    """connection settings for the sql handler (mysql+pymysql URI is
    assembled from these fields by run_step_sql_request)"""

    psm: Text = None
    user: Text = None
    password: Text = None
    ip: Text = None
    port: int = 3306  # default mysql port
    database: Text = None
|
||||
|
||||
|
||||
class TransportEnum(Text, Enum):
    """transport styles accepted by TThriftRequest.transport"""

    BUFFERED = "buffered"
    FRAMED = "framed"
|
||||
|
||||
|
||||
class TThriftRequest(BaseModel):
    """rpc request model"""

    method: Text = ""  # thrift method name to invoke
    params: Dict = {}  # request parameters passed to the method
    thrift_client: Any = None  # pre-built client; one is built lazily when None
    idl_path: Text = ""  # idl local path
    timeout: int = 10  # sec
    transport: TransportEnum = TransportEnum.BUFFERED
    include_dirs: List[Union[Text, None]] = []  # param of thriftpy2.load
    target: Text = ""  # tcp://{ip}:{port} or sd://psm?cluster=xx&env=xx
    env: Text = "prod"
    cluster: Text = "default"
    psm: Text = ""
    # per-step overrides; fall back to config.thrift values when unset
    service_name: Text = None
    ip: Text = None
    port: int = None
    proto_type: ProtoType = None
    trans_type: TransType = None
|
||||
|
||||
|
||||
class SqlMethodEnum(Text, Enum):
    """SQL operations supported by the sql step handler"""

    FETCHONE = "FETCHONE"
    FETCHMANY = "FETCHMANY"
    FETCHALL = "FETCHALL"
    INSERT = "INSERT"
    UPDATE = "UPDATE"
    DELETE = "DELETE"
|
||||
|
||||
|
||||
class TSqlRequest(BaseModel):
    """sql request model"""

    db_config: TConfigDB = TConfigDB()
    method: SqlMethodEnum = None
    sql: Text = None
    size: int = 0  # limit nums of sql result (used by FETCHMANY only)
|
||||
|
||||
|
||||
class TConfig(BaseModel):
|
||||
@@ -51,6 +127,7 @@ class TConfig(BaseModel):
|
||||
path: Text = None
|
||||
# configs for other protocols
|
||||
thrift: TConfigThrift = None
|
||||
db: TConfigDB = TConfigDB()
|
||||
|
||||
|
||||
class TRequest(BaseModel):
|
||||
@@ -84,6 +161,8 @@ class TStep(BaseModel):
|
||||
validate_script: List[Text] = []
|
||||
retry_times: int = 0
|
||||
retry_interval: int = 0 # sec
|
||||
thrift_request: Union[TThriftRequest, None] = None
|
||||
sql_request: Union[TSqlRequest, None] = None
|
||||
|
||||
|
||||
class TestCase(BaseModel):
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
from typing import Any, Dict, Text
|
||||
from typing import Dict, Text, Any
|
||||
|
||||
import jmespath
|
||||
import requests
|
||||
from jmespath.exceptions import JMESPathError
|
||||
from loguru import logger
|
||||
|
||||
from httprunner import exceptions
|
||||
from httprunner.exceptions import ParamsError, ValidationFailure
|
||||
from httprunner.models import Validators, VariablesMapping
|
||||
from httprunner.parser import Parser, parse_string_value
|
||||
from httprunner.exceptions import ValidationFailure, ParamsError
|
||||
from httprunner.models import VariablesMapping, Validators
|
||||
from httprunner.parser import parse_string_value, Parser
|
||||
|
||||
|
||||
def get_uniform_comparator(comparator: Text):
|
||||
@@ -113,9 +112,9 @@ def uniform_validator(validator):
|
||||
}
|
||||
|
||||
|
||||
class ResponseObject(object):
|
||||
def __init__(self, resp_obj: requests.Response, parser: Parser):
|
||||
"""initialize with a requests.Response object
|
||||
class ResponseObjectBase(object):
|
||||
def __init__(self, resp_obj, parser: Parser):
|
||||
"""initialize with a response object
|
||||
|
||||
Args:
|
||||
resp_obj (instance): requests.Response instance
|
||||
@@ -125,48 +124,6 @@ class ResponseObject(object):
|
||||
self.parser = parser
|
||||
self.validation_results: Dict = {}
|
||||
|
||||
def __getattr__(self, key):
|
||||
if key in ["json", "content", "body"]:
|
||||
try:
|
||||
value = self.resp_obj.json()
|
||||
except ValueError:
|
||||
value = self.resp_obj.content
|
||||
elif key == "cookies":
|
||||
value = self.resp_obj.cookies.get_dict()
|
||||
else:
|
||||
try:
|
||||
value = getattr(self.resp_obj, key)
|
||||
except AttributeError:
|
||||
err_msg = "ResponseObject does not have attribute: {}".format(key)
|
||||
logger.error(err_msg)
|
||||
raise exceptions.ParamsError(err_msg)
|
||||
|
||||
self.__dict__[key] = value
|
||||
return value
|
||||
|
||||
def _search_jmespath(self, expr: Text) -> Any:
|
||||
resp_obj_meta = {
|
||||
"status_code": self.status_code,
|
||||
"headers": self.headers,
|
||||
"cookies": self.cookies,
|
||||
"body": self.body,
|
||||
}
|
||||
if not expr.startswith(tuple(resp_obj_meta.keys())):
|
||||
return expr
|
||||
|
||||
try:
|
||||
check_value = jmespath.search(expr, resp_obj_meta)
|
||||
except JMESPathError as ex:
|
||||
logger.error(
|
||||
f"failed to search with jmespath\n"
|
||||
f"expression: {expr}\n"
|
||||
f"data: {resp_obj_meta}\n"
|
||||
f"exception: {ex}"
|
||||
)
|
||||
raise
|
||||
|
||||
return check_value
|
||||
|
||||
def extract(
|
||||
self,
|
||||
extractors: Dict[Text, Text],
|
||||
@@ -186,6 +143,19 @@ class ResponseObject(object):
|
||||
logger.info(f"extract mapping: {extract_mapping}")
|
||||
return extract_mapping
|
||||
|
||||
def _search_jmespath(self, expr: Text) -> Any:
    """Search *expr* with jmespath directly against self.resp_obj.

    Base implementation for non-HTTP responses (sql/thrift) where the
    response object itself is the searchable data; ResponseObject
    overrides this to search an HTTP-specific meta dict instead.

    :raises JMESPathError: re-raised after logging when the search fails
    """
    try:
        check_value = jmespath.search(expr, self.resp_obj)
    except JMESPathError as ex:
        logger.error(
            f"failed to search with jmespath\n"
            f"expression: {expr}\n"
            f"data: {self.resp_obj}\n"
            f"exception: {ex}"
        )
        raise
    return check_value
|
||||
|
||||
def validate(
|
||||
self,
|
||||
validators: Validators,
|
||||
@@ -274,3 +244,55 @@ class ResponseObject(object):
|
||||
if not validate_pass:
|
||||
failures_string = "\n".join([failure for failure in failures])
|
||||
raise ValidationFailure(failures_string)
|
||||
|
||||
|
||||
class ResponseObject(ResponseObjectBase):
    """HTTP response wrapper: lazy attribute access over the underlying
    requests.Response plus jmespath search scoped to a
    status_code/headers/cookies/body meta dict."""

    def __getattr__(self, key):
        # only reached when *key* is not already cached in __dict__
        if key in ["json", "content", "body"]:
            try:
                value = self.resp_obj.json()
            except ValueError:
                # body is not JSON — fall back to raw content
                value = self.resp_obj.content
        elif key == "cookies":
            value = self.resp_obj.cookies.get_dict()
        else:
            try:
                value = getattr(self.resp_obj, key)
            except AttributeError:
                err_msg = "ResponseObject does not have attribute: {}".format(key)
                logger.error(err_msg)
                raise exceptions.ParamsError(err_msg)

        # cache so subsequent access bypasses __getattr__
        self.__dict__[key] = value
        return value

    def _search_jmespath(self, expr: Text) -> Any:
        """Search *expr* against the HTTP meta fields.

        Expressions not starting with a known meta key are returned
        unchanged (treated as literal values).
        """
        resp_obj_meta = {
            "status_code": self.status_code,
            "headers": self.headers,
            "cookies": self.cookies,
            "body": self.body,
        }
        if not expr.startswith(tuple(resp_obj_meta.keys())):
            return expr

        try:
            check_value = jmespath.search(expr, resp_obj_meta)
        except JMESPathError as ex:
            logger.error(
                f"failed to search with jmespath\n"
                f"expression: {expr}\n"
                f"data: {resp_obj_meta}\n"
                f"exception: {ex}"
            )
            raise

        return check_value
|
||||
|
||||
|
||||
class ThriftResponseObject(ResponseObjectBase):
    """thrift response wrapper; inherits direct jmespath search from base"""

    pass
|
||||
|
||||
|
||||
class SqlResponseObject(ResponseObjectBase):
    """sql response wrapper; inherits direct jmespath search from base"""

    pass
|
||||
|
||||
@@ -38,6 +38,8 @@ class SessionRunner(object):
|
||||
session: HttpSession = None
|
||||
case_id: Text = ""
|
||||
root_dir: Text = ""
|
||||
thrift_client = None
|
||||
db_engine = None
|
||||
|
||||
__config: TConfig
|
||||
__project_meta: ProjectMeta = None
|
||||
@@ -87,6 +89,14 @@ class SessionRunner(object):
|
||||
self.__export = export
|
||||
return self
|
||||
|
||||
def with_thrift_client(self, thrift_client) -> "SessionRunner":
    """Inject a pre-built thrift client; otherwise the thrift step builds
    one lazily from config/step settings. Fluent."""
    self.thrift_client = thrift_client
    return self
|
||||
|
||||
def with_db_engine(self, db_engine) -> "SessionRunner":
    """Inject a pre-built DB engine; otherwise the sql step builds one
    lazily from db_config. Fluent."""
    self.db_engine = db_engine
    return self
|
||||
|
||||
def __parse_config(self, param: Dict = None) -> None:
|
||||
# parse config variables
|
||||
self.__config.variables.update(self.__session_variables)
|
||||
|
||||
@@ -1,13 +1,23 @@
|
||||
from typing import Union
|
||||
|
||||
from httprunner import HttpRunner
|
||||
from httprunner.models import StepResult, TRequest, TStep, TestCase
|
||||
from httprunner.runner import HttpRunner
|
||||
from httprunner.step_request import (
|
||||
RequestWithOptionalArgs,
|
||||
StepRequestExtraction,
|
||||
StepRequestValidation,
|
||||
)
|
||||
from httprunner.step_sql_request import (
|
||||
RunSqlRequest,
|
||||
StepSqlRequestExtraction,
|
||||
StepSqlRequestValidation,
|
||||
)
|
||||
from httprunner.step_testcase import StepRefCase
|
||||
from httprunner.step_thrift_request import (
|
||||
RunThriftRequest,
|
||||
StepThriftRequestExtraction,
|
||||
StepThriftRequestValidation,
|
||||
)
|
||||
|
||||
|
||||
class Step(object):
|
||||
@@ -18,6 +28,12 @@ class Step(object):
|
||||
StepRequestExtraction,
|
||||
RequestWithOptionalArgs,
|
||||
StepRefCase,
|
||||
RunSqlRequest,
|
||||
StepSqlRequestValidation,
|
||||
StepSqlRequestExtraction,
|
||||
RunThriftRequest,
|
||||
StepThriftRequestValidation,
|
||||
StepThriftRequestExtraction,
|
||||
],
|
||||
):
|
||||
self.__step = step
|
||||
@@ -48,4 +64,4 @@ class Step(object):
|
||||
return self.__step.type()
|
||||
|
||||
def run(self, runner: HttpRunner) -> StepResult:
|
||||
return self.__step.run(runner)
|
||||
return self.__step.run(runner)
|
||||
286
httprunner/step_sql_request.py
Normal file
286
httprunner/step_sql_request.py
Normal file
@@ -0,0 +1,286 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
import time
|
||||
from typing import Text
|
||||
from loguru import logger
|
||||
|
||||
from httprunner import utils
|
||||
from httprunner.exceptions import SqlMethodNotSupport
|
||||
from httprunner.exceptions import ValidationFailure
|
||||
from httprunner.models import IStep, StepResult, TStep
|
||||
from httprunner.models import SqlMethodEnum, TSqlRequest
|
||||
from httprunner.response import SqlResponseObject
|
||||
from httprunner.runner import HttpRunner
|
||||
from httprunner.step_request import (StepRequestExtraction, StepRequestValidation, call_hooks)
|
||||
|
||||
try:
|
||||
import sqlalchemy
|
||||
import pymysql
|
||||
|
||||
SQL_READY = True
|
||||
except ModuleNotFoundError:
|
||||
SQL_READY = False
|
||||
|
||||
|
||||
def ensure_sql_ready():
    """Abort the run unless the optional sql dependencies are importable.

    Exits the process (rather than raising) so the user sees a clean
    installation hint instead of a traceback.
    """
    if SQL_READY:
        return

    # bug fix: the message previously referred to the "uploader" extension
    # and "optional upload dependencies" — copy/pasted from the upload module
    msg = """
    sql extension dependencies uninstalled, install first and try again.
    install with pip:
    $ pip install sqlalchemy pymysql

    or you can install httprunner with optional sql dependencies:
    $ pip install "httprunner[sql]"
    """
    logger.error(msg)
    sys.exit(1)
|
||||
|
||||
|
||||
def run_step_sql_request(runner: HttpRunner, step: TStep) -> StepResult:
    """run teststep: sql request

    Merges step variables, fills db_config gaps from the session config,
    lazily builds runner.db_engine, dispatches on SqlMethodEnum, then
    performs extraction and validation like an HTTP step.

    :raises SqlMethodNotSupport: when sql_request.method matches no enum case
    :raises ValidationFailure: re-raised after logging request/response details
    """
    start_time = time.time()

    step_result = StepResult(
        name=step.name,
        success=False,
    )
    step.variables = runner.merge_step_variables(step.variables)
    # parse: resolve $variables inside the sql request definition
    request_dict = step.sql_request.dict()
    parsed_request_dict = runner.parser.parse_data(request_dict, step.variables)
    config = runner.get_config()
    # step-level db_config wins; fall back to the testcase-level config.db
    # NOTE(review): `or` fallback means falsy step values (e.g. port 0,
    # empty string) are always overridden by config — presumably intended
    parsed_request_dict["db_config"]["psm"] = (
        parsed_request_dict["db_config"]["psm"] or config.db.psm
    )
    parsed_request_dict["db_config"]["user"] = (
        parsed_request_dict["db_config"]["user"] or config.db.user
    )
    parsed_request_dict["db_config"]["password"] = (
        parsed_request_dict["db_config"]["password"] or config.db.password
    )
    parsed_request_dict["db_config"]["ip"] = (
        parsed_request_dict["db_config"]["ip"] or config.db.ip
    )
    parsed_request_dict["db_config"]["port"] = (
        parsed_request_dict["db_config"]["port"] or config.db.port
    )
    parsed_request_dict["db_config"]["database"] = (
        parsed_request_dict["db_config"]["database"] or config.db.database
    )

    # lazily create the engine once per runner; reused by subsequent steps
    if not runner.db_engine:
        ensure_sql_ready()
        from httprunner.database.engine import DBEngine
        runner.db_engine = DBEngine(
            f'mysql+pymysql://{parsed_request_dict["db_config"]["user"]}:'
            f'{parsed_request_dict["db_config"]["password"]}@{parsed_request_dict["db_config"]["ip"]}:'
            f'{parsed_request_dict["db_config"]["port"]}/{parsed_request_dict["db_config"]["database"]}'
            f"?charset=utf8mb4"
        )

    # parsed_request_dict["headers"].setdefault(
    #     "HRUN-Request-ID",
    #     f"HRUN-{self.__case_id}-{str(int(time.time() * 1000))[-6:]}",
    # )

    # setup hooks
    if step.setup_hooks:
        call_hooks(runner, step.setup_hooks, step.variables, "setup request")

    logger.info(f"Executing SQL: {parsed_request_dict['sql']}")
    # dispatch on the declared sql method
    if step.sql_request.method == SqlMethodEnum.FETCHONE:
        sql_resp = runner.db_engine.fetchone(parsed_request_dict["sql"])
    elif step.sql_request.method == SqlMethodEnum.INSERT:
        sql_resp = runner.db_engine.insert(parsed_request_dict["sql"])
    elif step.sql_request.method == SqlMethodEnum.FETCHMANY:
        sql_resp = runner.db_engine.fetchmany(
            parsed_request_dict["sql"], parsed_request_dict["size"]
        )
    elif step.sql_request.method == SqlMethodEnum.FETCHALL:
        sql_resp = runner.db_engine.fetchall(parsed_request_dict["sql"])
    elif step.sql_request.method == SqlMethodEnum.UPDATE:
        sql_resp = runner.db_engine.update(parsed_request_dict["sql"])
    elif step.sql_request.method == SqlMethodEnum.DELETE:
        sql_resp = runner.db_engine.delete(parsed_request_dict["sql"])
    else:
        raise SqlMethodNotSupport(
            f"step.sql_request.method {parsed_request_dict['method']} not support"
        )
    resp_obj = SqlResponseObject(sql_resp, parser=runner.parser)
    # expose the response to hooks/extractors as $sql_response
    step.variables["sql_response"] = resp_obj

    # teardown hooks
    if step.teardown_hooks:
        call_hooks(runner, step.teardown_hooks, step.variables, "teardown request")

    def log_sql_req_resp_details():
        # called only on validation failure
        err_msg = "\n{} SQL DETAILED REQUEST & RESPONSE {}\n".format("*" * 32, "*" * 32)

        # log request
        err_msg += "====== sql request details ======\n"
        err_msg += f"sql: {step.sql_request.sql}\n"
        for k, v in parsed_request_dict.items():
            v = utils.omit_long_data(v)
            err_msg += f"{k}: {repr(v)}\n"

        err_msg += "\n"

        # log response
        # NOTE(review): sql_resp.items() assumes a dict, but FETCHALL /
        # FETCHMANY return a list of dicts and empty results return None —
        # this would raise AttributeError while logging; verify/fix
        err_msg += "====== sql response details ======\n"
        for k, v in sql_resp.items():
            v = utils.omit_long_data(v)
            err_msg += f"{k}: {repr(v)}\n"
        logger.error(err_msg)

    # extract
    extractors = step.extract
    extract_mapping = resp_obj.extract(extractors)
    step_result.export_vars = extract_mapping

    variables_mapping = step.variables
    variables_mapping.update(extract_mapping)

    # validate
    validators = step.validators
    try:
        resp_obj.validate(validators, variables_mapping)
        step_result.success = True
    except ValidationFailure:
        log_sql_req_resp_details()
        raise
    finally:
        # session data and elapsed time are recorded whether or not
        # validation passed
        session_data = runner.session.data
        session_data.success = step_result.success
        session_data.validators = resp_obj.validation_results
        # save step data
        step_result.data = session_data
        step_result.elapsed = time.time() - start_time
    return step_result
|
||||
|
||||
|
||||
class StepSqlRequestValidation(StepRequestValidation):
    """Validation stage of a sql step.

    Inherits the assert_* fluent API from StepRequestValidation and only
    overrides run() to execute via the sql handler.
    """

    def __init__(self, step: TStep):
        super().__init__(step)
        self.__step = step

    def run(self, runner: HttpRunner):
        """Execute the underlying sql step."""
        return run_step_sql_request(runner, self.__step)
|
||||
|
||||
|
||||
class StepSqlRequestExtraction(StepRequestExtraction):
    """Extraction stage of a sql step.

    Inherits the with_jmespath fluent API from StepRequestExtraction and
    overrides run() to execute via the sql handler.
    """

    def __init__(self, step: TStep):
        super().__init__(step)
        self.__step = step

    def run(self, runner: HttpRunner):
        """Execute the underlying sql step."""
        return run_step_sql_request(runner, self.__step)

    def validate(self) -> StepSqlRequestValidation:
        """Move on to the validation stage."""
        return StepSqlRequestValidation(self.__step)
|
||||
|
||||
|
||||
class RunSqlRequest(IStep):
    """Fluent builder for a sql teststep (TStep with a TSqlRequest)."""

    def __init__(self, name: Text):
        self.__step = TStep(name=name)
        self.__step.sql_request = TSqlRequest()

    def with_variables(self, **variables) -> "RunSqlRequest":
        """Merge step-level variables; fluent."""
        self.__step.variables.update(variables)
        return self

    def with_db_config(
        self, user=None, password=None, ip=None, port=None, database=None, psm=None
    ):
        """Override db connection settings for this step; unset fields fall
        back to the testcase config at run time. Fluent."""
        if user:
            self.__step.sql_request.db_config.user = user
        if password:
            self.__step.sql_request.db_config.password = password
        if ip:
            self.__step.sql_request.db_config.ip = ip
        if port:
            self.__step.sql_request.db_config.port = port
        if database:
            self.__step.sql_request.db_config.database = database
        if psm:
            self.__step.sql_request.db_config.psm = psm
        return self

    def fetchone(self, sql) -> "RunSqlRequest":
        """Run *sql* and keep the first result row; fluent."""
        self.__step.sql_request.method = SqlMethodEnum.FETCHONE
        self.__step.sql_request.sql = sql
        return self

    def fetchmany(self, sql, size) -> "RunSqlRequest":
        """Run *sql* and keep up to *size* result rows; fluent."""
        self.__step.sql_request.method = SqlMethodEnum.FETCHMANY
        self.__step.sql_request.sql = sql
        self.__step.sql_request.size = size
        return self

    def fetchall(self, sql) -> "RunSqlRequest":
        """Run *sql* and keep all result rows; fluent."""
        self.__step.sql_request.method = SqlMethodEnum.FETCHALL
        self.__step.sql_request.sql = sql
        return self

    def update(self, sql) -> "RunSqlRequest":
        """Run an UPDATE statement; fluent."""
        self.__step.sql_request.method = SqlMethodEnum.UPDATE
        self.__step.sql_request.sql = sql
        return self

    def delete(self, sql) -> "RunSqlRequest":
        """Run a DELETE statement; fluent."""
        self.__step.sql_request.method = SqlMethodEnum.DELETE
        self.__step.sql_request.sql = sql
        return self

    def insert(self, sql) -> "RunSqlRequest":
        """Run an INSERT statement; fluent."""
        self.__step.sql_request.method = SqlMethodEnum.INSERT
        self.__step.sql_request.sql = sql
        return self

    def with_retry(self, retry_times, retry_interval) -> "RunSqlRequest":
        """Configure retry behavior (times / interval seconds); fluent."""
        self.__step.retry_times = retry_times
        self.__step.retry_interval = retry_interval
        return self

    def teardown_hook(
        self, hook: Text, assign_var_name: Text = None
    ) -> "RunSqlRequest":
        """Append a teardown hook; when assign_var_name is given, the hook
        result is assigned to that variable. Fluent."""
        if assign_var_name:
            self.__step.teardown_hooks.append({assign_var_name: hook})
        else:
            self.__step.teardown_hooks.append(hook)

        return self

    def setup_hook(self, hook: Text, assign_var_name: Text = None) -> "RunSqlRequest":
        """Append a setup hook; when assign_var_name is given, the hook
        result is assigned to that variable. Fluent."""
        if assign_var_name:
            self.__step.setup_hooks.append({assign_var_name: hook})
        else:
            self.__step.setup_hooks.append(hook)

        return self

    def struct(self) -> TStep:
        """Return the underlying TStep being built."""
        return self.__step

    def name(self) -> Text:
        """Return the step name."""
        return self.__step.name

    def type(self) -> Text:
        """Return a type tag including the configured sql text."""
        return f"sql-request-{self.__step.sql_request.sql}"

    def run(self, runner) -> StepResult:
        """Execute the sql step."""
        return run_step_sql_request(runner, self.__step)

    def extract(self) -> StepSqlRequestExtraction:
        """Enter the extraction stage."""
        return StepSqlRequestExtraction(self.__step)

    def validate(self) -> StepSqlRequestValidation:
        """Enter the validation stage."""
        return StepSqlRequestValidation(self.__step)

    def with_jmespath(
        self, jmes_path: Text, var_name: Text
    ) -> "StepSqlRequestExtraction":
        """Shortcut: register one jmespath extractor and enter the
        extraction stage."""
        self.__step.extract[var_name] = jmes_path
        return StepSqlRequestExtraction(self.__step)
|
||||
293
httprunner/step_thrift_request.py
Normal file
293
httprunner/step_thrift_request.py
Normal file
@@ -0,0 +1,293 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import platform
|
||||
import sys
|
||||
import time
|
||||
from typing import Text, Union
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from httprunner import utils
|
||||
from httprunner.exceptions import ValidationFailure
|
||||
from httprunner.models import (
|
||||
IStep,
|
||||
ProtoType,
|
||||
StepResult,
|
||||
TStep,
|
||||
TThriftRequest,
|
||||
TransType,
|
||||
)
|
||||
from httprunner.response import ThriftResponseObject
|
||||
from httprunner.runner import HttpRunner
|
||||
from httprunner.step_request import (
|
||||
StepRequestExtraction,
|
||||
StepRequestValidation,
|
||||
call_hooks,
|
||||
)
|
||||
|
||||
try:
|
||||
import thriftpy2
|
||||
from thrift.Thrift import TType
|
||||
|
||||
THRIFT_READY = True
|
||||
except ModuleNotFoundError:
|
||||
THRIFT_READY = False
|
||||
|
||||
|
||||
def ensure_thrift_ready():
    """Abort the run unless the platform and optional thrift dependencies
    allow thrift steps.

    Exits the process (rather than raising) so the user sees a clean
    hint instead of a traceback.
    """
    # explicit check instead of `assert` — asserts are stripped under -O,
    # which would silently skip the platform guard
    if platform.system() == "Windows":
        logger.error("Sorry,thrift not support Windows for now")
        sys.exit(1)
    if THRIFT_READY:
        return

    # bug fix: the message previously referred to the "uploader" extension
    # and "optional upload dependencies" — copy/pasted from the upload module
    msg = """
    thrift extension dependencies uninstalled, install first and try again.
    install with pip:
    $ pip install cython thriftpy2 thrift

    or you can install httprunner with optional thrift dependencies:
    $ pip install "httprunner[thrift]"
    """
    logger.error(msg)
    sys.exit(1)
|
||||
|
||||
|
||||
def run_step_thrift_request(runner: HttpRunner, step: TStep) -> StepResult:
    """run teststep: thrift request

    Merges step variables, fills request gaps from config.thrift, lazily
    builds runner.thrift_client, sends the rpc, then performs extraction
    and validation like an HTTP step.

    NOTE(review): every fallback below dereferences config.thrift — if the
    testcase config has no thrift section this raises AttributeError;
    confirm a thrift config is always present for thrift steps.

    :raises ValidationFailure: re-raised after logging request/response details
    """
    start_time = time.time()

    step_result = StepResult(
        name=step.name,
        success=False,
    )
    step.variables = runner.merge_step_variables(step.variables)
    # parse: resolve $variables inside the thrift request definition
    request_dict = step.thrift_request.dict()
    parsed_request_dict = runner.parser.parse_data(request_dict, step.variables)
    config = runner.get_config()
    # step-level values win; fall back to the testcase-level config.thrift
    parsed_request_dict["psm"] = parsed_request_dict["psm"] or config.thrift.psm
    parsed_request_dict["env"] = parsed_request_dict["env"] or config.thrift.env
    parsed_request_dict["cluster"] = (
        parsed_request_dict["cluster"] or config.thrift.cluster
    )
    parsed_request_dict["idl_path"] = (
        parsed_request_dict["idl_path"] or config.thrift.idl_path
    )
    parsed_request_dict["include_dirs"] = (
        parsed_request_dict["include_dirs"] or config.thrift.include_dirs
    )
    parsed_request_dict["method"] = (
        parsed_request_dict["method"] or config.thrift.method
    )
    parsed_request_dict["service_name"] = (
        parsed_request_dict["service_name"] or config.thrift.service_name
    )
    parsed_request_dict["ip"] = parsed_request_dict["ip"] or config.thrift.ip
    parsed_request_dict["port"] = parsed_request_dict["port"] or config.thrift.port
    parsed_request_dict["proto_type"] = (
        parsed_request_dict["proto_type"] or config.thrift.proto_type
    )
    # NOTE(review): the key is "trans_port" while the source field is
    # "trans_type" — looks like a typo; it is read back consistently below,
    # but the dict exposed to hooks via $thrift_request carries both keys
    parsed_request_dict["trans_port"] = (
        parsed_request_dict["trans_type"] or config.thrift.trans_type
    )
    parsed_request_dict["timeout"] = (
        parsed_request_dict["timeout"] or config.thrift.timeout
    )
    parsed_request_dict["thrift_client"] = parsed_request_dict["thrift_client"]

    # parsed_request_dict["headers"].setdefault(
    #     "HRUN-Request-ID",
    #     f"HRUN-{self.__case_id}-{str(int(time.time() * 1000))[-6:]}",
    # )
    # expose the parsed request to hooks/extractors as $thrift_request
    step.variables["thrift_request"] = parsed_request_dict

    psm = parsed_request_dict["psm"]
    # client resolution order: runner-level client, then step-level client,
    # then build a new ThriftClient from the parsed settings
    if not runner.thrift_client:
        runner.thrift_client = parsed_request_dict["thrift_client"]
    if not runner.thrift_client:
        ensure_thrift_ready()
        from httprunner.thrift.thrift_client import ThriftClient
        runner.thrift_client = ThriftClient(
            thrift_file=parsed_request_dict["idl_path"],
            service_name=parsed_request_dict["service_name"],
            ip=parsed_request_dict["ip"],
            port=parsed_request_dict["port"],
            include_dirs=parsed_request_dict["include_dirs"],
            timeout=parsed_request_dict["timeout"],
            proto_type=parsed_request_dict["proto_type"],
            trans_type=parsed_request_dict["trans_port"],
        )

    # setup hooks
    if step.setup_hooks:
        call_hooks(runner, step.setup_hooks, step.variables, "setup request")

    # thrift request
    resp = runner.thrift_client.send_request(
        parsed_request_dict["params"], parsed_request_dict["method"]
    )
    resp_obj = ThriftResponseObject(resp, parser=runner.parser)
    # expose the response to hooks/extractors as $thrift_response
    step.variables["thrift_response"] = resp_obj

    # teardown hooks
    if step.teardown_hooks:
        call_hooks(runner, step.teardown_hooks, step.variables, "teardown request")

    def log_thrift_req_resp_details():
        # called only on validation failure
        err_msg = "\n{} THRIFT DETAILED REQUEST & RESPONSE {}\n".format(
            "*" * 32, "*" * 32
        )

        # log request
        err_msg += "====== thrift request details ======\n"
        err_msg += f"psm: {psm}\n"
        for k, v in parsed_request_dict.items():
            v = utils.omit_long_data(v)
            err_msg += f"{k}: {repr(v)}\n"

        err_msg += "\n"

        # log response
        # NOTE(review): resp.items() assumes the rpc response is a mapping —
        # confirm against ThriftClient.send_request's return type
        err_msg += "====== thrift response details ======\n"
        for k, v in resp.items():
            v = utils.omit_long_data(v)
            err_msg += f"{k}: {repr(v)}\n"
        logger.error(err_msg)

    # extract
    extractors = step.extract
    extract_mapping = resp_obj.extract(extractors)
    step_result.export_vars = extract_mapping

    variables_mapping = step.variables
    variables_mapping.update(extract_mapping)

    # validate
    validators = step.validators
    try:
        resp_obj.validate(validators, variables_mapping)
        step_result.success = True
    except ValidationFailure:
        log_thrift_req_resp_details()
        raise
    finally:
        # session data and elapsed time are recorded whether or not
        # validation passed
        session_data = runner.session.data
        session_data.success = step_result.success
        session_data.validators = resp_obj.validation_results
        # save step data
        step_result.data = session_data
        step_result.elapsed = time.time() - start_time
    return step_result
|
||||
|
||||
|
||||
class StepThriftRequestValidation(StepRequestValidation):
    """Validation stage of a thrift-request step.

    Reuses the generic request-validation DSL while routing execution to
    the thrift step runner.
    """

    def __init__(self, step: TStep):
        super().__init__(step)
        self.__step = step

    def run(self, runner: HttpRunner):
        """Execute the underlying thrift request step."""
        return run_step_thrift_request(runner, self.__step)
|
||||
|
||||
|
||||
class StepThriftRequestExtraction(StepRequestExtraction):
    """Extraction stage of a thrift-request step."""

    def __init__(self, step: TStep):
        super().__init__(step)
        self.__step = step

    def run(self, runner: HttpRunner):
        """Execute the underlying thrift request step."""
        return run_step_thrift_request(runner, self.__step)

    def validate(self) -> StepThriftRequestValidation:
        """Switch to the validation stage for this step."""
        return StepThriftRequestValidation(self.__step)
|
||||
|
||||
|
||||
class RunThriftRequest(IStep):
    """Fluent builder for a thrift-request test step."""

    def __init__(self, name: Text):
        self.__step = TStep(name=name)
        self.__step.thrift_request = TThriftRequest()

    def with_variables(self, **variables) -> "RunThriftRequest":
        """Merge step-scoped variables into the step."""
        self.__step.variables.update(variables)
        return self

    def with_retry(self, retry_times, retry_interval) -> "RunThriftRequest":
        """Configure retry count and retry interval for this step."""
        self.__step.retry_times = retry_times
        self.__step.retry_interval = retry_interval
        return self

    def teardown_hook(
        self, hook: Text, assign_var_name: Text = None
    ) -> "RunThriftRequest":
        """Register a teardown hook; optionally assign its result to a variable."""
        if assign_var_name:
            self.__step.teardown_hooks.append({assign_var_name: hook})
        else:
            self.__step.teardown_hooks.append(hook)

        return self

    def setup_hook(
        self, hook: Text, assign_var_name: Text = None
    ) -> "RunThriftRequest":
        """Register a setup hook; optionally assign its result to a variable."""
        if assign_var_name:
            self.__step.setup_hooks.append({assign_var_name: hook})
        else:
            self.__step.setup_hooks.append(hook)

        return self

    def with_params(self, **params) -> "RunThriftRequest":
        """Merge RPC request parameters."""
        self.__step.thrift_request.params.update(params)
        return self

    def with_method(self, method) -> "RunThriftRequest":
        """Set the thrift RPC method name to invoke."""
        self.__step.thrift_request.method = method
        return self

    def with_idl_path(self, idl_path, idl_root_path) -> "RunThriftRequest":
        """Set the thrift IDL file and its include root directory."""
        self.__step.thrift_request.idl_path = idl_path
        self.__step.thrift_request.include_dirs = [idl_root_path]
        return self

    def with_thrift_client(
        self, thrift_client: Union["ThriftClient", str]
    ) -> "RunThriftRequest":
        """Provide a pre-built thrift client (instance or reference)."""
        self.__step.thrift_request.thrift_client = thrift_client
        return self

    def with_ip(self, ip: str) -> "RunThriftRequest":
        """Set the target server IP."""
        self.__step.thrift_request.ip = ip
        return self

    def with_port(self, port: int) -> "RunThriftRequest":
        """Set the target server port."""
        self.__step.thrift_request.port = port
        return self

    def with_proto_type(self, proto_type: ProtoType) -> "RunThriftRequest":
        """Set the thrift protocol type."""
        self.__step.thrift_request.proto_type = proto_type
        return self

    def with_trans_type(self, trans_type: TransType) -> "RunThriftRequest":
        """Set the thrift transport type.

        FIX: previously assigned to ``proto_type``, silently clobbering the
        protocol setting and never applying the transport type.
        """
        self.__step.thrift_request.trans_type = trans_type
        return self

    def struct(self) -> TStep:
        """Return the underlying TStep model."""
        return self.__step

    def name(self) -> Text:
        """Return the step name."""
        return self.__step.name

    def type(self) -> Text:
        """Return a descriptive step type tag."""
        return f"thrift-request-{self.__step.thrift_request.psm}-{self.__step.thrift_request.method}"

    def run(self, runner) -> StepResult:
        """Execute the thrift request step."""
        return run_step_thrift_request(runner, self.__step)

    def extract(self) -> StepThriftRequestExtraction:
        """Switch to the extraction stage for this step."""
        return StepThriftRequestExtraction(self.__step)

    def validate(self) -> StepThriftRequestValidation:
        """Switch to the validation stage for this step."""
        return StepThriftRequestValidation(self.__step)

    def with_jmespath(
        self, jmes_path: Text, var_name: Text
    ) -> "StepThriftRequestExtraction":
        """Extract *jmes_path* from the response into *var_name*."""
        self.__step.extract[var_name] = jmes_path
        return StepThriftRequestExtraction(self.__step)
|
||||
471
httprunner/thrift/data_convertor.py
Normal file
471
httprunner/thrift/data_convertor.py
Normal file
@@ -0,0 +1,471 @@
|
||||
# -*- coding: utf-8 -*-

from __future__ import division

import json
import traceback
import re
import logging
import base64

from thrift.Thrift import TType

# Prefer the C-accelerated string escaper from the private _json module;
# fall back to the pure-Python implementation defined below when unavailable.
try:
    from _json import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
    c_encode_basestring_ascii = None

# Printable ASCII plus common whitespace characters (legacy helper data).
text_characters = "".join(map(chr, range(32, 127))) + "\n\r\t\b"
_null_trans = str.maketrans("", "")
# Characters that must be escaped inside a JSON string.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# Characters escaped when emitting ASCII-only JSON.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Matches characters outside the ASCII range.
HAS_UTF8 = re.compile(r"[\x80-\xff]")
# Single-character escape table for JSON strings.
ESCAPE_DCT = {
    "\\": "\\\\",
    '"': '\\"',
    "\b": "\\b",
    "\f": "\\f",
    "\n": "\\n",
    "\r": "\\r",
    "\t": "\\t",
}
for i in range(0x20):
    # Remaining control characters fall back to \uXXXX escapes.
    ESCAPE_DCT.setdefault(chr(i), "\\u{0:04x}".format(i))
    # ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

INFINITY = float("inf")
FLOAT_REPR = repr
|
||||
|
||||
|
||||
def istext(s_input):
    """Report whether *s_input* can be used directly as a JSON text value.

    Anything that is not a ``bytes`` object is treated as text.
    """
    return not isinstance(s_input, bytes)
|
||||
|
||||
|
||||
def unicode_2_utf8_keep_native(para):
    """Recursively normalize *para* for JSON serialization.

    Strings are returned unchanged (Python 3 ``str`` is already unicode);
    lists are normalized in place, dicts rebuilt, tuples rebuilt via a list
    round-trip.  Dict subclasses are flattened to plain dicts; anything else
    passes through as-is.

    NOTE(fix): removed an unreachable duplicate ``elif type(para) is str``
    branch (a Python 2 leftover calling ``.encode("utf-8")``) -- strings are
    already handled by the first check.  Also fixed the malformed
    ``logging.debug`` calls (extra argument without a format placeholder).
    """
    if type(para) is str:
        return para

    if type(para) is list:
        # Normalize elements in place, preserving the original list object.
        for i in range(len(para)):
            para[i] = unicode_2_utf8_keep_native(para[i])
        return para
    elif type(para) is dict:
        newpara = {}
        for (key, value) in para.items():
            key = unicode_2_utf8_keep_native(key)
            value = unicode_2_utf8_keep_native(value)
            newpara[key] = value
        return newpara
    elif type(para) is tuple:
        return tuple(unicode_2_utf8_keep_native(list(para)))
    else:
        logging.debug("type======== %s", type(para))
        if isinstance(para, dict):
            # dict subclasses (e.g. OrderedDict) are flattened to plain dicts.
            logging.debug("type ************in dict: %s", type(para))
            return unicode_2_utf8_keep_native(dict(para))
        else:
            return para
|
||||
|
||||
|
||||
def encode_basestring(s):
    """Return *s* as a double-quoted JSON string with control chars escaped."""

    def _sub(match):
        return ESCAPE_DCT[match.group(0)]

    return '"' + ESCAPE.sub(_sub, s) + '"'
|
||||
|
||||
|
||||
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string.

    Non-ASCII characters are emitted as ``\\uXXXX`` escapes, with UTF-16
    surrogate pairs for code points above U+FFFF.

    NOTE(fix): the original attempted ``s.decode("utf-8")`` when HAS_UTF8
    matched -- a Python 2 leftover that raises AttributeError on Python 3,
    where ``str`` is already unicode.  That branch is removed.
    """

    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                return "\\u{0:04x}".format(n)
            # Encode as a UTF-16 surrogate pair.
            n -= 0x10000
            s1 = 0xD800 | ((n >> 10) & 0x3FF)
            s2 = 0xDC00 | (n & 0x3FF)
            return "\\u{0:04x}\\u{1:04x}".format(s1, s2)

    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
|
||||
|
||||
|
||||
# Use the C-accelerated escaper when the _json import above succeeded.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
|
||||
|
||||
|
||||
class ThriftJSONDecoder(json.JSONDecoder):
    """JSON decoder that materializes a dict / JSON string into a thrift struct.

    The target struct class is supplied via the ``thrift_class`` keyword;
    its ``thrift_spec`` drives the recursive per-field conversion in
    ``_convert``.
    """

    def __init__(self, *args, **kwargs):
        # Pop our extra keyword before delegating to json.JSONDecoder.
        self._thrift_class = kwargs.pop("thrift_class")
        super(ThriftJSONDecoder, self).__init__(*args, **kwargs)

    def decode(self, json_str):
        # Accept either an already-parsed dict or a raw JSON string.
        if isinstance(json_str, dict):
            dct = json_str
        else:
            dct = super(ThriftJSONDecoder, self).decode(json_str)
        return self._convert(
            dct,
            TType.STRUCT,
            # (self._thrift_class, self._thrift_class.thrift_spec))
            self._thrift_class,
        )

    def _convert(self, val, ttype, ttype_info):
        # Recursively convert *val* according to thrift type *ttype*;
        # *ttype_info* carries the element/field details from thrift_spec.
        if ttype == TType.STRUCT:
            if val is None:
                ret = None
            else:
                # (thrift_class, thrift_spec) = ttype_info
                thrift_class = ttype_info
                thrift_spec = ttype_info.thrift_spec
                ret = thrift_class()
                for tag, field in thrift_spec.items():
                    if field is None:
                        continue
                    # Example thrift_spec entries:
                    # {1: (15, 'ad_ids', 10, False), 255: (12, 'Base', <class 'base.Base'>, False)}
                    # {1: (15, 'models', (12, <class 'adcommon.Ad'>), False), 255: (12, 'BaseResp', <class 'base.BaseResp'>, False)}
                    if len(field) <= 3:
                        (field_ttype, field_name, dummy) = field
                        field_ttype_info = None
                    else:
                        (field_ttype, field_name, field_ttype_info, dummy) = field

                    if val is None or field_name not in val:
                        continue
                    converted_val = self._convert(
                        val[field_name], field_ttype, field_ttype_info
                    )
                    setattr(ret, field_name, converted_val)
        elif ttype == TType.LIST:
            if type(ttype_info) != tuple:  # primitive element type; no further detail
                (element_ttype, element_ttype_info) = (ttype_info, None)
            else:
                (element_ttype, element_ttype_info) = ttype_info
            if val is not None:
                ret = [self._convert(x, element_ttype, element_ttype_info) for x in val]
            else:
                ret = None

        elif ttype == TType.SET:
            if type(ttype_info) != tuple:  # primitive element type; no further detail
                (element_ttype, element_ttype_info) = (ttype_info, None)
            else:
                (element_ttype, element_ttype_info) = ttype_info
            if val is not None:
                ret = set(
                    [self._convert(x, element_ttype, element_ttype_info) for x in val]
                )
            else:
                ret = None

        elif ttype == TType.MAP:
            # Key handling.
            if type(ttype_info[0]) == tuple:
                key_ttype, key_ttype_info = ttype_info[0]
            else:
                key_ttype, key_ttype_info = ttype_info[0], None

            # Value handling.
            if type(ttype_info[1]) != tuple:  # value is a primitive type; no further detail
                val_ttype = ttype_info[1]
                val_ttype_info = None
            else:
                val_ttype, val_ttype_info = ttype_info[1]

            if val is not None:
                ret = dict(
                    [
                        (
                            self._convert(k, key_ttype, key_ttype_info),
                            self._convert(v, val_ttype, val_ttype_info),
                        )
                        for (k, v) in val.items()
                    ]
                )
            else:
                ret = None
        elif ttype == TType.STRING:
            if isinstance(val, str):
                ret = val.encode("utf8")
            elif val is None:
                ret = None
            else:
                ret = str(val)
            # Should detect base64-encoded binary strings here and b64decode
            # them back to the original bytes.
            # todo: not implemented yet

        elif ttype == TType.DOUBLE:
            if val is not None:
                ret = float(val)
            else:
                ret = None
        elif ttype == TType.I64:
            if val is not None:
                ret = int(val)
            else:
                ret = None
        elif ttype == TType.I32 or ttype == TType.I16 or ttype == TType.BYTE:
            if val is not None:
                ret = int(val)
            else:
                ret = None
        elif ttype == TType.BOOL:
            if val is not None:
                ret = bool(val)
            else:
                ret = None
        else:
            raise TypeError("Unrecognized thrift field type: %s" % ttype)
        return ret
|
||||
|
||||
|
||||
def json2thrift(json_str, thrift_class):
    """Deserialize *json_str* (JSON string or dict) into a *thrift_class* instance."""
    logging.debug(json_str)
    decoder_options = dict(
        cls=ThriftJSONDecoder, thrift_class=thrift_class, strict=False
    )
    return json.loads(json_str, **decoder_options)
|
||||
|
||||
|
||||
def dumper(obj):
    """Serialize *obj* to pretty-printed JSON; fall back to its ``__dict__``.

    NOTE(fix): narrowed the bare ``except:`` to ``except Exception`` so
    KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    try:
        return json.dumps(obj, default=lambda o: o.__dict__, sort_keys=True, indent=2)
    except Exception:
        return obj.__dict__
|
||||
|
||||
|
||||
class MyJSONEncoder(json.JSONEncoder):
    """JSONEncoder variant that can skip output chunks that are not UTF-8 clean.

    Extra keyword ``skip_nonutf8_value`` (default False): when True, chunks
    that fail UTF-8 normalization are dropped from the output instead of
    raising.

    NOTE(fix): the original forwarded ``encoding=`` to
    ``json.JSONEncoder.__init__``, which raises TypeError on Python 3 (the
    parameter was removed); it is now stored on the instance only.  The
    Python-2-era ``o.decode(self.encoding)`` branch in ``encode`` was
    removed -- ``str`` has no ``.decode`` on Python 3.
    """

    def __init__(
        self,
        skipkeys=False,
        ensure_ascii=True,
        check_circular=True,
        allow_nan=True,
        indent=None,
        separators=None,
        encoding="utf-8",
        default=None,
        sort_keys=False,
        **kw
    ):
        super(MyJSONEncoder, self).__init__(
            skipkeys=skipkeys,
            ensure_ascii=ensure_ascii,
            check_circular=check_circular,
            allow_nan=allow_nan,
            indent=indent,
            separators=separators,
            default=default,
            sort_keys=sort_keys,
        )
        # Kept for call-site compatibility; Python 3 json no longer uses it.
        self.encoding = encoding
        # Default: do NOT skip values with non-UTF-8 encoding.
        self.skip_nonutf8_value = kw.get("skip_nonutf8_value", False)

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # Fast path for bare strings.
        if isinstance(o, str):
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        # add by braver(braver@bytedance.com)
        # todo: fix 'utf8' codec can't decode byte 0x91 in position 3: invalid start byte"
        if self.skip_nonutf8_value:  # defaults to False
            tmp_chunks = []
            for chunk in chunks:
                try:
                    tmp_chunks.append(unicode_2_utf8_keep_native(chunk))
                except Exception:
                    logging.debug(traceback.format_exc())
            return "".join(tmp_chunks)

        # Standard json.dumps behavior.
        return "".join(chunks)
|
||||
|
||||
|
||||
class ThriftJSONEncoder(json.JSONEncoder):
    """JSONEncoder that serializes thrift structs via their ``thrift_spec``.

    Extra keyword ``skip_nonutf8_value`` (default False): when True, output
    chunks that fail UTF-8 normalization are dropped instead of raising.

    add by braver(Braver@bytedance.com)

    NOTE(fix): ``encode`` previously read ``self.encoding`` (never set ->
    AttributeError on str input) and called ``str.decode`` (removed in
    Python 3); both removed.  A stray debug ``print`` in ``default`` was
    also removed.
    """

    def __init__(
        self,
        skipkeys=False,
        ensure_ascii=True,
        check_circular=True,
        allow_nan=True,
        indent=None,
        separators=None,
        default=None,
        sort_keys=False,
        **kw
    ):

        super(ThriftJSONEncoder, self).__init__(
            skipkeys=skipkeys,
            ensure_ascii=ensure_ascii,
            check_circular=check_circular,
            allow_nan=allow_nan,
            indent=indent,
            separators=separators,
            default=default,
            sort_keys=sort_keys,
        )
        # Default: do NOT skip values with non-UTF-8 encoding.
        self.skip_nonutf8_value = kw.get("skip_nonutf8_value", False)

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # Fast path for bare strings; str is already unicode on Python 3.
        if isinstance(o, str):
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        # add by braver(braver@bytedance.com)
        # todo: fix 'utf8' codec can't decode byte 0x91 in position 3: invalid start byte"
        if self.skip_nonutf8_value:  # defaults to False
            tmp_chunks = []
            for chunk in chunks:
                try:
                    tmp_chunks.append(unicode_2_utf8_keep_native(chunk))
                except Exception:
                    logging.debug(traceback.format_exc())
            return "".join(tmp_chunks)

        # Standard json.dumps behavior.
        return "".join(chunks)

    def default(self, o):
        """Convert a thrift struct (or bytes) into JSON-serializable data."""
        if isinstance(o, bytes):
            return str(o, encoding="utf-8")
        if not hasattr(o, "thrift_spec"):
            return super(ThriftJSONEncoder, self).default(o)

        spec = getattr(o, "thrift_spec")
        ret = {}
        for tag, field in spec.items():
            if field is None:
                continue
            # Field layout: (field_ttype, field_name, field_ttype_info, default)
            field_name = field[1]
            default = field[-1]
            field_type = field[0]
            field_ttype_info = field[2]
            if field_name in o.__dict__:
                val = o.__dict__[field_name]
                if field_type in [TType.LIST, TType.SET]:  # collection types
                    if val:  # non-empty list/set
                        val = list(val)  # normalize to a list
                        is_need_binary_bs64 = False
                        if type(field_ttype_info) != tuple:  # primitive element type
                            if (
                                field_ttype_info in [TType.BYTE]
                                and type(val[0]) in [str]
                                and not istext(val[0])
                            ):
                                is_need_binary_bs64 = True
                        if is_need_binary_bs64:
                            for index, item in enumerate(val):
                                if item and type(item) in [str] and not istext(item):
                                    # Binary string: base64-encode it.
                                    val[index] = base64.b64encode(item)
                if field_type in [TType.BYTE] and type(val) in [str]:
                    # A string field (plain text or binary); binary content must
                    # be base64-encoded or JSON serialization would fail.
                    if val and not istext(val):
                        val = base64.b64encode(val.encode("utf-8"))
                # if val != default:
                ret[field_name] = val
        if "request_id" in o.__dict__:
            ret["request_id"] = o.__dict__["request_id"]
        if "rpc_latency" in o.__dict__:
            ret["rpc_latency"] = o.__dict__["rpc_latency"]
        return ret
|
||||
|
||||
|
||||
def thrift2json(obj, skip_nonutf8_value=False):
    """Serialize a thrift struct *obj* to a JSON string."""
    encoder_options = {
        "cls": ThriftJSONEncoder,
        "ensure_ascii": False,
        "skip_nonutf8_value": skip_nonutf8_value,
    }
    return json.dumps(obj, **encoder_options)
|
||||
|
||||
|
||||
def thrift2dict(obj):
    """Serialize a thrift struct *obj* to a plain dict (via its JSON form).

    NOTE(fix): renamed the local variable from ``str``, which shadowed the
    builtin type.
    """
    json_str = thrift2json(obj)
    return json.loads(json_str)
|
||||
|
||||
|
||||
# dict2thrift shares json2thrift's implementation: its decode path accepts
# either a JSON string or an already-parsed dict.
dict2thrift = json2thrift

if __name__ == "__main__":
    # Ad-hoc smoke checks for istext().
    print(istext("Всего за {$price$}, а доставка - бесплатно!"))
    print(istext(b"\xe4\xb8\xad\xe6\x96\x87"))
    print(
        istext(
            '{"web_uri":"ad-site-i18n-sg/202103185d0d723d88b7f642452dac73","height":336,"width":336,"file_name":""}'
        )
    )
|
||||
139
httprunner/thrift/thrift_client.py
Normal file
139
httprunner/thrift/thrift_client.py
Normal file
@@ -0,0 +1,139 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import
|
||||
|
||||
import enum
|
||||
import json
|
||||
|
||||
import thriftpy2
|
||||
from loguru import logger
|
||||
from thriftpy2.protocol import (
|
||||
TBinaryProtocolFactory,
|
||||
TCompactProtocolFactory,
|
||||
TCyBinaryProtocolFactory,
|
||||
TJSONProtocolFactory,
|
||||
)
|
||||
from thriftpy2.rpc import make_client
|
||||
from thriftpy2.transport import (
|
||||
TBufferedTransportFactory,
|
||||
TCyBufferedTransportFactory,
|
||||
TCyFramedTransportFactory,
|
||||
TFramedTransportFactory,
|
||||
)
|
||||
|
||||
from httprunner.thrift.data_convertor import json2thrift, thrift2dict
|
||||
|
||||
|
||||
class ProtoType(enum.Enum):
    """Thrift protocol selector; mapped to a thriftpy2 factory by get_proto_factory()."""

    Binary = 1
    CyBinary = 2
    Compact = 3
    Json = 4
||||
|
||||
|
||||
class TransType(enum.Enum):
    """Thrift transport selector; mapped to a thriftpy2 factory by get_trans_factory()."""

    Buffered = 1
    CyBuffered = 2
    Framed = 3
    CyFramed = 4
|
||||
|
||||
|
||||
class RequestFormat(enum.Enum):
    """Request payload format options (not referenced in this module)."""

    json = 1
    binary = 2
|
||||
|
||||
|
||||
def get_proto_factory(proto_type):
    """Return a thriftpy2 protocol factory for *proto_type* (None if unknown)."""
    factory_classes = {
        ProtoType.Binary: TBinaryProtocolFactory,
        ProtoType.CyBinary: TCyBinaryProtocolFactory,
        ProtoType.Compact: TCompactProtocolFactory,
        ProtoType.Json: TJSONProtocolFactory,
    }
    factory_cls = factory_classes.get(proto_type)
    if factory_cls is not None:
        return factory_cls()
|
||||
|
||||
|
||||
def get_trans_factory(trans_type):
    """Return a thriftpy2 transport factory for *trans_type* (None if unknown)."""
    factory_classes = {
        TransType.Buffered: TBufferedTransportFactory,
        TransType.CyBuffered: TCyBufferedTransportFactory,
        TransType.Framed: TFramedTransportFactory,
        TransType.CyFramed: TCyFramedTransportFactory,
    }
    factory_cls = factory_classes.get(trans_type)
    if factory_cls is not None:
        return factory_cls()
|
||||
|
||||
|
||||
class ThriftClient(object):
    """Thrift RPC client built on thriftpy2.

    Loads the IDL at *thrift_file*, resolves *service_name* in the generated
    module and opens a client to ip:port.  On any load/connect failure the
    instance is left with ``client is None`` and the error is logged.

    NOTE(fix): ``__del__`` previously called ``self.client.close()``
    unconditionally, raising AttributeError when ``__init__`` had failed and
    left ``client = None``.  The ``logger.debug`` calls used ``%s``
    placeholders, but loguru formats messages with ``{}``-style ``.format``
    arguments, so the values were silently dropped; switched to ``{}``.
    """

    def __init__(
        self,
        thrift_file,
        service_name,
        ip,
        port,
        include_dirs=None,
        timeout=3000,
        proto_type=ProtoType.CyBinary,
        trans_type=TransType.CyBuffered,
    ):
        self.thrift_file = thrift_file
        self.include_dirs = include_dirs
        self.service_name = service_name
        self.ip = ip
        self.port = port
        self.timeout = timeout
        self.proto_type = proto_type
        self.trans_type = trans_type
        try:
            logger.debug(
                "init thrift module: thrift_file={}, module_name={}",
                thrift_file,
                str(self.service_name) + "_thrift",
            )
            self.thrift_module = thriftpy2.load(
                self.thrift_file,
                module_name=str(self.service_name) + "_thrift",
                include_dirs=self.include_dirs,
            )
            self.thrift_service_obj = getattr(self.thrift_module, self.service_name)
            logger.debug(
                "init thrift client: service_name={}, ip={}, port={}",
                self.thrift_service_obj,
                ip,
                port,
            )
            self.client = make_client(
                self.thrift_service_obj,
                self.ip,
                int(self.port),
                timeout=self.timeout,
                proto_factory=get_proto_factory(self.proto_type),
                trans_factory=get_trans_factory(self.trans_type),
            )
        except Exception as e:
            # Leave the instance in a safe "disconnected" state.
            self.thrift_module = None
            self.thrift_service_obj = None
            self.client = None
            logger.exception("init thrift module and client failed: {}".format(e))
        finally:
            # thriftpy2's parser keeps global state between loads; reset it so
            # a failed/partial parse does not poison the next load.
            thriftpy2.parser.parser.thrift_stack = []

    def get_client(self):
        """Return the underlying thriftpy2 client (None if init failed)."""
        return self.client

    def send_request(self, request_data, request_method=""):
        """Invoke *request_method* with *request_data* (a plain dict).

        The dict is converted into the method's request struct, the RPC is
        invoked, and the response struct is converted back to a dict.
        """
        # <method>_args.thrift_spec[1][2] is the class of the first RPC argument.
        thrift_req_cls = getattr(
            self.thrift_service_obj, request_method + "_args"
        ).thrift_spec[1][2]
        request_obj = json2thrift(json.dumps(request_data), thrift_req_cls)
        logger.debug(
            "send thrift request: request_method={}, request_obj={}",
            request_method,
            request_obj,
        )
        response_obj = getattr(self.client, request_method)(request_obj)
        logger.debug("thrift response = {}", response_obj)
        return thrift2dict(response_obj)

    def __del__(self):
        # `client` may be None (failed init) or missing entirely if __init__
        # raised before assignment; guard against AttributeError noise at
        # interpreter shutdown.
        client = getattr(self, "client", None)
        if client is not None:
            client.close()
|
||||
307
poetry.lock
generated
307
poetry.lock
generated
@@ -179,6 +179,19 @@ type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "cython"
|
||||
version = "0.29.28"
|
||||
description = "The Cython compiler for writing C extensions for the Python language."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
|
||||
[package.source]
|
||||
type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "filetype"
|
||||
version = "1.0.10"
|
||||
@@ -192,6 +205,22 @@ type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "greenlet"
|
||||
version = "1.1.2"
|
||||
description = "Lightweight in-process concurrent programming"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx"]
|
||||
|
||||
[package.source]
|
||||
type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.3"
|
||||
@@ -384,6 +413,19 @@ type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "ply"
|
||||
version = "3.11"
|
||||
description = "Python Lex & Yacc"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[package.source]
|
||||
type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "py"
|
||||
version = "1.11.0"
|
||||
@@ -417,6 +459,23 @@ type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "pymysql"
|
||||
version = "1.0.2"
|
||||
description = "Pure Python MySQL Driver"
|
||||
category = "main"
|
||||
optional = true
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[package.extras]
|
||||
ed25519 = ["PyNaCl (>=1.4.0)"]
|
||||
rsa = ["cryptography"]
|
||||
|
||||
[package.source]
|
||||
type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "3.0.7"
|
||||
@@ -581,7 +640,7 @@ name = "six"
|
||||
version = "1.16.0"
|
||||
description = "Python 2 and 3 compatibility utilities"
|
||||
category = "main"
|
||||
optional = true
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
|
||||
[package.source]
|
||||
@@ -589,6 +648,85 @@ type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "sqlalchemy"
|
||||
version = "1.4.36"
|
||||
description = "Database Abstraction Library"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
||||
|
||||
[package.dependencies]
|
||||
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
|
||||
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
|
||||
|
||||
[package.extras]
|
||||
aiomysql = ["greenlet (!=0.4.17)", "aiomysql"]
|
||||
aiosqlite = ["typing_extensions (!=3.10.0.1)", "greenlet (!=0.4.17)", "aiosqlite"]
|
||||
asyncio = ["greenlet (!=0.4.17)"]
|
||||
asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"]
|
||||
mariadb_connector = ["mariadb (>=1.0.1)"]
|
||||
mssql = ["pyodbc"]
|
||||
mssql_pymssql = ["pymssql"]
|
||||
mssql_pyodbc = ["pyodbc"]
|
||||
mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"]
|
||||
mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"]
|
||||
mysql_connector = ["mysql-connector-python"]
|
||||
oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"]
|
||||
postgresql = ["psycopg2 (>=2.7)"]
|
||||
postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"]
|
||||
postgresql_pg8000 = ["pg8000 (>=1.16.6)"]
|
||||
postgresql_psycopg2binary = ["psycopg2-binary"]
|
||||
postgresql_psycopg2cffi = ["psycopg2cffi"]
|
||||
pymysql = ["pymysql (<1)", "pymysql"]
|
||||
sqlcipher = ["sqlcipher3-binary"]
|
||||
|
||||
[package.source]
|
||||
type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "thrift"
|
||||
version = "0.16.0"
|
||||
description = "Python bindings for the Apache Thrift RPC system"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.7.2"
|
||||
|
||||
[package.extras]
|
||||
all = ["tornado (>=4.0)", "twisted"]
|
||||
tornado = ["tornado (>=4.0)"]
|
||||
twisted = ["twisted"]
|
||||
|
||||
[package.source]
|
||||
type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "thriftpy2"
|
||||
version = "0.4.14"
|
||||
description = "Pure python implementation of Apache Thrift."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
|
||||
[package.dependencies]
|
||||
ply = ">=3.4,<4.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["cython (>=0.28.4)", "flake8 (>=2.5)", "pytest (>=2.8)", "sphinx-rtd-theme (>=0.1.9)", "sphinx (>=1.3)", "tornado (>=4.0,<6.0)"]
|
||||
tornado = ["tornado (>=4.0,<6.0)"]
|
||||
|
||||
[package.source]
|
||||
type = "legacy"
|
||||
url = "https://pypi.tuna.tsinghua.edu.cn/simple"
|
||||
reference = "tsinghua"
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.10.2"
|
||||
@@ -694,12 +832,14 @@ reference = "tsinghua"
|
||||
|
||||
[extras]
|
||||
allure = ["allure-pytest"]
|
||||
sql = ["sqlalchemy", "pymysql"]
|
||||
thrift = ["cython", "thrift", "thriftpy2"]
|
||||
upload = ["requests-toolbelt", "filetype"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "1.1"
|
||||
python-versions = "^3.7"
|
||||
content-hash = "8dc444117b1b9a55f00d25b86e69a26d9ceaac2d331731b676415fd4a019f57a"
|
||||
content-hash = "a00de4a66e9c8b73709f339d266be673ca6057dfd4023504677054697611986d"
|
||||
|
||||
[metadata.files]
|
||||
allure-pytest = [
|
||||
@@ -755,6 +895,9 @@ brotli = [
|
||||
{file = "Brotli-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c"},
|
||||
{file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c"},
|
||||
{file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0"},
|
||||
{file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91"},
|
||||
{file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa"},
|
||||
{file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"},
|
||||
{file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"},
|
||||
{file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"},
|
||||
{file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"},
|
||||
@@ -766,12 +909,18 @@ brotli = [
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4"},
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a"},
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b"},
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f"},
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6"},
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b"},
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-win32.whl", hash = "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14"},
|
||||
{file = "Brotli-1.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-win32.whl", hash = "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"},
|
||||
{file = "Brotli-1.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb"},
|
||||
@@ -779,6 +928,9 @@ brotli = [
|
||||
{file = "Brotli-1.0.9-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-win32.whl", hash = "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429"},
|
||||
{file = "Brotli-1.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19"},
|
||||
@@ -786,6 +938,9 @@ brotli = [
|
||||
{file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"},
|
||||
{file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"},
|
||||
{file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"},
|
||||
@@ -842,10 +997,105 @@ coverage = [
|
||||
{file = "coverage-4.5.4-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5"},
|
||||
{file = "coverage-4.5.4.tar.gz", hash = "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c"},
|
||||
]
|
||||
cython = [
|
||||
{file = "Cython-0.29.28-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:75686c586e37b1fed0fe4a2c053474f96fc07da0063bbfc98023454540515d31"},
|
||||
{file = "Cython-0.29.28-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:16f2e74fcac223c53e298ecead62c353d3cffa107bea5d8232e4b2ba40781634"},
|
||||
{file = "Cython-0.29.28-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b6c77cc24861a33714e74212abfab4e54bf42e1ad602623f193b8e369389af2f"},
|
||||
{file = "Cython-0.29.28-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:59f4e86b415620a097cf0ec602adf5a7ee3cc33e8220567ded96566f753483f8"},
|
||||
{file = "Cython-0.29.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:31465dce7fd3f058d02afb98b13af962848cc607052388814428dc801cc26f57"},
|
||||
{file = "Cython-0.29.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5658fa477e80d96c49d5ff011938dd4b62da9aa428f771b91f1a7c49af45aad8"},
|
||||
{file = "Cython-0.29.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:33b69ac9bbf2b93d8cae336cfe48889397a857e6ceeb5cef0b2f0b31b6c54f2b"},
|
||||
{file = "Cython-0.29.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9d39ee7ddef6856413f950b8959e852d83376d9db1c509505e3f4873df32aa70"},
|
||||
{file = "Cython-0.29.28-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c9848a423a14e8f51bd4bbf8e2ff37031764ce66bdc7c6bc06c70d4084eb23c7"},
|
||||
{file = "Cython-0.29.28-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:09448aadb818387160ca4d1e1b82dbb7001526b6d0bed7529c4e8ac12e3b6f4c"},
|
||||
{file = "Cython-0.29.28-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:341917bdb2c95bcf8322aacfe50bbe6b4794880b16fa8b2300330520e123a5e5"},
|
||||
{file = "Cython-0.29.28-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fdcef7abb09fd827691e3abe6fd42c6c34beaccfa0bc2df6074f0a49949df6a8"},
|
||||
{file = "Cython-0.29.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:43eca77169f855dd04be11921a585c8854a174f30bc925257e92bc7b9197fbd2"},
|
||||
{file = "Cython-0.29.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7962a78ceb80cdec21345fb5088e675060fa65982030d446069f2d675d30e3cd"},
|
||||
{file = "Cython-0.29.28-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ed32c206e1d68056a34b21d2ec0cf0f23d338d6531476a68c73e21e20bd7bb63"},
|
||||
{file = "Cython-0.29.28-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a0ed39c63ba52edd03a39ea9d6da6f5326aaee5d333c317feba543270a1b3af5"},
|
||||
{file = "Cython-0.29.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:ded4fd3da4dee2f4414c35214244e29befa7f6fede3e9be317e765169df2cbc7"},
|
||||
{file = "Cython-0.29.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e24bd94946ffa37f30fcb865f2340fb6d429a3c7bf87b47b22f7d22e0e68a15c"},
|
||||
{file = "Cython-0.29.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:076aa8da83383e2bed0ca5f92c13a7e76e684bc41fe8e438bbed735f5b1c2731"},
|
||||
{file = "Cython-0.29.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:004387d8b94c64681ee05660d6a234e125396097726cf2f419c0fa2ac38034d6"},
|
||||
{file = "Cython-0.29.28-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d6036f6a5a0c7fb1af88889872268b15bf20dd9cefe33a6602d79ba18b8db20f"},
|
||||
{file = "Cython-0.29.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1612d7439590ba3b8de5f907bf0e54bd8e024eafb8c59261531a7988030c182d"},
|
||||
{file = "Cython-0.29.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d7d7beb600d5dd551e9322e1393b74286f4a3d4aa387f7bfbaccc1495a98603b"},
|
||||
{file = "Cython-0.29.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5e82f6b3dc2133b2e0e2c5c63d352d40a695e40cc7ed99f4cbe83334bcf9ab39"},
|
||||
{file = "Cython-0.29.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:49076747b731ed78acf203666c3b3c5d664754ea01ca4527f62f6d8675703688"},
|
||||
{file = "Cython-0.29.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f2b7c86a73db0d8dbbd885fe67f04c7b787df37a3848b9867270d3484101fbd"},
|
||||
{file = "Cython-0.29.28-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a3b27812ac9e9737026bfbb1dd47434f3e84013f430bafe1c6cbaf1cd51b5518"},
|
||||
{file = "Cython-0.29.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0378a14d2580dcea234d7a2dc8d75f60c091105885096e6dd5b032be97542c16"},
|
||||
{file = "Cython-0.29.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d7c98727397c2547a56aa0c3c98140f1873c69a0642edc9446c6c870d0d8a5b5"},
|
||||
{file = "Cython-0.29.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6626f9691ce2093ccbcc9932f449efe3b6e1c893b556910881d177c61612e8ff"},
|
||||
{file = "Cython-0.29.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:e9cc6af0c9c477c5e175e807dce439509934efefc24ea2da9fced7fbc8170591"},
|
||||
{file = "Cython-0.29.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05edfa51c0ff31a8df3cb291b90ca93ab499686d023b9b81c216cd3509f73def"},
|
||||
{file = "Cython-0.29.28-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4b3089255b6b1cc69e4b854626a41193e6acae5332263d24707976b3cb8ca644"},
|
||||
{file = "Cython-0.29.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03b749e4f0bbf631cee472add2806d338a7d496f8383f6fb28cc5fdc34b7fdb8"},
|
||||
{file = "Cython-0.29.28-py2.py3-none-any.whl", hash = "sha256:26d8d0ededca42be50e0ac377c08408e18802b1391caa3aea045a72c1bff47ac"},
|
||||
{file = "Cython-0.29.28.tar.gz", hash = "sha256:d6fac2342802c30e51426828fe084ff4deb1b3387367cf98976bb2e64b6f8e45"},
|
||||
]
|
||||
filetype = [
|
||||
{file = "filetype-1.0.10-py2.py3-none-any.whl", hash = "sha256:63fbe6e818a3d1cfac1d62b196574a7a4b7fc8e06a6c500d53577c018ef127d9"},
|
||||
{file = "filetype-1.0.10.tar.gz", hash = "sha256:323a13500731b6c65a253bc3930bbce9a56dfba71e90b60ffd968ab69d9ae937"},
|
||||
]
|
||||
greenlet = [
|
||||
{file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"},
|
||||
{file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"},
|
||||
{file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"},
|
||||
{file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"},
|
||||
{file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"},
|
||||
{file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"},
|
||||
{file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"},
|
||||
{file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"},
|
||||
{file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"},
|
||||
{file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"},
|
||||
{file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"},
|
||||
{file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"},
|
||||
{file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"},
|
||||
{file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"},
|
||||
{file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"},
|
||||
{file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"},
|
||||
{file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"},
|
||||
{file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"},
|
||||
{file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"},
|
||||
{file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"},
|
||||
{file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"},
|
||||
{file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"},
|
||||
{file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"},
|
||||
]
|
||||
idna = [
|
||||
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
||||
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
|
||||
@@ -932,6 +1182,10 @@ pluggy = [
|
||||
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
|
||||
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
|
||||
]
|
||||
ply = [
|
||||
{file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
|
||||
{file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
|
||||
]
|
||||
py = [
|
||||
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
|
||||
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
|
||||
@@ -960,6 +1214,10 @@ pydantic = [
|
||||
{file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"},
|
||||
{file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"},
|
||||
]
|
||||
pymysql = [
|
||||
{file = "PyMySQL-1.0.2-py3-none-any.whl", hash = "sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641"},
|
||||
{file = "PyMySQL-1.0.2.tar.gz", hash = "sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36"},
|
||||
]
|
||||
pyparsing = [
|
||||
{file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
|
||||
{file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
|
||||
@@ -1023,6 +1281,51 @@ six = [
|
||||
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||
]
|
||||
sqlalchemy = [
|
||||
{file = "SQLAlchemy-1.4.36-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:81e53bd383c2c33de9d578bfcc243f559bd3801a0e57f2bcc9a943c790662e0c"},
|
||||
{file = "SQLAlchemy-1.4.36-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6e1fe00ee85c768807f2a139b83469c1e52a9ffd58a6eb51aa7aeb524325ab18"},
|
||||
{file = "SQLAlchemy-1.4.36-cp27-cp27m-win32.whl", hash = "sha256:d57ac32f8dc731fddeb6f5d1358b4ca5456e72594e664769f0a9163f13df2a31"},
|
||||
{file = "SQLAlchemy-1.4.36-cp27-cp27m-win_amd64.whl", hash = "sha256:fca8322e04b2dde722fcb0558682740eebd3bd239bea7a0d0febbc190e99dc15"},
|
||||
{file = "SQLAlchemy-1.4.36-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:53d2d9ee93970c969bc4e3c78b1277d7129554642f6ffea039c282c7dc4577bc"},
|
||||
{file = "SQLAlchemy-1.4.36-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f0394a3acfb8925db178f7728adb38c027ed7e303665b225906bfa8099dc1ce8"},
|
||||
{file = "SQLAlchemy-1.4.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c606d8238feae2f360b8742ffbe67741937eb0a05b57f536948d198a3def96"},
|
||||
{file = "SQLAlchemy-1.4.36-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8d07fe2de0325d06e7e73281e9a9b5e259fbd7cbfbe398a0433cbb0082ad8fa7"},
|
||||
{file = "SQLAlchemy-1.4.36-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5041474dcab7973baa91ec1f3112049a9dd4652898d6a95a6a895ff5c58beb6b"},
|
||||
{file = "SQLAlchemy-1.4.36-cp310-cp310-win32.whl", hash = "sha256:be094460930087e50fd08297db9d7aadaed8408ad896baf758e9190c335632da"},
|
||||
{file = "SQLAlchemy-1.4.36-cp310-cp310-win_amd64.whl", hash = "sha256:64d796e9af522162f7f2bf7a3c5531a0a550764c426782797bbeed809d0646c5"},
|
||||
{file = "SQLAlchemy-1.4.36-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a0ae3aa2e86a4613f2d4c49eb7da23da536e6ce80b2bfd60bbb2f55fc02b0b32"},
|
||||
{file = "SQLAlchemy-1.4.36-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d50cb71c1dbed70646d521a0975fb0f92b7c3f84c61fa59e07be23a1aaeecfc"},
|
||||
{file = "SQLAlchemy-1.4.36-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:16abf35af37a3d5af92725fc9ec507dd9e9183d261c2069b6606d60981ed1c6e"},
|
||||
{file = "SQLAlchemy-1.4.36-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5864a83bd345871ad9699ce466388f836db7572003d67d9392a71998092210e3"},
|
||||
{file = "SQLAlchemy-1.4.36-cp36-cp36m-win32.whl", hash = "sha256:fbf8c09fe9728168f8cc1b40c239eab10baf9c422c18be7f53213d70434dea43"},
|
||||
{file = "SQLAlchemy-1.4.36-cp36-cp36m-win_amd64.whl", hash = "sha256:6e859fa96605027bd50d8e966db1c4e1b03e7b3267abbc4b89ae658c99393c58"},
|
||||
{file = "SQLAlchemy-1.4.36-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:166a3887ec355f7d2f12738f7fa25dc8ac541867147a255f790f2f41f614cb44"},
|
||||
{file = "SQLAlchemy-1.4.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e885548da361aa3f8a9433db4cfb335b2107e533bf314359ae3952821d84b3e"},
|
||||
{file = "SQLAlchemy-1.4.36-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5c90ef955d429966d84326d772eb34333178737ebb669845f1d529eb00c75e72"},
|
||||
{file = "SQLAlchemy-1.4.36-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a052bd9f53004f8993c624c452dfad8ec600f572dd0ed0445fbe64b22f5570e"},
|
||||
{file = "SQLAlchemy-1.4.36-cp37-cp37m-win32.whl", hash = "sha256:dce3468bf1fc12374a1a732c9efd146ce034f91bb0482b602a9311cb6166a920"},
|
||||
{file = "SQLAlchemy-1.4.36-cp37-cp37m-win_amd64.whl", hash = "sha256:6cb4c4f57a20710cea277edf720d249d514e587f796b75785ad2c25e1c0fed26"},
|
||||
{file = "SQLAlchemy-1.4.36-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e74ce103b81c375c3853b436297952ef8d7863d801dcffb6728d01544e5191b5"},
|
||||
{file = "SQLAlchemy-1.4.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b20c4178ead9bc398be479428568ff31b6c296eb22e75776273781a6551973f"},
|
||||
{file = "SQLAlchemy-1.4.36-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:af2587ae11400157753115612d6c6ad255143efba791406ad8a0cbcccf2edcb3"},
|
||||
{file = "SQLAlchemy-1.4.36-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83cf3077712be9f65c9aaa0b5bc47bc1a44789fd45053e2e3ecd59ff17c63fe9"},
|
||||
{file = "SQLAlchemy-1.4.36-cp38-cp38-win32.whl", hash = "sha256:ce20f5da141f8af26c123ebaa1b7771835ca6c161225ce728962a79054f528c3"},
|
||||
{file = "SQLAlchemy-1.4.36-cp38-cp38-win_amd64.whl", hash = "sha256:316c7e5304dda3e3ad711569ac5d02698bbc71299b168ac56a7076b86259f7ea"},
|
||||
{file = "SQLAlchemy-1.4.36-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:f522214f6749bc073262529c056f7dfd660f3b5ec4180c5354d985eb7219801e"},
|
||||
{file = "SQLAlchemy-1.4.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ecac4db8c1aa4a269f5829df7e706639a24b780d2ac46b3e485cbbd27ec0028"},
|
||||
{file = "SQLAlchemy-1.4.36-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3db741beaa983d4cbf9087558620e7787106319f7e63a066990a70657dd6b35"},
|
||||
{file = "SQLAlchemy-1.4.36-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec89bf98cc6a0f5d1e28e3ad28e9be6f3b4bdbd521a4053c7ae8d5e1289a8a1"},
|
||||
{file = "SQLAlchemy-1.4.36-cp39-cp39-win32.whl", hash = "sha256:e12532c4d3f614678623da5d852f038ace1f01869b89f003ed6fe8c793f0c6a3"},
|
||||
{file = "SQLAlchemy-1.4.36-cp39-cp39-win_amd64.whl", hash = "sha256:cb441ca461bf97d00877b607f132772644b623518b39ced54da433215adce691"},
|
||||
{file = "SQLAlchemy-1.4.36.tar.gz", hash = "sha256:64678ac321d64a45901ef2e24725ec5e783f1f4a588305e196431447e7ace243"},
|
||||
]
|
||||
thrift = [
|
||||
{file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
|
||||
]
|
||||
thriftpy2 = [
|
||||
{file = "thriftpy2-0.4.14-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b4aae6f6c1d8d12e63c45f68ec1a25267e7d3af1ced1e5a82cbabaaed4bcebc9"},
|
||||
{file = "thriftpy2-0.4.14.tar.gz", hash = "sha256:1758ccaeb2a40d8779b50cdd3d7a3b43e8c5752f21ad0a54ded7c251d05219e8"},
|
||||
]
|
||||
toml = [
|
||||
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
||||
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
||||
|
||||
@@ -45,10 +45,17 @@ filetype = {version = "^1.0.7", optional = true}
|
||||
Brotli = "^1.0.9"
|
||||
jinja2 = "^3.0.3"
|
||||
toml = "^0.10.2"
|
||||
sqlalchemy = {version = "^1.4.36", optional = true}
|
||||
pymysql = {version = "^1.0.2",optional = true}
|
||||
cython = {version = "^0.29.28", optional = true}
|
||||
thriftpy2 = {version = "^0.4.14", optional = true}
|
||||
thrift = {version = "^0.16.0", optional = true}
|
||||
|
||||
[tool.poetry.extras]
|
||||
allure = ["allure-pytest"] # pip install "httprunner[allure]", poetry install -E allure
|
||||
upload = ["requests-toolbelt", "filetype"] # pip install "httprunner[upload]", poetry install -E upload
|
||||
sql = ["sqlalchemy","pymysql"] # pip install "httprunner[sql]", poetry install -E sql
|
||||
thrift = ["cython","thrift","thriftpy2"] # pip install "httprunner[thrift]", poetry install -E thrift
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
coverage = "^4.5.4"
|
||||
|
||||
Reference in New Issue
Block a user