chore: cleanup pycodestyle E rules (#8269)

This commit is contained in:
Bowen Liang
2024-09-11 18:55:00 +08:00
committed by GitHub
parent f515af2232
commit 781d294f49
14 changed files with 25 additions and 24 deletions

View File

@@ -74,7 +74,7 @@ class Signer:
def sign(request, credentials):
if request.path == "":
request.path = "/"
-        if request.method != "GET" and not ("Content-Type" in request.headers):
+        if request.method != "GET" and "Content-Type" not in request.headers:
request.headers["Content-Type"] = "application/x-www-form-urlencoded; charset=utf-8"
format_date = Signer.get_current_format_date()

View File

@@ -31,7 +31,7 @@ class Service:
self.service_info.scheme = scheme
def get(self, api, params, doseq=0):
-        if not (api in self.api_info):
+        if api not in self.api_info:
raise Exception("no such api")
api_info = self.api_info[api]
@@ -49,7 +49,7 @@ class Service:
raise Exception(resp.text)
def post(self, api, params, form):
-        if not (api in self.api_info):
+        if api not in self.api_info:
raise Exception("no such api")
api_info = self.api_info[api]
r = self.prepare_request(api_info, params)
@@ -72,7 +72,7 @@ class Service:
raise Exception(resp.text)
def json(self, api, params, body):
-        if not (api in self.api_info):
+        if api not in self.api_info:
raise Exception("no such api")
api_info = self.api_info[api]
r = self.prepare_request(api_info, params)

View File

@@ -109,7 +109,7 @@ class MaasService(Service):
if not self._apikey and not credentials_exist:
raise new_client_sdk_request_error("no valid credential", req_id)
-        if not (api in self.api_info):
+        if api not in self.api_info:
raise new_client_sdk_request_error("no such api", req_id)
def _call(self, endpoint_id, api, req_id, params, body, apikey=None, stream=False):

View File

@@ -71,7 +71,7 @@ class BingSearchTool(BuiltinTool):
text = ""
if search_results:
for i, result in enumerate(search_results):
-                text += f'{i+1}: {result.get("name", "")} - {result.get("snippet", "")}\n'
+                text += f'{i + 1}: {result.get("name", "")} - {result.get("snippet", "")}\n'
if computation and "expression" in computation and "value" in computation:
text += "\nComputation:\n"

View File

@@ -83,5 +83,5 @@ class DIDApp:
if status["status"] == "done":
return status
elif status["status"] == "error" or status["status"] == "rejected":
-                raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error",{}).get("description")}')
+                raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error", {}).get("description")}')
time.sleep(poll_interval)

View File

@@ -142,7 +142,7 @@ class ListWorksheetRecordsTool(BuiltinTool):
for control in controls:
control_type_id = self.get_real_type_id(control)
if (control_type_id in self._get_ignore_types()) or (
-                allow_fields and not control["controlId"] in allow_fields
+                allow_fields and control["controlId"] not in allow_fields
):
continue
else:

View File

@@ -67,7 +67,7 @@ class ListWorksheetsTool(BuiltinTool):
items = []
tables = ""
for item in section.get("items", []):
-            if item.get("type") == 0 and (not "notes" in item or item.get("notes") != "NO"):
+            if item.get("type") == 0 and ("notes" not in item or item.get("notes") != "NO"):
if type == "json":
filtered_item = {"id": item["id"], "name": item["name"], "notes": item.get("notes", "")}
items.append(filtered_item)

View File

@@ -310,7 +310,7 @@ class Graph(BaseModel):
parallel_branch_node_ids["default"].append(graph_edge.target_node_id)
else:
condition_hash = graph_edge.run_condition.hash
-                    if not condition_hash in condition_edge_mappings:
+                    if condition_hash not in condition_edge_mappings:
condition_edge_mappings[condition_hash] = []
condition_edge_mappings[condition_hash].append(graph_edge)

View File

@@ -90,9 +90,9 @@ class GraphEngine:
thread_pool_max_submit_count = 100
thread_pool_max_workers = 10
-        ## init thread pool
+        # init thread pool
if thread_pool_id:
-            if not thread_pool_id in GraphEngine.workflow_thread_pool_mapping:
+            if thread_pool_id not in GraphEngine.workflow_thread_pool_mapping:
raise ValueError(f"Max submit count {thread_pool_max_submit_count} of workflow thread pool reached.")
self.thread_pool_id = thread_pool_id