You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Description
A clear and concise description of what the bug is.
Agent.deploy is broken with AttributeError: 'str' object has no attribute 'items' error.
Reproduction
A minimal code sample demonstrating the bug. Reproducible in serverless. We are working on GEN AI hackathon and we have been only provisioned with Serverless for this. This works fine in Spot-DBR 15.4 LTS, when we tried.
Is it a regression?
Did this work in a previous version of the SDK? If so, which versions did you try?
Yes. It worked when I ran it in August 2024 — I assume that would have been version 0.30.0. Strangely, it now fails on every version I have tried.
Debug Logs
The SDK logs helpful debugging information when debug logging is enabled. Set the log level to debug by adding logging.basicConfig(level=logging.DEBUG) to your program, and include the logs here.
agents.deploy(UC_MODEL_NAME, version)
AttributeError: 'str' object has no attribute 'items'
File , line 3
1 import mlflow
2 from databricks import agents
----> 3 agents.deploy(UC_MODEL_NAME, version)
4 import logging
5 logging.basicConfig(level=logging.DEBUG)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/agents/deployments.py:445, in deploy(model_name, model_version, scale_to_zero, environment_vars, instance_profile_arn, tags, workload_size, **kwargs)
422 def deploy(
423 model_name: str,
424 model_version: int,
(...)
430 **kwargs,
431 ) -> Deployment:
432 """
433 Deploy new version of the agents.
434
(...)
443 :return: Chain deployment metadata.
444 """
--> 445 _check_model_is_rag_compatible(model_name, model_version)
446 _check_model_name(model_name)
447 endpoint_name = kwargs.get("endpoint_name", _create_endpoint_name(model_name))
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/agents/utils/mlflow_utils.py:53, in _check_model_is_rag_compatible(model_name, version)
51 return False
52 mlflow.set_registry_uri("databricks-uc")
---> 53 loaded_model = mlflow.pyfunc.load_model(f"models:/{model_name}/{str(version)}")
54 input_schema = loaded_model.metadata.get_input_schema()
56 chat_completions_request_properties = convert_dataclass_to_schema(
57 ChatCompletionRequest
58 ).to_dict()[0]
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/tracing/provider.py:268, in trace_disabled.<locals>.wrapper(*args, **kwargs)
266 disable()
267 try:
--> 268 is_func_called, result = True, f(*args, **kwargs)
269 finally:
270 enable()
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/pyfunc/__init__.py:1017, in load_model(model_uri, suppress_warnings, dst_path, model_config)
1013 entity_list.append(Entity(job=job_entity))
1015 lineage_header_info = LineageHeaderInfo(entities=entity_list) if entity_list else None
-> 1017 local_path = _download_artifact_from_uri(
1018 artifact_uri=model_uri, output_path=dst_path, lineage_header_info=lineage_header_info
1019 )
1021 if not suppress_warnings:
1022 model_requirements = _get_pip_requirements_from_model_path(local_path)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/tracking/artifact_utils.py:111, in _download_artifact_from_uri(artifact_uri, output_path, lineage_header_info)
108 repo = get_artifact_repository(artifact_uri=root_uri)
110 if isinstance(repo, ModelsArtifactRepository):
--> 111 return repo.download_artifacts(
112 artifact_path=artifact_path,
113 dst_path=output_path,
114 lineage_header_info=lineage_header_info,
115 )
116 return repo.download_artifacts(artifact_path=artifact_path, dst_path=output_path)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/models_artifact_repo.py:196, in ModelsArtifactRepository.download_artifacts(self, artifact_path, dst_path, lineage_header_info)
194 # Pass lineage header info if model is registered in UC
195 if isinstance(self.repo, UnityCatalogModelsArtifactRepository):
--> 196 model_path = self.repo.download_artifacts(
197 artifact_path, dst_path, lineage_header_info=lineage_header_info
198 )
199 else:
200 model_path = self.repo.download_artifacts(artifact_path, dst_path)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/unity_catalog_models_artifact_repo.py:140, in UnityCatalogModelsArtifactRepository.download_artifacts(self, artifact_path, dst_path, lineage_header_info)
139 def download_artifacts(self, artifact_path, dst_path=None, lineage_header_info=None):
--> 140 return self._get_artifact_repo(lineage_header_info=lineage_header_info).download_artifacts(
141 artifact_path, dst_path
142 )
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/artifact_repo.py:250, in ArtifactRepository.download_artifacts(self, artifact_path, dst_path)
248 # Submit download tasks
249 futures = {}
--> 250 if self._is_directory(artifact_path):
251 for file_info in self._iter_artifacts_recursive(artifact_path):
252 if file_info.is_dir: # Empty directory
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/artifact_repo.py:149, in ArtifactRepository._is_directory(self, artifact_path)
148 def _is_directory(self, artifact_path):
--> 149 listing = self.list_artifacts(artifact_path)
150 return len(listing) > 0
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/presigned_url_artifact_repo.py:103, in PresignedUrlArtifactRepository.list_artifacts(self, path)
100 req_body = json.dumps({"page_token": page_token}) if page_token else ""
102 response_proto = ListDirectoryResponse()
--> 103 resp = call_endpoint(
104 host_creds=self.db_creds,
105 endpoint=endpoint,
106 method="GET",
107 json_body=req_body,
108 response_proto=response_proto,
109 )
110 for dir_entry in resp.contents:
111 rel_path = posixpath.relpath(dir_entry.path, self.artifact_uri)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/utils/rest_utils.py:365, in call_endpoint(host_creds, endpoint, method, json_body, response_proto, extra_headers)
363 if method == "GET":
364 call_kwargs["params"] = json_body
--> 365 response = http_request(**call_kwargs)
366 else:
367 call_kwargs["json"] = json_body
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/utils/rest_utils.py:111, in http_request(host_creds, endpoint, method, max_retries, backoff_factor, backoff_jitter, extra_headers, retry_codes, timeout, raise_on_status, respect_retry_after_header, **kwargs)
106 ws_client = WorkspaceClient(config=config)
107 try:
108 # Databricks SDK APIClient.do API is for making request using
109 # HTTP
110 # https://github.com/databricks/databricks-sdk-py/blob/a714146d9c155dd1e3567475be78623f72028ee0/databricks/sdk/core.py#L134
--> 111 raw_response = ws_client.api_client.do(
112 method=method,
113 path=endpoint,
114 headers=extra_headers,
115 raw=True,
116 query=kwargs.get("params"),
117 body=kwargs.get("json"),
118 files=kwargs.get("files"),
119 data=kwargs.get("data"),
120 )
121 return raw_response["contents"]._response
122 except DatabricksError as e:
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/core.py:157, in ApiClient.do(self, method, path, url, query, headers, body, raw, files, data, auth, response_headers)
153 headers['User-Agent'] = self._user_agent_base
154 retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
155 is_retryable=self._is_retryable,
156 clock=self._cfg.clock)
--> 157 response = retryable(self._perform)(method,
158 url,
159 query=query,
160 headers=headers,
161 body=body,
162 raw=raw,
163 files=files,
164 data=data,
165 auth=auth)
167 resp = dict()
168 for header in response_headers if response_headers else []:
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/retries.py:54, in retried.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
50 retry_reason = f'{type(err).__name__} is allowed to retry'
52 if retry_reason is None:
53 # raise if exception is not retryable
---> 54 raise err
56 logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
57 clock.sleep(sleep + random())
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/retries.py:33, in retried.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
31 while clock.time() < deadline:
32 try:
---> 33 return func(*args, **kwargs)
34 except Exception as err:
35 last_err = err
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/core.py:257, in ApiClient._perform(self, method, url, query, headers, body, raw, files, data, auth)
245 def _perform(self,
246 method: str,
247 url: str,
(...)
253 data=None,
254 auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
255 response = self._session.request(method,
256 url,
--> 257 params=self._fix_query_string(query),
258 json=body,
259 headers=headers,
260 files=files,
261 data=data,
262 auth=auth,
263 stream=raw,
264 timeout=self._http_timeout_seconds)
265 try:
266 self._record_request_log(response, raw=raw or data is not None or files is not None)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/core.py:96, in ApiClient._fix_query_string(query)
94 if query is None:
95 return None
---> 96 with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()}
98 # Query parameters may be nested, e.g.
99 # {'filter_by': {'user_ids': [123, 456]}}
100 # The HTTP-compatible representation of this is
(...)
104 # See the following for more information:
105 # https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule
106 def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]:
Other Information
OS: [e.g. macOS]
Version: [e.g. 0.1.0]
Additional context
Add any other context about the problem here.
The text was updated successfully, but these errors were encountered:
hariprasad-sobeys
changed the title
[ISSUE] Deployment of LLM Agent broken
[ISSUE] Deployment of LLM Agent.deploy is broken with AttributeError: 'str' object has no attribute 'items' error.
Oct 7, 2024
Description
A clear and concise description of what the bug is.
Agent.deploy is broken with AttributeError: 'str' object has no attribute 'items' error.
Reproduction
A minimal code sample demonstrating the bug. Reproducible in serverless. We are working on GEN AI hackathon and we have been only provisioned with Serverless for this. This works fine in Spot-DBR 15.4 LTS, when we tried.
You can utilize the agent deployment code provided here. https://learn.microsoft.com/en-us/azure/databricks/generative-ai/create-log-agent
Simple chain notebook
Get notebook
Simple chain driver notebook
Get notebook
Expected behavior
A clear and concise description of what you expected to happen.
This is blocking ALL Agent Deployment use cases.
It will deploy agent as mentioned in https://learn.microsoft.com/en-us/azure/databricks/generative-ai/deploy-agent
Is it a regression?
Did this work in a previous version of the SDK? If so, which versions did you try?
Yes. It worked when I ran it in August 2024 — I assume that would have been version 0.30.0. Strangely, it now fails on every version I have tried.
Debug Logs
The SDK logs helpful debugging information when debug logging is enabled. Set the log level to debug by adding
logging.basicConfig(level=logging.DEBUG)
to your program, and include the logs here.
agents.deploy(UC_MODEL_NAME, version)
AttributeError: 'str' object has no attribute 'items'
File , line 3
1 import mlflow
2 from databricks import agents
----> 3 agents.deploy(UC_MODEL_NAME, version)
4 import logging
5 logging.basicConfig(level=logging.DEBUG)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/agents/deployments.py:445, in deploy(model_name, model_version, scale_to_zero, environment_vars, instance_profile_arn, tags, workload_size, **kwargs)
422 def deploy(
423 model_name: str,
424 model_version: int,
(...)
430 **kwargs,
431 ) -> Deployment:
432 """
433 Deploy new version of the agents.
434
(...)
443 :return: Chain deployment metadata.
444 """
--> 445 _check_model_is_rag_compatible(model_name, model_version)
446 _check_model_name(model_name)
447 endpoint_name = kwargs.get("endpoint_name", _create_endpoint_name(model_name))
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/agents/utils/mlflow_utils.py:53, in _check_model_is_rag_compatible(model_name, version)
51 return False
52 mlflow.set_registry_uri("databricks-uc")
---> 53 loaded_model = mlflow.pyfunc.load_model(f"models:/{model_name}/{str(version)}")
54 input_schema = loaded_model.metadata.get_input_schema()
56 chat_completions_request_properties = convert_dataclass_to_schema(
57 ChatCompletionRequest
58 ).to_dict()[0]
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/tracing/provider.py:268, in trace_disabled.<locals>.wrapper(*args, **kwargs)
266 disable()
267 try:
--> 268 is_func_called, result = True, f(*args, **kwargs)
269 finally:
270 enable()
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/pyfunc/__init__.py:1017, in load_model(model_uri, suppress_warnings, dst_path, model_config)
1013 entity_list.append(Entity(job=job_entity))
1015 lineage_header_info = LineageHeaderInfo(entities=entity_list) if entity_list else None
-> 1017 local_path = _download_artifact_from_uri(
1018 artifact_uri=model_uri, output_path=dst_path, lineage_header_info=lineage_header_info
1019 )
1021 if not suppress_warnings:
1022 model_requirements = _get_pip_requirements_from_model_path(local_path)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/tracking/artifact_utils.py:111, in _download_artifact_from_uri(artifact_uri, output_path, lineage_header_info)
108 repo = get_artifact_repository(artifact_uri=root_uri)
110 if isinstance(repo, ModelsArtifactRepository):
--> 111 return repo.download_artifacts(
112 artifact_path=artifact_path,
113 dst_path=output_path,
114 lineage_header_info=lineage_header_info,
115 )
116 return repo.download_artifacts(artifact_path=artifact_path, dst_path=output_path)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/models_artifact_repo.py:196, in ModelsArtifactRepository.download_artifacts(self, artifact_path, dst_path, lineage_header_info)
194 # Pass lineage header info if model is registered in UC
195 if isinstance(self.repo, UnityCatalogModelsArtifactRepository):
--> 196 model_path = self.repo.download_artifacts(
197 artifact_path, dst_path, lineage_header_info=lineage_header_info
198 )
199 else:
200 model_path = self.repo.download_artifacts(artifact_path, dst_path)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/unity_catalog_models_artifact_repo.py:140, in UnityCatalogModelsArtifactRepository.download_artifacts(self, artifact_path, dst_path, lineage_header_info)
139 def download_artifacts(self, artifact_path, dst_path=None, lineage_header_info=None):
--> 140 return self._get_artifact_repo(lineage_header_info=lineage_header_info).download_artifacts(
141 artifact_path, dst_path
142 )
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/artifact_repo.py:250, in ArtifactRepository.download_artifacts(self, artifact_path, dst_path)
248 # Submit download tasks
249 futures = {}
--> 250 if self._is_directory(artifact_path):
251 for file_info in self._iter_artifacts_recursive(artifact_path):
252 if file_info.is_dir: # Empty directory
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/artifact_repo.py:149, in ArtifactRepository._is_directory(self, artifact_path)
148 def _is_directory(self, artifact_path):
--> 149 listing = self.list_artifacts(artifact_path)
150 return len(listing) > 0
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/store/artifact/presigned_url_artifact_repo.py:103, in PresignedUrlArtifactRepository.list_artifacts(self, path)
100 req_body = json.dumps({"page_token": page_token}) if page_token else ""
102 response_proto = ListDirectoryResponse()
--> 103 resp = call_endpoint(
104 host_creds=self.db_creds,
105 endpoint=endpoint,
106 method="GET",
107 json_body=req_body,
108 response_proto=response_proto,
109 )
110 for dir_entry in resp.contents:
111 rel_path = posixpath.relpath(dir_entry.path, self.artifact_uri)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/utils/rest_utils.py:365, in call_endpoint(host_creds, endpoint, method, json_body, response_proto, extra_headers)
363 if method == "GET":
364 call_kwargs["params"] = json_body
--> 365 response = http_request(**call_kwargs)
366 else:
367 call_kwargs["json"] = json_body
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/mlflow/utils/rest_utils.py:111, in http_request(host_creds, endpoint, method, max_retries, backoff_factor, backoff_jitter, extra_headers, retry_codes, timeout, raise_on_status, respect_retry_after_header, **kwargs)
106 ws_client = WorkspaceClient(config=config)
107 try:
108 # Databricks SDK APIClient.do API is for making request using
109 # HTTP
110 # https://github.com/databricks/databricks-sdk-py/blob/a714146d9c155dd1e3567475be78623f72028ee0/databricks/sdk/core.py#L134
--> 111 raw_response = ws_client.api_client.do(
112 method=method,
113 path=endpoint,
114 headers=extra_headers,
115 raw=True,
116 query=kwargs.get("params"),
117 body=kwargs.get("json"),
118 files=kwargs.get("files"),
119 data=kwargs.get("data"),
120 )
121 return raw_response["contents"]._response
122 except DatabricksError as e:
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/core.py:157, in ApiClient.do(self, method, path, url, query, headers, body, raw, files, data, auth, response_headers)
153 headers['User-Agent'] = self._user_agent_base
154 retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
155 is_retryable=self._is_retryable,
156 clock=self._cfg.clock)
--> 157 response = retryable(self._perform)(method,
158 url,
159 query=query,
160 headers=headers,
161 body=body,
162 raw=raw,
163 files=files,
164 data=data,
165 auth=auth)
167 resp = dict()
168 for header in response_headers if response_headers else []:
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/retries.py:54, in retried.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
50 retry_reason = f'{type(err).__name__} is allowed to retry'
52 if retry_reason is None:
53 # raise if exception is not retryable
---> 54 raise err
56 logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
57 clock.sleep(sleep + random())
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/retries.py:33, in retried.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
31 while clock.time() < deadline:
32 try:
---> 33 return func(*args, **kwargs)
34 except Exception as err:
35 last_err = err
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/core.py:257, in ApiClient._perform(self, method, url, query, headers, body, raw, files, data, auth)
245 def _perform(self,
246 method: str,
247 url: str,
(...)
253 data=None,
254 auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
255 response = self._session.request(method,
256 url,
--> 257 params=self._fix_query_string(query),
258 json=body,
259 headers=headers,
260 files=files,
261 data=data,
262 auth=auth,
263 stream=raw,
264 timeout=self._http_timeout_seconds)
265 try:
266 self._record_request_log(response, raw=raw or data is not None or files is not None)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-fb33ac87-ad6b-49bb-b943-c8caa8a46319/lib/python3.10/site-packages/databricks/sdk/core.py:96, in ApiClient._fix_query_string(query)
94 if query is None:
95 return None
---> 96 with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()}
98 # Query parameters may be nested, e.g.
99 # {'filter_by': {'user_ids': [123, 456]}}
100 # The HTTP-compatible representation of this is
(...)
104 # See the following for more information:
105 # https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule
106 def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]:
Other Information
Additional context
Add any other context about the problem here.
The text was updated successfully, but these errors were encountered: