Skip to content

Commit a388915

Browse files
authored
Merge pull request #2182 from FedML-AI/raphael/fix-deploy
[Deploy][Autoscale] Bug fix: continue the for loop if no scale op.
2 parents 4ceba31 + 1d5a05d commit a388915

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

python/fedml/computing/scheduler/comm_utils/job_monitor.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def autoscaler_reconcile_after_interval(self):
148148
if current_replicas == new_replicas:
149149
# Basically the autoscaler decided that no scaling operation should take place.
150150
logging.info(f"No scaling operation for endpoint {e_id}.")
151-
return
151+
continue
152152

153153
# Should scale in / out
154154
curr_version = fedml.get_env_version()
@@ -159,15 +159,15 @@ def autoscaler_reconcile_after_interval(self):
159159
mlops_prefix = "https://open-test.fedml.ai/"
160160
else:
161161
logging.error(f"Do not support the version {curr_version}.")
162-
return
162+
continue
163163
autoscale_url_path = "fedmlModelServer/api/v1/endpoint/auto-scale"
164164
url = f"{mlops_prefix}{autoscale_url_path}"
165165

166166
# Get cached token for authorization of autoscale request
167167
cached_token = fedml_model_cache.get_end_point_token(e_id, e_name, model_name)
168168
if cached_token is None:
169169
logging.error(f"Failed to get the cached token for endpoint {e_id}.")
170-
return
170+
continue
171171

172172
req_header = {
173173
"Authorization": f"Bearer {cached_token}"

0 commit comments

Comments (0)