@@ -115,7 +115,7 @@ def run_impl(
 
         # start unified inference server
         self.start_device_inference_gateway(
-            agent_config=self.agent_config, inference_port=inference_port)
+            inference_port=inference_port, agent_config=self.agent_config)
 
         # start inference monitor server
         self.stop_device_inference_monitor(
@@ -540,6 +540,14 @@ def stop_device_inference_monitor(run_id, end_point_name, model_id, model_name,
     def recover_inference_and_monitor():
         # noinspection PyBroadException
         try:
+            agent_config = dict()
+            try:
+                agent_config["mqtt_config"], _, _, _ = MLOpsConfigs.fetch_all_configs()
+            except Exception as e:
+                pass
+
+            FedMLDeployMasterJobRunner.start_device_inference_gateway(agent_config=agent_config)
+
             history_jobs = FedMLServerDataInterface.get_instance().get_history_jobs()
             for job in history_jobs.job_list:
                 if job.running_json is None:
@@ -558,15 +566,6 @@ def recover_inference_and_monitor():
                 if not is_activated:
                     continue
 
-                agent_config = dict()
-                try:
-                    agent_config["mqtt_config"], _, _, _ = MLOpsConfigs.fetch_all_configs()
-                except Exception as e:
-                    pass
-
-                FedMLDeployMasterJobRunner.start_device_inference_gateway(
-                    inference_port=inference_port, agent_config=agent_config)
-
                 FedMLDeployMasterJobRunner.stop_device_inference_monitor(
                     run_id, end_point_name, model_id, model_name, model_version)
                 FedMLDeployMasterJobRunner.start_device_inference_monitor(
0 commit comments