@@ -73,6 +73,8 @@ def barrier_all(self):
7373 """
7474 if self .deepep_engine is not None :
7575 self .deepep_engine .barrier_all ()
76+ else :
77+ raise RuntimeError ("The deepep engine has not been initialized yet." )
7678
7779
7880 @singleton
@@ -207,18 +209,6 @@ class XPUEPRunner:
207209 EPRunnerBase
208210 """
209211
210- def _init_ep_engine(self, engine_class):
211- self.ep_engine = engine_class(
212- num_max_dispatch_tokens_per_rank=self.num_max_dispatch_tokens_per_rank,
213- hidden_size=self.hidden_size,
214- num_experts=self.num_experts + self.redundant_experts_num,
215- ep_size=self.ep_size,
216- ep_rank=self.ep_rank,
217- splitwise_role=self.splitwise_role,
218- moe_phase=self.moe_phase,
219- group=self.ep_group,
220- )
221-
222212 def __init__(
223213 self,
224214 top_k: int,
@@ -249,6 +239,18 @@ def init_ep_engine(self):
249239 """Initialize the EP engine with default implementation"""
250240 self ._init_ep_engine (self ._get_engine_class ())
251241
242+ def _init_ep_engine(self, engine_class):
243+ self.ep_engine = engine_class(
244+ num_max_dispatch_tokens_per_rank=self.num_max_dispatch_tokens_per_rank,
245+ hidden_size=self.hidden_size,
246+ num_experts=self.num_experts + self.redundant_experts_num,
247+ ep_size=self.ep_size,
248+ ep_rank=self.ep_rank,
249+ splitwise_role=self.splitwise_role,
250+ moe_phase=self.moe_phase,
251+ group=self.ep_group,
252+ )
253+
252254 @abstractmethod
253255 def _get_engine_class(self):
254256 """Get the engine class to be initialized"""