Commit

fix fused_moe.py conflict
BruceXcluding committed Dec 27, 2024
1 parent 57a5006 commit 3fa113b
Showing 1 changed file with 4 additions and 1 deletion.
python/sglang/srt/layers/moe/fused_moe_triton/fused_moe.py
@@ -11,7 +11,6 @@
 import torch
 import triton
 import triton.language as tl
-from sgl_kernel import moe_align_block_size as sgl_moe_align_block_size
 from vllm import _custom_ops as ops
 
 from sglang.srt.layers.moe.topk import select_experts
@@ -20,6 +19,10 @@
 
 is_hip_ = is_hip()
 
+from sgl_kernel import moe_align_block_size as sgl_moe_align_block_size
+
+
+
 logger = logging.getLogger(__name__)
 padding_size = 128 if bool(int(os.getenv("MOE_PADDING", "0"))) else 0
 
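For context, here is how the top of fused_moe.py plausibly reads after this commit: a sketch assembled from the two hunks above. The conflict is resolved by moving the sgl_kernel import below the is_hip_ platform check rather than keeping it in the top import block. The logging/os imports and the origin of is_hip are assumptions (they are implied by the surrounding code but not shown in the diff):

import logging
import os

import torch
import triton
import triton.language as tl
from vllm import _custom_ops as ops

from sglang.srt.layers.moe.topk import select_experts
from sglang.srt.utils import is_hip  # assumed import path; not shown in the diff

is_hip_ = is_hip()

# Relocated here by this commit (see the hunks above) to resolve the merge conflict.
from sgl_kernel import moe_align_block_size as sgl_moe_align_block_size



logger = logging.getLogger(__name__)
padding_size = 128 if bool(int(os.getenv("MOE_PADDING", "0"))) else 0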
