lv12 committed
Commit 9a27a9d · verified · 1 Parent(s): 82b10cf

Uploading model.pt

Files changed (4)
  1. __init__.py +0 -0
  2. config.json +2 -1
  3. custom_modules.py +9 -0
  4. model.py +1 -0
__init__.py ADDED
(empty file)
config.json CHANGED
@@ -9,6 +9,7 @@
   "num_experts": 2,
   "hidden_dim": 256,
   "auto_map": {
-    "AutoModel": "model.py"
+    "AutoModel": "model.EmbeddingMoE",
+    "AutoConfig": "model.EmbeddingMoEConfig"
   }
 }
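
With auto_map now pointing at concrete "module.ClassName" references instead of a file name, the Hub copy of this repo should be loadable through the Auto API with remote code enabled. A minimal usage sketch, assuming a hypothetical repo id lv12/embedding-moe:

from transformers import AutoConfig, AutoModel

# "lv12/embedding-moe" is a placeholder repo id for illustration.
# trust_remote_code tells transformers to fetch model.py from the repo
# and resolve the classes named in auto_map.
config = AutoConfig.from_pretrained("lv12/embedding-moe", trust_remote_code=True)
model = AutoModel.from_pretrained("lv12/embedding-moe", trust_remote_code=True)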
custom_modules.py ADDED
@@ -0,0 +1,9 @@
+from transformers import AutoConfig, AutoModel
+from transformers.models.auto.configuration_auto import CONFIG_MAPPING
+
+from .model import EmbeddingMoEConfig, EmbeddingMoE
+
+# Register the custom config and model with the Auto classes
+CONFIG_MAPPING.register("embedding_moe", EmbeddingMoEConfig)
+AutoConfig.register("embedding_moe", EmbeddingMoEConfig)
+AutoModel.register(EmbeddingMoEConfig, EmbeddingMoE)  # register takes (config_class, model_class)
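
Once these register() calls have run, the Auto classes can resolve the custom model locally, without trust_remote_code. A minimal sketch, assuming EmbeddingMoEConfig sets model_type = "embedding_moe" and the repo is importable as a package (custom_modules uses a relative import); the package name below is hypothetical:

from transformers import AutoConfig, AutoModel

import embedding_moe_repo.custom_modules  # hypothetical package; importing it runs the register() calls

# Build a config for the registered model_type and instantiate the model from it.
config = AutoConfig.for_model("embedding_moe", num_experts=2, hidden_dim=256)
model = AutoModel.from_config(config)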
model.py CHANGED
@@ -1,5 +1,6 @@
 import torch
 from torch import nn
+import torch.nn.functional as F
 from transformers import PreTrainedModel, PretrainedConfig, AutoModel

 class EmbeddingMoEConfig(PretrainedConfig):
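
The diff only adds the functional import; what F is used for is not shown here. One common reason an MoE model pulls in torch.nn.functional is softmax gating over experts. A hypothetical sketch under that assumption, using the num_experts and hidden_dim values from config.json; the class and layer names are illustrative, not the actual model.py contents:

import torch
from torch import nn
import torch.nn.functional as F

class GatedMixture(nn.Module):
    """Illustrative softmax-gated mixture over expert projections."""

    def __init__(self, num_experts: int = 2, hidden_dim: int = 256):
        super().__init__()
        self.experts = nn.ModuleList(
            nn.Linear(hidden_dim, hidden_dim) for _ in range(num_experts)
        )
        self.router = nn.Linear(hidden_dim, num_experts)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        gates = F.softmax(self.router(x), dim=-1)              # (batch, num_experts)
        outs = torch.stack([e(x) for e in self.experts], -1)   # (batch, hidden_dim, num_experts)
        return (outs * gates.unsqueeze(1)).sum(-1)             # gate-weighted sum over experts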