Skip to content

Commit

Permalink
0.1.2
Browse files Browse the repository at this point in the history
Signed-off-by: ssbuild <[email protected]>
  • Loading branch information
ssbuild committed Apr 11, 2023
1 parent 123cd99 commit 9ec8da8
Show file tree
Hide file tree
Showing 7 changed files with 634 additions and 627 deletions.
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
- keras 模块封装

## 更新
- <strong>2023年04月11</strong>
- deep_training 0.1.2 重构lora v2, 增加adalora
- <strong>2023年04月07</strong>
- deep_training 0.1.1 同步更新chatglm 词表配置信息
- <strong>2023年04月02</strong>
Expand Down
17 changes: 9 additions & 8 deletions data_helper/training_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,14 +144,15 @@ class TrainingArguments:
"},
)
adv: dict = field(
default_factory= lambda: {
'mode': None, # None, fgm, fgsm_local, fgsm, pgd, free_local, free
'emb_name=': 'embedding',
'attack_iters': 2, # pgd
'minibatch_replays': 2, # free
'alpha': 0.1, # pgd
'epsilon': 1.0 # pgd,fgm
},
# default_factory= lambda: {
# 'mode': None, # None, fgm, fgsm_local, fgsm, pgd, free_local, free
# 'emb_name': 'embedding',
# 'attack_iters': 2, # pgd
# 'minibatch_replays': 2, # free
# 'alpha': 0.1, # pgd
# 'epsilon': 1.0 # pgd,fgm
# },
default=None,
metadata={"help": "对抗训练"},
)
hierarchical_position: float = field(
Expand Down
6 changes: 5 additions & 1 deletion nlp/models/lora/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,6 @@
# @Time : 2023/3/2 20:55
# @Author : tk
# @Author : tk

# 兼容旧版本
from .v1 import LoraModel,LoraArguments
from .v2 import LoraModel as LoraModelV2,LoraArguments as LoraArgumentsV2
1 change: 1 addition & 0 deletions nlp/models/lora/v2/configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,7 @@ def save_pretrained(self, save_directory, **kwargs):
@classmethod
def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
config = LORA_TYPE_TO_CONFIG_MAPPING[LoraBaseArguments.from_pretrained(pretrained_model_name_or_path,**kwargs).lora_type].from_pretrained(pretrained_model_name_or_path,**kwargs)
assert config.with_lora , ValueError('lora config get bad with_lora ',config.with_lora)
# config = cls()
# config.lora = None
# config.adalora = None
Expand Down
Loading

0 comments on commit 9ec8da8

Please sign in to comment.