Skip to content

Commit

Permalink
0.1.0
Browse files Browse the repository at this point in the history
Signed-off-by: tk <[email protected]>
  • Loading branch information
ssbuild committed Apr 3, 2023
1 parent 48fe35c commit d7c5630
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 9 deletions.
2 changes: 1 addition & 1 deletion nlp/models/chatglm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -877,7 +877,7 @@ def get_layer(layer_id):

total_params = sum(p.numel() for p in self.parameters())
trainable_params = sum(p.numel() for p in self.parameters() if p.requires_grad)
print("Using p-tuning v2: # trainable_params = {} / {}".format(trainable_params, total_params))
print("Using p-tuning v2: # trainable_params = {} / {} , || trainable %: {}".format(trainable_params, total_params , 100 * trainable_params / total_params))
def get_input_embeddings(self):
    """Return the input word-embedding module stored on this model.

    Simply exposes ``self.word_embeddings``; callers use this accessor
    rather than reaching for the attribute directly.
    """
    embeddings = self.word_embeddings
    return embeddings

Expand Down
21 changes: 14 additions & 7 deletions nlp/models/transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,12 +117,17 @@ def __new__(cls, name, base, attr,*args,**kwargs):
return cls_



class PreTrainedModel_Data:
    """Namespace object holding metadata about a wrapped pretrained model.

    Attributes are copied onto an instance of this class (rather than onto
    the owning module itself) when a model is attached — presumably to keep
    the copied HuggingFace-style attributes from colliding with the owning
    module's own attributes; TODO confirm against ``set_model``'s callers.
    """
    # Attribute name under which the wrapped base model is stored on the
    # owning module (e.g. used via getattr in the `model` property);
    # None until a model is attached.
    base_model_prefix = None
    # Config class associated with the wrapped model; None until set.
    config_class = None

class TransformerBase(MyLightningModule,metaclass=TransformerFakeMeta):
def __init__(self,*args,**kwargs):
config = get_value_from_args('config',PretrainedConfig,*args,**kwargs)
super(TransformerBase, self).__init__()
self.config = config
self.base_model_prefix = None
self._premodel_data = PreTrainedModel_Data()
self._trainer: typing.Optional["pl.Trainer"] = None

def forward(self, *args, **batch):
Expand Down Expand Up @@ -216,9 +221,9 @@ def from_pretrained(self,CLS, *args, **kwargs):

@property
def model(self):
if not self.base_model_prefix:
if not self._premodel_data.base_model_prefix:
return None
return getattr(self, self.base_model_prefix,None)
return getattr(self, self._premodel_data.base_model_prefix,None)

@model.setter
def model(self, model):
Expand All @@ -231,13 +236,15 @@ def set_model(self, model , copy_attr=True):
o = getattr(model,k,None)
if o is None:
continue
setattr(self,k,o)
if o == 'model':
o = 'model_'
setattr(self._premodel_data,k,o)

assert self.base_model_prefix is not None, ValueError('base_model_prefix is not allow empty')
setattr(self, self.base_model_prefix, model)
assert self._premodel_data.base_model_prefix is not None, ValueError('base_model_prefix is not allow empty')
setattr(self, self._premodel_data.base_model_prefix, model)

def get_model_lr(self):
return [(self.model if self.base_model_prefix is not None else self , self.config.task_specific_params['learning_rate']), ]
return [(self.model if self._premodel_data.base_model_prefix is not None else self , self.config.task_specific_params['learning_rate']), ]



Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
ignore = ['test','tests']
setup(
name='deep_training',
version='0.1.0rc0',
version='0.1.0',
description='an easy training architecture',
long_description='torch_training: https://github.com/ssbuild/deep_training.git',
license='Apache License 2.0',
Expand Down

0 comments on commit d7c5630

Please sign in to comment.