Commit
Merge pull request #79 from ssbuild/dev
support skywork
Showing 8 changed files with 1,501 additions and 15 deletions.
4 changes: 4 additions & 0 deletions
src/deep_training/nlp/models/skywork/__init__.py
@@ -0,0 +1,4 @@
# coding=utf8
# @Time    : 2023/10/31 1:51
# @Author  : tk
# @FileName: __init__.py
89 changes: 89 additions & 0 deletions
src/deep_training/nlp/models/skywork/configuration_skywork.py
@@ -0,0 +1,89 @@
# Copyright (c) SkyworkAI and the HuggingFace Inc. team. All rights reserved.
# This code is built upon Huggingface's transformers repository.


from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging


logger = logging.get_logger(__name__)

LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP = {}


class SkyworkConfig(PretrainedConfig):

    model_type = "skywork"
    keys_to_ignore_at_inference = ["past_key_values"]

    def __init__(
        self,
        vocab_size=32000,
        hidden_size=4096,
        intermediate_size=11008,
        num_hidden_layers=32,
        num_attention_heads=32,
        num_key_value_heads=None,
        hidden_act="silu",
        max_position_embeddings=2048,
        initializer_range=0.02,
        rms_norm_eps=1e-6,
        use_cache=True,
        pad_token_id=None,
        bos_token_id=1,
        eos_token_id=2,
        pretraining_tp=1,
        tie_word_embeddings=False,
        rope_theta=10000.0,
        rope_scaling=None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads

        # for backward compatibility
        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads

        self.num_key_value_heads = num_key_value_heads
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.rms_norm_eps = rms_norm_eps
        self.pretraining_tp = pretraining_tp
        self.use_cache = use_cache
        self.rope_theta = rope_theta
        self.rope_scaling = rope_scaling
        self._rope_scaling_validation()

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )

    def _rope_scaling_validation(self):
        """
        Validate the `rope_scaling` configuration.
        """
        if self.rope_scaling is None:
            return

        if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
            raise ValueError(
                "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, "
                f"got {self.rope_scaling}"
            )
        rope_scaling_type = self.rope_scaling.get("type", None)
        rope_scaling_factor = self.rope_scaling.get("factor", None)
        if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic", "ntk"]:
            raise ValueError(
                f"`rope_scaling`'s type field must be one of ['linear', 'dynamic', 'ntk'], got {rope_scaling_type}"
            )
        if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
            raise ValueError(f"`rope_scaling`'s factor field must be a float > 1, got {rope_scaling_factor}")
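For reference, a minimal usage sketch of the new config class. It assumes the src/ layout installs as the deep_training package, so the module is importable under the path shown above; the concrete values (8 key/value heads, an "ntk" scaling factor of 2.0) are illustrative, not taken from the diff.

# Minimal sketch: constructing a SkyworkConfig (assumed import path derived from the file location above).
from deep_training.nlp.models.skywork.configuration_skywork import SkyworkConfig

# Setting num_key_value_heads below num_attention_heads selects grouped-query
# attention; leaving it None copies num_attention_heads (plain multi-head attention).
config = SkyworkConfig(
    num_attention_heads=32,
    num_key_value_heads=8,
    rope_scaling={"type": "ntk", "factor": 2.0},  # exactly two keys: type and factor
)
print(config.num_key_value_heads)  # -> 8

# _rope_scaling_validation rejects a non-float factor (or one <= 1.0):
try:
    SkyworkConfig(rope_scaling={"type": "linear", "factor": 1})  # int, not float
except ValueError as err:
    print(err)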