Skip to content

Commit

Permalink
glm4
Browse files Browse the repository at this point in the history
  • Loading branch information
ssbuild committed Jun 12, 2024
1 parent 6724c7e commit 734628a
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 8 deletions.
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,11 @@ pip install -U git+https://github.com/ssbuild/deep_training.git --no-deps --forc

## update
- <strong>2024-06-10</strong>
- 0.3.1 support glm4,glm4v
  - 0.3.1 support glm4 https://github.com/ssbuild/glm4_finetuning and
    glm4v https://github.com/ssbuild/glm4v_finetuning

- <strong>2024-02-15</strong>
- 0.2.11 support internlm2
- 0.2.11 support internlm2 https://github.com/ssbuild/internlm2_finetuning

- <strong>2023-12-02</strong>
- 0.2.10 update qwen model for 1.8b 7b 14b 72b
Expand Down
10 changes: 4 additions & 6 deletions src/deep_training/zoo/model_zoo/glm4v/llm_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,15 +64,14 @@ def process_response(self, output, history):

@torch.inference_mode()
def chat(self, tokenizer, query: str, history: List[Dict] = None, role: str = "user", image=None,
max_length: int = 8192, num_beams=1, do_sample=True, top_p=0.8, temperature=0.8, logits_processor=None,
logits_processor=None,
**kwargs):
if history is None:
history = []
if logits_processor is None:
logits_processor = LogitsProcessorList()
logits_processor.append(InvalidScoreLogitsProcessor())
gen_kwargs = {"max_length": max_length, "num_beams": num_beams, "do_sample": do_sample, "top_p": top_p,
"temperature": temperature, "logits_processor": logits_processor, **kwargs}
gen_kwargs = {"logits_processor": logits_processor, **kwargs}
message = {"role": role, "content": query}
if image is not None:
message["image"] = image
Expand All @@ -90,7 +89,7 @@ def chat(self, tokenizer, query: str, history: List[Dict] = None, role: str = "u

@torch.inference_mode()
def stream_chat(self, tokenizer, query: str, history: List[Dict] = None, role: str = "user", image=None,
past_key_values=None, max_length: int = 8192, do_sample=True, top_p=0.8, temperature=0.8,
past_key_values=None,
logits_processor=None, return_past_key_values=False, **kwargs):
if history is None:
history = []
Expand All @@ -99,8 +98,7 @@ def stream_chat(self, tokenizer, query: str, history: List[Dict] = None, role: s
logits_processor.append(InvalidScoreLogitsProcessor())
eos_token_id = [tokenizer.eos_token_id, tokenizer.convert_tokens_to_ids("<|user|>"),
tokenizer.convert_tokens_to_ids("<|observation|>")]
gen_kwargs = {"max_length": max_length, "do_sample": do_sample, "top_p": top_p,
"temperature": temperature, "logits_processor": logits_processor, **kwargs}
gen_kwargs = {"logits_processor": logits_processor, **kwargs}
message = {"role": role, "content": "query"}
if image is not None:
message["image"] = image
Expand Down

0 comments on commit 734628a

Please sign in to comment.