Merge pull request #811 from illacloud/develop
Develop
AruSeito authored Jan 14, 2023
2 parents 44ef44d + 1aabd7a commit 07de4a9
Showing 29 changed files with 922 additions and 324 deletions.
33 changes: 33 additions & 0 deletions apps/builder/src/i18n/locale/en-US.json
@@ -297,6 +297,39 @@
"placeholder": {
"query": "# Use $ to add a variable into your query (e.g. $category ) \r\nquery ($owner: String!, $name: String!){\r\n repository(owner: $owner, name: $name) \r\n}"
}
},
"hugging_face": {
"mode_id": "Model ID",
"placeholder": {
"mode_id": "bert-base-uncased",
"text": "The answer to the universe is [MASK].",
"json": "{ \r\n\"past_user_inputs\": [\"Which movie is the best ?\"], \r\n\"generated_responses\": [\"It's Die Hard for sure.\"], \r\n\"text\": \"Can you explain why ?\" \r\n}",
"binary": "'@cats.jpg' \\",
"use_cache": "Boolean, leave blank for true",
"min_length": "Integer to define the minimum length in tokens of the output summary",
"top_k": "Integer to define the top tokens",
"top_p": "Float to define the tokens",
"temperature": "Float(0.0-100.0),leave blank for 1.0",
"repetition_penalty": "Float (0.0-100.0)",
"max_time": "Float (0.0-120.0) to define the amount of time in seconds"
},
"tips": {
"mode_id": "Go to the <0>Model Hub</0> and select the model you want to use.",
"use_detail_parameters": "<0>Click here</0> to learn more about the detail parameters",
"temperature": "1 means regular sampling, 0 means always taking the highest score, 100.0 is getting closer to uniform probability.",
"repetition_penalty": "The more a token is used within generation the more it is penalized to not be picked in successive generation passes."
},
"parameter": "Parameter",
"use_detail_parameters": "Use detail parameters",
"use_cache": "Use cache",
"wait_for_model": "Wait for model",
"min_length": "Min length",
"max_length": "Max length",
"top_k": "Top k",
"top_p": "Top p",
"temperature": "Temperature",
"repetition_penalty": "Repetition penalty",
"max_time": "Max time"
}
},
"resource": {
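The strings added above describe the detail parameters of the Hugging Face Inference API: use_cache and wait_for_model are request options, while min/max length, top_k, top_p, temperature, repetition_penalty, and max_time are generation parameters. A minimal TypeScript sketch of how a request body could be assembled from these fields follows; the field names mirror the public Inference API convention, and the helper name buildInferencePayload is hypothetical, not code from this repository.

// Hypothetical helper showing how the detail parameters named in the
// locale strings map onto a Hugging Face Inference API request body.
interface DetailParameters {
  minLength?: number         // "Min length" (tokens)
  maxLength?: number         // "Max length" (tokens)
  topK?: number              // "Top k"
  topP?: number              // "Top p"
  temperature?: number       // Float (0.0-100.0), leave blank for 1.0
  repetitionPenalty?: number // Float (0.0-100.0)
  maxTime?: number           // Float (0.0-120.0), seconds
}

interface DetailOptions {
  useCache?: boolean     // "Use cache", leave blank for true
  waitForModel?: boolean // "Wait for model"
}

function buildInferencePayload(
  inputs: string,
  params: DetailParameters,
  options: DetailOptions,
) {
  return {
    inputs,
    parameters: {
      min_length: params.minLength,
      max_length: params.maxLength,
      top_k: params.topK,
      top_p: params.topP,
      temperature: params.temperature,
      repetition_penalty: params.repetitionPenalty,
      max_time: params.maxTime,
    },
    options: {
      use_cache: options.useCache,
      wait_for_model: options.waitForModel,
    },
  }
}

// Usage: POST the payload as JSON to
// https://api-inference.huggingface.co/models/<model_id>
// with an Authorization: Bearer <token> header.
const payload = buildInferencePayload(
  "The answer to the universe is [MASK].",
  { temperature: 1.0, topK: 10 },
  { useCache: true },
)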
33 changes: 33 additions & 0 deletions apps/builder/src/i18n/locale/ja-JP.json
@@ -297,6 +297,39 @@
"placeholder": {
"query": "# Use $ to add a variable into your query (e.g. $category ) \nquery ($owner: String!, $name: String!){\n repository(owner: $owner, name: $name) \n}"
}
},
"hugging_face": {
"mode_id": "Model ID",
"placeholder": {
"mode_id": "bert-base-uncased",
"text": "The answer to the universe is [MASK].",
"json": "{ \r\n\"past_user_inputs\": [\"Which movie is the best ?\"], \r\n\"generated_responses\": [\"It's Die Hard for sure.\"], \r\n\"text\": \"Can you explain why ?\" \r\n}",
"binary": "'@cats.jpg' \\",
"use_cache": "Boolean, leave blank for true",
"min_length": "Integer to define the minimum length in tokens of the output summary",
"top_k": "Integer to define the top tokens",
"top_p": "Float to define the tokens",
"temperature": "Float(0.0-100.0),leave blank for 1.0",
"repetition_penalty": "Float (0.0-100.0)",
"max_time": "Float (0.0-120.0) to define the amount of time in seconds"
},
"tips": {
"mode_id": "Go to the <0>Model Hub</0> and select the model you want to use.",
"use_detail_parameters": "<0>Click here</0> to learn more about the detail parameters",
"temperature": "1 means regular sampling, 0 means always taking the highest score, 100.0 is getting closer to uniform probability.",
"repetition_penalty": "The more a token is used within generation the more it is penalized to not be picked in successive generation passes."
},
"parameter": "Parameter",
"use_detail_parameters": "Use detail parameters",
"use_cache": "Use cache",
"wait_for_model": "Wait for model",
"min_length": "Min length",
"max_length": "Max length",
"top_k": "Top k",
"top_p": "Top p",
"temperature": "Temperature",
"repetition_penalty": "Repetition penalty",
"max_time": "Max time"
}
},
"resource": {
33 changes: 33 additions & 0 deletions apps/builder/src/i18n/locale/ko-KR.json
@@ -297,6 +297,39 @@
"placeholder": {
"query": "# Use $ to add a variable into your query (e.g. $category ) \nquery ($owner: String!, $name: String!){\n repository(owner: $owner, name: $name) \n}"
}
},
"hugging_face": {
"mode_id": "Model ID",
"placeholder": {
"mode_id": "bert-base-uncased",
"text": "The answer to the universe is [MASK].",
"json": "{ \r\n\"past_user_inputs\": [\"Which movie is the best ?\"], \r\n\"generated_responses\": [\"It's Die Hard for sure.\"], \r\n\"text\": \"Can you explain why ?\" \r\n}",
"binary": "'@cats.jpg' \\",
"use_cache": "Boolean, leave blank for true",
"min_length": "Integer to define the minimum length in tokens of the output summary",
"top_k": "Integer to define the top tokens",
"top_p": "Float to define the tokens",
"temperature": "Float(0.0-100.0),leave blank for 1.0",
"repetition_penalty": "Float (0.0-100.0)",
"max_time": "Float (0.0-120.0) to define the amount of time in seconds"
},
"tips": {
"mode_id": "Go to the <0>Model Hub</0> and select the model you want to use.",
"use_detail_parameters": "<0>Click here</0> to learn more about the detail parameters",
"temperature": "1 means regular sampling, 0 means always taking the highest score, 100.0 is getting closer to uniform probability.",
"repetition_penalty": "The more a token is used within generation the more it is penalized to not be picked in successive generation passes."
},
"parameter": "Parameter",
"use_detail_parameters": "Use detail parameters",
"use_cache": "Use cache",
"wait_for_model": "Wait for model",
"min_length": "Min length",
"max_length": "Max length",
"top_k": "Top k",
"top_p": "Top p",
"temperature": "Temperature",
"repetition_penalty": "Repetition penalty",
"max_time": "Max time"
}
},
"resource": {
33 changes: 33 additions & 0 deletions apps/builder/src/i18n/locale/zh-CN.json
@@ -297,6 +297,39 @@
"placeholder": {
"query": "# Use $ to add a variable into your query (e.g. $category ) \nquery ($owner: String!, $name: String!){\n repository(owner: $owner, name: $name) \n}"
}
},
"hugging_face": {
"mode_id": "Model ID",
"placeholder": {
"mode_id": "bert-base-uncased",
"text": "The answer to the universe is [MASK].",
"json": "{ \r\n\"past_user_inputs\": [\"Which movie is the best ?\"], \r\n\"generated_responses\": [\"It's Die Hard for sure.\"], \r\n\"text\": \"Can you explain why ?\" \r\n}",
"binary": "'@cats.jpg' \\",
"use_cache": "Boolean,为空时默认true",
"min_length": "整数,用于定义输出摘要的最小标记长度",
"top_k": "整数,用于定义在创建新文本的示例操作中的顶级标记。",
"top_p": "浮点数,用于定义在文本生成示例操作中的标记。",
"temperature": "Float(0.0-100.0),为空时默认1.0",
"repetition_penalty": "Float (0.0-100.0)",
"max_time": "Float (0.0-120.0) ,定义查询应该花费的最大时间量(以秒为单位)"
},
"tips": {
"mode_id": "Go to the <0>Model Hub</0> and select the model you want to use.",
"use_detail_parameters": "<0>Click here</0> to learn more about the detail parameters",
"temperature": "1表示定期抽样,0表示总是取最高分,100.0越来越接近均匀概率。",
"repetition_penalty": "The more a token is used within generation the more it is penalized to not be picked in successive generation passes."
},
"parameter": "Parameter",
"use_detail_parameters": "Use detail parameters",
"use_cache": "Use cache",
"wait_for_model": "Wait for model",
"min_length": "Min length",
"max_length": "Max length",
"top_k": "Top k",
"top_p": "Top p",
"temperature": "Temperature",
"repetition_penalty": "Repetition penalty",
"max_time": "Max time"
}
},
"resource": {
@@ -41,6 +41,7 @@ export const nameStyle = css`
margin-top: 8px;
font-size: 14px;
font-weight: 500;
text-align: center;
color: ${globalColor(`--${illaPrefix}-grayBlue-02`)};
`

@@ -59,7 +59,6 @@ export const GraphQLPanel: FC = () => {

const handleOnDeleteKeyValue = useCallback(
(index: number, record: Params, name?: string) => {
console.log("name", name)
if (name && content.hasOwnProperty(name)) {
const oldList = content[name as keyof typeof content] as Params[]
let newList = [...oldList]
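The hunk above removes a stray console.log from handleOnDeleteKeyValue; the surrounding callback copies the content[name] array and removes the record at the given index. The diff is truncated, so the following is only a hypothetical, self-contained sketch of that remove-at-index pattern — the Params shape and the function name removeKeyValueAt are stand-ins, not the repository's actual code.

interface Params {
  key: string
  value: string
}

// Hypothetical illustration: copy the existing list, splice out the entry
// at `index`, and return the new list for the caller to write back.
function removeKeyValueAt(
  content: Record<string, unknown>,
  name: string,
  index: number,
): Params[] | undefined {
  if (!content.hasOwnProperty(name)) return undefined
  const oldList = content[name] as Params[]
  const newList = [...oldList]
  newList.splice(index, 1)
  return newList
}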