diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/app.py b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/app.py new file mode 100644 index 00000000..5cefc378 --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/app.py @@ -0,0 +1,22 @@ +from flask import Flask, render_template, request, jsonify +from chat import chatbot + +app = Flask(__name__) + + +@app.route("/") +def hello(): + return render_template('chat.html') + +@app.route("/ask", methods=['POST']) +def ask(): + + message = str(request.form['messageText']) + + bot_response = chatbot(message) + + return jsonify({'status':'OK','answer':bot_response}) + + +if __name__ == "__main__": + app.run() diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/chat.html b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/chat.html new file mode 100644 index 00000000..80d5852f --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/chat.html @@ -0,0 +1,160 @@ + + + + + + + Mistral Chatbot + + + + + + + +
+
+
+
+
+ + + +
+
+
+ Welcome To Mistral Chatbot !!! + (You: Green / Bot: + White) +
+
+
    +
+
+ +
+
+
+
+ + + + + + + diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/chat.py b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/chat.py new file mode 100644 index 00000000..72e4ec82 --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/chat.py @@ -0,0 +1,26 @@ +from peft import AutoPeftModelForCausalLM +from transformers import GenerationConfig +from transformers import AutoTokenizer +import torch +tokenizer = AutoTokenizer.from_pretrained("Vasanth/mistral-finetuned-alpaca") + +model = AutoPeftModelForCausalLM.from_pretrained( + "Vasanth/mistral-finetuned-alpaca", + low_cpu_mem_usage=True, + return_dict=True, + torch_dtype=torch.float16, + device_map="cuda") + +generation_config = GenerationConfig( + do_sample=True, + top_k=1, + temperature=0.1, + max_new_tokens=100, + pad_token_id=tokenizer.eos_token_id +) + +def chatbot(message): + input_str = "###Human: " + message + " ###Assistant: " + inputs = tokenizer(input_str, return_tensors="pt").to("cuda") + outputs = model.generate(**inputs, generation_config=generation_config) + return tokenizer.decode(outputs[0], skip_special_tokens=True).replace(input_str, '') \ No newline at end of file diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/config.py b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/config.py new file mode 100644 index 00000000..9a835b3b --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/config.py @@ -0,0 +1,2 @@ +##OPEN API STUFF +OPENAI_API_KEY = "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" diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/temp_files_chat b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/temp_files_chat new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/temp_files_chat @@ -0,0 +1 @@ + diff --git 
a/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/test.csv b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/test.csv new file mode 100644 index 00000000..02b2a251 --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/Flask/templates/test.csv @@ -0,0 +1,23 @@ +Questions,Answers +What is global warming?,Global warming is a long-term increase in Earth's average surface temperature due to human-made emissions of greenhouse gases. +What causes climate change?,"Climate change is caused by factors such as biotic processes, variations in solar radiation received by Earth, plate tectonics, and volcanic eruptions. Certain human activities have also been identified as significant causes of recent climate change, often referred to as global warming." +How can we mitigate climate change?,"Climate change can be mitigated through a variety of means such as reducing greenhouse gas emissions, promoting the use of renewable energy sources, improving insulation in buildings, and adopting sustainable farming practices." +What is the effect of climate change on biodiversity?,"Climate change affects biodiversity by altering the timing of natural events, shifting habitats, and increasing the rate of disease. In some cases, species may not be able to adapt to these changes quickly enough, which can result in reduced populations and even extinction." +What are greenhouse gases?,"Greenhouse gases are gases in Earth's atmosphere that trap heat. They let sunlight pass through the atmosphere, but they prevent the heat that the sunlight brings from leaving the atmosphere. The main greenhouse gases are carbon dioxide, methane, nitrous oxide, and fluorinated gases." +What is carbon footprint?,"A carbon footprint is the total greenhouse gas (GHG) emissions caused by an individual, event, organization, or product, expressed as carbon dioxide equivalent." 
+What is climate change adaptation?,"Climate change adaptation is the process of adjusting to current or expected climate change and its effects. It is one of the ways to respond to climate change, along with climate change mitigation." +What is the Paris Agreement?,"The Paris Agreement is a legally binding international treaty on climate change. It was adopted by 196 Parties at COP 21 in Paris, on 12 December 2015. Its goal is to limit global warming to well below 2, preferably to 1.5 degrees Celsius, compared to pre-industrial levels." +What is the impact of deforestation on climate change?,"Deforestation affects climate change on a large scale, as trees absorb CO2 when they grow. When they are cut down and burned or allowed to rot, their stored carbon is released back into the air." +Can renewable energy sources help combat climate change?,"Yes, shifting to renewable energy sources like wind, solar, and hydro can significantly help in reducing the emission of greenhouse gases and thus combat climate change." +What is the role of individuals in combating climate change?,"Individuals can play a significant role in combating climate change. This can be done by reducing personal carbon footprints, through measures like using energy efficiently, reducing waste, choosing sustainable products, using public transportation, and advocating for policies that support renewable energy and other sustainable practices." +How does climate change affect human health?,"Climate change affects human health in several ways. It can increase heat-related illnesses, exacerbate respiratory disorders due to poor air quality, alter the spread of vector-borne diseases, and increase the risk of illnesses caused by unsafe or insufficient water or lack of food." 
+What is GR491?,"GR491 is a Handbook of Sustainable Design of Digital Services, created by Institutes for Sustainable IT, learn more at https://gr491.isit-europe.org/en/#main-container" +What is a Carbon footprint?,"The Carbon footprint is a measure of the amount of carbon dioxide released into the atmosphere as a result of the activities of a particular individual, organization, or community." +Why is important to measure the Digital Services' carbon footprint?,"Calculating your Digital Solution’s carbon footprint is a necessary step to understanding your company’s contribution to global warming so you can identify ways to reduce it. Plus, consumers are increasingly interested in transparency around the environmental impacts of the products they use" +How to assess the carbon footprint of my digital solution?,"Identify the Digital Solutions that are your sources of carbon dioxide emissions +Collect data and quantify the amount of energy that is used +Look for potential reduction opportunities +Go to Carbon assessment to start your journey" +What can I do to reduce my digital solution carbon footprint?,"Once you have the results of the Carbon assessment, deepen into the results to identify disproportionate emissions sources and think about where there might be opportunities to reduce your footprint, if your main source of emissions is a website that lots of people visit, some small tweaks to the site design can increase its energy efficiency and reduce its carbon footprint." +What is SDG?,"The Sustainable Development Goals (SDGs), also known as the Global Goals, were adopted by the United Nations in 2015 as a universal call to action to end poverty, protect the planet, and ensure that by 2030 all people enjoy peace and prosperity." 
+How to learn more about sustainability?,Go to the Awareness section and take one of the quizzes catered for you or create a custom quiz based on your interests diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/Mistral_Chatbot.ipynb b/Generative Models/Advanced Mistral 7B LLM Chatbot/Mistral_Chatbot.ipynb new file mode 100644 index 00000000..65ce90fa --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/Mistral_Chatbot.ipynb @@ -0,0 +1 @@ +{"cells":[{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"9HaQh8gz0QiO","outputId":"b5dbdda8-70ba-422d-cb1e-6bc539e67ca6","executionInfo":{"status":"ok","timestamp":1696726568632,"user_tz":-330,"elapsed":55113,"user":{"displayName":"VASANTH P","userId":"10391598464000680670"}}},"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting git+https://github.com/huggingface/transformers\n"," Cloning https://github.com/huggingface/transformers to /tmp/pip-req-build-2ud2xb3t\n"," Running command git clone --filter=blob:none --quiet https://github.com/huggingface/transformers /tmp/pip-req-build-2ud2xb3t\n"," Resolved https://github.com/huggingface/transformers to commit 897a826d830e8b1e03eb482b165b5d88a7a08d5f\n"," Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n"," Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n"," Preparing metadata (pyproject.toml) ... 
\u001b[?25l\u001b[?25hdone\n","Collecting accelerate\n"," Downloading accelerate-0.23.0-py3-none-any.whl (258 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m258.1/258.1 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting peft\n"," Downloading peft-0.5.0-py3-none-any.whl (85 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m85.6/85.6 kB\u001b[0m \u001b[31m9.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting bitsandbytes\n"," Downloading bitsandbytes-0.41.1-py3-none-any.whl (92.6 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m92.6/92.6 MB\u001b[0m \u001b[31m9.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: pip in /usr/local/lib/python3.10/dist-packages (23.1.2)\n","Collecting install\n"," Downloading install-1.3.5-py3-none-any.whl (3.2 kB)\n","Collecting trl\n"," Downloading trl-0.7.1-py3-none-any.whl (117 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m118.0/118.0 kB\u001b[0m \u001b[31m15.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting py7zr\n"," Downloading py7zr-0.20.6-py3-none-any.whl (66 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m66.7/66.7 kB\u001b[0m \u001b[31m8.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting auto-gptq\n"," Downloading auto_gptq-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.8 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.8/1.8 MB\u001b[0m \u001b[31m87.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting optimum\n"," Downloading optimum-1.13.2.tar.gz (300 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m301.0/301.0 kB\u001b[0m \u001b[31m37.3 MB/s\u001b[0m eta 
\u001b[36m0:00:00\u001b[0m\n","\u001b[?25h Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n"," Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n"," Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from accelerate) (1.23.5)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (23.2)\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate) (5.9.5)\n","Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from accelerate) (6.0.1)\n","Requirement already satisfied: torch>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from accelerate) (2.0.1+cu118)\n","Collecting huggingface-hub (from accelerate)\n"," Downloading huggingface_hub-0.17.3-py3-none-any.whl (295 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m295.0/295.0 kB\u001b[0m \u001b[31m28.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from peft) (4.66.1)\n","Collecting safetensors (from peft)\n"," Downloading safetensors-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m57.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers==4.35.0.dev0) (3.12.4)\n","Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers==4.35.0.dev0) (2023.6.3)\n","Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers==4.35.0.dev0) (2.31.0)\n","Collecting tokenizers<0.15,>=0.14 (from 
transformers==4.35.0.dev0)\n"," Downloading tokenizers-0.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.8 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.8/3.8 MB\u001b[0m \u001b[31m99.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting datasets (from trl)\n"," Downloading datasets-2.14.5-py3-none-any.whl (519 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m519.6/519.6 kB\u001b[0m \u001b[31m49.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting texttable (from py7zr)\n"," Downloading texttable-1.7.0-py2.py3-none-any.whl (10 kB)\n","Collecting pycryptodomex>=3.6.6 (from py7zr)\n"," Downloading pycryptodomex-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.1 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.1/2.1 MB\u001b[0m \u001b[31m91.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting pyzstd>=0.14.4 (from py7zr)\n"," Downloading pyzstd-0.15.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m412.3/412.3 kB\u001b[0m \u001b[31m43.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting pyppmd<1.1.0,>=0.18.1 (from py7zr)\n"," Downloading pyppmd-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (138 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m138.8/138.8 kB\u001b[0m \u001b[31m17.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting pybcj>=0.6.0 (from py7zr)\n"," Downloading pybcj-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (49 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m49.8/49.8 kB\u001b[0m \u001b[31m3.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting multivolumefile>=0.2.3 
(from py7zr)\n"," Downloading multivolumefile-0.2.3-py3-none-any.whl (17 kB)\n","Collecting brotli>=1.0.9 (from py7zr)\n"," Downloading Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (3.0 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.0/3.0 MB\u001b[0m \u001b[31m84.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting inflate64>=0.3.1 (from py7zr)\n"," Downloading inflate64-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (93 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m93.1/93.1 kB\u001b[0m \u001b[31m11.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting rouge (from auto-gptq)\n"," Downloading rouge-1.0.1-py3-none-any.whl (13 kB)\n","Collecting coloredlogs (from optimum)\n"," Downloading coloredlogs-15.0.1-py2.py3-none-any.whl (46 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m46.0/46.0 kB\u001b[0m \u001b[31m5.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from optimum) (1.12)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (2023.6.0)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub->accelerate) (4.5.0)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.1)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (3.1.2)\n","Requirement already satisfied: triton==2.0.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate) (2.0.0)\n","Requirement already satisfied: cmake in /usr/local/lib/python3.10/dist-packages (from 
triton==2.0.0->torch>=1.10.0->accelerate) (3.27.6)\n","Requirement already satisfied: lit in /usr/local/lib/python3.10/dist-packages (from triton==2.0.0->torch>=1.10.0->accelerate) (17.0.2)\n","Collecting sentencepiece!=0.1.92,>=0.1.91 (from transformers==4.35.0.dev0)\n"," Downloading sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m80.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: protobuf in /usr/local/lib/python3.10/dist-packages (from transformers==4.35.0.dev0) (3.20.3)\n","Collecting humanfriendly>=9.1 (from coloredlogs->optimum)\n"," Downloading humanfriendly-10.0-py2.py3-none-any.whl (86 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m86.8/86.8 kB\u001b[0m \u001b[31m11.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: pyarrow>=8.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets->trl) (9.0.0)\n","Collecting dill<0.3.8,>=0.3.0 (from datasets->trl)\n"," Downloading dill-0.3.7-py3-none-any.whl (115 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m115.3/115.3 kB\u001b[0m \u001b[31m15.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from datasets->trl) (1.5.3)\n","Collecting xxhash (from datasets->trl)\n"," Downloading xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m21.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hCollecting multiprocess (from datasets->trl)\n"," Downloading multiprocess-0.70.15-py310-none-any.whl (134 kB)\n","\u001b[2K 
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m16.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets->trl) (3.8.5)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.35.0.dev0) (3.3.0)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.35.0.dev0) (3.4)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.35.0.dev0) (2.0.6)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.35.0.dev0) (2023.7.22)\n","Requirement already satisfied: six in /usr/local/lib/python3.10/dist-packages (from rouge->auto-gptq) (1.16.0)\n","Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->optimum) (1.3.0)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->trl) (23.1.0)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->trl) (6.0.4)\n","Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->trl) (4.0.3)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->trl) (1.9.2)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->trl) (1.4.0)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets->trl) (1.3.1)\n","Requirement already satisfied: MarkupSafe>=2.0 in 
/usr/local/lib/python3.10/dist-packages (from jinja2->torch>=1.10.0->accelerate) (2.1.3)\n","Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets->trl) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->datasets->trl) (2023.3.post1)\n","Building wheels for collected packages: transformers, optimum\n"," Building wheel for transformers (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for transformers: filename=transformers-4.35.0.dev0-py3-none-any.whl size=7745571 sha256=914c731adae69cbfffa58fea7d990accba5172a838fe9830cf82e64ae701e356\n"," Stored in directory: /tmp/pip-ephem-wheel-cache-9qae8u3l/wheels/c0/14/d6/6c9a5582d2ac191ec0a483be151a4495fe1eb2a6706ca49f1b\n"," Building wheel for optimum (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for optimum: filename=optimum-1.13.2-py3-none-any.whl size=395599 sha256=0f21447846579f34a5450f47c37d4cc020b397d6e90fc31fed6cd8b9ba4e1d88\n"," Stored in directory: /root/.cache/pip/wheels/6e/b7/2c/79405d98f0943373d8546daeae25a3d377f7659ca0cbe48699\n","Successfully built transformers optimum\n","Installing collected packages: texttable, sentencepiece, brotli, bitsandbytes, xxhash, safetensors, rouge, pyzstd, pyppmd, pycryptodomex, pybcj, multivolumefile, install, inflate64, humanfriendly, dill, py7zr, multiprocess, huggingface-hub, coloredlogs, tokenizers, transformers, datasets, accelerate, peft, trl, optimum, auto-gptq\n","Successfully installed accelerate-0.23.0 auto-gptq-0.4.2 bitsandbytes-0.41.1 brotli-1.1.0 coloredlogs-15.0.1 datasets-2.14.5 dill-0.3.7 huggingface-hub-0.17.3 humanfriendly-10.0 inflate64-0.3.1 install-1.3.5 multiprocess-0.70.15 multivolumefile-0.2.3 optimum-1.13.2 peft-0.5.0 py7zr-0.20.6 pybcj-1.0.1 pycryptodomex-3.19.0 pyppmd-1.0.0 pyzstd-0.15.9 rouge-1.0.1 safetensors-0.4.0 sentencepiece-0.1.99 texttable-1.7.0 tokenizers-0.14.1 
transformers-4.35.0.dev0 trl-0.7.1 xxhash-3.4.1\n"]}],"source":["! pip install accelerate peft bitsandbytes git+https://github.com/huggingface/transformers trl py7zr auto-gptq optimum"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":145,"referenced_widgets":["f1402ec61b2a493db4f9fbc21d6b3cf2","5845378d6a5e47ec99b9ed51ce3da08e","b0fe6b3b4a6648b198f710cf0a1dafdd","161aa3aa074a43f9adaa4c974b79672c","4eb677845caa4a1e8a80e4fb28c141ad","eb10188e7e744da5b47e50b7b7a504e9","8fef569859b44fc58f0e472855a2cfb3","e408fda1b6d047a795555b853d8da15b","d14ab7031eaa48a28c8266b42bc9ddd7","77282984ad5942948ca91fac762d3782","cbb9aa2e7b474bea8e820369afdfc69d","72cb2b2952cd4d6f820ff44566afff7a","9c001b62e1734754a39766e921dfd39b","62cba932d5e04a879de45ca01bed21b8","8ce7fdc20404488da7429c3baa8869cc","ea527461be434c1e8bd75f84f9cd0848","f70d1a342ac942978314d64dc2f5bec6","b3ace92302844dcf84a7718e06556f02","65a13b1109614d43a31f8bc75212b2b8","2400ba6ca6ff4f39b6218f77d6d52ddb","8a664505719d40618ef7b5342a0a4b27","b3dc2d64611e4f81a12bd52540b98cd0","d0d97423dd7b484aa7f85eedcb703254","3909b24f4e9445108afeca2b3c4829c6","5f65e8bdf1574824b66b45bbb3e4894c","22e1e59027cd44f88acb2f5c29f2cbdf","6699de6ced1f43919ce9e2a15b17fc69","048fa829e579417abc06e749d9404c79","ec8dd7408fc44a5dbf46401fc5993f4b","39996229275a46798842ccd92e04408a","2c47ad9177114dab815f474b79bfcb83","9c79bda15e454bd5a058a946dcee32ba"]},"id":"mlv-mL7k0lWk","outputId":"304d5ed3-b5e5-47bb-8452-a609c91a206a","executionInfo":{"status":"ok","timestamp":1696726569356,"user_tz":-330,"elapsed":727,"user":{"displayName":"VASANTH P","userId":"10391598464000680670"}}},"outputs":[{"output_type":"display_data","data":{"text/plain":["VBox(children=(HTML(value='
"],"text/html":["\n","
\n"," \n"," \n"," [250/250 22:51, Epoch 0/1]\n","
\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
StepTraining Loss
1001.479000
2001.248000

"]},"metadata":{}},{"output_type":"execute_result","data":{"text/plain":["TrainOutput(global_step=250, training_loss=1.3423775177001953, metrics={'train_runtime': 1383.1959, 'train_samples_per_second': 1.446, 'train_steps_per_second': 0.181, 'total_flos': 309533448536064.0, 'train_loss': 1.3423775177001953, 'epoch': 0.4})"]},"metadata":{},"execution_count":1}],"source":["import torch\n","from datasets import load_dataset, Dataset\n","from peft import LoraConfig, AutoPeftModelForCausalLM, prepare_model_for_kbit_training, get_peft_model\n","from transformers import AutoModelForCausalLM, AutoTokenizer, GPTQConfig, TrainingArguments\n","from trl import SFTTrainer\n","import os\n","\n","data = load_dataset(\"tatsu-lab/alpaca\", split=\"train\")\n","data_df = data.to_pandas()\n","data_df = data_df[:5000]\n","data_df[\"text\"] = data_df[[\"input\", \"instruction\", \"output\"]].apply(lambda x: \"###Human: \" + x[\"instruction\"] + \" \" + x[\"input\"] + \" ###Assistant: \"+ x[\"output\"], axis=1)\n","data = Dataset.from_pandas(data_df)\n","\n","\n","tokenizer = AutoTokenizer.from_pretrained(\"TheBloke/Mistral-7B-Instruct-v0.1-GPTQ\")\n","tokenizer.pad_token = tokenizer.eos_token\n","\n","\n","quantization_config_loading = GPTQConfig(bits=4, disable_exllama=True, tokenizer=tokenizer)\n","model = AutoModelForCausalLM.from_pretrained(\n"," \"TheBloke/Mistral-7B-Instruct-v0.1-GPTQ\",\n"," quantization_config=quantization_config_loading,\n"," device_map=\"auto\"\n"," )\n","\n","\n","model.config.use_cache=False\n","model.config.pretraining_tp=1\n","model.gradient_checkpointing_enable()\n","model = prepare_model_for_kbit_training(model)\n","\n","\n","peft_config = LoraConfig(\n"," r=16, lora_alpha=16, lora_dropout=0.05, bias=\"none\", task_type=\"CAUSAL_LM\", target_modules=[\"q_proj\", \"v_proj\"]\n",")\n","model = get_peft_model(model, peft_config)\n","\n","\n","training_arguments = TrainingArguments(\n"," output_dir=\"mistral-finetuned-alpaca\",\n"," 
per_device_train_batch_size=8,\n"," gradient_accumulation_steps=1,\n"," optim=\"paged_adamw_32bit\",\n"," learning_rate=2e-4,\n"," lr_scheduler_type=\"cosine\",\n"," save_strategy=\"epoch\",\n"," logging_steps=100,\n"," num_train_epochs=1,\n"," max_steps=250,\n"," fp16=True,\n"," push_to_hub=True\n",")\n","\n","\n","trainer = SFTTrainer(\n"," model=model,\n"," train_dataset=data,\n"," peft_config=peft_config,\n"," dataset_text_field=\"text\",\n"," args=training_arguments,\n"," tokenizer=tokenizer,\n"," packing=False,\n"," max_seq_length=512\n",")\n","\n","\n","trainer.train()"]},{"cell_type":"code","source":["! cp -r /content/mistral-finetuned-alpaca /content/drive/MyDrive/"],"metadata":{"id":"2C0J_lvpETlx"},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["# Inference"],"metadata":{"id":"xNGTJHf8loC-"}},{"cell_type":"code","source":["from google.colab import drive\n","drive.mount('/content/drive')"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"PAwdjd_Yg0n9","executionInfo":{"status":"ok","timestamp":1696731867090,"user_tz":-330,"elapsed":21152,"user":{"displayName":"VASANTH P","userId":"10391598464000680670"}},"outputId":"6838c997-3477-4298-cdf1-cd91bef00fd8"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Mounted at /content/drive\n"]}]},{"cell_type":"code","source":["from peft import AutoPeftModelForCausalLM\n","from transformers import GenerationConfig\n","from transformers import AutoTokenizer\n","import torch\n","tokenizer = AutoTokenizer.from_pretrained(\"/content/mistral-finetuned-alpaca\")\n","\n","inputs = tokenizer(\"\"\"###Human: Why mobile is bad for human? 
###Assistant: \"\"\", return_tensors=\"pt\").to(\"cuda\")"],"metadata":{"id":"5F2TaEJdrMb4","colab":{"base_uri":"https://localhost:8080/"},"outputId":"f030b2c7-237a-4081-8af6-cc5ae45dbd00","executionInfo":{"status":"ok","timestamp":1696731905373,"user_tz":-330,"elapsed":342,"user":{"displayName":"VASANTH P","userId":"10391598464000680670"}}},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n"]}]},{"cell_type":"code","source":["model = AutoPeftModelForCausalLM.from_pretrained(\n"," \"/content/mistral-finetuned-alpaca\",\n"," low_cpu_mem_usage=True,\n"," return_dict=True,\n"," torch_dtype=torch.float16,\n"," device_map=\"cuda\")"],"metadata":{"id":"oXWdyFH07pkS"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["generation_config = GenerationConfig(\n"," do_sample=True,\n"," top_k=1,\n"," temperature=0.1,\n"," max_new_tokens=100,\n"," pad_token_id=tokenizer.eos_token_id\n",")"],"metadata":{"id":"8CUitmUL7GzZ"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["import time\n","st_time = time.time()\n","outputs = model.generate(**inputs, generation_config=generation_config)\n","print(tokenizer.decode(outputs[0], skip_special_tokens=True))\n","print(time.time()-st_time)"],"metadata":{"id":"k2xZ0HX_sHZ6","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1696731934910,"user_tz":-330,"elapsed":12562,"user":{"displayName":"VASANTH P","userId":"10391598464000680670"}},"outputId":"f1072119-716f-4f63-a581-733ea429b628"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["###Human: Why mobile is bad for human? ###Assistant: 1. Mobile devices can be addictive and lead to a sedentary lifestyle.\n","2. Mobile devices can be harmful to mental health, causing anxiety and depression.\n","3. 
Mobile devices can be harmful to physical health, causing eye strain and headaches.\n","4. Mobile devices can be harmful to social relationships, causing people to become isolated and disconnected from others.\n","5. Mobile devices can be harmful to privacy, as they can track users' location and personal information.\n","\n","12.296765804290771\n"]}]},{"cell_type":"code","source":[],"metadata":{"id":"MfDUrykTlehI"},"execution_count":null,"outputs":[]}],"metadata":{"accelerator":"GPU","colab":{"provenance":[{"file_id":"1PStReD70FLxBLaxwOITePjv8dFX0CdBo","timestamp":1696671802254},{"file_id":"1-w2BaBxrydS5XKfPGfrKioctma78Sl15","timestamp":1695942887421}],"gpuType":"T4"},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"name":"python"},"widgets":{"application/vnd.jupyter.widget-state+json":{"f1402ec61b2a493db4f9fbc21d6b3cf2":{"model_module":"@jupyter-widgets/controls","model_name":"VBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"VBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"VBoxView","box_style":"","children":["IPY_MODEL_8a664505719d40618ef7b5342a0a4b27","IPY_MODEL_b3dc2d64611e4f81a12bd52540b98cd0","IPY_MODEL_d0d97423dd7b484aa7f85eedcb703254","IPY_MODEL_3909b24f4e9445108afeca2b3c4829c6"],"layout":"IPY_MODEL_8fef569859b44fc58f0e472855a2cfb3"}},"5845378d6a5e47ec99b9ed51ce3da08e":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_e408fda1b6d047a795555b853d8da15b","placeholder":"​","style":"IPY_MODEL_d14ab7031eaa48
a28c8266b42bc9ddd7","value":"


Copy a token from your Hugging Face\ntokens page and paste it below.
Immediately click login after copying\nyour token or it might be stored in plain text in this notebook file.
"}},"b0fe6b3b4a6648b198f710cf0a1dafdd":{"model_module":"@jupyter-widgets/controls","model_name":"PasswordModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"PasswordModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"PasswordView","continuous_update":true,"description":"Token:","description_tooltip":null,"disabled":false,"layout":"IPY_MODEL_77282984ad5942948ca91fac762d3782","placeholder":"​","style":"IPY_MODEL_cbb9aa2e7b474bea8e820369afdfc69d","value":""}},"161aa3aa074a43f9adaa4c974b79672c":{"model_module":"@jupyter-widgets/controls","model_name":"CheckboxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"CheckboxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"CheckboxView","description":"Add token as git 
credential?","description_tooltip":null,"disabled":false,"indent":true,"layout":"IPY_MODEL_72cb2b2952cd4d6f820ff44566afff7a","style":"IPY_MODEL_9c001b62e1734754a39766e921dfd39b","value":true}},"4eb677845caa4a1e8a80e4fb28c141ad":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ButtonView","button_style":"","description":"Login","disabled":false,"icon":"","layout":"IPY_MODEL_62cba932d5e04a879de45ca01bed21b8","style":"IPY_MODEL_8ce7fdc20404488da7429c3baa8869cc","tooltip":""}},"eb10188e7e744da5b47e50b7b7a504e9":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ea527461be434c1e8bd75f84f9cd0848","placeholder":"​","style":"IPY_MODEL_f70d1a342ac942978314d64dc2f5bec6","value":"\nPro Tip: If you don't already have one, you can create a dedicated\n'notebooks' token with 'write' access, that you can then easily reuse for all\nnotebooks.
"}},"8fef569859b44fc58f0e472855a2cfb3":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":"center","align_self":null,"border":null,"bottom":null,"display":"flex","flex":null,"flex_flow":"column","grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":"50%"}},"e408fda1b6d047a795555b853d8da15b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null
,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"d14ab7031eaa48a28c8266b42bc9ddd7":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"77282984ad5942948ca91fac762d3782":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"cbb9aa2e7b474bea8e820369afdfc69d":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"72cb2b2952cd4d6f820ff44566afff7a":{"model_module":"@
jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9c001b62e1734754a39766e921dfd39b":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"62cba932d5e04a879de45ca01bed21b8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":nul
l,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"8ce7fdc20404488da7429c3baa8869cc":{"model_module":"@jupyter-widgets/controls","model_name":"ButtonStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ButtonStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","button_color":null,"font_weight":""}},"ea527461be434c1e8bd75f84f9cd0848":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"f70d1a342ac942978314d64dc2f5bec6":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","
model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"b3ace92302844dcf84a7718e06556f02":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_65a13b1109614d43a31f8bc75212b2b8","placeholder":"​","style":"IPY_MODEL_2400ba6ca6ff4f39b6218f77d6d52ddb","value":"Connecting..."}},"65a13b1109614d43a31f8bc75212b2b8":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"2400ba6ca6ff4f39b6218f77d6d52ddb":{"model_module":"@jupyter-widgets/controls","model_name"
:"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"8a664505719d40618ef7b5342a0a4b27":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_5f65e8bdf1574824b66b45bbb3e4894c","placeholder":"​","style":"IPY_MODEL_22e1e59027cd44f88acb2f5c29f2cbdf","value":"Token is valid (permission: write)."}},"b3dc2d64611e4f81a12bd52540b98cd0":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_6699de6ced1f43919ce9e2a15b17fc69","placeholder":"​","style":"IPY_MODEL_048fa829e579417abc06e749d9404c79","value":"Your token has been saved in your configured git credential helpers 
(store)."}},"d0d97423dd7b484aa7f85eedcb703254":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_ec8dd7408fc44a5dbf46401fc5993f4b","placeholder":"​","style":"IPY_MODEL_39996229275a46798842ccd92e04408a","value":"Your token has been saved to /root/.cache/huggingface/token"}},"3909b24f4e9445108afeca2b3c4829c6":{"model_module":"@jupyter-widgets/controls","model_name":"LabelModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"LabelModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"LabelView","description":"","description_tooltip":null,"layout":"IPY_MODEL_2c47ad9177114dab815f474b79bfcb83","placeholder":"​","style":"IPY_MODEL_9c79bda15e454bd5a058a946dcee32ba","value":"Login 
successful"}},"5f65e8bdf1574824b66b45bbb3e4894c":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"22e1e59027cd44f88acb2f5c29f2cbdf":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"6699de6ced1f43919ce9e2a15b17fc69":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto
_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"048fa829e579417abc06e749d9404c79":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"ec8dd7408fc44a5dbf46401fc5993f4b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"39996229275a46798842ccd92e04408a":{"model_module":"@j
upyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"2c47ad9177114dab815f474b79bfcb83":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9c79bda15e454bd5a058a946dcee32ba":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"5981cb40ef464de292611a516b0e1ced":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/co
ntrols","_model_module_version":"1.5.0","_model_name":"HBoxModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HBoxView","box_style":"","children":["IPY_MODEL_83cd236211b44b57b799f85ba8961bd7","IPY_MODEL_5ddd9fa6fad14b6cbcb80310fa07d810","IPY_MODEL_468ebfcbed774e2a8a75b76452dacfe1"],"layout":"IPY_MODEL_cd13f44bb203414593ad06ab80391dd7"}},"83cd236211b44b57b799f85ba8961bd7":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","description":"","description_tooltip":null,"layout":"IPY_MODEL_7a829e9ccb4d49c2bf2e1ab16a15b5ee","placeholder":"​","style":"IPY_MODEL_205d7ca4f9324c2f85b7f31e3a5e3120","value":"Map: 100%"}},"5ddd9fa6fad14b6cbcb80310fa07d810":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"FloatProgressModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"ProgressView","bar_style":"success","description":"","description_tooltip":null,"layout":"IPY_MODEL_fc0e61f5db5d49b2825894782d25d4ef","max":5000,"min":0,"orientation":"horizontal","style":"IPY_MODEL_df4c60c77309425b950e6787982762a0","value":5000}},"468ebfcbed774e2a8a75b76452dacfe1":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_dom_classes":[],"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"HTMLModel","_view_count":null,"_view_module":"@jupyter-widgets/controls","_view_module_version":"1.5.0","_view_name":"HTMLView","descrip
tion":"","description_tooltip":null,"layout":"IPY_MODEL_966597c0e9814b33a90dba172788d50a","placeholder":"​","style":"IPY_MODEL_9961639b515144389da5e4ff8918533e","value":" 5000/5000 [00:01<00:00, 4398.10 examples/s]"}},"cd13f44bb203414593ad06ab80391dd7":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"7a829e9ccb4d49c2bf2e1ab16a15b5ee":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"
justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"205d7ca4f9324c2f85b7f31e3a5e3120":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}},"fc0e61f5db5d49b2825894782d25d4ef":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"df4c60c77309425b950e6787982762a0":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"ProgressStyleMo
del","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","bar_color":null,"description_width":""}},"966597c0e9814b33a90dba172788d50a":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_model_module":"@jupyter-widgets/base","_model_module_version":"1.2.0","_model_name":"LayoutModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"LayoutView","align_content":null,"align_items":null,"align_self":null,"border":null,"bottom":null,"display":null,"flex":null,"flex_flow":null,"grid_area":null,"grid_auto_columns":null,"grid_auto_flow":null,"grid_auto_rows":null,"grid_column":null,"grid_gap":null,"grid_row":null,"grid_template_areas":null,"grid_template_columns":null,"grid_template_rows":null,"height":null,"justify_content":null,"justify_items":null,"left":null,"margin":null,"max_height":null,"max_width":null,"min_height":null,"min_width":null,"object_fit":null,"object_position":null,"order":null,"overflow":null,"overflow_x":null,"overflow_y":null,"padding":null,"right":null,"top":null,"visibility":null,"width":null}},"9961639b515144389da5e4ff8918533e":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_model_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_model_name":"DescriptionStyleModel","_view_count":null,"_view_module":"@jupyter-widgets/base","_view_module_version":"1.2.0","_view_name":"StyleView","description_width":""}}}}},"nbformat":4,"nbformat_minor":0} diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/README.md b/Generative Models/Advanced Mistral 7B LLM Chatbot/README.md new file mode 100644 index 00000000..0097f64f --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/README.md @@ -0,0 +1,40 @@ +# Mistral LLM Chatbot + +Mistral LLM is a language model developed by 
Mistral AI. It is a large language model designed to understand and generate human-like text. LLM stands for "Large Language Model," indicating its size and capabilities. + +Mistral 7B is a specific model within the Mistral LLM family. The "7B" refers to the number of parameters in the model, with larger numbers generally indicating more powerful and capable models. Mistral 7B is one of the latest models from Mistral AI and is designed to outperform the previous Llama 2 13B model on various benchmarks. + +Mistral 7B is trained on a large amount of text data and can be used for a wide range of natural language processing tasks, such as text generation, question answering, language translation, and more. It is designed to understand and generate text in a way that is coherent and contextually relevant. + +In this repository, we have used Mistral 7B to create an LLM chatbot: + + +## Introduction +Mistral LLM is a state-of-the-art language model developed by Mistral AI. It is part of the Large Language Model (LLM) family, designed to generate human-like text based on the input it receives. The Mistral 7B model is specifically optimized for high performance, outperforming previous models like Llama 2 13B in various benchmarks. + +This project utilizes Mistral 7B, a cutting-edge language model with 7 billion parameters, to create a highly capable conversational chatbot. The Mistral 7B model can generate natural, contextually relevant responses, making it ideal for a wide range of natural language processing (NLP) tasks, including text generation, question answering, and language translation. + +## Key Features: +Text Generation: Generate coherent and contextually accurate responses. +Question Answering: Answer questions based on context. +Multilingual Support: Understand and respond in multiple languages. +Contextual Understanding: Maintain conversation context over multiple interactions. + + +Pre-trained Mistral 7B model for natural language processing tasks. 
+Real-time text generation using the language model. +Contextual memory: Remembers previous conversation history. +Fast and scalable deployment, capable of handling multiple user queries. + +## Requirements +To run the Mistral LLM Chatbot, you need to install the following dependencies: + +Python 3.8+ +PyTorch (for using the Mistral 7B model) +Transformers library from Hugging Face +CUDA (optional, for GPU acceleration) + +![mistral1](https://github.com/user-attachments/assets/cfb19f50-5438-4c77-b84b-9e30febfe91e) + + + diff --git a/Generative Models/Advanced Mistral 7B LLM Chatbot/chatbot temp b/Generative Models/Advanced Mistral 7B LLM Chatbot/chatbot temp new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/Generative Models/Advanced Mistral 7B LLM Chatbot/chatbot temp @@ -0,0 +1 @@ +