From 84beb59801f7b8d61917e2f8a7f4cbe95c420d64 Mon Sep 17 00:00:00 2001 From: Humaid Alqasimi Date: Mon, 7 Oct 2024 14:25:46 +0400 Subject: [PATCH] Add Falcon LLM Signed-off-by: Humaid Alqasimi --- modules/common/services/desktop.nix | 7 ++ .../microvm/virtualization/microvm/guivm.nix | 13 ++- modules/reference/services/default.nix | 6 +- modules/reference/services/ollama/ollama.nix | 105 ++++++++++++++++++ overlays/custom-packages/alpaca/default.nix | 18 +++ overlays/custom-packages/ollama/default.nix | 15 +++ 6 files changed, 161 insertions(+), 3 deletions(-) create mode 100644 modules/reference/services/ollama/ollama.nix create mode 100644 overlays/custom-packages/alpaca/default.nix create mode 100644 overlays/custom-packages/ollama/default.nix diff --git a/modules/common/services/desktop.nix b/modules/common/services/desktop.nix index 580366be0e..bd4b946b7f 100644 --- a/modules/common/services/desktop.nix +++ b/modules/common/services/desktop.nix @@ -195,6 +195,13 @@ in icon = "${pkgs.losslesscut-bin}/share/icons/losslesscut.png"; } + { + name = "Falcon AI"; + description = "Your local large language model, developed by TII."; + path = "${pkgs.alpaca}/bin/alpaca"; + icon = "${pkgs.ghaf-artwork}/icons/falcon-icon.svg"; + } + { name = "Shutdown"; description = "Shutdown System"; diff --git a/modules/microvm/virtualization/microvm/guivm.nix b/modules/microvm/virtualization/microvm/guivm.nix index 4f6ae42ce7..0e481d6e76 100644 --- a/modules/microvm/virtualization/microvm/guivm.nix +++ b/modules/microvm/virtualization/microvm/guivm.nix @@ -68,6 +68,14 @@ let storagevm = { enable = true; name = "guivm"; + directories = [ + { + directory = "/var/lib/private/ollama"; + inherit (config.ghaf.users.accounts) user; + group = "ollama"; + mode = "u=rwx,g=,o="; + } + ]; users.${config.ghaf.users.accounts.user}.directories = [ ".cache" ".config" @@ -147,7 +155,7 @@ let microvm = { optimize.enable = false; vcpu = 2; - mem = 2048; + mem = 12288; hypervisor = "qemu"; shares = [ { 
@@ -184,8 +192,11 @@ let imports = [ ../../../common ../../../desktop + ../../../reference/services ]; + ghaf.reference.services.ollama = true; + # Waypipe service runs in the GUIVM and listens for incoming connections from AppVMs systemd.user.services.waypipe = { enable = true; diff --git a/modules/reference/services/default.nix b/modules/reference/services/default.nix index ecab9648a9..9bed9dd941 100644 --- a/modules/reference/services/default.nix +++ b/modules/reference/services/default.nix @@ -11,11 +11,13 @@ in ./dendrite-pinecone/dendrite-pinecone.nix ./dendrite-pinecone/dendrite-config.nix ./proxy-server/3proxy-config.nix + ./ollama/ollama.nix ]; options.ghaf.reference.services = { - enable = mkEnableOption "Enable the Ghaf reference services"; - dendrite = mkEnableOption "Enable the dendrite-pinecone service"; + enable = mkEnableOption "Ghaf reference services"; + dendrite = mkEnableOption "dendrite-pinecone service"; proxy-business = mkEnableOption "Enable the proxy server service"; + ollama = mkEnableOption "ollama service"; }; config = mkIf cfg.enable { ghaf.reference.services = { diff --git a/modules/reference/services/ollama/ollama.nix b/modules/reference/services/ollama/ollama.nix new file mode 100644 index 0000000000..1b42cf4a6a --- /dev/null +++ b/modules/reference/services/ollama/ollama.nix @@ -0,0 +1,105 @@ +# Copyright 2024 TII (SSRC) and the Ghaf contributors +# SPDX-License-Identifier: Apache-2.0 +{ + config, + lib, + pkgs, + ... 
+}:
+let
+  cfg = config.ghaf.reference.services;
+  inherit (lib) mkIf;
+in
+{
+  config = mkIf cfg.ollama {
+    services.ollama = {
+      enable = true;
+      openFirewall = true;
+      host = "0.0.0.0";
+    };
+
+    environment.systemPackages = [
+      (pkgs.writeShellApplication {
+        name = "load-falcon";
+        runtimeInputs = with pkgs; [
+          libnotify
+          ollama
+        ];
+        text = ''
+          if [ "''${1:-}" == "--check" ]; then
+            if ollama show falcon2; then
+              echo "falcon2 model is installed"
+              exit 0
+            fi
+
+            if [ -f /tmp/falcon-download ]; then
+              if [ "$(cat /tmp/falcon-download)" == "1" ]; then
+                echo "falcon2 model is currently being installed"
+                exit 0
+              fi
+            fi
+
+            echo "falcon2 model is not installed"
+            exit 1
+          fi
+
+          function cleanup() {
+            echo 0 > /tmp/falcon-download
+          }
+
+          # Clear the in-progress flag on Ctrl-C and on termination signals,
+          # otherwise a killed download leaves the flag at 1 and --check keeps
+          # reporting "currently being installed" until /tmp is cleared.
+          trap cleanup SIGINT SIGTERM SIGHUP
+
+          if ! ollama show falcon2; then
+            notify-send -i ${pkgs.ghaf-artwork}/icons/falcon-icon.svg 'Falcon AI' 'Downloading the latest falcon2 model. This may take a while...'
+            echo 1 > /tmp/falcon-download
+          else
+            echo "falcon2 model is already installed"
+            exit 0
+          fi
+
+          if ollama pull falcon2:latest; then
+            notify-send -i ${pkgs.ghaf-artwork}/icons/falcon-icon.svg 'Falcon AI' 'The falcon model has been downloaded successfully. You may now try it out!'
+          else
+            notify-send -i ${pkgs.ghaf-artwork}/icons/falcon-icon.svg 'Falcon AI' 'Failed to download the falcon model. Please try again later.'
+          fi
+
+          cleanup
+        '';
+      })
+    ];
+
+    # This forces Alpaca to use the systemd ollama daemon instead of spawning
+    # its own.
+    system.userActivationScripts.alpaca-configure = {
+      text = ''
+        source ${config.system.build.setEnvironment}
+        mkdir -p "$HOME/.config/com.jeffser.Alpaca"
+        # Write Alpaca's server config via a heredoc. The original used
+        # "cat < file" (input redirection, no heredoc), which executed the
+        # JSON lines as shell commands and never created server.json.
+        cat > "$HOME/.config/com.jeffser.Alpaca/server.json" << 'EOF'
+        {
+          "remote_url": "http://localhost:11434",
+          "remote_bearer_token": "",
+          "run_remote": true,
+          "local_port": 11435,
+          "run_on_background": false,
+          "powersaver_warning": true,
+          "model_tweaks": {
+            "temperature": 0.7,
+            "seed": 0,
+            "keep_alive": 5
+          },
+          "ollama_overrides": {},
+          "idle_timer": 0
+        }
+        EOF
+      '';
+    };
+
+    systemd.services.ollama = {
+      serviceConfig = {
+        TimeoutStartSec = "5h";
+        Restart = "always";
+        RestartSec = "5s";
+      };
+    };
+  };
+}
diff --git a/overlays/custom-packages/alpaca/default.nix b/overlays/custom-packages/alpaca/default.nix
new file mode 100644
index 0000000000..db392657ce
--- /dev/null
+++ b/overlays/custom-packages/alpaca/default.nix
@@ -0,0 +1,18 @@
+# Copyright 2024 TII (SSRC) and the Ghaf contributors
+# SPDX-License-Identifier: Apache-2.0
+#
+# Bump to newer version of Alpaca due to a bug.
+#
+{ prev }:
+prev.alpaca.overrideAttrs rec {
+  version = "2.0.6";
+  src = prev.fetchFromGitHub {
+    owner = "Jeffser";
+    repo = "Alpaca";
+    rev = "refs/tags/${version}";
+    hash = "sha256-4c6pisd3o7mycivHd1QZ2N7s8pYzrQXiZMbVvl5ciPA=";
+  };
+
+  patches = [ ];
+  postPatch = "";
+}
diff --git a/overlays/custom-packages/ollama/default.nix b/overlays/custom-packages/ollama/default.nix
new file mode 100644
index 0000000000..283da2fe34
--- /dev/null
+++ b/overlays/custom-packages/ollama/default.nix
@@ -0,0 +1,15 @@
+# Copyright 2024 TII (SSRC) and the Ghaf contributors
+# SPDX-License-Identifier: Apache-2.0
+#
+# Use latest version of ollama. Should be removed when nixpkgs is bumped.
+#
+{ prev }:
+prev.ollama.overrideAttrs {
+  src = prev.fetchFromGitHub {
+    owner = "ollama";
+    repo = "ollama";
+    rev = "v0.3.1";
+    hash = "sha256-ctz9xh1wisG0YUxglygKHIvU9bMgMLkGqDoknb8qSAU=";
+    fetchSubmodules = true;
+  };
+}