Add Falcon LLM
Signed-off-by: Humaid Alqasimi <[email protected]>
humaidq-tii authored and brianmcgillion committed Oct 8, 2024
1 parent d30fe94 commit 34a7e43
Showing 5 changed files with 130 additions and 5 deletions.
7 changes: 7 additions & 0 deletions modules/common/services/desktop.nix
@@ -195,6 +195,13 @@ in
icon = "${pkgs.losslesscut-bin}/share/icons/losslesscut.png";
}

{
name = "Falcon AI";
description = "Your local large language model, developed by TII.";
path = "${pkgs.alpaca}/bin/alpaca";
icon = "${pkgs.ghaf-artwork}/icons/falcon-icon.svg";
}

{
name = "Shutdown";
description = "Shutdown System";
4 changes: 2 additions & 2 deletions modules/microvm/virtualization/microvm/common/storagevm.nix
@@ -20,7 +20,7 @@ in
# FIXME: Probably will lead to disgraceful error messages, as we
# put typechecking on nix impermanence option. But other,
# proper, ways are much harder.
- type = types.anything;
+ type = types.listOf types.anything;
default = [ ];
example = [
"/var/lib/nixos"
@@ -51,7 +51,7 @@ in
};

files = mkOption {
- type = types.anything;
+ type = types.listOf types.anything;
default = [ ];
example = [ "/etc/machine-id" ];
description = ''
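With the type tightened to a list, the storagevm options still accept both plain path strings and attribute-set entries. A minimal sketch of what a consumer can now pass, modelled on the guivm.nix change just below (the VM name and extra paths are illustrative):

storagevm = {
  enable = true;
  name = "examplevm"; # illustrative VM name
  directories = [
    # plain string entry, as in the option's example
    "/var/lib/nixos"
    # attribute-set entry with explicit ownership, as used for ollama in guivm.nix
    {
      directory = "/var/lib/private/ollama";
      group = "ollama";
      mode = "u=rwx,g=,o=";
    }
  ];
  files = [ "/etc/machine-id" ];
};
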
13 changes: 12 additions & 1 deletion modules/microvm/virtualization/microvm/guivm.nix
@@ -68,6 +68,14 @@ let
storagevm = {
enable = true;
name = "guivm";
directories = [
{
directory = "/var/lib/private/ollama";
inherit (config.ghaf.users.accounts) user;
group = "ollama";
mode = "u=rwx,g=,o=";
}
];
users.${config.ghaf.users.accounts.user}.directories = [
".cache"
".config"
@@ -147,7 +155,7 @@ let
microvm = {
optimize.enable = false;
vcpu = 2;
- mem = 2048;
+ mem = 12288;
hypervisor = "qemu";
shares = [
{
@@ -184,8 +192,11 @@ let
imports = [
../../../common
../../../desktop
../../../reference/services
];

ghaf.reference.services.ollama = true;

# Waypipe service runs in the GUIVM and listens for incoming connections from AppVMs
systemd.user.services.waypipe = {
enable = true;
6 changes: 4 additions & 2 deletions modules/reference/services/default.nix
@@ -11,11 +11,13 @@ in
./dendrite-pinecone/dendrite-pinecone.nix
./dendrite-pinecone/dendrite-config.nix
./proxy-server/3proxy-config.nix
./ollama/ollama.nix
];
options.ghaf.reference.services = {
- enable = mkEnableOption "Enable the Ghaf reference services";
- dendrite = mkEnableOption "Enable the dendrite-pinecone service";
+ enable = mkEnableOption "Ghaf reference services";
+ dendrite = mkEnableOption "dendrite-pinecone service";
proxy-business = mkEnableOption "Enable the proxy server service";
ollama = mkEnableOption "ollama service";
};
config = mkIf cfg.enable {
ghaf.reference.services = {
105 changes: 105 additions & 0 deletions modules/reference/services/ollama/ollama.nix
@@ -0,0 +1,105 @@
# Copyright 2024 TII (SSRC) and the Ghaf contributors
# SPDX-License-Identifier: Apache-2.0
{
config,
lib,
pkgs,
...
}:
let
cfg = config.ghaf.reference.services;
inherit (lib) mkIf;
in
{
config = mkIf cfg.ollama {
services.ollama = {
enable = true;
openFirewall = true;
host = "0.0.0.0";
};

environment.systemPackages = [
(pkgs.writeShellApplication {
name = "load-falcon";
runtimeInputs = with pkgs; [
libnotify
ollama
];
text = ''
if [ "''${1:-}" == "--check" ]; then
if ollama show falcon2; then
echo "falcon2 model is installed"
exit 0
fi
if [ -f /tmp/falcon-download ]; then
if [ "$(cat /tmp/falcon-download)" == "1" ]; then
echo "falcon2 model is currently being installed"
exit 0
fi
fi
echo "falcon2 model is not installed"
exit 1
fi
function cleanup() {
echo 0 > /tmp/falcon-download
}
trap cleanup SIGINT
if ! ollama show falcon2; then
notify-send -i ${pkgs.ghaf-artwork}/icons/falcon-icon.svg 'Falcon AI' 'Downloading the latest falcon2 model. This may take a while...'
echo 1 > /tmp/falcon-download
else
echo "falcon2 model is already installed"
exit 0
fi
if ollama pull falcon2:latest; then
notify-send -i ${pkgs.ghaf-artwork}/icons/falcon-icon.svg 'Falcon AI' 'The falcon model has been downloaded successfully. You may now try it out!'
else
notify-send -i ${pkgs.ghaf-artwork}/icons/falcon-icon.svg 'Falcon AI' 'Failed to download the falcon model. Please try again later.'
fi
cleanup
'';
})
];

# This forces Alpaca to use the systemd ollama daemon instead of spawning
# its own.
system.userActivationScripts.alpaca-configure = {
text = ''
source ${config.system.build.setEnvironment}
mkdir -p $HOME/.config/com.jeffser.Alpaca
cat <<EOF > $HOME/.config/com.jeffser.Alpaca/server.json
{
"remote_url": "http://localhost:11434",
"remote_bearer_token": "",
"run_remote": true,
"local_port": 11435,
"run_on_background": false,
"powersaver_warning": true,
"model_tweaks": {
"temperature": 0.7,
"seed": 0,
"keep_alive": 5
},
"ollama_overrides": {},
"idle_timer": 0
}
EOF
'';
};

systemd.services.ollama = {
serviceConfig = {
TimeoutStartSec = "5h";
Restart = "always";
RestartSec = "5s";
};
};
};
}
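A brief sketch of how the load-falcon helper defined above might be exercised from a shell on the GUIVM once this module is enabled; the status behaviour comes from the script itself, and the final invocation assumes the standard ollama CLI:

# Report whether the falcon2 model is installed or mid-download
# (exits 0 in either case, 1 if the model is absent).
load-falcon --check

# Pull the model; progress and completion are reported via notify-send,
# and /tmp/falcon-download tracks an in-flight download.
load-falcon

# After a successful pull the model should be usable through the
# system-wide ollama daemon, e.g.:
ollama run falcon2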
