diff --git a/README.md b/README.md
index 21643a27..3bf28d3f 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,3 @@
-
@@ -25,19 +24,19 @@ The outputs run primarily on Linux, but can also run on Windows via [NixOS-WSL](
The main outputs of the `flake.nix` at the moment are as follows:
-#### KoboldAI ( A WebUI for GPT Writing )
+#### [InvokeAI](https://github.com/invoke-ai/InvokeAI) ( A Stable Diffusion WebUI )
-- `nix run .#koboldai-amd`
-- `nix run .#koboldai-nvidia`
+- `nix run .#invokeai-amd`
+- `nix run .#invokeai-nvidia`
-![koboldai](/../images/koboldai.webp)
+![invokeai](https://raw.githubusercontent.com/nixified-ai/flake/images/invokeai.webp)
-#### InvokeAI ( A Stable Diffusion WebUI )
+#### [textgen](https://github.com/oobabooga/text-generation-webui) ( Also called text-generation-webui: A WebUI for LLMs and LoRA training )
-- `nix run .#invokeai-amd`
-- `nix run .#invokeai-nvidia`
+- `nix run .#textgen-amd`
+- `nix run .#textgen-nvidia`
-![invokeai](/../images/invokeai.webp)
+![textgen](https://raw.githubusercontent.com/nixified-ai/flake/images/textgen.webp)
## Install NixOS-WSL in Windows
diff --git a/flake.lock b/flake.lock
index 299f98e2..fc71ce2b 100644
--- a/flake.lock
+++ b/flake.lock
@@ -7,11 +7,11 @@
]
},
"locked": {
- "lastModified": 1677714448,
- "narHash": "sha256-Hq8qLs8xFu28aDjytfxjdC96bZ6pds21Yy09mSC156I=",
+ "lastModified": 1685662779,
+ "narHash": "sha256-cKDDciXGpMEjP1n6HlzKinN0H+oLmNpgeCTzYnsA2po=",
"owner": "hercules-ci",
"repo": "flake-parts",
- "rev": "dc531e3a9ce757041e1afaff8ee932725ca60002",
+ "rev": "71fb97f0d875fd4de4994dfb849f2c75e17eb6c3",
"type": "github"
},
"original": {
@@ -25,11 +25,11 @@
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
- "lastModified": 1673362319,
- "narHash": "sha256-Pjp45Vnj7S/b3BRpZEVfdu8sqqA6nvVjvYu59okhOyI=",
+ "lastModified": 1685662779,
+ "narHash": "sha256-cKDDciXGpMEjP1n6HlzKinN0H+oLmNpgeCTzYnsA2po=",
"owner": "hercules-ci",
"repo": "flake-parts",
- "rev": "82c16f1682cf50c01cb0280b38a1eed202b3fe9f",
+ "rev": "71fb97f0d875fd4de4994dfb849f2c75e17eb6c3",
"type": "github"
},
"original": {
@@ -46,11 +46,11 @@
]
},
"locked": {
- "lastModified": 1666885127,
- "narHash": "sha256-uXA/3lhLhwOTBMn9a5zJODKqaRT+SuL5cpEmOz2ULoo=",
+ "lastModified": 1685662779,
+ "narHash": "sha256-cKDDciXGpMEjP1n6HlzKinN0H+oLmNpgeCTzYnsA2po=",
"owner": "hercules-ci",
"repo": "flake-parts",
- "rev": "0e101dbae756d35a376a5e1faea532608e4a4b9a",
+ "rev": "71fb97f0d875fd4de4994dfb849f2c75e17eb6c3",
"type": "github"
},
"original": {
@@ -59,34 +59,34 @@
"type": "github"
}
},
- "flake-utils": {
+ "haskell-flake": {
"locked": {
- "lastModified": 1667077288,
- "narHash": "sha256-bdC8sFNDpT0HK74u9fUkpbf1MEzVYJ+ka7NXCdgBoaA=",
- "owner": "numtide",
- "repo": "flake-utils",
- "rev": "6ee9ebb6b1ee695d2cacc4faa053a7b9baa76817",
+ "lastModified": 1684780604,
+ "narHash": "sha256-2uMZsewmRn7rRtAnnQNw1lj0uZBMh4m6Cs/7dV5YF08=",
+ "owner": "srid",
+ "repo": "haskell-flake",
+ "rev": "74210fa80a49f1b6f67223debdbf1494596ff9f2",
"type": "github"
},
"original": {
- "owner": "numtide",
- "repo": "flake-utils",
+ "owner": "srid",
+ "ref": "0.3.0",
+ "repo": "haskell-flake",
"type": "github"
}
},
"hercules-ci-agent": {
"inputs": {
"flake-parts": "flake-parts_3",
- "nix-darwin": "nix-darwin",
- "nixpkgs": "nixpkgs",
- "pre-commit-hooks-nix": "pre-commit-hooks-nix"
+ "haskell-flake": "haskell-flake",
+ "nixpkgs": "nixpkgs"
},
"locked": {
- "lastModified": 1673183923,
- "narHash": "sha256-vb+AEQJAW4Xn4oHsfsx8H12XQU0aK8VYLtWYJm/ol28=",
+ "lastModified": 1686721748,
+ "narHash": "sha256-ilD6ANYID+b0/+GTFbuZXfmu92bqVqY5ITKXSxqIp5A=",
"owner": "hercules-ci",
"repo": "hercules-ci-agent",
- "rev": "b3f8aa8e4a8b22dbbe92cc5a89e6881090b933b3",
+ "rev": "7192b83935ab292a8e894db590dfd44f976e183b",
"type": "github"
},
"original": {
@@ -103,11 +103,11 @@
]
},
"locked": {
- "lastModified": 1676558019,
- "narHash": "sha256-obUHCMMWbffb3k0b9YIChsJ2Z281BcDYnTPTbJRP6vs=",
+ "lastModified": 1686830987,
+ "narHash": "sha256-1XLTM0lFr3NV+0rd55SQW/8oQ3ACnqlYcda3FelIwHU=",
"owner": "hercules-ci",
"repo": "hercules-ci-effects",
- "rev": "fdbc15b55db8d037504934d3af52f788e0593380",
+ "rev": "04e4ab63b9eed2452edee1bb698827e1cb8265c6",
"type": "github"
},
"original": {
@@ -119,66 +119,27 @@
"invokeai-src": {
"flake": false,
"locked": {
- "lastModified": 1677475057,
- "narHash": "sha256-REtyVcyRgspn1yYvB4vIHdOrPRZRNSSraepHik9MfgE=",
+ "lastModified": 1697424725,
+ "narHash": "sha256-y3nxZ4PQ/d2wMX1crpJPDMYXf48YBG0sRIBOTgN6XlI=",
"owner": "invoke-ai",
"repo": "InvokeAI",
- "rev": "650f4bb58ceca458bff1410f35cd6d6caad399c6",
+ "rev": "ad786130ffb11f91cbfcc40846114dd1fdcecdf6",
"type": "github"
},
"original": {
"owner": "invoke-ai",
- "ref": "v2.3.1.post2",
+ "ref": "v3.3.0post3",
"repo": "InvokeAI",
"type": "github"
}
},
- "koboldai-src": {
- "flake": false,
- "locked": {
- "lastModified": 1668957963,
- "narHash": "sha256-fKQ/6LiMmrfSWczC5kcf6M9cpuF9dDYl2gJ4+6ZLSdY=",
- "owner": "koboldai",
- "repo": "koboldai-client",
- "rev": "f2077b8e58db6bd47a62bf9ed2649bb0711f9678",
- "type": "github"
- },
- "original": {
- "owner": "koboldai",
- "ref": "1.19.2",
- "repo": "koboldai-client",
- "type": "github"
- }
- },
- "nix-darwin": {
- "inputs": {
- "nixpkgs": [
- "hercules-ci-effects",
- "hercules-ci-agent",
- "nixpkgs"
- ]
- },
- "locked": {
- "lastModified": 1667419884,
- "narHash": "sha256-oLNw87ZI5NxTMlNQBv1wG2N27CUzo9admaFlnmavpiY=",
- "owner": "LnL7",
- "repo": "nix-darwin",
- "rev": "cfc0125eafadc9569d3d6a16ee928375b77e3100",
- "type": "github"
- },
- "original": {
- "owner": "LnL7",
- "repo": "nix-darwin",
- "type": "github"
- }
- },
"nixpkgs": {
"locked": {
- "lastModified": 1672262501,
- "narHash": "sha256-ZNXqX9lwYo1tOFAqrVtKTLcJ2QMKCr3WuIvpN8emp7I=",
+ "lastModified": 1686501370,
+ "narHash": "sha256-G0WuM9fqTPRc2URKP9Lgi5nhZMqsfHGrdEbrLvAPJcg=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "e182da8622a354d44c39b3d7a542dc12cd7baa5f",
+ "rev": "75a5ebf473cd60148ba9aec0d219f72e5cf52519",
"type": "github"
},
"original": {
@@ -191,11 +152,11 @@
"nixpkgs-lib": {
"locked": {
"dir": "lib",
- "lastModified": 1672350804,
- "narHash": "sha256-jo6zkiCabUBn3ObuKXHGqqORUMH27gYDIFFfLq5P4wg=",
+ "lastModified": 1685564631,
+ "narHash": "sha256-8ywr3AkblY4++3lIVxmrWZFzac7+f32ZEhH/A8pNscI=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "677ed08a50931e38382dbef01cba08a8f7eac8f6",
+ "rev": "4f53efe34b3a8877ac923b9350c874e3dcd5dc0a",
"type": "github"
},
"original": {
@@ -208,11 +169,11 @@
},
"nixpkgs_2": {
"locked": {
- "lastModified": 1677932085,
- "narHash": "sha256-+AB4dYllWig8iO6vAiGGYl0NEgmMgGHpy9gzWJ3322g=",
+ "lastModified": 1697059129,
+ "narHash": "sha256-9NJcFF9CEYPvHJ5ckE8kvINvI84SZZ87PvqMbH6pro0=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "3c5319ad3aa51551182ac82ea17ab1c6b0f0df89",
+ "rev": "5e4c2ada4fcd54b99d56d7bd62f384511a7e2593",
"type": "github"
},
"original": {
@@ -222,37 +183,31 @@
"type": "github"
}
},
- "pre-commit-hooks-nix": {
+ "root": {
"inputs": {
- "flake-utils": "flake-utils",
- "nixpkgs": [
- "hercules-ci-effects",
- "hercules-ci-agent",
- "nixpkgs"
- ]
- },
+ "flake-parts": "flake-parts",
+ "hercules-ci-effects": "hercules-ci-effects",
+ "invokeai-src": "invokeai-src",
+ "nixpkgs": "nixpkgs_2",
+ "textgen-src": "textgen-src"
+ }
+ },
+ "textgen-src": {
+ "flake": false,
"locked": {
- "lastModified": 1667760143,
- "narHash": "sha256-+X5CyeNEKp41bY/I1AJgW/fn69q5cLJ1bgiaMMCKB3M=",
- "owner": "cachix",
- "repo": "pre-commit-hooks.nix",
- "rev": "06f48d63d473516ce5b8abe70d15be96a0147fcd",
+ "lastModified": 1696789008,
+ "narHash": "sha256-+V8XOVnEyImj+a8uCkZXEHXW8bTIBRlnfMcQfcZNgqg=",
+ "owner": "oobabooga",
+ "repo": "text-generation-webui",
+ "rev": "2e471071af48e19867cfa522d2def44c24785c50",
"type": "github"
},
"original": {
- "owner": "cachix",
- "repo": "pre-commit-hooks.nix",
+ "owner": "oobabooga",
+ "ref": "v1.7",
+ "repo": "text-generation-webui",
"type": "github"
}
- },
- "root": {
- "inputs": {
- "flake-parts": "flake-parts",
- "hercules-ci-effects": "hercules-ci-effects",
- "invokeai-src": "invokeai-src",
- "koboldai-src": "koboldai-src",
- "nixpkgs": "nixpkgs_2"
- }
}
},
"root": "root",
diff --git a/flake.nix b/flake.nix
index 58a8d014..34450767 100644
--- a/flake.nix
+++ b/flake.nix
@@ -11,11 +11,11 @@
url = "github:NixOS/nixpkgs/nixos-unstable";
};
invokeai-src = {
- url = "github:invoke-ai/InvokeAI/v2.3.1.post2";
+ url = "github:invoke-ai/InvokeAI/v3.3.0post3";
flake = false;
};
- koboldai-src = {
- url = "github:koboldai/koboldai-client/1.19.2";
+ textgen-src = {
+ url = "github:oobabooga/text-generation-webui/v1.7";
flake = false;
};
flake-parts = {
@@ -29,16 +29,31 @@
};
outputs = { flake-parts, invokeai-src, hercules-ci-effects, ... }@inputs:
flake-parts.lib.mkFlake { inherit inputs; } {
+ perSystem = { system, ... }: {
+ _module.args.pkgs = import inputs.nixpkgs { config.allowUnfree = true; inherit system; };
+ legacyPackages = {
+ koboldai = builtins.throw ''
+
+
+          koboldai has been dropped from nixified.ai due to lack of upstream development;
+          try textgen instead, which is better maintained. If you would like to use the last
+ available version of koboldai with nixified.ai, then run:
+
+            nix run github:nixified-ai/flake/0c58f8cba3fb42c54f2a7bf9bd45ee4cbc9f2477#koboldai
+ '';
+ };
+ };
systems = [
"x86_64-linux"
];
+ debug = true;
imports = [
hercules-ci-effects.flakeModule
- ./modules/dependency-sets
- ./modules/aipython3
+    # ./modules/nixpkgs-config
+ ./overlays
./projects/invokeai
- ./projects/koboldai
+ ./projects/textgen
./website
];
- };
+ };
}
diff --git a/modules/aipython3/default.nix b/modules/aipython3/default.nix
deleted file mode 100644
index ccb82fcc..00000000
--- a/modules/aipython3/default.nix
+++ /dev/null
@@ -1,28 +0,0 @@
-{ lib, ... }:
-
-{
- perSystem = { pkgs, ... }: {
- dependencySets = let
- overlays = import ./overlays.nix pkgs;
-
- mkPythonPackages = overlayList: let
- python3' = pkgs.python3.override {
- packageOverrides = lib.composeManyExtensions overlayList;
- };
- in python3'.pkgs;
-
- in {
- aipython3-amd = mkPythonPackages [
- overlays.fixPackages
- overlays.extraDeps
- overlays.torchRocm
- ];
-
- aipython3-nvidia = mkPythonPackages [
- overlays.fixPackages
- overlays.extraDeps
- overlays.torchCuda
- ];
- };
- };
-}
diff --git a/modules/aipython3/overlays.nix b/modules/aipython3/overlays.nix
deleted file mode 100644
index 02ad03e9..00000000
--- a/modules/aipython3/overlays.nix
+++ /dev/null
@@ -1,118 +0,0 @@
-pkgs: {
- fixPackages = final: prev: let
- relaxProtobuf = pkg: pkg.overrideAttrs (old: {
- nativeBuildInputs = old.nativeBuildInputs ++ [ final.pythonRelaxDepsHook ];
- pythonRelaxDeps = [ "protobuf" ];
- });
- in {
- pytorch-lightning = relaxProtobuf prev.pytorch-lightning;
- wandb = relaxProtobuf prev.wandb;
- markdown-it-py = prev.markdown-it-py.overrideAttrs (old: {
- nativeBuildInputs = old.nativeBuildInputs ++ [ final.pythonRelaxDepsHook ];
- pythonRelaxDeps = [ "linkify-it-py" ];
- passthru = old.passthru // {
- optional-dependencies = with final; {
- linkify = [ linkify-it-py ];
- plugins = [ mdit-py-plugins ];
- };
- };
- });
- filterpy = prev.filterpy.overrideAttrs (old: {
- doInstallCheck = false;
- });
- shap = prev.shap.overrideAttrs (old: {
- doInstallCheck = false;
- propagatedBuildInputs = old.propagatedBuildInputs ++ [ final.packaging ];
- pythonImportsCheck = [ "shap" ];
-
- meta = old.meta // {
- broken = false;
- };
- });
- streamlit = let
- streamlit = final.callPackage (pkgs.path + "/pkgs/applications/science/machine-learning/streamlit") {
- protobuf3 = final.protobuf;
- };
- in final.toPythonModule (relaxProtobuf streamlit);
- };
-
- extraDeps = final: prev: let
- rm = d: d.overrideAttrs (old: {
- nativeBuildInputs = old.nativeBuildInputs ++ [ final.pythonRelaxDepsHook ];
- pythonRemoveDeps = [ "opencv-python-headless" "opencv-python" "tb-nightly" "clip" ];
- });
- callPackage = final.callPackage;
- rmCallPackage = path: args: rm (callPackage path args);
- in {
- scikit-image = final.scikitimage;
- opencv-python-headless = final.opencv-python;
- opencv-python = final.opencv4;
-
- safetensors = callPackage ../../packages/safetensors { };
- compel = callPackage ../../packages/compel { };
- apispec-webframeworks = callPackage ../../packages/apispec-webframeworks { };
- pydeprecate = callPackage ../../packages/pydeprecate { };
- taming-transformers-rom1504 =
- callPackage ../../packages/taming-transformers-rom1504 { };
- albumentations = rmCallPackage ../../packages/albumentations { };
- qudida = rmCallPackage ../../packages/qudida { };
- gfpgan = rmCallPackage ../../packages/gfpgan { };
- basicsr = rmCallPackage ../../packages/basicsr { };
- facexlib = rmCallPackage ../../packages/facexlib { };
- realesrgan = rmCallPackage ../../packages/realesrgan { };
- codeformer = callPackage ../../packages/codeformer { };
- clipseg = rmCallPackage ../../packages/clipseg { };
- kornia = callPackage ../../packages/kornia { };
- lpips = callPackage ../../packages/lpips { };
- ffmpy = callPackage ../../packages/ffmpy { };
- picklescan = callPackage ../../packages/picklescan { };
- diffusers = callPackage ../../packages/diffusers { };
- pypatchmatch = callPackage ../../packages/pypatchmatch { };
- fonts = callPackage ../../packages/fonts { };
- font-roboto = callPackage ../../packages/font-roboto { };
- analytics-python = callPackage ../../packages/analytics-python { };
- gradio = callPackage ../../packages/gradio { };
- blip = callPackage ../../packages/blip { };
- fairscale = callPackage ../../packages/fairscale { };
- torch-fidelity = callPackage ../../packages/torch-fidelity { };
- resize-right = callPackage ../../packages/resize-right { };
- torchdiffeq = callPackage ../../packages/torchdiffeq { };
- k-diffusion = callPackage ../../packages/k-diffusion { };
- accelerate = callPackage ../../packages/accelerate { };
- clip-anytorch = callPackage ../../packages/clip-anytorch { };
- clean-fid = callPackage ../../packages/clean-fid { };
- getpass-asterisk = callPackage ../../packages/getpass-asterisk { };
- };
-
- torchRocm = final: prev: rec {
- # TODO: figure out how to patch torch-bin trying to access /opt/amdgpu
- # there might be an environment variable for it, can use a wrapper for that
- # otherwise just grep the world for /opt/amdgpu or something and substituteInPlace the path
- # you can run this thing without the fix by creating /opt and running nix build nixpkgs#libdrm --inputs-from . --out-link /opt/amdgpu
- torch-bin = prev.torch-bin.overrideAttrs (old: {
- src = pkgs.fetchurl {
- name = "torch-1.13.1+rocm5.1.1-cp310-cp310-linux_x86_64.whl";
- url = "https://download.pytorch.org/whl/rocm5.1.1/torch-1.13.1%2Brocm5.1.1-cp310-cp310-linux_x86_64.whl";
- hash = "sha256-qUwAL3L9ODy9hjne8jZQRoG4BxvXXLT7cAy9RbM837A=";
- };
- postFixup = (old.postFixup or "") + ''
- ${pkgs.gnused}/bin/sed -i s,/opt/amdgpu/share/libdrm/amdgpu.ids,/tmp/nix-pytorch-rocm___/amdgpu.ids,g $out/${final.python.sitePackages}/torch/lib/libdrm_amdgpu.so
- '';
- rocmSupport = true;
- });
- torchvision-bin = prev.torchvision-bin.overrideAttrs (old: {
- src = pkgs.fetchurl {
- name = "torchvision-0.14.1+rocm5.1.1-cp310-cp310-linux_x86_64.whl";
- url = "https://download.pytorch.org/whl/rocm5.1.1/torchvision-0.14.1%2Brocm5.1.1-cp310-cp310-linux_x86_64.whl";
- hash = "sha256-8CM1QZ9cZfexa+HWhG4SfA/PTGB2475dxoOtGZ3Wa2E=";
- };
- });
- torch = torch-bin;
- torchvision = torchvision-bin;
- };
-
- torchCuda = final: prev: {
- torch = final.torch-bin;
- torchvision = final.torchvision-bin;
- };
-}
diff --git a/modules/dependency-sets/default.nix b/modules/dependency-sets/default.nix
deleted file mode 100644
index a1c650d4..00000000
--- a/modules/dependency-sets/default.nix
+++ /dev/null
@@ -1,15 +0,0 @@
-{ lib, ... }:
-
-let
- inherit (lib) mkOption types;
-in
-
-{
- perSystem.options = {
- dependencySets = mkOption {
- description = "Specially instantiated dependency sets for customized builds";
- type = with types; lazyAttrsOf unspecified;
- default = {};
- };
- };
-}
diff --git a/modules/nixpkgs-config/default.nix b/modules/nixpkgs-config/default.nix
new file mode 100644
index 00000000..b42cbb5d
--- /dev/null
+++ b/modules/nixpkgs-config/default.nix
@@ -0,0 +1,27 @@
+{ inputs, lib, ... }:
+
+{
+ perSystem = { system, ... }: {
+ _module.args.pkgs = import inputs.nixpkgs {
+ inherit system;
+ config = {
+ allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [
+ # for Triton
+ "cuda_cudart"
+ "cuda_nvcc"
+ "cuda_nvtx"
+
+ # for CUDA Torch
+ "cuda_cccl"
+ "cuda_cupti"
+ "cuda_nvprof"
+ "cudatoolkit"
+ "cudatoolkit-11-cudnn"
+ "libcublas"
+ "libcusolver"
+ "libcusparse"
+ ];
+ };
+ };
+ };
+}
\ No newline at end of file
diff --git a/overlays/default.nix b/overlays/default.nix
new file mode 100644
index 00000000..01c92765
--- /dev/null
+++ b/overlays/default.nix
@@ -0,0 +1,25 @@
+{ lib, ... }:
+
+let
+ l = lib.extend (import ./lib.nix);
+
+ overlaySets = {
+ python = import ./python l;
+ };
+
+ prefixAttrs = prefix: lib.mapAttrs' (name: value: lib.nameValuePair "${prefix}-${name}" value);
+
+in
+
+{
+ flake = {
+ lib = {
+ inherit (l) overlays;
+ };
+ overlays = lib.pipe overlaySets [
+ (lib.mapAttrs prefixAttrs)
+ (lib.attrValues)
+ (lib.foldl' (a: b: a // b) {})
+ ];
+ };
+}
diff --git a/overlays/lib.nix b/overlays/lib.nix
new file mode 100644
index 00000000..d6d003ae
--- /dev/null
+++ b/overlays/lib.nix
@@ -0,0 +1,39 @@
+lib: _: {
+ overlays = {
+ runOverlay = do: final: prev: do {
+ inherit final prev;
+ util = {
+ callPackageOrTuple = input:
+ if lib.isList input then
+ assert lib.length input == 2; let
+ pkg = lib.head input;
+ args = lib.last input;
+ in final.callPackage pkg args
+ else
+ final.callPackage input { };
+ };
+ };
+
+ callManyPackages = packages: lib.overlays.runOverlay ({ util, ... }:
+ let
+ packages' = lib.listToAttrs (map (x: lib.nameValuePair (baseNameOf x) x) packages);
+ in
+ lib.mapAttrs (lib.const util.callPackageOrTuple) packages'
+ );
+
+ applyOverlays = packageSet: overlays: let
+ combinedOverlay = lib.composeManyExtensions overlays;
+ in
+ # regular extensible package set
+ if packageSet ? extend then
+ packageSet.extend combinedOverlay
+ # makeScope-style package set, this case needs to be handled before makeScopeWithSplicing
+ else if packageSet ? overrideScope' then
+ packageSet.overrideScope' combinedOverlay
+ # makeScopeWithSplicing-style package set
+ else if packageSet ? overrideScope then
+ packageSet.overrideScope combinedOverlay
+ else
+ throw "don't know how to extend the given package set";
+ };
+}
diff --git a/overlays/python/default.nix b/overlays/python/default.nix
new file mode 100644
index 00000000..ab9443df
--- /dev/null
+++ b/overlays/python/default.nix
@@ -0,0 +1,66 @@
+lib: {
+ fixPackages = final: prev: let
+ relaxProtobuf = pkg: pkg.overrideAttrs (old: {
+ nativeBuildInputs = old.nativeBuildInputs ++ [ final.pythonRelaxDepsHook ];
+ pythonRelaxDeps = [ "protobuf" ];
+ });
+ in {
+ invisible-watermark = prev.invisible-watermark.overridePythonAttrs {
+ pythonImportsCheck = [ ];
+ };
+ torchsde = prev.torchsde.overridePythonAttrs { doCheck = false;
+ pythonImportsCheck = []; };
+ pytorch-lightning = relaxProtobuf prev.pytorch-lightning;
+ wandb = relaxProtobuf (prev.wandb.overridePythonAttrs {
+ doCheck = false;
+ });
+ markdown-it-py = prev.markdown-it-py.overrideAttrs (old: {
+ nativeBuildInputs = old.nativeBuildInputs ++ [ final.pythonRelaxDepsHook ];
+ pythonRelaxDeps = [ "linkify-it-py" ];
+ passthru = old.passthru // {
+ optional-dependencies = with final; {
+ linkify = [ linkify-it-py ];
+ plugins = [ mdit-py-plugins ];
+ };
+ };
+ });
+ filterpy = prev.filterpy.overrideAttrs (old: {
+ doInstallCheck = false;
+ });
+ shap = prev.shap.overrideAttrs (old: {
+ doInstallCheck = false;
+ propagatedBuildInputs = old.propagatedBuildInputs ++ [ final.packaging ];
+ pythonImportsCheck = [ "shap" ];
+
+ meta = old.meta // {
+ broken = false;
+ };
+ });
+ streamlit = let
+ streamlit = final.callPackage (final.pkgs.path + "/pkgs/applications/science/machine-learning/streamlit") {
+ protobuf3 = final.protobuf;
+ };
+ in final.toPythonModule (relaxProtobuf streamlit);
+ opencv-python-headless = final.opencv-python;
+ opencv-python = final.opencv4;
+ };
+
+ torchRocm = final: prev: {
+ torch = prev.torch.override {
+ magma = prev.pkgs.magma-hip;
+ cudaSupport = false;
+ rocmSupport = true;
+ };
+ torchvision = prev.torchvision.overridePythonAttrs (old: {
+ patches = (old.patches or []) ++ [ ./torchvision/fix-rocm-build.patch ];
+ });
+ };
+
+ torchCuda = final: prev: {
+ torch = prev.torch.override {
+ magma = prev.pkgs.magma-cuda-static;
+ cudaSupport = true;
+ rocmSupport = false;
+ };
+ };
+}
diff --git a/overlays/python/torchvision/fix-rocm-build.patch b/overlays/python/torchvision/fix-rocm-build.patch
new file mode 100644
index 00000000..8cafa507
--- /dev/null
+++ b/overlays/python/torchvision/fix-rocm-build.patch
@@ -0,0 +1,30 @@
+From 20d90dfc2be8fedce229f47982db656862c9dc32 Mon Sep 17 00:00:00 2001
+From: Paul Mulders
+Date: Thu, 11 May 2023 00:43:51 +0200
+Subject: [PATCH] setup.py: fix ROCm build (#7573)
+
+---
+ setup.py | 8 +++++++-
+ 1 file changed, 7 insertions(+), 1 deletion(-)
+
+diff --git a/setup.py b/setup.py
+index c523ba073c5..732b5c0e1b7 100644
+--- a/setup.py
++++ b/setup.py
+@@ -328,9 +328,15 @@ def get_extensions():
+ image_src = (
+ glob.glob(os.path.join(image_path, "*.cpp"))
+ + glob.glob(os.path.join(image_path, "cpu", "*.cpp"))
+- + glob.glob(os.path.join(image_path, "cuda", "*.cpp"))
+ )
+
++ if is_rocm_pytorch:
++ image_src += glob.glob(os.path.join(image_path, "hip", "*.cpp"))
++ # we need to exclude this in favor of the hipified source
++ image_src.remove(os.path.join(image_path, "image.cpp"))
++ else:
++ image_src += glob.glob(os.path.join(image_path, "cuda", "*.cpp"))
++
+ if use_png or use_jpeg:
+ ext_modules.append(
+ extension(
diff --git a/packages/accelerate/default.nix b/packages/accelerate/default.nix
index 137b0f01..26221c27 100644
--- a/packages/accelerate/default.nix
+++ b/packages/accelerate/default.nix
@@ -2,18 +2,18 @@
# If you run pynixify again, the file will be either overwritten or
# deleted, and you will lose the changes you made to it.
-{ buildPythonPackage, fetchPypi, lib, numpy, packaging, psutil, pyyaml, torch }:
+{ buildPythonPackage, fetchPypi, lib, numpy, packaging, psutil, pyyaml, torch, huggingface-hub }:
buildPythonPackage rec {
pname = "accelerate";
- version = "0.13.1";
+ version = "0.23.0";
src = fetchPypi {
inherit pname version;
- sha256 = "1dk82s80rq8xp3v4hr9a27vgj9k3gy9yssp7ww7i3c0vc07gx2cv";
+ sha256 = "sha256-ITnSGfqaN3c8QnnJr+vp9oHy8p6FopsL6NdiV72OSr4=";
};
- propagatedBuildInputs = [ numpy packaging psutil pyyaml torch ];
+ propagatedBuildInputs = [ numpy packaging psutil pyyaml torch huggingface-hub ];
# TODO FIXME
doCheck = false;
diff --git a/packages/autogptq/default.nix b/packages/autogptq/default.nix
new file mode 100644
index 00000000..f73fceeb
--- /dev/null
+++ b/packages/autogptq/default.nix
@@ -0,0 +1,79 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, safetensors
+, accelerate
+, rouge
+, peft
+, transformers
+, datasets
+, torch
+, cudaPackages
+, rocmPackages
+, symlinkJoin
+, which
+, ninja
+, pybind11
+, gcc11Stdenv
+}:
+let
+ cuda-native-redist = symlinkJoin {
+ name = "cuda-redist";
+ paths = with cudaPackages; [
+ cuda_cudart # cuda_runtime.h
+ cuda_nvcc
+ ];
+ };
+in
+
+buildPythonPackage rec {
+ pname = "autogptq";
+ version = "0.4.2";
+ format = "setuptools";
+
+ BUILD_CUDA_EXT = "1";
+
+ CUDA_HOME = cuda-native-redist;
+ CUDA_VERSION = cudaPackages.cudaVersion;
+
+ buildInputs = [
+ pybind11
+ cudaPackages.cudatoolkit
+ ];
+
+ preBuild = ''
+ export PATH=${gcc11Stdenv.cc}/bin:$PATH
+ '';
+
+ nativeBuildInputs = [
+ which
+ ninja
+ rocmPackages.clr
+ ];
+
+ src = fetchFromGitHub {
+ owner = "PanQiWei";
+ repo = "AutoGPTQ";
+ rev = "51c043c6bef1380e121474ad73ea2a22f2fb5737";
+ hash = "sha256-O/ox/VSMgvqK9SWwlaz8o12fLkz9591p8CVC3e8POQI=";
+ };
+
+ pythonImportsCheck = [ "auto_gptq" ];
+
+ propagatedBuildInputs = [
+ safetensors
+ accelerate
+ rouge
+ peft
+ transformers
+ datasets
+ torch
+ ];
+
+ meta = with lib; {
+ description = "An easy-to-use LLMs quantization package with user-friendly apis, based on GPTQ algorithm";
+ homepage = "https://github.com/PanQiWei/AutoGPTQ";
+ license = licenses.mit;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/compel/default.nix b/packages/compel/default.nix
index a05dae4a..29073143 100644
--- a/packages/compel/default.nix
+++ b/packages/compel/default.nix
@@ -1,18 +1,19 @@
-{ buildPythonPackage, fetchPypi, lib, setuptools, transformers, diffusers, torch }:
+{ buildPythonPackage, fetchPypi, lib, setuptools, transformers, diffusers, pyparsing, torch }:
buildPythonPackage rec {
pname = "compel";
- version = "0.1.7";
+ version = "2.0.2";
format = "pyproject";
src = fetchPypi {
inherit pname version;
- sha256 = "sha256-JP+PX0yENTNnfsAJ/hzgIA/cr/RhIWV1GEa1rYTdlnc=";
+ sha256 = "sha256-Lp3mS26l+d9Z+Prn662aV9HzadzJU8hkWICkm7GcLHw=";
};
propagatedBuildInputs = [
setuptools
diffusers
+ pyparsing
transformers
torch
];
diff --git a/packages/controlnet-aux/default.nix b/packages/controlnet-aux/default.nix
new file mode 100644
index 00000000..408a285e
--- /dev/null
+++ b/packages/controlnet-aux/default.nix
@@ -0,0 +1,52 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, setuptools
+, wheel
+, filelock
+, huggingface-hub
+, opencv-python
+, torchvision
+, einops
+, scikit-image
+, timm
+, pythonRelaxDepsHook
+}:
+
+buildPythonPackage rec {
+ pname = "controlnet-aux";
+ version = "0.0.7";
+ format = "pyproject";
+
+ src = fetchPypi {
+ pname = "controlnet_aux";
+ inherit version;
+ hash = "sha256-23KZMjum04ni/mt9gTGgWica86SsKldHdUSMTQd4vow=";
+ };
+
+ propagatedBuildInputs = [
+ filelock
+ huggingface-hub
+ opencv-python
+ torchvision
+ einops
+ scikit-image
+ timm
+ ];
+
+ nativeBuildInputs = [
+ setuptools
+ wheel
+ pythonRelaxDepsHook
+ ];
+
+ pythonImportsCheck = [ "controlnet_aux" ];
+ pythonRemoveDeps = [ "opencv-python" ];
+
+ meta = with lib; {
+ description = "Auxillary models for controlnet";
+ homepage = "https://pypi.org/project/controlnet-aux/";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/diffusers/default.nix b/packages/diffusers/default.nix
index e28e30f5..7579e85c 100644
--- a/packages/diffusers/default.nix
+++ b/packages/diffusers/default.nix
@@ -2,29 +2,9 @@
, buildPythonPackage
, fetchPypi
, setuptools
-, writeText
+, safetensors
, isPy27
-, pytestCheckHook
-, pytest-mpl
, numpy
-, scipy
-, scikit-learn
-, pandas
-, transformers
-, opencv4
-, lightgbm
-, catboost
-, pyspark
-, sentencepiece
-, tqdm
-, slicer
-, numba
-, matplotlib
-, nose
-, lime
-, cloudpickle
-, ipython
-, packaging
, pillow
, requests
, regex
@@ -34,17 +14,18 @@
buildPythonPackage rec {
pname = "diffusers";
- version = "0.14.0";
+ version = "0.21.4";
disabled = isPy27;
format = "pyproject";
src = fetchPypi {
inherit pname version;
- sha256 = "sha256-sqQqEtq1OMtFo7DGVQMFO6RG5fLfSDbeOFtSON+DCkY=";
+ sha256 = "sha256-P6w4gzF5Qn8WfGdd2nHue09eYnIARXqNUn5Aza+XJog=";
};
propagatedBuildInputs = [
+ safetensors
setuptools
pillow
numpy
diff --git a/packages/dynamicprompts/default.nix b/packages/dynamicprompts/default.nix
new file mode 100644
index 00000000..0be26e12
--- /dev/null
+++ b/packages/dynamicprompts/default.nix
@@ -0,0 +1,55 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, hatchling
+, jinja2
+, pyparsing
+, pytest
+, pytest-cov
+, pytest-lazy-fixture
+, requests
+, transformers
+}:
+
+buildPythonPackage rec {
+ pname = "dynamicprompts";
+ version = "0.27.1";
+ format = "pyproject";
+
+ src = fetchPypi {
+ inherit pname version;
+ hash = "sha256-lS/UgfZoR4wWozdtSAFBenIRljuPxnL8fMQT3dIA7WE=";
+ };
+
+ nativeBuildInputs = [
+ hatchling
+ ];
+
+ propagatedBuildInputs = [
+ jinja2
+ pyparsing
+ ];
+
+ passthru.optional-dependencies = {
+ dev = [
+ pytest
+ pytest-cov
+ pytest-lazy-fixture
+ ];
+ feelinglucky = [
+ requests
+ ];
+ magicprompt = [
+ transformers
+ ];
+ };
+
+ pythonImportsCheck = [ "dynamicprompts" ];
+
+ meta = with lib; {
+ description = "Dynamic prompts templating library for Stable Diffusion";
+ homepage = "https://pypi.org/project/dynamicprompts/";
+ license = licenses.mit;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/easing-functions/default.nix b/packages/easing-functions/default.nix
new file mode 100644
index 00000000..16933c0d
--- /dev/null
+++ b/packages/easing-functions/default.nix
@@ -0,0 +1,32 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, setuptools
+, wheel
+}:
+
+buildPythonPackage rec {
+ pname = "easing-functions";
+ version = "1.0.4";
+ pyproject = true;
+
+ src = fetchPypi {
+ pname = "easing_functions";
+ inherit version;
+ hash = "sha256-4Yx5MdRFuF8oxNFa0Kmke7ZdTi7vwNs4QESPriXj+d4=";
+ };
+
+ nativeBuildInputs = [
+ setuptools
+ wheel
+ ];
+
+ pythonImportsCheck = [ "easing_functions" ];
+
+ meta = with lib; {
+ description = "A collection of the basic easing functions for python";
+ homepage = "https://pypi.org/project/easing-functions/";
+    license = licenses.unfree; # FIXME: nix-init did not find a license
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/fastapi-events/default.nix b/packages/fastapi-events/default.nix
new file mode 100644
index 00000000..5916f475
--- /dev/null
+++ b/packages/fastapi-events/default.nix
@@ -0,0 +1,35 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, setuptools
+, wheel
+}:
+
+buildPythonPackage rec {
+ pname = "fastapi-events";
+ version = "0.8.0";
+ pyproject = true;
+
+ src = fetchFromGitHub {
+ owner = "melvinkcx";
+ repo = "fastapi-events";
+ rev = "v${version}";
+ hash = "sha256-dfLZDacu5jb2HcfI1Y2/xCDr1kTM6E5xlHAPratD+Yw=";
+ };
+
+ doCheck = false;
+
+ nativeBuildInputs = [
+ setuptools
+ wheel
+ ];
+
+ pythonImportsCheck = [ "fastapi_events" ];
+
+ meta = with lib; {
+ description = "Asynchronous event dispatching/handling library for FastAPI and Starlette";
+ homepage = "https://github.com/melvinkcx/fastapi-events";
+ license = licenses.mit;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/fastapi-socketio/default.nix b/packages/fastapi-socketio/default.nix
new file mode 100644
index 00000000..0993139d
--- /dev/null
+++ b/packages/fastapi-socketio/default.nix
@@ -0,0 +1,47 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, setuptools
+, wheel
+, fastapi
+, python-socketio
+, pytest
+}:
+
+buildPythonPackage rec {
+ pname = "fastapi-socketio";
+ version = "0.0.10";
+ format = "pyproject";
+
+ doCheck = false;
+
+ src = fetchPypi {
+ inherit pname version;
+ hash = "sha256-IC+bMZ8BAAHL0RFOySoNnrX1ypMW6uX9QaYIjaCBJyc=";
+ };
+
+ nativeBuildInputs = [
+ setuptools
+ wheel
+ ];
+
+ propagatedBuildInputs = [
+ fastapi
+ python-socketio
+ ];
+
+ passthru.optional-dependencies = {
+ test = [
+ pytest
+ ];
+ };
+
+ pythonImportsCheck = [ "fastapi_socketio" ];
+
+ meta = with lib; {
+ description = "Easily integrate socket.io with your FastAPI app";
+ homepage = "https://pypi.org/project/fastapi-socketio/";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/fastapi/default.nix b/packages/fastapi/default.nix
new file mode 100644
index 00000000..ee3df777
--- /dev/null
+++ b/packages/fastapi/default.nix
@@ -0,0 +1,100 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, pydantic
+, starlette
+, pytestCheckHook
+, pytest-asyncio
+, aiosqlite
+, flask
+, httpx
+, hatchling
+, orjson
+, passlib
+, peewee
+, python-jose
+, sqlalchemy
+, trio
+, pythonOlder
+}:
+
+buildPythonPackage rec {
+ pname = "fastapi";
+ version = "0.85.2";
+ format = "pyproject";
+
+ disabled = pythonOlder "3.7";
+
+ src = fetchFromGitHub {
+ owner = "tiangolo";
+ repo = pname;
+ rev = "refs/tags/${version}";
+ hash = "sha256-j3Set+xWNcRqbn90DJOJQhMrJYI3msvWHlFvN1habP0=";
+ };
+
+ nativeBuildInputs = [
+ hatchling
+ ];
+
+ postPatch = ''
+ substituteInPlace pyproject.toml \
+ --replace "starlette==" "starlette>="
+ '';
+
+ propagatedBuildInputs = [
+ starlette
+ pydantic
+ ];
+
+ doCheck = false;
+
+ checkInputs = [
+ aiosqlite
+ flask
+ httpx
+ orjson
+ passlib
+ peewee
+ python-jose
+ pytestCheckHook
+ pytest-asyncio
+ sqlalchemy
+ trio
+ ] ++ passlib.optional-dependencies.bcrypt;
+
+ pytestFlagsArray = [
+ # ignoring deprecation warnings to avoid test failure from
+ # tests/test_tutorial/test_testing/test_tutorial001.py
+ "-W ignore::DeprecationWarning"
+ ];
+
+ disabledTestPaths = [
+ # Disabled tests require orjson which requires rust nightly
+ "tests/test_default_response_class.py"
+ # Don't test docs and examples
+ "docs_src"
+ ];
+
+ disabledTests = [
+ "test_get_custom_response"
+ # Failed: DID NOT RAISE
+ "test_websocket_invalid_data"
+ "test_websocket_no_credentials"
+    # TypeError: __init__() missing 1...starlette-related
+ "test_head"
+ "test_options"
+ "test_trace"
+ ];
+
+ pythonImportsCheck = [
+ "fastapi"
+ ];
+
+ meta = with lib; {
+ description = "Web framework for building APIs";
+ homepage = "https://github.com/tiangolo/fastapi";
+ license = licenses.mit;
+ maintainers = with maintainers; [ wd15 ];
+ };
+}
+
diff --git a/packages/flexgen/default.nix b/packages/flexgen/default.nix
new file mode 100644
index 00000000..f7918fda
--- /dev/null
+++ b/packages/flexgen/default.nix
@@ -0,0 +1,44 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, setuptools
+, attrs
+, numpy
+, pulp
+, torch
+, tqdm
+, transformers
+}:
+
+buildPythonPackage rec {
+ pname = "flexgen";
+ version = "0.1.7";
+ format = "pyproject";
+
+ src = fetchPypi {
+ inherit pname version;
+ hash = "sha256-GYnl5CYsMWgTdbCfhWcNyjtpnHCXAcYWtMUmAJcRQAM=";
+ };
+
+ nativeBuildInputs = [
+ setuptools
+ ];
+
+ propagatedBuildInputs = [
+ attrs
+ numpy
+ pulp
+ torch
+ tqdm
+ transformers
+ ];
+
+ pythonImportsCheck = [ "flexgen" ];
+
+ meta = with lib; {
+ description = "Running large language models like OPT-175B/GPT-3 on a single GPU. Focusing on high-throughput large-batch generation";
+ homepage = "https://github.com/FMInference/FlexGen";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ jpetrucciani ];
+ };
+}
diff --git a/packages/gradio-client/default.nix b/packages/gradio-client/default.nix
new file mode 100644
index 00000000..b2631c68
--- /dev/null
+++ b/packages/gradio-client/default.nix
@@ -0,0 +1,51 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, hatch-fancy-pypi-readme
+, hatch-requirements-txt
+, hatchling
+, fsspec
+, httpx
+, huggingface-hub
+, packaging
+, requests
+, typing-extensions
+, websockets
+}:
+
+buildPythonPackage rec {
+ pname = "gradio-client";
+ version = "0.2.5";
+ format = "pyproject";
+
+ src = fetchPypi {
+ pname = "gradio_client";
+ inherit version;
+ hash = "sha256-GiTdegmXbbcP0yzbVRK297FcGghPMQtfazAhyXlNkKQ=";
+ };
+
+ nativeBuildInputs = [
+ hatch-fancy-pypi-readme
+ hatch-requirements-txt
+ hatchling
+ ];
+
+ propagatedBuildInputs = [
+ fsspec
+ httpx
+ huggingface-hub
+ packaging
+ requests
+ typing-extensions
+ websockets
+ ];
+
+ pythonImportsCheck = [ "gradio_client" ];
+
+ meta = with lib; {
+ description = "Python library for easily interacting with trained machine learning models";
+ homepage = "https://github.com/gradio-app/gradio";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/gradio/default.nix b/packages/gradio/default.nix
index 30146fb0..19732cab 100644
--- a/packages/gradio/default.nix
+++ b/packages/gradio/default.nix
@@ -4,10 +4,13 @@
, pythonOlder
, writeTextFile
, setuptools
+, altair
, analytics-python
+, aiofiles
, aiohttp
, fastapi
, ffmpy
+, gradio-client
, markdown-it-py
, linkify-it-py
, mdit-py-plugins
@@ -34,6 +37,7 @@
, wandb
, respx
, scikitimage
+, semantic-version
, shap
, ipython
, hatchling
@@ -41,11 +45,12 @@
, hatch-fancy-pypi-readme
, pytestCheckHook
, websockets
+, pythonRelaxDepsHook
}:
buildPythonPackage rec {
pname = "gradio";
- version = "3.5";
+ version = "3.31.0";
disabled = pythonOlder "3.7";
format = "pyproject";
@@ -53,19 +58,24 @@ buildPythonPackage rec {
# and its releases are also more frequent than github tags
src = fetchPypi {
inherit pname version;
- sha256 = "sha256-8MmpH2N1twrPGHS+HBLDWRtpg2Gd1rQzulbOEDr3rNQ=";
+ sha256 = "sha256-4YIhhj64daLOfOqmzsJC8SaNym/OOwe/5fpb0BA8N90=";
};
+ pythonRelaxDeps = [ "mdit-py-plugins" ];
nativeBuildInputs = [
hatchling
hatch-requirements-txt
hatch-fancy-pypi-readme
+ pythonRelaxDepsHook
];
propagatedBuildInputs = [
- analytics-python
+ altair
aiohttp
+ aiofiles
+ analytics-python
fastapi
ffmpy
+ gradio-client
matplotlib
numpy
orjson
@@ -81,10 +91,11 @@ buildPythonPackage rec {
fsspec
httpx
pydantic
+ semantic-version
websockets
markdown-it-py
] ++ markdown-it-py.optional-dependencies.plugins
- ++ markdown-it-py.optional-dependencies.linkify;
+ ++ markdown-it-py.optional-dependencies.linkify;
postPatch = ''
# Unpin h11, as its version was only pinned to aid dependency resolution.
diff --git a/packages/hf-doc-builder/default.nix b/packages/hf-doc-builder/default.nix
new file mode 100644
index 00000000..eb6e9fc2
--- /dev/null
+++ b/packages/hf-doc-builder/default.nix
@@ -0,0 +1,87 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, git-python
+, gql
+, nbformat
+, packaging
+, pyyaml
+, requests
+, tqdm
+, black
+, flake8
+, isort
+, pytest
+, pytest-xdist
+, tokenizers
+, torch
+, transformers
+}:
+
+buildPythonPackage rec {
+ pname = "hf-doc-builder";
+ version = "0.4.0";
+ format = "pyproject";
+
+ src = fetchPypi {
+ inherit pname version;
+ hash = "sha256-I8AnfIa9IK5lXAn8oHJzyvJT51VNFFmnKIWhhhYhVI0=";
+ };
+
+ propagatedBuildInputs = [
+ git-python
+ gql
+ nbformat
+ packaging
+ pyyaml
+ requests
+ tqdm
+ ];
+
+ passthru.optional-dependencies = {
+ all = [
+ black
+ flake8
+ isort
+ pytest
+ pytest-xdist
+ tokenizers
+ torch
+ transformers
+ ];
+ dev = [
+ black
+ flake8
+ isort
+ pytest
+ pytest-xdist
+ tokenizers
+ torch
+ transformers
+ ];
+ quality = [
+ black
+ flake8
+ isort
+ ];
+ testing = [
+ pytest
+ pytest-xdist
+ tokenizers
+ torch
+ transformers
+ ];
+ transformers = [
+ transformers
+ ];
+ };
+
+  pythonImportsCheck = [ "doc_builder" ];
+
+ meta = with lib; {
+ description = "Doc building utility";
+ homepage = "https://github.com/huggingface/doc-builder";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ jpetrucciani ];
+ };
+}
diff --git a/packages/k-diffusion/default.nix b/packages/k-diffusion/default.nix
deleted file mode 100644
index b90d65de..00000000
--- a/packages/k-diffusion/default.nix
+++ /dev/null
@@ -1,40 +0,0 @@
-# WARNING: This file was automatically generated. You should avoid editing it.
-# If you run pynixify again, the file will be either overwritten or
-# deleted, and you will lose the changes you made to it.
-
-{ accelerate, buildPythonPackage, clip-anytorch, einops, fetchPypi, jsonmerge
-, kornia, lib, pillow, resize-right, scikit-image, scipy, torch, torchdiffeq
-, torchvision, tqdm, wandb, clean-fid }:
-
-buildPythonPackage rec {
- pname = "k-diffusion";
- version = "0.0.9";
-
- src = fetchPypi {
- inherit pname version;
- sha256 = "02j7hkhdh57bkvc75xygj50a64dzdi44d1gsw4wjmvp9f7pllpfa";
- };
-
- propagatedBuildInputs = [
- accelerate
- clip-anytorch
- einops
- jsonmerge
- kornia
- pillow
- resize-right
- scikit-image
- scipy
- torch
- torchdiffeq
- torchvision
- tqdm
- wandb
- clean-fid
- ];
-
- # TODO FIXME
- doCheck = false;
-
- meta = with lib; { };
-}
diff --git a/packages/lion-pytorch/default.nix b/packages/lion-pytorch/default.nix
new file mode 100644
index 00000000..38527017
--- /dev/null
+++ b/packages/lion-pytorch/default.nix
@@ -0,0 +1,32 @@
+{ lib
+, buildPythonPackage
+, pythonOlder
+, fetchFromGitHub
+, torch
+}:
+
+buildPythonPackage rec {
+ pname = "lion-pytorch";
+ version = "0.0.7";
+ format = "setuptools";
+ disabled = pythonOlder "3.6";
+
+ src = fetchFromGitHub {
+ owner = "lucidrains";
+ repo = "lion-pytorch";
+ rev = "refs/tags/${version}";
+ hash = "sha256-CSb0s3DKv/KpEmCkCR+Y8iwrLdCL9w9Pl6W46cPB420";
+ };
+
+ propagatedBuildInputs = [ torch ];
+
+ pythonImportsCheck = [ "lion_pytorch" ];
+ doCheck = false; # no tests currently
+
+ meta = with lib; {
+ description = "Optimizer tuned by Google Brain using genetic algorithms";
+ homepage = "https://github.com/lucidrains/lion-pytorch";
+ license = licenses.mit;
+ maintainers = with maintainers; [ bcdarwin ];
+ };
+}
diff --git a/packages/llama-cpp-python/default.nix b/packages/llama-cpp-python/default.nix
new file mode 100644
index 00000000..5608d1ab
--- /dev/null
+++ b/packages/llama-cpp-python/default.nix
@@ -0,0 +1,53 @@
+{ buildPythonPackage, fetchFromGitHub, lib, stdenv, darwin, cmake, ninja, poetry-core, scikit-build, setuptools, typing-extensions }:
+let
+ inherit (stdenv) isDarwin;
+ osSpecific = with darwin.apple_sdk.frameworks; if isDarwin then [ Accelerate CoreGraphics CoreVideo ] else [ ];
+ llama-cpp-pin = fetchFromGitHub {
+ owner = "ggerganov";
+ repo = "llama.cpp";
+ rev = "2e6cd4b02549e343bef3768e6b946f999c82e823";
+ hash = "sha256-VzY3e/EJ+LLx55H0wkIVoHfZ0zAShf6Y9Q3fz4xQ0V8=";
+ };
+in
+buildPythonPackage rec {
+ pname = "llama-cpp-python";
+ version = "0.1.54";
+
+ format = "pyproject";
+ src = fetchFromGitHub {
+ owner = "abetlen";
+ repo = pname;
+ rev = "refs/tags/v${version}";
+ hash = "sha256-8YIMbJIMwWJWkXjnjcgR5kvSq4uBd6E/IA2xRm+W5dM=";
+ };
+
+ preConfigure = ''
+ cp -r ${llama-cpp-pin}/. ./vendor/llama.cpp
+ chmod -R +w ./vendor/llama.cpp
+ '';
+ preBuild = ''
+ cd ..
+ '';
+ buildInputs = osSpecific;
+
+ nativeBuildInputs = [
+ cmake
+ ninja
+ poetry-core
+ scikit-build
+ setuptools
+ ];
+
+ propagatedBuildInputs = [
+ typing-extensions
+ ];
+
+ pythonImportsCheck = [ "llama_cpp" ];
+
+ meta = with lib; {
+ description = "A Python wrapper for llama.cpp";
+ homepage = "https://github.com/abetlen/llama-cpp-python";
+ license = licenses.mit;
+ maintainers = with maintainers; [ jpetrucciani ];
+ };
+}
diff --git a/packages/mediapipe/default.nix b/packages/mediapipe/default.nix
new file mode 100644
index 00000000..774744cd
--- /dev/null
+++ b/packages/mediapipe/default.nix
@@ -0,0 +1,34 @@
+{ lib
+, fetchurl
+, buildPythonPackage
+, protobuf
+, numpy
+, opencv4
+, attrs
+, matplotlib
+, autoPatchelfHook
+}:
+
+buildPythonPackage {
+ pname = "mediapipe";
+ version = "0.10.7";
+ format = "wheel";
+
+ src = fetchurl {
+ url = "https://files.pythonhosted.org/packages/52/92/a2b0f9a943ebee88aa9dab040535ea05908ec102b8052b28c645cf0ac25b/mediapipe-0.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl";
+ sha256 = "sha256-88kAkpxRn4Pj+Ib95WNj+53P36gHjpFt5rXlaX4bpco=";
+ };
+
+ propagatedBuildInputs = [ protobuf numpy opencv4 matplotlib attrs ];
+
+ nativeBuildInputs = [ autoPatchelfHook ];
+
+ pythonImportsCheck = [ "mediapipe" ];
+
+ meta = with lib; {
+ description = "Cross-platform, customizable ML solutions for live and streaming media";
+ homepage = "https://github.com/google/mediapipe/releases/tag/v0.10.7";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/peft/default.nix b/packages/peft/default.nix
new file mode 100644
index 00000000..2444d2ad
--- /dev/null
+++ b/packages/peft/default.nix
@@ -0,0 +1,59 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, accelerate
+, numpy
+, packaging
+, psutil
+, pyyaml
+, torch
+, transformers
+, black
+, hf-doc-builder
+, ruff
+}:
+
+buildPythonPackage rec {
+ pname = "peft";
+ version = "0.2.0";
+ format = "pyproject";
+
+ src = fetchPypi {
+ inherit pname version;
+ hash = "sha256-zjP0hMcDgZBwW2nk0iiSMMfBgZwQhHgUg6yOEY8Kca8=";
+ };
+
+ propagatedBuildInputs = [
+ accelerate
+ numpy
+ packaging
+ psutil
+ pyyaml
+ torch
+ transformers
+ ];
+
+ passthru.optional-dependencies = {
+ dev = [
+ black
+ hf-doc-builder
+ ruff
+ ];
+ docs_specific = [
+ hf-doc-builder
+ ];
+ quality = [
+ black
+ ruff
+ ];
+ };
+
+ pythonImportsCheck = [ "peft" ];
+
+ meta = with lib; {
+ description = "Parameter-Efficient Fine-Tuning (PEFT";
+ homepage = "https://github.com/huggingface/peft";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ jpetrucciani ];
+ };
+}
diff --git a/packages/python-engineio/default.nix b/packages/python-engineio/default.nix
new file mode 100644
index 00000000..8da18ee9
--- /dev/null
+++ b/packages/python-engineio/default.nix
@@ -0,0 +1,73 @@
+{ lib
+, stdenv
+, aiohttp
+, buildPythonPackage
+, eventlet
+, fetchFromGitHub
+, iana-etc
+, libredirect
+, mock
+, pytestCheckHook
+, pythonOlder
+, requests
+, tornado
+, websocket-client
+}:
+
+buildPythonPackage rec {
+ pname = "python-engineio";
+ version = "4.7.1";
+ format = "setuptools";
+
+ disabled = pythonOlder "3.6";
+
+ src = fetchFromGitHub {
+ owner = "miguelgrinberg";
+ repo = "python-engineio";
+ rev = "refs/tags/v${version}";
+ hash = "sha256-jHXpPnrQlIpmQ2sY4y6AUx/6W8Pf+683s4NmmlwZO58=";
+ };
+
+ nativeCheckInputs = [
+ aiohttp
+ eventlet
+ mock
+ requests
+ tornado
+ websocket-client
+ pytestCheckHook
+ ];
+
+ doCheck = !stdenv.isDarwin;
+
+ preCheck = lib.optionalString stdenv.isLinux ''
+ echo "nameserver 127.0.0.1" > resolv.conf
+ export NIX_REDIRECTS=/etc/protocols=${iana-etc}/etc/protocols:/etc/resolv.conf=$(realpath resolv.conf) \
+ LD_PRELOAD=${libredirect}/lib/libredirect.so
+ '';
+
+ postCheck = ''
+ unset NIX_REDIRECTS LD_PRELOAD
+ '';
+
+ # somehow effective log level does not change?
+ disabledTests = [
+ "test_logger"
+ ];
+
+ pythonImportsCheck = [
+ "engineio"
+ ];
+
+ meta = with lib; {
+ description = "Python based Engine.IO client and server";
+ longDescription = ''
+ Engine.IO is a lightweight transport protocol that enables real-time
+ bidirectional event-based communication between clients and a server.
+ '';
+ homepage = "https://github.com/miguelgrinberg/python-engineio/";
+ changelog = "https://github.com/miguelgrinberg/python-engineio/blob/v${version}/CHANGES.md";
+ license = with licenses; [ mit ];
+ maintainers = with maintainers; [ mic92 ];
+ };
+}
diff --git a/packages/pytorch-lightning/default.nix b/packages/pytorch-lightning/default.nix
new file mode 100644
index 00000000..983df8c9
--- /dev/null
+++ b/packages/pytorch-lightning/default.nix
@@ -0,0 +1,71 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, pythonOlder
+, fsspec
+, lightning-utilities
+, numpy
+, packaging
+, pyyaml
+, tensorboardx
+, torch
+, torchmetrics
+, tqdm
+, traitlets
+
+# tests
+, psutil
+, pytestCheckHook
+}:
+
+buildPythonPackage rec {
+ pname = "pytorch-lightning";
+ version = "1.9.3";
+ format = "pyproject";
+
+ src = fetchFromGitHub {
+ owner = "Lightning-AI";
+ repo = "pytorch-lightning";
+ rev = "refs/tags/${version}";
+ hash = "sha256-Xftm2gLNXnBUEPX0ULjSx8LpS5t4k+g64cxUgCAaz2w=";
+ };
+
+ preConfigure = ''
+ export PACKAGE_NAME=pytorch
+ '';
+
+ propagatedBuildInputs = [
+ fsspec
+ numpy
+ packaging
+ pyyaml
+ tensorboardx
+ torch
+ lightning-utilities
+ torchmetrics
+ tqdm
+ traitlets
+ ]
+ ++ fsspec.optional-dependencies.http;
+
+ nativeCheckInputs = [
+ psutil
+ pytestCheckHook
+ ];
+
+ # Some packages are not in NixPkgs; other tests try to build distributed
+ # models, which doesn't work in the sandbox.
+ doCheck = false;
+
+ pythonImportsCheck = [
+ "pytorch_lightning"
+ ];
+
+ meta = with lib; {
+ description = "Lightweight PyTorch wrapper for machine learning researchers";
+ homepage = "https://pytorch-lightning.readthedocs.io";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ tbenst ];
+ };
+}
+
diff --git a/packages/rouge/default.nix b/packages/rouge/default.nix
new file mode 100644
index 00000000..77066edc
--- /dev/null
+++ b/packages/rouge/default.nix
@@ -0,0 +1,31 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, six
+}:
+
+buildPythonPackage rec {
+ pname = "rouge";
+ version = "1.0.1";
+ format = "setuptools";
+
+ src = fetchFromGitHub {
+ owner = "pltrdy";
+ repo = "rouge";
+ rev = version;
+ hash = "sha256-Xxq0N3mwy8+O8PrHNia9RovHS74+029Z+yhT7kdkLbk=";
+ };
+
+ pythonImportsCheck = [ "rouge" ];
+
+ propagatedBuildInputs = [ six ];
+
+ doCheck = false;
+
+ meta = with lib; {
+ description = "A full Python Implementation of the ROUGE Metric (not a wrapper)";
+ homepage = "https://github.com/pltrdy/rouge";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/packages/rwkv/default.nix b/packages/rwkv/default.nix
new file mode 100644
index 00000000..e4ffcee9
--- /dev/null
+++ b/packages/rwkv/default.nix
@@ -0,0 +1,31 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, setuptools
+, tokenizers
+}:
+
+buildPythonPackage rec {
+ pname = "rwkv";
+ version = "0.7.4";
+ format = "pyproject";
+
+ src = fetchPypi {
+ inherit pname version;
+ hash = "sha256-35hoK+o0xE+Pcc3V7G/+c8rOpQL1Xwj3JbAU3oIHM+Y=";
+ };
+
+ propagatedBuildInputs = [
+ setuptools
+ tokenizers
+ ];
+
+ pythonImportsCheck = [ "rwkv" ];
+
+ meta = with lib; {
+ description = "The RWKV Language Model";
+ homepage = "https://github.com/BlinkDL/ChatRWKV";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ jpetrucciani ];
+ };
+}
diff --git a/packages/safetensors/Cargo.lock b/packages/safetensors/Cargo.lock
deleted file mode 100644
index 1febf05d..00000000
--- a/packages/safetensors/Cargo.lock
+++ /dev/null
@@ -1,378 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-version = 3
-
-[[package]]
-name = "autocfg"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
-
-[[package]]
-name = "bitflags"
-version = "1.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-
-[[package]]
-name = "indoc"
-version = "1.0.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306"
-
-[[package]]
-name = "itoa"
-version = "1.0.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
-
-[[package]]
-name = "libc"
-version = "0.2.139"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
-
-[[package]]
-name = "libloading"
-version = "0.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
-dependencies = [
- "cfg-if",
- "winapi",
-]
-
-[[package]]
-name = "lock_api"
-version = "0.4.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
-dependencies = [
- "autocfg",
- "scopeguard",
-]
-
-[[package]]
-name = "memmap2"
-version = "0.5.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "memoffset"
-version = "0.6.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "once_cell"
-version = "1.17.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
-
-[[package]]
-name = "parking_lot"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
-dependencies = [
- "lock_api",
- "parking_lot_core",
-]
-
-[[package]]
-name = "parking_lot_core"
-version = "0.9.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521"
-dependencies = [
- "cfg-if",
- "libc",
- "redox_syscall",
- "smallvec",
- "windows-sys",
-]
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.51"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
-dependencies = [
- "unicode-ident",
-]
-
-[[package]]
-name = "pyo3"
-version = "0.17.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543"
-dependencies = [
- "cfg-if",
- "indoc",
- "libc",
- "memoffset",
- "parking_lot",
- "pyo3-build-config",
- "pyo3-ffi",
- "pyo3-macros",
- "unindent",
-]
-
-[[package]]
-name = "pyo3-build-config"
-version = "0.17.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8"
-dependencies = [
- "once_cell",
- "target-lexicon",
-]
-
-[[package]]
-name = "pyo3-ffi"
-version = "0.17.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc"
-dependencies = [
- "libc",
- "pyo3-build-config",
-]
-
-[[package]]
-name = "pyo3-macros"
-version = "0.17.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28"
-dependencies = [
- "proc-macro2",
- "pyo3-macros-backend",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "pyo3-macros-backend"
-version = "0.17.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
-dependencies = [
- "proc-macro2",
-]
-
-[[package]]
-name = "redox_syscall"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
-dependencies = [
- "bitflags",
-]
-
-[[package]]
-name = "ryu"
-version = "1.0.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
-
-[[package]]
-name = "safetensors"
-version = "0.2.8"
-dependencies = [
- "serde",
- "serde_json",
-]
-
-[[package]]
-name = "safetensors-python"
-version = "0.2.8"
-dependencies = [
- "libloading",
- "memmap2",
- "pyo3",
- "safetensors",
- "serde_json",
-]
-
-[[package]]
-name = "scopeguard"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
-
-[[package]]
-name = "serde"
-version = "1.0.152"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
-dependencies = [
- "serde_derive",
-]
-
-[[package]]
-name = "serde_derive"
-version = "1.0.152"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "serde_json"
-version = "1.0.93"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
-dependencies = [
- "itoa",
- "ryu",
- "serde",
-]
-
-[[package]]
-name = "smallvec"
-version = "1.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
-
-[[package]]
-name = "syn"
-version = "1.0.109"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "target-lexicon"
-version = "0.12.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ae9980cab1db3fceee2f6c6f643d5d8de2997c58ee8d25fb0cc8a9e9e7348e5"
-
-[[package]]
-name = "unicode-ident"
-version = "1.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
-
-[[package]]
-name = "unindent"
-version = "0.1.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c"
-
-[[package]]
-name = "winapi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
-dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
-
-[[package]]
-name = "windows-sys"
-version = "0.45.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
-dependencies = [
- "windows-targets",
-]
-
-[[package]]
-name = "windows-targets"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7"
-dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
-]
-
-[[package]]
-name = "windows_aarch64_gnullvm"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608"
-
-[[package]]
-name = "windows_aarch64_msvc"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7"
-
-[[package]]
-name = "windows_i686_gnu"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640"
-
-[[package]]
-name = "windows_i686_msvc"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605"
-
-[[package]]
-name = "windows_x86_64_gnu"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45"
-
-[[package]]
-name = "windows_x86_64_gnullvm"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463"
-
-[[package]]
-name = "windows_x86_64_msvc"
-version = "0.42.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd"
diff --git a/packages/safetensors/default.nix b/packages/safetensors/default.nix
index 74beb1a0..432361ff 100644
--- a/packages/safetensors/default.nix
+++ b/packages/safetensors/default.nix
@@ -1,7 +1,9 @@
{ lib
-, fetchPypi
+, fetchFromGitHub
, runCommand
, rustPlatform
+, cargo
+, rustc
, buildPythonPackage
, setuptools
, setuptools-rust
@@ -14,36 +16,30 @@
, numpy
, pytest
, pytest-benchmark
-, tensorflow
, torch
}:
let
-
pname = "safetensors";
- version = "0.2.8";
-
- patchedSrc = runCommand "patched-src" {
- src = fetchPypi {
- inherit pname version;
- hash = "sha256-JyCyCmo4x5ncp5vXbK7qwvffWFqdT31Z+n4o7/nMsn8=";
- };
- } ''
- unpackPhase
- cp ${./Cargo.lock} $sourceRoot/Cargo.lock
- cp -r $sourceRoot $out
- '';
+ version = "0.3.3";
+ src = fetchFromGitHub {
+ repo = pname;
+ owner = "huggingface";
+ rev = "v${version}";
+ hash = "sha256-U+indMoLFN6vMZkJTWFG08lsdXuK5gOfgaHmUVl6DPk=";
+ };
in
-buildPythonPackage {
- inherit pname version;
+buildPythonPackage rec {
+ inherit pname version src;
format = "pyproject";
- src = patchedSrc;
+ postPatch = "cd bindings/python";
cargoDeps = rustPlatform.fetchCargoTarball {
- src = patchedSrc;
+ inherit src;
+ postPatch = "cd bindings/python";
name = "${pname}-${version}";
- hash = "sha256-IZKaw4NquK/BbIv1xkMFgNR20vve4H6Re76mvxtcNUA=";
+ hash = "sha256-qiJtiPpNs7wycOyzef34OgXxUGMaKZIXEdqomxtmUD0=";
};
nativeBuildInputs = [
@@ -51,8 +47,8 @@ buildPythonPackage {
setuptools-rust
wheel
rustPlatform.cargoSetupHook
- rustPlatform.rust.cargo
- rustPlatform.rust.rustc
+ cargo
+ rustc
];
propagatedBuildInputs = [
@@ -68,7 +64,6 @@ buildPythonPackage {
pytest
pytest-benchmark
setuptools-rust
- tensorflow
torch
];
diff --git a/packages/starlette/default.nix b/packages/starlette/default.nix
new file mode 100644
index 00000000..70d69e48
--- /dev/null
+++ b/packages/starlette/default.nix
@@ -0,0 +1,88 @@
+{ lib
+, stdenv
+, buildPythonPackage
+, fetchFromGitHub
+, fetchpatch
+, aiofiles
+, anyio
+, contextlib2
+, itsdangerous
+, jinja2
+, python-multipart
+, pyyaml
+, requests
+, aiosqlite
+, pytestCheckHook
+, pythonOlder
+, trio
+, typing-extensions
+}:
+
+buildPythonPackage rec {
+ pname = "starlette";
+ version = "0.20.4";
+ format = "setuptools";
+
+ disabled = pythonOlder "3.6";
+
+ src = fetchFromGitHub {
+ owner = "encode";
+ repo = pname;
+ rev = "refs/tags/${version}";
+ hash = "sha256-vP2TJPn9lRGnLGkO8lUmnsoT6rSnhuWDD3WqNk76SM0=";
+ };
+
+ patches = [
+ (fetchpatch {
+ url = "https://github.com/encode/starlette/commit/ab70211f0e1fb7390668bf4891eeceda8d9723a0.diff";
+ excludes = [ "requirements.txt" ]; # conflicts
+ hash = "sha256-UHf4c4YUWp/1I1vD8J0hMewdlfkmluA+FyGf9ZsSv3Y=";
+ })
+ ];
+
+ postPatch = ''
+ # remove coverage arguments to pytest
+ sed -i '/--cov/d' setup.cfg
+ '';
+
+ propagatedBuildInputs = [
+ aiofiles
+ anyio
+ itsdangerous
+ jinja2
+ python-multipart
+ pyyaml
+ requests
+ ] ++ lib.optionals (pythonOlder "3.8") [
+ typing-extensions
+ ] ++ lib.optionals (pythonOlder "3.7") [
+ contextlib2
+ ];
+
+ doCheck = false;
+
+ checkInputs = [
+ aiosqlite
+ pytestCheckHook
+ trio
+ typing-extensions
+ ];
+
+ disabledTests = [
+ # asserts fail due to inclusion of br in Accept-Encoding
+ "test_websocket_headers"
+ "test_request_headers"
+ ];
+
+ pythonImportsCheck = [
+ "starlette"
+ ];
+
+ meta = with lib; {
+ homepage = "https://www.starlette.io/";
+ description = "The little ASGI framework that shines";
+ license = licenses.bsd3;
+ maintainers = with maintainers; [ wd15 ];
+ };
+}
+
diff --git a/packages/torch-grammar/default.nix b/packages/torch-grammar/default.nix
new file mode 100644
index 00000000..8d2edd67
--- /dev/null
+++ b/packages/torch-grammar/default.nix
@@ -0,0 +1,39 @@
+{ lib
+, buildPythonPackage
+, fetchPypi
+, poetry-core
+, sentencepiece
+, torch
+, transformers
+}:
+
+buildPythonPackage rec {
+ pname = "torch-grammar";
+ version = "0.3.3";
+ pyproject = true;
+
+ src = fetchPypi {
+ pname = "torch_grammar";
+ inherit version;
+ hash = "sha256-bVmv/OjLk3r20NvpYFr8r6IoOzJwq2DNMKRFVHm7dTA=";
+ };
+
+ nativeBuildInputs = [
+ poetry-core
+ ];
+
+ propagatedBuildInputs = [
+ sentencepiece
+ torch
+ transformers
+ ];
+
+ pythonImportsCheck = [ "torch_grammar" ];
+
+ meta = with lib; {
+ description = "Restrict LLM generations to a context-free grammar";
+ homepage = "https://pypi.org/project/torch-grammar/";
+ license = licenses.mit;
+ maintainers = with maintainers; [ ];
+ };
+}
diff --git a/projects/invokeai/default.nix b/projects/invokeai/default.nix
index cce85984..f735d4d1 100644
--- a/projects/invokeai/default.nix
+++ b/projects/invokeai/default.nix
@@ -1,8 +1,76 @@
{ config, inputs, lib, withSystem, ... }:
+let
+ l = lib // config.flake.lib;
+ inherit (config.flake) overlays;
+in
+
{
perSystem = { config, pkgs, ... }: let
- inherit (config.dependencySets) aipython3-amd aipython3-nvidia;
+ commonOverlays = [
+ overlays.python-fixPackages
+ (l.overlays.callManyPackages [
+ ../../packages/mediapipe
+ ../../packages/safetensors
+ ../../packages/easing-functions
+ ../../packages/dynamicprompts
+ ../../packages/controlnet-aux
+ ../../packages/fastapi
+ ../../packages/fastapi-events
+ ../../packages/fastapi-socketio
+ ../../packages/starlette
+ ../../packages/pytorch-lightning
+ ../../packages/compel
+ ../../packages/taming-transformers-rom1504
+ ../../packages/albumentations
+ ../../packages/qudida
+ ../../packages/gfpgan
+ ../../packages/basicsr
+ ../../packages/facexlib
+ ../../packages/realesrgan
+ ../../packages/codeformer
+ ../../packages/clipseg
+ ../../packages/kornia
+ ../../packages/picklescan
+ ../../packages/diffusers
+ ../../packages/pypatchmatch
+ ../../packages/torch-fidelity
+ ../../packages/resize-right
+ ../../packages/torchdiffeq
+ ../../packages/accelerate
+ ../../packages/clip-anytorch
+ ../../packages/clean-fid
+ ../../packages/getpass-asterisk
+ ../../packages/python-engineio
+ ])
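+    # Strip these requirements (opencv-python*, tb-nightly, clip) from the packages
+    # below; pythonRelaxDepsHook rewrites their metadata so the build does not look for them.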
+ (final: prev: lib.mapAttrs
+ (_: pkg: pkg.overrideAttrs (old: {
+ nativeBuildInputs = old.nativeBuildInputs ++ [ final.pythonRelaxDepsHook ];
+ pythonRemoveDeps = [ "opencv-python-headless" "opencv-python" "tb-nightly" "clip" ];
+ }))
+ {
+ inherit (prev)
+ albumentations
+ qudida
+ gfpgan
+ basicsr
+ facexlib
+ realesrgan
+ clipseg
+ ;
+ }
+ )
+ ];
+
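+  # One Python package set per GPU vendor: the shared overlays plus either the
+  # ROCm or the CUDA torch overlay.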
+ python3Variants = {
+ amd = l.overlays.applyOverlays pkgs.python3Packages (commonOverlays ++ [
+ overlays.python-torchRocm
+ ]);
+ nvidia = l.overlays.applyOverlays pkgs.python3Packages (commonOverlays ++ [
+ overlays.python-torchCuda
+ ]);
+ };
src = inputs.invokeai-src;
@@ -10,10 +78,10 @@
in {
packages = {
invokeai-amd = mkInvokeAIVariant {
- aipython3 = aipython3-amd;
+ python3Packages = python3Variants.amd;
};
invokeai-nvidia = mkInvokeAIVariant {
- aipython3 = aipython3-nvidia;
+ python3Packages = python3Variants.nvidia;
};
};
};
diff --git a/projects/invokeai/nixos/default.nix b/projects/invokeai/nixos/default.nix
index 660786fc..716f3710 100644
--- a/projects/invokeai/nixos/default.nix
+++ b/projects/invokeai/nixos/default.nix
@@ -2,14 +2,22 @@
let
inherit (lib)
- mkIf mkOption mkEnableOption types
- escapeShellArgs getExe optionalString
+ mkIf mkOption mkEnableOption mkRenamedOptionModule types
+ escapeShellArgs flatten getExe mapAttrsToList
+ isBool isFloat isInt isList isString
+ floatToString optionalString
;
cfg = config.services.invokeai;
in
{
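+  # Map the old top-level options onto the freeform `settings` attrset below
+  # (dataDir becomes settings.root).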
+ imports = map ({ old, new ? old }: mkRenamedOptionModule [ "services" "invokeai" old ] [ "services" "invokeai" "settings" new ]) [
+ { old = "host"; }
+ { old = "port"; }
+ { old = "dataDir"; new = "root"; }
+ { old = "precision"; }
+ ];
options.services.invokeai = {
enable = mkEnableOption "InvokeAI Web UI for Stable Diffusion";
@@ -30,40 +38,48 @@ in
type = types.str;
};
- host = mkOption {
- description = "Which IP address to listen on.";
- default = "127.0.0.1";
- type = types.str;
- };
-
- port = mkOption {
- description = "Which port to listen on.";
- default = 9090;
- type = types.port;
- };
-
- dataDir = mkOption {
- description = "Where to store InvokeAI's state.";
- default = "/var/lib/invokeai";
- type = types.path;
- };
-
- maxLoadedModels = mkOption {
- description = "Maximum amount of models to keep in VRAM at once.";
- default = 1;
- type = types.ints.positive;
- };
-
- nsfwChecker = mkEnableOption "the NSFW Checker";
-
- precision = mkOption {
- description = "Set model precision.";
- default = "auto";
- type = types.enum [ "auto" "float32" "autocast" "float16" ];
+ settings = mkOption {
+ description = "Structured command line arguments.";
+ default = { };
+ type = types.submodule {
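+        # Freeform: any extra attribute with a simple value (or a list of simple values)
+        # is accepted and forwarded as a CLI flag; the options below only add types and
+        # defaults for the common settings.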
+ freeformType = with types; let
+ atom = nullOr (oneOf [
+ bool
+ str
+ int
+ float
+ ]);
+ in attrsOf (either atom (listOf atom));
+ options = {
+ host = mkOption {
+ description = "Which IP address to listen on.";
+ default = "127.0.0.1";
+ type = types.str;
+ };
+
+ port = mkOption {
+ description = "Which port to listen on.";
+ default = 9090;
+ type = types.port;
+ };
+
+ root = mkOption {
+ description = "Where to store InvokeAI's state.";
+ default = "/var/lib/invokeai";
+ type = types.path;
+ };
+
+ precision = mkOption {
+ description = "Set model precision.";
+ default = "auto";
+ type = types.enum [ "auto" "float32" "autocast" "float16" ];
+ };
+ };
+ };
};
extraArgs = mkOption {
- description = "Extra command line arguments.";
+ description = "Additional raw command line arguments.";
default = [];
type = with types; listOf str;
};
@@ -71,18 +87,16 @@ in
config = let
- yesno = enable: text: "--${optionalString (!enable) "no-"}${text}";
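+      # Render cfg.settings as CLI arguments: nulls are skipped, booleans become
+      # --name / --no-name, and everything else becomes --name <value>.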
+ cliArgs = (flatten (mapAttrsToList (n: v:
+ if v == null then []
+ else if isBool v then [ "--${optionalString (!v) "no-"}${n}" ]
+ else if isInt v then [ "--${n}" "${toString v}" ]
+ else if isFloat v then [ "--${n}" "${floatToString v}" ]
+ else if isString v then ["--${n}" v ]
+ else if isList v then [ "--${n}" (toString v) ]
+ else throw "Unhandled type for setting \"${n}\""
+ ) cfg.settings)) ++ cfg.extraArgs;
- cliArgs = [
- "--web"
- "--host" cfg.host
- "--port" cfg.port
- "--root_dir" cfg.dataDir
- "--max_loaded_models" cfg.maxLoadedModels
- (yesno cfg.nsfwChecker "nsfw_checker")
- "--precision" cfg.precision
- ] ++ cfg.extraArgs;
- initialModelsPath = "${cfg.package}/${cfg.package.pythonModule.sitePackages}/invokeai/configs/INITIAL_MODELS.yaml";
in mkIf cfg.enable {
users.users = mkIf (cfg.user == "invokeai") {
invokeai = {
@@ -96,11 +110,11 @@ in
systemd.services.invokeai = {
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
- preStart = ''
- ln -sf ${initialModelsPath} '${cfg.dataDir}/configs/INITIAL_MODELS.yaml'
- cp -L --no-clobber --no-preserve=mode ${initialModelsPath} '${cfg.dataDir}/configs/models.yaml'
- '';
- environment.HOME = "${cfg.dataDir}/.home";
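+      # Point InvokeAI at its state directory; NIXIFIED_AI_NONINTERACTIVE makes the
+      # wrapper's first-run invokeai-configure non-interactive.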
+ environment = {
+ HOME = "${cfg.settings.root}/.home";
+ INVOKEAI_ROOT = "${cfg.settings.root}";
+ NIXIFIED_AI_NONINTERACTIVE = "1";
+ };
serviceConfig = {
User = cfg.user;
Group = cfg.group;
@@ -109,9 +123,9 @@ in
};
};
systemd.tmpfiles.rules = [
- "d '${cfg.dataDir}' 0755 ${cfg.user} ${cfg.group} - -"
- "d '${cfg.dataDir}/configs' 0755 ${cfg.user} ${cfg.group} - -"
- "d '${cfg.dataDir}/.home' 0750 ${cfg.user} ${cfg.group} - -"
+ "d '${cfg.settings.root}' 0755 ${cfg.user} ${cfg.group} - -"
+ "d '${cfg.settings.root}/configs' 0755 ${cfg.user} ${cfg.group} - -"
+ "d '${cfg.settings.root}/.home' 0750 ${cfg.user} ${cfg.group} - -"
];
};
}
diff --git a/projects/invokeai/package.nix b/projects/invokeai/package.nix
index 9fd4ab0e..1f38f96b 100644
--- a/projects/invokeai/package.nix
+++ b/projects/invokeai/package.nix
@@ -1,26 +1,43 @@
-{ aipython3
+{ python3Packages
# misc
, lib
, src
# extra deps
-, libdrm
}:
let
getVersion = lib.flip lib.pipe [
- (src: builtins.readFile "${src}/ldm/invoke/_version.py")
- (builtins.match ".*__version__='([^']+)'.*")
+ (src: builtins.readFile "${src}/invokeai/version/invokeai_version.py")
+ (builtins.match ".*__version__ = \"([^\"]+)\".*")
builtins.head
];
in
-aipython3.buildPythonPackage {
+python3Packages.buildPythonPackage {
pname = "InvokeAI";
format = "pyproject";
version = getVersion src;
inherit src;
- propagatedBuildInputs = with aipython3; [
+ propagatedBuildInputs = with python3Packages; [
+ semver
+ mediapipe
numpy
+ torchsde
+ uvicorn
+ pyperclip
+ invisible-watermark
+ fastapi
+ fastapi-events
+ fastapi-socketio
+ timm
+ scikit-image
+ controlnet-aux
+ compel
+ python-dotenv
+ uvloop
+ watchfiles
+ httptools
+ websockets
dnspython
albumentations
opencv4
@@ -33,7 +50,6 @@ aipython3.buildPythonPackage {
protobuf
omegaconf
test-tube
- streamlit
einops
taming-transformers-rom1504
torch-fidelity
@@ -48,7 +64,6 @@ aipython3.buildPythonPackage {
pillow
send2trash
flask
- flask-socketio
flask-cors
dependency-injector
gfpgan
@@ -57,10 +72,16 @@ aipython3.buildPythonPackage {
getpass-asterisk
safetensors
datasets
+ accelerate
+ huggingface-hub
+ easing-functions
+ dynamicprompts
+ torchvision
];
- nativeBuildInputs = [ aipython3.pythonRelaxDepsHook ];
- pythonRemoveDeps = [ "clip" "pyreadline3" "flaskwebgui" "opencv-python" ];
- pythonRelaxDeps = [ "dnspython" "protobuf" "flask" "flask-socketio" "pytorch-lightning" ];
+ nativeBuildInputs = with python3Packages; [ pythonRelaxDepsHook pip ];
+ pythonRemoveDeps = [ "clip" "pyreadline3" "flaskwebgui" "opencv-python" "fastapi-socketio" ];
+ pythonRelaxDeps = [ "dnspython" "flask" "requests" "numpy" "pytorch-lightning" "torchsde" "uvicorn" "invisible-watermark" "accelerate" "scikit-image" "safetensors" "huggingface-hub" "torchvision" "test-tube" "fastapi" ];
makeWrapperArgs = [
'' --run '
if [ -d "/usr/lib/wsl/lib" ]
@@ -72,24 +93,33 @@ aipython3.buildPythonPackage {
fi
'
''
- ] ++ lib.optionals (aipython3.torch.rocmSupport or false) [
+ # See note about consumer GPUs:
+ # https://docs.amd.com/bundle/ROCm-Deep-Learning-Guide-v5.4.3/page/Troubleshooting.html
+ " --set-default HSA_OVERRIDE_GFX_VERSION 10.3.0"
+
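+    # Default the state directory to ~/invokeai and run invokeai-configure on first
+    # start (non-interactively when NIXIFIED_AI_NONINTERACTIVE is set).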
+ '' --run 'export INVOKEAI_ROOT="''${INVOKEAI_ROOT:-$HOME/invokeai}"' ''
'' --run '
- if [ ! -e /tmp/nix-pytorch-rocm___/amdgpu.ids ]
+ if [[ ! -d "$INVOKEAI_ROOT" && "''${0##*/}" != invokeai-configure ]]
then
- mkdir -p /tmp/nix-pytorch-rocm___
- ln -s ${libdrm}/share/libdrm/amdgpu.ids /tmp/nix-pytorch-rocm___/amdgpu.ids
+ echo "State directory does not exist, running invokeai-configure"
+ if [[ "''${NIXIFIED_AI_NONINTERACTIVE:-0}" != 0 ]]; then
+ ${placeholder "out"}/bin/invokeai-configure --yes --skip-sd-weights
+ else
+ ${placeholder "out"}/bin/invokeai-configure
+ fi
fi
'
''
- # See note about consumer GPUs:
- # https://docs.amd.com/bundle/ROCm-Deep-Learning-Guide-v5.4.3/page/Troubleshooting.html
- " --set-default HSA_OVERRIDE_GFX_VERSION 10.3.0"
];
patchPhase = ''
runHook prePatch
+ substituteInPlace ./pyproject.toml \
+ --replace 'setuptools~=65.5' 'setuptools' \
+ --replace 'pip~=22.3' 'pip'
+
# Add subprocess to the imports
- substituteInPlace ./ldm/invoke/config/invokeai_configure.py --replace \
+ substituteInPlace ./invokeai/backend/install/invokeai_configure.py --replace \
'import shutil' \
'
import shutil
@@ -98,19 +128,18 @@ import subprocess
# shutil.copytree will inherit the permissions of files in the /nix/store
# which are read only, so we subprocess.call cp instead and tell it not to
# preserve the mode
- substituteInPlace ./ldm/invoke/config/invokeai_configure.py --replace \
+ substituteInPlace ./invokeai/backend/install/invokeai_configure.py --replace \
"shutil.copytree(configs_src, configs_dest, dirs_exist_ok=True)" \
"subprocess.call('cp -r --no-preserve=mode {configs_src} {configs_dest}'.format(configs_src=configs_src, configs_dest=configs_dest), shell=True)"
runHook postPatch
'';
- postFixup = ''
- chmod +x $out/bin/*
- wrapPythonPrograms
- '';
- doCheck = false;
meta = {
description = "Fancy Web UI for Stable Diffusion";
homepage = "https://invoke-ai.github.io/InvokeAI/";
- mainProgram = "invoke.py";
+ mainProgram = "invokeai-web";
};
}
diff --git a/projects/koboldai/default.nix b/projects/koboldai/default.nix
deleted file mode 100644
index 7832e344..00000000
--- a/projects/koboldai/default.nix
+++ /dev/null
@@ -1,42 +0,0 @@
-{ config, inputs, lib, withSystem, ... }:
-
-{
- perSystem = { config, pkgs, ... }: let
- inherit (config.dependencySets) aipython3-amd aipython3-nvidia;
-
- src = inputs.koboldai-src;
-
- mkKoboldAIVariant = args: pkgs.callPackage ./package.nix ({ inherit src; } // args);
- in {
- packages = {
- koboldai-nvidia = mkKoboldAIVariant {
- aipython3 = aipython3-nvidia;
- };
- koboldai-amd = mkKoboldAIVariant {
- aipython3 = aipython3-amd;
- };
- };
- };
-
- flake.nixosModules = let
- packageModule = pkgAttrName: { pkgs, ... }: {
- services.koboldai.package = withSystem pkgs.system (
- { config, ... }: lib.mkOptionDefault config.packages.${pkgAttrName}
- );
- };
- in {
- koboldai = ./nixos;
- koboldai-amd = {
- imports = [
- config.flake.nixosModules.koboldai
- (packageModule "koboldai-amd")
- ];
- };
- koboldai-nvidia = {
- imports = [
- config.flake.nixosModules.koboldai
- (packageModule "koboldai-nvidia")
- ];
- };
- };
-}
diff --git a/projects/koboldai/nixos/default.nix b/projects/koboldai/nixos/default.nix
deleted file mode 100644
index a3956d21..00000000
--- a/projects/koboldai/nixos/default.nix
+++ /dev/null
@@ -1,87 +0,0 @@
-{ config, lib, ... }:
-
-let
- inherit (lib)
- mkIf mkOption mkEnableOption types
- escapeShellArgs getExe optional
- ;
-
- cfg = config.services.koboldai;
-in
-
-{
- options.services.koboldai= {
- enable = mkEnableOption "KoboldAI Web UI";
-
- package = mkOption {
- description = "Which KoboldAI package to use.";
- type = types.package;
- };
-
- user = mkOption {
- description = "Which user to run KoboldAI as.";
- default = "koboldai";
- type = types.str;
- };
-
- group = mkOption {
- description = "Which group to run KoboldAI as.";
- default = "koboldai";
- type = types.str;
- };
-
- host = mkOption {
- description = "Whether to make KoboldAI remotely accessible.";
- default = false;
- type = types.bool;
- };
-
- port = mkOption {
- description = "Which port to listen on.";
- default = 5000;
- type = types.port;
- };
-
- dataDir = mkOption {
- description = "Where to store KoboldAI's state.";
- default = "/var/lib/koboldai";
- type = types.path;
- };
-
- extraArgs = mkOption {
- description = "Extra command line arguments.";
- default = [];
- type = with types; listOf str;
- };
- };
-
- config = let
- cliArgs = (optional cfg.host "--host") ++ [
- "--port" cfg.port
- ] ++ cfg.extraArgs;
- in mkIf cfg.enable {
- users.users = mkIf (cfg.user == "koboldai") {
- koboldai = {
- isSystemUser = true;
- inherit (cfg) group;
- };
- };
- users.groups = mkIf (cfg.group == "koboldai") {
- koboldai = {};
- };
- systemd.services.koboldai = {
- after = [ "network.target" ];
- wantedBy = [ "multi-user.target" ];
- environment.HOME = cfg.dataDir;
- serviceConfig = {
- User = cfg.user;
- Group = cfg.group;
- ExecStart = "${getExe cfg.package} ${escapeShellArgs cliArgs}";
- PrivateTmp = true;
- };
- };
- systemd.tmpfiles.rules = [
- "d '${cfg.dataDir}' 0755 ${cfg.user} ${cfg.group} - -"
- ];
- };
-}
diff --git a/projects/koboldai/package.nix b/projects/koboldai/package.nix
deleted file mode 100644
index c40b96d8..00000000
--- a/projects/koboldai/package.nix
+++ /dev/null
@@ -1,128 +0,0 @@
-{ aipython3
-, lib
-, src
-, wsl ? false
-, fetchFromGitHub
-, writeShellScriptBin
-, runCommand
-, tmpDir ? "/tmp/nix-koboldai"
-, stateDir ? "$HOME/.koboldai/state"
-, libdrm
-}:
-let
- overrides = {
- transformers = aipython3.transformers.overrideAttrs (old: rec {
- propagatedBuildInputs = old.propagatedBuildInputs ++ [ aipython3.huggingface-hub ];
- pname = "transformers";
- version = "4.24.0";
- src = fetchFromGitHub {
- owner = "huggingface";
- repo = pname;
- rev = "refs/tags/v${version}";
- hash = "sha256-aGtTey+QK12URZcGNaRAlcaOphON4ViZOGdigtXU1g0=";
- };
- });
- bleach = aipython3.bleach.overrideAttrs (old: rec {
- pname = "bleach";
- version = "4.1.0";
- src = fetchFromGitHub {
- owner = "mozilla";
- repo = pname;
- rev = "refs/tags/v${version}";
- hash = "sha256-YuvH8FvZBqSYRt7ScKfuTZMsljJQlhFR+3tg7kABF0Y=";
- };
- });
- };
- # The original kobold-ai program wants to write models settings and user
- # scripts to the current working directory, but tries to write to the
- # /nix/store erroneously due to mismanagement of the current working
- # directory in its source code. The patching below replicates the original
- # functionality of the program by making symlinks in the source code
- # directory that point to ${tmpDir}
- #
- # The wrapper script we have made for the program will then create another
- # symlink that points to ${stateDir}, ultimately the default symlink trail
- # looks like the following
- #
- # /nix/store/kobold-ai/models -> /tmp/nix-koboldai -> ~/.koboldai/state
- patchedSrc = runCommand "koboldAi-patchedSrc" {} ''
- cp -r --no-preserve=mode ${src} ./src
- cd src
- rm -rf models settings userscripts
- cd -
- substituteInPlace ./src/aiserver.py \
- --replace 'os.system("")' 'STATE_DIR = os.path.expandvars("${stateDir}")' \
- --replace 'cache_dir="cache"' "cache_dir=os.path.join(STATE_DIR, 'cache')" \
- --replace 'shutil.rmtree("cache/")' 'shutil.rmtree(os.path.join(STATE_DIR, "cache"))' \
- --replace "app.config['SESSION_TYPE'] = 'filesystem'" "app.config['SESSION_TYPE'] = 'memcached'"
-
- # https://stackoverflow.com/questions/59433832/runtimeerror-only-tensors-of-floating-point-dtype-can-require-gradients
- # Typo in casing by author means that breakmodels crash the program, but
- # correcting the case from tensor -> Tensor fixes it
- substituteInPlace ./src/breakmodel.py --replace "torch.tensor" "torch.Tensor"
- mv ./src $out
- ln -s ${tmpDir}/models/ $out/models
- ln -s ${tmpDir}/settings/ $out/settings
- ln -s ${tmpDir}/userscripts/ $out/userscripts
- '';
- koboldPython = aipython3.python.withPackages (_: with aipython3; [
- overrides.bleach
- overrides.transformers
- colorama
- flask
- flask-socketio
- flask-session
- eventlet
- dnspython
- markdown
- sentencepiece
- protobuf
- marshmallow
- loguru
- termcolor
- psutil
- torch-bin
- torchvision-bin
- apispec
- apispec-webframeworks
- lupa
- memcached
- ]);
-
- # See note about consumer GPUs:
- # https://docs.amd.com/bundle/ROCm-Deep-Learning-Guide-v5.4.3/page/Troubleshooting.html
- rocmInit = ''
- if [ ! -e /tmp/nix-pytorch-rocm___/amdgpu.ids ]
- then
- mkdir -p /tmp/nix-pytorch-rocm___
- ln -s ${libdrm}/share/libdrm/amdgpu.ids /tmp/nix-pytorch-rocm___/amdgpu.ids
- fi
- export HSA_OVERRIDE_GFX_VERSION=''${HSA_OVERRIDE_GFX_VERSION-'10.3.0'}
- '';
-in
-(writeShellScriptBin "koboldai" ''
- if [ -d "/usr/lib/wsl/lib" ]
- then
- echo "Running via WSL (Windows Subsystem for Linux), setting LD_LIBRARY_PATH"
- set -x
- export LD_LIBRARY_PATH="/usr/lib/wsl/lib"
- set +x
- fi
- rm -rf ${tmpDir}
- mkdir -p ${tmpDir}
- mkdir -p ${stateDir}/models ${stateDir}/cache ${stateDir}/settings ${stateDir}/userscripts
- ln -s ${stateDir}/models/ ${tmpDir}/models
- ln -s ${stateDir}/settings/ ${tmpDir}/settings
- ln -s ${stateDir}/userscripts/ ${tmpDir}/userscripts
- ${lib.optionalString (aipython3.torch.rocmSupport or false) rocmInit}
- ${koboldPython}/bin/python ${patchedSrc}/aiserver.py $@
-'').overrideAttrs
- (_: {
- meta = {
- maintainers = [ lib.maintainers.matthewcroughan ];
- license = lib.licenses.agpl3;
- description = "browser-based front-end for AI-assisted writing with multiple local & remote AI models";
- homepage = "https://github.com/KoboldAI/KoboldAI-Client";
- mainProgram = "koboldai";
- };
- })
diff --git a/projects/textgen/default.nix b/projects/textgen/default.nix
new file mode 100644
index 00000000..d5d714e7
--- /dev/null
+++ b/projects/textgen/default.nix
@@ -0,0 +1,76 @@
+{ config, inputs, lib, withSystem, ... }:
+
+let
+ l = lib // config.flake.lib;
+ inherit (config.flake) overlays;
+in
+
+{
+ perSystem = { config, pkgs, ... }: let
+ commonOverlays = [
+ overlays.python-fixPackages
+ (l.overlays.callManyPackages [
+ ../../packages/apispec-webframeworks
+ ../../packages/torch-grammar
+ ../../packages/flexgen
+ ../../packages/gradio
+ ../../packages/gradio-client
+ ../../packages/analytics-python
+ ../../packages/ffmpy
+ ../../packages/llama-cpp-python
+ ../../packages/rwkv
+ ../../packages/autogptq
+ ../../packages/rouge
+ ])
+ ];
+
+ python3Variants = {
+ amd = l.overlays.applyOverlays pkgs.python3Packages (commonOverlays ++ [
+ overlays.python-torchRocm
+ ]);
+ nvidia = l.overlays.applyOverlays pkgs.python3Packages (commonOverlays ++ [
+ overlays.python-torchCuda
+ ]);
+ };
+
+ src = inputs.textgen-src;
+
+ mkTextGenVariant = args: pkgs.callPackage ./package.nix ({ inherit src; } // args);
+ in {
+ packages = {
+ textgen-nvidia = mkTextGenVariant {
+ python3Packages = python3Variants.nvidia;
+ };
+ };
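+    # Kept under legacyPackages so the throw below only fires when textgen-amd is
+    # actually evaluated, not during nix flake show/check.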
+ legacyPackages = {
+ textgen-amd = throw ''
+
+
+ text-generation-webui is not supported on AMD yet, as bitsandbytes does not support ROCm.
+ '';
+ };
+ };
+
+ flake.nixosModules = let
+ packageModule = pkgAttrName: { pkgs, ... }: {
+ services.textgen.package = withSystem pkgs.system (
+ { config, ... }: lib.mkOptionDefault config.packages.${pkgAttrName}
+ );
+ };
+ in {
+ textgen = ./nixos;
+ textgen-amd = {
+ imports = [
+ config.flake.nixosModules.textgen
+ (packageModule "textgen-amd")
+ ];
+ };
+ textgen-nvidia = {
+ imports = [
+ config.flake.nixosModules.textgen
+ (packageModule "textgen-nvidia")
+ ];
+ };
+ };
+}
+
diff --git a/projects/textgen/package.nix b/projects/textgen/package.nix
new file mode 100644
index 00000000..face144f
--- /dev/null
+++ b/projects/textgen/package.nix
@@ -0,0 +1,149 @@
+{ python3Packages
+, lib
+, src
+, writeShellScriptBin
+, runCommand
+, tmpDir ? "/tmp/nix-textgen"
+, stateDir ? "$HOME/.textgen/state"
+, libdrm
+, cudaPackages
+}:
+let
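+  # text-generation-webui wants to write models, LoRAs, prompts, characters and
+  # presets next to its own (read-only) source in the /nix/store. The patching
+  # below rewrites those relative paths to $out and symlinks the mutable
+  # directories through ${tmpDir}, which the wrapper script points at ${stateDir}:
+  #
+  #   $out/models -> ${tmpDir}/models -> ${stateDir}/models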
+ patchedSrc = runCommand "textgen-patchedSrc" { } ''
+ cp -r --no-preserve=mode ${src} ./src
+ cd src
+ rm -rf models loras cache
+ mv ./prompts ./_prompts
+ mv ./characters ./_characters
+ mv ./presets ./_presets
+ cd -
+ substituteInPlace ./src/modules/presets.py \
+ --replace "Path('presets" "Path('$out/presets" \
+ --replace "Path('prompts" "Path('$out/prompts" \
+ --replace "Path(f'prompts" "Path(f'$out/prompts" \
+ --replace "Path('extensions" "Path('$out/extensions" \
+ --replace "Path(f'presets" "Path(f'$out/presets" \
+ --replace "Path('softprompts" "Path('$out/softprompts" \
+ --replace "Path(f'softprompts" "Path(f'$out/softprompts" \
+ --replace "Path('characters" "Path('$out/characters" \
+ --replace "Path('cache" "Path('$out/cache"
+ substituteInPlace ./src/download-model.py \
+ --replace "=args.output" "='$out/models/'" \
+ --replace "base_folder=None" "base_folder='$out/models/'"
+ substituteInPlace ./src/modules/html_generator.py \
+ --replace "../css" "$out/css" \
+ --replace 'Path(__file__).resolve().parent / ' "" \
+ --replace "Path(f'css" "Path(f'$out/css"
+ substituteInPlace ./src/modules/utils.py \
+ --replace "Path('css" "Path('$out/css" \
+ --replace "Path('characters" "Path('$out/characters" \
+ --replace "characters/" "$out/characters/"
+ substituteInPlace ./src/modules/chat.py \
+ --replace "folder = 'characters'" "folder = '$out/characters'" \
+ --replace "Path('characters" "Path('$out/characters" \
+ --replace "characters/" "$out/characters/"
+ substituteInPlace ./src/server.py \
+ --replace "Path('presets" "Path('$out/presets" \
+ --replace "Path('prompts" "Path('$out/prompts" \
+ --replace "Path(f'prompts" "Path(f'$out/prompts" \
+ --replace "Path('extensions" "Path('$out/extensions" \
+ --replace "Path(f'presets" "Path(f'$out/presets" \
+ --replace "Path('softprompts" "Path('$out/softprompts" \
+ --replace "Path(f'softprompts" "Path(f'$out/softprompts" \
+ --replace "Path('characters" "Path('$out/characters" \
+ --replace "Path('cache" "Path('$out/cache"
+ mv ./src $out
+ ln -s ${tmpDir}/models/ $out/models
+ ln -s ${tmpDir}/loras/ $out/loras
+ ln -s ${tmpDir}/cache/ $out/cache
+ ln -s ${tmpDir}/prompts/ $out/prompts
+ ln -s ${tmpDir}/characters/ $out/characters
+ ln -s ${tmpDir}/presets/ $out/presets
+ '';
+ textgenPython = python3Packages.python.withPackages (_: with python3Packages; [
+ accelerate
+ bitsandbytes
+ colorama
+ datasets
+ flexgen
+ gradio
+ llama-cpp-python
+ markdown
+ numpy
+ pandas
+ peft
+ pillow
+ pyyaml
+ requests
+ rwkv
+ safetensors
+ sentencepiece
+ tqdm
+ transformers
+ #autogptq # can't build this..
+ torch
+ torch-grammar
+ ]);
+
+ # See note about consumer GPUs:
+ # https://docs.amd.com/bundle/ROCm-Deep-Learning-Guide-v5.4.3/page/Troubleshooting.html
+ rocmInit = ''
+ if [ ! -e /tmp/nix-pytorch-rocm___/amdgpu.ids ]
+ then
+ mkdir -p /tmp/nix-pytorch-rocm___
+ ln -s ${libdrm}/share/libdrm/amdgpu.ids /tmp/nix-pytorch-rocm___/amdgpu.ids
+ fi
+ export HSA_OVERRIDE_GFX_VERSION=''${HSA_OVERRIDE_GFX_VERSION-'10.3.0'}
+ '';
+in
+(writeShellScriptBin "textgen" ''
+ if [ -d "/usr/lib/wsl/lib" ]
+ then
+ echo "Running via WSL (Windows Subsystem for Linux), setting LD_LIBRARY_PATH"
+ set -x
+ export LD_LIBRARY_PATH="/usr/lib/wsl/lib"
+ set +x
+ fi
+ rm -rf ${tmpDir}
+ mkdir -p ${tmpDir}
+ mkdir -p ${stateDir}/models ${stateDir}/cache ${stateDir}/loras ${stateDir}/prompts ${stateDir}/characters ${stateDir}/presets
+ cp -r --no-preserve=mode ${patchedSrc}/_prompts/* ${stateDir}/prompts/
+ cp -r --no-preserve=mode ${patchedSrc}/_characters/* ${stateDir}/characters/
+ cp -r --no-preserve=mode ${patchedSrc}/_presets/* ${stateDir}/presets/
+ ln -s ${stateDir}/models/ ${tmpDir}/models
+ ln -s ${stateDir}/loras/ ${tmpDir}/loras
+ ln -s ${stateDir}/cache/ ${tmpDir}/cache
+ ln -s ${stateDir}/prompts/ ${tmpDir}/prompts
+ ln -s ${stateDir}/characters/ ${tmpDir}/characters
+ ln -s ${stateDir}/presets/ ${tmpDir}/presets
+ ${lib.optionalString (python3Packages.torch.rocmSupport or false) rocmInit}
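+  # Make the host's GPU driver libraries and the CUDA toolkit visible to torch at runtime.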
+  export LD_LIBRARY_PATH="''${LD_LIBRARY_PATH:+$LD_LIBRARY_PATH:}/run/opengl-driver/lib:${cudaPackages.cudatoolkit}/lib"
+  ${textgenPython}/bin/python ${patchedSrc}/server.py "$@" \
+    --model-dir ${stateDir}/models/ \
+    --lora-dir ${stateDir}/loras/
+'').overrideAttrs
+ (_: {
+ meta = {
+ maintainers = [ lib.maintainers.jpetrucciani ];
+ license = lib.licenses.agpl3;
+ description = "";
+ homepage = "https://github.com/oobabooga/text-generation-webui";
+ mainProgram = "textgen";
+ };
+ })
diff --git a/website/src/index.md b/website/src/index.md
index 019a9961..5214a5d7 100644
--- a/website/src/index.md
+++ b/website/src/index.md
@@ -72,18 +72,18 @@ See the following documentation from Microsoft for the details on how to enable
# Packaged Projects
-### KoboldAI
+### textgen (Also called text-generation-webui: A WebUI for LLMs and LoRA training)
-- [Official website](https://github.com/KoboldAI/KoboldAI-Client)
+- [Official website](https://github.com/oobabooga/text-generation-webui)
-A browser-based front-end for AI-assisted writing with multiple local & remote AI models.
+A Gradio web UI for Large Language Models. Supports transformers, GPTQ, AWQ, llama.cpp (GGUF), and Llama models.
#### Get started
-- `nix run github:nixified-ai/flake#koboldai-amd`
-- `nix run github:nixified-ai/flake#koboldai-nvidia`
+- `nix run github:nixified-ai/flake#textgen-amd`
+- `nix run github:nixified-ai/flake#textgen-nvidia`
-![](https://raw.githubusercontent.com/nixified-ai/flake/images/koboldai.webp)
+![](https://raw.githubusercontent.com/nixified-ai/flake/images/textgen.webp)
---