{ lib
, buildPythonPackage
, pythonOlder
, fetchFromGitHub
, pythonRelaxDepsHook
  # propagated build inputs
, faiss
, torch
, transformers
, huggingface-hub
, numpy
, pyyaml
, regex
  # optional-dependencies
, aiohttp
, fastapi
, uvicorn
  # TODO add apache-libcloud
  # , apache-libcloud
, rich
, duckdb
, pillow
, networkx
, python-louvain
, onnx
, onnxruntime
, soundfile
, scipy
, ttstokenizer
, beautifulsoup4
, nltk
, pandas
, tika
, imagehash
, timm
, fasttext
, sentencepiece
, accelerate
, onnxmltools
, annoy
, hnswlib
  # TODO add pymagnitude-lite
  # , pymagnitude-lite
, scikit-learn
, sentence-transformers
, croniter
, openpyxl
, requests
, xmltodict
  # native check inputs
, unittestCheckHook
}:
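
# The dependency groups below mirror txtai's optional extras (extras_require
# in the upstream setup.py); "all" aggregates every group packaged here.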
let
  version = "7.1.0";

  api = [ aiohttp fastapi uvicorn ];
  # cloud = [ apache-libcloud ];
  console = [ rich ];
  database = [ duckdb pillow ];
  graph = [ networkx python-louvain ];
  model = [ onnx onnxruntime ];
  pipeline-audio = [ onnx onnxruntime soundfile scipy ttstokenizer ];
  pipeline-data = [ beautifulsoup4 nltk pandas tika ];
  pipeline-image = [ imagehash pillow timm ];
  pipeline-text = [ fasttext sentencepiece ];
  pipeline-train = [ accelerate onnx onnxmltools onnxruntime ];
  pipeline = pipeline-audio ++ pipeline-data ++ pipeline-image ++ pipeline-text ++ pipeline-train;
  similarity = [
    annoy
    fasttext
    hnswlib
    # pymagnitude-lite
    scikit-learn
    sentence-transformers
  ];
  workflow = [
    # apache-libcloud
    croniter
    openpyxl
    pandas
    pillow
    requests
    xmltodict
  ];
  all = api ++ console ++ database ++ graph ++ model ++ pipeline ++ similarity ++ workflow;

  optional-dependencies = {
    inherit api console database graph model pipeline-audio pipeline-data
      pipeline-image pipeline-text pipeline-train pipeline similarity workflow all;
  };
in
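# Built from the GitHub tag rather than the PyPI sdist; the repository is
# assumed to ship the test/python suite that unittestCheckHook runs below.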
buildPythonPackage {
  pname = "txtai";
  inherit version;
  format = "setuptools";

  disabled = pythonOlder "3.8";

  src = fetchFromGitHub {
    owner = "neuml";
    repo = "txtai";
    rev = "refs/tags/v${version}";
    hash = "sha256-L+L2jRkCQKOgd1k3N4mft0Kt6kvCN81lgSQUjoon5rk=";
  };

  nativeBuildInputs = [
    pythonRelaxDepsHook
  ];

  pythonRemoveDeps = [
    # We call it faiss, not faiss-cpu.
    "faiss-cpu"
  ];

  propagatedBuildInputs = [
    faiss
    torch
    transformers
    huggingface-hub
    numpy
    pyyaml
    regex
  ];

  passthru.optional-dependencies = optional-dependencies;

  pythonImportsCheck = [ "txtai" ];

  # Some tests hang forever.
  doCheck = false;

  preCheck = ''
    # Redirect the Hugging Face transformers cache to a writable temp dir
    # so the test suite does not try to write to the sandboxed home.
    export TRANSFORMERS_CACHE=$(mktemp -d)
  '';

  nativeCheckInputs = [
    unittestCheckHook
  ] ++ optional-dependencies.api ++ optional-dependencies.similarity;

  unittestFlagsArray = [
    "-s" "test/python" "-v"
  ];

  meta = with lib; {
    description = "Semantic search and workflows powered by language models";
    changelog = "https://github.com/neuml/txtai/releases/tag/v${version}";
    homepage = "https://github.com/neuml/txtai";
    license = licenses.asl20;
    maintainers = with maintainers; [ happysalada ];
  };
}