{
  lib,
  aiohttp,
  apscheduler,
  azure-identity,
  azure-keyvault-secrets,
  backoff,
  buildPythonPackage,
  click,
  cryptography,
  fastapi,
  fastapi-sso,
  fetchFromGitHub,
  google-cloud-kms,
  gunicorn,
  importlib-metadata,
  jinja2,
  jsonschema,
  openai,
  orjson,
  poetry-core,
  prisma,
  pydantic,
  pyjwt,
  pynacl,
  python-dotenv,
  python-multipart,
  pythonOlder,
  pyyaml,
  requests,
  resend,
  rq,
  tiktoken,
  tokenizers,
  uvicorn,
}:

buildPythonPackage rec {
  pname = "litellm";
  version = "1.41.28";
  pyproject = true;

  disabled = pythonOlder "3.8";

  src = fetchFromGitHub {
    owner = "BerriAI";
    repo = "litellm";
    rev = "refs/tags/v${version}";
    hash = "sha256-DNFzBl2K4liphEMVPRbLWMzzCxtIcvUgQxvppAnv/10=";
  };
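
  # When bumping version, the hash above can be refreshed by temporarily
  # setting it to lib.fakeHash and copying the real value from the resulting
  # hash-mismatch error during the build.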

  build-system = [ poetry-core ];

  dependencies = [
    aiohttp
    click
    importlib-metadata
    jinja2
    jsonschema
    openai
    pydantic
    python-dotenv
    requests
    tiktoken
    tokenizers
  ];

  passthru.optional-dependencies = {
    proxy = [
      apscheduler
      backoff
      cryptography
      fastapi
      fastapi-sso
      gunicorn
      orjson
      pyjwt
      python-multipart
      pyyaml
      rq
      uvicorn
    ];
    extra_proxy = [
      azure-identity
      azure-keyvault-secrets
      google-cloud-kms
      prisma
      pynacl
      resend
    ];
  };
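
  # A sketch (not prescribed by upstream) of how these extras can be consumed
  # downstream, e.g. in an environment definition:
  #   python3.withPackages (ps: [ ps.litellm ] ++ ps.litellm.optional-dependencies.proxy)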

  pythonImportsCheck = [ "litellm" ];

  # tests require network access
  doCheck = false;

  meta = with lib; {
    description = "Use any LLM as a drop-in replacement for gpt-3.5-turbo; supports Azure, OpenAI, Cohere, Anthropic, Ollama, VLLM, Sagemaker, HuggingFace, Replicate and 100+ other LLMs";
    mainProgram = "litellm";
    homepage = "https://github.com/BerriAI/litellm";
    changelog = "https://github.com/BerriAI/litellm/releases/tag/v${version}";
    license = licenses.mit;
    maintainers = with maintainers; [ happysalada ];
  };
}