mirror of
https://github.com/mudler/LocalAI.git
synced 2026-02-06 12:43:04 -05:00
Compare commits
3762 Commits
v1.20.1
...
llama_cpp/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3f52776a1c | ||
|
|
5f7ece3e94 | ||
|
|
c717b8d800 | ||
|
|
f1d35c4149 | ||
|
|
ee7e77b6c1 | ||
|
|
324fecbb75 | ||
|
|
a79bfcf0a7 | ||
|
|
82495e7fb6 | ||
|
|
6030b12283 | ||
|
|
b5be867e28 | ||
|
|
9b806250d4 | ||
|
|
5f066e702f | ||
|
|
47bb3a3db2 | ||
|
|
51230a801e | ||
|
|
754bedc3ea | ||
|
|
98e5291afc | ||
|
|
e29b2c3aff | ||
|
|
8dc574f3c4 | ||
|
|
05bf2493a5 | ||
|
|
eae4ca08da | ||
|
|
fa284f7445 | ||
|
|
8f69b80520 | ||
|
|
b1fc5acd4a | ||
|
|
fab41c29dd | ||
|
|
fb0ec96396 | ||
|
|
7659461036 | ||
|
|
580687da46 | ||
|
|
1929eb2894 | ||
|
|
b29544d747 | ||
|
|
7c30e82647 | ||
|
|
a1d061c835 | ||
|
|
851c67019c | ||
|
|
53ed5ef189 | ||
|
|
294f7022f3 | ||
|
|
932f6b01a6 | ||
|
|
e96452c5d4 | ||
|
|
5fc8d5bb78 | ||
|
|
121937ed6f | ||
|
|
2e38f2a054 | ||
|
|
2a6187bc01 | ||
|
|
584c48df5a | ||
|
|
8dd67748a1 | ||
|
|
3fd0bf3c88 | ||
|
|
4062a6c404 | ||
|
|
354c0b763e | ||
|
|
40f9065367 | ||
|
|
fc02bc0aba | ||
|
|
45badb75e8 | ||
|
|
d7e1922582 | ||
|
|
642a39afa0 | ||
|
|
34d9deaf39 | ||
|
|
ef37a73e1b | ||
|
|
37de945ae8 | ||
|
|
468f1f4539 | ||
|
|
0640451368 | ||
|
|
99058511cc | ||
|
|
ec293b3b59 | ||
|
|
9b1b6df8e9 | ||
|
|
cd7fbafcd2 | ||
|
|
e5125216cf | ||
|
|
2105f82433 | ||
|
|
49c0c7881a | ||
|
|
f8829376d8 | ||
|
|
0475f63675 | ||
|
|
ec206cc67c | ||
|
|
34171fcf94 | ||
|
|
238c334aa7 | ||
|
|
d2df0a1769 | ||
|
|
d58647ac31 | ||
|
|
c1d3ce9a93 | ||
|
|
c1dd4ff5d5 | ||
|
|
48118b9582 | ||
|
|
ceda2e69db | ||
|
|
cea1703acc | ||
|
|
33fc9b9922 | ||
|
|
b783997c52 | ||
|
|
f6ec06d21c | ||
|
|
7e1f2657d5 | ||
|
|
9589097252 | ||
|
|
cb87d331a9 | ||
|
|
6dfc96249a | ||
|
|
a2564ed654 | ||
|
|
6c747caa34 | ||
|
|
8ae5e0feb9 | ||
|
|
c35dd0a7b8 | ||
|
|
2f5af6b246 | ||
|
|
00cf2e0e0a | ||
|
|
c7a1d9c089 | ||
|
|
ad7ba52166 | ||
|
|
c5b9f45166 | ||
|
|
61b64a65ab | ||
|
|
8276952920 | ||
|
|
b7cd5bfaec | ||
|
|
da4312e4d3 | ||
|
|
7d507c54ed | ||
|
|
df7ed49889 | ||
|
|
bfdc29d316 | ||
|
|
7fdc006071 | ||
|
|
615830245b | ||
|
|
61376c0fa7 | ||
|
|
d0fb23514f | ||
|
|
780d034ac9 | ||
|
|
ec2a044c7e | ||
|
|
ad6fdd21fd | ||
|
|
cd94e6b352 | ||
|
|
b37cef3718 | ||
|
|
9f957d547d | ||
|
|
f0d9f0c5d8 | ||
|
|
d33e1c72a3 | ||
|
|
33f9ee06c9 | ||
|
|
c54677402d | ||
|
|
3fe3a7b23d | ||
|
|
f8ff6fa1fd | ||
|
|
dfadc3696e | ||
|
|
dbcf5fb4fc | ||
|
|
2633137a17 | ||
|
|
d9c17dd23b | ||
|
|
d8b7bd4860 | ||
|
|
a611cbc0f4 | ||
|
|
850b525159 | ||
|
|
35b3426a2a | ||
|
|
cd2b0c0e7c | ||
|
|
73d80c43a8 | ||
|
|
665562b850 | ||
|
|
7a78e4f482 | ||
|
|
6f41a6f934 | ||
|
|
bb54f2da2b | ||
|
|
e1cc7ee107 | ||
|
|
cfc9dfa3d5 | ||
|
|
6a650e68cb | ||
|
|
5e1373877a | ||
|
|
b5b0ab26e7 | ||
|
|
9725bb4bbd | ||
|
|
33b4275bbc | ||
|
|
6644af10c6 | ||
|
|
7c4a2e9b85 | ||
|
|
bcccee3909 | ||
|
|
c6f50ddd0c | ||
|
|
6613373b1b | ||
|
|
1659b3f795 | ||
|
|
30600dd5cb | ||
|
|
179fcf5541 | ||
|
|
9cb75086bb | ||
|
|
594bb462ab | ||
|
|
aa730a7b96 | ||
|
|
0a454c527a | ||
|
|
cf86bcb984 | ||
|
|
a6d9988e84 | ||
|
|
f3a114342e | ||
|
|
0d275ccc03 | ||
|
|
58dba3f01c | ||
|
|
b68d6e8088 | ||
|
|
2352cec7e6 | ||
|
|
de72ae79b5 | ||
|
|
884c07d5f9 | ||
|
|
cca7cbef1e | ||
|
|
32cd0d03d4 | ||
|
|
ee4d9e83d0 | ||
|
|
5547e08a30 | ||
|
|
ca7385c303 | ||
|
|
28759e79d3 | ||
|
|
40249b6b84 | ||
|
|
e09e47bada | ||
|
|
3796558aeb | ||
|
|
cca4f010f8 | ||
|
|
be3ff482d0 | ||
|
|
af255cd0be | ||
|
|
8000228d1b | ||
|
|
79abe0ad77 | ||
|
|
8131d11d1f | ||
|
|
beb01c91f3 | ||
|
|
1ccd64ff6a | ||
|
|
fc7681c68c | ||
|
|
49d026a229 | ||
|
|
f9b968e19d | ||
|
|
022d4a5ecb | ||
|
|
0e917eb01d | ||
|
|
efde0eaf83 | ||
|
|
add8fc35a2 | ||
|
|
9bcf4c56f1 | ||
|
|
3fcfaec7c8 | ||
|
|
a463d40a3e | ||
|
|
1e1f0ee321 | ||
|
|
80b3139fa0 | ||
|
|
5173d37acb | ||
|
|
470e48a900 | ||
|
|
b706dddc93 | ||
|
|
867db3f888 | ||
|
|
b79aa31398 | ||
|
|
fb9a09d49c | ||
|
|
0a78f0ad2d | ||
|
|
d68660bd5a | ||
|
|
30ceee2dec | ||
|
|
18c38335fc | ||
|
|
89040ff6f7 | ||
|
|
de343700fd | ||
|
|
87d18ad951 | ||
|
|
912c8eff04 | ||
|
|
481f30bde8 | ||
|
|
236ac30252 | ||
|
|
6f761e62e4 | ||
|
|
1f29b5f38e | ||
|
|
33d702c5e0 | ||
|
|
95ff236127 | ||
|
|
2d64269763 | ||
|
|
a7a6020328 | ||
|
|
40618164b2 | ||
|
|
eb8c29f90a | ||
|
|
63116a2c6a | ||
|
|
311c2cf539 | ||
|
|
a6fcbd991d | ||
|
|
2e1dc8deef | ||
|
|
282e017b22 | ||
|
|
f86cb8be2d | ||
|
|
5c56ec4f87 | ||
|
|
dd2845a034 | ||
|
|
2e7db014b6 | ||
|
|
6faeee1d92 | ||
|
|
31d73eb934 | ||
|
|
60863b9e52 | ||
|
|
a9fc71e2f3 | ||
|
|
ce9a9a30e0 | ||
|
|
2693a21da5 | ||
|
|
d460eab18e | ||
|
|
c61e5fe266 | ||
|
|
88e570b5de | ||
|
|
6efa97ce0b | ||
|
|
41cde5468a | ||
|
|
d650647db9 | ||
|
|
5bc7ef37a2 | ||
|
|
e0a52807c8 | ||
|
|
1a95a19f87 | ||
|
|
bcfc08e5bf | ||
|
|
4d282ca963 | ||
|
|
525f49b69d | ||
|
|
786aa1de05 | ||
|
|
ea82deb16b | ||
|
|
b0891309ba | ||
|
|
b034cff149 | ||
|
|
432f34f001 | ||
|
|
cbd61dccd4 | ||
|
|
0de0817d71 | ||
|
|
bf57d6e5ac | ||
|
|
0b9603e010 | ||
|
|
8d925217f6 | ||
|
|
669a1ccae6 | ||
|
|
7a7d36ad63 | ||
|
|
8b889955b4 | ||
|
|
a226555949 | ||
|
|
f38f17865a | ||
|
|
03f380701b | ||
|
|
65e2866c97 | ||
|
|
cd3cd899ad | ||
|
|
c2ae3100e7 | ||
|
|
ec0868e691 | ||
|
|
489c289916 | ||
|
|
ac5fb50bcc | ||
|
|
7c9f011d91 | ||
|
|
80f7f17843 | ||
|
|
f0c41d6405 | ||
|
|
8472321a81 | ||
|
|
3bac4724ac | ||
|
|
59db154cbc | ||
|
|
1cc4525f15 | ||
|
|
45c58752e5 | ||
|
|
d5c9c717b5 | ||
|
|
dd7fa6b9f7 | ||
|
|
039c318607 | ||
|
|
0870bf5af6 | ||
|
|
6073b9944e | ||
|
|
ef0e0f3777 | ||
|
|
b7de9e0aa0 | ||
|
|
39292407a1 | ||
|
|
f257bf8d14 | ||
|
|
8ca2fb5ef1 | ||
|
|
3a790fed13 | ||
|
|
a334f28a07 | ||
|
|
dc6663d121 | ||
|
|
103caf9823 | ||
|
|
4226d2d837 | ||
|
|
7434256fc9 | ||
|
|
86a0563ae1 | ||
|
|
c68951cbfe | ||
|
|
8408084120 | ||
|
|
0f2f4c7e23 | ||
|
|
5ffad3b004 | ||
|
|
e5ccd97b8c | ||
|
|
a3b08d46ec | ||
|
|
090f5065fc | ||
|
|
88de2ea01a | ||
|
|
9650d490d4 | ||
|
|
4de1c83764 | ||
|
|
e5978dc714 | ||
|
|
f784986e19 | ||
|
|
bf6426aef2 | ||
|
|
4a91950848 | ||
|
|
4614ea1685 | ||
|
|
f0bf59d1d9 | ||
|
|
83dd678959 | ||
|
|
9d6c9f874a | ||
|
|
c62f2bb336 | ||
|
|
38aeca6f9c | ||
|
|
3b0cf52f6a | ||
|
|
bac3022044 | ||
|
|
cd41701524 | ||
|
|
6a382a1afe | ||
|
|
8dcab2f9c7 | ||
|
|
1d1d5627f0 | ||
|
|
233b3369ad | ||
|
|
c587ac0aef | ||
|
|
38c5d16b57 | ||
|
|
ef6fc052eb | ||
|
|
7ff35c08ac | ||
|
|
43f75ee7f3 | ||
|
|
82811a9630 | ||
|
|
04a3d8e5ac | ||
|
|
9af09b3f8c | ||
|
|
0d590a4044 | ||
|
|
e0a54de4f5 | ||
|
|
6bc2ae5467 | ||
|
|
8caaf49f5d | ||
|
|
1db51044bb | ||
|
|
ec21b58008 | ||
|
|
996259b529 | ||
|
|
f2942cc0e1 | ||
|
|
f8fbfd4fa3 | ||
|
|
41e239c67e | ||
|
|
587827e779 | ||
|
|
456b4982ef | ||
|
|
159388cce8 | ||
|
|
cfc73c7773 | ||
|
|
6d5bde860b | ||
|
|
6ef383033b | ||
|
|
cd494089d9 | ||
|
|
3033845f94 | ||
|
|
0f365ac204 | ||
|
|
525cf198be | ||
|
|
658c2a4f55 | ||
|
|
c987de090d | ||
|
|
04365843e6 | ||
|
|
1dc5781679 | ||
|
|
30704292de | ||
|
|
e52c66c76e | ||
|
|
cb28aef93b | ||
|
|
029f97c2a2 | ||
|
|
3be71be696 | ||
|
|
6adb019f8f | ||
|
|
fcaa0a2f01 | ||
|
|
fd17a3312c | ||
|
|
12d0fe610b | ||
|
|
11c67d16b8 | ||
|
|
63f7c86c4d | ||
|
|
ac89bf77bf | ||
|
|
0395cc02fb | ||
|
|
616972fca0 | ||
|
|
942fbff62d | ||
|
|
2612a0c910 | ||
|
|
2dcb6d7247 | ||
|
|
6978eec69f | ||
|
|
2fcfe54466 | ||
|
|
4e7506a3be | ||
|
|
2a46217f90 | ||
|
|
31ff9dbd52 | ||
|
|
9483abef03 | ||
|
|
ce3e8b3e31 | ||
|
|
f3bb84c9a7 | ||
|
|
ecb1297582 | ||
|
|
73fc702b3c | ||
|
|
e3af62ae1a | ||
|
|
dc21604741 | ||
|
|
5433f1a70e | ||
|
|
d5e032bdcd | ||
|
|
de786f6586 | ||
|
|
8b9bc4aa6e | ||
|
|
e6cea7d28e | ||
|
|
7d7d56f2ce | ||
|
|
1caae91ab6 | ||
|
|
e90f2cb0ca | ||
|
|
5a4291fadd | ||
|
|
91ef58ee5a | ||
|
|
a86e8c78f1 | ||
|
|
adb24214c6 | ||
|
|
f03a0430aa | ||
|
|
73bc12abc0 | ||
|
|
7fa437bbcc | ||
|
|
4a27c99928 | ||
|
|
6ce94834b6 | ||
|
|
84a26458dc | ||
|
|
7aa377b6a9 | ||
|
|
64e66dda4a | ||
|
|
a085f61fdc | ||
|
|
21bdfe5fa4 | ||
|
|
7ebd7b2454 | ||
|
|
6984749ea1 | ||
|
|
c0a206bc7a | ||
|
|
01bbb31fb3 | ||
|
|
72111c597d | ||
|
|
b2f9fc870b | ||
|
|
1fc6d469ac | ||
|
|
05848b2027 | ||
|
|
1da0644aa3 | ||
|
|
c087cd1377 | ||
|
|
c621412f6a | ||
|
|
5a8b1892cd | ||
|
|
5b20426863 | ||
|
|
5c6cd50ed6 | ||
|
|
bace6516f1 | ||
|
|
3baadf6f27 | ||
|
|
8804c701b8 | ||
|
|
7b3ceb19bb | ||
|
|
e7f3effea1 | ||
|
|
61694a2ffb | ||
|
|
573a3f104c | ||
|
|
0e8af53a5b | ||
|
|
960ffa808c | ||
|
|
92719568e5 | ||
|
|
163939af71 | ||
|
|
399f1241dc | ||
|
|
58c9ade2e8 | ||
|
|
6e1c93d84f | ||
|
|
4076ea0494 | ||
|
|
26cbf77c0d | ||
|
|
640790d628 | ||
|
|
4132adea2f | ||
|
|
2b2d907a3a | ||
|
|
6e8f4f584b | ||
|
|
662cfc2b48 | ||
|
|
a25d355d66 | ||
|
|
6d1cfdbefc | ||
|
|
5ecc478968 | ||
|
|
aef5c4291b | ||
|
|
c059f912b9 | ||
|
|
bc1e059259 | ||
|
|
38dc07793a | ||
|
|
da6ef0967d | ||
|
|
7a011e60bd | ||
|
|
e13dd5b09f | ||
|
|
86ee303bd6 | ||
|
|
978ee96fd3 | ||
|
|
3ad5691db6 | ||
|
|
0027681090 | ||
|
|
8cba990edc | ||
|
|
88857696d4 | ||
|
|
23f347e687 | ||
|
|
b6e3dc5f02 | ||
|
|
69667521e2 | ||
|
|
2a92effc5d | ||
|
|
a65e012aa2 | ||
|
|
8e9b41d05f | ||
|
|
078da5c2f0 | ||
|
|
c5af5d139c | ||
|
|
2c9279a542 | ||
|
|
a67d22f5f2 | ||
|
|
dc7c51dcc7 | ||
|
|
98df65c7aa | ||
|
|
1559b6b522 | ||
|
|
a0244e3fb4 | ||
|
|
d66396201a | ||
|
|
9628860c0e | ||
|
|
cae9bf1308 | ||
|
|
5bb5da0760 | ||
|
|
867973a850 | ||
|
|
701cd6b6d5 | ||
|
|
7f61d397d5 | ||
|
|
1ae0b896fa | ||
|
|
3937407cb3 | ||
|
|
0e34ae4f3f | ||
|
|
a38b99ecb6 | ||
|
|
a4a4358182 | ||
|
|
4bc39c2db3 | ||
|
|
cc3df759f8 | ||
|
|
378161060c | ||
|
|
f2f788fe60 | ||
|
|
9fa8ed6b1e | ||
|
|
7fc37c5e29 | ||
|
|
4bc4b1e8bc | ||
|
|
e495b89f18 | ||
|
|
ba09eaea1b | ||
|
|
61cc76c455 | ||
|
|
8abecb4a18 | ||
|
|
8b3f76d8e6 | ||
|
|
4e0497f1a6 | ||
|
|
ba88c9f451 | ||
|
|
a598285825 | ||
|
|
cb7a172897 | ||
|
|
771be28dfb | ||
|
|
7d6b3eb42d | ||
|
|
0bb33fab55 | ||
|
|
e3bf7f77f7 | ||
|
|
bd1707d339 | ||
|
|
0474804541 | ||
|
|
72693b3917 | ||
|
|
a03b70010f | ||
|
|
e3717e5c1a | ||
|
|
c8f6858218 | ||
|
|
06d7cc43ae | ||
|
|
f2147cb850 | ||
|
|
75bb9f4c28 | ||
|
|
a2ef4b1e07 | ||
|
|
161c9fe2db | ||
|
|
7547463f81 | ||
|
|
32e4dfd47b | ||
|
|
f67e5dec68 | ||
|
|
297d54acea | ||
|
|
56f44d448c | ||
|
|
0f0fafacd9 | ||
|
|
4f239bac89 | ||
|
|
04d74ac648 | ||
|
|
18c3dc33ee | ||
|
|
508cfa7369 | ||
|
|
1f94cddbae | ||
|
|
21ae7b4cd4 | ||
|
|
bef22ab547 | ||
|
|
eb04e8cdcf | ||
|
|
17e533a086 | ||
|
|
4fc68409ff | ||
|
|
e587044449 | ||
|
|
1f09db5161 | ||
|
|
05b744f086 | ||
|
|
89ca4bc02d | ||
|
|
e626aa48a4 | ||
|
|
752b5e0339 | ||
|
|
637d72d6e3 | ||
|
|
f3bfec580a | ||
|
|
165c1ddff3 | ||
|
|
fb83238e9e | ||
|
|
700bfa41c7 | ||
|
|
25bdc350df | ||
|
|
1b899e1a68 | ||
|
|
3bf13f8c69 | ||
|
|
7a00729374 | ||
|
|
d484028532 | ||
|
|
0eb7fc2c41 | ||
|
|
a69e30e0c9 | ||
|
|
9c018e6bff | ||
|
|
281e818047 | ||
|
|
270f0e2157 | ||
|
|
673e59e76c | ||
|
|
5a8a2adb44 | ||
|
|
a7317d23bf | ||
|
|
2bab9b5fe2 | ||
|
|
081be3ba7d | ||
|
|
25e6f21322 | ||
|
|
b4df1c9cf3 | ||
|
|
4fbd6609f2 | ||
|
|
7387932f89 | ||
|
|
59c37e67b2 | ||
|
|
c09d227647 | ||
|
|
547d322b28 | ||
|
|
a6f0bb410f | ||
|
|
710f624ecd | ||
|
|
5018452be7 | ||
|
|
ece239966f | ||
|
|
3b8bc7e64c | ||
|
|
fc73b2b430 | ||
|
|
901dba6063 | ||
|
|
b88a7a4550 | ||
|
|
106e40845f | ||
|
|
0064bec8f5 | ||
|
|
9e6dbb0b5a | ||
|
|
d26e61388b | ||
|
|
31a7084c75 | ||
|
|
128612a6fc | ||
|
|
6af3f46bc3 | ||
|
|
d2cf8ef070 | ||
|
|
259ad3cfe6 | ||
|
|
18b320d577 | ||
|
|
89e151f035 | ||
|
|
22060f6410 | ||
|
|
7ee3288460 | ||
|
|
cbbc954a8c | ||
|
|
2c425e9c69 | ||
|
|
c59975ab05 | ||
|
|
05f7004487 | ||
|
|
2f9203cd2a | ||
|
|
f09b33f2ef | ||
|
|
65470b0ab1 | ||
|
|
9a23fe662b | ||
|
|
6d7ac09e96 | ||
|
|
c2a39e3639 | ||
|
|
ae625a4d00 | ||
|
|
7f3a029596 | ||
|
|
b34cf00819 | ||
|
|
d4a10b4300 | ||
|
|
9c74d74f7b | ||
|
|
679ee7bea4 | ||
|
|
77d7dc62c4 | ||
|
|
699519d1fe | ||
|
|
8faf39d34e | ||
|
|
5d261a6fcd | ||
|
|
22d5727089 | ||
|
|
c965197d6f | ||
|
|
994a6c4939 | ||
|
|
f926d2a72b | ||
|
|
ddeb9ed93e | ||
|
|
c7e99c7b59 | ||
|
|
6fabc92e56 | ||
|
|
4645b3c919 | ||
|
|
134fe2705c | ||
|
|
3cca32ba7e | ||
|
|
c069e61b26 | ||
|
|
7fa159e164 | ||
|
|
5f92025617 | ||
|
|
333e1bc732 | ||
|
|
e90b97c144 | ||
|
|
747eeb1d46 | ||
|
|
5d2c53abc0 | ||
|
|
0b1e721242 | ||
|
|
8c76a9ce99 | ||
|
|
338321af5b | ||
|
|
2774a92484 | ||
|
|
1a6bfb41a1 | ||
|
|
314981eaf8 | ||
|
|
d7266c633d | ||
|
|
eb4d5f2b95 | ||
|
|
c63b449ad6 | ||
|
|
dd4a778c2c | ||
|
|
a0896d21d6 | ||
|
|
0e697f951a | ||
|
|
fa4bb9082d | ||
|
|
8ff7b15441 | ||
|
|
dd45f85a20 | ||
|
|
decdd9e522 | ||
|
|
31a21d4a2c | ||
|
|
2c129843a7 | ||
|
|
ce71a0bcfb | ||
|
|
0a32c38317 | ||
|
|
36f596f260 | ||
|
|
953552545b | ||
|
|
835e55b1de | ||
|
|
dcd2921eaa | ||
|
|
5e6459fd18 | ||
|
|
50ddb3eb59 | ||
|
|
5eebfee4b5 | ||
|
|
567919ea90 | ||
|
|
27a3997530 | ||
|
|
192ba2c657 | ||
|
|
92abac9ca8 | ||
|
|
04ebbbd73a | ||
|
|
55305e0d95 | ||
|
|
67623639e4 | ||
|
|
cc76def342 | ||
|
|
4967fa5928 | ||
|
|
2b98e4ec56 | ||
|
|
fa1d058ee2 | ||
|
|
a49a588bfa | ||
|
|
ca7dda61c6 | ||
|
|
ffedddd76d | ||
|
|
766c76ae8e | ||
|
|
3096ff33e9 | ||
|
|
90a7451da4 | ||
|
|
529a4b9ee8 | ||
|
|
0567e104eb | ||
|
|
ecbeacd022 | ||
|
|
2772960e41 | ||
|
|
1b694191e2 | ||
|
|
69578a5f8f | ||
|
|
7d96cfe72b | ||
|
|
423514a5a5 | ||
|
|
12568c7d6d | ||
|
|
8d16a0a536 | ||
|
|
87ca801f00 | ||
|
|
e4ecbb6c30 | ||
|
|
b1a67de2b9 | ||
|
|
71a23910fe | ||
|
|
0ede31f9cf | ||
|
|
9f5dcf2d1e | ||
|
|
e878556e98 | ||
|
|
b096928172 | ||
|
|
db7442ae67 | ||
|
|
b6cd430e08 | ||
|
|
478e50cda2 | ||
|
|
1db2b9943c | ||
|
|
ac41aa8b67 | ||
|
|
156a98e2e7 | ||
|
|
d88ec1209e | ||
|
|
fde8dbfc80 | ||
|
|
879dc73eba | ||
|
|
1dfc52de16 | ||
|
|
1331129485 | ||
|
|
1cd98062e5 | ||
|
|
9791d9b77a | ||
|
|
8956452a45 | ||
|
|
f3659fa49c | ||
|
|
585f2be793 | ||
|
|
d13f160222 | ||
|
|
db5495b9d7 | ||
|
|
3def1ae232 | ||
|
|
c6ebead8e5 | ||
|
|
cff4a950e0 | ||
|
|
e4fa894153 | ||
|
|
69caccfa82 | ||
|
|
ab50c13160 | ||
|
|
56d4e82b14 | ||
|
|
09b5bd48bc | ||
|
|
957dcfb6a9 | ||
|
|
67f7bffd18 | ||
|
|
de81b42b49 | ||
|
|
06eb7e9fa7 | ||
|
|
45bc1ac566 | ||
|
|
02aafeff75 | ||
|
|
6b46c52789 | ||
|
|
d732e261a4 | ||
|
|
807c574e91 | ||
|
|
bb171a39b3 | ||
|
|
941a4fc50e | ||
|
|
afe65bd7bf | ||
|
|
6f9762049c | ||
|
|
122970d70d | ||
|
|
8664b1c7a2 | ||
|
|
c92166f38a | ||
|
|
d616058b12 | ||
|
|
a7b4001b75 | ||
|
|
ff85f01459 | ||
|
|
695f81a08b | ||
|
|
326be287da | ||
|
|
0404d98190 | ||
|
|
0a8ec1eb22 | ||
|
|
d860932dcd | ||
|
|
1cb137bd2d | ||
|
|
3c279e5568 | ||
|
|
fb55e3df57 | ||
|
|
de46fb6e2e | ||
|
|
d7a0e3c5ea | ||
|
|
0533ea817d | ||
|
|
755e4fb5f4 | ||
|
|
e4fdde158f | ||
|
|
6d0712fa6d | ||
|
|
bbbb28e3ca | ||
|
|
3bf2e9d065 | ||
|
|
1461fd8777 | ||
|
|
054860539a | ||
|
|
c87870b18e | ||
|
|
5ad2be9c45 | ||
|
|
61a24746a1 | ||
|
|
d557eb9361 | ||
|
|
a9a1a361a9 | ||
|
|
12d070af80 | ||
|
|
8d40557bc8 | ||
|
|
5a5f3a899a | ||
|
|
a2d1f133c8 | ||
|
|
0ae6420c31 | ||
|
|
3a3e05cf18 | ||
|
|
6a20388e25 | ||
|
|
06c836a937 | ||
|
|
049a13fe78 | ||
|
|
30bf6c962f | ||
|
|
a72b3a23c3 | ||
|
|
e9971b168a | ||
|
|
5b59b5e0c1 | ||
|
|
8cfd712428 | ||
|
|
21f7faa80d | ||
|
|
a6a0121118 | ||
|
|
ba66aa33c5 | ||
|
|
8fc024a770 | ||
|
|
52aa9d08aa | ||
|
|
4c9379c39e | ||
|
|
0ff2c39364 | ||
|
|
1af7e5dc49 | ||
|
|
af3bb64e42 | ||
|
|
77281f836e | ||
|
|
550275811d | ||
|
|
c27ce6c54d | ||
|
|
ac4991b069 | ||
|
|
25bee71bb8 | ||
|
|
b993780a3b | ||
|
|
ea0c9f1168 | ||
|
|
08311f275a | ||
|
|
4de0f2f737 | ||
|
|
42ae807c41 | ||
|
|
94593ba4c3 | ||
|
|
6a6e1a0ea9 | ||
|
|
5b19af99ff | ||
|
|
28fb8e607a | ||
|
|
bb85b6ef00 | ||
|
|
b9b5a635ca | ||
|
|
131ea5b627 | ||
|
|
fac70e9642 | ||
|
|
7e76ea40fb | ||
|
|
de09ae42ef | ||
|
|
6424f0666d | ||
|
|
f3ae94ca70 | ||
|
|
09c9f67a02 | ||
|
|
c264ca542d | ||
|
|
bbf30d416d | ||
|
|
27617a1b06 | ||
|
|
e84081769e | ||
|
|
20119fc580 | ||
|
|
09941c0bfb | ||
|
|
cabe0f4993 | ||
|
|
1977c7f190 | ||
|
|
061e7c4eae | ||
|
|
5313e660f6 | ||
|
|
9e32fda304 | ||
|
|
83202cae54 | ||
|
|
d96addfa9d | ||
|
|
a715fe588d | ||
|
|
2ac4a86bb4 | ||
|
|
8670d480a6 | ||
|
|
af0b4ff237 | ||
|
|
e694764065 | ||
|
|
f3c27e0381 | ||
|
|
bf44319d0d | ||
|
|
5b133a640b | ||
|
|
0030a3fe75 | ||
|
|
0a748b009e | ||
|
|
257e951def | ||
|
|
fbd82a2dd0 | ||
|
|
5db321dad2 | ||
|
|
f5638a6354 | ||
|
|
5f64cc6328 | ||
|
|
28b10e8804 | ||
|
|
3277f5095d | ||
|
|
fe3ced2919 | ||
|
|
45e37a07bb | ||
|
|
e57b750ca3 | ||
|
|
49df492268 | ||
|
|
516cd660f1 | ||
|
|
8fd3ace9a1 | ||
|
|
099469cb05 | ||
|
|
6be8c0c618 | ||
|
|
3cddf24747 | ||
|
|
c330360785 | ||
|
|
8cd51570e5 | ||
|
|
0e7aa5cd15 | ||
|
|
e06a5f49de | ||
|
|
fb2f847507 | ||
|
|
e01acc88c9 | ||
|
|
7a5912908a | ||
|
|
4b1b942a7f | ||
|
|
230fe0098f | ||
|
|
cc163429dc | ||
|
|
f670e0a91c | ||
|
|
731674eee7 | ||
|
|
cc1f6f913f | ||
|
|
7f90ff7aec | ||
|
|
8d45670e41 | ||
|
|
e4b8ddb6a1 | ||
|
|
a801561f81 | ||
|
|
16ced07102 | ||
|
|
d35595372d | ||
|
|
81be192279 | ||
|
|
28a1310890 | ||
|
|
2a702e9ca4 | ||
|
|
3ecaea1b6e | ||
|
|
7daf5ac3e3 | ||
|
|
7bc80c17f8 | ||
|
|
1996ceb293 | ||
|
|
0bc3dc43da | ||
|
|
3324c4e6cb | ||
|
|
7329db4e78 | ||
|
|
464686aee6 | ||
|
|
bfa3d4ccff | ||
|
|
6a91288c8c | ||
|
|
96cb407ee0 | ||
|
|
5a19094d3a | ||
|
|
e3b943ffcb | ||
|
|
df30d6a482 | ||
|
|
c3c27b7e3d | ||
|
|
431716d4d6 | ||
|
|
d290fd159f | ||
|
|
051faaf771 | ||
|
|
41a2dfb0d9 | ||
|
|
ed0094c3d0 | ||
|
|
52fadeded1 | ||
|
|
a37fa8d9c4 | ||
|
|
03974a4dd4 | ||
|
|
1d6afbd65d | ||
|
|
d79f02ea09 | ||
|
|
ba2f426e3e | ||
|
|
732042e5c6 | ||
|
|
f1763aabf2 | ||
|
|
e0d90b173b | ||
|
|
ff07612bfa | ||
|
|
7badaf78a0 | ||
|
|
af41436f1b | ||
|
|
cd5489ce47 | ||
|
|
60ec2cf751 | ||
|
|
244f4b564f | ||
|
|
f1d6d65417 | ||
|
|
72e52c4f6a | ||
|
|
1656e1a88e | ||
|
|
7f62b418a4 | ||
|
|
1f4e66d638 | ||
|
|
a37b2c765c | ||
|
|
b4b67e00bd | ||
|
|
91e1ff5a95 | ||
|
|
d9204ea3b5 | ||
|
|
3d0fbcb4f7 | ||
|
|
03f3df9a82 | ||
|
|
fff35d5528 | ||
|
|
539e94db73 | ||
|
|
0f4f62cf3c | ||
|
|
e7cffd7afa | ||
|
|
26d790a2b6 | ||
|
|
5cf838c08d | ||
|
|
4db8f5cbce | ||
|
|
3b6b37a81b | ||
|
|
8f5aa2d9de | ||
|
|
a6bc8aa7c7 | ||
|
|
4ab107bc1a | ||
|
|
4c3710a531 | ||
|
|
901b06284a | ||
|
|
8eef5a2c5e | ||
|
|
e9cace137b | ||
|
|
9409c99738 | ||
|
|
4d44ebc2f2 | ||
|
|
9a1182fa01 | ||
|
|
66e9ef3f33 | ||
|
|
8282414583 | ||
|
|
d1d7ce83d4 | ||
|
|
5177837ab0 | ||
|
|
f9e368b7c4 | ||
|
|
eef80b9880 | ||
|
|
073eaec729 | ||
|
|
318225f631 | ||
|
|
89429a439b | ||
|
|
200fe358f0 | ||
|
|
e426ab7c23 | ||
|
|
715071b68d | ||
|
|
a05737c7e4 | ||
|
|
e8eb0b2c50 | ||
|
|
e15d29aba2 | ||
|
|
10675ac28e | ||
|
|
0ec25b8b07 | ||
|
|
e81ceff681 | ||
|
|
6831719e1e | ||
|
|
b264a91b3f | ||
|
|
1a08948e63 | ||
|
|
14a1e02f44 | ||
|
|
2f09aa1b85 | ||
|
|
a396040886 | ||
|
|
aeb1dca52e | ||
|
|
83a8d90c52 | ||
|
|
adebd557ce | ||
|
|
0c0e015b38 | ||
|
|
390bb3f58b | ||
|
|
30739d94a4 | ||
|
|
83e2dd5dff | ||
|
|
f496d0113b | ||
|
|
a752183fb5 | ||
|
|
296b97925f | ||
|
|
d0cc3047dc | ||
|
|
032a33de49 | ||
|
|
1e9bf19c8d | ||
|
|
4bd8434ae0 | ||
|
|
958f6eb722 | ||
|
|
96306a39a0 | ||
|
|
895cd7c76a | ||
|
|
cbdbe59f16 | ||
|
|
ee7904f170 | ||
|
|
a761e01944 | ||
|
|
96f8ec0402 | ||
|
|
8027fdf1c7 | ||
|
|
212c8e1a6d | ||
|
|
78533d7230 | ||
|
|
b5eeb5c5ab | ||
|
|
b147ad0596 | ||
|
|
7d0ac1ea3f | ||
|
|
d08d97bebf | ||
|
|
acb2eb23c8 | ||
|
|
de4aa9fb1d | ||
|
|
560ba6f25e | ||
|
|
8131ddd878 | ||
|
|
26c3deb673 | ||
|
|
6d20497d45 | ||
|
|
482c6b8be4 | ||
|
|
5bba5edf45 | ||
|
|
792b866727 | ||
|
|
f053f7bde2 | ||
|
|
d7dee3a5ec | ||
|
|
b8d74e52b1 | ||
|
|
62abe0d2c9 | ||
|
|
5414c294c4 | ||
|
|
1b3e89c89c | ||
|
|
69c6e5b192 | ||
|
|
0c02512f15 | ||
|
|
b0ead0bf12 | ||
|
|
ab5adf40af | ||
|
|
8d82afb595 | ||
|
|
aea71dd2c6 | ||
|
|
9fdb44323d | ||
|
|
6a299c04a7 | ||
|
|
9ce71fe427 | ||
|
|
e8de7b52da | ||
|
|
1780ccadbc | ||
|
|
f8cffd05e5 | ||
|
|
b898cd49b5 | ||
|
|
7cd33d10c9 | ||
|
|
cd480dbe5c | ||
|
|
cb8bf79ada | ||
|
|
b206eab80f | ||
|
|
80dc23fab9 | ||
|
|
844c0c422d | ||
|
|
07655c0c2e | ||
|
|
bebfd19b45 | ||
|
|
6e34430d99 | ||
|
|
0d08aaa29b | ||
|
|
66f9c06e7d | ||
|
|
775adf871f | ||
|
|
a0fc19a3d6 | ||
|
|
7bd18662a7 | ||
|
|
95b0739906 | ||
|
|
cad7e9a1cd | ||
|
|
4426efab05 | ||
|
|
6765b17acd | ||
|
|
ae1340d59b | ||
|
|
fc52f179fe | ||
|
|
4f43a9a162 | ||
|
|
20edd44463 | ||
|
|
1a4f9d8453 | ||
|
|
f2dd33b8f4 | ||
|
|
25e988868c | ||
|
|
ab344e4f47 | ||
|
|
fac7893dd6 | ||
|
|
9be338cfe4 | ||
|
|
b4d4f96919 | ||
|
|
8cc2d01caa | ||
|
|
bf37eebecb | ||
|
|
3f0850b58b | ||
|
|
2ffa89b8b9 | ||
|
|
d43adc0205 | ||
|
|
78b34505ab | ||
|
|
e55a1bed59 | ||
|
|
0d7550ad54 | ||
|
|
b5992255ac | ||
|
|
e845cc0401 | ||
|
|
a10033e8a4 | ||
|
|
6c6d840e6b | ||
|
|
a8b3b3d6f4 | ||
|
|
ec66f7e3b1 | ||
|
|
05841c2435 | ||
|
|
c553d73748 | ||
|
|
1006e8a2ed | ||
|
|
9bcfda171b | ||
|
|
baee4f7bd5 | ||
|
|
286dc32fe0 | ||
|
|
36e4c0fcf0 | ||
|
|
3c21c8789a | ||
|
|
d9facbcee9 | ||
|
|
930280ecac | ||
|
|
3415e6ae74 | ||
|
|
f1082f3c6d | ||
|
|
f345f7a795 | ||
|
|
1a2a7a57b3 | ||
|
|
ae80a2bd24 | ||
|
|
c30ecdd535 | ||
|
|
f16c7cef92 | ||
|
|
e1dd78bcea | ||
|
|
25acb0cbbc | ||
|
|
7674c80bb6 | ||
|
|
e044970a5b | ||
|
|
639526d207 | ||
|
|
998ff9fa22 | ||
|
|
7122c7472e | ||
|
|
671381267a | ||
|
|
d1762e098e | ||
|
|
270d33504b | ||
|
|
9b0983d027 | ||
|
|
afd0af987d | ||
|
|
58524d40c9 | ||
|
|
2a7222c6aa | ||
|
|
0093985e7c | ||
|
|
7f51e2dddf | ||
|
|
f3bbdef77d | ||
|
|
9cbf168dc0 | ||
|
|
9572f0577b | ||
|
|
1a14c7d45a | ||
|
|
5c29e0cd4d | ||
|
|
1a74af1492 | ||
|
|
8f6332ab23 | ||
|
|
816ae7a53a | ||
|
|
1d630e4185 | ||
|
|
bc8dd3ad14 | ||
|
|
b969053701 | ||
|
|
60bf7c9dd7 | ||
|
|
d65c10cee7 | ||
|
|
6c71698299 | ||
|
|
c7c275c7c8 | ||
|
|
d0adbee75d | ||
|
|
159a7f6df2 | ||
|
|
0eb2911aad | ||
|
|
cab9f88ca4 | ||
|
|
a3b675b09e | ||
|
|
6477913e8f | ||
|
|
138cd97ce7 | ||
|
|
4dd9ac39b0 | ||
|
|
23499ddc8a | ||
|
|
8864156300 | ||
|
|
478014ca18 | ||
|
|
d45477b003 | ||
|
|
396fb88e33 | ||
|
|
a429ec1b3f | ||
|
|
5b5fb9c22a | ||
|
|
801a87c3a6 | ||
|
|
badbd212f7 | ||
|
|
c4bbecc4d6 | ||
|
|
8a08e9ec67 | ||
|
|
61e486dbf5 | ||
|
|
f2f387e1dd | ||
|
|
3be9a08fc9 | ||
|
|
b325807c60 | ||
|
|
ae9855a39e | ||
|
|
9ac62b589f | ||
|
|
d12660a286 | ||
|
|
3d3bd2d10f | ||
|
|
b656d10556 | ||
|
|
8c67f38ef6 | ||
|
|
4623728cd7 | ||
|
|
5f804aa6e8 | ||
|
|
f52c6e3a31 | ||
|
|
0b4bb7a562 | ||
|
|
2bc4b56a79 | ||
|
|
fc920cc58a | ||
|
|
fdb560b8e5 | ||
|
|
708cba0c1b | ||
|
|
24abf568cb | ||
|
|
7ca0e2d925 | ||
|
|
037e8030bf | ||
|
|
472d11f884 | ||
|
|
b40d5d12b7 | ||
|
|
6938618e30 | ||
|
|
5d9c530eaa | ||
|
|
9429a53db7 | ||
|
|
1d6d301370 | ||
|
|
8f2be82667 | ||
|
|
cca911f3e5 | ||
|
|
e37bbbaacc | ||
|
|
59cbf38b4b | ||
|
|
432c31d904 | ||
|
|
af33483687 | ||
|
|
5051074845 | ||
|
|
fc4a714992 | ||
|
|
0429e00746 | ||
|
|
73f1f25b9a | ||
|
|
044570fa85 | ||
|
|
37527420de | ||
|
|
1854b8c612 | ||
|
|
b8824f2ad9 | ||
|
|
3ab83e91df | ||
|
|
f2cb261797 | ||
|
|
c85f46a71d | ||
|
|
75b283d83c | ||
|
|
1918efdfdd | ||
|
|
ec239a0cd0 | ||
|
|
b74a936178 | ||
|
|
de1ddb8ba6 | ||
|
|
272763f625 | ||
|
|
3aff87a5cf | ||
|
|
885118e863 | ||
|
|
a03a9b9e51 | ||
|
|
f45d6c746a | ||
|
|
5eceb5f67c | ||
|
|
a9c0dd3a1e | ||
|
|
fb17e737f0 | ||
|
|
b5a21202ed | ||
|
|
e147f1bd3e | ||
|
|
61839efed2 | ||
|
|
a0fe050055 | ||
|
|
f943c4b803 | ||
|
|
cea5a0ea42 | ||
|
|
f5e1527a5a | ||
|
|
7184ca546f | ||
|
|
5592f5e820 | ||
|
|
d4c1746c7d | ||
|
|
88737e1d76 | ||
|
|
ba225f660b | ||
|
|
3127cd1352 | ||
|
|
b90d78d9f6 | ||
|
|
b86a3e4fa6 | ||
|
|
be907d993f | ||
|
|
ab0f8648a3 | ||
|
|
c226149503 | ||
|
|
4a079f893c | ||
|
|
87b7648591 | ||
|
|
cf4f024420 | ||
|
|
3c0ac49d90 | ||
|
|
4307ae5d52 | ||
|
|
50f71f73d7 | ||
|
|
dc04a43868 | ||
|
|
cc04b62d3a | ||
|
|
feb54e65c2 | ||
|
|
44a5dac312 | ||
|
|
074b52bbfe | ||
|
|
236a60bab8 | ||
|
|
7b70f0543b | ||
|
|
5f33962932 | ||
|
|
45b91d501e | ||
|
|
e51792784a | ||
|
|
28594336e9 | ||
|
|
9c9359fc96 | ||
|
|
bc5d1f255b | ||
|
|
0fcefbc168 | ||
|
|
9044b17e4d | ||
|
|
ad31daf03b | ||
|
|
1167487f5e | ||
|
|
61358e4d35 | ||
|
|
2c8a87b1e4 | ||
|
|
55aad5f525 | ||
|
|
58ff47de26 | ||
|
|
0d6c3a7d57 | ||
|
|
e001fada6c | ||
|
|
f4547fcf8a | ||
|
|
7b75e9de2d | ||
|
|
cbedf2f428 | ||
|
|
0597f3b9e9 | ||
|
|
5f688d7a8d | ||
|
|
fa20628b3a | ||
|
|
13bf048cfc | ||
|
|
bdd6920910 | ||
|
|
3c3050f68e | ||
|
|
1688ba7f2a | ||
|
|
e8128a339a | ||
|
|
369110e6bf | ||
|
|
2b62260b6d | ||
|
|
03800ccceb | ||
|
|
f1b86d6e7f | ||
|
|
404ca3cc23 | ||
|
|
7492179c67 | ||
|
|
eeb22317b5 | ||
|
|
9b46dcf006 | ||
|
|
6c8e870812 | ||
|
|
0f8f249465 | ||
|
|
720ffc1d9d | ||
|
|
5c4e4c1cbc | ||
|
|
32ca4a51e5 | ||
|
|
dbe98229e8 | ||
|
|
1de20331ca | ||
|
|
7d2f213dc8 | ||
|
|
76c8d0b868 | ||
|
|
aae7e5fe99 | ||
|
|
9cb30bedeb | ||
|
|
f1a72f3a16 | ||
|
|
a04cf9543d | ||
|
|
7a973c8c16 | ||
|
|
66b06f43af | ||
|
|
74134ef99a | ||
|
|
f5fdef72e3 | ||
|
|
cfc45dff37 | ||
|
|
30f641fe12 | ||
|
|
76f5ba1412 | ||
|
|
b601535cdd | ||
|
|
c9c58a24a8 | ||
|
|
4e3df95737 | ||
|
|
f028ee8a26 | ||
|
|
47dc4337ba | ||
|
|
fa6fcdf53e | ||
|
|
163ca74590 | ||
|
|
961a993b88 | ||
|
|
46847f3bd4 | ||
|
|
f03bbf3188 | ||
|
|
73ab921391 | ||
|
|
eaf0e3022a | ||
|
|
7adbc16bae | ||
|
|
76d813ed1c | ||
|
|
4f1ab2366d | ||
|
|
51e0db367a | ||
|
|
c20e145aa5 | ||
|
|
b1ea9318e6 | ||
|
|
9892d7d584 | ||
|
|
96377feff6 | ||
|
|
eeeedaf5c6 | ||
|
|
de148cb2ad | ||
|
|
8a4df3af99 | ||
|
|
cfb0ac3992 | ||
|
|
57de92e727 | ||
|
|
ccf64cd7e2 | ||
|
|
47c4248703 | ||
|
|
faf203eeb3 | ||
|
|
534cdf1306 | ||
|
|
569171ae97 | ||
|
|
b10c4ad90f | ||
|
|
a7db97e033 | ||
|
|
e0acc149fe | ||
|
|
61e14ad10b | ||
|
|
a028d97888 | ||
|
|
e898e0bdc2 | ||
|
|
8b0b326875 | ||
|
|
57e793482a | ||
|
|
9b1d53f109 | ||
|
|
f6adcd49fb | ||
|
|
65bcc01a34 | ||
|
|
3200090901 | ||
|
|
6516c093cb | ||
|
|
f69afb457c | ||
|
|
c53c0b068b | ||
|
|
939fbe59cc | ||
|
|
62d0d004fa | ||
|
|
de2b5748c3 | ||
|
|
065215341f | ||
|
|
1770b92fb6 | ||
|
|
a73c660fee | ||
|
|
b7d757186c | ||
|
|
1ef379854e | ||
|
|
216838b5da | ||
|
|
6ce0c0e4df | ||
|
|
8ab7517294 | ||
|
|
8a89aafc8c | ||
|
|
c222b2b7c0 | ||
|
|
5b166df96a | ||
|
|
489cb90322 | ||
|
|
c1d76290dc | ||
|
|
668ec2fadc | ||
|
|
ee4f1210bb | ||
|
|
aebaf71be6 | ||
|
|
1db504353c | ||
|
|
b36ced8681 | ||
|
|
5de277cc78 | ||
|
|
daf1b25476 | ||
|
|
e76bdaf61b | ||
|
|
f3aeec6a4d | ||
|
|
4e2a5719e7 | ||
|
|
fe7ffdbc63 | ||
|
|
8079ffee25 | ||
|
|
9688f516e0 | ||
|
|
7903e1f6fa | ||
|
|
1ec64bf683 | ||
|
|
2daf638ef8 | ||
|
|
bc25890a65 | ||
|
|
066fcce57b | ||
|
|
94fe07d073 | ||
|
|
8252a66034 | ||
|
|
5ab0ae9de5 | ||
|
|
7e2ef630aa | ||
|
|
e7d3efec14 | ||
|
|
4f5ec946ac | ||
|
|
9099d0c77e | ||
|
|
b69614c2b3 | ||
|
|
068b90a6dc | ||
|
|
0586fe2d9c | ||
|
|
f1e03bf474 | ||
|
|
7f0093b2c9 | ||
|
|
e8431d62a2 | ||
|
|
adafd7cf23 | ||
|
|
6daef00d30 | ||
|
|
a0cdd19038 | ||
|
|
d454118887 | ||
|
|
356f23bacb | ||
|
|
196c249367 | ||
|
|
e2a8dd64db | ||
|
|
20a5b20b59 | ||
|
|
06d0d00231 | ||
|
|
62c7f745ca | ||
|
|
551faa8ddb | ||
|
|
2c041a2077 | ||
|
|
c4af769d4f | ||
|
|
b425a870b0 | ||
|
|
b59e16742e | ||
|
|
947224b952 | ||
|
|
20cd8814c1 | ||
|
|
ce8045f521 | ||
|
|
1bf5a11437 | ||
|
|
2daa5e6be0 | ||
|
|
b91aa288b5 | ||
|
|
43187d1aba | ||
|
|
97b730e238 | ||
|
|
d11ed5287b | ||
|
|
81ac490202 | ||
|
|
e53dd4a57b | ||
|
|
d274df2fe2 | ||
|
|
0b3a55b9fe | ||
|
|
abd5eea66d | ||
|
|
65c3df392c | ||
|
|
57908df956 | ||
|
|
26e522a558 | ||
|
|
817685e4c1 | ||
|
|
bcad3f3018 | ||
|
|
303370ad87 | ||
|
|
a9fb7174ba | ||
|
|
6d6f50340f | ||
|
|
6a136b2a4b | ||
|
|
8f7045cfa6 | ||
|
|
61c964dce7 | ||
|
|
48d621c64e | ||
|
|
661dbbf2b4 | ||
|
|
254f644c5f | ||
|
|
88edb1e2af | ||
|
|
640a3f1bfe | ||
|
|
b1243453f4 | ||
|
|
dfc651f643 | ||
|
|
d4978383ff | ||
|
|
cde0139363 | ||
|
|
3d4bb757d2 | ||
|
|
a4e749c22f | ||
|
|
25a9685e2f | ||
|
|
94d417c2b7 | ||
|
|
b897d47e0f | ||
|
|
3422d21346 | ||
|
|
a7917a2150 | ||
|
|
7b23b894b4 | ||
|
|
15c083f731 | ||
|
|
293eaad69d | ||
|
|
605126db8a | ||
|
|
3980beabd7 | ||
|
|
11d3ce9edb | ||
|
|
14cb620cd8 | ||
|
|
841dfefd62 | ||
|
|
d1cb2467fd | ||
|
|
a8e10f03e9 | ||
|
|
94010a0a44 | ||
|
|
75bc933dc4 | ||
|
|
8de0f21f7c | ||
|
|
66b03b54cb | ||
|
|
9ea8159683 | ||
|
|
c33083aeca | ||
|
|
eb34f838f8 | ||
|
|
8327e85e34 | ||
|
|
a8c08d83d0 | ||
|
|
e314cdcdde | ||
|
|
4528e969c9 | ||
|
|
175ae751ba | ||
|
|
43bfdc9561 | ||
|
|
546dce68a6 | ||
|
|
82db2fa425 | ||
|
|
a27af2d7ad | ||
|
|
9f43f37150 | ||
|
|
3ad920b50a | ||
|
|
dbe7ac484c | ||
|
|
d9905ba050 | ||
|
|
dd2e243997 | ||
|
|
fd905b483b | ||
|
|
9c5cd9b38b | ||
|
|
07ce0a3c17 | ||
|
|
5be2d22117 | ||
|
|
e88468640f | ||
|
|
81890e76a0 | ||
|
|
a91c2e7aaa | ||
|
|
7748eb6553 | ||
|
|
835932e95e | ||
|
|
ae1ec4e096 | ||
|
|
c75ecfa009 | ||
|
|
8737a65760 | ||
|
|
418c582430 | ||
|
|
6fd0341eca | ||
|
|
ccc7cb0287 | ||
|
|
a1d6cc93a8 | ||
|
|
dc14d80f51 | ||
|
|
b8eb10b6b7 | ||
|
|
0f6b4513bf | ||
|
|
6f0c936f74 | ||
|
|
42136b6f27 | ||
|
|
2810e3ea5c | ||
|
|
11d34e38dc | ||
|
|
06951cdd6b | ||
|
|
103af480c7 | ||
|
|
db401b4d84 | ||
|
|
e0c876aae1 | ||
|
|
5e0847b3d7 | ||
|
|
ee5ca49bc1 | ||
|
|
015835dba2 | ||
|
|
313ea2c4d2 | ||
|
|
26c4058be4 | ||
|
|
32db787991 | ||
|
|
011565aaa3 | ||
|
|
c967ac37bc | ||
|
|
64721606b9 | ||
|
|
7c502ec209 | ||
|
|
7ee25ecfb3 | ||
|
|
cdbcac6a78 | ||
|
|
87f78ecfa9 | ||
|
|
cffecda48c | ||
|
|
963e5903fc | ||
|
|
9c425d55f6 | ||
|
|
398a9efa3a | ||
|
|
8f2cf52f3b | ||
|
|
134ea1a37b | ||
|
|
3e77a17b26 | ||
|
|
a26fb548b1 | ||
|
|
08e1e2251e | ||
|
|
dcabda42d1 | ||
|
|
fd4043266b | ||
|
|
e1db6dce82 | ||
|
|
d5da8c3509 | ||
|
|
9db068388b | ||
|
|
54c0f153e2 | ||
|
|
e45e8a58fc | ||
|
|
52bc463a3f | ||
|
|
0da16c73ba | ||
|
|
e416843f22 | ||
|
|
e65e3253a3 | ||
|
|
bc7d4586ed | ||
|
|
056d4b4fc9 | ||
|
|
5927f9e43e | ||
|
|
98dfa363db | ||
|
|
92cd538829 | ||
|
|
cdcfb2617c | ||
|
|
1a9299a7c0 | ||
|
|
a60b9b7a38 | ||
|
|
1b44a5a3b7 | ||
|
|
fdf1452c6b | ||
|
|
773cec77a2 | ||
|
|
585e0745da | ||
|
|
41db6668f0 | ||
|
|
c9f28e2b56 | ||
|
|
6afe9c8fda | ||
|
|
f166541ac3 | ||
|
|
7ddf486b37 | ||
|
|
5f130febb8 | ||
|
|
b82577d642 | ||
|
|
97cf028175 | ||
|
|
094f808549 | ||
|
|
18f9e11f1a | ||
|
|
18c35ee86f | ||
|
|
53d1db1da0 | ||
|
|
13e7432b89 | ||
|
|
ddd289d1af | ||
|
|
f9903d850f | ||
|
|
1e3cef6774 | ||
|
|
dcf28e6a28 | ||
|
|
cb47a03880 | ||
|
|
d2a5a58e11 | ||
|
|
88115e4ddb | ||
|
|
0a198e32de | ||
|
|
61388317c1 | ||
|
|
304484c59b | ||
|
|
93ba5ea14f | ||
|
|
8ec828a654 | ||
|
|
b6f681315a | ||
|
|
d53e71021f | ||
|
|
43146fa607 | ||
|
|
f4dab82919 | ||
|
|
f659304227 | ||
|
|
fd493a4451 | ||
|
|
181fa93168 | ||
|
|
d5d9e78983 | ||
|
|
a1a86aa1f7 | ||
|
|
9695969913 | ||
|
|
975c579d44 | ||
|
|
814cc24b69 | ||
|
|
086f9e1f07 | ||
|
|
3f923bb2ce | ||
|
|
803e2db30b | ||
|
|
a282bd4969 | ||
|
|
5bca02bad4 | ||
|
|
4858e72fd9 | ||
|
|
7eab6ba71b | ||
|
|
a909f63fbe | ||
|
|
b46f36195f | ||
|
|
465f1f14a7 | ||
|
|
b8b1e10f34 | ||
|
|
a1634b219a | ||
|
|
6257e2f510 | ||
|
|
65ca754166 | ||
|
|
a0f0505f0d | ||
|
|
be6c4e6061 | ||
|
|
1996e6f4c9 | ||
|
|
671cd42917 | ||
|
|
568a01bf5c | ||
|
|
164abb8c9f | ||
|
|
ed2946feac | ||
|
|
bdd351b372 | ||
|
|
ad5e7d376a | ||
|
|
6e78d8cd9d | ||
|
|
614125f268 | ||
|
|
f41965bfb5 | ||
|
|
85a3cc8d8f | ||
|
|
ea8675d473 | ||
|
|
08a54c1812 | ||
|
|
8c7439b96e | ||
|
|
a9e42a76fa | ||
|
|
1a3b3d3e67 | ||
|
|
759d35e6b5 | ||
|
|
825e85bcc5 | ||
|
|
62165d556c | ||
|
|
78459889d8 | ||
|
|
0fdc6a92f6 | ||
|
|
8586a0167a | ||
|
|
f1d16a45c5 | ||
|
|
2023627d7f | ||
|
|
d5e1958a1f | ||
|
|
f9c58a01d3 | ||
|
|
4500650000 | ||
|
|
5674e671d0 | ||
|
|
0f44c3f69c | ||
|
|
f9069daf03 | ||
|
|
5f58841a3a | ||
|
|
287200e687 | ||
|
|
b653883c0a | ||
|
|
6b8a402353 | ||
|
|
d9b63fae7c | ||
|
|
377cdcabbf | ||
|
|
92a7f40141 | ||
|
|
e06daf437a | ||
|
|
d19bea4af2 | ||
|
|
fbca9f82fd | ||
|
|
04f284d202 | ||
|
|
cfd6112256 | ||
|
|
debc0974a6 | ||
|
|
03bbbea039 | ||
|
|
55af0b1c68 | ||
|
|
c8bfb72104 | ||
|
|
1b8a663001 | ||
|
|
a9abfa2b61 | ||
|
|
092bb0bd6b | ||
|
|
e28e80857b | ||
|
|
905473c739 | ||
|
|
aa0564a1c6 | ||
|
|
2553de0187 | ||
|
|
408dfe62ee | ||
|
|
648ffdf449 | ||
|
|
04c0841ca9 | ||
|
|
43144c4743 | ||
|
|
a778668bcd | ||
|
|
4b131a7090 | ||
|
|
d06a052d54 | ||
|
|
b5115903bf | ||
|
|
afaff175d0 | ||
|
|
4686877c6d | ||
|
|
e5586e8781 | ||
|
|
3acd767ac4 | ||
|
|
5488fc3bc1 | ||
|
|
0965c6cd68 | ||
|
|
db704199dc | ||
|
|
2cc3b7128e | ||
|
|
88b99d30bb | ||
|
|
307a835199 | ||
|
|
f84b55d1ef | ||
|
|
139209353f | ||
|
|
a30058b80f | ||
|
|
53f406dc35 | ||
|
|
2649407f44 | ||
|
|
0a8f627cce | ||
|
|
76d4e88e0c | ||
|
|
d4d2a76f8f | ||
|
|
7d306c6431 | ||
|
|
44bdacac61 | ||
|
|
6bd6e2bdeb | ||
|
|
2908ff3f6b | ||
|
|
f19277b8e2 | ||
|
|
32de75c683 | ||
|
|
164a9e972f | ||
|
|
d747f2c89b | ||
|
|
58662db48e | ||
|
|
078942fc9f | ||
|
|
6dfee99575 | ||
|
|
ad62156d54 | ||
|
|
1689740269 | ||
|
|
50a3b54e34 | ||
|
|
e94a50e9db | ||
|
|
4e0f3cc980 | ||
|
|
2a8cbad122 | ||
|
|
453c45d022 | ||
|
|
4550abbfce | ||
|
|
f2ba1cfb01 | ||
|
|
8c4196faf3 | ||
|
|
b0f4556c0f | ||
|
|
fa5c98549a | ||
|
|
3d12d2037c | ||
|
|
d6522e69ca | ||
|
|
ef1507d000 | ||
|
|
a3d69872e3 | ||
|
|
33b2d38dd0 | ||
|
|
74408bdc77 | ||
|
|
8c4f720fb5 | ||
|
|
8002ad27cb | ||
|
|
1b8a77433a | ||
|
|
a370a11115 | ||
|
|
aa87eff283 | ||
|
|
0d784f46e5 | ||
|
|
c54cfd3609 | ||
|
|
6555994060 | ||
|
|
0893d3cbbe | ||
|
|
90cacb9692 | ||
|
|
69d2902b0a | ||
|
|
c1752cbb83 | ||
|
|
b8e129f2a6 | ||
|
|
cc6fac1688 | ||
|
|
043cb94436 | ||
|
|
bbdf78615e | ||
|
|
e332ff8066 | ||
|
|
26d99ed1c7 | ||
|
|
1da8d8b9db | ||
|
|
bf8f8671d1 | ||
|
|
51cba89682 | ||
|
|
3e8e71f8b6 | ||
|
|
4edd8c80b4 | ||
|
|
fd70a22196 | ||
|
|
56f4deb938 | ||
|
|
9bd7f3f995 | ||
|
|
ee21b00a8d | ||
|
|
1f43678d53 | ||
|
|
20c0e128c0 | ||
|
|
5c3d1d81e6 | ||
|
|
c22b3187a7 | ||
|
|
54f2657870 | ||
|
|
cef7f8a014 | ||
|
|
bf8e50a11d | ||
|
|
6c6cd8bbe0 | ||
|
|
00d6c2a966 | ||
|
|
415cf31aa3 | ||
|
|
f55053bfba | ||
|
|
e24654ada0 | ||
|
|
c4cecba07f | ||
|
|
38cad0b8dc | ||
|
|
052af98dcd | ||
|
|
56d8f5163c | ||
|
|
b6af4f4467 | ||
|
|
a5b08f43ff | ||
|
|
c15f506fd5 | ||
|
|
a2a63460e9 | ||
|
|
2fcea486eb | ||
|
|
5c9d26e39b | ||
|
|
191bc2e50a | ||
|
|
fbb9facda4 | ||
|
|
c6a819e92f | ||
|
|
a50cde69a2 | ||
|
|
e5bd74878e | ||
|
|
dc98b2ea44 | ||
|
|
acf119828f | ||
|
|
a53392f919 | ||
|
|
eee1fb2c75 | ||
|
|
8826ca93b3 | ||
|
|
5049629381 | ||
|
|
92136a5d34 | ||
|
|
075e5015c0 | ||
|
|
46fd4ff6db | ||
|
|
4a4e44bf55 | ||
|
|
22247ad92c | ||
|
|
d0f2bf3181 | ||
|
|
0e4e101101 | ||
|
|
f4b1bd8f6d | ||
|
|
e95cb8eaac | ||
|
|
db1159b651 | ||
|
|
a9a3a07c3b | ||
|
|
06c8339862 | ||
|
|
2394f7833f | ||
|
|
36e19928eb | ||
|
|
abc27e0dc4 | ||
|
|
42d6b9e0cc | ||
|
|
c866b77586 | ||
|
|
5356b81b7f | ||
|
|
30fe163100 | ||
|
|
afb5bbc1b8 | ||
|
|
12a8d0e46f | ||
|
|
09c7d8d458 | ||
|
|
149cc1eb13 | ||
|
|
a5ce987bdb | ||
|
|
2edc732c33 | ||
|
|
fec01d9e69 | ||
|
|
9ca5ef339a | ||
|
|
a8003f2b7c | ||
|
|
25deb4ba95 | ||
|
|
3d3db1d74f | ||
|
|
cabb1602e8 | ||
|
|
25e7661de2 | ||
|
|
cbfab81c35 | ||
|
|
925315ab5c | ||
|
|
5213e79f5c | ||
|
|
7fe6d0ad2b | ||
|
|
cf747bcdec | ||
|
|
d51444d606 | ||
|
|
e35d8169b1 | ||
|
|
a7ac2f7bb0 | ||
|
|
b7496dea9b | ||
|
|
8f45852273 | ||
|
|
48a1a7da23 | ||
|
|
535f771761 | ||
|
|
eda5c1422b | ||
|
|
300f2779e4 | ||
|
|
3be71811ca | ||
|
|
4cfa040f17 | ||
|
|
c5d5092347 | ||
|
|
c3e374f30a | ||
|
|
424b2e0064 | ||
|
|
486b491c4c | ||
|
|
7781dfe49e | ||
|
|
5139dadceb | ||
|
|
96ea240b39 | ||
|
|
8b8522046d | ||
|
|
36d980e520 | ||
|
|
11c16f529e | ||
|
|
58c4a6d9d9 | ||
|
|
9a159fbfad | ||
|
|
791c3ace72 | ||
|
|
ac5d655598 | ||
|
|
a9f438e1e6 | ||
|
|
6b72bdcb0a | ||
|
|
f336c1a7b8 | ||
|
|
47bc72343c | ||
|
|
bf87943da7 | ||
|
|
dbe1e652bc | ||
|
|
8ac79cfc33 | ||
|
|
0180bcf22a | ||
|
|
a7998e0263 | ||
|
|
923e4cce85 | ||
|
|
e2d40d0fcc | ||
|
|
70f6d80677 | ||
|
|
22e30fccbc | ||
|
|
5da07b0a84 | ||
|
|
0ff1b7f8f7 | ||
|
|
71be066937 | ||
|
|
b95c523385 | ||
|
|
589a2ac869 | ||
|
|
68fc014c6d | ||
|
|
56db715a91 | ||
|
|
c2804c42fe | ||
|
|
1655411ccd | ||
|
|
3daba4731c | ||
|
|
45ce1803f8 | ||
|
|
164dee65c3 | ||
|
|
2259512345 | ||
|
|
b8e7a76524 | ||
|
|
72f97e62bb | ||
|
|
607fd066f0 | ||
|
|
69a3b22fa1 | ||
|
|
11d960b2a6 | ||
|
|
ae6d327698 | ||
|
|
bb9a5aea9e | ||
|
|
49739e85a0 | ||
|
|
12950cac21 | ||
|
|
d2da2f1672 | ||
|
|
e1d0d94073 | ||
|
|
b5b01ea635 | ||
|
|
bc684c259c | ||
|
|
da3bc8077d | ||
|
|
6a6094a58d | ||
|
|
8369614b6e | ||
|
|
cac472d4a1 | ||
|
|
6d0ede813f | ||
|
|
a0252127a2 | ||
|
|
00ad01fd79 | ||
|
|
d5c0ad8a1b | ||
|
|
11ed1cebb3 | ||
|
|
fc640be591 | ||
|
|
311954f41b | ||
|
|
bbfa5075f6 | ||
|
|
47fe31aa53 | ||
|
|
18dddc1ae0 | ||
|
|
b38fd8780b | ||
|
|
11eaf9c0a7 | ||
|
|
5d892f86ea | ||
|
|
7f06954425 | ||
|
|
771a052480 | ||
|
|
99b57b321b | ||
|
|
75ef6ccf1e | ||
|
|
de1fbdca71 | ||
|
|
ce827139bb | ||
|
|
0762aa5327 | ||
|
|
81ae92f017 | ||
|
|
84d6e5a987 | ||
|
|
ac5f6f210b | ||
|
|
61fe2404a0 | ||
|
|
db2d8f4d04 | ||
|
|
a9c521eb41 | ||
|
|
a913fd310d | ||
|
|
fbaae8528d | ||
|
|
7d030b56b2 | ||
|
|
0add16049e | ||
|
|
2bb48b0816 | ||
|
|
023ce59d44 | ||
|
|
7822d944b5 | ||
|
|
b510352393 | ||
|
|
d3a217c254 | ||
|
|
2a3427e533 | ||
|
|
7ec02babd5 | ||
|
|
5a4c4f4ab2 | ||
|
|
af095204fa | ||
|
|
70e53bc191 | ||
|
|
7cf59d9f98 | ||
|
|
7147f1990f | ||
|
|
16f7140461 | ||
|
|
6f1b4f29a8 | ||
|
|
93658fc5fd | ||
|
|
736df11454 | ||
|
|
2669f4738a | ||
|
|
aca2c4196a | ||
|
|
9cfd89087b | ||
|
|
6aba6223c7 | ||
|
|
a28b3771a7 | ||
|
|
d02a0f6f01 | ||
|
|
c12d121783 | ||
|
|
b06046fe4c | ||
|
|
6d350ccce0 | ||
|
|
bcd3c1deb2 | ||
|
|
5afea9babf | ||
|
|
a495515e10 | ||
|
|
9a8a249932 | ||
|
|
dfa183551e | ||
|
|
d903925fe7 | ||
|
|
0ccf35ba45 | ||
|
|
a199d98fb7 | ||
|
|
9475a6fa05 | ||
|
|
1d651bbfad | ||
|
|
7fade2ffbd | ||
|
|
f0702e5ff8 | ||
|
|
a7a27a5082 | ||
|
|
10024905a0 | ||
|
|
0c31d1a4c8 | ||
|
|
9ba108bd5b | ||
|
|
721340ec9a | ||
|
|
b9da06dafe | ||
|
|
20f9f267e8 | ||
|
|
a85c4f96e0 | ||
|
|
9337a01e9d | ||
|
|
8758aa4ecf | ||
|
|
42fba91521 | ||
|
|
0c84c7b1cc | ||
|
|
73c9b3598d | ||
|
|
bb6d06f0d1 | ||
|
|
13cb7960bd | ||
|
|
e4c696d966 | ||
|
|
d58f9c333b | ||
|
|
dd270d58bd | ||
|
|
1465e3dfd1 | ||
|
|
1651f25d03 | ||
|
|
e67c9ae3bf | ||
|
|
f2f372b7f5 | ||
|
|
857443e2b5 | ||
|
|
ad449a237e | ||
|
|
3f74b34f06 | ||
|
|
1dbb3b8abc | ||
|
|
5d416006ae | ||
|
|
27b03a52f3 | ||
|
|
1ed5af1da8 | ||
|
|
7278bf3de8 | ||
|
|
d6b3fbb4ad | ||
|
|
3457acc48b | ||
|
|
f18862fb44 | ||
|
|
be55fce9be | ||
|
|
409e2d348e | ||
|
|
8bbf09370c | ||
|
|
714e80abce | ||
|
|
121f143fc0 | ||
|
|
c50e0edcb8 | ||
|
|
d6c4e751f2 | ||
|
|
faadabea14 | ||
|
|
57f7900210 | ||
|
|
5bb2321fe0 | ||
|
|
10324d9ad2 | ||
|
|
02de274e00 | ||
|
|
7d92936e1a | ||
|
|
447d9f844b | ||
|
|
89979da33f | ||
|
|
71f3fa653a | ||
|
|
cd385c2720 | ||
|
|
83ffd626dc | ||
|
|
121ffe61c5 | ||
|
|
710f566553 | ||
|
|
bd57ebf042 | ||
|
|
ae4b67fb56 | ||
|
|
9729d2ae37 | ||
|
|
4dfa085339 | ||
|
|
7137c32f8f | ||
|
|
e30114a4a4 | ||
|
|
a92b3b13e9 | ||
|
|
c4534cd908 | ||
|
|
9f61ac8acc | ||
|
|
74eaf02484 | ||
|
|
7ba4a78fcc | ||
|
|
f3357a17b8 | ||
|
|
8627bc2dd4 | ||
|
|
0c0bc18c94 | ||
|
|
63ee689f21 | ||
|
|
a0e0804f25 | ||
|
|
71b8232076 | ||
|
|
2e2a0dffbc | ||
|
|
6d20f38510 | ||
|
|
9e3e892ac7 | ||
|
|
5fcafc3d1e | ||
|
|
74f8785047 | ||
|
|
b1773e33d5 | ||
|
|
a507c13f8e | ||
|
|
8317839ca5 | ||
|
|
4a1a3a56ba | ||
|
|
f7ffa9cd58 | ||
|
|
60117ec057 | ||
|
|
1c708d21de | ||
|
|
1d94aaa10f | ||
|
|
8814b31805 | ||
|
|
36e185ba63 | ||
|
|
2c8623dbb4 | ||
|
|
e198347886 | ||
|
|
66cf38b0b3 | ||
|
|
11b2adae0c | ||
|
|
61b5602111 | ||
|
|
abcf0ff000 | ||
|
|
9cfc9ac66f | ||
|
|
c3306fe825 | ||
|
|
ad5978b3ca | ||
|
|
4e11ca55fd | ||
|
|
52ba230d31 | ||
|
|
307ad7592b | ||
|
|
06aa068ac7 | ||
|
|
ecc6345436 | ||
|
|
c8fc92d6d5 | ||
|
|
b3f362f229 | ||
|
|
e03363df3d | ||
|
|
d1a222ea87 | ||
|
|
69a2cf06c8 | ||
|
|
c53196e197 | ||
|
|
f9ddc31b77 | ||
|
|
1494ba13e6 | ||
|
|
77c8152cbf | ||
|
|
7bf5cc50b5 | ||
|
|
ada35e428e | ||
|
|
de1f010f01 | ||
|
|
e1e221b6e5 | ||
|
|
9818d2d1e1 | ||
|
|
416aec3db6 | ||
|
|
a02fb001f9 | ||
|
|
f0ed4aff1a | ||
|
|
30916e8eec | ||
|
|
57c96fe05e | ||
|
|
22ffe1a083 | ||
|
|
dc38b1f71e | ||
|
|
4c31e4567a | ||
|
|
1c0bbb92b2 | ||
|
|
62176de6d2 | ||
|
|
55318cca0f | ||
|
|
094a6fccd8 | ||
|
|
42fe864cb4 | ||
|
|
ed322bf59f | ||
|
|
f15a93b19b | ||
|
|
6e1ec08f46 | ||
|
|
e2e2a8e447 | ||
|
|
1788fc8d4a | ||
|
|
12d6d2d177 | ||
|
|
d1a123954b | ||
|
|
8f0bf9810a | ||
|
|
c2576d0879 | ||
|
|
797c1739ce | ||
|
|
a36b721ca6 | ||
|
|
fc50a90f6a | ||
|
|
2b55dd2c4f | ||
|
|
4c8957de63 | ||
|
|
01d83129a2 | ||
|
|
5afd2de87e | ||
|
|
d792cf115b | ||
|
|
e4b91e9dbb | ||
|
|
d590532d7f | ||
|
|
26f393bd99 | ||
|
|
af0545834f | ||
|
|
c492a9735a | ||
|
|
05c75ca617 | ||
|
|
4c7e8f4d54 | ||
|
|
115b523732 | ||
|
|
4767057088 | ||
|
|
33bc1e8b19 | ||
|
|
8845524d01 | ||
|
|
92faf5fd1d | ||
|
|
2775edb3f0 | ||
|
|
98ffc00926 | ||
|
|
9b21f0d6ad | ||
|
|
57ea7f81bb | ||
|
|
274487c5eb | ||
|
|
17634b394b | ||
|
|
2d59c99d31 | ||
|
|
abcbbbed2d | ||
|
|
f1e90575f3 | ||
|
|
a7dbeb36ca | ||
|
|
d50c72a657 | ||
|
|
12b470f00a | ||
|
|
198bc6d939 | ||
|
|
3feb869025 | ||
|
|
f24fac43da | ||
|
|
9c96a73d93 | ||
|
|
45233937b7 | ||
|
|
f822bebfd8 | ||
|
|
0dd02b2ad7 | ||
|
|
9948ff2715 | ||
|
|
0da042dc2b | ||
|
|
5c747a16c4 | ||
|
|
40604e877c | ||
|
|
3dfed64a15 | ||
|
|
e5f91fbba2 | ||
|
|
4700c9df92 | ||
|
|
6f8d6f601a | ||
|
|
8a39707b36 | ||
|
|
e7df875db3 | ||
|
|
cb042713e8 | ||
|
|
7c4e526853 | ||
|
|
3a70cf311b | ||
|
|
5d08b9ac68 | ||
|
|
86f8d5b50a | ||
|
|
d4a3872dd9 | ||
|
|
d6a7a77f6b | ||
|
|
2a839e1432 | ||
|
|
610e1c00c6 | ||
|
|
b1f93935be | ||
|
|
d57acefed4 | ||
|
|
0a7e4c1b93 | ||
|
|
82cc81974f | ||
|
|
fe0d092f58 | ||
|
|
0dd21f2b5e | ||
|
|
f9fad3f4ee | ||
|
|
7021c02d45 | ||
|
|
7aa7f13095 | ||
|
|
d59bcd539e | ||
|
|
d5a6c1e4f6 | ||
|
|
7ef8edda32 | ||
|
|
81c4b72258 | ||
|
|
fe4c8c8251 | ||
|
|
02d4eeffc8 | ||
|
|
80652abc9b | ||
|
|
2169c3497d | ||
|
|
fee52942eb | ||
|
|
868182bc38 | ||
|
|
ac37b47170 | ||
|
|
43f49533e8 | ||
|
|
3379c3d98c | ||
|
|
d605df471c | ||
|
|
8bf4ccf3ed | ||
|
|
392cf15877 | ||
|
|
5eda7f578d | ||
|
|
717cc6fe1a | ||
|
|
9031d2b9eb | ||
|
|
4a69ef3052 | ||
|
|
80ae919dbe | ||
|
|
0802895cd2 | ||
|
|
9fee46207a | ||
|
|
bd900945f7 | ||
|
|
89484efaed | ||
|
|
a9757fb057 | ||
|
|
1c96e0b79e | ||
|
|
c7f0743f48 | ||
|
|
ead69a116a | ||
|
|
0314b37cd8 | ||
|
|
703cd08f01 | ||
|
|
b53947a5bb | ||
|
|
39de3cf21d | ||
|
|
e3cd11cc0a | ||
|
|
5e5037f10d | ||
|
|
9c331239d9 | ||
|
|
36789e9ead | ||
|
|
6ec593c237 | ||
|
|
bbb1dc2ae0 | ||
|
|
385d8dc29b | ||
|
|
fb574434a4 | ||
|
|
7ab3217df0 | ||
|
|
2f9f04b260 | ||
|
|
8385eb2a59 | ||
|
|
99324eeef0 | ||
|
|
ede352256b | ||
|
|
b555b64616 | ||
|
|
824cc816ea | ||
|
|
a1bc2e9771 | ||
|
|
9fc09b32cf | ||
|
|
8ec7a0a407 | ||
|
|
d3166e8571 | ||
|
|
2966979161 | ||
|
|
f4ed47bf95 | ||
|
|
1a75546b27 | ||
|
|
a6b92af875 | ||
|
|
3dc601c470 | ||
|
|
153e977155 | ||
|
|
7d61de63ae | ||
|
|
bcd9e153ba | ||
|
|
19282af059 | ||
|
|
9c0c11e8a0 | ||
|
|
3f7eddb039 | ||
|
|
77ad49333a | ||
|
|
ef5e8326c8 | ||
|
|
86509e6002 | ||
|
|
8667a67695 | ||
|
|
f505d7ab3f | ||
|
|
450dbed820 | ||
|
|
46b86f7e6e | ||
|
|
0ee1f8c1cf | ||
|
|
87bd831aba | ||
|
|
f9f83791d1 | ||
|
|
e75f73bf73 | ||
|
|
bd277162c7 | ||
|
|
f19ee465d2 | ||
|
|
7b85ff7280 | ||
|
|
134cb993c2 | ||
|
|
2cf28f3c01 | ||
|
|
18c0f4718d | ||
|
|
f878b63ee4 | ||
|
|
6eaa01db15 | ||
|
|
1d605073a4 | ||
|
|
fc29c04f82 | ||
|
|
63fc22baab | ||
|
|
6a919b30ac | ||
|
|
3f7ec2e596 | ||
|
|
82d5123c1e | ||
|
|
252961751c | ||
|
|
031627584b | ||
|
|
24a8eebcef | ||
|
|
bf9dd1de7f | ||
|
|
35d55572ac | ||
|
|
c7357a9872 | ||
|
|
27e16a00fa | ||
|
|
919e2e4369 | ||
|
|
96f67efe32 | ||
|
|
607900a4bb | ||
|
|
53c8ab1020 | ||
|
|
81d01e8a5f | ||
|
|
b8b0c7ad0b | ||
|
|
6de12c694a | ||
|
|
25f97910cc | ||
|
|
89bd04c0ac | ||
|
|
195d3b9f03 | ||
|
|
865496f80b | ||
|
|
4a22e54cda | ||
|
|
bd8e2320c3 | ||
|
|
b5661d6302 | ||
|
|
e7813d4ec4 | ||
|
|
d384627fa9 | ||
|
|
1ed6b96dd7 | ||
|
|
53f90218b0 | ||
|
|
112d6a3083 | ||
|
|
1f7cedf5ee | ||
|
|
50cdfe0090 | ||
|
|
c6838d4301 | ||
|
|
4e84764787 | ||
|
|
f521e50fa8 | ||
|
|
09de674b03 | ||
|
|
b1da8aa145 | ||
|
|
58f8f8d381 | ||
|
|
db658adc7a | ||
|
|
01a4f103f5 | ||
|
|
38b3115a15 | ||
|
|
a3eb6e04c1 | ||
|
|
a83fa725e1 | ||
|
|
b591d8c659 | ||
|
|
bc2b8e0063 | ||
|
|
85c7b28364 | ||
|
|
d1a5c343b7 | ||
|
|
7dbf49ab22 | ||
|
|
b375a654e7 | ||
|
|
12a96c520a | ||
|
|
35561edb6e | ||
|
|
6564e7ea01 | ||
|
|
121bce581c | ||
|
|
9d3c6d321e | ||
|
|
23835f8cca | ||
|
|
06c315bcb3 | ||
|
|
e2ac43853f | ||
|
|
b6ddb53ceb | ||
|
|
edea2e7c3a | ||
|
|
2a2ef49b74 | ||
|
|
6585ba2a9c | ||
|
|
fbd0a270b3 | ||
|
|
fc60031ac1 | ||
|
|
85fe197684 | ||
|
|
6489b456dd | ||
|
|
d6ce4b6845 | ||
|
|
57ccd1873d | ||
|
|
b7c0d46170 | ||
|
|
ce035416aa | ||
|
|
4bc92d448c | ||
|
|
9c0df648a6 | ||
|
|
be35dc451c | ||
|
|
5a4dc2b7dc | ||
|
|
9eb27c563c | ||
|
|
405794d4ca | ||
|
|
6ba730d7f8 | ||
|
|
00f257c6f2 | ||
|
|
4f42d865a2 | ||
|
|
4ababe33e4 | ||
|
|
2edb8f8756 | ||
|
|
5305d4dcbf | ||
|
|
2bbbfa849f | ||
|
|
e070134c6a | ||
|
|
c03045c5c7 | ||
|
|
a01274b521 | ||
|
|
bda2222fee | ||
|
|
fbef2aa984 | ||
|
|
b2f9873f84 | ||
|
|
0a60ce9477 | ||
|
|
f5a9381df3 | ||
|
|
84d7dc753a | ||
|
|
f65e9cc22f | ||
|
|
957ae167c5 | ||
|
|
f579954c83 | ||
|
|
1c2e54e5be | ||
|
|
6a60774fa1 | ||
|
|
d3beb2f4e4 | ||
|
|
62aa3bfdb2 | ||
|
|
babd5cc1a0 | ||
|
|
a657aac7dc | ||
|
|
1448ff1309 | ||
|
|
d2c912b2df | ||
|
|
88aff0bc99 | ||
|
|
0b212de447 | ||
|
|
f75213bd81 | ||
|
|
55c2076204 | ||
|
|
705f54257e | ||
|
|
9fa92e14cd | ||
|
|
34f39e8bdf | ||
|
|
87c8f2368b | ||
|
|
20f6f30a31 | ||
|
|
1fe82f70d3 | ||
|
|
7ee93a8b5c | ||
|
|
6b59f79364 | ||
|
|
ffad7890fe | ||
|
|
10491892c4 | ||
|
|
d08a963d1c | ||
|
|
272fbab6f1 | ||
|
|
70aeba7b6a | ||
|
|
3ed03d04df | ||
|
|
b3b8010930 | ||
|
|
30861f49a8 | ||
|
|
5345f30a33 | ||
|
|
de2bf82e09 | ||
|
|
67b20a7147 | ||
|
|
905ed62ee3 | ||
|
|
76bd8083c1 | ||
|
|
d55edf3bfa | ||
|
|
1ad84ec396 | ||
|
|
fc87507012 | ||
|
|
68e15e71be | ||
|
|
bb063ab78a | ||
|
|
6886e4e5ab | ||
|
|
8c08643c65 | ||
|
|
2c2efe2d11 | ||
|
|
db42a93dab | ||
|
|
dcccfc2cce | ||
|
|
96127e9967 | ||
|
|
41bce28d5f | ||
|
|
a00e9a82ae | ||
|
|
95e31fd279 | ||
|
|
fb04347d3b | ||
|
|
f5bcba70da | ||
|
|
d5846c8639 | ||
|
|
664b2e352b | ||
|
|
dcbdc12cc9 | ||
|
|
c87fca3ec1 | ||
|
|
642f6cee75 | ||
|
|
03efa26ff5 | ||
|
|
b6b8ab6c21 | ||
|
|
b60acabb82 | ||
|
|
e7eb81beeb | ||
|
|
e56110543b | ||
|
|
fd0bc21c3e | ||
|
|
3bc5652b27 | ||
|
|
59ef426fbf | ||
|
|
28c6daf916 | ||
|
|
133987b1fb | ||
|
|
cbb93bd8ec | ||
|
|
7223284323 | ||
|
|
8d046de287 | ||
|
|
2845baecd5 | ||
|
|
d5a56f04be | ||
|
|
f120a0c9f9 | ||
|
|
401ee553f4 | ||
|
|
e3c89ac9cd | ||
|
|
b59841cf69 | ||
|
|
cca881ec49 | ||
|
|
dd95ae130f | ||
|
|
185ab93b0d | ||
|
|
bb38f051e6 | ||
|
|
2a05c39adf | ||
|
|
deb5311373 | ||
|
|
bdfebfe0f4 | ||
|
|
3a88299cfe | ||
|
|
748e4cb6b1 | ||
|
|
7c554be4ea | ||
|
|
6011845ee9 | ||
|
|
c184f23621 | ||
|
|
8cec0304ee | ||
|
|
dc51869c61 | ||
|
|
f881d25630 | ||
|
|
683c306f90 | ||
|
|
a985d8c239 | ||
|
|
17608ea6aa | ||
|
|
9280060e05 | ||
|
|
cbcb74e159 | ||
|
|
f5c1518438 | ||
|
|
29e4729c22 | ||
|
|
68f3943e0f | ||
|
|
b59f81abff | ||
|
|
94c5524277 | ||
|
|
5b3211e71c | ||
|
|
5c135d0dec | ||
|
|
a4c96836ac | ||
|
|
ff19b22d72 | ||
|
|
d96d4883ce | ||
|
|
83576d7f57 | ||
|
|
23b926d43e | ||
|
|
9aec1b3a61 | ||
|
|
2d65df38d1 | ||
|
|
6f5b6711ea | ||
|
|
89c888bf55 | ||
|
|
a637ee2278 | ||
|
|
1b270759ef | ||
|
|
b10441a41c | ||
|
|
97de2b6550 | ||
|
|
497a037344 | ||
|
|
cf0af16695 | ||
|
|
62b4030278 | ||
|
|
c047c19145 | ||
|
|
b941732f54 | ||
|
|
e591ff2e74 | ||
|
|
bd2f95c130 | ||
|
|
ad85c5a1e7 | ||
|
|
421eb8a727 | ||
|
|
b7ff441cc0 | ||
|
|
83d867ad46 | ||
|
|
6acba2bcbe | ||
|
|
6a2a10603c | ||
|
|
356907a5cf | ||
|
|
7ab7a188d0 | ||
|
|
ff1a5bfc62 | ||
|
|
522f185baf | ||
|
|
f7b5a4ca7d | ||
|
|
1d30955677 | ||
|
|
d3307e93d3 | ||
|
|
8d9a452e4b | ||
|
|
466eb82845 | ||
|
|
7e562d10a3 | ||
|
|
7b1e792732 | ||
|
|
30b883affe | ||
|
|
20ec4d0342 | ||
|
|
a9f8460086 | ||
|
|
98b3b2b1ab | ||
|
|
e8bc0a789b | ||
|
|
2b6a2c7dde | ||
|
|
c8c8238f9d | ||
|
|
3eaf59021c | ||
|
|
a8bfb6f9c2 | ||
|
|
b783c811db | ||
|
|
59af0e77af | ||
|
|
5d83c8d3a2 | ||
|
|
8f968d0341 | ||
|
|
f93fe30350 | ||
|
|
784ccf97ba | ||
|
|
a0163dafce | ||
|
|
f072cb3cd0 | ||
|
|
e84b31935c | ||
|
|
03b1cf51fd | ||
|
|
9e6dec0bc4 | ||
|
|
04b01cd62c | ||
|
|
a181dd0ebc | ||
|
|
69206fcd4b | ||
|
|
2c94e15746 | ||
|
|
12513ebae0 | ||
|
|
4156a4f15f | ||
|
|
491bb4f174 | ||
|
|
5866fc8ded | ||
|
|
eb4cd78ca6 | ||
|
|
40ce71855a | ||
|
|
9c0d0afd09 | ||
|
|
0f9aa1ef91 | ||
|
|
3ee5ceb9fa | ||
|
|
1bd72a3be5 | ||
|
|
fbd14118bf | ||
|
|
515d98b978 | ||
|
|
789cf6c599 | ||
|
|
0bc82d7270 | ||
|
|
9a7ad75bff | ||
|
|
9fb3e4040b | ||
|
|
070fd1b9da | ||
|
|
dda5b9f260 | ||
|
|
8d84dd4f88 | ||
|
|
f569237a50 | ||
|
|
e265a618d9 | ||
|
|
533343c84f | ||
|
|
260f2e1d94 | ||
|
|
964732590d | ||
|
|
70a2bfe82e | ||
|
|
ba2d969c44 | ||
|
|
d3c78cf4d7 | ||
|
|
34afd891a6 | ||
|
|
d3137775a1 | ||
|
|
e1772026a1 | ||
|
|
d0423254dd | ||
|
|
db0e52ae9d | ||
|
|
4f030f9cd3 | ||
|
|
60fb45eb97 | ||
|
|
43f0688a95 | ||
|
|
8142bdc48f | ||
|
|
89a11e15e7 | ||
|
|
06de542032 | ||
|
|
ecbb61cbf4 | ||
|
|
7f13e3a783 | ||
|
|
c926469b9c | ||
|
|
c30b57a629 | ||
|
|
2f297979a7 | ||
|
|
2437a2769d | ||
|
|
b58b7cad94 | ||
|
|
68148f2a1a | ||
|
|
4897eb0ba2 | ||
|
|
1b43966c48 | ||
|
|
c5f2f11503 | ||
|
|
895443d1b5 | ||
|
|
6a0802e8e6 | ||
|
|
94cfaad7f4 | ||
|
|
ac4a94dd44 | ||
|
|
58bf8614d9 | ||
|
|
3764e50b35 | ||
|
|
3f464d2d9e | ||
|
|
5116d561e1 | ||
|
|
96a7a3b59f | ||
|
|
112d0ffa45 | ||
|
|
25f45827ab | ||
|
|
f322f7c62d | ||
|
|
06351cbbb4 | ||
|
|
8f952d90b0 | ||
|
|
7b205510f9 | ||
|
|
f183fec232 | ||
|
|
91f48b2143 | ||
|
|
f404580256 | ||
|
|
882556d4db | ||
|
|
f8382adbf7 | ||
|
|
80298f94fa | ||
|
|
0f8b489346 | ||
|
|
154694462e | ||
|
|
347317d5d2 | ||
|
|
d40722d2fa | ||
|
|
7b12300f15 | ||
|
|
3c50abffdd | ||
|
|
2eb2ed84ab | ||
|
|
5da10fb769 | ||
|
|
bec883e3ff | ||
|
|
14b41be057 | ||
|
|
aff2acacf9 | ||
|
|
b4d4c0a18f | ||
|
|
3a5f2283ea | ||
|
|
d9109ffafb | ||
|
|
d7e137295a | ||
|
|
6c087ae743 | ||
|
|
88af1033d6 | ||
|
|
e96d2d7667 | ||
|
|
aae7ad9d73 | ||
|
|
23b3d22525 | ||
|
|
603d81dda1 | ||
|
|
a21a52d384 | ||
|
|
219078a5e0 | ||
|
|
3b7a78adda | ||
|
|
0d62594099 | ||
|
|
d38e9090df | ||
|
|
b049805c9b | ||
|
|
0f9b58f2cf | ||
|
|
0f134d557e | ||
|
|
2676e127ae | ||
|
|
270d4f8413 | ||
|
|
2d79cee8cb | ||
|
|
4c9623f50d | ||
|
|
596cf76135 | ||
|
|
a293aa1b79 | ||
|
|
c4eb02c80f | ||
|
|
9c9198ff08 | ||
|
|
83c79d5453 | ||
|
|
88fd000065 | ||
|
|
956d652314 | ||
|
|
9ce2b4d71f | ||
|
|
4e974cb4fc | ||
|
|
d072835796 | ||
|
|
17cf6c4a4d | ||
|
|
fab3e711ff | ||
|
|
4e1463fec2 | ||
|
|
2fc6fe806b | ||
|
|
bdd6769b2d | ||
|
|
1ffee9989f | ||
|
|
34ab442ce9 | ||
|
|
67aa31faad | ||
|
|
6ef78ef7f6 | ||
|
|
daa7544d9c | ||
|
|
34527737bb | ||
|
|
148adebe16 | ||
|
|
bae2a649fd | ||
|
|
90945ebab3 | ||
|
|
4a239a4bff | ||
|
|
5ddaa19914 | ||
|
|
77d752a481 | ||
|
|
29ff51c12a | ||
|
|
c0744899c9 | ||
|
|
c9092ad39c | ||
|
|
b588cae70e | ||
|
|
fb0f188c93 | ||
|
|
b99182c8d4 | ||
|
|
95c65d67f5 | ||
|
|
c603b95ac7 | ||
|
|
13cfa6de0a | ||
|
|
0560c6fd57 | ||
|
|
f24dddae42 | ||
|
|
06b461b061 | ||
|
|
e50a7ba879 | ||
|
|
3b2bce1fc9 | ||
|
|
3fe7e9f678 | ||
|
|
654b661688 | ||
|
|
7f387fb238 | ||
|
|
5d31e5269d | ||
|
|
ff8a6962cd | ||
|
|
10c64dbb55 | ||
|
|
3f7212c660 | ||
|
|
5dc6bace49 | ||
|
|
3cd5918ae6 | ||
|
|
5b75bf16c7 | ||
|
|
0c40f545d4 | ||
|
|
b2fc92daa7 | ||
|
|
0787797961 | ||
|
|
2ba9e27bcf | ||
|
|
4d98dd9ce7 | ||
|
|
087bceccac | ||
|
|
7064697ce5 | ||
|
|
0b99be73b3 | ||
|
|
669cd06dd9 | ||
|
|
2bbc52fcc8 | ||
|
|
577888f3c0 | ||
|
|
1c80f628ff | ||
|
|
10430a00bd | ||
|
|
9f5c274321 | ||
|
|
d075dc44dd | ||
|
|
be8ffbdfcf | ||
|
|
eaf653f3d3 | ||
|
|
e9c28a1ed7 | ||
|
|
ba984c7097 | ||
|
|
ff1f9125ed | ||
|
|
2c82058548 | ||
|
|
16433d2e8e | ||
|
|
345047ed7c | ||
|
|
6343758f9c | ||
|
|
135208806c | ||
|
|
3280de7adf | ||
|
|
db3113c5c8 | ||
|
|
593fb62bf0 | ||
|
|
480834f75b | ||
|
|
3200a6655e | ||
|
|
b90cdced59 | ||
|
|
fc3502b56f | ||
|
|
785adc1ed5 | ||
|
|
e25fc656c9 | ||
|
|
bb3ec56de3 | ||
|
|
785c54e7b0 | ||
|
|
003b43f6fc | ||
|
|
663488b6bd | ||
|
|
e1d6b706f4 | ||
|
|
29615576fb | ||
|
|
f8cea16c03 | ||
|
|
e0187c2a1a | ||
|
|
b76d2fe68a | ||
|
|
ee4f722bf8 | ||
|
|
dce63237f2 | ||
|
|
0b637465d9 | ||
|
|
114f549f5e | ||
|
|
ea330d452d | ||
|
|
eb11a46a73 | ||
|
|
b57e14d65c | ||
|
|
7efa8e75d4 | ||
|
|
7551369abe | ||
|
|
79915bcd11 | ||
|
|
c8d7d14a37 | ||
|
|
c56bc0de98 | ||
|
|
3a9408363b | ||
|
|
21a12c2cdd | ||
|
|
371d0cc1f7 | ||
|
|
23fa92bec0 | ||
|
|
f91e4e5c03 | ||
|
|
6cbe6a4f99 | ||
|
|
491e1d752b | ||
|
|
1542c58466 | ||
|
|
1a3dedece0 | ||
|
|
a58ff00ab1 | ||
|
|
fdb45153fe | ||
|
|
16474bfb40 | ||
|
|
5a6d120a56 | ||
|
|
7a480bb16f | ||
|
|
053531e434 | ||
|
|
b7ab4f25d9 | ||
|
|
73566a2bb2 | ||
|
|
8ccd5ab040 | ||
|
|
5a3db730b9 | ||
|
|
8ad669339e | ||
|
|
a10a952085 | ||
|
|
b37447cac5 | ||
|
|
f2d182a2eb | ||
|
|
6b6c8cdd5f | ||
|
|
5f35e85e86 | ||
|
|
02f1b477df | ||
|
|
9ab8f8f5e0 | ||
|
|
9a255d6453 | ||
|
|
e0ef9e2bb9 | ||
|
|
86627b27f7 | ||
|
|
4e92569d45 | ||
|
|
f7508e3888 | ||
|
|
badfc16df1 | ||
|
|
b584dcf18a | ||
|
|
4c845fb47d | ||
|
|
07c0559d06 | ||
|
|
beb598e4f9 | ||
|
|
c89271b2e4 | ||
|
|
29909666c3 | ||
|
|
566b5cf2ee | ||
|
|
a670318a9f | ||
|
|
84e2407afa | ||
|
|
c4186f13c3 | ||
|
|
4ac7956f68 | ||
|
|
e49ea0123b | ||
|
|
7123d07456 | ||
|
|
2db22087ae | ||
|
|
fa7b2aee9c | ||
|
|
4d70b6fb2d | ||
|
|
e2c3ffb09b | ||
|
|
b4cb22f444 | ||
|
|
5534b13903 | ||
|
|
5b79bd04a7 | ||
|
|
9d8c705fd9 | ||
|
|
310b2171be | ||
|
|
98af0b5d85 | ||
|
|
ca14f95d2c | ||
|
|
1b69b338c0 | ||
|
|
88942e4761 | ||
|
|
efa32a2677 | ||
|
|
dfc420706c | ||
|
|
e2de8a88f7 | ||
|
|
7f4febd6c2 | ||
|
|
93e581dfd0 | ||
|
|
cf513efa78 | ||
|
|
9e8b34427a | ||
|
|
88d0aa1e40 | ||
|
|
9b09eb005f | ||
|
|
4db41b71f3 | ||
|
|
28a421cb1d | ||
|
|
e6768097f4 | ||
|
|
18a04246fa | ||
|
|
f69de3be0d | ||
|
|
650ae620c5 | ||
|
|
6a209cbef6 | ||
|
|
9786bb826d | ||
|
|
9b4c6f348a | ||
|
|
cb6ddb21ec | ||
|
|
0baacca605 | ||
|
|
222d714ec7 | ||
|
|
fd2d89d37b | ||
|
|
6440b608dc | ||
|
|
1937118eab | ||
|
|
bc272d1e4b | ||
|
|
d651f390cd | ||
|
|
ea777f8716 | ||
|
|
eca5200fbd | ||
|
|
0809e9e7a0 | ||
|
|
b66baa3db6 | ||
|
|
6eb77f0d3a | ||
|
|
b20354b3ad | ||
|
|
d6f76c75e1 | ||
|
|
ed4f412f1c | ||
|
|
5bf56e01aa | ||
|
|
5ff5f0b393 | ||
|
|
6559ac11b1 | ||
|
|
02ec546dd6 | ||
|
|
995aa5ed21 | ||
|
|
e28ba4b807 | ||
|
|
d1e3436de5 | ||
|
|
d3ddc9e4aa | ||
|
|
fea9522982 | ||
|
|
fe055d4b36 | ||
|
|
581b894789 | ||
|
|
477655f6e6 | ||
|
|
169d8d21ff | ||
|
|
c5475020fe | ||
|
|
b52ff1249f | ||
|
|
c5798500cb | ||
|
|
67ad3532ec | ||
|
|
5cb96fe7df | ||
|
|
810e8e5855 | ||
|
|
f3bcc648e7 | ||
|
|
3096566333 | ||
|
|
f50c6a4e88 | ||
|
|
ab4ee54855 | ||
|
|
f2d35062d4 | ||
|
|
b69ff46c7e | ||
|
|
117c9873e1 | ||
|
|
17e94fbcb1 | ||
|
|
92f7feb874 | ||
|
|
b70e2bffa3 | ||
|
|
06c43ca285 | ||
|
|
530bec9c64 | ||
|
|
fa10302dd2 | ||
|
|
54faaa87ea | ||
|
|
daba8a85f9 | ||
|
|
ac0f3d6e82 | ||
|
|
da0b6a89ae | ||
|
|
929a68c06d | ||
|
|
a0aa5d01a1 | ||
|
|
dc834cc9d2 | ||
|
|
b58274b8a2 | ||
|
|
a31d00d904 | ||
|
|
2cc1bd85af | ||
|
|
2c5a46bc34 | ||
|
|
f7f8b4804b | ||
|
|
e5bd9a76c7 | ||
|
|
4690b534e0 | ||
|
|
6a7a7996bb | ||
|
|
962ebbaf77 | ||
|
|
f90d56d371 | ||
|
|
445cfd4db3 | ||
|
|
b24d44dc56 | ||
|
|
cd31f8d865 | ||
|
|
970cb3a219 | ||
|
|
f7aabf1b50 | ||
|
|
e38610e521 | ||
|
|
3754f154ee | ||
|
|
29d7812344 | ||
|
|
5fd46175dc | ||
|
|
52a268c38c | ||
|
|
53c3842bc2 | ||
|
|
c4f958e11b | ||
|
|
147440b39b | ||
|
|
baff5ff8c2 | ||
|
|
ea13863221 | ||
|
|
93ca56086e | ||
|
|
11c48a0004 | ||
|
|
b7ea9602f5 | ||
|
|
982dc6a2bd | ||
|
|
74d903acca | ||
|
|
5fef3b0ff1 | ||
|
|
0674893649 | ||
|
|
e8d44447ad | ||
|
|
a24cd4fda0 | ||
|
|
01860674c4 | ||
|
|
987b7ad42d | ||
|
|
21974fe1d3 | ||
|
|
26e1892521 | ||
|
|
a78cd67737 | ||
|
|
5e243ceaeb | ||
|
|
1a0a6f60a7 | ||
|
|
3179c019af | ||
|
|
a8089494fd | ||
|
|
a248ede222 | ||
|
|
0f0ae13ad0 | ||
|
|
773d5d23d5 | ||
|
|
c3982212f9 | ||
|
|
7e6bf6e7a1 | ||
|
|
9fc0135991 | ||
|
|
164be58445 | ||
|
|
1f8461767d | ||
|
|
935f4c23f6 | ||
|
|
4c97406f2b | ||
|
|
fb2a05ff43 | ||
|
|
030d555995 | ||
|
|
56d843c263 | ||
|
|
2dc1fa2474 | ||
|
|
c9451cb604 | ||
|
|
006306b183 | ||
|
|
2cd4936c99 | ||
|
|
44bc540bb5 | ||
|
|
6b411ae212 | ||
|
|
eed285f9de | ||
|
|
c8dd8e5ef4 | ||
|
|
365ef92530 | ||
|
|
5fceb876c4 | ||
|
|
d98063e80e | ||
|
|
45761f8be2 | ||
|
|
4ae4e44506 | ||
|
|
2ada13b1ad | ||
|
|
5d170e9264 | ||
|
|
1b0a64aa46 | ||
|
|
aa8e1c63d5 | ||
|
|
60690c9fc4 | ||
|
|
758b0c9042 | ||
|
|
48d0aa2f6d | ||
|
|
b664edde29 | ||
|
|
e16658b7ec | ||
|
|
d30280ed23 | ||
|
|
9dbd217c59 | ||
|
|
23eac98b3c | ||
|
|
4fffc47e77 | ||
|
|
d65214a234 | ||
|
|
2fb34b00b5 | ||
|
|
f718a391c0 | ||
|
|
ac56ac2b2d | ||
|
|
34c3f563fd | ||
|
|
d2bea6f9e3 | ||
|
|
a09fe1b9ba | ||
|
|
55778b35ff | ||
|
|
8b169f1dac | ||
|
|
d344daf129 | ||
|
|
3411e072ca | ||
|
|
8e36fe9b6f | ||
|
|
0d8bf91699 | ||
|
|
bd507678be | ||
|
|
b6f0e80d54 | ||
|
|
729378ca98 | ||
|
|
220958a87c | ||
|
|
f3f6535aad | ||
|
|
228bc4903f | ||
|
|
38c9abed8b | ||
|
|
66b002458d | ||
|
|
39814cab32 | ||
|
|
180cd4ccda | ||
|
|
284ad026b1 | ||
|
|
afa1bca1e3 | ||
|
|
03adc1f60d | ||
|
|
b319ed58b0 | ||
|
|
8d30b39811 | ||
|
|
1038f7469c | ||
|
|
b9e7708643 | ||
|
|
1e37101930 | ||
|
|
b2772509b4 | ||
|
|
27ec84827c | ||
|
|
852316c5a6 | ||
|
|
e9448005a5 | ||
|
|
bbea62b907 | ||
|
|
13012cfa70 | ||
|
|
8f2681f904 | ||
|
|
f9c75d4878 | ||
|
|
502c1eedaa | ||
|
|
e9f090257c | ||
|
|
af9e5a2d05 | ||
|
|
af8c705ecd | ||
|
|
5763dc1613 | ||
|
|
6b06d4e0af | ||
|
|
bcaa320f36 | ||
|
|
33c78d2228 | ||
|
|
df4a13a08b | ||
|
|
fdec8a9d00 | ||
|
|
0cc1ad2188 | ||
|
|
cdece3879f | ||
|
|
320d8a48d9 | ||
|
|
46609e936e | ||
|
|
b72c6cc9fc | ||
|
|
538a086309 | ||
|
|
c751a4ac06 | ||
|
|
e843d7df0e | ||
|
|
de3a1a0a8e | ||
|
|
57bd365d87 | ||
|
|
b739cbb86b | ||
|
|
4486db912b | ||
|
|
6b07ded119 | ||
|
|
d5699dbf4f | ||
|
|
0fdff26924 | ||
|
|
619f2517a4 | ||
|
|
b91820b7f8 | ||
|
|
4e74560649 | ||
|
|
95244ed6e7 | ||
|
|
f1f39eea3f | ||
|
|
eed5706994 | ||
|
|
1981154f49 | ||
|
|
a8ebf6f575 | ||
|
|
912d2dccfa | ||
|
|
fcb63aed8a | ||
|
|
0e549424e7 | ||
|
|
69d638268b | ||
|
|
18eea9088a | ||
|
|
fb105837ba | ||
|
|
7e52c8e21a | ||
|
|
d068839896 | ||
|
|
e0dee52a2a | ||
|
|
677e20756b | ||
|
|
b2785ff06e | ||
|
|
da82ce81b5 | ||
|
|
70c4f110a4 | ||
|
|
099bd54ff2 | ||
|
|
12c0d9443e | ||
|
|
cbda06fb96 | ||
|
|
b1a242251c | ||
|
|
fce606fc0f | ||
|
|
b606c7b768 | ||
|
|
0a6956b029 | ||
|
|
821cf0e3fd | ||
|
|
11a0418510 | ||
|
|
40781ac013 | ||
|
|
fdfd868953 | ||
|
|
0795975486 | ||
|
|
a49248d29f | ||
|
|
182fef339d | ||
|
|
c74dec7e38 | ||
|
|
b4548ad72d | ||
|
|
e152b07b74 | ||
|
|
0e44a4e664 | ||
|
|
24d7dadfed | ||
|
|
92005b9c02 | ||
|
|
636d487dc8 | ||
|
|
93f51d80d4 | ||
|
|
36da11a0ee | ||
|
|
d23e73b118 | ||
|
|
d692b2c32a | ||
|
|
7e2f8bb408 | ||
|
|
951e39d36c | ||
|
|
aeb3f835ae | ||
|
|
cc3d601836 | ||
|
|
2bbb221fb1 | ||
|
|
195be10050 | ||
|
|
a38618db02 | ||
|
|
efcca15d3f | ||
|
|
a153b628c2 | ||
|
|
f36d86ba6d | ||
|
|
74492a81c7 | ||
|
|
ed13782986 | ||
|
|
8342553214 | ||
|
|
8aa5f5a660 | ||
|
|
b2d9e3f704 | ||
|
|
f744e1f931 | ||
|
|
b85dad0286 | ||
|
|
3851b51d98 | ||
|
|
ff77d3bc22 | ||
|
|
93cfec3c32 | ||
|
|
89560ef87f | ||
|
|
9bc209ba73 | ||
|
|
84e0dc3246 | ||
|
|
4d4d76114d | ||
|
|
86bc5f1350 | ||
|
|
e8f02c083f | ||
|
|
ebb1fcedea | ||
|
|
66f90f8dc1 | ||
|
|
3c778b538a | ||
|
|
35290e146b | ||
|
|
784657a652 | ||
|
|
831efa8893 | ||
|
|
957f428fd5 | ||
|
|
61e5e6bc36 | ||
|
|
eab4a91a9b | ||
|
|
2bba62ca4d | ||
|
|
bcdc83b46d | ||
|
|
92fbdfd06f | ||
|
|
93702e39d4 | ||
|
|
a7fc89c207 | ||
|
|
123a5a2e16 | ||
|
|
ab2f403dd0 | ||
|
|
b9c5e14e2c | ||
|
|
bf65ed6eb8 | ||
|
|
4e79294f97 | ||
|
|
8477e8fac3 | ||
|
|
13ccd2afef | ||
|
|
23b833d171 | ||
|
|
07c49ee4b8 | ||
|
|
07c4bdda7c | ||
|
|
2266d8263c | ||
|
|
160eb48b2b | ||
|
|
0c0efc871c | ||
|
|
7ef5f3b473 | ||
|
|
66ee4afb95 | ||
|
|
93f0b7ae03 | ||
|
|
8210ffcb6c | ||
|
|
e7cbe32601 | ||
|
|
b500ceaf73 | ||
|
|
d3c283ac19 | ||
|
|
607586e0b7 | ||
|
|
2d7913b3be | ||
|
|
b7ffe66219 | ||
|
|
e58410fa99 | ||
|
|
1395e505cd | ||
|
|
42a4c86dca | ||
|
|
c9adc5680c | ||
|
|
08c7b17298 | ||
|
|
5e12382524 | ||
|
|
6cf99527f8 | ||
|
|
3e293f1465 | ||
|
|
0106c58181 | ||
|
|
bd25d8049c | ||
|
|
49cec7fd61 | ||
|
|
d9456f2a23 | ||
|
|
8495750cb8 | ||
|
|
1f501cc1ef | ||
|
|
a922119c41 | ||
|
|
643d85d2cc | ||
|
|
4b1ee0c170 | ||
|
|
3bec467a91 | ||
|
|
600152df23 | ||
|
|
dd84c29a3d | ||
|
|
07468c8786 | ||
|
|
418ba02025 | ||
|
|
abc9360dc6 | ||
|
|
743095b7d8 | ||
|
|
3cf64d1e7e | ||
|
|
e533dcf506 | ||
|
|
eeaf8c7ccd | ||
|
|
7e34dfdae7 | ||
|
|
e4bf51d5bd | ||
|
|
ead61bf9d5 | ||
|
|
b12a205320 | ||
|
|
621541a92f | ||
|
|
ed5734ae25 | ||
|
|
a046dcac5e | ||
|
|
843f93e1ab | ||
|
|
fa9e330fc6 | ||
|
|
b202bfaaa0 | ||
|
|
0eb0ac7dd0 | ||
|
|
d2b83d8357 | ||
|
|
88b65f63d0 | ||
|
|
020ce29cd8 | ||
|
|
801b481beb | ||
|
|
8967ed1601 | ||
|
|
5826fb8e6d | ||
|
|
89351f1a7d | ||
|
|
ae2e4fc2fe | ||
|
|
db199f61da | ||
|
|
44adbd2c75 | ||
|
|
20136ca8b7 | ||
|
|
45d520f913 | ||
|
|
3882130911 | ||
|
|
a6b540737f | ||
|
|
f82065703d | ||
|
|
b423af001d | ||
|
|
b9e77d394b | ||
|
|
57222497ec | ||
|
|
5c5f07c1e7 | ||
|
|
f895d06605 | ||
|
|
bc8f648a91 | ||
|
|
8e57f4df31 | ||
|
|
a08cc5adbb | ||
|
|
595a73fce4 | ||
|
|
dc919e08e8 | ||
|
|
5d1018495f | ||
|
|
ad6fd7a991 | ||
|
|
e022b5959e | ||
|
|
db7f4955a1 | ||
|
|
5c69dd155f | ||
|
|
504f2e8bf4 | ||
|
|
e586dc2924 | ||
|
|
333f918005 | ||
|
|
c8e29033c2 | ||
|
|
d0bd961bde | ||
|
|
006511ee25 | ||
|
|
4ab72146cd | ||
|
|
b60a3fc879 | ||
|
|
a0eeb74957 | ||
|
|
daa0b8741c | ||
|
|
939411300a | ||
|
|
1c312685aa | ||
|
|
316de82f51 | ||
|
|
9068bc5271 | ||
|
|
31a4c9c9d3 | ||
|
|
c1966af2cf | ||
|
|
c665898652 | ||
|
|
f651a660aa | ||
|
|
ba672b51da | ||
|
|
be498c5dd9 | ||
|
|
6e95beccb9 | ||
|
|
c8be839481 | ||
|
|
c7e08813a5 | ||
|
|
d21a6b33ab | ||
|
|
9112cf153e | ||
|
|
3868ac8402 | ||
|
|
3f09010227 | ||
|
|
d6cf82aba3 | ||
|
|
dfe54639b1 | ||
|
|
bc5f5aa538 | ||
|
|
05818e0425 | ||
|
|
7f72a61104 | ||
|
|
8e45d47740 | ||
|
|
71771d1e9b | ||
|
|
aa098e4d0b | ||
|
|
0135e1e3b9 | ||
|
|
ff88c390bb | ||
|
|
d825821a22 | ||
|
|
cbed6ab1bb | ||
|
|
6fc122fa1a | ||
|
|
feba38be36 | ||
|
|
ba85d0bcad | ||
|
|
ad3623dd8d | ||
|
|
8292781045 | ||
|
|
54ec6348fa | ||
|
|
255748bcba | ||
|
|
594eb468df | ||
|
|
960d314e4f | ||
|
|
ed3b50622b | ||
|
|
9f2235c208 | ||
|
|
4ec50bfc41 | ||
|
|
51b67a247a | ||
|
|
01205fd4c0 | ||
|
|
c72808f18b | ||
|
|
6b539a2972 | ||
|
|
2151d21862 | ||
|
|
fb0a4c5d9a | ||
|
|
e690bf387a | ||
|
|
5e155fb081 | ||
|
|
39a6b562cf | ||
|
|
c56b6ddb1c | ||
|
|
2e61ff32ad | ||
|
|
02f6e18adc | ||
|
|
4436e62cf1 | ||
|
|
6e0eb96c61 | ||
|
|
fd68bf7084 | ||
|
|
58cdf97361 | ||
|
|
53dbe36f32 | ||
|
|
081bd07fd1 | ||
|
|
ef1306f703 | ||
|
|
3196967995 | ||
|
|
3875e5e0e5 | ||
|
|
fc8423392f | ||
|
|
f1f6035967 | ||
|
|
ddd21f1644 | ||
|
|
d0a6a35b55 | ||
|
|
e0632f2ce2 | ||
|
|
37e6974afe | ||
|
|
e23e490455 | ||
|
|
f76bb8954b | ||
|
|
d168c7c9dc | ||
|
|
fd9d060c94 | ||
|
|
d8b17795d7 | ||
|
|
ea7b33b0d2 | ||
|
|
8ace0a9ba7 | ||
|
|
98ad93d53e | ||
|
|
38e4ec0b2a | ||
|
|
f083a901fe | ||
|
|
df13ba655c | ||
|
|
7678b25755 | ||
|
|
c87ca4f320 | ||
|
|
3c24a70a1b | ||
|
|
e46db63e06 | ||
|
|
1c57f8d077 | ||
|
|
16cebf0390 | ||
|
|
555bc02665 | ||
|
|
c1bae1ee81 | ||
|
|
f2ed3df3da | ||
|
|
abd678e147 | ||
|
|
6ac5d814fb | ||
|
|
f928899338 | ||
|
|
5a6fd98839 | ||
|
|
072f71dfb7 | ||
|
|
670cee8274 | ||
|
|
9f1be45552 | ||
|
|
f1846ae5ac | ||
|
|
ac19998e5e | ||
|
|
cb7512734d | ||
|
|
3733250b3c | ||
|
|
da3cd8993d | ||
|
|
7690caf020 | ||
|
|
5e335eaead | ||
|
|
d5d82ba344 | ||
|
|
efe2883c5d | ||
|
|
47237c7c3c | ||
|
|
697c769b64 | ||
|
|
94261b1717 | ||
|
|
eaf85a30f9 | ||
|
|
6a88b030ea | ||
|
|
f538416fb3 | ||
|
|
06cd9ef98d | ||
|
|
f3d71f8819 | ||
|
|
b7127c2dc9 | ||
|
|
b2dc5fbd7e | ||
|
|
9e653d6abe | ||
|
|
52c9a7f45d | ||
|
|
ee42c9bfe6 | ||
|
|
e6c3e483a1 | ||
|
|
3a253c6cd7 | ||
|
|
e9c3bbc6d7 | ||
|
|
23d64ac53a | ||
|
|
34f9f20ff4 | ||
|
|
a4a72a79ae | ||
|
|
6ca4d38a01 | ||
|
|
b5c93f176a | ||
|
|
1aaf88098d | ||
|
|
6f447e613d | ||
|
|
dfb7c3b1aa | ||
|
|
b41eb5e1f3 | ||
|
|
9c2d264979 | ||
|
|
b996c3198c | ||
|
|
f879c07c86 | ||
|
|
441e2965ff | ||
|
|
cbe9a03e3c | ||
|
|
4ee7e73d00 | ||
|
|
1cca449726 | ||
|
|
faf7c1c325 | ||
|
|
58288494d6 | ||
|
|
72283dc744 | ||
|
|
b8240b4c18 | ||
|
|
5309da40b7 | ||
|
|
08b90b4720 | ||
|
|
2e890b3838 | ||
|
|
06656fc057 | ||
|
|
574fa67bdc | ||
|
|
e19d7226f8 | ||
|
|
0843fe6c65 | ||
|
|
62a02cd1fe | ||
|
|
949da7792d | ||
|
|
ce724a7e55 | ||
|
|
0a06c80801 | ||
|
|
edc55ade61 | ||
|
|
09e5d9007b | ||
|
|
db926896bd | ||
|
|
ab7b4d5ee9 | ||
|
|
bcf02449b3 | ||
|
|
d48faf35ab | ||
|
|
583bd28a5c | ||
|
|
7e1d8c489b | ||
|
|
de28867374 | ||
|
|
a1aa6cb7c2 | ||
|
|
85e2767dca | ||
|
|
fd48cb6506 | ||
|
|
522659eb59 | ||
|
|
f068efe509 | ||
|
|
726fe416bb | ||
|
|
66fa4f1767 | ||
|
|
d6565f3b99 | ||
|
|
27686ff20b | ||
|
|
a8b865022f | ||
|
|
c1888a8062 | ||
|
|
a95bb0521d | ||
|
|
e2311a145c | ||
|
|
d4e0bab6be | ||
|
|
5b0dc20e4c | ||
|
|
9723c3c21d | ||
|
|
9dc32275ad | ||
|
|
611c11f57b | ||
|
|
763d1f524a | ||
|
|
6428003c3b | ||
|
|
2eac4f93bb | ||
|
|
24adf9cbcb | ||
|
|
c45f581c47 | ||
|
|
ae0c48e6bd | ||
|
|
4ca649154d | ||
|
|
66dd387858 | ||
|
|
9789f5a96a | ||
|
|
cae7b197ec | ||
|
|
f7621b2c6c | ||
|
|
95eb72bfd3 | ||
|
|
7e2d101a46 | ||
|
|
6597881854 | ||
|
|
eaa899df63 | ||
|
|
16ed0bd0c5 | ||
|
|
939187a129 | ||
|
|
4b520c3343 | ||
|
|
51215d480a | ||
|
|
987f0041d3 | ||
|
|
a29de9bf50 | ||
|
|
9bd5831fda | ||
|
|
59f0f2f0fd | ||
|
|
9ae47d37e9 | ||
|
|
2b3ad7f41c | ||
|
|
51db10b18f | ||
|
|
b4b21a446b | ||
|
|
23eced1644 | ||
|
|
7741a6e75d | ||
|
|
d4210db0c9 | ||
|
|
17dde75107 | ||
|
|
1fc3a375df | ||
|
|
64a8471dd5 | ||
|
|
86a8df1c8b | ||
|
|
2eeed2287b | ||
|
|
3d83128f16 | ||
|
|
1c286c3c2f | ||
|
|
2f7beb6744 | ||
|
|
ab0370a0b9 | ||
|
|
3f9a41684a | ||
|
|
dd982acf2c | ||
|
|
fb6a5bc620 | ||
|
|
7641f92cde | ||
|
|
72325fd0a3 | ||
|
|
1b7ed5e2e6 | ||
|
|
86fac272d8 | ||
|
|
865e523ff1 | ||
|
|
9aa2a7ca13 | ||
|
|
e80cbca6b0 | ||
|
|
718a5d4a9e | ||
|
|
9222bec8b1 | ||
|
|
4a965e1b0e | ||
|
|
48e5380e45 | ||
|
|
831418612b | ||
|
|
89ff12309d | ||
|
|
3a4fb6fa4b | ||
|
|
b181503c30 | ||
|
|
887b3dff04 | ||
|
|
3822bd2369 | ||
|
|
4de2c6a421 | ||
|
|
6c4231fd35 | ||
|
|
adfa7aa1fa | ||
|
|
8b6e601405 | ||
|
|
6011911746 | ||
|
|
997119c27a | ||
|
|
2eb6865a27 | ||
|
|
2b2d6673ff | ||
|
|
563c5b7ea0 | ||
|
|
67966b623c | ||
|
|
9fc3fd04be | ||
|
|
238fec244a | ||
|
|
3d71bc9b64 | ||
|
|
3923024d84 | ||
|
|
710b195be1 | ||
|
|
6e408137ee | ||
|
|
9b205cfcfc | ||
|
|
42a80d1b8b | ||
|
|
d6073ac18e | ||
|
|
1c450d46cf | ||
|
|
6b312a8522 | ||
|
|
2b2007ae9e | ||
|
|
e94a34be8c | ||
|
|
c3fb4b1d8e | ||
|
|
e3ca1a7dbe | ||
|
|
2d64d8b444 | ||
|
|
9b98be160a | ||
|
|
9f708ff318 | ||
|
|
4e0ad33d92 | ||
|
|
519285bf38 | ||
|
|
fd1b7b3f22 | ||
|
|
687730a7f5 | ||
|
|
b7821361c3 | ||
|
|
63e1f8fffd | ||
|
|
824612f1b4 | ||
|
|
9482acfdfc | ||
|
|
c75bdd99e4 | ||
|
|
6f34e8f044 | ||
|
|
6d187af643 | ||
|
|
97e9598c79 | ||
|
|
5a6a6de3d7 | ||
|
|
b1a20effde | ||
|
|
ba5ab26f2e | ||
|
|
69f53211a1 | ||
|
|
9dddd1134d | ||
|
|
c5c77d2b0d | ||
|
|
763f94ca80 | ||
|
|
20d637e7b7 | ||
|
|
480b14c8dc | ||
|
|
999db4301a | ||
|
|
92cbc4d516 | ||
|
|
ff9afdb0fe | ||
|
|
3e35b20a02 | ||
|
|
9ea371d6cd | ||
|
|
7a0f9767da | ||
|
|
9d7363f2a7 | ||
|
|
8ee5cf38fd | ||
|
|
a6b788d220 | ||
|
|
ccd87cd9f0 | ||
|
|
b5af87fc6c | ||
|
|
3c9544b023 | ||
|
|
2f65671070 | ||
|
|
8c5436cbed | ||
|
|
548959b50f | ||
|
|
2addb9f99a | ||
|
|
fdd95d1d86 | ||
|
|
66a558ff41 | ||
|
|
733b612eb2 | ||
|
|
991ecce004 | ||
|
|
ad0e30bca5 | ||
|
|
55461188a4 | ||
|
|
5d2405fdef | ||
|
|
e9f1268225 | ||
|
|
803a0ac02a | ||
|
|
bde87d00b9 | ||
|
|
0eae727366 | ||
|
|
3b4c5d54d8 | ||
|
|
4e16bc2f13 | ||
|
|
562ac62f59 | ||
|
|
e7fa2e06f8 | ||
|
|
8123f009d0 | ||
|
|
622aaa9f7d | ||
|
|
7b1ee203ce | ||
|
|
f347e51927 | ||
|
|
9b17af18b3 | ||
|
|
23c7fbfe6b | ||
|
|
035fea676a | ||
|
|
6e1a234d15 | ||
|
|
5b596ea605 | ||
|
|
6bd56460de | ||
|
|
6ef7ea2635 | ||
|
|
f8c00fbaf1 | ||
|
|
d9a42cc4c5 | ||
|
|
fc0bc32814 | ||
|
|
c62504ac92 | ||
|
|
f227e918f9 | ||
|
|
c132dbadce | ||
|
|
b839eb80a1 | ||
|
|
23b03a7f03 | ||
|
|
9196583651 | ||
|
|
fd28252e55 | ||
|
|
94f20e2eb7 | ||
|
|
5ced99a8e7 | ||
|
|
c377e61ff0 | ||
|
|
a6fe0a020a | ||
|
|
bf2ed3d752 | ||
|
|
d17a92eef3 | ||
|
|
1a7be035d3 | ||
|
|
004baaa30f | ||
|
|
ef19268418 | ||
|
|
e82470341f | ||
|
|
88fa42de75 | ||
|
|
432513c3ba | ||
|
|
45370c212b | ||
|
|
e91f660eb1 | ||
|
|
3f3162e57c | ||
|
|
208d1fce58 | ||
|
|
128694213f | ||
|
|
8034ed3473 | ||
|
|
d22069c59e | ||
|
|
5a04d32b39 | ||
|
|
ab65f3a17d | ||
|
|
4e23cbebcf | ||
|
|
63418c1afc | ||
|
|
8ca671761a | ||
|
|
81a5ed9f31 | ||
|
|
528b9d9206 | ||
|
|
1a4c57fac2 | ||
|
|
44a7045732 | ||
|
|
8ac7186185 | ||
|
|
975387f7ae | ||
|
|
d793b5af5e | ||
|
|
5188776224 | ||
|
|
07249c0446 | ||
|
|
188301f403 | ||
|
|
e660721a0c | ||
|
|
e029cc66bc | ||
|
|
e34b5f0119 | ||
|
|
c223364816 | ||
|
|
74fd5844ca | ||
|
|
4ebc86df84 | ||
|
|
8cd03eff58 | ||
|
|
46660a16a0 | ||
|
|
27b097309e | ||
|
|
d0fa1f8e94 | ||
|
|
55e38fea0e | ||
|
|
274ace2898 | ||
|
|
a8cc3709c6 | ||
|
|
a28ab18987 | ||
|
|
048b81373d | ||
|
|
aea1d62ae6 | ||
|
|
601e54000d | ||
|
|
7bdf707dd3 | ||
|
|
4a7e7e9fdb | ||
|
|
bdf3f95346 | ||
|
|
453e9c5da9 | ||
|
|
3a69bd3ef5 | ||
|
|
a69c0f765e | ||
|
|
97d1367764 | ||
|
|
880e21288e | ||
|
|
2ba9762255 | ||
|
|
30f120ee6a | ||
|
|
28a36e20aa | ||
|
|
a8fb4d23f8 | ||
|
|
f37a4ec9c8 | ||
|
|
31ed13094b | ||
|
|
8ccf5b2044 | ||
|
|
247d85b523 | ||
|
|
54688db994 | ||
|
|
8590f5a599 | ||
|
|
289d51c049 | ||
|
|
813eaa867c | ||
|
|
abffb16292 | ||
|
|
50e439f633 | ||
|
|
25eb1415df | ||
|
|
0b28220f2b | ||
|
|
5661740990 | ||
|
|
255c31bddf | ||
|
|
7888fefeea | ||
|
|
0937835802 | ||
|
|
ea806b37ac | ||
|
|
d6614f3149 | ||
|
|
9a50a39848 | ||
|
|
2793e8f327 | ||
|
|
c0bb5c4bf6 | ||
|
|
cc74fc93b4 | ||
|
|
44b39195d6 | ||
|
|
2454110d81 | ||
|
|
ee59e7d45f | ||
|
|
605c319157 | ||
|
|
dc307a1cc0 | ||
|
|
e7981152b2 | ||
|
|
b3eb5c860b | ||
|
|
1c2f7409e3 | ||
|
|
57d41a3f94 | ||
|
|
f9d2bd24eb | ||
|
|
0e7e8eec53 | ||
|
|
9a30a246d8 | ||
|
|
c332499252 | ||
|
|
005f289632 | ||
|
|
3d7553317f | ||
|
|
8e4f6b2ee5 | ||
|
|
d5cad7d3ae | ||
|
|
355e9d4fb5 | ||
|
|
629185e10a | ||
|
|
deeef5fc24 | ||
|
|
b905c07650 | ||
|
|
1ff30034e8 | ||
|
|
c64b59c80c | ||
|
|
9a869bbaf6 | ||
|
|
fe1b54b713 | ||
|
|
cc84dfd50f | ||
|
|
158c7867e7 | ||
|
|
997c39ccd5 | ||
|
|
3bab307904 | ||
|
|
02704e38d3 | ||
|
|
9e5fb29965 | ||
|
|
7dba131d5f | ||
|
|
ce0b771217 | ||
|
|
44bc7aa3d0 | ||
|
|
7f0c88ed3e | ||
|
|
d15508f52c | ||
|
|
b111423b9c | ||
|
|
215a51c4c1 | ||
|
|
1120847f72 | ||
|
|
704323b805 | ||
|
|
10b0e13882 | ||
|
|
901f0709c5 | ||
|
|
0d6165e481 | ||
|
|
6583eed6b2 | ||
|
|
a9ca70ad4a | ||
|
|
ab5b75eb01 | ||
|
|
cc060a283d | ||
|
|
28db83e17b | ||
|
|
dbb1f86455 | ||
|
|
02f7c555af | ||
|
|
d982b38f76 | ||
|
|
bc2e4b952e | ||
|
|
afdc0ebfd7 | ||
|
|
1079b18ff7 | ||
|
|
8cb1061c11 | ||
|
|
2bacd0180d | ||
|
|
ddf9bc2335 | ||
|
|
a1afd940e3 | ||
|
|
8bb76201c0 | ||
|
|
ede71d398c | ||
|
|
0c73a637f1 | ||
|
|
37700f2d98 | ||
|
|
0ec695f9e4 | ||
|
|
7ffd21dbc8 | ||
|
|
48b3920656 | ||
|
|
63d91af555 | ||
|
|
a96c3bc885 | ||
|
|
77e1ae3d70 | ||
|
|
9cc8d90865 | ||
|
|
a6c621ef7f | ||
|
|
328289099a | ||
|
|
22ffd5f490 | ||
|
|
81708bb1e6 | ||
|
|
c81e9d8d1f | ||
|
|
ff3ab5fcca | ||
|
|
1d1cae8e4d | ||
|
|
8c781a6a44 | ||
|
|
93a4bec06b | ||
|
|
c93f57efd6 | ||
|
|
0e4f93c5cf | ||
|
|
5b3fedebfe | ||
|
|
219751bb21 | ||
|
|
bb7772a364 | ||
|
|
3c8fc37c56 | ||
|
|
39805b09e5 | ||
|
|
63b01199fe | ||
|
|
b09bae3443 | ||
|
|
de6fb98bed | ||
|
|
433605e282 | ||
|
|
a843e64fc2 | ||
|
|
71611d2dec | ||
|
|
abf48e8a5d | ||
|
|
ac5ea0cd4d | ||
|
|
a46fcacedd | ||
|
|
df947fc933 | ||
|
|
91d49cfe9f | ||
|
|
19d15f83db | ||
|
|
cde61cc518 | ||
|
|
acd829a7a0 | ||
|
|
4aa5dac768 | ||
|
|
08b59b5cc5 | ||
|
|
6b900e28cd | ||
|
|
5ca21ee398 | ||
|
|
953e30814a | ||
|
|
a65344cf25 | ||
|
|
7fb8b4191f | ||
|
|
fc8aec7324 | ||
|
|
c309aac8f5 | ||
|
|
1e37ec727d | ||
|
|
ae36bae59d | ||
|
|
e663beebf0 | ||
|
|
9d0292e9e1 | ||
|
|
fe27bb7982 | ||
|
|
d603a9cbb5 | ||
|
|
c1fc22e746 | ||
|
|
85d3710924 | ||
|
|
a0324245f1 | ||
|
|
ce8e9dc690 | ||
|
|
32ca7efbeb | ||
|
|
27520eb169 | ||
|
|
9843adb4f1 | ||
|
|
8e8d474ae8 | ||
|
|
6151ea1c4d | ||
|
|
d969025f87 | ||
|
|
18e1cb9c92 | ||
|
|
e7ceb9e8f5 | ||
|
|
3a4675c8c3 | ||
|
|
5ce0f216cf | ||
|
|
688f150463 | ||
|
|
00ccb8d4f1 | ||
|
|
e70b91aaef | ||
|
|
8b90ac2b1a | ||
|
|
f085baa77d | ||
|
|
fa4de05c14 | ||
|
|
dde12b492b | ||
|
|
096d98c3d9 | ||
|
|
147cae9ed8 | ||
|
|
c63709014b | ||
|
|
9b307799ce | ||
|
|
78e36779cf | ||
|
|
90ae35e2e4 | ||
|
|
b96e30e66c | ||
|
|
0af0df7423 | ||
|
|
0883d324d9 | ||
|
|
77597e6a16 | ||
|
|
eae6b36d03 | ||
|
|
c4bc7c41b1 | ||
|
|
c79ddd6fc4 | ||
|
|
ae58fb8821 | ||
|
|
569c1d1163 | ||
|
|
12fe0932c4 | ||
|
|
72e3e236de | ||
|
|
ab59b238b3 | ||
|
|
bed9570e48 | ||
|
|
c6bf67f446 | ||
|
|
5ee186b8e5 | ||
|
|
94817b557c | ||
|
|
26e1496075 | ||
|
|
92fca8ae74 | ||
|
|
7fa5b8401d | ||
|
|
0eac0402e1 | ||
|
|
c71c729bc2 | ||
|
|
e459f114cd | ||
|
|
982a7e86a8 | ||
|
|
94916749c5 | ||
|
|
5ce5f87a26 | ||
|
|
1d2ae46ddc | ||
|
|
71ac331f90 | ||
|
|
47cc95fc9f | ||
|
|
3feb632eb4 | ||
|
|
236497e331 | ||
|
|
a38dc497b2 | ||
|
|
28ed52fa94 | ||
|
|
e995b95c94 | ||
|
|
8379cce209 | ||
|
|
3c6b798522 | ||
|
|
c18770a61a | ||
|
|
6352448b72 | ||
|
|
fb6cce487f | ||
|
|
3079cc4167 | ||
|
|
27ef8b1eb7 | ||
|
|
c00435d72b | ||
|
|
d0e67cce75 | ||
|
|
6ec315e540 | ||
|
|
cf4e6f909c | ||
|
|
b3a99166fd | ||
|
|
107008331e | ||
|
|
accd9f9044 | ||
|
|
17294ae5e5 | ||
|
|
3c3a9b765a | ||
|
|
526c5bcdad | ||
|
|
a1bbe75d43 | ||
|
|
572a311639 | ||
|
|
cb5d6f6e3a | ||
|
|
e3cabb555d | ||
|
|
f193f56564 | ||
|
|
c0a91ab548 | ||
|
|
26e510bf28 | ||
|
|
98e73ed67a | ||
|
|
7f3de3ca4a | ||
|
|
189cb3a7be | ||
|
|
1d0ed95a54 | ||
|
|
5dcfdbe51d | ||
|
|
f2f1d7fe72 | ||
|
|
ae533cadef | ||
|
|
58f6aab637 | ||
|
|
b816009db0 | ||
|
|
a84dee1be1 | ||
|
|
30e4ddbf10 | ||
|
|
296a5b6707 | ||
|
|
b0520dcb59 | ||
|
|
f42967ed86 | ||
|
|
966675c8e3 | ||
|
|
f68df1624b | ||
|
|
42cade808b | ||
|
|
d59211982b | ||
|
|
7aaa10680d | ||
|
|
dcf35dd25f | ||
|
|
e70322676c | ||
|
|
b3f43ab938 | ||
|
|
bbc4468908 | ||
|
|
4de7f55f2f | ||
|
|
def23e4ee2 | ||
|
|
55befe396a | ||
|
|
483fddccf9 | ||
|
|
c4495ad8f2 | ||
|
|
05aed255db | ||
|
|
0f1326b2bd | ||
|
|
1668489b00 | ||
|
|
7dd292cbb3 | ||
|
|
c0578031b5 | ||
|
|
a5b64b6a41 | ||
|
|
b722e7eb7e | ||
|
|
6d19a8bdb5 | ||
|
|
f09ddd2983 | ||
|
|
a6839fd238 | ||
|
|
f3063f98d3 | ||
|
|
70674d3c58 | ||
|
|
3829aba869 |
17
.devcontainer-scripts/postcreate.sh
Normal file
17
.devcontainer-scripts/postcreate.sh
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
cd /workspace
|
||||||
|
|
||||||
|
# Get the files into the volume without a bind mount
|
||||||
|
if [ ! -d ".git" ]; then
|
||||||
|
git clone https://github.com/mudler/LocalAI.git .
|
||||||
|
else
|
||||||
|
git fetch
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Standard Post-Create script completed."
|
||||||
|
|
||||||
|
if [ -f "/devcontainer-customization/postcreate.sh" ]; then
|
||||||
|
echo "Launching customization postcreate.sh"
|
||||||
|
bash "/devcontainer-customization/postcreate.sh"
|
||||||
|
fi
|
||||||
13
.devcontainer-scripts/poststart.sh
Normal file
13
.devcontainer-scripts/poststart.sh
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
cd /workspace
|
||||||
|
|
||||||
|
# Ensures generated source files are present upon load
|
||||||
|
make prepare
|
||||||
|
|
||||||
|
echo "Standard Post-Start script completed."
|
||||||
|
|
||||||
|
if [ -f "/devcontainer-customization/poststart.sh" ]; then
|
||||||
|
echo "Launching customization poststart.sh"
|
||||||
|
bash "/devcontainer-customization/poststart.sh"
|
||||||
|
fi
|
||||||
55
.devcontainer-scripts/utils.sh
Normal file
55
.devcontainer-scripts/utils.sh
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# This file contains some really simple functions that are useful when building up customization scripts.
|
||||||
|
|
||||||
|
|
||||||
|
# Checks if the git config has a user registered - and sets it up if not.
|
||||||
|
#
|
||||||
|
# Param 1: name
|
||||||
|
# Param 2: email
|
||||||
|
#
|
||||||
|
config_user() {
|
||||||
|
echo "Configuring git for $1 <$2>"
|
||||||
|
local gcn=$(git config --global user.name)
|
||||||
|
if [ -z "${gcn}" ]; then
|
||||||
|
echo "Setting up git user / remote"
|
||||||
|
git config --global user.name "$1"
|
||||||
|
git config --global user.email "$2"
|
||||||
|
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Checks if the git remote is configured - and sets it up if not. Fetches either way.
|
||||||
|
#
|
||||||
|
# Param 1: remote name
|
||||||
|
# Param 2: remote url
|
||||||
|
#
|
||||||
|
config_remote() {
|
||||||
|
echo "Adding git remote and fetching $2 as $1"
|
||||||
|
local gr=$(git remote -v | grep $1)
|
||||||
|
if [ -z "${gr}" ]; then
|
||||||
|
git remote add $1 $2
|
||||||
|
fi
|
||||||
|
git fetch $1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Setup special .ssh files
|
||||||
|
# Prints out lines of text to make things pretty
|
||||||
|
# Param 1: bash array, filenames relative to the customization directory that should be copied to ~/.ssh
|
||||||
|
setup_ssh() {
|
||||||
|
echo "starting ~/.ssh directory setup..."
|
||||||
|
mkdir -p "${HOME}.ssh"
|
||||||
|
chmod 0700 "${HOME}/.ssh"
|
||||||
|
echo "-----"
|
||||||
|
local files=("$@")
|
||||||
|
for file in "${files[@]}" ; do
|
||||||
|
local cfile="/devcontainer-customization/${file}"
|
||||||
|
local hfile="${HOME}/.ssh/${file}"
|
||||||
|
if [ ! -f "${hfile}" ]; then
|
||||||
|
echo "copying \"${file}\""
|
||||||
|
cp "${cfile}" "${hfile}"
|
||||||
|
chmod 600 "${hfile}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
echo "~/.ssh directory setup complete!"
|
||||||
|
}
|
||||||
25
.devcontainer/customization/README.md
Normal file
25
.devcontainer/customization/README.md
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
Place any additional resources your environment requires in this directory
|
||||||
|
|
||||||
|
Script hooks are currently called for:
|
||||||
|
`postcreate.sh` and `poststart.sh`
|
||||||
|
|
||||||
|
If files with those names exist here, they will be called at the end of the normal script.
|
||||||
|
|
||||||
|
This is a good place to set things like `git config --global user.name` are set - and to handle any other files that are mounted via this directory.
|
||||||
|
|
||||||
|
To assist in doing so, `source /.devcontainer-scripts/utils.sh` will provide utility functions that may be useful - for example:
|
||||||
|
|
||||||
|
```
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
source "/.devcontainer-scripts/utils.sh"
|
||||||
|
|
||||||
|
sshfiles=("config", "key.pub")
|
||||||
|
|
||||||
|
setup_ssh "${sshfiles[@]}"
|
||||||
|
|
||||||
|
config_user "YOUR NAME" "YOUR EMAIL"
|
||||||
|
|
||||||
|
config_remote "REMOTE NAME" "REMOTE URL"
|
||||||
|
|
||||||
|
```
|
||||||
24
.devcontainer/devcontainer.json
Normal file
24
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://raw.githubusercontent.com/devcontainers/spec/main/schemas/devContainer.schema.json",
|
||||||
|
"name": "LocalAI",
|
||||||
|
"workspaceFolder": "/workspace",
|
||||||
|
"dockerComposeFile": [ "./docker-compose-devcontainer.yml" ],
|
||||||
|
"service": "api",
|
||||||
|
"shutdownAction": "stopCompose",
|
||||||
|
"customizations": {
|
||||||
|
"vscode": {
|
||||||
|
"extensions": [
|
||||||
|
"golang.go",
|
||||||
|
"ms-vscode.makefile-tools",
|
||||||
|
"ms-azuretools.vscode-docker",
|
||||||
|
"ms-python.python",
|
||||||
|
"ms-python.debugpy",
|
||||||
|
"wayou.vscode-todo-highlight",
|
||||||
|
"waderyan.gitblame"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"forwardPorts": [8080, 3000],
|
||||||
|
"postCreateCommand": "bash /.devcontainer-scripts/postcreate.sh",
|
||||||
|
"postStartCommand": "bash /.devcontainer-scripts/poststart.sh"
|
||||||
|
}
|
||||||
44
.devcontainer/docker-compose-devcontainer.yml
Normal file
44
.devcontainer/docker-compose-devcontainer.yml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
services:
|
||||||
|
api:
|
||||||
|
build:
|
||||||
|
context: ..
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
target: devcontainer
|
||||||
|
env_file:
|
||||||
|
- ../.env
|
||||||
|
ports:
|
||||||
|
- 8080:8080
|
||||||
|
volumes:
|
||||||
|
- localai_workspace:/workspace
|
||||||
|
- ../models:/host-models
|
||||||
|
- ./customization:/devcontainer-customization
|
||||||
|
command: /bin/sh -c "while sleep 1000; do :; done"
|
||||||
|
cap_add:
|
||||||
|
- SYS_PTRACE
|
||||||
|
security_opt:
|
||||||
|
- seccomp:unconfined
|
||||||
|
prometheus:
|
||||||
|
image: prom/prometheus
|
||||||
|
container_name: prometheus
|
||||||
|
command:
|
||||||
|
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||||
|
ports:
|
||||||
|
- 9090:9090
|
||||||
|
restart: unless-stopped
|
||||||
|
volumes:
|
||||||
|
- ./prometheus:/etc/prometheus
|
||||||
|
- prom_data:/prometheus
|
||||||
|
grafana:
|
||||||
|
image: grafana/grafana
|
||||||
|
container_name: grafana
|
||||||
|
ports:
|
||||||
|
- 3000:3000
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
- GF_SECURITY_ADMIN_USER=admin
|
||||||
|
- GF_SECURITY_ADMIN_PASSWORD=grafana
|
||||||
|
volumes:
|
||||||
|
- ./grafana:/etc/grafana/provisioning/datasources
|
||||||
|
volumes:
|
||||||
|
prom_data:
|
||||||
|
localai_workspace:
|
||||||
10
.devcontainer/grafana/datasource.yml
Normal file
10
.devcontainer/grafana/datasource.yml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
|
||||||
|
apiVersion: 1
|
||||||
|
|
||||||
|
datasources:
|
||||||
|
- name: Prometheus
|
||||||
|
type: prometheus
|
||||||
|
url: http://prometheus:9090
|
||||||
|
isDefault: true
|
||||||
|
access: proxy
|
||||||
|
editable: true
|
||||||
21
.devcontainer/prometheus/prometheus.yml
Normal file
21
.devcontainer/prometheus/prometheus.yml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
global:
|
||||||
|
scrape_interval: 15s
|
||||||
|
scrape_timeout: 10s
|
||||||
|
evaluation_interval: 15s
|
||||||
|
alerting:
|
||||||
|
alertmanagers:
|
||||||
|
- static_configs:
|
||||||
|
- targets: []
|
||||||
|
scheme: http
|
||||||
|
timeout: 10s
|
||||||
|
api_version: v1
|
||||||
|
scrape_configs:
|
||||||
|
- job_name: prometheus
|
||||||
|
honor_timestamps: true
|
||||||
|
scrape_interval: 15s
|
||||||
|
scrape_timeout: 10s
|
||||||
|
metrics_path: /metrics
|
||||||
|
scheme: http
|
||||||
|
static_configs:
|
||||||
|
- targets:
|
||||||
|
- localhost:9090
|
||||||
@@ -1,6 +1,19 @@
|
|||||||
.git
|
|
||||||
.idea
|
.idea
|
||||||
|
.github
|
||||||
|
.vscode
|
||||||
|
.devcontainer
|
||||||
models
|
models
|
||||||
|
backends
|
||||||
examples/chatbot-ui/models
|
examples/chatbot-ui/models
|
||||||
|
backend/go/image/stablediffusion-ggml/build/
|
||||||
examples/rwkv/models
|
examples/rwkv/models
|
||||||
examples/**/models
|
examples/**/models
|
||||||
|
Dockerfile*
|
||||||
|
__pycache__
|
||||||
|
|
||||||
|
# SonarQube
|
||||||
|
.scannerwork
|
||||||
|
|
||||||
|
# backend virtual environments
|
||||||
|
**/venv
|
||||||
|
backend/python/**/source
|
||||||
|
|||||||
31
.editorconfig
Normal file
31
.editorconfig
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
|
||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
|
end_of_line = lf
|
||||||
|
charset = utf-8
|
||||||
|
trim_trailing_whitespace = true
|
||||||
|
insert_final_newline = true
|
||||||
|
|
||||||
|
[*.go]
|
||||||
|
indent_style = tab
|
||||||
|
|
||||||
|
[Makefile]
|
||||||
|
indent_style = tab
|
||||||
|
|
||||||
|
[*.proto]
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[*.py]
|
||||||
|
indent_size = 4
|
||||||
|
|
||||||
|
[*.js]
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[*.yaml]
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[*.md]
|
||||||
|
trim_trailing_whitespace = false
|
||||||
97
.env
97
.env
@@ -1,43 +1,102 @@
|
|||||||
## Set number of threads.
|
## Set number of threads.
|
||||||
## Note: prefer the number of physical cores. Overbooking the CPU degrades performance notably.
|
## Note: prefer the number of physical cores. Overbooking the CPU degrades performance notably.
|
||||||
# THREADS=14
|
# LOCALAI_THREADS=14
|
||||||
|
|
||||||
## Specify a different bind address (defaults to ":8080")
|
## Specify a different bind address (defaults to ":8080")
|
||||||
# ADDRESS=127.0.0.1:8080
|
# LOCALAI_ADDRESS=127.0.0.1:8080
|
||||||
|
|
||||||
## Default models context size
|
## Default models context size
|
||||||
# CONTEXT_SIZE=512
|
# LOCALAI_CONTEXT_SIZE=512
|
||||||
#
|
#
|
||||||
## Define galleries.
|
## Define galleries.
|
||||||
## models will to install will be visible in `/models/available`
|
## models will to install will be visible in `/models/available`
|
||||||
# GALLERIES=[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}]
|
# LOCALAI_GALLERIES=[{"name":"localai", "url":"github:mudler/LocalAI/gallery/index.yaml@master"}]
|
||||||
|
|
||||||
## CORS settings
|
## CORS settings
|
||||||
# CORS=true
|
# LOCALAI_CORS=true
|
||||||
# CORS_ALLOW_ORIGINS=*
|
# LOCALAI_CORS_ALLOW_ORIGINS=*
|
||||||
|
|
||||||
## Default path for models
|
## Default path for models
|
||||||
#
|
#
|
||||||
MODELS_PATH=/models
|
# LOCALAI_MODELS_PATH=/models
|
||||||
|
|
||||||
## Enable debug mode
|
## Enable debug mode
|
||||||
# DEBUG=true
|
# LOCALAI_LOG_LEVEL=debug
|
||||||
|
|
||||||
|
## Disables COMPEL (Diffusers)
|
||||||
|
# COMPEL=0
|
||||||
|
|
||||||
|
## Enable/Disable single backend (useful if only one GPU is available)
|
||||||
|
# LOCALAI_SINGLE_ACTIVE_BACKEND=true
|
||||||
|
|
||||||
|
# Forces shutdown of the backends if busy (only if LOCALAI_SINGLE_ACTIVE_BACKEND is set)
|
||||||
|
# LOCALAI_FORCE_BACKEND_SHUTDOWN=true
|
||||||
|
|
||||||
## Specify a build type. Available: cublas, openblas, clblas.
|
## Specify a build type. Available: cublas, openblas, clblas.
|
||||||
|
## cuBLAS: This is a GPU-accelerated version of the complete standard BLAS (Basic Linear Algebra Subprograms) library. It's provided by Nvidia and is part of their CUDA toolkit.
|
||||||
|
## OpenBLAS: This is an open-source implementation of the BLAS library that aims to provide highly optimized code for various platforms. It includes support for multi-threading and can be compiled to use hardware-specific features for additional performance. OpenBLAS can run on many kinds of hardware, including CPUs from Intel, AMD, and ARM.
|
||||||
|
## clBLAS: This is an open-source implementation of the BLAS library that uses OpenCL, a framework for writing programs that execute across heterogeneous platforms consisting of CPUs, GPUs, and other processors. clBLAS is designed to take advantage of the parallel computing power of GPUs but can also run on any hardware that supports OpenCL. This includes hardware from different vendors like Nvidia, AMD, and Intel.
|
||||||
# BUILD_TYPE=openblas
|
# BUILD_TYPE=openblas
|
||||||
|
|
||||||
## Uncomment and set to false to disable rebuilding from source
|
## Uncomment and set to true to enable rebuilding from source
|
||||||
# REBUILD=false
|
# REBUILD=true
|
||||||
|
|
||||||
## Enable go tags, available: stablediffusion, tts
|
|
||||||
## stablediffusion: image generation with stablediffusion
|
|
||||||
## tts: enables text-to-speech with go-piper
|
|
||||||
## (requires REBUILD=true)
|
|
||||||
#
|
|
||||||
# GO_TAGS=stablediffusion
|
|
||||||
|
|
||||||
## Path where to store generated images
|
## Path where to store generated images
|
||||||
# IMAGE_PATH=/tmp
|
# LOCALAI_IMAGE_PATH=/tmp/generated/images
|
||||||
|
|
||||||
## Specify a default upload limit in MB (whisper)
|
## Specify a default upload limit in MB (whisper)
|
||||||
# UPLOAD_LIMIT
|
# LOCALAI_UPLOAD_LIMIT=15
|
||||||
|
|
||||||
|
## List of external GRPC backends (note on the container image this variable is already set to use extra backends available in extra/)
|
||||||
|
# LOCALAI_EXTERNAL_GRPC_BACKENDS=my-backend:127.0.0.1:9000,my-backend2:/usr/bin/backend.py
|
||||||
|
|
||||||
|
### Advanced settings ###
|
||||||
|
### Those are not really used by LocalAI, but from components in the stack ###
|
||||||
|
##
|
||||||
|
### Preload libraries
|
||||||
|
# LD_PRELOAD=
|
||||||
|
|
||||||
|
### Huggingface cache for models
|
||||||
|
# HUGGINGFACE_HUB_CACHE=/usr/local/huggingface
|
||||||
|
|
||||||
|
### Python backends GRPC max workers
|
||||||
|
### Default number of workers for GRPC Python backends.
|
||||||
|
### This actually controls wether a backend can process multiple requests or not.
|
||||||
|
# PYTHON_GRPC_MAX_WORKERS=1
|
||||||
|
|
||||||
|
### Define the number of parallel LLAMA.cpp workers (Defaults to 1)
|
||||||
|
# LLAMACPP_PARALLEL=1
|
||||||
|
|
||||||
|
### Define a list of GRPC Servers for llama-cpp workers to distribute the load
|
||||||
|
# https://github.com/ggerganov/llama.cpp/pull/6829
|
||||||
|
# https://github.com/ggerganov/llama.cpp/blob/master/tools/rpc/README.md
|
||||||
|
# LLAMACPP_GRPC_SERVERS=""
|
||||||
|
|
||||||
|
### Enable to run parallel requests
|
||||||
|
# LOCALAI_PARALLEL_REQUESTS=true
|
||||||
|
|
||||||
|
# Enable to allow p2p mode
|
||||||
|
# LOCALAI_P2P=true
|
||||||
|
|
||||||
|
# Enable to use federated mode
|
||||||
|
# LOCALAI_FEDERATED=true
|
||||||
|
|
||||||
|
# Enable to start federation server
|
||||||
|
# FEDERATED_SERVER=true
|
||||||
|
|
||||||
|
# Define to use federation token
|
||||||
|
# TOKEN=""
|
||||||
|
|
||||||
|
### Watchdog settings
|
||||||
|
###
|
||||||
|
# Enables watchdog to kill backends that are inactive for too much time
|
||||||
|
# LOCALAI_WATCHDOG_IDLE=true
|
||||||
|
#
|
||||||
|
# Time in duration format (e.g. 1h30m) after which a backend is considered idle
|
||||||
|
# LOCALAI_WATCHDOG_IDLE_TIMEOUT=5m
|
||||||
|
#
|
||||||
|
# Enables watchdog to kill backends that are busy for too much time
|
||||||
|
# LOCALAI_WATCHDOG_BUSY=true
|
||||||
|
#
|
||||||
|
# Time in duration format (e.g. 1h30m) after which a backend is considered busy
|
||||||
|
# LOCALAI_WATCHDOG_BUSY_TIMEOUT=5m
|
||||||
|
|||||||
2
.gitattributes
vendored
Normal file
2
.gitattributes
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
*.sh text eol=lf
|
||||||
|
backend/cpp/llama/*.hpp linguist-vendored
|
||||||
5
.github/FUNDING.yml
vendored
Normal file
5
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# These are supported funding model platforms
|
||||||
|
|
||||||
|
github: [mudler]
|
||||||
|
custom:
|
||||||
|
- https://www.buymeacoffee.com/mudler
|
||||||
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -2,9 +2,7 @@
|
|||||||
name: Bug report
|
name: Bug report
|
||||||
about: Create a report to help us improve
|
about: Create a report to help us improve
|
||||||
title: ''
|
title: ''
|
||||||
labels: bug
|
labels: bug, unconfirmed, up-for-grabs
|
||||||
assignees: mudler
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<!-- Thanks for helping us to improve LocalAI! We welcome all bug reports. Please fill out each area of the template so we can better help you. Comments like this will be hidden when you post but you can delete them if you wish. -->
|
<!-- Thanks for helping us to improve LocalAI! We welcome all bug reports. Please fill out each area of the template so we can better help you. Comments like this will be hidden when you post but you can delete them if you wish. -->
|
||||||
|
|||||||
4
.github/ISSUE_TEMPLATE/feature_request.md
vendored
4
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -2,9 +2,7 @@
|
|||||||
name: Feature request
|
name: Feature request
|
||||||
about: Suggest an idea for this project
|
about: Suggest an idea for this project
|
||||||
title: ''
|
title: ''
|
||||||
labels: enhancement
|
labels: enhancement, up-for-grabs
|
||||||
assignees: mudler
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<!-- Thanks for helping us to improve LocalAI! We welcome all feature requests. Please fill out each area of the template so we can better help you. Comments like this will be hidden when you post but you can delete them if you wish. -->
|
<!-- Thanks for helping us to improve LocalAI! We welcome all feature requests. Please fill out each area of the template so we can better help you. Comments like this will be hidden when you post but you can delete them if you wish. -->
|
||||||
|
|||||||
16
.github/PULL_REQUEST_TEMPLATE.md
vendored
16
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -8,16 +8,24 @@ This PR fixes #
|
|||||||
**[Signed commits](../CONTRIBUTING.md#signing-off-on-commits-developer-certificate-of-origin)**
|
**[Signed commits](../CONTRIBUTING.md#signing-off-on-commits-developer-certificate-of-origin)**
|
||||||
- [ ] Yes, I signed my commits.
|
- [ ] Yes, I signed my commits.
|
||||||
|
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
Thank you for contributing to LocalAI!
|
Thank you for contributing to LocalAI!
|
||||||
|
|
||||||
Contributing Conventions:
|
Contributing Conventions
|
||||||
|
-------------------------
|
||||||
|
|
||||||
1. Include descriptive PR titles with [<component-name>] prepended.
|
The draft above helps to give a quick overview of your PR.
|
||||||
2. Build and test your changes before submitting a PR.
|
|
||||||
|
Remember to remove this comment and to at least:
|
||||||
|
|
||||||
|
1. Include descriptive PR titles with [<component-name>] prepended. We use [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/).
|
||||||
|
2. Build and test your changes before submitting a PR (`make build`).
|
||||||
3. Sign your commits
|
3. Sign your commits
|
||||||
|
4. **Tag maintainer:** for a quicker response, tag the relevant maintainer (see below).
|
||||||
|
5. **X/Twitter handle:** we announce bigger features on X/Twitter. If your PR gets announced, and you'd like a mention, we'll gladly shout you out!
|
||||||
|
|
||||||
By following the community's contribution conventions upfront, the review process will
|
By following the community's contribution conventions upfront, the review process will
|
||||||
be accelerated and your PR merged more quickly.
|
be accelerated and your PR merged more quickly.
|
||||||
|
|
||||||
|
If no one reviews your PR within a few days, please @-mention @mudler.
|
||||||
-->
|
-->
|
||||||
20
.github/bump_deps.sh
vendored
20
.github/bump_deps.sh
vendored
@@ -3,7 +3,25 @@ set -xe
|
|||||||
REPO=$1
|
REPO=$1
|
||||||
BRANCH=$2
|
BRANCH=$2
|
||||||
VAR=$3
|
VAR=$3
|
||||||
|
FILE=$4
|
||||||
|
|
||||||
|
if [ -z "$FILE" ]; then
|
||||||
|
FILE="Makefile"
|
||||||
|
fi
|
||||||
|
|
||||||
LAST_COMMIT=$(curl -s -H "Accept: application/vnd.github.VERSION.sha" "https://api.github.com/repos/$REPO/commits/$BRANCH")
|
LAST_COMMIT=$(curl -s -H "Accept: application/vnd.github.VERSION.sha" "https://api.github.com/repos/$REPO/commits/$BRANCH")
|
||||||
|
|
||||||
sed -i Makefile -e "s/$VAR?=.*/$VAR?=$LAST_COMMIT/"
|
# Read $VAR from Makefile (only first match)
|
||||||
|
set +e
|
||||||
|
CURRENT_COMMIT="$(grep -m1 "^$VAR?=" $FILE | cut -d'=' -f2)"
|
||||||
|
set -e
|
||||||
|
|
||||||
|
sed -i $FILE -e "s/$VAR?=.*/$VAR?=$LAST_COMMIT/"
|
||||||
|
|
||||||
|
if [ -z "$CURRENT_COMMIT" ]; then
|
||||||
|
echo "Could not find $VAR in Makefile."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Changes: https://github.com/$REPO/compare/${CURRENT_COMMIT}..${LAST_COMMIT}" >> "${VAR}_message.txt"
|
||||||
|
echo "${LAST_COMMIT}" >> "${VAR}_commit.txt"
|
||||||
7
.github/bump_docs.sh
vendored
Executable file
7
.github/bump_docs.sh
vendored
Executable file
@@ -0,0 +1,7 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -xe
|
||||||
|
REPO=$1
|
||||||
|
|
||||||
|
LATEST_TAG=$(curl -s "https://api.github.com/repos/$REPO/releases/latest" | jq -r '.tag_name')
|
||||||
|
|
||||||
|
cat <<< $(jq ".version = \"$LATEST_TAG\"" docs/data/version.json) > docs/data/version.json
|
||||||
85
.github/check_and_update.py
vendored
Normal file
85
.github/check_and_update.py
vendored
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
import hashlib
|
||||||
|
from huggingface_hub import hf_hub_download, get_paths_info
|
||||||
|
import requests
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
uri = sys.argv[1]
|
||||||
|
file_name = uri.split('/')[-1]
|
||||||
|
|
||||||
|
# Function to parse the URI and determine download method
|
||||||
|
def parse_uri(uri):
|
||||||
|
if uri.startswith('huggingface://'):
|
||||||
|
repo_id = uri.split('://')[1]
|
||||||
|
return 'huggingface', repo_id.rsplit('/', 1)[0]
|
||||||
|
elif 'huggingface.co' in uri:
|
||||||
|
parts = uri.split('/resolve/')
|
||||||
|
if len(parts) > 1:
|
||||||
|
repo_path = parts[0].split('https://huggingface.co/')[-1]
|
||||||
|
return 'huggingface', repo_path
|
||||||
|
return 'direct', uri
|
||||||
|
|
||||||
|
def calculate_sha256(file_path):
|
||||||
|
sha256_hash = hashlib.sha256()
|
||||||
|
with open(file_path, 'rb') as f:
|
||||||
|
for byte_block in iter(lambda: f.read(4096), b''):
|
||||||
|
sha256_hash.update(byte_block)
|
||||||
|
return sha256_hash.hexdigest()
|
||||||
|
|
||||||
|
def manual_safety_check_hf(repo_id):
|
||||||
|
scanResponse = requests.get('https://huggingface.co/api/models/' + repo_id + "/scan")
|
||||||
|
scan = scanResponse.json()
|
||||||
|
# Check if 'hasUnsafeFile' exists in the response
|
||||||
|
if 'hasUnsafeFile' in scan:
|
||||||
|
if scan['hasUnsafeFile']:
|
||||||
|
return scan
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
download_type, repo_id_or_url = parse_uri(uri)
|
||||||
|
|
||||||
|
new_checksum = None
|
||||||
|
file_path = None
|
||||||
|
|
||||||
|
# Decide download method based on URI type
|
||||||
|
if download_type == 'huggingface':
|
||||||
|
# Check if the repo is flagged as dangerous by HF
|
||||||
|
hazard = manual_safety_check_hf(repo_id_or_url)
|
||||||
|
if hazard != None:
|
||||||
|
print(f'Error: HuggingFace has detected security problems for {repo_id_or_url}: {str(hazard)}', filename=file_name)
|
||||||
|
sys.exit(5)
|
||||||
|
# Use HF API to pull sha
|
||||||
|
for file in get_paths_info(repo_id_or_url, [file_name], repo_type='model'):
|
||||||
|
try:
|
||||||
|
new_checksum = file.lfs.sha256
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
print(f'Error from Hugging Face Hub: {str(e)}', file=sys.stderr)
|
||||||
|
sys.exit(2)
|
||||||
|
if new_checksum is None:
|
||||||
|
try:
|
||||||
|
file_path = hf_hub_download(repo_id=repo_id_or_url, filename=file_name)
|
||||||
|
except Exception as e:
|
||||||
|
print(f'Error from Hugging Face Hub: {str(e)}', file=sys.stderr)
|
||||||
|
sys.exit(2)
|
||||||
|
else:
|
||||||
|
response = requests.get(repo_id_or_url)
|
||||||
|
if response.status_code == 200:
|
||||||
|
with open(file_name, 'wb') as f:
|
||||||
|
f.write(response.content)
|
||||||
|
file_path = file_name
|
||||||
|
elif response.status_code == 404:
|
||||||
|
print(f'File not found: {response.status_code}', file=sys.stderr)
|
||||||
|
sys.exit(2)
|
||||||
|
else:
|
||||||
|
print(f'Error downloading file: {response.status_code}', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if new_checksum is None:
|
||||||
|
new_checksum = calculate_sha256(file_path)
|
||||||
|
print(new_checksum)
|
||||||
|
os.remove(file_path)
|
||||||
|
else:
|
||||||
|
print(new_checksum)
|
||||||
63
.github/checksum_checker.sh
vendored
Normal file
63
.github/checksum_checker.sh
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# This scripts needs yq and huggingface_hub to be installed
|
||||||
|
# to install hugingface_hub run pip install huggingface_hub
|
||||||
|
|
||||||
|
# Path to the input YAML file
|
||||||
|
input_yaml=$1
|
||||||
|
|
||||||
|
# Function to download file and check checksum using Python
|
||||||
|
function check_and_update_checksum() {
|
||||||
|
model_name="$1"
|
||||||
|
file_name="$2"
|
||||||
|
uri="$3"
|
||||||
|
old_checksum="$4"
|
||||||
|
idx="$5"
|
||||||
|
|
||||||
|
# Download the file and calculate new checksum using Python
|
||||||
|
new_checksum=$(python3 ./.github/check_and_update.py $uri)
|
||||||
|
result=$?
|
||||||
|
|
||||||
|
if [[ $result -eq 5 ]]; then
|
||||||
|
echo "Contaminated entry detected, deleting entry for $model_name..."
|
||||||
|
yq eval -i "del([$idx])" "$input_yaml"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$new_checksum" == "" ]]; then
|
||||||
|
echo "Error calculating checksum for $file_name. Skipping..."
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Checksum for $file_name: $new_checksum"
|
||||||
|
|
||||||
|
# Compare and update the YAML file if checksums do not match
|
||||||
|
|
||||||
|
if [[ $result -eq 2 ]]; then
|
||||||
|
echo "File not found, deleting entry for $file_name..."
|
||||||
|
# yq eval -i "del(.[$idx].files[] | select(.filename == \"$file_name\"))" "$input_yaml"
|
||||||
|
elif [[ "$old_checksum" != "$new_checksum" ]]; then
|
||||||
|
echo "Checksum mismatch for $file_name. Updating..."
|
||||||
|
yq eval -i "del(.[$idx].files[] | select(.filename == \"$file_name\").sha256)" "$input_yaml"
|
||||||
|
yq eval -i "(.[$idx].files[] | select(.filename == \"$file_name\")).sha256 = \"$new_checksum\"" "$input_yaml"
|
||||||
|
elif [[ $result -ne 0 ]]; then
|
||||||
|
echo "Error downloading file $file_name. Skipping..."
|
||||||
|
else
|
||||||
|
echo "Checksum match for $file_name. No update needed."
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Read the YAML and process each file
|
||||||
|
len=$(yq eval '. | length' "$input_yaml")
|
||||||
|
for ((i=0; i<$len; i++))
|
||||||
|
do
|
||||||
|
name=$(yq eval ".[$i].name" "$input_yaml")
|
||||||
|
files_len=$(yq eval ".[$i].files | length" "$input_yaml")
|
||||||
|
for ((j=0; j<$files_len; j++))
|
||||||
|
do
|
||||||
|
filename=$(yq eval ".[$i].files[$j].filename" "$input_yaml")
|
||||||
|
uri=$(yq eval ".[$i].files[$j].uri" "$input_yaml")
|
||||||
|
checksum=$(yq eval ".[$i].files[$j].sha256" "$input_yaml")
|
||||||
|
echo "Checking model $name, file $filename. URI = $uri, Checksum = $checksum"
|
||||||
|
check_and_update_checksum "$name" "$filename" "$uri" "$checksum" "$i"
|
||||||
|
done
|
||||||
|
done
|
||||||
304
.github/ci/modelslist.go
vendored
Normal file
304
.github/ci/modelslist.go
vendored
Normal file
@@ -0,0 +1,304 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"html/template"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/microcosm-cc/bluemonday"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
var modelPageTemplate string = `
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>LocalAI models</title>
|
||||||
|
<link href="https://cdnjs.cloudflare.com/ajax/libs/flowbite/2.3.0/flowbite.min.css" rel="stylesheet" />
|
||||||
|
<script src="https://cdn.jsdelivr.net/npm/vanilla-lazyload@19.1.3/dist/lazyload.min.js"></script>
|
||||||
|
|
||||||
|
<link
|
||||||
|
rel="stylesheet"
|
||||||
|
href="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.8.0/build/styles/default.min.css"
|
||||||
|
/>
|
||||||
|
<script
|
||||||
|
defer
|
||||||
|
src="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.8.0/build/highlight.min.js"
|
||||||
|
></script>
|
||||||
|
<script
|
||||||
|
defer
|
||||||
|
src="https://cdn.jsdelivr.net/npm/alpinejs@3.x.x/dist/cdn.min.js"
|
||||||
|
></script>
|
||||||
|
<script
|
||||||
|
defer
|
||||||
|
src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"
|
||||||
|
></script>
|
||||||
|
<script
|
||||||
|
defer
|
||||||
|
src="https://cdn.jsdelivr.net/npm/dompurify@3.0.6/dist/purify.min.js"
|
||||||
|
></script>
|
||||||
|
|
||||||
|
<link href="/static/general.css" rel="stylesheet" />
|
||||||
|
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&family=Roboto:wght@400;500&display=swap" rel="stylesheet">
|
||||||
|
<link
|
||||||
|
href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700,900&display=swap"
|
||||||
|
rel="stylesheet" />
|
||||||
|
<link
|
||||||
|
rel="stylesheet"
|
||||||
|
href="https://cdn.jsdelivr.net/npm/tw-elements/css/tw-elements.min.css" />
|
||||||
|
<script src="https://cdn.tailwindcss.com/3.3.0"></script>
|
||||||
|
<script>
|
||||||
|
tailwind.config = {
|
||||||
|
darkMode: "class",
|
||||||
|
theme: {
|
||||||
|
fontFamily: {
|
||||||
|
sans: ["Roboto", "sans-serif"],
|
||||||
|
body: ["Roboto", "sans-serif"],
|
||||||
|
mono: ["ui-monospace", "monospace"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
corePlugins: {
|
||||||
|
preflight: false,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.1.1/css/all.min.css">
|
||||||
|
<script src="https://unpkg.com/htmx.org@1.9.12" integrity="sha384-ujb1lZYygJmzgSwoxRggbCHcjc0rB2XoQrxeTUQyRjrOnlCoYta87iKBWq3EsdM2" crossorigin="anonymous"></script>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body class="bg-gray-900 text-gray-200">
|
||||||
|
<div class="flex flex-col min-h-screen">
|
||||||
|
|
||||||
|
<nav class="bg-gray-800 shadow-lg">
|
||||||
|
<div class="container mx-auto px-4 py-4">
|
||||||
|
<div class="flex items-center justify-between">
|
||||||
|
<div class="flex items-center">
|
||||||
|
<a href="/" class="text-white text-xl font-bold"><img src="https://github.com/mudler/LocalAI/assets/2420543/0966aa2a-166e-4f99-a3e5-6c915fc997dd" alt="LocalAI Logo" class="h-10 mr-3 border-2 border-gray-300 shadow rounded"></a>
|
||||||
|
<a href="/" class="text-white text-xl font-bold">LocalAI</a>
|
||||||
|
</div>
|
||||||
|
<!-- Menu button for small screens -->
|
||||||
|
<div class="lg:hidden">
|
||||||
|
<button id="menu-toggle" class="text-gray-400 hover:text-white focus:outline-none">
|
||||||
|
<i class="fas fa-bars fa-lg"></i>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<!-- Navigation links -->
|
||||||
|
<div class="hidden lg:flex lg:items-center lg:justify-end lg:flex-1 lg:w-0">
|
||||||
|
<a href="https://localai.io" class="text-gray-400 hover:text-white px-3 py-2 rounded" target="_blank" ><i class="fas fa-book-reader pr-2"></i> Documentation</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<!-- Collapsible menu for small screens -->
|
||||||
|
<div class="hidden lg:hidden" id="mobile-menu">
|
||||||
|
<div class="pt-4 pb-3 border-t border-gray-700">
|
||||||
|
|
||||||
|
<a href="https://localai.io" class="block text-gray-400 hover:text-white px-3 py-2 rounded mt-1" target="_blank" ><i class="fas fa-book-reader pr-2"></i> Documentation</a>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
.is-hidden {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
|
||||||
|
<div class="container mx-auto px-4 flex-grow">
|
||||||
|
|
||||||
|
<div class="models mt-12">
|
||||||
|
<h2 class="text-center text-3xl font-semibold text-gray-100">
|
||||||
|
LocalAI model gallery list </h2><br>
|
||||||
|
|
||||||
|
<h2 class="text-center text-3xl font-semibold text-gray-100">
|
||||||
|
|
||||||
|
🖼️ Available {{.AvailableModels}} models</i> <a href="https://localai.io/models/" target="_blank" >
|
||||||
|
<i class="fas fa-circle-info pr-2"></i>
|
||||||
|
</a></h2>
|
||||||
|
|
||||||
|
<h3>
|
||||||
|
Refer to the Model gallery <a href="https://localai.io/models/" target="_blank" ><i class="fas fa-circle-info pr-2"></i></a> for more information on how to use the models with LocalAI.<br>
|
||||||
|
|
||||||
|
You can install models with the CLI command <code>local-ai models install <model-name></code>. or by using the WebUI.
|
||||||
|
</h3>
|
||||||
|
|
||||||
|
<input class="form-control appearance-none block w-full mt-5 px-3 py-2 text-base font-normal text-gray-300 pb-2 mb-5 bg-gray-800 bg-clip-padding border border-solid border-gray-600 rounded transition ease-in-out m-0 focus:text-gray-300 focus:bg-gray-900 focus:border-blue-500 focus:outline-none" type="search"
|
||||||
|
id="searchbox" placeholder="Live search keyword..">
|
||||||
|
<div class="dark grid grid-cols-1 grid-rows-1 md:grid-cols-3 block rounded-lg shadow-secondary-1 dark:bg-surface-dark">
|
||||||
|
{{ range $_, $model := .Models }}
|
||||||
|
<div class="box me-4 mb-2 block rounded-lg bg-white shadow-secondary-1 dark:bg-gray-800 dark:bg-surface-dark dark:text-white text-surface pb-2">
|
||||||
|
<div>
|
||||||
|
{{ $icon := "https://upload.wikimedia.org/wikipedia/commons/6/65/No-Image-Placeholder.svg" }}
|
||||||
|
{{ if $model.Icon }}
|
||||||
|
{{ $icon = $model.Icon }}
|
||||||
|
{{ end }}
|
||||||
|
<div class="flex justify-center items-center">
|
||||||
|
<img data-src="{{ $icon }}" alt="{{$model.Name}}" class="rounded-t-lg max-h-48 max-w-96 object-cover mt-3 lazy">
|
||||||
|
</div>
|
||||||
|
<div class="p-6 text-surface dark:text-white">
|
||||||
|
<h5 class="mb-2 text-xl font-medium leading-tight">{{$model.Name}}</h5>
|
||||||
|
|
||||||
|
|
||||||
|
<p class="mb-4 text-base truncate">{{ $model.Description }}</p>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<div class="px-6 pt-4 pb-2">
|
||||||
|
|
||||||
|
<!-- Modal toggle -->
|
||||||
|
<button data-modal-target="{{ $model.Name}}-modal" data-modal-toggle="{{ $model.Name }}-modal" class="block text-white bg-blue-700 hover:bg-blue-800 focus:ring-4 focus:outline-none focus:ring-blue-300 font-medium rounded-lg text-sm px-5 py-2.5 text-center dark:bg-blue-600 dark:hover:bg-blue-700 dark:focus:ring-blue-800" type="button">
|
||||||
|
More info
|
||||||
|
</button>
|
||||||
|
|
||||||
|
<!-- Main modal -->
|
||||||
|
<div id="{{ $model.Name}}-modal" tabindex="-1" aria-hidden="true" class="hidden overflow-y-auto overflow-x-hidden fixed top-0 right-0 left-0 z-50 justify-center items-center w-full md:inset-0 h-[calc(100%-1rem)] max-h-full">
|
||||||
|
<div class="relative p-4 w-full max-w-2xl max-h-full">
|
||||||
|
<!-- Modal content -->
|
||||||
|
<div class="relative bg-white rounded-lg shadow dark:bg-gray-700">
|
||||||
|
<!-- Modal header -->
|
||||||
|
<div class="flex items-center justify-between p-4 md:p-5 border-b rounded-t dark:border-gray-600">
|
||||||
|
<h3 class="text-xl font-semibold text-gray-900 dark:text-white">
|
||||||
|
{{ $model.Name}}
|
||||||
|
</h3>
|
||||||
|
<button type="button" class="text-gray-400 bg-transparent hover:bg-gray-200 hover:text-gray-900 rounded-lg text-sm w-8 h-8 ms-auto inline-flex justify-center items-center dark:hover:bg-gray-600 dark:hover:text-white" data-modal-hide="{{$model.Name}}-modal">
|
||||||
|
<svg class="w-3 h-3" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 14 14">
|
||||||
|
<path stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="m1 1 6 6m0 0 6 6M7 7l6-6M7 7l-6 6"/>
|
||||||
|
</svg>
|
||||||
|
<span class="sr-only">Close modal</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<!-- Modal body -->
|
||||||
|
<div class="p-4 md:p-5 space-y-4">
|
||||||
|
<div class="flex justify-center items-center">
|
||||||
|
<img data-src="{{ $icon }}" alt="{{$model.Name}}" class="lazy rounded-t-lg max-h-48 max-w-96 object-cover mt-3">
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p class="text-base leading-relaxed text-gray-500 dark:text-gray-400">
|
||||||
|
{{ $model.Description }}
|
||||||
|
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p class="text-base leading-relaxed text-gray-500 dark:text-gray-400">
|
||||||
|
To install the model with the CLI, run: <br>
|
||||||
|
<code> local-ai models install {{$model.Name}} </code> <br>
|
||||||
|
|
||||||
|
<hr>
|
||||||
|
See also <a href="https://localai.io/models/" target="_blank" >
|
||||||
|
Installation <i class="fas fa-circle-info pr-2"></i>
|
||||||
|
</a> to see how to install models with the REST API.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p class="text-base leading-relaxed text-gray-500 dark:text-gray-400">
|
||||||
|
<ul>
|
||||||
|
{{ range $_, $u := $model.URLs }}
|
||||||
|
<li><a href="{{ $u }}" target=_blank><i class="fa-solid fa-link"></i> {{ $u }}</a></li>
|
||||||
|
{{ end }}
|
||||||
|
</ul>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<!-- Modal footer -->
|
||||||
|
<div class="flex items-center p-4 md:p-5 border-t border-gray-200 rounded-b dark:border-gray-600">
|
||||||
|
<button data-modal-hide="{{ $model.Name}}-modal" type="button" class="py-2.5 px-5 ms-3 text-sm font-medium text-gray-900 focus:outline-none bg-white rounded-lg border border-gray-200 hover:bg-gray-100 hover:text-blue-700 focus:z-10 focus:ring-4 focus:ring-gray-100 dark:focus:ring-gray-700 dark:bg-gray-800 dark:text-gray-400 dark:border-gray-600 dark:hover:text-white dark:hover:bg-gray-700">Close</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{{ end }}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
var lazyLoadInstance = new LazyLoad({
|
||||||
|
// Your custom settings go here
|
||||||
|
});
|
||||||
|
|
||||||
|
let cards = document.querySelectorAll('.box')
|
||||||
|
|
||||||
|
function liveSearch() {
|
||||||
|
let search_query = document.getElementById("searchbox").value;
|
||||||
|
|
||||||
|
//Use innerText if all contents are visible
|
||||||
|
//Use textContent for including hidden elements
|
||||||
|
for (var i = 0; i < cards.length; i++) {
|
||||||
|
if(cards[i].textContent.toLowerCase()
|
||||||
|
.includes(search_query.toLowerCase())) {
|
||||||
|
cards[i].classList.remove("is-hidden");
|
||||||
|
} else {
|
||||||
|
cards[i].classList.add("is-hidden");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//A little delay
|
||||||
|
let typingTimer;
|
||||||
|
let typeInterval = 500;
|
||||||
|
let searchInput = document.getElementById('searchbox');
|
||||||
|
|
||||||
|
searchInput.addEventListener('keyup', () => {
|
||||||
|
clearTimeout(typingTimer);
|
||||||
|
typingTimer = setTimeout(liveSearch, typeInterval);
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/flowbite/2.3.0/flowbite.min.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`
|
||||||
|
|
||||||
|
type GalleryModel struct {
|
||||||
|
Name string `json:"name" yaml:"name"`
|
||||||
|
URLs []string `json:"urls" yaml:"urls"`
|
||||||
|
Icon string `json:"icon" yaml:"icon"`
|
||||||
|
Description string `json:"description" yaml:"description"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// read the YAML file which contains the models
|
||||||
|
|
||||||
|
f, err := ioutil.ReadFile(os.Args[1])
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error reading file:", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
models := []*GalleryModel{}
|
||||||
|
err = yaml.Unmarshal(f, &models)
|
||||||
|
if err != nil {
|
||||||
|
// write to stderr
|
||||||
|
os.Stderr.WriteString("Error unmarshaling YAML: " + err.Error() + "\n")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure that all arbitrary text content is sanitized before display
|
||||||
|
for i, m := range models {
|
||||||
|
models[i].Name = bluemonday.StrictPolicy().Sanitize(m.Name)
|
||||||
|
models[i].Description = bluemonday.StrictPolicy().Sanitize(m.Description)
|
||||||
|
}
|
||||||
|
|
||||||
|
// render the template
|
||||||
|
data := struct {
|
||||||
|
Models []*GalleryModel
|
||||||
|
AvailableModels int
|
||||||
|
}{
|
||||||
|
Models: models,
|
||||||
|
AvailableModels: len(models),
|
||||||
|
}
|
||||||
|
tmpl := template.Must(template.New("modelPage").Parse(modelPageTemplate))
|
||||||
|
|
||||||
|
err = tmpl.Execute(os.Stdout, data)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error executing template:", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
119
.github/dependabot.yml
vendored
Normal file
119
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: "gitsubmodule"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "gomod"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
ignore:
|
||||||
|
- dependency-name: "github.com/mudler/LocalAI/pkg/grpc/proto"
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/bark"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/common/template"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/coqui"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/diffusers"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/exllama"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/exllama2"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/mamba"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/openvoice"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/rerankers"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/sentencetransformers"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/transformers"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/backend/python/vllm"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/examples/chainlit"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/examples/functions"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/examples/langchain/langchainpy-localai-example"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/examples/langchain-chroma"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
directory: "/examples/streamlit-bot"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
directory: "/examples/k8sgpt"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
directory: "/examples/kubernetes"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
directory: "/examples/langchain"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "gomod"
|
||||||
|
directory: "/examples/semantic-todo"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
directory: "/examples/telegram-bot"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
33
.github/labeler.yml
vendored
Normal file
33
.github/labeler.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
enhancement:
|
||||||
|
- head-branch: ['^feature', 'feature']
|
||||||
|
|
||||||
|
dependencies:
|
||||||
|
- any:
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: 'Makefile'
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: '*.mod'
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: '*.sum'
|
||||||
|
|
||||||
|
kind/documentation:
|
||||||
|
- any:
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: 'docs/*'
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: '*.md'
|
||||||
|
|
||||||
|
area/ai-model:
|
||||||
|
- any:
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: 'gallery/*'
|
||||||
|
|
||||||
|
examples:
|
||||||
|
- any:
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: 'examples/*'
|
||||||
|
|
||||||
|
ci:
|
||||||
|
- any:
|
||||||
|
- changed-files:
|
||||||
|
- any-glob-to-any-file: '.github/*'
|
||||||
15
.github/release.yml
vendored
15
.github/release.yml
vendored
@@ -12,13 +12,26 @@ changelog:
|
|||||||
- title: "Bug fixes :bug:"
|
- title: "Bug fixes :bug:"
|
||||||
labels:
|
labels:
|
||||||
- bug
|
- bug
|
||||||
|
- regression
|
||||||
|
- title: "🖧 P2P area"
|
||||||
|
labels:
|
||||||
|
- area/p2p
|
||||||
- title: Exciting New Features 🎉
|
- title: Exciting New Features 🎉
|
||||||
labels:
|
labels:
|
||||||
- Semver-Minor
|
- Semver-Minor
|
||||||
- enhancement
|
- enhancement
|
||||||
|
- ux
|
||||||
|
- roadmap
|
||||||
|
- title: 🧠 Models
|
||||||
|
labels:
|
||||||
|
- area/ai-model
|
||||||
|
- title: 📖 Documentation and examples
|
||||||
|
labels:
|
||||||
|
- kind/documentation
|
||||||
|
- examples
|
||||||
- title: 👒 Dependencies
|
- title: 👒 Dependencies
|
||||||
labels:
|
labels:
|
||||||
- dependencies
|
- dependencies
|
||||||
- title: Other Changes
|
- title: Other Changes
|
||||||
labels:
|
labels:
|
||||||
- "*"
|
- "*"
|
||||||
|
|||||||
1048
.github/workflows/backend.yml
vendored
Normal file
1048
.github/workflows/backend.yml
vendored
Normal file
File diff suppressed because it is too large
Load Diff
241
.github/workflows/backend_build.yml
vendored
Normal file
241
.github/workflows/backend_build.yml
vendored
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
---
|
||||||
|
name: 'build python backend container images (reusable)'
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
base-image:
|
||||||
|
description: 'Base image'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
build-type:
|
||||||
|
description: 'Build type'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
cuda-major-version:
|
||||||
|
description: 'CUDA major version'
|
||||||
|
default: "12"
|
||||||
|
type: string
|
||||||
|
cuda-minor-version:
|
||||||
|
description: 'CUDA minor version'
|
||||||
|
default: "1"
|
||||||
|
type: string
|
||||||
|
platforms:
|
||||||
|
description: 'Platforms'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
tag-latest:
|
||||||
|
description: 'Tag latest'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
tag-suffix:
|
||||||
|
description: 'Tag suffix'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
runs-on:
|
||||||
|
description: 'Runs on'
|
||||||
|
required: true
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
backend:
|
||||||
|
description: 'Backend to build'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
context:
|
||||||
|
description: 'Build context'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
dockerfile:
|
||||||
|
description: 'Build Dockerfile'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
skip-drivers:
|
||||||
|
description: 'Skip drivers'
|
||||||
|
default: 'false'
|
||||||
|
type: string
|
||||||
|
secrets:
|
||||||
|
dockerUsername:
|
||||||
|
required: true
|
||||||
|
dockerPassword:
|
||||||
|
required: true
|
||||||
|
quayUsername:
|
||||||
|
required: true
|
||||||
|
quayPassword:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
backend-build:
|
||||||
|
runs-on: ${{ inputs.runs-on }}
|
||||||
|
steps:
|
||||||
|
|
||||||
|
|
||||||
|
- name: Free Disk Space (Ubuntu)
|
||||||
|
if: inputs.runs-on == 'ubuntu-latest'
|
||||||
|
uses: jlumbroso/free-disk-space@main
|
||||||
|
with:
|
||||||
|
# this might remove tools that are actually needed,
|
||||||
|
# if set to "true" but frees about 6 GB
|
||||||
|
tool-cache: true
|
||||||
|
# all of these default to true, but feel free to set to
|
||||||
|
# "false" if necessary for your workflow
|
||||||
|
android: true
|
||||||
|
dotnet: true
|
||||||
|
haskell: true
|
||||||
|
large-packages: true
|
||||||
|
docker-images: true
|
||||||
|
swap-storage: true
|
||||||
|
|
||||||
|
- name: Force Install GIT latest
|
||||||
|
run: |
|
||||||
|
sudo apt-get update \
|
||||||
|
&& sudo apt-get install -y software-properties-common \
|
||||||
|
&& sudo apt-get update \
|
||||||
|
&& sudo add-apt-repository -y ppa:git-core/ppa \
|
||||||
|
&& sudo apt-get update \
|
||||||
|
&& sudo apt-get install -y git
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Release space from worker
|
||||||
|
if: inputs.runs-on == 'ubuntu-latest'
|
||||||
|
run: |
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
df -h
|
||||||
|
echo
|
||||||
|
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
||||||
|
sudo apt-get remove --auto-remove android-sdk-platform-tools snapd || true
|
||||||
|
sudo apt-get purge --auto-remove android-sdk-platform-tools snapd || true
|
||||||
|
sudo rm -rf /usr/local/lib/android
|
||||||
|
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo apt-get remove -y '^mono-.*' || true
|
||||||
|
sudo apt-get remove -y '^ghc-.*' || true
|
||||||
|
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
||||||
|
sudo apt-get remove -y 'php.*' || true
|
||||||
|
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
||||||
|
sudo apt-get remove -y '^google-.*' || true
|
||||||
|
sudo apt-get remove -y azure-cli || true
|
||||||
|
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
||||||
|
sudo apt-get remove -y '^gfortran-.*' || true
|
||||||
|
sudo apt-get remove -y microsoft-edge-stable || true
|
||||||
|
sudo apt-get remove -y firefox || true
|
||||||
|
sudo apt-get remove -y powershell || true
|
||||||
|
sudo apt-get remove -y r-base-core || true
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
echo
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
sudo rm -rfv build || true
|
||||||
|
sudo rm -rf /usr/share/dotnet || true
|
||||||
|
sudo rm -rf /opt/ghc || true
|
||||||
|
sudo rm -rf "/usr/local/share/boost" || true
|
||||||
|
sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
|
||||||
|
df -h
|
||||||
|
|
||||||
|
- name: Docker meta
|
||||||
|
id: meta
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
quay.io/go-skynet/local-ai-backends
|
||||||
|
localai/localai-backends
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=semver,pattern={{raw}}
|
||||||
|
type=sha
|
||||||
|
flavor: |
|
||||||
|
latest=${{ inputs.tag-latest }}
|
||||||
|
suffix=${{ inputs.tag-suffix }},onlatest=true
|
||||||
|
|
||||||
|
- name: Docker meta for PR
|
||||||
|
id: meta_pull_request
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
quay.io/go-skynet/ci-tests
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch,suffix=${{ github.event.number }}-${{ inputs.backend }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
||||||
|
type=semver,pattern={{raw}},suffix=${{ github.event.number }}-${{ inputs.backend }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
||||||
|
type=sha,suffix=${{ github.event.number }}-${{ inputs.backend }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
||||||
|
flavor: |
|
||||||
|
latest=${{ inputs.tag-latest }}
|
||||||
|
suffix=${{ inputs.tag-suffix }},onlatest=true
|
||||||
|
## End testing image
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@master
|
||||||
|
with:
|
||||||
|
platforms: all
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
id: buildx
|
||||||
|
uses: docker/setup-buildx-action@master
|
||||||
|
|
||||||
|
- name: Login to DockerHub
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.dockerUsername }}
|
||||||
|
password: ${{ secrets.dockerPassword }}
|
||||||
|
|
||||||
|
- name: Login to Quay.io
|
||||||
|
# if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: quay.io
|
||||||
|
username: ${{ secrets.quayUsername }}
|
||||||
|
password: ${{ secrets.quayPassword }}
|
||||||
|
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
build-args: |
|
||||||
|
BUILD_TYPE=${{ inputs.build-type }}
|
||||||
|
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
||||||
|
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
||||||
|
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
||||||
|
BASE_IMAGE=${{ inputs.base-image }}
|
||||||
|
BACKEND=${{ inputs.backend }}
|
||||||
|
context: ${{ inputs.context }}
|
||||||
|
file: ${{ inputs.dockerfile }}
|
||||||
|
cache-from: type=gha
|
||||||
|
platforms: ${{ inputs.platforms }}
|
||||||
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
|
||||||
|
- name: Build and push (PR)
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
build-args: |
|
||||||
|
BUILD_TYPE=${{ inputs.build-type }}
|
||||||
|
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
||||||
|
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
||||||
|
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
||||||
|
BASE_IMAGE=${{ inputs.base-image }}
|
||||||
|
BACKEND=${{ inputs.backend }}
|
||||||
|
context: ${{ inputs.context }}
|
||||||
|
file: ${{ inputs.dockerfile }}
|
||||||
|
cache-from: type=gha
|
||||||
|
platforms: ${{ inputs.platforms }}
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta_pull_request.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta_pull_request.outputs.labels }}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
- name: job summary
|
||||||
|
run: |
|
||||||
|
echo "Built image: ${{ steps.meta.outputs.labels }}" >> $GITHUB_STEP_SUMMARY
|
||||||
23
.github/workflows/build-test.yaml
vendored
Normal file
23
.github/workflows/build-test.yaml
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
name: Build test
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Set up Go
|
||||||
|
uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: 1.23
|
||||||
|
- name: Run GoReleaser
|
||||||
|
run: |
|
||||||
|
make dev-dist
|
||||||
56
.github/workflows/bump_deps.yaml
vendored
56
.github/workflows/bump_deps.yaml
vendored
@@ -9,42 +9,54 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- repository: "go-skynet/go-llama.cpp"
|
- repository: "ggml-org/llama.cpp"
|
||||||
variable: "GOLLAMA_VERSION"
|
variable: "LLAMA_VERSION"
|
||||||
branch: "master"
|
branch: "master"
|
||||||
- repository: "go-skynet/go-ggml-transformers.cpp"
|
file: "backend/cpp/llama-cpp/Makefile"
|
||||||
variable: "GOGGMLTRANSFORMERS_VERSION"
|
- repository: "ggml-org/whisper.cpp"
|
||||||
branch: "master"
|
|
||||||
- repository: "donomii/go-rwkv.cpp"
|
|
||||||
variable: "RWKV_VERSION"
|
|
||||||
branch: "main"
|
|
||||||
- repository: "ggerganov/whisper.cpp"
|
|
||||||
variable: "WHISPER_CPP_VERSION"
|
variable: "WHISPER_CPP_VERSION"
|
||||||
branch: "master"
|
branch: "master"
|
||||||
- repository: "go-skynet/go-bert.cpp"
|
file: "backend/go/whisper/Makefile"
|
||||||
variable: "BERT_VERSION"
|
- repository: "PABannier/bark.cpp"
|
||||||
|
variable: "BARKCPP_VERSION"
|
||||||
|
branch: "main"
|
||||||
|
file: "Makefile"
|
||||||
|
- repository: "richiejp/stable-diffusion.cpp"
|
||||||
|
variable: "STABLEDIFFUSION_GGML_VERSION"
|
||||||
branch: "master"
|
branch: "master"
|
||||||
- repository: "go-skynet/bloomz.cpp"
|
file: "backend/go/stablediffusion-ggml/Makefile"
|
||||||
variable: "BLOOMZ_VERSION"
|
- repository: "mudler/go-piper"
|
||||||
branch: "main"
|
variable: "PIPER_VERSION"
|
||||||
- repository: "nomic-ai/gpt4all"
|
branch: "master"
|
||||||
variable: "GPT4ALL_VERSION"
|
file: "backend/go/piper/Makefile"
|
||||||
branch: "main"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
- name: Bump dependencies 🔧
|
- name: Bump dependencies 🔧
|
||||||
|
id: bump
|
||||||
run: |
|
run: |
|
||||||
bash .github/bump_deps.sh ${{ matrix.repository }} ${{ matrix.branch }} ${{ matrix.variable }}
|
bash .github/bump_deps.sh ${{ matrix.repository }} ${{ matrix.branch }} ${{ matrix.variable }} ${{ matrix.file }}
|
||||||
|
{
|
||||||
|
echo 'message<<EOF'
|
||||||
|
cat "${{ matrix.variable }}_message.txt"
|
||||||
|
echo EOF
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
{
|
||||||
|
echo 'commit<<EOF'
|
||||||
|
cat "${{ matrix.variable }}_commit.txt"
|
||||||
|
echo EOF
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
rm -rfv ${{ matrix.variable }}_message.txt
|
||||||
|
rm -rfv ${{ matrix.variable }}_commit.txt
|
||||||
- name: Create Pull Request
|
- name: Create Pull Request
|
||||||
uses: peter-evans/create-pull-request@v5
|
uses: peter-evans/create-pull-request@v7
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
||||||
push-to-fork: ci-forks/LocalAI
|
push-to-fork: ci-forks/LocalAI
|
||||||
commit-message: ':arrow_up: Update ${{ matrix.repository }}'
|
commit-message: ':arrow_up: Update ${{ matrix.repository }}'
|
||||||
title: ':arrow_up: Update ${{ matrix.repository }}'
|
title: 'chore: :arrow_up: Update ${{ matrix.repository }} to `${{ steps.bump.outputs.commit }}`'
|
||||||
branch: "update/${{ matrix.variable }}"
|
branch: "update/${{ matrix.variable }}"
|
||||||
body: Bump of ${{ matrix.repository }} version
|
body: ${{ steps.bump.outputs.message }}
|
||||||
signoff: true
|
signoff: true
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
31
.github/workflows/bump_docs.yaml
vendored
Normal file
31
.github/workflows/bump_docs.yaml
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
name: Bump dependencies
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: 0 20 * * *
|
||||||
|
workflow_dispatch:
|
||||||
|
jobs:
|
||||||
|
bump:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- repository: "mudler/LocalAI"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Bump dependencies 🔧
|
||||||
|
run: |
|
||||||
|
bash .github/bump_docs.sh ${{ matrix.repository }}
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@v7
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
||||||
|
push-to-fork: ci-forks/LocalAI
|
||||||
|
commit-message: ':arrow_up: Update docs version ${{ matrix.repository }}'
|
||||||
|
title: 'docs: :arrow_up: update docs version ${{ matrix.repository }}'
|
||||||
|
branch: "update/docs"
|
||||||
|
body: Bump of ${{ matrix.repository }} version inside docs
|
||||||
|
signoff: true
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
46
.github/workflows/checksum_checker.yaml
vendored
Normal file
46
.github/workflows/checksum_checker.yaml
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
name: Check if checksums are up-to-date
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: 0 20 * * *
|
||||||
|
workflow_dispatch:
|
||||||
|
jobs:
|
||||||
|
checksum_check:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Force Install GIT latest
|
||||||
|
run: |
|
||||||
|
sudo apt-get update \
|
||||||
|
&& sudo apt-get install -y software-properties-common \
|
||||||
|
&& sudo apt-get update \
|
||||||
|
&& sudo add-apt-repository -y ppa:git-core/ppa \
|
||||||
|
&& sudo apt-get update \
|
||||||
|
&& sudo apt-get install -y git
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y pip wget
|
||||||
|
pip install huggingface_hub
|
||||||
|
- name: 'Setup yq'
|
||||||
|
uses: dcarbone/install-yq-action@v1.3.1
|
||||||
|
with:
|
||||||
|
version: 'v4.44.2'
|
||||||
|
download-compressed: true
|
||||||
|
force: true
|
||||||
|
|
||||||
|
- name: Checksum checker 🔧
|
||||||
|
run: |
|
||||||
|
export HF_HOME=/hf_cache
|
||||||
|
sudo mkdir /hf_cache
|
||||||
|
sudo chmod 777 /hf_cache
|
||||||
|
bash .github/checksum_checker.sh gallery/index.yaml
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@v7
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
||||||
|
push-to-fork: ci-forks/LocalAI
|
||||||
|
commit-message: ':arrow_up: Checksum updates in gallery/index.yaml'
|
||||||
|
title: 'chore(model-gallery): :arrow_up: update checksum'
|
||||||
|
branch: "update/checksum"
|
||||||
|
body: Updating checksums in gallery/index.yaml
|
||||||
|
signoff: true
|
||||||
43
.github/workflows/dependabot_auto.yml
vendored
Normal file
43
.github/workflows/dependabot_auto.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
name: Dependabot auto-merge
|
||||||
|
on:
|
||||||
|
- pull_request_target
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
packages: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
dependabot:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||||
|
steps:
|
||||||
|
- name: Dependabot metadata
|
||||||
|
id: metadata
|
||||||
|
uses: dependabot/fetch-metadata@v2.4.0
|
||||||
|
with:
|
||||||
|
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||||
|
skip-commit-verification: true
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Approve a PR if not already approved
|
||||||
|
run: |
|
||||||
|
gh pr checkout "$PR_URL"
|
||||||
|
if [ "$(gh pr status --json reviewDecision -q .currentBranch.reviewDecision)" != "APPROVED" ];
|
||||||
|
then
|
||||||
|
gh pr review --approve "$PR_URL"
|
||||||
|
else
|
||||||
|
echo "PR already approved.";
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
PR_URL: ${{github.event.pull_request.html_url}}
|
||||||
|
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||||
|
|
||||||
|
- name: Enable auto-merge for Dependabot PRs
|
||||||
|
if: ${{ contains(github.event.pull_request.title, 'bump')}}
|
||||||
|
run: gh pr merge --auto --squash "$PR_URL"
|
||||||
|
env:
|
||||||
|
PR_URL: ${{github.event.pull_request.html_url}}
|
||||||
|
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||||
64
.github/workflows/deploy-explorer.yaml
vendored
Normal file
64
.github/workflows/deploy-explorer.yaml
vendored
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
name: Explorer deployment
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
tags:
|
||||||
|
- 'v*'
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ci-deploy-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-linux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Clone
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
- uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: '1.21.x'
|
||||||
|
cache: false
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y wget curl build-essential ffmpeg protobuf-compiler ccache upx-ucl gawk cmake libgmock-dev
|
||||||
|
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
||||||
|
go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
|
||||||
|
make protogen-go
|
||||||
|
- name: Build api
|
||||||
|
run: |
|
||||||
|
CGO_ENABLED=0 make build
|
||||||
|
- name: rm
|
||||||
|
uses: appleboy/ssh-action@v1.2.2
|
||||||
|
with:
|
||||||
|
host: ${{ secrets.EXPLORER_SSH_HOST }}
|
||||||
|
username: ${{ secrets.EXPLORER_SSH_USERNAME }}
|
||||||
|
key: ${{ secrets.EXPLORER_SSH_KEY }}
|
||||||
|
port: ${{ secrets.EXPLORER_SSH_PORT }}
|
||||||
|
script: |
|
||||||
|
sudo rm -rf local-ai/ || true
|
||||||
|
- name: copy file via ssh
|
||||||
|
uses: appleboy/scp-action@v1.0.0
|
||||||
|
with:
|
||||||
|
host: ${{ secrets.EXPLORER_SSH_HOST }}
|
||||||
|
username: ${{ secrets.EXPLORER_SSH_USERNAME }}
|
||||||
|
key: ${{ secrets.EXPLORER_SSH_KEY }}
|
||||||
|
port: ${{ secrets.EXPLORER_SSH_PORT }}
|
||||||
|
source: "local-ai"
|
||||||
|
overwrite: true
|
||||||
|
rm: true
|
||||||
|
target: ./local-ai
|
||||||
|
- name: restarting
|
||||||
|
uses: appleboy/ssh-action@v1.2.2
|
||||||
|
with:
|
||||||
|
host: ${{ secrets.EXPLORER_SSH_HOST }}
|
||||||
|
username: ${{ secrets.EXPLORER_SSH_USERNAME }}
|
||||||
|
key: ${{ secrets.EXPLORER_SSH_KEY }}
|
||||||
|
port: ${{ secrets.EXPLORER_SSH_PORT }}
|
||||||
|
script: |
|
||||||
|
sudo cp -rfv local-ai/local-ai /usr/bin/local-ai
|
||||||
|
sudo systemctl restart local-ai
|
||||||
83
.github/workflows/disabled/comment-pr.yaml
vendored
Normal file
83
.github/workflows/disabled/comment-pr.yaml
vendored
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
name: Comment PRs
|
||||||
|
on:
|
||||||
|
pull_request_target:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
comment-pr:
|
||||||
|
env:
|
||||||
|
MODEL_NAME: hermes-2-theta-llama-3-8b
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: "${{ github.event.pull_request.merge_commit_sha }}"
|
||||||
|
fetch-depth: 0 # needed to checkout all branches for this Action to work
|
||||||
|
- uses: mudler/localai-github-action@v1
|
||||||
|
with:
|
||||||
|
model: 'hermes-2-theta-llama-3-8b' # Any from models.localai.io, or from huggingface.com with: "huggingface://<repository>/file"
|
||||||
|
# Check the PR diff using the current branch and the base branch of the PR
|
||||||
|
- uses: GrantBirki/git-diff-action@v2.7.0
|
||||||
|
id: git-diff-action
|
||||||
|
with:
|
||||||
|
json_diff_file_output: diff.json
|
||||||
|
raw_diff_file_output: diff.txt
|
||||||
|
file_output_only: "true"
|
||||||
|
base_branch: ${{ github.event.pull_request.base.sha }}
|
||||||
|
- name: Show diff
|
||||||
|
env:
|
||||||
|
DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
|
||||||
|
run: |
|
||||||
|
cat $DIFF
|
||||||
|
- name: Summarize
|
||||||
|
env:
|
||||||
|
DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
|
||||||
|
id: summarize
|
||||||
|
run: |
|
||||||
|
input="$(cat $DIFF)"
|
||||||
|
|
||||||
|
# Define the LocalAI API endpoint
|
||||||
|
API_URL="http://localhost:8080/chat/completions"
|
||||||
|
|
||||||
|
# Create a JSON payload using jq to handle special characters
|
||||||
|
json_payload=$(jq -n --arg input "$input" '{
|
||||||
|
model: "'$MODEL_NAME'",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: "You are LocalAI-bot in Github that helps understanding PRs and assess complexity. Explain what has changed in this PR diff and why"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: $input
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}')
|
||||||
|
|
||||||
|
# Send the request to LocalAI
|
||||||
|
response=$(curl -s -X POST $API_URL \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "$json_payload")
|
||||||
|
|
||||||
|
# Extract the summary from the response
|
||||||
|
summary="$(echo $response | jq -r '.choices[0].message.content')"
|
||||||
|
|
||||||
|
# Print the summary
|
||||||
|
# -H "Authorization: Bearer $API_KEY" \
|
||||||
|
echo "Summary:"
|
||||||
|
echo "$summary"
|
||||||
|
echo "payload sent"
|
||||||
|
echo "$json_payload"
|
||||||
|
{
|
||||||
|
echo 'message<<EOF'
|
||||||
|
echo "$summary"
|
||||||
|
echo EOF
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
docker logs --tail 10 local-ai
|
||||||
|
- uses: mshick/add-pr-comment@v2
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
repo-token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
||||||
|
message: ${{ steps.summarize.outputs.message }}
|
||||||
|
message-failure: |
|
||||||
|
Uh oh! Could not analyze this PR, maybe it's too big?
|
||||||
63
.github/workflows/disabled/test-gpu.yml
vendored
Normal file
63
.github/workflows/disabled/test-gpu.yml
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
---
|
||||||
|
name: 'GPU tests'
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
tags:
|
||||||
|
- '*'
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ci-gpu-tests-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
ubuntu-latest:
|
||||||
|
runs-on: gpu
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
go-version: ['1.21.x']
|
||||||
|
steps:
|
||||||
|
- name: Clone
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
- name: Setup Go ${{ matrix.go-version }}
|
||||||
|
uses: actions/setup-go@v4
|
||||||
|
with:
|
||||||
|
go-version: ${{ matrix.go-version }}
|
||||||
|
# You can test your matrix by printing the current Go version
|
||||||
|
- name: Display Go version
|
||||||
|
run: go version
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y make wget
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
if [ ! -e /run/systemd/system ]; then
|
||||||
|
sudo mkdir /run/systemd/system
|
||||||
|
fi
|
||||||
|
sudo mkdir -p /host/tests/${{ github.head_ref || github.ref }}
|
||||||
|
sudo chmod -R 777 /host/tests/${{ github.head_ref || github.ref }}
|
||||||
|
make \
|
||||||
|
TEST_DIR="/host/tests/${{ github.head_ref || github.ref }}" \
|
||||||
|
BUILD_TYPE=cublas \
|
||||||
|
prepare-e2e run-e2e-image test-e2e
|
||||||
|
- name: Release space from worker ♻
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
sudo rm -rf build || true
|
||||||
|
sudo rm -rf bin || true
|
||||||
|
sudo rm -rf dist || true
|
||||||
|
sudo docker logs $(sudo docker ps -q --filter ancestor=localai-tests) > logs.txt
|
||||||
|
sudo cat logs.txt || true
|
||||||
|
sudo rm -rf logs.txt
|
||||||
|
make clean || true
|
||||||
|
make \
|
||||||
|
TEST_DIR="/host/tests/${{ github.head_ref || github.ref }}" \
|
||||||
|
teardown-e2e || true
|
||||||
|
sudo rm -rf /host/tests/${{ github.head_ref || github.ref }} || true
|
||||||
|
docker system prune -f -a --volumes || true
|
||||||
95
.github/workflows/generate_grpc_cache.yaml
vendored
Normal file
95
.github/workflows/generate_grpc_cache.yaml
vendored
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
name: 'generate and publish GRPC docker caches'
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
schedule:
|
||||||
|
# daily at midnight
|
||||||
|
- cron: '0 0 * * *'
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: grpc-cache-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
generate_caches:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- grpc-base-image: ubuntu:22.04
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
platforms: 'linux/amd64,linux/arm64'
|
||||||
|
runs-on: ${{matrix.runs-on}}
|
||||||
|
steps:
|
||||||
|
- name: Release space from worker
|
||||||
|
if: matrix.runs-on == 'ubuntu-latest'
|
||||||
|
run: |
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
df -h
|
||||||
|
echo
|
||||||
|
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
||||||
|
sudo apt-get remove --auto-remove android-sdk-platform-tools || true
|
||||||
|
sudo apt-get purge --auto-remove android-sdk-platform-tools || true
|
||||||
|
sudo rm -rf /usr/local/lib/android
|
||||||
|
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo apt-get remove -y '^mono-.*' || true
|
||||||
|
sudo apt-get remove -y '^ghc-.*' || true
|
||||||
|
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
||||||
|
sudo apt-get remove -y 'php.*' || true
|
||||||
|
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
||||||
|
sudo apt-get remove -y '^google-.*' || true
|
||||||
|
sudo apt-get remove -y azure-cli || true
|
||||||
|
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
||||||
|
sudo apt-get remove -y '^gfortran-.*' || true
|
||||||
|
sudo apt-get remove -y microsoft-edge-stable || true
|
||||||
|
sudo apt-get remove -y firefox || true
|
||||||
|
sudo apt-get remove -y powershell || true
|
||||||
|
sudo apt-get remove -y r-base-core || true
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
echo
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
sudo rm -rfv build || true
|
||||||
|
sudo rm -rf /usr/share/dotnet || true
|
||||||
|
sudo rm -rf /opt/ghc || true
|
||||||
|
sudo rm -rf "/usr/local/share/boost" || true
|
||||||
|
sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
|
||||||
|
df -h
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@master
|
||||||
|
with:
|
||||||
|
platforms: all
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
id: buildx
|
||||||
|
uses: docker/setup-buildx-action@master
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cache GRPC
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
# The build-args MUST be an EXACT match between the image cache and other workflow steps that want to use that cache.
|
||||||
|
# This means that even the MAKEFLAGS have to be an EXACT match.
|
||||||
|
# If the build-args are not an EXACT match, it will result in a cache miss, which will require GRPC to be built from scratch.
|
||||||
|
build-args: |
|
||||||
|
GRPC_BASE_IMAGE=${{ matrix.grpc-base-image }}
|
||||||
|
GRPC_MAKEFLAGS=--jobs=4 --output-sync=target
|
||||||
|
GRPC_VERSION=v1.65.0
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile
|
||||||
|
cache-to: type=gha,ignore-error=true
|
||||||
|
cache-from: type=gha
|
||||||
|
target: grpc
|
||||||
|
platforms: ${{ matrix.platforms }}
|
||||||
|
push: false
|
||||||
59
.github/workflows/generate_intel_image.yaml
vendored
Normal file
59
.github/workflows/generate_intel_image.yaml
vendored
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
name: 'generate and publish intel docker caches'
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: intel-cache-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
generate_caches:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- base-image: intel/oneapi-basekit:2025.2.0-0-devel-ubuntu22.04
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
runs-on: ${{matrix.runs-on}}
|
||||||
|
steps:
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@master
|
||||||
|
with:
|
||||||
|
platforms: all
|
||||||
|
- name: Login to DockerHub
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Login to quay
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: quay.io
|
||||||
|
username: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
||||||
|
password: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
id: buildx
|
||||||
|
uses: docker/setup-buildx-action@master
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Cache Intel images
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
build-args: |
|
||||||
|
BASE_IMAGE=${{ matrix.base-image }}
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile
|
||||||
|
tags: quay.io/go-skynet/intel-oneapi-base:latest
|
||||||
|
push: true
|
||||||
|
target: intel
|
||||||
|
platforms: ${{ matrix.platforms }}
|
||||||
68
.github/workflows/image-pr.yml
vendored
Normal file
68
.github/workflows/image-pr.yml
vendored
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
---
|
||||||
|
name: 'build container images tests'
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ci-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
image-build:
|
||||||
|
uses: ./.github/workflows/image_build.yml
|
||||||
|
with:
|
||||||
|
tag-latest: ${{ matrix.tag-latest }}
|
||||||
|
tag-suffix: ${{ matrix.tag-suffix }}
|
||||||
|
build-type: ${{ matrix.build-type }}
|
||||||
|
cuda-major-version: ${{ matrix.cuda-major-version }}
|
||||||
|
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
||||||
|
platforms: ${{ matrix.platforms }}
|
||||||
|
runs-on: ${{ matrix.runs-on }}
|
||||||
|
base-image: ${{ matrix.base-image }}
|
||||||
|
grpc-base-image: ${{ matrix.grpc-base-image }}
|
||||||
|
makeflags: ${{ matrix.makeflags }}
|
||||||
|
secrets:
|
||||||
|
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||||
|
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
||||||
|
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
||||||
|
strategy:
|
||||||
|
# Pushing with all jobs in parallel
|
||||||
|
# eats the bandwidth of all the nodes
|
||||||
|
max-parallel: ${{ github.event_name != 'pull_request' && 4 || 8 }}
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- build-type: 'cublas'
|
||||||
|
cuda-major-version: "12"
|
||||||
|
cuda-minor-version: "0"
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'false'
|
||||||
|
tag-suffix: '-gpu-nvidia-cuda12'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
base-image: "ubuntu:22.04"
|
||||||
|
makeflags: "--jobs=3 --output-sync=target"
|
||||||
|
- build-type: 'hipblas'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'false'
|
||||||
|
tag-suffix: '-hipblas'
|
||||||
|
base-image: "rocm/dev-ubuntu-22.04:6.1"
|
||||||
|
grpc-base-image: "ubuntu:22.04"
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
makeflags: "--jobs=3 --output-sync=target"
|
||||||
|
- build-type: 'sycl_f16'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'false'
|
||||||
|
base-image: "quay.io/go-skynet/intel-oneapi-base:latest"
|
||||||
|
grpc-base-image: "ubuntu:22.04"
|
||||||
|
tag-suffix: 'sycl-f16'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
makeflags: "--jobs=3 --output-sync=target"
|
||||||
|
- build-type: 'vulkan'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'false'
|
||||||
|
tag-suffix: '-vulkan-core'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
base-image: "ubuntu:22.04"
|
||||||
|
makeflags: "--jobs=4 --output-sync=target"
|
||||||
218
.github/workflows/image.yml
vendored
218
.github/workflows/image.yml
vendored
@@ -2,7 +2,6 @@
|
|||||||
name: 'build container images'
|
name: 'build container images'
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
@@ -14,96 +13,151 @@ concurrency:
|
|||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
docker:
|
hipblas-jobs:
|
||||||
|
uses: ./.github/workflows/image_build.yml
|
||||||
|
with:
|
||||||
|
tag-latest: ${{ matrix.tag-latest }}
|
||||||
|
tag-suffix: ${{ matrix.tag-suffix }}
|
||||||
|
build-type: ${{ matrix.build-type }}
|
||||||
|
cuda-major-version: ${{ matrix.cuda-major-version }}
|
||||||
|
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
||||||
|
platforms: ${{ matrix.platforms }}
|
||||||
|
runs-on: ${{ matrix.runs-on }}
|
||||||
|
base-image: ${{ matrix.base-image }}
|
||||||
|
grpc-base-image: ${{ matrix.grpc-base-image }}
|
||||||
|
aio: ${{ matrix.aio }}
|
||||||
|
makeflags: ${{ matrix.makeflags }}
|
||||||
|
secrets:
|
||||||
|
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||||
|
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
||||||
|
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
||||||
strategy:
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- build-type: 'hipblas'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'auto'
|
||||||
|
tag-suffix: '-gpu-hipblas'
|
||||||
|
base-image: "rocm/dev-ubuntu-22.04:6.1"
|
||||||
|
grpc-base-image: "ubuntu:22.04"
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
makeflags: "--jobs=3 --output-sync=target"
|
||||||
|
aio: "-aio-gpu-hipblas"
|
||||||
|
|
||||||
|
core-image-build:
|
||||||
|
uses: ./.github/workflows/image_build.yml
|
||||||
|
with:
|
||||||
|
tag-latest: ${{ matrix.tag-latest }}
|
||||||
|
tag-suffix: ${{ matrix.tag-suffix }}
|
||||||
|
build-type: ${{ matrix.build-type }}
|
||||||
|
cuda-major-version: ${{ matrix.cuda-major-version }}
|
||||||
|
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
||||||
|
platforms: ${{ matrix.platforms }}
|
||||||
|
runs-on: ${{ matrix.runs-on }}
|
||||||
|
aio: ${{ matrix.aio }}
|
||||||
|
base-image: ${{ matrix.base-image }}
|
||||||
|
grpc-base-image: ${{ matrix.grpc-base-image }}
|
||||||
|
makeflags: ${{ matrix.makeflags }}
|
||||||
|
skip-drivers: ${{ matrix.skip-drivers }}
|
||||||
|
secrets:
|
||||||
|
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||||
|
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
||||||
|
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
||||||
|
strategy:
|
||||||
|
#max-parallel: ${{ github.event_name != 'pull_request' && 2 || 4 }}
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- build-type: ''
|
- build-type: ''
|
||||||
platforms: 'linux/amd64,linux/arm64'
|
platforms: 'linux/amd64,linux/arm64'
|
||||||
tag-latest: 'auto'
|
tag-latest: 'auto'
|
||||||
tag-suffix: ''
|
tag-suffix: ''
|
||||||
ffmpeg: ''
|
base-image: "ubuntu:22.04"
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
aio: "-aio-cpu"
|
||||||
|
makeflags: "--jobs=4 --output-sync=target"
|
||||||
|
skip-drivers: 'false'
|
||||||
- build-type: 'cublas'
|
- build-type: 'cublas'
|
||||||
cuda-major-version: 11
|
cuda-major-version: "11"
|
||||||
cuda-minor-version: 7
|
cuda-minor-version: "7"
|
||||||
platforms: 'linux/amd64'
|
platforms: 'linux/amd64'
|
||||||
tag-latest: 'false'
|
tag-latest: 'auto'
|
||||||
tag-suffix: '-cublas-cuda11'
|
tag-suffix: '-gpu-nvidia-cuda11'
|
||||||
ffmpeg: ''
|
runs-on: 'ubuntu-latest'
|
||||||
|
base-image: "ubuntu:22.04"
|
||||||
|
makeflags: "--jobs=4 --output-sync=target"
|
||||||
|
skip-drivers: 'false'
|
||||||
|
aio: "-aio-gpu-nvidia-cuda-11"
|
||||||
- build-type: 'cublas'
|
- build-type: 'cublas'
|
||||||
cuda-major-version: 12
|
cuda-major-version: "12"
|
||||||
cuda-minor-version: 1
|
cuda-minor-version: "0"
|
||||||
platforms: 'linux/amd64'
|
platforms: 'linux/amd64'
|
||||||
tag-latest: 'false'
|
tag-latest: 'auto'
|
||||||
tag-suffix: '-cublas-cuda12'
|
tag-suffix: '-gpu-nvidia-cuda12'
|
||||||
ffmpeg: ''
|
runs-on: 'ubuntu-latest'
|
||||||
- build-type: ''
|
base-image: "ubuntu:22.04"
|
||||||
platforms: 'linux/amd64,linux/arm64'
|
skip-drivers: 'false'
|
||||||
tag-latest: 'false'
|
makeflags: "--jobs=4 --output-sync=target"
|
||||||
tag-suffix: '-ffmpeg'
|
aio: "-aio-gpu-nvidia-cuda-12"
|
||||||
ffmpeg: 'true'
|
- build-type: 'vulkan'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'auto'
|
||||||
|
tag-suffix: '-vulkan'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
base-image: "ubuntu:22.04"
|
||||||
|
skip-drivers: 'false'
|
||||||
|
makeflags: "--jobs=4 --output-sync=target"
|
||||||
|
aio: "-aio-gpu-vulkan"
|
||||||
|
- build-type: 'sycl_f16'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'auto'
|
||||||
|
base-image: "quay.io/go-skynet/intel-oneapi-base:latest"
|
||||||
|
grpc-base-image: "ubuntu:22.04"
|
||||||
|
tag-suffix: '-gpu-intel-f16'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
makeflags: "--jobs=3 --output-sync=target"
|
||||||
|
aio: "-aio-gpu-intel-f16"
|
||||||
|
- build-type: 'sycl_f32'
|
||||||
|
platforms: 'linux/amd64'
|
||||||
|
tag-latest: 'auto'
|
||||||
|
base-image: "quay.io/go-skynet/intel-oneapi-base:latest"
|
||||||
|
grpc-base-image: "ubuntu:22.04"
|
||||||
|
tag-suffix: '-gpu-intel-f32'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
makeflags: "--jobs=3 --output-sync=target"
|
||||||
|
aio: "-aio-gpu-intel-f32"
|
||||||
|
|
||||||
|
gh-runner:
|
||||||
|
uses: ./.github/workflows/image_build.yml
|
||||||
|
with:
|
||||||
|
tag-latest: ${{ matrix.tag-latest }}
|
||||||
|
tag-suffix: ${{ matrix.tag-suffix }}
|
||||||
|
build-type: ${{ matrix.build-type }}
|
||||||
|
cuda-major-version: ${{ matrix.cuda-major-version }}
|
||||||
|
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
||||||
|
platforms: ${{ matrix.platforms }}
|
||||||
|
runs-on: ${{ matrix.runs-on }}
|
||||||
|
aio: ${{ matrix.aio }}
|
||||||
|
base-image: ${{ matrix.base-image }}
|
||||||
|
grpc-base-image: ${{ matrix.grpc-base-image }}
|
||||||
|
makeflags: ${{ matrix.makeflags }}
|
||||||
|
skip-drivers: ${{ matrix.skip-drivers }}
|
||||||
|
secrets:
|
||||||
|
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||||
|
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
||||||
|
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
- build-type: 'cublas'
|
- build-type: 'cublas'
|
||||||
cuda-major-version: 11
|
cuda-major-version: "12"
|
||||||
cuda-minor-version: 7
|
cuda-minor-version: "0"
|
||||||
platforms: 'linux/amd64'
|
platforms: 'linux/arm64'
|
||||||
tag-latest: 'false'
|
tag-latest: 'auto'
|
||||||
tag-suffix: '-cublas-cuda11-ffmpeg'
|
tag-suffix: '-nvidia-l4t-arm64'
|
||||||
ffmpeg: 'true'
|
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
||||||
- build-type: 'cublas'
|
runs-on: 'ubuntu-24.04-arm'
|
||||||
cuda-major-version: 12
|
makeflags: "--jobs=4 --output-sync=target"
|
||||||
cuda-minor-version: 1
|
skip-drivers: 'true'
|
||||||
platforms: 'linux/amd64'
|
|
||||||
tag-latest: 'false'
|
|
||||||
tag-suffix: '-cublas-cuda12-ffmpeg'
|
|
||||||
ffmpeg: 'true'
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Docker meta
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v4
|
|
||||||
with:
|
|
||||||
images: quay.io/go-skynet/local-ai
|
|
||||||
tags: |
|
|
||||||
type=ref,event=branch
|
|
||||||
type=semver,pattern={{raw}}
|
|
||||||
type=sha
|
|
||||||
flavor: |
|
|
||||||
latest=${{ matrix.tag-latest }}
|
|
||||||
suffix=${{ matrix.tag-suffix }}
|
|
||||||
|
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@master
|
|
||||||
with:
|
|
||||||
platforms: all
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
id: buildx
|
|
||||||
uses: docker/setup-buildx-action@master
|
|
||||||
|
|
||||||
- name: Login to DockerHub
|
|
||||||
if: github.event_name != 'pull_request'
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
registry: quay.io
|
|
||||||
username: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
|
||||||
password: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Build and push
|
|
||||||
uses: docker/build-push-action@v4
|
|
||||||
with:
|
|
||||||
builder: ${{ steps.buildx.outputs.name }}
|
|
||||||
build-args: |
|
|
||||||
BUILD_TYPE=${{ matrix.build-type }}
|
|
||||||
CUDA_MAJOR_VERSION=${{ matrix.cuda-major-version }}
|
|
||||||
CUDA_MINOR_VERSION=${{ matrix.cuda-minor-version }}
|
|
||||||
FFMPEG=${{ matrix.ffmpeg }}
|
|
||||||
context: .
|
|
||||||
file: ./Dockerfile
|
|
||||||
platforms: ${{ matrix.platforms }}
|
|
||||||
push: ${{ github.event_name != 'pull_request' }}
|
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
|
|||||||
313
.github/workflows/image_build.yml
vendored
Normal file
313
.github/workflows/image_build.yml
vendored
Normal file
@@ -0,0 +1,313 @@
|
|||||||
|
---
|
||||||
|
name: 'build container images (reusable)'
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
base-image:
|
||||||
|
description: 'Base image'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
grpc-base-image:
|
||||||
|
description: 'GRPC Base image, must be a compatible image with base-image'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
build-type:
|
||||||
|
description: 'Build type'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
cuda-major-version:
|
||||||
|
description: 'CUDA major version'
|
||||||
|
default: "12"
|
||||||
|
type: string
|
||||||
|
cuda-minor-version:
|
||||||
|
description: 'CUDA minor version'
|
||||||
|
default: "4"
|
||||||
|
type: string
|
||||||
|
platforms:
|
||||||
|
description: 'Platforms'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
tag-latest:
|
||||||
|
description: 'Tag latest'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
tag-suffix:
|
||||||
|
description: 'Tag suffix'
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
skip-drivers:
|
||||||
|
description: 'Skip drivers by default'
|
||||||
|
default: 'false'
|
||||||
|
type: string
|
||||||
|
runs-on:
|
||||||
|
description: 'Runs on'
|
||||||
|
required: true
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
makeflags:
|
||||||
|
description: 'Make Flags'
|
||||||
|
required: false
|
||||||
|
default: '--jobs=4 --output-sync=target'
|
||||||
|
type: string
|
||||||
|
aio:
|
||||||
|
description: 'AIO Image Name'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
secrets:
|
||||||
|
dockerUsername:
|
||||||
|
required: true
|
||||||
|
dockerPassword:
|
||||||
|
required: true
|
||||||
|
quayUsername:
|
||||||
|
required: true
|
||||||
|
quayPassword:
|
||||||
|
required: true
|
||||||
|
jobs:
|
||||||
|
reusable_image-build:
|
||||||
|
runs-on: ${{ inputs.runs-on }}
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: Free Disk Space (Ubuntu)
|
||||||
|
if: inputs.runs-on == 'ubuntu-latest'
|
||||||
|
uses: jlumbroso/free-disk-space@main
|
||||||
|
with:
|
||||||
|
# this might remove tools that are actually needed,
|
||||||
|
# if set to "true" but frees about 6 GB
|
||||||
|
tool-cache: true
|
||||||
|
# all of these default to true, but feel free to set to
|
||||||
|
# "false" if necessary for your workflow
|
||||||
|
android: true
|
||||||
|
dotnet: true
|
||||||
|
haskell: true
|
||||||
|
large-packages: true
|
||||||
|
docker-images: true
|
||||||
|
swap-storage: true
|
||||||
|
- name: Force Install GIT latest
|
||||||
|
run: |
|
||||||
|
sudo apt-get update \
|
||||||
|
&& sudo apt-get install -y software-properties-common \
|
||||||
|
&& sudo apt-get update \
|
||||||
|
&& sudo add-apt-repository -y ppa:git-core/ppa \
|
||||||
|
&& sudo apt-get update \
|
||||||
|
&& sudo apt-get install -y git
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Release space from worker
|
||||||
|
if: inputs.runs-on == 'ubuntu-latest'
|
||||||
|
run: |
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
df -h
|
||||||
|
echo
|
||||||
|
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
||||||
|
sudo apt-get remove --auto-remove android-sdk-platform-tools snapd || true
|
||||||
|
sudo apt-get purge --auto-remove android-sdk-platform-tools snapd || true
|
||||||
|
sudo rm -rf /usr/local/lib/android
|
||||||
|
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo apt-get remove -y '^mono-.*' || true
|
||||||
|
sudo apt-get remove -y '^ghc-.*' || true
|
||||||
|
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
||||||
|
sudo apt-get remove -y 'php.*' || true
|
||||||
|
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
||||||
|
sudo apt-get remove -y '^google-.*' || true
|
||||||
|
sudo apt-get remove -y azure-cli || true
|
||||||
|
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
||||||
|
sudo apt-get remove -y '^gfortran-.*' || true
|
||||||
|
sudo apt-get remove -y microsoft-edge-stable || true
|
||||||
|
sudo apt-get remove -y firefox || true
|
||||||
|
sudo apt-get remove -y powershell || true
|
||||||
|
sudo apt-get remove -y r-base-core || true
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
echo
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
sudo rm -rfv build || true
|
||||||
|
sudo rm -rf /usr/share/dotnet || true
|
||||||
|
sudo rm -rf /opt/ghc || true
|
||||||
|
sudo rm -rf "/usr/local/share/boost" || true
|
||||||
|
sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
|
||||||
|
df -h
|
||||||
|
|
||||||
|
- name: Docker meta
|
||||||
|
id: meta
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
quay.io/go-skynet/local-ai
|
||||||
|
localai/localai
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=semver,pattern={{raw}}
|
||||||
|
type=sha
|
||||||
|
flavor: |
|
||||||
|
latest=${{ inputs.tag-latest }}
|
||||||
|
suffix=${{ inputs.tag-suffix }},onlatest=true
|
||||||
|
- name: Docker meta for PR
|
||||||
|
id: meta_pull_request
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
quay.io/go-skynet/ci-tests
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch,suffix=localai${{ github.event.number }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
||||||
|
type=semver,pattern={{raw}},suffix=localai${{ github.event.number }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
||||||
|
type=sha,suffix=localai${{ github.event.number }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
||||||
|
flavor: |
|
||||||
|
latest=${{ inputs.tag-latest }}
|
||||||
|
suffix=${{ inputs.tag-suffix }}
|
||||||
|
- name: Docker meta AIO (quay.io)
|
||||||
|
if: inputs.aio != ''
|
||||||
|
id: meta_aio
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
quay.io/go-skynet/local-ai
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=semver,pattern={{raw}}
|
||||||
|
flavor: |
|
||||||
|
latest=${{ inputs.tag-latest }}
|
||||||
|
suffix=${{ inputs.aio }},onlatest=true
|
||||||
|
|
||||||
|
- name: Docker meta AIO (dockerhub)
|
||||||
|
if: inputs.aio != ''
|
||||||
|
id: meta_aio_dockerhub
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
localai/localai
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=semver,pattern={{raw}}
|
||||||
|
flavor: |
|
||||||
|
latest=${{ inputs.tag-latest }}
|
||||||
|
suffix=${{ inputs.aio }},onlatest=true
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@master
|
||||||
|
with:
|
||||||
|
platforms: all
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
id: buildx
|
||||||
|
uses: docker/setup-buildx-action@master
|
||||||
|
|
||||||
|
- name: Login to DockerHub
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.dockerUsername }}
|
||||||
|
password: ${{ secrets.dockerPassword }}
|
||||||
|
|
||||||
|
- name: Login to DockerHub
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: quay.io
|
||||||
|
username: ${{ secrets.quayUsername }}
|
||||||
|
password: ${{ secrets.quayPassword }}
|
||||||
|
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
if: github.event_name != 'pull_request'
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
# The build-args MUST be an EXACT match between the image cache and other workflow steps that want to use that cache.
|
||||||
|
# This means that even the MAKEFLAGS have to be an EXACT match.
|
||||||
|
# If the build-args are not an EXACT match, it will result in a cache miss, which will require GRPC to be built from scratch.
|
||||||
|
# This is why some build args like GRPC_VERSION and MAKEFLAGS are hardcoded
|
||||||
|
build-args: |
|
||||||
|
BUILD_TYPE=${{ inputs.build-type }}
|
||||||
|
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
||||||
|
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
||||||
|
BASE_IMAGE=${{ inputs.base-image }}
|
||||||
|
GRPC_BASE_IMAGE=${{ inputs.grpc-base-image || inputs.base-image }}
|
||||||
|
GRPC_MAKEFLAGS=--jobs=4 --output-sync=target
|
||||||
|
GRPC_VERSION=v1.65.0
|
||||||
|
MAKEFLAGS=${{ inputs.makeflags }}
|
||||||
|
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile
|
||||||
|
cache-from: type=gha
|
||||||
|
platforms: ${{ inputs.platforms }}
|
||||||
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
### Start testing image
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
# The build-args MUST be an EXACT match between the image cache and other workflow steps that want to use that cache.
|
||||||
|
# This means that even the MAKEFLAGS have to be an EXACT match.
|
||||||
|
# If the build-args are not an EXACT match, it will result in a cache miss, which will require GRPC to be built from scratch.
|
||||||
|
# This is why some build args like GRPC_VERSION and MAKEFLAGS are hardcoded
|
||||||
|
build-args: |
|
||||||
|
BUILD_TYPE=${{ inputs.build-type }}
|
||||||
|
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
||||||
|
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
||||||
|
BASE_IMAGE=${{ inputs.base-image }}
|
||||||
|
GRPC_BASE_IMAGE=${{ inputs.grpc-base-image || inputs.base-image }}
|
||||||
|
GRPC_MAKEFLAGS=--jobs=4 --output-sync=target
|
||||||
|
GRPC_VERSION=v1.65.0
|
||||||
|
MAKEFLAGS=${{ inputs.makeflags }}
|
||||||
|
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile
|
||||||
|
cache-from: type=gha
|
||||||
|
platforms: ${{ inputs.platforms }}
|
||||||
|
#push: true
|
||||||
|
tags: ${{ steps.meta_pull_request.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta_pull_request.outputs.labels }}
|
||||||
|
## End testing image
|
||||||
|
- name: Build and push AIO image
|
||||||
|
if: inputs.aio != ''
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
build-args: |
|
||||||
|
BASE_IMAGE=quay.io/go-skynet/local-ai:${{ steps.meta.outputs.version }}
|
||||||
|
MAKEFLAGS=${{ inputs.makeflags }}
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile.aio
|
||||||
|
platforms: ${{ inputs.platforms }}
|
||||||
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta_aio.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta_aio.outputs.labels }}
|
||||||
|
|
||||||
|
- name: Build and push AIO image (dockerhub)
|
||||||
|
if: inputs.aio != ''
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
builder: ${{ steps.buildx.outputs.name }}
|
||||||
|
build-args: |
|
||||||
|
BASE_IMAGE=localai/localai:${{ steps.meta.outputs.version }}
|
||||||
|
MAKEFLAGS=${{ inputs.makeflags }}
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile.aio
|
||||||
|
platforms: ${{ inputs.platforms }}
|
||||||
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta_aio_dockerhub.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta_aio_dockerhub.outputs.labels }}
|
||||||
|
|
||||||
|
- name: job summary
|
||||||
|
run: |
|
||||||
|
echo "Built image: ${{ steps.meta.outputs.labels }}" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
- name: job summary(AIO)
|
||||||
|
if: inputs.aio != ''
|
||||||
|
run: |
|
||||||
|
echo "Built image: ${{ steps.meta_aio.outputs.labels }}" >> $GITHUB_STEP_SUMMARY
|
||||||
12
.github/workflows/labeler.yml
vendored
Normal file
12
.github/workflows/labeler.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
name: "Pull Request Labeler"
|
||||||
|
on:
|
||||||
|
- pull_request_target
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
labeler:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/labeler@v5
|
||||||
35
.github/workflows/localaibot_automerge.yml
vendored
Normal file
35
.github/workflows/localaibot_automerge.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
name: LocalAI-bot auto-merge
|
||||||
|
on:
|
||||||
|
- pull_request_target
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
packages: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
dependabot:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ github.actor == 'localai-bot' }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Approve a PR if not already approved
|
||||||
|
run: |
|
||||||
|
gh pr checkout "$PR_URL"
|
||||||
|
if [ "$(gh pr status --json reviewDecision -q .currentBranch.reviewDecision)" != "APPROVED" ];
|
||||||
|
then
|
||||||
|
gh pr review --approve "$PR_URL"
|
||||||
|
else
|
||||||
|
echo "PR already approved.";
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
PR_URL: ${{github.event.pull_request.html_url}}
|
||||||
|
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||||
|
|
||||||
|
- name: Enable auto-merge for LocalAIBot PRs
|
||||||
|
run: gh pr merge --auto --squash "$PR_URL"
|
||||||
|
env:
|
||||||
|
PR_URL: ${{github.event.pull_request.html_url}}
|
||||||
|
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||||
168
.github/workflows/notify-models.yaml
vendored
Normal file
168
.github/workflows/notify-models.yaml
vendored
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
name: Notifications for new models
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- closed
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
notify-discord:
|
||||||
|
if: ${{ (github.event.pull_request.merged == true) && (contains(github.event.pull_request.labels.*.name, 'area/ai-model')) }}
|
||||||
|
env:
|
||||||
|
MODEL_NAME: gemma-3-12b-it
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0 # needed to checkout all branches for this Action to work
|
||||||
|
- uses: mudler/localai-github-action@v1
|
||||||
|
with:
|
||||||
|
model: 'gemma-3-12b-it' # Any from models.localai.io, or from huggingface.com with: "huggingface://<repository>/file"
|
||||||
|
# Check the PR diff using the current branch and the base branch of the PR
|
||||||
|
- uses: GrantBirki/git-diff-action@v2.8.1
|
||||||
|
id: git-diff-action
|
||||||
|
with:
|
||||||
|
json_diff_file_output: diff.json
|
||||||
|
raw_diff_file_output: diff.txt
|
||||||
|
file_output_only: "true"
|
||||||
|
- name: Summarize
|
||||||
|
env:
|
||||||
|
DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
|
||||||
|
id: summarize
|
||||||
|
run: |
|
||||||
|
input="$(cat $DIFF)"
|
||||||
|
|
||||||
|
# Define the LocalAI API endpoint
|
||||||
|
API_URL="http://localhost:8080/chat/completions"
|
||||||
|
|
||||||
|
# Create a JSON payload using jq to handle special characters
|
||||||
|
json_payload=$(jq -n --arg input "$input" '{
|
||||||
|
model: "'$MODEL_NAME'",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: "You are LocalAI-bot. Write a discord message to notify everyone about the new model from the git diff. Make it informal. An example can include: the URL of the model, the name, and a brief description of the model if exists. Also add an hint on how to install it in LocalAI and that can be browsed over https://models.localai.io. For example: local-ai run model_name_here"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: $input
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}')
|
||||||
|
|
||||||
|
# Send the request to LocalAI
|
||||||
|
response=$(curl -s -X POST $API_URL \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "$json_payload")
|
||||||
|
|
||||||
|
# Extract the summary from the response
|
||||||
|
summary="$(echo $response | jq -r '.choices[0].message.content')"
|
||||||
|
|
||||||
|
# Print the summary
|
||||||
|
# -H "Authorization: Bearer $API_KEY" \
|
||||||
|
echo "Summary:"
|
||||||
|
echo "$summary"
|
||||||
|
echo "payload sent"
|
||||||
|
echo "$json_payload"
|
||||||
|
{
|
||||||
|
echo 'message<<EOF'
|
||||||
|
echo "$summary"
|
||||||
|
echo EOF
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
docker logs --tail 10 local-ai
|
||||||
|
- name: Discord notification
|
||||||
|
env:
|
||||||
|
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||||
|
DISCORD_USERNAME: "LocalAI-Bot"
|
||||||
|
DISCORD_AVATAR: "https://avatars.githubusercontent.com/u/139863280?v=4"
|
||||||
|
uses: Ilshidur/action-discord@master
|
||||||
|
with:
|
||||||
|
args: ${{ steps.summarize.outputs.message }}
|
||||||
|
- name: Setup tmate session if fails
|
||||||
|
if: ${{ failure() }}
|
||||||
|
uses: mxschmitt/action-tmate@v3.22
|
||||||
|
with:
|
||||||
|
detached: true
|
||||||
|
connect-timeout-seconds: 180
|
||||||
|
limit-access-to-actor: true
|
||||||
|
notify-twitter:
|
||||||
|
if: ${{ (github.event.pull_request.merged == true) && (contains(github.event.pull_request.labels.*.name, 'area/ai-model')) }}
|
||||||
|
env:
|
||||||
|
MODEL_NAME: gemma-3-12b-it
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0 # needed to checkout all branches for this Action to work
|
||||||
|
- name: Start LocalAI
|
||||||
|
run: |
|
||||||
|
echo "Starting LocalAI..."
|
||||||
|
docker run -e -ti -d --name local-ai -p 8080:8080 localai/localai:master run --debug $MODEL_NAME
|
||||||
|
until [ "`docker inspect -f {{.State.Health.Status}} local-ai`" == "healthy" ]; do echo "Waiting for container to be ready"; docker logs --tail 10 local-ai; sleep 2; done
|
||||||
|
# Check the PR diff using the current branch and the base branch of the PR
|
||||||
|
- uses: GrantBirki/git-diff-action@v2.8.1
|
||||||
|
id: git-diff-action
|
||||||
|
with:
|
||||||
|
json_diff_file_output: diff.json
|
||||||
|
raw_diff_file_output: diff.txt
|
||||||
|
file_output_only: "true"
|
||||||
|
- name: Summarize
|
||||||
|
env:
|
||||||
|
DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
|
||||||
|
id: summarize
|
||||||
|
run: |
|
||||||
|
input="$(cat $DIFF)"
|
||||||
|
|
||||||
|
# Define the LocalAI API endpoint
|
||||||
|
API_URL="http://localhost:8080/chat/completions"
|
||||||
|
|
||||||
|
# Create a JSON payload using jq to handle special characters
|
||||||
|
json_payload=$(jq -n --arg input "$input" '{
|
||||||
|
model: "'$MODEL_NAME'",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: "You are LocalAI-bot. Write a twitter message to notify everyone about the new model from the git diff. Make it informal and really short. An example can include: the name, and a brief description of the model if exists. Also add an hint on how to install it in LocalAI. For example: local-ai run model_name_here"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: $input
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}')
|
||||||
|
|
||||||
|
# Send the request to LocalAI
|
||||||
|
response=$(curl -s -X POST $API_URL \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "$json_payload")
|
||||||
|
|
||||||
|
# Extract the summary from the response
|
||||||
|
summary="$(echo $response | jq -r '.choices[0].message.content')"
|
||||||
|
|
||||||
|
# Print the summary
|
||||||
|
# -H "Authorization: Bearer $API_KEY" \
|
||||||
|
echo "Summary:"
|
||||||
|
echo "$summary"
|
||||||
|
echo "payload sent"
|
||||||
|
echo "$json_payload"
|
||||||
|
{
|
||||||
|
echo 'message<<EOF'
|
||||||
|
echo "$summary"
|
||||||
|
echo EOF
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
docker logs --tail 10 local-ai
|
||||||
|
- uses: Eomm/why-don-t-you-tweet@v2
|
||||||
|
with:
|
||||||
|
tweet-message: ${{ steps.summarize.outputs.message }}
|
||||||
|
env:
|
||||||
|
# Get your tokens from https://developer.twitter.com/apps
|
||||||
|
TWITTER_CONSUMER_API_KEY: ${{ secrets.TWITTER_APP_KEY }}
|
||||||
|
TWITTER_CONSUMER_API_SECRET: ${{ secrets.TWITTER_APP_SECRET }}
|
||||||
|
TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
|
||||||
|
TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
|
||||||
|
- name: Setup tmate session if fails
|
||||||
|
if: ${{ failure() }}
|
||||||
|
uses: mxschmitt/action-tmate@v3.22
|
||||||
|
with:
|
||||||
|
detached: true
|
||||||
|
connect-timeout-seconds: 180
|
||||||
|
limit-access-to-actor: true
|
||||||
63
.github/workflows/notify-releases.yaml
vendored
Normal file
63
.github/workflows/notify-releases.yaml
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
name: Release notifications
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types:
|
||||||
|
- published
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
notify-discord:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
RELEASE_BODY: ${{ github.event.release.body }}
|
||||||
|
RELEASE_TITLE: ${{ github.event.release.name }}
|
||||||
|
RELEASE_TAG_NAME: ${{ github.event.release.tag_name }}
|
||||||
|
steps:
|
||||||
|
- uses: mudler/localai-github-action@v1
|
||||||
|
with:
|
||||||
|
model: 'gemma-3-12b-it' # Any from models.localai.io, or from huggingface.com with: "huggingface://<repository>/file"
|
||||||
|
- name: Summarize
|
||||||
|
id: summarize
|
||||||
|
run: |
|
||||||
|
input="$RELEASE_TITLE\b$RELEASE_BODY"
|
||||||
|
|
||||||
|
# Define the LocalAI API endpoint
|
||||||
|
API_URL="http://localhost:8080/chat/completions"
|
||||||
|
|
||||||
|
# Create a JSON payload using jq to handle special characters
|
||||||
|
json_payload=$(jq -n --arg input "$input" '{
|
||||||
|
model: "'$MODEL_NAME'",
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "system",
|
||||||
|
content: "Write a discord message with a bullet point summary of the release notes."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: $input
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}')
|
||||||
|
|
||||||
|
# Send the request to LocalAI API
|
||||||
|
response=$(curl -s -X POST $API_URL \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "$json_payload")
|
||||||
|
|
||||||
|
# Extract the summary from the response
|
||||||
|
summary=$(echo $response | jq -r '.choices[0].message.content')
|
||||||
|
|
||||||
|
# Print the summary
|
||||||
|
# -H "Authorization: Bearer $API_KEY" \
|
||||||
|
{
|
||||||
|
echo 'message<<EOF'
|
||||||
|
echo "$summary"
|
||||||
|
echo EOF
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
- name: Discord notification
|
||||||
|
env:
|
||||||
|
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_URL_RELEASE }}
|
||||||
|
DISCORD_USERNAME: "LocalAI-Bot"
|
||||||
|
DISCORD_AVATAR: "https://avatars.githubusercontent.com/u/139863280?v=4"
|
||||||
|
uses: Ilshidur/action-discord@master
|
||||||
|
with:
|
||||||
|
args: ${{ steps.summarize.outputs.message }}
|
||||||
28
.github/workflows/prlint.yaml
vendored
Normal file
28
.github/workflows/prlint.yaml
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
name: Check PR style
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request_target:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- reopened
|
||||||
|
- edited
|
||||||
|
- synchronize
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
title-lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
statuses: write
|
||||||
|
steps:
|
||||||
|
- uses: aslafy-z/conventional-pr-title-action@v3
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
# check-pr-description:
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# steps:
|
||||||
|
# - uses: actions/checkout@v2
|
||||||
|
# - uses: jadrol/pr-description-checker-action@v1.0.0
|
||||||
|
# id: description-checker
|
||||||
|
# with:
|
||||||
|
# repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
# exempt-labels: no qa
|
||||||
91
.github/workflows/release.yaml
vendored
91
.github/workflows/release.yaml
vendored
@@ -1,79 +1,26 @@
|
|||||||
name: Build and Release
|
name: goreleaser
|
||||||
|
|
||||||
on: push
|
on:
|
||||||
|
push:
|
||||||
permissions:
|
tags:
|
||||||
contents: write
|
- 'v*'
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-linux:
|
goreleaser:
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- build: 'avx2'
|
|
||||||
defines: ''
|
|
||||||
- build: 'avx'
|
|
||||||
defines: '-DLLAMA_AVX2=OFF'
|
|
||||||
- build: 'avx512'
|
|
||||||
defines: '-DLLAMA_AVX512=ON'
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Clone
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: true
|
fetch-depth: 0
|
||||||
- name: Dependencies
|
- name: Set up Go
|
||||||
run: |
|
uses: actions/setup-go@v5
|
||||||
sudo apt-get update
|
with:
|
||||||
sudo apt-get install build-essential ffmpeg
|
go-version: 1.23
|
||||||
- name: Build
|
- name: Run GoReleaser
|
||||||
id: build
|
uses: goreleaser/goreleaser-action@v6
|
||||||
|
with:
|
||||||
|
version: v2.11.0
|
||||||
|
args: release --clean
|
||||||
env:
|
env:
|
||||||
CMAKE_ARGS: "${{ matrix.defines }}"
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
BUILD_ID: "${{ matrix.build }}"
|
|
||||||
run: |
|
|
||||||
STATIC=true make dist
|
|
||||||
- uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: ${{ matrix.build }}
|
|
||||||
path: release/
|
|
||||||
- name: Release
|
|
||||||
uses: softprops/action-gh-release@v1
|
|
||||||
if: startsWith(github.ref, 'refs/tags/')
|
|
||||||
with:
|
|
||||||
files: |
|
|
||||||
release/*
|
|
||||||
|
|
||||||
build-macOS:
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- build: 'avx2'
|
|
||||||
defines: ''
|
|
||||||
- build: 'avx'
|
|
||||||
defines: '-DLLAMA_AVX2=OFF'
|
|
||||||
- build: 'avx512'
|
|
||||||
defines: '-DLLAMA_AVX512=ON'
|
|
||||||
runs-on: macOS-latest
|
|
||||||
steps:
|
|
||||||
- name: Clone
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
submodules: true
|
|
||||||
- name: Build
|
|
||||||
id: build
|
|
||||||
env:
|
|
||||||
CMAKE_ARGS: "${{ matrix.defines }}"
|
|
||||||
BUILD_ID: "${{ matrix.build }}"
|
|
||||||
run: |
|
|
||||||
make dist
|
|
||||||
- uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: ${{ matrix.build }}
|
|
||||||
path: release/
|
|
||||||
- name: Release
|
|
||||||
uses: softprops/action-gh-release@v1
|
|
||||||
if: startsWith(github.ref, 'refs/tags/')
|
|
||||||
with:
|
|
||||||
files: |
|
|
||||||
release/*
|
|
||||||
30
.github/workflows/secscan.yaml
vendored
Normal file
30
.github/workflows/secscan.yaml
vendored
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
name: "Security Scan"
|
||||||
|
|
||||||
|
# Run workflow each time code is pushed to your repository and on a schedule.
|
||||||
|
# The scheduled workflow runs every at 00:00 on Sunday UTC time.
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 0 * * 0'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
tests:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
GO111MODULE: on
|
||||||
|
steps:
|
||||||
|
- name: Checkout Source
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
if: ${{ github.actor != 'dependabot[bot]' }}
|
||||||
|
- name: Run Gosec Security Scanner
|
||||||
|
if: ${{ github.actor != 'dependabot[bot]' }}
|
||||||
|
uses: securego/gosec@v2.22.7
|
||||||
|
with:
|
||||||
|
# we let the report trigger content trigger a failure using the GitHub Security features.
|
||||||
|
args: '-no-fail -fmt sarif -out results.sarif ./...'
|
||||||
|
- name: Upload SARIF file
|
||||||
|
if: ${{ github.actor != 'dependabot[bot]' }}
|
||||||
|
uses: github/codeql-action/upload-sarif@v3
|
||||||
|
with:
|
||||||
|
# Path to SARIF file relative to the root of the repository
|
||||||
|
sarif_file: results.sarif
|
||||||
24
.github/workflows/stalebot.yml
vendored
Normal file
24
.github/workflows/stalebot.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
name: 'Close stale issues and PRs'
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '30 1 * * *'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
stale:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9
|
||||||
|
with:
|
||||||
|
stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 5 days.'
|
||||||
|
stale-pr-message: 'This PR is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 10 days.'
|
||||||
|
close-issue-message: 'This issue was closed because it has been stalled for 5 days with no activity.'
|
||||||
|
close-pr-message: 'This PR was closed because it has been stalled for 10 days with no activity.'
|
||||||
|
days-before-issue-stale: 90
|
||||||
|
days-before-pr-stale: 90
|
||||||
|
days-before-issue-close: 5
|
||||||
|
days-before-pr-close: 10
|
||||||
|
exempt-issue-labels: 'roadmap'
|
||||||
|
exempt-pr-labels: 'roadmap'
|
||||||
249
.github/workflows/test-extra.yml
vendored
Normal file
249
.github/workflows/test-extra.yml
vendored
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
---
|
||||||
|
name: 'Tests extras backends'
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
tags:
|
||||||
|
- '*'
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ci-tests-extra-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Requires CUDA
|
||||||
|
# tests-chatterbox-tts:
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# steps:
|
||||||
|
# - name: Clone
|
||||||
|
# uses: actions/checkout@v4
|
||||||
|
# with:
|
||||||
|
# submodules: true
|
||||||
|
# - name: Dependencies
|
||||||
|
# run: |
|
||||||
|
# sudo apt-get update
|
||||||
|
# sudo apt-get install build-essential ffmpeg
|
||||||
|
# # Install UV
|
||||||
|
# curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
# sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
# sudo apt-get install -y libopencv-dev
|
||||||
|
# pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
|
||||||
|
# - name: Test chatterbox-tts
|
||||||
|
# run: |
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/chatterbox
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/chatterbox test
|
||||||
|
tests-transformers:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Clone
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install build-essential ffmpeg
|
||||||
|
# Install UV
|
||||||
|
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
sudo apt-get install -y libopencv-dev
|
||||||
|
pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
|
||||||
|
- name: Test transformers
|
||||||
|
run: |
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/transformers
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/transformers test
|
||||||
|
tests-rerankers:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Clone
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install build-essential ffmpeg
|
||||||
|
# Install UV
|
||||||
|
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
sudo apt-get install -y libopencv-dev
|
||||||
|
pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
|
||||||
|
- name: Test rerankers
|
||||||
|
run: |
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/rerankers
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/rerankers test
|
||||||
|
|
||||||
|
tests-diffusers:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Clone
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y build-essential ffmpeg
|
||||||
|
sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
sudo apt-get install -y libopencv-dev
|
||||||
|
# Install UV
|
||||||
|
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
- name: Test diffusers
|
||||||
|
run: |
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/diffusers
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/diffusers test
|
||||||
|
|
||||||
|
#tests-vllm:
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# steps:
|
||||||
|
# - name: Clone
|
||||||
|
# uses: actions/checkout@v4
|
||||||
|
# with:
|
||||||
|
# submodules: true
|
||||||
|
# - name: Dependencies
|
||||||
|
# run: |
|
||||||
|
# sudo apt-get update
|
||||||
|
# sudo apt-get install -y build-essential ffmpeg
|
||||||
|
# sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
# sudo apt-get install -y libopencv-dev
|
||||||
|
# # Install UV
|
||||||
|
# curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
# pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
# - name: Test vllm backend
|
||||||
|
# run: |
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/vllm
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/vllm test
|
||||||
|
# tests-transformers-musicgen:
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# steps:
|
||||||
|
# - name: Clone
|
||||||
|
# uses: actions/checkout@v4
|
||||||
|
# with:
|
||||||
|
# submodules: true
|
||||||
|
# - name: Dependencies
|
||||||
|
# run: |
|
||||||
|
# sudo apt-get update
|
||||||
|
# sudo apt-get install build-essential ffmpeg
|
||||||
|
# # Install UV
|
||||||
|
# curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
# sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
# sudo apt-get install -y libopencv-dev
|
||||||
|
# pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
|
||||||
|
# - name: Test transformers-musicgen
|
||||||
|
# run: |
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/transformers-musicgen
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/transformers-musicgen test
|
||||||
|
|
||||||
|
# tests-bark:
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# steps:
|
||||||
|
# - name: Release space from worker
|
||||||
|
# run: |
|
||||||
|
# echo "Listing top largest packages"
|
||||||
|
# pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
# head -n 30 <<< "${pkgs}"
|
||||||
|
# echo
|
||||||
|
# df -h
|
||||||
|
# echo
|
||||||
|
# sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
||||||
|
# sudo apt-get remove --auto-remove android-sdk-platform-tools || true
|
||||||
|
# sudo apt-get purge --auto-remove android-sdk-platform-tools || true
|
||||||
|
# sudo rm -rf /usr/local/lib/android
|
||||||
|
# sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
||||||
|
# sudo rm -rf /usr/share/dotnet
|
||||||
|
# sudo apt-get remove -y '^mono-.*' || true
|
||||||
|
# sudo apt-get remove -y '^ghc-.*' || true
|
||||||
|
# sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
||||||
|
# sudo apt-get remove -y 'php.*' || true
|
||||||
|
# sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
||||||
|
# sudo apt-get remove -y '^google-.*' || true
|
||||||
|
# sudo apt-get remove -y azure-cli || true
|
||||||
|
# sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
||||||
|
# sudo apt-get remove -y '^gfortran-.*' || true
|
||||||
|
# sudo apt-get remove -y microsoft-edge-stable || true
|
||||||
|
# sudo apt-get remove -y firefox || true
|
||||||
|
# sudo apt-get remove -y powershell || true
|
||||||
|
# sudo apt-get remove -y r-base-core || true
|
||||||
|
# sudo apt-get autoremove -y
|
||||||
|
# sudo apt-get clean
|
||||||
|
# echo
|
||||||
|
# echo "Listing top largest packages"
|
||||||
|
# pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
# head -n 30 <<< "${pkgs}"
|
||||||
|
# echo
|
||||||
|
# sudo rm -rfv build || true
|
||||||
|
# sudo rm -rf /usr/share/dotnet || true
|
||||||
|
# sudo rm -rf /opt/ghc || true
|
||||||
|
# sudo rm -rf "/usr/local/share/boost" || true
|
||||||
|
# sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
|
||||||
|
# df -h
|
||||||
|
# - name: Clone
|
||||||
|
# uses: actions/checkout@v4
|
||||||
|
# with:
|
||||||
|
# submodules: true
|
||||||
|
# - name: Dependencies
|
||||||
|
# run: |
|
||||||
|
# sudo apt-get update
|
||||||
|
# sudo apt-get install build-essential ffmpeg
|
||||||
|
# # Install UV
|
||||||
|
# curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
# sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
# sudo apt-get install -y libopencv-dev
|
||||||
|
# pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
|
||||||
|
# - name: Test bark
|
||||||
|
# run: |
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/bark
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/bark test
|
||||||
|
|
||||||
|
|
||||||
|
# Below tests needs GPU. Commented out for now
|
||||||
|
# TODO: Re-enable as soon as we have GPU nodes
|
||||||
|
# tests-vllm:
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# steps:
|
||||||
|
# - name: Clone
|
||||||
|
# uses: actions/checkout@v4
|
||||||
|
# with:
|
||||||
|
# submodules: true
|
||||||
|
# - name: Dependencies
|
||||||
|
# run: |
|
||||||
|
# sudo apt-get update
|
||||||
|
# sudo apt-get install build-essential ffmpeg
|
||||||
|
# # Install UV
|
||||||
|
# curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
# sudo apt-get install -y ca-certificates cmake curl patch python3-pip
|
||||||
|
# sudo apt-get install -y libopencv-dev
|
||||||
|
# pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
# - name: Test vllm
|
||||||
|
# run: |
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/vllm
|
||||||
|
# make --jobs=5 --output-sync=target -C backend/python/vllm test
|
||||||
|
|
||||||
|
tests-coqui:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Clone
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install build-essential ffmpeg
|
||||||
|
sudo apt-get install -y ca-certificates cmake curl patch espeak espeak-ng python3-pip
|
||||||
|
# Install UV
|
||||||
|
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
pip install --user --no-cache-dir grpcio-tools==1.64.1
|
||||||
|
- name: Test coqui
|
||||||
|
run: |
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/coqui
|
||||||
|
make --jobs=5 --output-sync=target -C backend/python/coqui test
|
||||||
203
.github/workflows/test.yml
vendored
203
.github/workflows/test.yml
vendored
@@ -9,36 +9,215 @@ on:
|
|||||||
tags:
|
tags:
|
||||||
- '*'
|
- '*'
|
||||||
|
|
||||||
|
env:
|
||||||
|
GRPC_VERSION: v1.65.0
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ci-tests-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
group: ci-tests-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
ubuntu-latest:
|
tests-linux:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
go-version: ['1.21.x']
|
||||||
steps:
|
steps:
|
||||||
|
- name: Release space from worker
|
||||||
|
run: |
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
df -h
|
||||||
|
echo
|
||||||
|
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
||||||
|
sudo apt-get remove --auto-remove android-sdk-platform-tools || true
|
||||||
|
sudo apt-get purge --auto-remove android-sdk-platform-tools || true
|
||||||
|
sudo rm -rf /usr/local/lib/android
|
||||||
|
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo apt-get remove -y '^mono-.*' || true
|
||||||
|
sudo apt-get remove -y '^ghc-.*' || true
|
||||||
|
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
||||||
|
sudo apt-get remove -y 'php.*' || true
|
||||||
|
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
||||||
|
sudo apt-get remove -y '^google-.*' || true
|
||||||
|
sudo apt-get remove -y azure-cli || true
|
||||||
|
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
||||||
|
sudo apt-get remove -y '^gfortran-.*' || true
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
echo
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
sudo rm -rfv build || true
|
||||||
|
df -h
|
||||||
- name: Clone
|
- name: Clone
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
|
- name: Setup Go ${{ matrix.go-version }}
|
||||||
|
uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: ${{ matrix.go-version }}
|
||||||
|
cache: false
|
||||||
|
# You can test your matrix by printing the current Go version
|
||||||
|
- name: Display Go version
|
||||||
|
run: go version
|
||||||
|
- name: Proto Dependencies
|
||||||
|
run: |
|
||||||
|
# Install protoc
|
||||||
|
curl -L -s https://github.com/protocolbuffers/protobuf/releases/download/v26.1/protoc-26.1-linux-x86_64.zip -o protoc.zip && \
|
||||||
|
unzip -j -d /usr/local/bin protoc.zip bin/protoc && \
|
||||||
|
rm protoc.zip
|
||||||
|
go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
|
||||||
|
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
||||||
|
PATH="$PATH:$HOME/go/bin" make protogen-go
|
||||||
- name: Dependencies
|
- name: Dependencies
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install build-essential ffmpeg
|
sudo apt-get install build-essential ccache upx-ucl curl ffmpeg
|
||||||
|
sudo apt-get install -y libgmock-dev clang
|
||||||
|
# Install UV
|
||||||
|
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
sudo apt-get install -y ca-certificates cmake patch python3-pip unzip
|
||||||
|
sudo apt-get install -y libopencv-dev
|
||||||
|
|
||||||
|
curl -L -s https://github.com/protocolbuffers/protobuf/releases/download/v26.1/protoc-26.1-linux-x86_64.zip -o protoc.zip && \
|
||||||
|
unzip -j -d /usr/local/bin protoc.zip bin/protoc && \
|
||||||
|
rm protoc.zip
|
||||||
|
|
||||||
|
curl -O https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-keyring_1.1-1_all.deb
|
||||||
|
sudo dpkg -i cuda-keyring_1.1-1_all.deb
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y cuda-nvcc-${CUDA_VERSION} libcublas-dev-${CUDA_VERSION}
|
||||||
|
export CUDACXX=/usr/local/cuda/bin/nvcc
|
||||||
|
|
||||||
|
|
||||||
|
# The python3-grpc-tools package in 22.04 is too old
|
||||||
|
pip install --user grpcio-tools==1.71.0 grpcio==1.71.0
|
||||||
|
|
||||||
|
make -C backend/python/transformers
|
||||||
|
|
||||||
|
make backends/huggingface backends/llama-cpp backends/local-store backends/silero-vad backends/piper backends/whisper backends/stablediffusion-ggml
|
||||||
|
env:
|
||||||
|
CUDA_VERSION: 12-4
|
||||||
- name: Test
|
- name: Test
|
||||||
run: |
|
run: |
|
||||||
make test
|
PATH="$PATH:/root/go/bin" GO_TAGS="tts" make --jobs 5 --output-sync=target test
|
||||||
|
- name: Setup tmate session if tests fail
|
||||||
|
if: ${{ failure() }}
|
||||||
|
uses: mxschmitt/action-tmate@v3.22
|
||||||
|
with:
|
||||||
|
detached: true
|
||||||
|
connect-timeout-seconds: 180
|
||||||
|
limit-access-to-actor: true
|
||||||
|
|
||||||
macOS-latest:
|
tests-aio-container:
|
||||||
runs-on: macOS-latest
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Release space from worker
|
||||||
|
run: |
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
df -h
|
||||||
|
echo
|
||||||
|
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
||||||
|
sudo apt-get remove --auto-remove android-sdk-platform-tools || true
|
||||||
|
sudo apt-get purge --auto-remove android-sdk-platform-tools || true
|
||||||
|
sudo rm -rf /usr/local/lib/android
|
||||||
|
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo apt-get remove -y '^mono-.*' || true
|
||||||
|
sudo apt-get remove -y '^ghc-.*' || true
|
||||||
|
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
||||||
|
sudo apt-get remove -y 'php.*' || true
|
||||||
|
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
||||||
|
sudo apt-get remove -y '^google-.*' || true
|
||||||
|
sudo apt-get remove -y azure-cli || true
|
||||||
|
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
||||||
|
sudo apt-get remove -y '^gfortran-.*' || true
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
echo
|
||||||
|
echo "Listing top largest packages"
|
||||||
|
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
||||||
|
head -n 30 <<< "${pkgs}"
|
||||||
|
echo
|
||||||
|
sudo rm -rfv build || true
|
||||||
|
df -h
|
||||||
|
- name: Clone
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
# Install protoc
|
||||||
|
curl -L -s https://github.com/protocolbuffers/protobuf/releases/download/v26.1/protoc-26.1-linux-x86_64.zip -o protoc.zip && \
|
||||||
|
unzip -j -d /usr/local/bin protoc.zip bin/protoc && \
|
||||||
|
rm protoc.zip
|
||||||
|
go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
|
||||||
|
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
||||||
|
PATH="$PATH:$HOME/go/bin" make protogen-go
|
||||||
|
- name: Test
|
||||||
|
run: |
|
||||||
|
PATH="$PATH:$HOME/go/bin" make backends/local-store backends/silero-vad backends/llama-cpp backends/whisper backends/piper backends/stablediffusion-ggml docker-build-aio e2e-aio
|
||||||
|
- name: Setup tmate session if tests fail
|
||||||
|
if: ${{ failure() }}
|
||||||
|
uses: mxschmitt/action-tmate@v3.22
|
||||||
|
with:
|
||||||
|
detached: true
|
||||||
|
connect-timeout-seconds: 180
|
||||||
|
limit-access-to-actor: true
|
||||||
|
|
||||||
|
tests-apple:
|
||||||
|
runs-on: macOS-14
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
go-version: ['1.21.x']
|
||||||
steps:
|
steps:
|
||||||
- name: Clone
|
- name: Clone
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
|
- name: Setup Go ${{ matrix.go-version }}
|
||||||
|
uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: ${{ matrix.go-version }}
|
||||||
|
cache: false
|
||||||
|
# You can test your matrix by printing the current Go version
|
||||||
|
- name: Display Go version
|
||||||
|
run: go version
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
brew install protobuf grpc make protoc-gen-go protoc-gen-go-grpc libomp llvm
|
||||||
|
pip install --user --no-cache-dir grpcio-tools==1.71.0 grpcio==1.71.0
|
||||||
|
- name: Build llama-cpp-darwin
|
||||||
|
run: |
|
||||||
|
make protogen-go
|
||||||
|
make build
|
||||||
|
bash scripts/build-llama-cpp-darwin.sh
|
||||||
|
ls -la build/darwin.tar
|
||||||
|
mv build/darwin.tar build/llama-cpp.tar
|
||||||
|
./local-ai backends install "ocifile://$PWD/build/llama-cpp.tar"
|
||||||
- name: Test
|
- name: Test
|
||||||
run: |
|
run: |
|
||||||
CMAKE_ARGS="-DLLAMA_F16C=OFF -DLLAMA_AVX512=OFF -DLLAMA_AVX2=OFF -DLLAMA_FMA=OFF" make test
|
export C_INCLUDE_PATH=/usr/local/include
|
||||||
|
export CPLUS_INCLUDE_PATH=/usr/local/include
|
||||||
|
export CC=/opt/homebrew/opt/llvm/bin/clang
|
||||||
|
# Used to run the newer GNUMake version from brew that supports --output-sync
|
||||||
|
export PATH="/opt/homebrew/opt/make/libexec/gnubin:$PATH"
|
||||||
|
PATH="$PATH:$HOME/go/bin" make protogen-go
|
||||||
|
PATH="$PATH:$HOME/go/bin" BUILD_TYPE="GITHUB_CI_HAS_BROKEN_METAL" CMAKE_ARGS="-DGGML_F16C=OFF -DGGML_AVX512=OFF -DGGML_AVX2=OFF -DGGML_FMA=OFF" make --jobs 4 --output-sync=target test
|
||||||
|
- name: Setup tmate session if tests fail
|
||||||
|
if: ${{ failure() }}
|
||||||
|
uses: mxschmitt/action-tmate@v3.22
|
||||||
|
with:
|
||||||
|
detached: true
|
||||||
|
connect-timeout-seconds: 180
|
||||||
|
limit-access-to-actor: true
|
||||||
|
|||||||
37
.github/workflows/update_swagger.yaml
vendored
Normal file
37
.github/workflows/update_swagger.yaml
vendored
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
name: Update swagger
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: 0 20 * * *
|
||||||
|
workflow_dispatch:
|
||||||
|
jobs:
|
||||||
|
swagger:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: 'stable'
|
||||||
|
- name: Dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install protobuf-compiler
|
||||||
|
- run: |
|
||||||
|
go install github.com/swaggo/swag/cmd/swag@latest
|
||||||
|
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
||||||
|
go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
|
||||||
|
- name: Bump swagger 🔧
|
||||||
|
run: |
|
||||||
|
make protogen-go swagger
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@v7
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
||||||
|
push-to-fork: ci-forks/LocalAI
|
||||||
|
commit-message: 'feat(swagger): update swagger'
|
||||||
|
title: 'feat(swagger): update swagger'
|
||||||
|
branch: "update/swagger"
|
||||||
|
body: Update swagger
|
||||||
|
signoff: true
|
||||||
|
|
||||||
26
.github/workflows/yaml-check.yml
vendored
Normal file
26
.github/workflows/yaml-check.yml
vendored
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
name: 'Yamllint GitHub Actions'
|
||||||
|
on:
|
||||||
|
- pull_request
|
||||||
|
jobs:
|
||||||
|
yamllint:
|
||||||
|
name: 'Yamllint'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: 'Checkout'
|
||||||
|
uses: actions/checkout@master
|
||||||
|
- name: 'Yamllint model gallery'
|
||||||
|
uses: karancode/yamllint-github-action@master
|
||||||
|
with:
|
||||||
|
yamllint_file_or_dir: 'gallery'
|
||||||
|
yamllint_strict: false
|
||||||
|
yamllint_comment: true
|
||||||
|
env:
|
||||||
|
GITHUB_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: 'Yamllint Backend gallery'
|
||||||
|
uses: karancode/yamllint-github-action@master
|
||||||
|
with:
|
||||||
|
yamllint_file_or_dir: 'backend'
|
||||||
|
yamllint_strict: false
|
||||||
|
yamllint_comment: true
|
||||||
|
env:
|
||||||
|
GITHUB_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
45
.gitignore
vendored
45
.gitignore
vendored
@@ -1,12 +1,24 @@
|
|||||||
# go-llama build artifacts
|
# go-llama build artifacts
|
||||||
go-llama
|
/sources/
|
||||||
gpt4all
|
__pycache__/
|
||||||
go-stable-diffusion
|
*.a
|
||||||
|
*.o
|
||||||
|
get-sources
|
||||||
|
prepare-sources
|
||||||
|
/backend/cpp/llama-cpp/grpc-server
|
||||||
|
/backend/cpp/llama-cpp/llama.cpp
|
||||||
|
/backend/cpp/llama-*
|
||||||
|
!backend/cpp/llama-cpp
|
||||||
|
/backends
|
||||||
|
/backend-images
|
||||||
|
/result.yaml
|
||||||
|
|
||||||
|
*.log
|
||||||
|
|
||||||
go-ggml-transformers
|
go-ggml-transformers
|
||||||
go-gpt2
|
go-gpt2
|
||||||
go-rwkv
|
|
||||||
whisper.cpp
|
whisper.cpp
|
||||||
bloomz
|
/bloomz
|
||||||
go-bert
|
go-bert
|
||||||
|
|
||||||
# LocalAI build binary
|
# LocalAI build binary
|
||||||
@@ -14,6 +26,9 @@ LocalAI
|
|||||||
local-ai
|
local-ai
|
||||||
# prevent above rules from omitting the helm chart
|
# prevent above rules from omitting the helm chart
|
||||||
!charts/*
|
!charts/*
|
||||||
|
# prevent above rules from omitting the api/localai folder
|
||||||
|
!api/localai
|
||||||
|
!core/**/localai
|
||||||
|
|
||||||
# Ignore models
|
# Ignore models
|
||||||
models/*
|
models/*
|
||||||
@@ -27,6 +42,22 @@ release/
|
|||||||
.idea
|
.idea
|
||||||
|
|
||||||
# Generated during build
|
# Generated during build
|
||||||
backend-assets/
|
backend-assets/*
|
||||||
|
!backend-assets/.keep
|
||||||
|
prepare
|
||||||
|
/ggml-metal.metal
|
||||||
|
docs/static/gallery.html
|
||||||
|
|
||||||
/ggml-metal.metal
|
# Protobuf generated files
|
||||||
|
*.pb.go
|
||||||
|
*pb2.py
|
||||||
|
*pb2_grpc.py
|
||||||
|
|
||||||
|
# SonarQube
|
||||||
|
.scannerwork
|
||||||
|
|
||||||
|
# backend virtual environments
|
||||||
|
**/venv
|
||||||
|
|
||||||
|
# per-developer customization files for the development container
|
||||||
|
.devcontainer/customization/*
|
||||||
|
|||||||
6
.gitmodules
vendored
Normal file
6
.gitmodules
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
[submodule "docs/themes/hugo-theme-relearn"]
|
||||||
|
path = docs/themes/hugo-theme-relearn
|
||||||
|
url = https://github.com/McShelby/hugo-theme-relearn.git
|
||||||
|
[submodule "docs/themes/lotusdocs"]
|
||||||
|
path = docs/themes/lotusdocs
|
||||||
|
url = https://github.com/colinwilson/lotusdocs
|
||||||
33
.goreleaser.yaml
Normal file
33
.goreleaser.yaml
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
version: 2
|
||||||
|
before:
|
||||||
|
hooks:
|
||||||
|
- make protogen-go
|
||||||
|
- go mod tidy
|
||||||
|
dist: release
|
||||||
|
source:
|
||||||
|
enabled: true
|
||||||
|
name_template: '{{ .ProjectName }}-{{ .Tag }}-source'
|
||||||
|
builds:
|
||||||
|
-
|
||||||
|
env:
|
||||||
|
- CGO_ENABLED=0
|
||||||
|
ldflags:
|
||||||
|
- -s -w
|
||||||
|
- -X "github.com/mudler/LocalAI/internal.Version={{ .Tag }}"
|
||||||
|
- -X "github.com/mudler/LocalAI/internal.Commit={{ .FullCommit }}"
|
||||||
|
goos:
|
||||||
|
- linux
|
||||||
|
- darwin
|
||||||
|
#- windows
|
||||||
|
goarch:
|
||||||
|
- amd64
|
||||||
|
- arm64
|
||||||
|
archives:
|
||||||
|
- formats: [ 'binary' ] # this removes the tar of the archives, leaving the binaries alone
|
||||||
|
name_template: local-ai-{{ .Tag }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}
|
||||||
|
checksum:
|
||||||
|
name_template: '{{ .ProjectName }}-{{ .Tag }}-checksums.txt'
|
||||||
|
snapshot:
|
||||||
|
version_template: "{{ .Tag }}-next"
|
||||||
|
changelog:
|
||||||
|
use: github-native
|
||||||
5
.vscode/extensions.json
vendored
Normal file
5
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"recommendations": [
|
||||||
|
"golang.go"
|
||||||
|
]
|
||||||
|
}
|
||||||
21
.vscode/launch.json
vendored
21
.vscode/launch.json
vendored
@@ -3,12 +3,12 @@
|
|||||||
"configurations": [
|
"configurations": [
|
||||||
{
|
{
|
||||||
"name": "Python: Current File",
|
"name": "Python: Current File",
|
||||||
"type": "python",
|
"type": "debugpy",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"program": "${file}",
|
"program": "${file}",
|
||||||
"console": "integratedTerminal",
|
"console": "integratedTerminal",
|
||||||
"justMyCode": false,
|
"justMyCode": false,
|
||||||
"cwd": "${workspaceFolder}/examples/langchain-chroma",
|
"cwd": "${fileDirname}",
|
||||||
"env": {
|
"env": {
|
||||||
"OPENAI_API_BASE": "http://localhost:8080/v1",
|
"OPENAI_API_BASE": "http://localhost:8080/v1",
|
||||||
"OPENAI_API_KEY": "abc"
|
"OPENAI_API_KEY": "abc"
|
||||||
@@ -19,15 +19,16 @@
|
|||||||
"type": "go",
|
"type": "go",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"mode": "debug",
|
"mode": "debug",
|
||||||
"program": "${workspaceFolder}/main.go",
|
"program": "${workspaceRoot}",
|
||||||
"args": [
|
"args": [],
|
||||||
"api"
|
|
||||||
],
|
|
||||||
"env": {
|
"env": {
|
||||||
"C_INCLUDE_PATH": "${workspaceFolder}/go-llama:${workspaceFolder}/go-stable-diffusion/:${workspaceFolder}/gpt4all/gpt4all-bindings/golang/:${workspaceFolder}/go-gpt2:${workspaceFolder}/go-rwkv:${workspaceFolder}/whisper.cpp:${workspaceFolder}/go-bert:${workspaceFolder}/bloomz",
|
"LOCALAI_LOG_LEVEL": "debug",
|
||||||
"LIBRARY_PATH": "${workspaceFolder}/go-llama:${workspaceFolder}/go-stable-diffusion/:${workspaceFolder}/gpt4all/gpt4all-bindings/golang/:${workspaceFolder}/go-gpt2:${workspaceFolder}/go-rwkv:${workspaceFolder}/whisper.cpp:${workspaceFolder}/go-bert:${workspaceFolder}/bloomz",
|
"LOCALAI_P2P": "true",
|
||||||
"DEBUG": "true"
|
"LOCALAI_FEDERATED": "true"
|
||||||
}
|
},
|
||||||
|
"buildFlags": ["-tags", "", "-v"],
|
||||||
|
"envFile": "${workspaceFolder}/.env",
|
||||||
|
"cwd": "${workspaceRoot}"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
84
CONTRIBUTING.md
Normal file
84
CONTRIBUTING.md
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
# Contributing to LocalAI
|
||||||
|
|
||||||
|
Thank you for your interest in contributing to LocalAI! We appreciate your time and effort in helping to improve our project. Before you get started, please take a moment to review these guidelines.
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- [Getting Started](#getting-started)
|
||||||
|
- [Prerequisites](#prerequisites)
|
||||||
|
- [Setting up the Development Environment](#setting-up-the-development-environment)
|
||||||
|
- [Contributing](#contributing)
|
||||||
|
- [Submitting an Issue](#submitting-an-issue)
|
||||||
|
- [Creating a Pull Request (PR)](#creating-a-pull-request-pr)
|
||||||
|
- [Coding Guidelines](#coding-guidelines)
|
||||||
|
- [Testing](#testing)
|
||||||
|
- [Documentation](#documentation)
|
||||||
|
- [Community and Communication](#community-and-communication)
|
||||||
|
|
||||||
|
## Getting Started
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- Golang [1.21]
|
||||||
|
- Git
|
||||||
|
- macOS/Linux
|
||||||
|
|
||||||
|
### Setting up the Development Environment and running localAI in the local environment
|
||||||
|
|
||||||
|
1. Clone the repository: `git clone https://github.com/go-skynet/LocalAI.git`
|
||||||
|
2. Navigate to the project directory: `cd LocalAI`
|
||||||
|
3. Install the required dependencies ( see https://localai.io/basics/build/#build-localai-locally )
|
||||||
|
4. Build LocalAI: `make build`
|
||||||
|
5. Run LocalAI: `./local-ai`
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
We welcome contributions from everyone! To get started, follow these steps:
|
||||||
|
|
||||||
|
### Submitting an Issue
|
||||||
|
|
||||||
|
If you find a bug, have a feature request, or encounter any issues, please check the [issue tracker](https://github.com/go-skynet/LocalAI/issues) to see if a similar issue has already been reported. If not, feel free to [create a new issue](https://github.com/go-skynet/LocalAI/issues/new) and provide as much detail as possible.
|
||||||
|
|
||||||
|
### Creating a Pull Request (PR)
|
||||||
|
|
||||||
|
1. Fork the repository.
|
||||||
|
2. Create a new branch with a descriptive name: `git checkout -b [branch name]`
|
||||||
|
3. Make your changes and commit them.
|
||||||
|
4. Push the changes to your fork: `git push origin [branch name]`
|
||||||
|
5. Create a new pull request from your branch to the main project's `main` or `master` branch.
|
||||||
|
6. Provide a clear description of your changes in the pull request.
|
||||||
|
7. Make any requested changes during the review process.
|
||||||
|
8. Once your PR is approved, it will be merged into the main project.
|
||||||
|
|
||||||
|
## Coding Guidelines
|
||||||
|
|
||||||
|
- No specific coding guidelines at the moment. Please make sure the code can be tested. The most popular lint tools like [`golangci-lint`](https://golangci-lint.run) can help you here.
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
`make test` cannot handle all the model now. Please be sure to add a test case for the new features or the part was changed.
|
||||||
|
|
||||||
|
### Running AIO tests
|
||||||
|
|
||||||
|
All-In-One images has a set of tests that automatically verifies that most of the endpoints works correctly, a flow can be :
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build the LocalAI docker image
|
||||||
|
make DOCKER_IMAGE=local-ai docker
|
||||||
|
|
||||||
|
# Build the corresponding AIO image
|
||||||
|
BASE_IMAGE=local-ai DOCKER_AIO_IMAGE=local-ai-aio:test make docker-aio
|
||||||
|
|
||||||
|
# Run the AIO e2e tests
|
||||||
|
LOCALAI_IMAGE_TAG=test LOCALAI_IMAGE=local-ai-aio make run-e2e-aio
|
||||||
|
```
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
We are welcome the contribution of the documents, please open new PR or create a new issue. The documentation is available under `docs/` https://github.com/mudler/LocalAI/tree/master/docs
|
||||||
|
|
||||||
|
## Community and Communication
|
||||||
|
|
||||||
|
- You can reach out via the Github issue tracker.
|
||||||
|
- Open a new discussion at [Discussion](https://github.com/go-skynet/LocalAI/discussions)
|
||||||
|
- Join the Discord channel [Discord](https://discord.gg/uJAeKSAGDy)
|
||||||
359
Dockerfile
359
Dockerfile
@@ -1,118 +1,315 @@
|
|||||||
ARG GO_VERSION=1.20-bullseye
|
ARG BASE_IMAGE=ubuntu:22.04
|
||||||
|
ARG GRPC_BASE_IMAGE=${BASE_IMAGE}
|
||||||
|
ARG INTEL_BASE_IMAGE=${BASE_IMAGE}
|
||||||
|
|
||||||
FROM golang:$GO_VERSION as requirements
|
FROM ${BASE_IMAGE} AS requirements
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
ca-certificates curl wget espeak-ng libgomp1 \
|
||||||
|
python3 python-is-python3 ffmpeg && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# The requirements-drivers target is for BUILD_TYPE specific items. If you need to install something specific to CUDA, or specific to ROCM, it goes here.
|
||||||
|
FROM requirements AS requirements-drivers
|
||||||
|
|
||||||
ARG BUILD_TYPE
|
ARG BUILD_TYPE
|
||||||
ARG CUDA_MAJOR_VERSION=11
|
ARG CUDA_MAJOR_VERSION=12
|
||||||
ARG CUDA_MINOR_VERSION=7
|
ARG CUDA_MINOR_VERSION=0
|
||||||
ARG SPDLOG_VERSION="1.11.0"
|
ARG SKIP_DRIVERS=false
|
||||||
ARG PIPER_PHONEMIZE_VERSION='1.0.0'
|
ARG TARGETARCH
|
||||||
|
ARG TARGETVARIANT
|
||||||
|
ENV BUILD_TYPE=${BUILD_TYPE}
|
||||||
|
|
||||||
|
RUN mkdir -p /run/localai
|
||||||
|
RUN echo "default" > /run/localai/capability
|
||||||
|
|
||||||
|
# Vulkan requirements
|
||||||
|
RUN <<EOT bash
|
||||||
|
if [ "${BUILD_TYPE}" = "vulkan" ] && [ "${SKIP_DRIVERS}" = "false" ]; then
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
software-properties-common pciutils wget gpg-agent && \
|
||||||
|
wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key add - && \
|
||||||
|
wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y \
|
||||||
|
vulkan-sdk && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/* && \
|
||||||
|
echo "vulkan" > /run/localai/capability
|
||||||
|
fi
|
||||||
|
EOT
|
||||||
|
|
||||||
|
# CuBLAS requirements
|
||||||
|
RUN <<EOT bash
|
||||||
|
if [ "${BUILD_TYPE}" = "cublas" ] && [ "${SKIP_DRIVERS}" = "false" ]; then
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
software-properties-common pciutils
|
||||||
|
if [ "amd64" = "$TARGETARCH" ]; then
|
||||||
|
curl -O https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-keyring_1.1-1_all.deb
|
||||||
|
fi
|
||||||
|
if [ "arm64" = "$TARGETARCH" ]; then
|
||||||
|
curl -O https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/arm64/cuda-keyring_1.1-1_all.deb
|
||||||
|
fi
|
||||||
|
dpkg -i cuda-keyring_1.1-1_all.deb && \
|
||||||
|
rm -f cuda-keyring_1.1-1_all.deb && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
cuda-nvcc-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} \
|
||||||
|
libcufft-dev-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} \
|
||||||
|
libcurand-dev-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} \
|
||||||
|
libcublas-dev-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} \
|
||||||
|
libcusparse-dev-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} \
|
||||||
|
libcusolver-dev-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/* && \
|
||||||
|
echo "nvidia" > /run/localai/capability
|
||||||
|
fi
|
||||||
|
EOT
|
||||||
|
|
||||||
|
# If we are building with clblas support, we need the libraries for the builds
|
||||||
|
RUN if [ "${BUILD_TYPE}" = "clblas" ] && [ "${SKIP_DRIVERS}" = "false" ]; then \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
libclblast-dev && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/* \
|
||||||
|
; fi
|
||||||
|
|
||||||
|
RUN if [ "${BUILD_TYPE}" = "hipblas" ] && [ "${SKIP_DRIVERS}" = "false" ]; then \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
hipblas-dev \
|
||||||
|
rocblas-dev && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/* && \
|
||||||
|
echo "amd" > /run/localai/capability && \
|
||||||
|
# I have no idea why, but the ROCM lib packages don't trigger ldconfig after they install, which results in local-ai and others not being able
|
||||||
|
# to locate the libraries. We run ldconfig ourselves to work around this packaging deficiency
|
||||||
|
ldconfig \
|
||||||
|
; fi
|
||||||
|
|
||||||
|
# Cuda
|
||||||
|
ENV PATH=/usr/local/cuda/bin:${PATH}
|
||||||
|
|
||||||
|
# HipBLAS requirements
|
||||||
|
ENV PATH=/opt/rocm/bin:${PATH}
|
||||||
|
|
||||||
|
###################################
|
||||||
|
###################################
|
||||||
|
|
||||||
|
# The requirements-core target is common to all images. It should not be placed in requirements-core unless every single build will use it.
|
||||||
|
FROM requirements-drivers AS build-requirements
|
||||||
|
|
||||||
|
ARG GO_VERSION=1.22.6
|
||||||
|
ARG CMAKE_VERSION=3.26.4
|
||||||
|
ARG CMAKE_FROM_SOURCE=false
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
ARG TARGETVARIANT
|
ARG TARGETVARIANT
|
||||||
|
|
||||||
ENV BUILD_TYPE=${BUILD_TYPE}
|
|
||||||
ARG GO_TAGS="stablediffusion tts"
|
|
||||||
|
|
||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
apt-get install -y ca-certificates cmake curl patch
|
apt-get install -y --no-install-recommends \
|
||||||
|
build-essential \
|
||||||
|
ccache \
|
||||||
|
ca-certificates espeak-ng \
|
||||||
|
curl libssl-dev \
|
||||||
|
git \
|
||||||
|
git-lfs \
|
||||||
|
unzip upx-ucl python3 python-is-python3 && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# CuBLAS requirements
|
# Install CMake (the version in 22.04 is too old)
|
||||||
RUN if [ "${BUILD_TYPE}" = "cublas" ]; then \
|
RUN <<EOT bash
|
||||||
apt-get install -y software-properties-common && \
|
if [ "${CMAKE_FROM_SOURCE}" = "true" ]; then
|
||||||
apt-add-repository contrib && \
|
curl -L -s https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}.tar.gz -o cmake.tar.gz && tar xvf cmake.tar.gz && cd cmake-${CMAKE_VERSION} && ./configure && make && make install
|
||||||
curl -O https://developer.download.nvidia.com/compute/cuda/repos/debian11/x86_64/cuda-keyring_1.0-1_all.deb && \
|
else
|
||||||
dpkg -i cuda-keyring_1.0-1_all.deb && \
|
apt-get update && \
|
||||||
rm -f cuda-keyring_1.0-1_all.deb && \
|
apt-get install -y \
|
||||||
apt-get update && \
|
cmake && \
|
||||||
apt-get install -y cuda-nvcc-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} libcublas-dev-${CUDA_MAJOR_VERSION}-${CUDA_MINOR_VERSION} \
|
apt-get clean && \
|
||||||
; fi
|
rm -rf /var/lib/apt/lists/*
|
||||||
ENV PATH /usr/local/cuda/bin:${PATH}
|
fi
|
||||||
|
EOT
|
||||||
|
|
||||||
WORKDIR /build
|
# Install Go
|
||||||
|
RUN curl -L -s https://go.dev/dl/go${GO_VERSION}.linux-${TARGETARCH}.tar.gz | tar -C /usr/local -xz
|
||||||
|
ENV PATH=$PATH:/root/go/bin:/usr/local/go/bin
|
||||||
|
|
||||||
# OpenBLAS requirements
|
# Install grpc compilers
|
||||||
RUN apt-get install -y libopenblas-dev
|
RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2 && \
|
||||||
|
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
||||||
|
|
||||||
# Stable Diffusion requirements
|
COPY --chmod=644 custom-ca-certs/* /usr/local/share/ca-certificates/
|
||||||
RUN apt-get install -y libopencv-dev && \
|
RUN update-ca-certificates
|
||||||
ln -s /usr/include/opencv4/opencv2 /usr/include/opencv2
|
|
||||||
|
|
||||||
|
# OpenBLAS requirements and stable diffusion
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
libopenblas-dev && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN test -n "$TARGETARCH" \
|
||||||
|
|| (echo 'warn: missing $TARGETARCH, either set this `ARG` manually, or run using `docker buildkit`')
|
||||||
|
|
||||||
# Use the variables in subsequent instructions
|
# Use the variables in subsequent instructions
|
||||||
RUN echo "Target Architecture: $TARGETARCH"
|
RUN echo "Target Architecture: $TARGETARCH"
|
||||||
RUN echo "Target Variant: $TARGETVARIANT"
|
RUN echo "Target Variant: $TARGETVARIANT"
|
||||||
|
|
||||||
# piper requirements
|
|
||||||
# Use pre-compiled Piper phonemization library (includes onnxruntime)
|
|
||||||
#RUN if echo "${GO_TAGS}" | grep -q "tts"; then \
|
|
||||||
RUN test -n "$TARGETARCH" \
|
|
||||||
|| (echo 'warn: missing $TARGETARCH, either set this `ARG` manually, or run using `docker buildkit`')
|
|
||||||
|
|
||||||
RUN curl -L "https://github.com/gabime/spdlog/archive/refs/tags/v${SPDLOG_VERSION}.tar.gz" | \
|
|
||||||
tar -xzvf - && \
|
|
||||||
mkdir -p "spdlog-${SPDLOG_VERSION}/build" && \
|
WORKDIR /build
|
||||||
cd "spdlog-${SPDLOG_VERSION}/build" && \
|
|
||||||
cmake .. && \
|
|
||||||
make -j8 && \
|
|
||||||
cmake --install . --prefix /usr && mkdir -p "lib/Linux-$(uname -m)" && \
|
|
||||||
cd /build && \
|
|
||||||
mkdir -p "lib/Linux-$(uname -m)/piper_phonemize" && \
|
|
||||||
curl -L "https://github.com/rhasspy/piper-phonemize/releases/download/v${PIPER_PHONEMIZE_VERSION}/libpiper_phonemize-${TARGETARCH:-$(go env GOARCH)}${TARGETVARIANT}.tar.gz" | \
|
|
||||||
tar -C "lib/Linux-$(uname -m)/piper_phonemize" -xzvf - && ls -liah /build/lib/Linux-$(uname -m)/piper_phonemize/ && \
|
|
||||||
cp -rfv /build/lib/Linux-$(uname -m)/piper_phonemize/lib/. /lib64/ && \
|
|
||||||
cp -rfv /build/lib/Linux-$(uname -m)/piper_phonemize/lib/. /usr/lib/ && \
|
|
||||||
cp -rfv /build/lib/Linux-$(uname -m)/piper_phonemize/include/. /usr/include/
|
|
||||||
# \
|
|
||||||
# ; fi
|
|
||||||
|
|
||||||
###################################
|
###################################
|
||||||
###################################
|
###################################
|
||||||
|
|
||||||
FROM requirements as builder
|
# Temporary workaround for Intel's repository to work correctly
|
||||||
|
# https://community.intel.com/t5/Intel-oneAPI-Math-Kernel-Library/APT-Repository-not-working-signatures-invalid/m-p/1599436/highlight/true#M36143
|
||||||
|
# This is a temporary workaround until Intel fixes their repository
|
||||||
|
FROM ${INTEL_BASE_IMAGE} AS intel
|
||||||
|
RUN wget -qO - https://repositories.intel.com/gpu/intel-graphics.key | \
|
||||||
|
gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg
|
||||||
|
RUN echo "deb [arch=amd64 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy/lts/2350 unified" > /etc/apt/sources.list.d/intel-graphics.list
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
intel-oneapi-runtime-libs && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
ARG GO_TAGS="stablediffusion tts"
|
###################################
|
||||||
|
###################################
|
||||||
|
|
||||||
|
# The builder-base target has the arguments, variables, and copies shared between full builder images and the uncompiled devcontainer
|
||||||
|
|
||||||
|
FROM build-requirements AS builder-base
|
||||||
|
|
||||||
|
ARG GO_TAGS=""
|
||||||
|
ARG GRPC_BACKENDS
|
||||||
|
ARG MAKEFLAGS
|
||||||
|
ARG LD_FLAGS="-s -w"
|
||||||
|
ARG TARGETARCH
|
||||||
|
ARG TARGETVARIANT
|
||||||
|
ENV GRPC_BACKENDS=${GRPC_BACKENDS}
|
||||||
ENV GO_TAGS=${GO_TAGS}
|
ENV GO_TAGS=${GO_TAGS}
|
||||||
|
ENV MAKEFLAGS=${MAKEFLAGS}
|
||||||
|
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
|
||||||
|
ENV NVIDIA_REQUIRE_CUDA="cuda>=${CUDA_MAJOR_VERSION}.0"
|
||||||
|
ENV NVIDIA_VISIBLE_DEVICES=all
|
||||||
|
ENV LD_FLAGS=${LD_FLAGS}
|
||||||
|
|
||||||
|
RUN echo "GO_TAGS: $GO_TAGS" && echo "TARGETARCH: $TARGETARCH"
|
||||||
|
|
||||||
|
WORKDIR /build
|
||||||
|
|
||||||
|
|
||||||
|
# We need protoc installed, and the version in 22.04 is too old.
|
||||||
|
RUN <<EOT bash
|
||||||
|
if [ "amd64" = "$TARGETARCH" ]; then
|
||||||
|
curl -L -s https://github.com/protocolbuffers/protobuf/releases/download/v27.1/protoc-27.1-linux-x86_64.zip -o protoc.zip && \
|
||||||
|
unzip -j -d /usr/local/bin protoc.zip bin/protoc && \
|
||||||
|
rm protoc.zip
|
||||||
|
fi
|
||||||
|
if [ "arm64" = "$TARGETARCH" ]; then
|
||||||
|
curl -L -s https://github.com/protocolbuffers/protobuf/releases/download/v27.1/protoc-27.1-linux-aarch_64.zip -o protoc.zip && \
|
||||||
|
unzip -j -d /usr/local/bin protoc.zip bin/protoc && \
|
||||||
|
rm protoc.zip
|
||||||
|
fi
|
||||||
|
EOT
|
||||||
|
|
||||||
|
###################################
|
||||||
|
###################################
|
||||||
|
|
||||||
|
# Compile backends first in a separate stage
|
||||||
|
FROM builder-base AS builder-backends
|
||||||
|
ARG TARGETARCH
|
||||||
|
ARG TARGETVARIANT
|
||||||
|
|
||||||
|
WORKDIR /build
|
||||||
|
|
||||||
|
COPY ./Makefile .
|
||||||
|
COPY ./backend ./backend
|
||||||
|
COPY ./go.mod .
|
||||||
|
COPY ./go.sum .
|
||||||
|
COPY ./.git ./.git
|
||||||
|
|
||||||
|
# Some of the Go backends use libs from the main src, we could further optimize the caching by building the CPP backends before here
|
||||||
|
COPY ./pkg/grpc ./pkg/grpc
|
||||||
|
COPY ./pkg/utils ./pkg/utils
|
||||||
|
COPY ./pkg/langchain ./pkg/langchain
|
||||||
|
|
||||||
|
RUN ls -l ./
|
||||||
|
RUN make protogen-go
|
||||||
|
|
||||||
|
# The builder target compiles LocalAI. This target is not the target that will be uploaded to the registry.
|
||||||
|
# Adjustments to the build process should likely be made here.
|
||||||
|
FROM builder-backends AS builder
|
||||||
|
|
||||||
|
WORKDIR /build
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
## Build the binary
|
||||||
|
## If we're on arm64 AND using cublas/hipblas, skip some of the llama-compat backends to save space
|
||||||
|
## Otherwise just run the normal build
|
||||||
|
RUN make build
|
||||||
|
|
||||||
|
###################################
|
||||||
|
###################################
|
||||||
|
|
||||||
|
# The devcontainer target is not used on CI. It is a target for developers to use locally -
|
||||||
|
# rather than copying files it mounts them locally and leaves building to the developer
|
||||||
|
|
||||||
|
FROM builder-base AS devcontainer
|
||||||
|
|
||||||
|
COPY .devcontainer-scripts /.devcontainer-scripts
|
||||||
|
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
ssh less
|
||||||
|
# For the devcontainer, leave apt functional in case additional devtools are needed at runtime.
|
||||||
|
|
||||||
|
RUN go install github.com/go-delve/delve/cmd/dlv@latest
|
||||||
|
|
||||||
|
RUN go install github.com/mikefarah/yq/v4@latest
|
||||||
|
|
||||||
|
###################################
|
||||||
|
###################################
|
||||||
|
|
||||||
|
# This is the final target. The result of this target will be the image uploaded to the registry.
|
||||||
|
# If you cannot find a more suitable place for an addition, this layer is a suitable place for it.
|
||||||
|
FROM requirements-drivers
|
||||||
|
|
||||||
|
ENV HEALTHCHECK_ENDPOINT=http://localhost:8080/readyz
|
||||||
|
|
||||||
|
ARG CUDA_MAJOR_VERSION=12
|
||||||
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
|
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
|
||||||
ENV NVIDIA_REQUIRE_CUDA="cuda>=${CUDA_MAJOR_VERSION}.0"
|
ENV NVIDIA_REQUIRE_CUDA="cuda>=${CUDA_MAJOR_VERSION}.0"
|
||||||
ENV NVIDIA_VISIBLE_DEVICES=all
|
ENV NVIDIA_VISIBLE_DEVICES=all
|
||||||
|
|
||||||
WORKDIR /build
|
WORKDIR /
|
||||||
|
|
||||||
COPY Makefile .
|
COPY ./entrypoint.sh .
|
||||||
RUN make get-sources
|
|
||||||
COPY go.mod .
|
|
||||||
RUN make prepare
|
|
||||||
COPY . .
|
|
||||||
RUN ESPEAK_DATA=/build/lib/Linux-$(uname -m)/piper_phonemize/lib/espeak-ng-data make build
|
|
||||||
|
|
||||||
###################################
|
# Copy the binary
|
||||||
###################################
|
|
||||||
|
|
||||||
FROM requirements
|
|
||||||
|
|
||||||
ARG FFMPEG
|
|
||||||
|
|
||||||
ENV REBUILD=true
|
|
||||||
ENV HEALTHCHECK_ENDPOINT=http://localhost:8080/readyz
|
|
||||||
|
|
||||||
# Add FFmpeg
|
|
||||||
RUN if [ "${FFMPEG}" = "true" ]; then \
|
|
||||||
apt-get install -y ffmpeg \
|
|
||||||
; fi
|
|
||||||
|
|
||||||
WORKDIR /build
|
|
||||||
|
|
||||||
# we start fresh & re-copy all assets because `make build` does not clean up nicely after itself
|
|
||||||
# so when `entrypoint.sh` runs `make build` again (which it does by default), the build would fail
|
|
||||||
# see https://github.com/go-skynet/LocalAI/pull/658#discussion_r1241971626 and
|
|
||||||
# https://github.com/go-skynet/LocalAI/pull/434
|
|
||||||
COPY . .
|
|
||||||
RUN make prepare-sources
|
|
||||||
COPY --from=builder /build/local-ai ./
|
COPY --from=builder /build/local-ai ./
|
||||||
|
|
||||||
|
# Make sure the models directory exists
|
||||||
|
RUN mkdir -p /models /backends
|
||||||
|
|
||||||
# Define the health check command
|
# Define the health check command
|
||||||
HEALTHCHECK --interval=1m --timeout=10m --retries=10 \
|
HEALTHCHECK --interval=1m --timeout=10m --retries=10 \
|
||||||
CMD curl -f $HEALTHCHECK_ENDPOINT || exit 1
|
CMD curl -f ${HEALTHCHECK_ENDPOINT} || exit 1
|
||||||
|
|
||||||
|
VOLUME /models /backends
|
||||||
EXPOSE 8080
|
EXPOSE 8080
|
||||||
ENTRYPOINT [ "/build/entrypoint.sh" ]
|
ENTRYPOINT [ "/entrypoint.sh" ]
|
||||||
|
|||||||
8
Dockerfile.aio
Normal file
8
Dockerfile.aio
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
ARG BASE_IMAGE=ubuntu:22.04
|
||||||
|
|
||||||
|
FROM ${BASE_IMAGE}
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install -y pciutils && apt-get clean
|
||||||
|
|
||||||
|
COPY aio/ /aio
|
||||||
|
ENTRYPOINT [ "/aio/entrypoint.sh" ]
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
VERSION 0.7
|
|
||||||
|
|
||||||
build:
|
|
||||||
FROM DOCKERFILE -f Dockerfile .
|
|
||||||
SAVE ARTIFACT /usr/bin/local-ai AS LOCAL local-ai
|
|
||||||
10
Entitlements.plist
Normal file
10
Entitlements.plist
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||||
|
<plist version="1.0">
|
||||||
|
<dict>
|
||||||
|
<key>com.apple.security.network.client</key>
|
||||||
|
<true/>
|
||||||
|
<key>com.apple.security.network.server</key>
|
||||||
|
<true/>
|
||||||
|
</dict>
|
||||||
|
</plist>
|
||||||
2
LICENSE
2
LICENSE
@@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2023 Ettore Di Giacinto
|
Copyright (c) 2023-2025 Ettore Di Giacinto (mudler@localai.io)
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
730
Makefile
730
Makefile
@@ -3,33 +3,31 @@ GOTEST=$(GOCMD) test
|
|||||||
GOVET=$(GOCMD) vet
|
GOVET=$(GOCMD) vet
|
||||||
BINARY_NAME=local-ai
|
BINARY_NAME=local-ai
|
||||||
|
|
||||||
GOLLAMA_VERSION?=42ba448383692c11ca8f04f2b87e87f3f9bdac30
|
GORELEASER?=
|
||||||
GPT4ALL_REPO?=https://github.com/nomic-ai/gpt4all
|
|
||||||
GPT4ALL_VERSION?=a67f8132e1657974d2abe4abeb82df9be3d42bbd
|
|
||||||
GOGGMLTRANSFORMERS_VERSION?=8e31841dcddca16468c11b2e7809f279fa76a832
|
|
||||||
RWKV_REPO?=https://github.com/donomii/go-rwkv.cpp
|
|
||||||
RWKV_VERSION?=f5a8c45396741470583f59b916a2a7641e63bcd0
|
|
||||||
WHISPER_CPP_VERSION?=85ed71aaec8e0612a84c0b67804bde75aa75a273
|
|
||||||
BERT_VERSION?=6069103f54b9969c02e789d0fb12a23bd614285f
|
|
||||||
PIPER_VERSION?=56b8a81b4760a6fbee1a82e62f007ae7e8f010a7
|
|
||||||
BLOOMZ_VERSION?=1834e77b83faafe912ad4092ccf7f77937349e2f
|
|
||||||
export BUILD_TYPE?=
|
|
||||||
CGO_LDFLAGS?=
|
|
||||||
CUDA_LIBPATH?=/usr/local/cuda/lib64/
|
|
||||||
STABLEDIFFUSION_VERSION?=d89260f598afb809279bc72aa0107b4292587632
|
|
||||||
GO_TAGS?=
|
|
||||||
BUILD_ID?=git
|
|
||||||
|
|
||||||
VERSION?=$(shell git describe --always --tags --dirty || echo "dev" )
|
ONEAPI_VERSION?=2025.2
|
||||||
|
|
||||||
|
export BUILD_TYPE?=
|
||||||
|
|
||||||
|
GO_TAGS?=
|
||||||
|
BUILD_ID?=
|
||||||
|
NATIVE?=false
|
||||||
|
|
||||||
|
TEST_DIR=/tmp/test
|
||||||
|
|
||||||
|
TEST_FLAKES?=5
|
||||||
|
|
||||||
|
RANDOM := $(shell bash -c 'echo $$RANDOM')
|
||||||
|
|
||||||
|
VERSION?=$(shell git describe --always --tags || echo "dev" )
|
||||||
# go tool nm ./local-ai | grep Commit
|
# go tool nm ./local-ai | grep Commit
|
||||||
LD_FLAGS?=
|
LD_FLAGS?=-s -w
|
||||||
override LD_FLAGS += -X "github.com/go-skynet/LocalAI/internal.Version=$(VERSION)"
|
override LD_FLAGS += -X "github.com/mudler/LocalAI/internal.Version=$(VERSION)"
|
||||||
override LD_FLAGS += -X "github.com/go-skynet/LocalAI/internal.Commit=$(shell git rev-parse HEAD)"
|
override LD_FLAGS += -X "github.com/mudler/LocalAI/internal.Commit=$(shell git rev-parse HEAD)"
|
||||||
|
|
||||||
OPTIONAL_TARGETS?=
|
OPTIONAL_TARGETS?=
|
||||||
ESPEAK_DATA?=
|
|
||||||
|
|
||||||
OS := $(shell uname -s)
|
export OS := $(shell uname -s)
|
||||||
ARCH := $(shell uname -m)
|
ARCH := $(shell uname -m)
|
||||||
GREEN := $(shell tput -Txterm setaf 2)
|
GREEN := $(shell tput -Txterm setaf 2)
|
||||||
YELLOW := $(shell tput -Txterm setaf 3)
|
YELLOW := $(shell tput -Txterm setaf 3)
|
||||||
@@ -37,258 +35,195 @@ WHITE := $(shell tput -Txterm setaf 7)
|
|||||||
CYAN := $(shell tput -Txterm setaf 6)
|
CYAN := $(shell tput -Txterm setaf 6)
|
||||||
RESET := $(shell tput -Txterm sgr0)
|
RESET := $(shell tput -Txterm sgr0)
|
||||||
|
|
||||||
C_INCLUDE_PATH=$(shell pwd)/go-llama:$(shell pwd)/go-stable-diffusion/:$(shell pwd)/gpt4all/gpt4all-bindings/golang/:$(shell pwd)/go-ggml-transformers:$(shell pwd)/go-rwkv:$(shell pwd)/whisper.cpp:$(shell pwd)/go-bert:$(shell pwd)/bloomz
|
# Default Docker bridge IP
|
||||||
LIBRARY_PATH=$(shell pwd)/go-piper:$(shell pwd)/go-llama:$(shell pwd)/go-stable-diffusion/:$(shell pwd)/gpt4all/gpt4all-bindings/golang/:$(shell pwd)/go-ggml-transformers:$(shell pwd)/go-rwkv:$(shell pwd)/whisper.cpp:$(shell pwd)/go-bert:$(shell pwd)/bloomz
|
E2E_BRIDGE_IP?=172.17.0.1
|
||||||
|
|
||||||
ifeq ($(BUILD_TYPE),openblas)
|
ifndef UNAME_S
|
||||||
CGO_LDFLAGS+=-lopenblas
|
UNAME_S := $(shell uname -s)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(BUILD_TYPE),cublas)
|
ifeq ($(OS),Darwin)
|
||||||
CGO_LDFLAGS+=-lcublas -lcudart -L$(CUDA_LIBPATH)
|
ifeq ($(OSX_SIGNING_IDENTITY),)
|
||||||
export LLAMA_CUBLAS=1
|
OSX_SIGNING_IDENTITY := $(shell security find-identity -v -p codesigning | grep '"' | head -n 1 | sed -E 's/.*"(.*)"/\1/')
|
||||||
|
endif
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(BUILD_TYPE),metal)
|
# check if goreleaser exists
|
||||||
CGO_LDFLAGS+=-framework Foundation -framework Metal -framework MetalKit -framework MetalPerformanceShaders
|
ifeq (, $(shell which goreleaser))
|
||||||
export LLAMA_METAL=1
|
GORELEASER=curl -sfL https://goreleaser.com/static/run | bash -s --
|
||||||
|
else
|
||||||
|
GORELEASER=$(shell which goreleaser)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(BUILD_TYPE),clblas)
|
TEST_PATHS?=./api/... ./pkg/... ./core/...
|
||||||
CGO_LDFLAGS+=-lOpenCL -lclblast
|
|
||||||
endif
|
|
||||||
|
|
||||||
# glibc-static or glibc-devel-static required
|
|
||||||
ifeq ($(STATIC),true)
|
|
||||||
LD_FLAGS=-linkmode external -extldflags -static
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifeq ($(findstring stablediffusion,$(GO_TAGS)),stablediffusion)
|
|
||||||
OPTIONAL_TARGETS+=go-stable-diffusion/libstablediffusion.a
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifeq ($(findstring tts,$(GO_TAGS)),tts)
|
|
||||||
OPTIONAL_TARGETS+=go-piper/libpiper_binding.a
|
|
||||||
OPTIONAL_TARGETS+=backend-assets/espeak-ng-data
|
|
||||||
endif
|
|
||||||
|
|
||||||
.PHONY: all test build vendor
|
.PHONY: all test build vendor
|
||||||
|
|
||||||
all: help
|
all: help
|
||||||
|
|
||||||
## GPT4ALL
|
|
||||||
gpt4all:
|
|
||||||
git clone --recurse-submodules $(GPT4ALL_REPO) gpt4all
|
|
||||||
cd gpt4all && git checkout -b build $(GPT4ALL_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
# This is hackish, but needed as both go-llama and go-gpt4allj have their own version of ggml..
|
|
||||||
@find ./gpt4all -type f -name "*.c" -exec sed -i'' -e 's/ggml_/ggml_gpt4all_/g' {} +
|
|
||||||
@find ./gpt4all -type f -name "*.cpp" -exec sed -i'' -e 's/ggml_/ggml_gpt4all_/g' {} +
|
|
||||||
@find ./gpt4all -type f -name "*.m" -exec sed -i'' -e 's/ggml_/ggml_gpt4all_/g' {} +
|
|
||||||
@find ./gpt4all -type f -name "*.h" -exec sed -i'' -e 's/ggml_/ggml_gpt4all_/g' {} +
|
|
||||||
@find ./gpt4all -type f -name "*.c" -exec sed -i'' -e 's/llama_/llama_gpt4all_/g' {} +
|
|
||||||
@find ./gpt4all -type f -name "*.cpp" -exec sed -i'' -e 's/llama_/llama_gpt4all_/g' {} +
|
|
||||||
@find ./gpt4all -type f -name "*.h" -exec sed -i'' -e 's/llama_/llama_gpt4all_/g' {} +
|
|
||||||
@find ./gpt4all/gpt4all-backend -type f -name "llama_util.h" -execdir mv {} "llama_gpt4all_util.h" \;
|
|
||||||
@find ./gpt4all -type f -name "*.cmake" -exec sed -i'' -e 's/llama_util/llama_gpt4all_util/g' {} +
|
|
||||||
@find ./gpt4all -type f -name "*.txt" -exec sed -i'' -e 's/llama_util/llama_gpt4all_util/g' {} +
|
|
||||||
@find ./gpt4all/gpt4all-bindings/golang -type f -name "*.cpp" -exec sed -i'' -e 's/load_model/load_gpt4all_model/g' {} +
|
|
||||||
@find ./gpt4all/gpt4all-bindings/golang -type f -name "*.go" -exec sed -i'' -e 's/load_model/load_gpt4all_model/g' {} +
|
|
||||||
@find ./gpt4all/gpt4all-bindings/golang -type f -name "*.h" -exec sed -i'' -e 's/load_model/load_gpt4all_model/g' {} +
|
|
||||||
|
|
||||||
## go-piper
|
|
||||||
go-piper:
|
|
||||||
git clone --recurse-submodules https://github.com/mudler/go-piper go-piper
|
|
||||||
cd go-piper && git checkout -b build $(PIPER_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
|
|
||||||
## BERT embeddings
|
|
||||||
go-bert:
|
|
||||||
git clone --recurse-submodules https://github.com/go-skynet/go-bert.cpp go-bert
|
|
||||||
cd go-bert && git checkout -b build $(BERT_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
@find ./go-bert -type f -name "*.c" -exec sed -i'' -e 's/ggml_/ggml_bert_/g' {} +
|
|
||||||
@find ./go-bert -type f -name "*.cpp" -exec sed -i'' -e 's/ggml_/ggml_bert_/g' {} +
|
|
||||||
@find ./go-bert -type f -name "*.h" -exec sed -i'' -e 's/ggml_/ggml_bert_/g' {} +
|
|
||||||
|
|
||||||
## stable diffusion
|
|
||||||
go-stable-diffusion:
|
|
||||||
git clone --recurse-submodules https://github.com/mudler/go-stable-diffusion go-stable-diffusion
|
|
||||||
cd go-stable-diffusion && git checkout -b build $(STABLEDIFFUSION_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
|
|
||||||
go-stable-diffusion/libstablediffusion.a:
|
|
||||||
$(MAKE) -C go-stable-diffusion libstablediffusion.a
|
|
||||||
|
|
||||||
## RWKV
|
|
||||||
go-rwkv:
|
|
||||||
git clone --recurse-submodules $(RWKV_REPO) go-rwkv
|
|
||||||
cd go-rwkv && git checkout -b build $(RWKV_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
@find ./go-rwkv -type f -name "*.c" -exec sed -i'' -e 's/ggml_/ggml_rwkv_/g' {} +
|
|
||||||
@find ./go-rwkv -type f -name "*.cpp" -exec sed -i'' -e 's/ggml_/ggml_rwkv_/g' {} +
|
|
||||||
@find ./go-rwkv -type f -name "*.h" -exec sed -i'' -e 's/ggml_/ggml_rwkv_/g' {} +
|
|
||||||
|
|
||||||
go-rwkv/librwkv.a: go-rwkv
|
|
||||||
cd go-rwkv && cd rwkv.cpp && cmake . -DRWKV_BUILD_SHARED_LIBRARY=OFF && cmake --build . && cp librwkv.a ..
|
|
||||||
|
|
||||||
## bloomz
|
|
||||||
bloomz:
|
|
||||||
git clone --recurse-submodules https://github.com/go-skynet/bloomz.cpp bloomz
|
|
||||||
@find ./bloomz -type f -name "*.c" -exec sed -i'' -e 's/ggml_/ggml_bloomz_/g' {} +
|
|
||||||
@find ./bloomz -type f -name "*.cpp" -exec sed -i'' -e 's/ggml_/ggml_bloomz_/g' {} +
|
|
||||||
@find ./bloomz -type f -name "*.h" -exec sed -i'' -e 's/ggml_/ggml_bloomz_/g' {} +
|
|
||||||
@find ./bloomz -type f -name "*.cpp" -exec sed -i'' -e 's/gpt_/gpt_bloomz_/g' {} +
|
|
||||||
@find ./bloomz -type f -name "*.h" -exec sed -i'' -e 's/gpt_/gpt_bloomz_/g' {} +
|
|
||||||
@find ./bloomz -type f -name "*.cpp" -exec sed -i'' -e 's/void replace/void json_bloomz_replace/g' {} +
|
|
||||||
@find ./bloomz -type f -name "*.cpp" -exec sed -i'' -e 's/::replace/::json_bloomz_replace/g' {} +
|
|
||||||
|
|
||||||
bloomz/libbloomz.a: bloomz
|
|
||||||
cd bloomz && make libbloomz.a
|
|
||||||
|
|
||||||
go-bert/libgobert.a: go-bert
|
|
||||||
$(MAKE) -C go-bert libgobert.a
|
|
||||||
|
|
||||||
backend-assets/gpt4all: gpt4all/gpt4all-bindings/golang/libgpt4all.a
|
|
||||||
mkdir -p backend-assets/gpt4all
|
|
||||||
@cp gpt4all/gpt4all-bindings/golang/buildllm/*.so backend-assets/gpt4all/ || true
|
|
||||||
@cp gpt4all/gpt4all-bindings/golang/buildllm/*.dylib backend-assets/gpt4all/ || true
|
|
||||||
@cp gpt4all/gpt4all-bindings/golang/buildllm/*.dll backend-assets/gpt4all/ || true
|
|
||||||
|
|
||||||
backend-assets/espeak-ng-data:
|
|
||||||
mkdir -p backend-assets/espeak-ng-data
|
|
||||||
ifdef ESPEAK_DATA
|
|
||||||
@cp -rf $(ESPEAK_DATA)/. backend-assets/espeak-ng-data
|
|
||||||
else
|
|
||||||
@touch backend-assets/espeak-ng-data/keep
|
|
||||||
endif
|
|
||||||
|
|
||||||
gpt4all/gpt4all-bindings/golang/libgpt4all.a: gpt4all
|
|
||||||
$(MAKE) -C gpt4all/gpt4all-bindings/golang/ libgpt4all.a
|
|
||||||
|
|
||||||
## CEREBRAS GPT
|
|
||||||
go-ggml-transformers:
|
|
||||||
git clone --recurse-submodules https://github.com/go-skynet/go-ggml-transformers.cpp go-ggml-transformers
|
|
||||||
cd go-ggml-transformers && git checkout -b build $(GOGPT2_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
# This is hackish, but needed as both go-llama and go-gpt4allj have their own version of ggml..
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.c" -exec sed -i'' -e 's/ggml_/ggml_gpt2_/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.cpp" -exec sed -i'' -e 's/ggml_/ggml_gpt2_/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.h" -exec sed -i'' -e 's/ggml_/ggml_gpt2_/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.cpp" -exec sed -i'' -e 's/gpt_print_usage/gpt2_print_usage/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.h" -exec sed -i'' -e 's/gpt_print_usage/gpt2_print_usage/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.cpp" -exec sed -i'' -e 's/gpt_params_parse/gpt2_params_parse/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.h" -exec sed -i'' -e 's/gpt_params_parse/gpt2_params_parse/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.cpp" -exec sed -i'' -e 's/gpt_random_prompt/gpt2_random_prompt/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.h" -exec sed -i'' -e 's/gpt_random_prompt/gpt2_random_prompt/g' {} +
|
|
||||||
@find ./go-ggml-transformers -type f -name "*.cpp" -exec sed -i'' -e 's/json_/json_gpt2_/g' {} +
|
|
||||||
|
|
||||||
go-ggml-transformers/libtransformers.a: go-ggml-transformers
|
|
||||||
$(MAKE) -C go-ggml-transformers libtransformers.a
|
|
||||||
|
|
||||||
whisper.cpp:
|
|
||||||
git clone https://github.com/ggerganov/whisper.cpp.git
|
|
||||||
cd whisper.cpp && git checkout -b build $(WHISPER_CPP_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
@find ./whisper.cpp -type f -name "*.c" -exec sed -i'' -e 's/ggml_/ggml_whisper_/g' {} +
|
|
||||||
@find ./whisper.cpp -type f -name "*.cpp" -exec sed -i'' -e 's/ggml_/ggml_whisper_/g' {} +
|
|
||||||
@find ./whisper.cpp -type f -name "*.h" -exec sed -i'' -e 's/ggml_/ggml_whisper_/g' {} +
|
|
||||||
|
|
||||||
whisper.cpp/libwhisper.a: whisper.cpp
|
|
||||||
cd whisper.cpp && make libwhisper.a
|
|
||||||
|
|
||||||
go-llama:
|
|
||||||
git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama
|
|
||||||
cd go-llama && git checkout -b build $(GOLLAMA_VERSION) && git submodule update --init --recursive --depth 1
|
|
||||||
|
|
||||||
go-llama/libbinding.a: go-llama
|
|
||||||
$(MAKE) -C go-llama BUILD_TYPE=$(BUILD_TYPE) libbinding.a
|
|
||||||
|
|
||||||
go-piper/libpiper_binding.a:
|
|
||||||
$(MAKE) -C go-piper libpiper_binding.a example/main
|
|
||||||
|
|
||||||
get-sources: go-llama go-ggml-transformers gpt4all go-piper go-rwkv whisper.cpp go-bert bloomz go-stable-diffusion
|
|
||||||
touch $@
|
|
||||||
|
|
||||||
replace:
|
|
||||||
$(GOCMD) mod edit -replace github.com/go-skynet/go-llama.cpp=$(shell pwd)/go-llama
|
|
||||||
$(GOCMD) mod edit -replace github.com/nomic-ai/gpt4all/gpt4all-bindings/golang=$(shell pwd)/gpt4all/gpt4all-bindings/golang
|
|
||||||
$(GOCMD) mod edit -replace github.com/go-skynet/go-ggml-transformers.cpp=$(shell pwd)/go-ggml-transformers
|
|
||||||
$(GOCMD) mod edit -replace github.com/donomii/go-rwkv.cpp=$(shell pwd)/go-rwkv
|
|
||||||
$(GOCMD) mod edit -replace github.com/ggerganov/whisper.cpp=$(shell pwd)/whisper.cpp
|
|
||||||
$(GOCMD) mod edit -replace github.com/go-skynet/go-bert.cpp=$(shell pwd)/go-bert
|
|
||||||
$(GOCMD) mod edit -replace github.com/go-skynet/bloomz.cpp=$(shell pwd)/bloomz
|
|
||||||
$(GOCMD) mod edit -replace github.com/mudler/go-stable-diffusion=$(shell pwd)/go-stable-diffusion
|
|
||||||
$(GOCMD) mod edit -replace github.com/mudler/go-piper=$(shell pwd)/go-piper
|
|
||||||
|
|
||||||
prepare-sources: get-sources replace
|
|
||||||
$(GOCMD) mod download
|
|
||||||
|
|
||||||
## GENERIC
|
## GENERIC
|
||||||
rebuild: ## Rebuilds the project
|
rebuild: ## Rebuilds the project
|
||||||
$(MAKE) -C go-llama clean
|
$(GOCMD) clean -cache
|
||||||
$(MAKE) -C gpt4all/gpt4all-bindings/golang/ clean
|
|
||||||
$(MAKE) -C go-ggml-transformers clean
|
|
||||||
$(MAKE) -C go-rwkv clean
|
|
||||||
$(MAKE) -C whisper.cpp clean
|
|
||||||
$(MAKE) -C go-stable-diffusion clean
|
|
||||||
$(MAKE) -C go-bert clean
|
|
||||||
$(MAKE) -C bloomz clean
|
|
||||||
$(MAKE) -C go-piper clean
|
|
||||||
$(MAKE) build
|
$(MAKE) build
|
||||||
|
|
||||||
prepare: prepare-sources backend-assets/gpt4all $(OPTIONAL_TARGETS) go-llama/libbinding.a go-bert/libgobert.a go-ggml-transformers/libtransformers.a go-rwkv/librwkv.a whisper.cpp/libwhisper.a bloomz/libbloomz.a ## Prepares for building
|
|
||||||
touch $@
|
|
||||||
|
|
||||||
clean: ## Remove build related file
|
clean: ## Remove build related file
|
||||||
rm -fr ./go-llama
|
$(GOCMD) clean -cache
|
||||||
rm -rf ./gpt4all
|
rm -f prepare
|
||||||
rm -rf ./go-gpt2
|
|
||||||
rm -rf ./go-stable-diffusion
|
|
||||||
rm -rf ./go-ggml-transformers
|
|
||||||
rm -rf ./backend-assets
|
|
||||||
rm -rf ./go-rwkv
|
|
||||||
rm -rf ./go-bert
|
|
||||||
rm -rf ./bloomz
|
|
||||||
rm -rf ./whisper.cpp
|
|
||||||
rm -rf ./go-piper
|
|
||||||
rm -rf $(BINARY_NAME)
|
rm -rf $(BINARY_NAME)
|
||||||
rm -rf release/
|
rm -rf release/
|
||||||
|
$(MAKE) protogen-clean
|
||||||
|
rmdir pkg/grpc/proto || true
|
||||||
|
|
||||||
|
clean-tests:
|
||||||
|
rm -rf test-models
|
||||||
|
rm -rf test-dir
|
||||||
|
|
||||||
|
## Install Go tools
|
||||||
|
install-go-tools:
|
||||||
|
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
||||||
|
go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
|
||||||
|
|
||||||
## Build:
|
## Build:
|
||||||
|
build: protogen-go install-go-tools ## Build the project
|
||||||
build: prepare ## Build the project
|
|
||||||
$(info ${GREEN}I local-ai build info:${RESET})
|
$(info ${GREEN}I local-ai build info:${RESET})
|
||||||
$(info ${GREEN}I BUILD_TYPE: ${YELLOW}$(BUILD_TYPE)${RESET})
|
$(info ${GREEN}I BUILD_TYPE: ${YELLOW}$(BUILD_TYPE)${RESET})
|
||||||
$(info ${GREEN}I GO_TAGS: ${YELLOW}$(GO_TAGS)${RESET})
|
$(info ${GREEN}I GO_TAGS: ${YELLOW}$(GO_TAGS)${RESET})
|
||||||
$(info ${GREEN}I LD_FLAGS: ${YELLOW}$(LD_FLAGS)${RESET})
|
$(info ${GREEN}I LD_FLAGS: ${YELLOW}$(LD_FLAGS)${RESET})
|
||||||
|
$(info ${GREEN}I UPX: ${YELLOW}$(UPX)${RESET})
|
||||||
|
rm -rf $(BINARY_NAME) || true
|
||||||
|
CGO_LDFLAGS="$(CGO_LDFLAGS)" $(GOCMD) build -ldflags "$(LD_FLAGS)" -tags "$(GO_TAGS)" -o $(BINARY_NAME) ./
|
||||||
|
|
||||||
CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} $(GOCMD) build -ldflags "$(LD_FLAGS)" -tags "$(GO_TAGS)" -o $(BINARY_NAME) ./
|
dev-dist:
|
||||||
ifeq ($(BUILD_TYPE),metal)
|
$(GORELEASER) build --snapshot --clean
|
||||||
cp go-llama/build/bin/ggml-metal.metal .
|
|
||||||
endif
|
|
||||||
|
|
||||||
dist: build
|
dist:
|
||||||
mkdir -p release
|
$(GORELEASER) build --clean
|
||||||
cp $(BINARY_NAME) release/$(BINARY_NAME)-$(BUILD_ID)-$(OS)-$(ARCH)
|
|
||||||
|
|
||||||
generic-build: ## Build the project using generic
|
osx-signed: build
|
||||||
BUILD_TYPE="generic" $(MAKE) build
|
codesign --deep --force --sign "$(OSX_SIGNING_IDENTITY)" --entitlements "./Entitlements.plist" "./$(BINARY_NAME)"
|
||||||
|
|
||||||
## Run
|
## Run
|
||||||
run: prepare ## run local-ai
|
run: ## run local-ai
|
||||||
CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} $(GOCMD) run ./
|
CGO_LDFLAGS="$(CGO_LDFLAGS)" $(GOCMD) run ./
|
||||||
|
|
||||||
test-models/testmodel:
|
test-models/testmodel.ggml:
|
||||||
mkdir test-models
|
mkdir test-models
|
||||||
mkdir test-dir
|
mkdir test-dir
|
||||||
wget https://huggingface.co/nnakasato/ggml-model-test/resolve/main/ggml-model-q4.bin -O test-models/testmodel
|
wget -q https://huggingface.co/mradermacher/gpt2-alpaca-gpt4-GGUF/resolve/main/gpt2-alpaca-gpt4.Q4_K_M.gguf -O test-models/testmodel.ggml
|
||||||
wget https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin -O test-models/whisper-en
|
wget -q https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin -O test-models/whisper-en
|
||||||
wget https://huggingface.co/skeskinen/ggml/resolve/main/all-MiniLM-L6-v2/ggml-model-q4_0.bin -O test-models/bert
|
wget -q https://huggingface.co/mudler/all-MiniLM-L6-v2/resolve/main/ggml-model-q4_0.bin -O test-models/bert
|
||||||
wget https://cdn.openai.com/whisper/draft-20220913a/micro-machines.wav -O test-dir/audio.wav
|
wget -q https://cdn.openai.com/whisper/draft-20220913a/micro-machines.wav -O test-dir/audio.wav
|
||||||
wget https://huggingface.co/mudler/rwkv-4-raven-1.5B-ggml/resolve/main/RWKV-4-Raven-1B5-v11-Eng99%2525-Other1%2525-20230425-ctx4096_Q4_0.bin -O test-models/rwkv
|
|
||||||
wget https://raw.githubusercontent.com/saharNooby/rwkv.cpp/5eb8f09c146ea8124633ab041d9ea0b1f1db4459/rwkv/20B_tokenizer.json -O test-models/rwkv.tokenizer.json
|
|
||||||
cp tests/models_fixtures/* test-models
|
cp tests/models_fixtures/* test-models
|
||||||
|
|
||||||
test: prepare test-models/testmodel
|
prepare-test: protogen-go
|
||||||
cp -r backend-assets api
|
|
||||||
cp tests/models_fixtures/* test-models
|
cp tests/models_fixtures/* test-models
|
||||||
C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="!gpt4all && !llama" --flake-attempts 5 -v -r ./api ./pkg
|
|
||||||
C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="gpt4all" --flake-attempts 5 -v -r ./api ./pkg
|
########################################################
|
||||||
C_INCLUDE_PATH=${C_INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="llama" --flake-attempts 5 -v -r ./api ./pkg
|
## Tests
|
||||||
|
########################################################
|
||||||
|
|
||||||
|
## Test targets
|
||||||
|
test: test-models/testmodel.ggml protogen-go
|
||||||
|
@echo 'Running tests'
|
||||||
|
export GO_TAGS="debug"
|
||||||
|
$(MAKE) prepare-test
|
||||||
|
HUGGINGFACE_GRPC=$(abspath ./)/backend/python/transformers/run.sh TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models BACKENDS_PATH=$(abspath ./)/backends \
|
||||||
|
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="!llama-gguf" --flake-attempts $(TEST_FLAKES) --fail-fast -v -r $(TEST_PATHS)
|
||||||
|
$(MAKE) test-llama-gguf
|
||||||
|
$(MAKE) test-tts
|
||||||
|
$(MAKE) test-stablediffusion
|
||||||
|
|
||||||
|
backends/llama-cpp: docker-build-llama-cpp docker-save-llama-cpp build
|
||||||
|
./local-ai backends install "ocifile://$(abspath ./backend-images/llama-cpp.tar)"
|
||||||
|
|
||||||
|
backends/piper: docker-build-piper docker-save-piper build
|
||||||
|
./local-ai backends install "ocifile://$(abspath ./backend-images/piper.tar)"
|
||||||
|
|
||||||
|
backends/stablediffusion-ggml: docker-build-stablediffusion-ggml docker-save-stablediffusion-ggml build
|
||||||
|
./local-ai backends install "ocifile://$(abspath ./backend-images/stablediffusion-ggml.tar)"
|
||||||
|
|
||||||
|
backends/whisper: docker-build-whisper docker-save-whisper build
|
||||||
|
./local-ai backends install "ocifile://$(abspath ./backend-images/whisper.tar)"
|
||||||
|
|
||||||
|
backends/silero-vad: docker-build-silero-vad docker-save-silero-vad build
|
||||||
|
./local-ai backends install "ocifile://$(abspath ./backend-images/silero-vad.tar)"
|
||||||
|
|
||||||
|
backends/local-store: docker-build-local-store docker-save-local-store build
|
||||||
|
./local-ai backends install "ocifile://$(abspath ./backend-images/local-store.tar)"
|
||||||
|
|
||||||
|
backends/huggingface: docker-build-huggingface docker-save-huggingface build
|
||||||
|
./local-ai backends install "ocifile://$(abspath ./backend-images/huggingface.tar)"
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
## AIO tests
|
||||||
|
########################################################
|
||||||
|
|
||||||
|
docker-build-aio:
|
||||||
|
docker build --build-arg MAKEFLAGS="--jobs=5 --output-sync=target" -t local-ai:tests -f Dockerfile .
|
||||||
|
BASE_IMAGE=local-ai:tests DOCKER_AIO_IMAGE=local-ai-aio:test $(MAKE) docker-aio
|
||||||
|
|
||||||
|
e2e-aio:
|
||||||
|
LOCALAI_BACKEND_DIR=$(abspath ./backends) \
|
||||||
|
LOCALAI_MODELS_DIR=$(abspath ./models) \
|
||||||
|
LOCALAI_IMAGE_TAG=test \
|
||||||
|
LOCALAI_IMAGE=local-ai-aio \
|
||||||
|
$(MAKE) run-e2e-aio
|
||||||
|
|
||||||
|
run-e2e-aio: protogen-go
|
||||||
|
@echo 'Running e2e AIO tests'
|
||||||
|
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --flake-attempts $(TEST_FLAKES) -v -r ./tests/e2e-aio
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
## E2E tests
|
||||||
|
########################################################
|
||||||
|
|
||||||
|
prepare-e2e:
|
||||||
|
mkdir -p $(TEST_DIR)
|
||||||
|
cp -rfv $(abspath ./tests/e2e-fixtures)/gpu.yaml $(TEST_DIR)/gpu.yaml
|
||||||
|
test -e $(TEST_DIR)/ggllm-test-model.bin || wget -q https://huggingface.co/TheBloke/CodeLlama-7B-Instruct-GGUF/resolve/main/codellama-7b-instruct.Q2_K.gguf -O $(TEST_DIR)/ggllm-test-model.bin
|
||||||
|
docker build --build-arg IMAGE_TYPE=core --build-arg BUILD_TYPE=$(BUILD_TYPE) --build-arg CUDA_MAJOR_VERSION=12 --build-arg CUDA_MINOR_VERSION=0 -t localai-tests .
|
||||||
|
|
||||||
|
run-e2e-image:
|
||||||
|
ls -liah $(abspath ./tests/e2e-fixtures)
|
||||||
|
docker run -p 5390:8080 -e MODELS_PATH=/models -e THREADS=1 -e DEBUG=true -d --rm -v $(TEST_DIR):/models --gpus all --name e2e-tests-$(RANDOM) localai-tests
|
||||||
|
|
||||||
|
test-e2e:
|
||||||
|
@echo 'Running e2e tests'
|
||||||
|
BUILD_TYPE=$(BUILD_TYPE) \
|
||||||
|
LOCALAI_API=http://$(E2E_BRIDGE_IP):5390/v1 \
|
||||||
|
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --flake-attempts $(TEST_FLAKES) -v -r ./tests/e2e
|
||||||
|
|
||||||
|
teardown-e2e:
|
||||||
|
rm -rf $(TEST_DIR) || true
|
||||||
|
docker stop $$(docker ps -q --filter ancestor=localai-tests)
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
## Integration and unit tests
|
||||||
|
########################################################
|
||||||
|
|
||||||
|
test-llama-gguf: prepare-test
|
||||||
|
TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models BACKENDS_PATH=$(abspath ./)/backends \
|
||||||
|
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="llama-gguf" --flake-attempts $(TEST_FLAKES) -v -r $(TEST_PATHS)
|
||||||
|
|
||||||
|
test-tts: prepare-test
|
||||||
|
TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models BACKENDS_PATH=$(abspath ./)/backends \
|
||||||
|
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="tts" --flake-attempts $(TEST_FLAKES) -v -r $(TEST_PATHS)
|
||||||
|
|
||||||
|
test-stablediffusion: prepare-test
|
||||||
|
TEST_DIR=$(abspath ./)/test-dir/ FIXTURES=$(abspath ./)/tests/fixtures CONFIG_FILE=$(abspath ./)/test-models/config.yaml MODELS_PATH=$(abspath ./)/test-models BACKENDS_PATH=$(abspath ./)/backends \
|
||||||
|
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="stablediffusion" --flake-attempts $(TEST_FLAKES) -v -r $(TEST_PATHS)
|
||||||
|
|
||||||
|
test-stores:
|
||||||
|
$(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --label-filter="stores" --flake-attempts $(TEST_FLAKES) -v -r tests/integration
|
||||||
|
|
||||||
|
test-container:
|
||||||
|
docker build --target requirements -t local-ai-test-container .
|
||||||
|
docker run -ti --rm --entrypoint /bin/bash -ti -v $(abspath ./):/build local-ai-test-container
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
## Help
|
||||||
|
########################################################
|
||||||
|
|
||||||
## Help:
|
## Help:
|
||||||
help: ## Show this help.
|
help: ## Show this help.
|
||||||
@@ -301,3 +236,324 @@ help: ## Show this help.
|
|||||||
if (/^[a-zA-Z_-]+:.*?##.*$$/) {printf " ${YELLOW}%-20s${GREEN}%s${RESET}\n", $$1, $$2} \
|
if (/^[a-zA-Z_-]+:.*?##.*$$/) {printf " ${YELLOW}%-20s${GREEN}%s${RESET}\n", $$1, $$2} \
|
||||||
else if (/^## .*$$/) {printf " ${CYAN}%s${RESET}\n", substr($$1,4)} \
|
else if (/^## .*$$/) {printf " ${CYAN}%s${RESET}\n", substr($$1,4)} \
|
||||||
}' $(MAKEFILE_LIST)
|
}' $(MAKEFILE_LIST)
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
## Backends
|
||||||
|
########################################################
|
||||||
|
|
||||||
|
.PHONY: protogen
|
||||||
|
protogen: protogen-go protogen-python
|
||||||
|
|
||||||
|
.PHONY: protogen-clean
|
||||||
|
protogen-clean: protogen-go-clean protogen-python-clean
|
||||||
|
|
||||||
|
protoc:
|
||||||
|
@OS_NAME=$$(uname -s | tr '[:upper:]' '[:lower:]'); \
|
||||||
|
ARCH_NAME=$$(uname -m); \
|
||||||
|
if [ "$$OS_NAME" = "darwin" ]; then \
|
||||||
|
if [ "$$ARCH_NAME" = "arm64" ]; then \
|
||||||
|
FILE=protoc-31.1-osx-aarch_64.zip; \
|
||||||
|
elif [ "$$ARCH_NAME" = "x86_64" ]; then \
|
||||||
|
FILE=protoc-31.1-osx-x86_64.zip; \
|
||||||
|
else \
|
||||||
|
echo "Unsupported macOS architecture: $$ARCH_NAME"; exit 1; \
|
||||||
|
fi; \
|
||||||
|
elif [ "$$OS_NAME" = "linux" ]; then \
|
||||||
|
if [ "$$ARCH_NAME" = "x86_64" ]; then \
|
||||||
|
FILE=protoc-31.1-linux-x86_64.zip; \
|
||||||
|
elif [ "$$ARCH_NAME" = "aarch64" ] || [ "$$ARCH_NAME" = "arm64" ]; then \
|
||||||
|
FILE=protoc-31.1-linux-aarch_64.zip; \
|
||||||
|
elif [ "$$ARCH_NAME" = "ppc64le" ]; then \
|
||||||
|
FILE=protoc-31.1-linux-ppcle_64.zip; \
|
||||||
|
elif [ "$$ARCH_NAME" = "s390x" ]; then \
|
||||||
|
FILE=protoc-31.1-linux-s390_64.zip; \
|
||||||
|
elif [ "$$ARCH_NAME" = "i386" ] || [ "$$ARCH_NAME" = "x86" ]; then \
|
||||||
|
FILE=protoc-31.1-linux-x86_32.zip; \
|
||||||
|
else \
|
||||||
|
echo "Unsupported Linux architecture: $$ARCH_NAME"; exit 1; \
|
||||||
|
fi; \
|
||||||
|
else \
|
||||||
|
echo "Unsupported OS: $$OS_NAME"; exit 1; \
|
||||||
|
fi; \
|
||||||
|
URL=https://github.com/protocolbuffers/protobuf/releases/download/v31.1/$$FILE; \
|
||||||
|
curl -L -s $$URL -o protoc.zip && \
|
||||||
|
unzip -j -d $(CURDIR) protoc.zip bin/protoc && rm protoc.zip
|
||||||
|
|
||||||
|
.PHONY: protogen-go
|
||||||
|
protogen-go: protoc install-go-tools
|
||||||
|
mkdir -p pkg/grpc/proto
|
||||||
|
./protoc --experimental_allow_proto3_optional -Ibackend/ --go_out=pkg/grpc/proto/ --go_opt=paths=source_relative --go-grpc_out=pkg/grpc/proto/ --go-grpc_opt=paths=source_relative \
|
||||||
|
backend/backend.proto
|
||||||
|
|
||||||
|
.PHONY: protogen-go-clean
|
||||||
|
protogen-go-clean:
|
||||||
|
$(RM) pkg/grpc/proto/backend.pb.go pkg/grpc/proto/backend_grpc.pb.go
|
||||||
|
$(RM) bin/*
|
||||||
|
|
||||||
|
.PHONY: protogen-python
|
||||||
|
protogen-python: bark-protogen coqui-protogen chatterbox-protogen diffusers-protogen exllama2-protogen rerankers-protogen transformers-protogen kokoro-protogen vllm-protogen faster-whisper-protogen
|
||||||
|
|
||||||
|
.PHONY: protogen-python-clean
|
||||||
|
protogen-python-clean: bark-protogen-clean coqui-protogen-clean chatterbox-protogen-clean diffusers-protogen-clean exllama2-protogen-clean rerankers-protogen-clean transformers-protogen-clean kokoro-protogen-clean vllm-protogen-clean faster-whisper-protogen-clean
|
||||||
|
|
||||||
|
.PHONY: bark-protogen
|
||||||
|
bark-protogen:
|
||||||
|
$(MAKE) -C backend/python/bark protogen
|
||||||
|
|
||||||
|
.PHONY: bark-protogen-clean
|
||||||
|
bark-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/bark protogen-clean
|
||||||
|
|
||||||
|
.PHONY: coqui-protogen
|
||||||
|
coqui-protogen:
|
||||||
|
$(MAKE) -C backend/python/coqui protogen
|
||||||
|
|
||||||
|
.PHONY: coqui-protogen-clean
|
||||||
|
coqui-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/coqui protogen-clean
|
||||||
|
|
||||||
|
.PHONY: diffusers-protogen
|
||||||
|
diffusers-protogen:
|
||||||
|
$(MAKE) -C backend/python/diffusers protogen
|
||||||
|
|
||||||
|
.PHONY: chatterbox-protogen
|
||||||
|
chatterbox-protogen:
|
||||||
|
$(MAKE) -C backend/python/chatterbox protogen
|
||||||
|
|
||||||
|
.PHONY: diffusers-protogen-clean
|
||||||
|
diffusers-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/diffusers protogen-clean
|
||||||
|
|
||||||
|
.PHONY: chatterbox-protogen-clean
|
||||||
|
chatterbox-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/chatterbox protogen-clean
|
||||||
|
|
||||||
|
.PHONY: faster-whisper-protogen
|
||||||
|
faster-whisper-protogen:
|
||||||
|
$(MAKE) -C backend/python/faster-whisper protogen
|
||||||
|
|
||||||
|
.PHONY: faster-whisper-protogen-clean
|
||||||
|
faster-whisper-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/faster-whisper protogen-clean
|
||||||
|
|
||||||
|
.PHONY: exllama2-protogen
|
||||||
|
exllama2-protogen:
|
||||||
|
$(MAKE) -C backend/python/exllama2 protogen
|
||||||
|
|
||||||
|
.PHONY: exllama2-protogen-clean
|
||||||
|
exllama2-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/exllama2 protogen-clean
|
||||||
|
|
||||||
|
.PHONY: rerankers-protogen
|
||||||
|
rerankers-protogen:
|
||||||
|
$(MAKE) -C backend/python/rerankers protogen
|
||||||
|
|
||||||
|
.PHONY: rerankers-protogen-clean
|
||||||
|
rerankers-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/rerankers protogen-clean
|
||||||
|
|
||||||
|
.PHONY: transformers-protogen
|
||||||
|
transformers-protogen:
|
||||||
|
$(MAKE) -C backend/python/transformers protogen
|
||||||
|
|
||||||
|
.PHONY: transformers-protogen-clean
|
||||||
|
transformers-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/transformers protogen-clean
|
||||||
|
|
||||||
|
.PHONY: kokoro-protogen
|
||||||
|
kokoro-protogen:
|
||||||
|
$(MAKE) -C backend/python/kokoro protogen
|
||||||
|
|
||||||
|
.PHONY: kokoro-protogen-clean
|
||||||
|
kokoro-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/kokoro protogen-clean
|
||||||
|
|
||||||
|
.PHONY: vllm-protogen
|
||||||
|
vllm-protogen:
|
||||||
|
$(MAKE) -C backend/python/vllm protogen
|
||||||
|
|
||||||
|
.PHONY: vllm-protogen-clean
|
||||||
|
vllm-protogen-clean:
|
||||||
|
$(MAKE) -C backend/python/vllm protogen-clean
|
||||||
|
|
||||||
|
|
||||||
|
prepare-test-extra: protogen-python
|
||||||
|
$(MAKE) -C backend/python/transformers
|
||||||
|
$(MAKE) -C backend/python/diffusers
|
||||||
|
$(MAKE) -C backend/python/chatterbox
|
||||||
|
$(MAKE) -C backend/python/vllm
|
||||||
|
|
||||||
|
test-extra: prepare-test-extra
|
||||||
|
$(MAKE) -C backend/python/transformers test
|
||||||
|
$(MAKE) -C backend/python/diffusers test
|
||||||
|
$(MAKE) -C backend/python/chatterbox test
|
||||||
|
$(MAKE) -C backend/python/vllm test
|
||||||
|
|
||||||
|
DOCKER_IMAGE?=local-ai
|
||||||
|
DOCKER_AIO_IMAGE?=local-ai-aio
|
||||||
|
IMAGE_TYPE?=core
|
||||||
|
BASE_IMAGE?=ubuntu:22.04
|
||||||
|
|
||||||
|
docker:
|
||||||
|
docker build \
|
||||||
|
--build-arg BASE_IMAGE=$(BASE_IMAGE) \
|
||||||
|
--build-arg IMAGE_TYPE=$(IMAGE_TYPE) \
|
||||||
|
--build-arg GO_TAGS="$(GO_TAGS)" \
|
||||||
|
--build-arg MAKEFLAGS="$(DOCKER_MAKEFLAGS)" \
|
||||||
|
--build-arg BUILD_TYPE=$(BUILD_TYPE) \
|
||||||
|
-t $(DOCKER_IMAGE) .
|
||||||
|
|
||||||
|
docker-cuda11:
|
||||||
|
docker build \
|
||||||
|
--build-arg CUDA_MAJOR_VERSION=11 \
|
||||||
|
--build-arg CUDA_MINOR_VERSION=8 \
|
||||||
|
--build-arg BASE_IMAGE=$(BASE_IMAGE) \
|
||||||
|
--build-arg IMAGE_TYPE=$(IMAGE_TYPE) \
|
||||||
|
--build-arg GO_TAGS="$(GO_TAGS)" \
|
||||||
|
--build-arg MAKEFLAGS="$(DOCKER_MAKEFLAGS)" \
|
||||||
|
--build-arg BUILD_TYPE=$(BUILD_TYPE) \
|
||||||
|
-t $(DOCKER_IMAGE)-cuda11 .
|
||||||
|
|
||||||
|
docker-aio:
|
||||||
|
@echo "Building AIO image with base $(BASE_IMAGE) as $(DOCKER_AIO_IMAGE)"
|
||||||
|
docker build \
|
||||||
|
--build-arg BASE_IMAGE=$(BASE_IMAGE) \
|
||||||
|
--build-arg MAKEFLAGS="$(DOCKER_MAKEFLAGS)" \
|
||||||
|
-t $(DOCKER_AIO_IMAGE) -f Dockerfile.aio .
|
||||||
|
|
||||||
|
docker-aio-all:
|
||||||
|
$(MAKE) docker-aio DOCKER_AIO_SIZE=cpu
|
||||||
|
$(MAKE) docker-aio DOCKER_AIO_SIZE=cpu
|
||||||
|
|
||||||
|
docker-image-intel:
|
||||||
|
docker build \
|
||||||
|
--build-arg BASE_IMAGE=intel/oneapi-basekit:${ONEAPI_VERSION}.0-0-devel-ubuntu24.04 \
|
||||||
|
--build-arg IMAGE_TYPE=$(IMAGE_TYPE) \
|
||||||
|
--build-arg GO_TAGS="$(GO_TAGS)" \
|
||||||
|
--build-arg MAKEFLAGS="$(DOCKER_MAKEFLAGS)" \
|
||||||
|
--build-arg BUILD_TYPE=sycl_f32 -t $(DOCKER_IMAGE) .
|
||||||
|
|
||||||
|
docker-image-intel-xpu:
|
||||||
|
docker build \
|
||||||
|
--build-arg BASE_IMAGE=intel/oneapi-basekit:${ONEAPI_VERSION}.0-0-devel-ubuntu22.04 \
|
||||||
|
--build-arg IMAGE_TYPE=$(IMAGE_TYPE) \
|
||||||
|
--build-arg GO_TAGS="$(GO_TAGS)" \
|
||||||
|
--build-arg MAKEFLAGS="$(DOCKER_MAKEFLAGS)" \
|
||||||
|
--build-arg BUILD_TYPE=sycl_f32 -t $(DOCKER_IMAGE) .
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
## Backends
|
||||||
|
########################################################
|
||||||
|
|
||||||
|
backend-images:
|
||||||
|
mkdir -p backend-images
|
||||||
|
|
||||||
|
docker-build-llama-cpp:
|
||||||
|
docker build --build-arg BUILD_TYPE=$(BUILD_TYPE) --build-arg IMAGE_BASE=$(IMAGE_BASE) -t local-ai-backend:llama-cpp -f backend/Dockerfile.llama-cpp .
|
||||||
|
|
||||||
|
docker-build-bark-cpp:
|
||||||
|
docker build -t local-ai-backend:bark-cpp -f backend/Dockerfile.go --build-arg BACKEND=bark-cpp .
|
||||||
|
|
||||||
|
docker-build-piper:
|
||||||
|
docker build -t local-ai-backend:piper -f backend/Dockerfile.go --build-arg BACKEND=piper .
|
||||||
|
|
||||||
|
docker-build-local-store:
|
||||||
|
docker build -t local-ai-backend:local-store -f backend/Dockerfile.go --build-arg BACKEND=local-store .
|
||||||
|
|
||||||
|
docker-build-huggingface:
|
||||||
|
docker build -t local-ai-backend:huggingface -f backend/Dockerfile.go --build-arg BACKEND=huggingface .
|
||||||
|
|
||||||
|
docker-save-huggingface: backend-images
|
||||||
|
docker save local-ai-backend:huggingface -o backend-images/huggingface.tar
|
||||||
|
|
||||||
|
docker-save-local-store: backend-images
|
||||||
|
docker save local-ai-backend:local-store -o backend-images/local-store.tar
|
||||||
|
|
||||||
|
docker-build-silero-vad:
|
||||||
|
docker build -t local-ai-backend:silero-vad -f backend/Dockerfile.go --build-arg BACKEND=silero-vad .
|
||||||
|
|
||||||
|
docker-save-silero-vad: backend-images
|
||||||
|
docker save local-ai-backend:silero-vad -o backend-images/silero-vad.tar
|
||||||
|
|
||||||
|
docker-save-piper: backend-images
|
||||||
|
docker save local-ai-backend:piper -o backend-images/piper.tar
|
||||||
|
|
||||||
|
docker-save-llama-cpp: backend-images
|
||||||
|
docker save local-ai-backend:llama-cpp -o backend-images/llama-cpp.tar
|
||||||
|
|
||||||
|
docker-save-bark-cpp: backend-images
|
||||||
|
docker save local-ai-backend:bark-cpp -o backend-images/bark-cpp.tar
|
||||||
|
|
||||||
|
docker-build-stablediffusion-ggml:
|
||||||
|
docker build -t local-ai-backend:stablediffusion-ggml -f backend/Dockerfile.go --build-arg BACKEND=stablediffusion-ggml .
|
||||||
|
|
||||||
|
docker-save-stablediffusion-ggml: backend-images
|
||||||
|
docker save local-ai-backend:stablediffusion-ggml -o backend-images/stablediffusion-ggml.tar
|
||||||
|
|
||||||
|
docker-build-rerankers:
|
||||||
|
docker build -t local-ai-backend:rerankers -f backend/Dockerfile.python --build-arg BACKEND=rerankers .
|
||||||
|
|
||||||
|
docker-build-vllm:
|
||||||
|
docker build -t local-ai-backend:vllm -f backend/Dockerfile.python --build-arg BACKEND=vllm .
|
||||||
|
|
||||||
|
docker-build-transformers:
|
||||||
|
docker build -t local-ai-backend:transformers -f backend/Dockerfile.python --build-arg BACKEND=transformers .
|
||||||
|
|
||||||
|
docker-build-diffusers:
|
||||||
|
docker build -t local-ai-backend:diffusers -f backend/Dockerfile.python --build-arg BACKEND=diffusers .
|
||||||
|
|
||||||
|
docker-build-kokoro:
|
||||||
|
docker build -t local-ai-backend:kokoro -f backend/Dockerfile.python --build-arg BACKEND=kokoro .
|
||||||
|
|
||||||
|
docker-build-whisper:
|
||||||
|
docker build --build-arg BUILD_TYPE=$(BUILD_TYPE) --build-arg BASE_IMAGE=$(BASE_IMAGE) -t local-ai-backend:whisper -f backend/Dockerfile.go --build-arg BACKEND=whisper .
|
||||||
|
|
||||||
|
docker-save-whisper: backend-images
|
||||||
|
docker save local-ai-backend:whisper -o backend-images/whisper.tar
|
||||||
|
|
||||||
|
docker-build-faster-whisper:
|
||||||
|
docker build -t local-ai-backend:faster-whisper -f backend/Dockerfile.python --build-arg BACKEND=faster-whisper .
|
||||||
|
|
||||||
|
docker-build-coqui:
|
||||||
|
docker build -t local-ai-backend:coqui -f backend/Dockerfile.python --build-arg BACKEND=coqui .
|
||||||
|
|
||||||
|
docker-build-bark:
|
||||||
|
docker build -t local-ai-backend:bark -f backend/Dockerfile.python --build-arg BACKEND=bark .
|
||||||
|
|
||||||
|
docker-build-chatterbox:
|
||||||
|
docker build -t local-ai-backend:chatterbox -f backend/Dockerfile.python --build-arg BACKEND=chatterbox .
|
||||||
|
|
||||||
|
docker-build-exllama2:
|
||||||
|
docker build -t local-ai-backend:exllama2 -f backend/Dockerfile.python --build-arg BACKEND=exllama2 .
|
||||||
|
|
||||||
|
docker-build-backends: docker-build-llama-cpp docker-build-rerankers docker-build-vllm docker-build-transformers docker-build-diffusers docker-build-kokoro docker-build-faster-whisper docker-build-coqui docker-build-bark docker-build-chatterbox docker-build-exllama2
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
### END Backends
|
||||||
|
########################################################
|
||||||
|
|
||||||
|
.PHONY: swagger
|
||||||
|
swagger:
|
||||||
|
swag init -g core/http/app.go --output swagger
|
||||||
|
|
||||||
|
.PHONY: gen-assets
|
||||||
|
gen-assets:
|
||||||
|
$(GOCMD) run core/dependencies_manager/manager.go webui_static.yaml core/http/static/assets
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
docs/layouts/_default:
|
||||||
|
mkdir -p docs/layouts/_default
|
||||||
|
|
||||||
|
docs/static/gallery.html: docs/layouts/_default
|
||||||
|
$(GOCMD) run ./.github/ci/modelslist.go ./gallery/index.yaml > docs/static/gallery.html
|
||||||
|
|
||||||
|
docs/public: docs/layouts/_default docs/static/gallery.html
|
||||||
|
cd docs && hugo --minify
|
||||||
|
|
||||||
|
docs-clean:
|
||||||
|
rm -rf docs/public
|
||||||
|
rm -rf docs/static/gallery.html
|
||||||
|
|
||||||
|
.PHONY: docs
|
||||||
|
docs: docs/static/gallery.html
|
||||||
|
cd docs && hugo serve
|
||||||
|
|||||||
438
README.md
438
README.md
@@ -1,220 +1,329 @@
|
|||||||
<h1 align="center">
|
<h1 align="center">
|
||||||
<br>
|
<br>
|
||||||
<img height="300" src="https://user-images.githubusercontent.com/2420543/233147843-88697415-6dbf-4368-a862-ab217f9f7342.jpeg"> <br>
|
<img width="300" src="./core/http/static/logo.png"> <br>
|
||||||
LocalAI
|
|
||||||
<br>
|
<br>
|
||||||
</h1>
|
</h1>
|
||||||
|
|
||||||
[](https://github.com/go-skynet/LocalAI/actions/workflows/test.yml) [](https://github.com/go-skynet/LocalAI/actions/workflows/image.yml)
|
<p align="center">
|
||||||
|
<a href="https://github.com/go-skynet/LocalAI/fork" target="blank">
|
||||||
|
<img src="https://img.shields.io/github/forks/go-skynet/LocalAI?style=for-the-badge" alt="LocalAI forks"/>
|
||||||
|
</a>
|
||||||
|
<a href="https://github.com/go-skynet/LocalAI/stargazers" target="blank">
|
||||||
|
<img src="https://img.shields.io/github/stars/go-skynet/LocalAI?style=for-the-badge" alt="LocalAI stars"/>
|
||||||
|
</a>
|
||||||
|
<a href="https://github.com/go-skynet/LocalAI/pulls" target="blank">
|
||||||
|
<img src="https://img.shields.io/github/issues-pr/go-skynet/LocalAI?style=for-the-badge" alt="LocalAI pull-requests"/>
|
||||||
|
</a>
|
||||||
|
<a href='https://github.com/go-skynet/LocalAI/releases'>
|
||||||
|
<img src='https://img.shields.io/github/release/go-skynet/LocalAI?&label=Latest&style=for-the-badge'>
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
[](https://discord.gg/uJAeKSAGDy)
|
<p align="center">
|
||||||
|
<a href="https://hub.docker.com/r/localai/localai" target="blank">
|
||||||
|
<img src="https://img.shields.io/badge/dockerhub-images-important.svg?logo=Docker" alt="LocalAI Docker hub"/>
|
||||||
|
</a>
|
||||||
|
<a href="https://quay.io/repository/go-skynet/local-ai?tab=tags&tag=latest" target="blank">
|
||||||
|
<img src="https://img.shields.io/badge/quay.io-images-important.svg?" alt="LocalAI Quay.io"/>
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
[Documentation website](https://localai.io/)
|
<p align="center">
|
||||||
|
<a href="https://twitter.com/LocalAI_API" target="blank">
|
||||||
|
<img src="https://img.shields.io/badge/X-%23000000.svg?style=for-the-badge&logo=X&logoColor=white&label=LocalAI_API" alt="Follow LocalAI_API"/>
|
||||||
|
</a>
|
||||||
|
<a href="https://discord.gg/uJAeKSAGDy" target="blank">
|
||||||
|
<img src="https://dcbadge.vercel.app/api/server/uJAeKSAGDy?style=flat-square&theme=default-inverted" alt="Join LocalAI Discord Community"/>
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
**LocalAI** is a drop-in replacement REST API that's compatible with OpenAI API specifications for local inferencing. It allows you to run LLMs (and not only) locally or on-prem with consumer grade hardware, supporting multiple model families that are compatible with the ggml format. Does not require GPU.
|
<p align="center">
|
||||||
|
<a href="https://trendshift.io/repositories/5539" target="_blank"><img src="https://trendshift.io/api/badge/repositories/5539" alt="mudler%2FLocalAI | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||||
|
</p>
|
||||||
|
|
||||||
For a list of the supported model families, please see [the model compatibility table](https://localai.io/model-compatibility/index.html#model-compatibility-table).
|
> :bulb: Get help - [❓FAQ](https://localai.io/faq/) [💭Discussions](https://github.com/go-skynet/LocalAI/discussions) [:speech_balloon: Discord](https://discord.gg/uJAeKSAGDy) [:book: Documentation website](https://localai.io/)
|
||||||
|
>
|
||||||
|
> [💻 Quickstart](https://localai.io/basics/getting_started/) [🖼️ Models](https://models.localai.io/) [🚀 Roadmap](https://github.com/mudler/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3Aroadmap) [🥽 Demo](https://demo.localai.io) [🌍 Explorer](https://explorer.localai.io) [🛫 Examples](https://github.com/mudler/LocalAI-examples) Try on
|
||||||
|
[](https://t.me/localaiofficial_bot)
|
||||||
|
|
||||||
In a nutshell:
|
[](https://github.com/go-skynet/LocalAI/actions/workflows/test.yml)[](https://github.com/go-skynet/LocalAI/actions/workflows/release.yaml)[](https://github.com/go-skynet/LocalAI/actions/workflows/image.yml)[](https://github.com/go-skynet/LocalAI/actions/workflows/bump_deps.yaml)[](https://artifacthub.io/packages/search?repo=localai)
|
||||||
|
|
||||||
- Local, OpenAI drop-in alternative REST API. You own your data.
|
**LocalAI** is the free, Open Source OpenAI alternative. LocalAI act as a drop-in replacement REST API that's compatible with OpenAI (Elevenlabs, Anthropic... ) API specifications for local AI inferencing. It allows you to run LLMs, generate images, audio (and not only) locally or on-prem with consumer grade hardware, supporting multiple model families. Does not require GPU. It is created and maintained by [Ettore Di Giacinto](https://github.com/mudler).
|
||||||
- NO GPU required. NO Internet access is required either
|
|
||||||
- Optional, GPU Acceleration is available in `llama.cpp`-compatible LLMs. See also the [build section](https://localai.io/basics/build/index.html).
|
|
||||||
- Supports multiple models:
|
|
||||||
- 📖 Text generation with GPTs (`llama.cpp`, `gpt4all.cpp`, ... and more)
|
|
||||||
- 🗣 Text to Audio 🎺🆕
|
|
||||||
- 🔈 Audio to Text (Audio transcription with `whisper.cpp`)
|
|
||||||
- 🎨 Image generation with stable diffusion
|
|
||||||
- 🏃 Once loaded the first time, it keep models loaded in memory for faster inference
|
|
||||||
- ⚡ Doesn't shell-out, but uses C++ bindings for a faster inference and better performance.
|
|
||||||
|
|
||||||
LocalAI was created by [Ettore Di Giacinto](https://github.com/mudler/) and is a community-driven project, focused on making the AI accessible to anyone. Any contribution, feedback and PR is welcome!
|
|
||||||
|
|
||||||
See the [Getting started](https://localai.io/basics/getting_started/index.html) and [examples](https://github.com/go-skynet/LocalAI/tree/master/examples/) sections to learn how to use LocalAI. For a list of curated models check out the [model gallery](https://localai.io/models/).
|
|
||||||
|
|
||||||
|
|
||||||
| [ChatGPT OSS alternative](https://github.com/go-skynet/LocalAI/tree/master/examples/chatbot-ui) | [Image generation](https://localai.io/api-endpoints/index.html#image-generation) |
|
## 📚🆕 Local Stack Family
|
||||||
|------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|
|
|
||||||
|  |  |
|
|
||||||
|
|
||||||
| [Telegram bot](https://github.com/go-skynet/LocalAI/tree/master/examples/telegram-bot) | [Flowise](https://github.com/go-skynet/LocalAI/tree/master/examples/flowise) |
|
🆕 LocalAI is now part of a comprehensive suite of AI tools designed to work together:
|
||||||
|------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|
|
|
||||||
 | | |
|
|
||||||
|
|
||||||
## News
|
<table>
|
||||||
|
<tr>
|
||||||
|
<td width="50%" valign="top">
|
||||||
|
<a href="https://github.com/mudler/LocalAGI">
|
||||||
|
<img src="https://raw.githubusercontent.com/mudler/LocalAGI/refs/heads/main/webui/react-ui/public/logo_2.png" width="300" alt="LocalAGI Logo">
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
<td width="50%" valign="top">
|
||||||
|
<h3><a href="https://github.com/mudler/LocalAGI">LocalAGI</a></h3>
|
||||||
|
<p>A powerful Local AI agent management platform that serves as a drop-in replacement for OpenAI's Responses API, enhanced with advanced agentic capabilities.</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td width="50%" valign="top">
|
||||||
|
<a href="https://github.com/mudler/LocalRecall">
|
||||||
|
<img src="https://raw.githubusercontent.com/mudler/LocalRecall/refs/heads/main/static/localrecall_horizontal.png" width="300" alt="LocalRecall Logo">
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
<td width="50%" valign="top">
|
||||||
|
<h3><a href="https://github.com/mudler/LocalRecall">LocalRecall</a></h3>
|
||||||
|
<p>A REST-ful API and knowledge base management system that provides persistent memory and storage capabilities for AI agents.</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
|
||||||
- 🔥🔥🔥 28-06-2023: **v1.20.0**: Added text to audio and gallery huggingface repositories! [Release notes](https://localai.io/basics/news/index.html#-28-06-2023-__v1200__-) [Changelog](https://github.com/go-skynet/LocalAI/releases/tag/v1.20.0)
|
## Screenshots
|
||||||
- 🔥🔥🔥 19-06-2023: **v1.19.0**: CUDA support! [Release notes](https://localai.io/basics/news/index.html#-19-06-2023-__v1190__-) [Changelog](https://github.com/go-skynet/LocalAI/releases/tag/v1.19.0)
|
|
||||||
- 🔥🔥🔥 06-06-2023: **v1.18.0**: Many updates, new features, and much more 🚀, check out the [Release notes](https://localai.io/basics/news/index.html#-06-06-2023-__v1180__-)!
|
|
||||||
- 29-05-2023: LocalAI now has a website, [https://localai.io](https://localai.io)! check the news in the [dedicated section](https://localai.io/basics/news/index.html)!
|
|
||||||
|
|
||||||
For latest news, follow also on Twitter [@LocalAI_API](https://twitter.com/LocalAI_API) and [@mudler_it](https://twitter.com/mudler_it)
|
|
||||||
|
|
||||||
## Contribute and help
|
| Talk Interface | Generate Audio |
|
||||||
|
| --- | --- |
|
||||||
|
|  |  |
|
||||||
|
|
||||||
To help the project you can:
|
| Models Overview | Generate Images |
|
||||||
|
| --- | --- |
|
||||||
|
|  |  |
|
||||||
|
|
||||||
- [Hacker news post](https://news.ycombinator.com/item?id=35726934) - help us out by voting if you like this project.
|
| Chat Interface | Home |
|
||||||
|
| --- | --- |
|
||||||
|
|  |  |
|
||||||
|
|
||||||
- If you have technological skills and want to contribute to development, have a look at the open issues. If you are new you can have a look at the [good-first-issue](https://github.com/go-skynet/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) and [help-wanted](https://github.com/go-skynet/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22) labels.
|
| Login | Swarm |
|
||||||
|
| --- | --- |
|
||||||
|
| |  |
|
||||||
|
|
||||||
- If you don't have technological skills you can still help improving documentation or add examples or share your user-stories with our community, any help and contribution is welcome!
|
## 💻 Quickstart
|
||||||
|
|
||||||
## Usage
|
Run the installer script:
|
||||||
|
|
||||||
Check out the [Getting started](https://localai.io/basics/getting_started/index.html) section. Here below you will find generic, quick instructions to get ready and use LocalAI.
|
|
||||||
|
|
||||||
The easiest way to run LocalAI is by using `docker-compose` (to build locally, see [building LocalAI](https://localai.io/basics/build/index.html)):
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
# Basic installation
|
||||||
git clone https://github.com/go-skynet/LocalAI
|
curl https://localai.io/install.sh | sh
|
||||||
|
|
||||||
cd LocalAI
|
|
||||||
|
|
||||||
# (optional) Checkout a specific LocalAI tag
|
|
||||||
# git checkout -b build <TAG>
|
|
||||||
|
|
||||||
# copy your models to models/
|
|
||||||
cp your-model.bin models/
|
|
||||||
|
|
||||||
# (optional) Edit the .env file to set things like context size and threads
|
|
||||||
# vim .env
|
|
||||||
|
|
||||||
# start with docker-compose
|
|
||||||
docker-compose up -d --pull always
|
|
||||||
# or you can build the images with:
|
|
||||||
# docker-compose up -d --build
|
|
||||||
|
|
||||||
# Now API is accessible at localhost:8080
|
|
||||||
curl http://localhost:8080/v1/models
|
|
||||||
# {"object":"list","data":[{"id":"your-model.bin","object":"model"}]}
|
|
||||||
|
|
||||||
curl http://localhost:8080/v1/completions -H "Content-Type: application/json" -d '{
|
|
||||||
"model": "your-model.bin",
|
|
||||||
"prompt": "A long time ago in a galaxy far, far away",
|
|
||||||
"temperature": 0.7
|
|
||||||
}'
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Example: Use GPT4ALL-J model
|
For more installation options, see [Installer Options](https://localai.io/docs/advanced/installer/).
|
||||||
|
|
||||||
<details>
|
Or run with docker:
|
||||||
|
|
||||||
|
### CPU only image:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Clone LocalAI
|
docker run -ti --name local-ai -p 8080:8080 localai/localai:latest
|
||||||
git clone https://github.com/go-skynet/LocalAI
|
|
||||||
|
|
||||||
cd LocalAI
|
|
||||||
|
|
||||||
# (optional) Checkout a specific LocalAI tag
|
|
||||||
# git checkout -b build <TAG>
|
|
||||||
|
|
||||||
# Download gpt4all-j to models/
|
|
||||||
wget https://gpt4all.io/models/ggml-gpt4all-j.bin -O models/ggml-gpt4all-j
|
|
||||||
|
|
||||||
# Use a template from the examples
|
|
||||||
cp -rf prompt-templates/ggml-gpt4all-j.tmpl models/
|
|
||||||
|
|
||||||
# (optional) Edit the .env file to set things like context size and threads
|
|
||||||
# vim .env
|
|
||||||
|
|
||||||
# start with docker-compose
|
|
||||||
docker-compose up -d --pull always
|
|
||||||
# or you can build the images with:
|
|
||||||
# docker-compose up -d --build
|
|
||||||
# Now API is accessible at localhost:8080
|
|
||||||
curl http://localhost:8080/v1/models
|
|
||||||
# {"object":"list","data":[{"id":"ggml-gpt4all-j","object":"model"}]}
|
|
||||||
|
|
||||||
curl http://localhost:8080/v1/chat/completions -H "Content-Type: application/json" -d '{
|
|
||||||
"model": "ggml-gpt4all-j",
|
|
||||||
"messages": [{"role": "user", "content": "How are you?"}],
|
|
||||||
"temperature": 0.9
|
|
||||||
}'
|
|
||||||
|
|
||||||
# {"model":"ggml-gpt4all-j","choices":[{"message":{"role":"assistant","content":"I'm doing well, thanks. How about you?"}}]}
|
|
||||||
```
|
|
||||||
</details>
|
|
||||||
|
|
||||||
|
|
||||||
### Build locally
|
|
||||||
|
|
||||||
<details>
|
|
||||||
|
|
||||||
In order to build the `LocalAI` container image locally you can use `docker`:
|
|
||||||
|
|
||||||
```
|
|
||||||
# build the image
|
|
||||||
docker build -t localai .
|
|
||||||
docker run localai
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Or you can build the binary with `make`:
|
### NVIDIA GPU Images:
|
||||||
|
|
||||||
```
|
```bash
|
||||||
make build
|
# CUDA 12.0
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 --gpus all localai/localai:latest-gpu-nvidia-cuda-12
|
||||||
|
|
||||||
|
# CUDA 11.7
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 --gpus all localai/localai:latest-gpu-nvidia-cuda-11
|
||||||
|
|
||||||
|
# NVIDIA Jetson (L4T) ARM64
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 --gpus all localai/localai:latest-nvidia-l4t-arm64
|
||||||
```
|
```
|
||||||
|
|
||||||
</details>
|
### AMD GPU Images (ROCm):
|
||||||
|
|
||||||
See the [build section](https://localai.io/basics/build/index.html) in our documentation for detailed instructions.
|
```bash
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 --device=/dev/kfd --device=/dev/dri --group-add=video localai/localai:latest-gpu-hipblas
|
||||||
|
```
|
||||||
|
|
||||||
### Run LocalAI in Kubernetes
|
### Intel GPU Images (oneAPI):
|
||||||
|
|
||||||
LocalAI can be installed inside Kubernetes with helm. See [installation instructions](https://localai.io/basics/getting_started/index.html#run-localai-in-kubernetes).
|
```bash
|
||||||
|
# Intel GPU with FP16 support
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 --device=/dev/dri/card1 --device=/dev/dri/renderD128 localai/localai:latest-gpu-intel-f16
|
||||||
|
|
||||||
## Supported API endpoints
|
# Intel GPU with FP32 support
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 --device=/dev/dri/card1 --device=/dev/dri/renderD128 localai/localai:latest-gpu-intel-f32
|
||||||
|
```
|
||||||
|
|
||||||
See the [list of the supported API endpoints](https://localai.io/api-endpoints/index.html) and how to configure image generation and audio transcription.
|
### Vulkan GPU Images:
|
||||||
|
|
||||||
## Frequently asked questions
|
```bash
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 localai/localai:latest-gpu-vulkan
|
||||||
|
```
|
||||||
|
|
||||||
See [the FAQ](https://localai.io/faq/index.html) section for a list of common questions.
|
### AIO Images (pre-downloaded models):
|
||||||
|
|
||||||
## Projects already using LocalAI to run local models
|
```bash
|
||||||
|
# CPU version
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 localai/localai:latest-aio-cpu
|
||||||
|
|
||||||
Feel free to open up a PR to get your project listed!
|
# NVIDIA CUDA 12 version
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 --gpus all localai/localai:latest-aio-gpu-nvidia-cuda-12
|
||||||
|
|
||||||
- [Kairos](https://github.com/kairos-io/kairos)
|
# NVIDIA CUDA 11 version
|
||||||
- [k8sgpt](https://github.com/k8sgpt-ai/k8sgpt#running-local-models)
|
docker run -ti --name local-ai -p 8080:8080 --gpus all localai/localai:latest-aio-gpu-nvidia-cuda-11
|
||||||
- [Spark](https://github.com/cedriking/spark)
|
|
||||||
- [autogpt4all](https://github.com/aorumbayev/autogpt4all)
|
|
||||||
- [Mods](https://github.com/charmbracelet/mods)
|
|
||||||
- [Flowise](https://github.com/FlowiseAI/Flowise)
|
|
||||||
|
|
||||||
## Short-term roadmap
|
# Intel GPU version
|
||||||
|
docker run -ti --name local-ai -p 8080:8080 localai/localai:latest-aio-gpu-intel-f16
|
||||||
|
|
||||||
- [x] Mimic OpenAI API (https://github.com/go-skynet/LocalAI/issues/10)
|
# AMD GPU version
|
||||||
- [x] Binary releases (https://github.com/go-skynet/LocalAI/issues/6)
|
docker run -ti --name local-ai -p 8080:8080 --device=/dev/kfd --device=/dev/dri --group-add=video localai/localai:latest-aio-gpu-hipblas
|
||||||
- [ ] Upstream our golang bindings to llama.cpp (https://github.com/ggerganov/llama.cpp/issues/351)
|
```
|
||||||
- [x] Upstream [gpt4all](https://github.com/go-skynet/LocalAI/issues/85) bindings
|
|
||||||
- [x] Multi-model support
|
|
||||||
- [x] Have a webUI!
|
|
||||||
- [x] Allow configuration of defaults for models.
|
|
||||||
- [x] Support for embeddings
|
|
||||||
- [x] Support for audio transcription with https://github.com/ggerganov/whisper.cpp
|
|
||||||
- [x] GPU/CUDA support ( https://github.com/go-skynet/LocalAI/issues/69 )
|
|
||||||
- [X] Enable automatic downloading of models from a curated gallery
|
|
||||||
- [ ] Enable automatic downloading of models from HuggingFace
|
|
||||||
- [ ] Enable gallery management directly from the webui.
|
|
||||||
- [ ] 🔥 OpenAI functions: https://github.com/go-skynet/LocalAI/issues/588
|
|
||||||
|
|
||||||
## Star history
|
For more information about the AIO images and pre-downloaded models, see [Container Documentation](https://localai.io/basics/container/).
|
||||||
|
|
||||||
|
To load models:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# From the model gallery (see available models with `local-ai models list`, in the WebUI from the model tab, or visiting https://models.localai.io)
|
||||||
|
local-ai run llama-3.2-1b-instruct:q4_k_m
|
||||||
|
# Start LocalAI with the phi-2 model directly from huggingface
|
||||||
|
local-ai run huggingface://TheBloke/phi-2-GGUF/phi-2.Q8_0.gguf
|
||||||
|
# Install and run a model from the Ollama OCI registry
|
||||||
|
local-ai run ollama://gemma:2b
|
||||||
|
# Run a model from a configuration file
|
||||||
|
local-ai run https://gist.githubusercontent.com/.../phi-2.yaml
|
||||||
|
# Install and run a model from a standard OCI registry (e.g., Docker Hub)
|
||||||
|
local-ai run oci://localai/phi-2:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
For more information, see [💻 Getting started](https://localai.io/basics/getting_started/index.html)
|
||||||
|
|
||||||
|
## 📰 Latest project news
|
||||||
|
|
||||||
|
- June 2025: [Backend management](https://github.com/mudler/LocalAI/pull/5607) has been added. Attention: extras images are going to be deprecated from the next release! Read [the backend management PR](https://github.com/mudler/LocalAI/pull/5607).
|
||||||
|
- May 2025: [Audio input](https://github.com/mudler/LocalAI/pull/5466) and [Reranking](https://github.com/mudler/LocalAI/pull/5396) in llama.cpp backend, [Realtime API](https://github.com/mudler/LocalAI/pull/5392), Support to Gemma, SmollVLM, and more multimodal models (available in the gallery).
|
||||||
|
- May 2025: Important: image name changes [See release](https://github.com/mudler/LocalAI/releases/tag/v2.29.0)
|
||||||
|
- Apr 2025: Rebrand, WebUI enhancements
|
||||||
|
- Apr 2025: [LocalAGI](https://github.com/mudler/LocalAGI) and [LocalRecall](https://github.com/mudler/LocalRecall) join the LocalAI family stack.
|
||||||
|
- Apr 2025: WebUI overhaul, AIO images updates
|
||||||
|
- Feb 2025: Backend cleanup, Breaking changes, new backends (kokoro, OutelTTS, faster-whisper), Nvidia L4T images
|
||||||
|
- Jan 2025: LocalAI model release: https://huggingface.co/mudler/LocalAI-functioncall-phi-4-v0.3, SANA support in diffusers: https://github.com/mudler/LocalAI/pull/4603
|
||||||
|
- Dec 2024: stablediffusion.cpp backend (ggml) added ( https://github.com/mudler/LocalAI/pull/4289 )
|
||||||
|
- Nov 2024: Bark.cpp backend added ( https://github.com/mudler/LocalAI/pull/4287 )
|
||||||
|
- Nov 2024: Voice activity detection models (**VAD**) added to the API: https://github.com/mudler/LocalAI/pull/4204
|
||||||
|
- Oct 2024: examples moved to [LocalAI-examples](https://github.com/mudler/LocalAI-examples)
|
||||||
|
- Aug 2024: 🆕 FLUX-1, [P2P Explorer](https://explorer.localai.io)
|
||||||
|
- July 2024: 🔥🔥 🆕 P2P Dashboard, LocalAI Federated mode and AI Swarms: https://github.com/mudler/LocalAI/pull/2723. P2P Global community pools: https://github.com/mudler/LocalAI/issues/3113
|
||||||
|
- May 2024: 🔥🔥 Decentralized P2P llama.cpp: https://github.com/mudler/LocalAI/pull/2343 (peer2peer llama.cpp!) 👉 Docs https://localai.io/features/distribute/
|
||||||
|
- May 2024: 🔥🔥 Distributed inferencing: https://github.com/mudler/LocalAI/pull/2324
|
||||||
|
- April 2024: Reranker API: https://github.com/mudler/LocalAI/pull/2121
|
||||||
|
|
||||||
|
Roadmap items: [List of issues](https://github.com/mudler/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3Aroadmap)
|
||||||
|
|
||||||
|
## 🚀 [Features](https://localai.io/features/)
|
||||||
|
|
||||||
|
- 🧩 [Backend Gallery](https://localai.io/backends/): Install/remove backends on the fly, powered by OCI images — fully customizable and API-driven.
|
||||||
|
- 📖 [Text generation with GPTs](https://localai.io/features/text-generation/) (`llama.cpp`, `transformers`, `vllm` ... [:book: and more](https://localai.io/model-compatibility/index.html#model-compatibility-table))
|
||||||
|
- 🗣 [Text to Audio](https://localai.io/features/text-to-audio/)
|
||||||
|
- 🔈 [Audio to Text](https://localai.io/features/audio-to-text/) (Audio transcription with `whisper.cpp`)
|
||||||
|
- 🎨 [Image generation](https://localai.io/features/image-generation)
|
||||||
|
- 🔥 [OpenAI-alike tools API](https://localai.io/features/openai-functions/)
|
||||||
|
- 🧠 [Embeddings generation for vector databases](https://localai.io/features/embeddings/)
|
||||||
|
- ✍️ [Constrained grammars](https://localai.io/features/constrained_grammars/)
|
||||||
|
- 🖼️ [Download Models directly from Huggingface ](https://localai.io/models/)
|
||||||
|
- 🥽 [Vision API](https://localai.io/features/gpt-vision/)
|
||||||
|
- 📈 [Reranker API](https://localai.io/features/reranker/)
|
||||||
|
- 🆕🖧 [P2P Inferencing](https://localai.io/features/distribute/)
|
||||||
|
- [Agentic capabilities](https://github.com/mudler/LocalAGI)
|
||||||
|
- 🔊 Voice activity detection (Silero-VAD support)
|
||||||
|
- 🌍 Integrated WebUI!
|
||||||
|
|
||||||
|
|
||||||
|
### 🔗 Community and integrations
|
||||||
|
|
||||||
|
Build and deploy custom containers:
|
||||||
|
- https://github.com/sozercan/aikit
|
||||||
|
|
||||||
|
WebUIs:
|
||||||
|
- https://github.com/Jirubizu/localai-admin
|
||||||
|
- https://github.com/go-skynet/LocalAI-frontend
|
||||||
|
- QA-Pilot(An interactive chat project that leverages LocalAI LLMs for rapid understanding and navigation of GitHub code repository) https://github.com/reid41/QA-Pilot
|
||||||
|
|
||||||
|
Model galleries
|
||||||
|
- https://github.com/go-skynet/model-gallery
|
||||||
|
|
||||||
|
Other:
|
||||||
|
- Helm chart https://github.com/go-skynet/helm-charts
|
||||||
|
- VSCode extension https://github.com/badgooooor/localai-vscode-plugin
|
||||||
|
- Langchain: https://python.langchain.com/docs/integrations/providers/localai/
|
||||||
|
- Terminal utility https://github.com/djcopley/ShellOracle
|
||||||
|
- Local Smart assistant https://github.com/mudler/LocalAGI
|
||||||
|
- Home Assistant https://github.com/sammcj/homeassistant-localai / https://github.com/drndos/hass-openai-custom-conversation / https://github.com/valentinfrlch/ha-gpt4vision
|
||||||
|
- Discord bot https://github.com/mudler/LocalAGI/tree/main/examples/discord
|
||||||
|
- Slack bot https://github.com/mudler/LocalAGI/tree/main/examples/slack
|
||||||
|
- Shell-Pilot(Interact with LLM using LocalAI models via pure shell scripts on your Linux or MacOS system) https://github.com/reid41/shell-pilot
|
||||||
|
- Telegram bot https://github.com/mudler/LocalAI/tree/master/examples/telegram-bot
|
||||||
|
- Another Telegram Bot https://github.com/JackBekket/Hellper
|
||||||
|
- Auto-documentation https://github.com/JackBekket/Reflexia
|
||||||
|
- Github bot which answer on issues, with code and documentation as context https://github.com/JackBekket/GitHelper
|
||||||
|
- Github Actions: https://github.com/marketplace/actions/start-localai
|
||||||
|
- Examples: https://github.com/mudler/LocalAI/tree/master/examples/
|
||||||
|
|
||||||
|
|
||||||
|
### 🔗 Resources
|
||||||
|
|
||||||
|
- [LLM finetuning guide](https://localai.io/docs/advanced/fine-tuning/)
|
||||||
|
- [How to build locally](https://localai.io/basics/build/index.html)
|
||||||
|
- [How to install in Kubernetes](https://localai.io/basics/getting_started/index.html#run-localai-in-kubernetes)
|
||||||
|
- [Projects integrating LocalAI](https://localai.io/docs/integrations/)
|
||||||
|
- [How tos section](https://io.midori-ai.xyz/howtos/) (curated by our community)
|
||||||
|
|
||||||
|
## :book: 🎥 [Media, Blogs, Social](https://localai.io/basics/news/#media-blogs-social)
|
||||||
|
|
||||||
|
- [Run Visual studio code with LocalAI (SUSE)](https://www.suse.com/c/running-ai-locally/)
|
||||||
|
- 🆕 [Run LocalAI on Jetson Nano Devkit](https://mudler.pm/posts/local-ai-jetson-nano-devkit/)
|
||||||
|
- [Run LocalAI on AWS EKS with Pulumi](https://www.pulumi.com/blog/low-code-llm-apps-with-local-ai-flowise-and-pulumi/)
|
||||||
|
- [Run LocalAI on AWS](https://staleks.hashnode.dev/installing-localai-on-aws-ec2-instance)
|
||||||
|
- [Create a slackbot for teams and OSS projects that answer to documentation](https://mudler.pm/posts/smart-slackbot-for-teams/)
|
||||||
|
- [LocalAI meets k8sgpt](https://www.youtube.com/watch?v=PKrDNuJ_dfE)
|
||||||
|
- [Question Answering on Documents locally with LangChain, LocalAI, Chroma, and GPT4All](https://mudler.pm/posts/localai-question-answering/)
|
||||||
|
- [Tutorial to use k8sgpt with LocalAI](https://medium.com/@tyler_97636/k8sgpt-localai-unlock-kubernetes-superpowers-for-free-584790de9b65)
|
||||||
|
|
||||||
|
## Citation
|
||||||
|
|
||||||
|
If you utilize this repository, data in a downstream project, please consider citing it with:
|
||||||
|
|
||||||
|
```
|
||||||
|
@misc{localai,
|
||||||
|
author = {Ettore Di Giacinto},
|
||||||
|
title = {LocalAI: The free, Open source OpenAI alternative},
|
||||||
|
year = {2023},
|
||||||
|
publisher = {GitHub},
|
||||||
|
journal = {GitHub repository},
|
||||||
|
      howpublished = {\url{https://github.com/go-skynet/LocalAI}},
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## ❤️ Sponsors
|
||||||
|
|
||||||
|
> Do you find LocalAI useful?
|
||||||
|
|
||||||
|
Support the project by becoming [a backer or sponsor](https://github.com/sponsors/mudler). Your logo will show up here with a link to your website.
|
||||||
|
|
||||||
|
A huge thank you to our generous sponsors who support this project covering CI expenses, and our [Sponsor list](https://github.com/sponsors/mudler):
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<a href="https://www.spectrocloud.com/" target="blank">
|
||||||
|
<img height="200" src="https://github.com/user-attachments/assets/72eab1dd-8b93-4fc0-9ade-84db49f24962">
|
||||||
|
</a>
|
||||||
|
<a href="https://www.premai.io/" target="blank">
|
||||||
|
<img height="200" src="https://github.com/mudler/LocalAI/assets/2420543/42e4ca83-661e-4f79-8e46-ae43689683d6"> <br>
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
## 🌟 Star history
|
||||||
|
|
||||||
[](https://star-history.com/#go-skynet/LocalAI&Date)
|
[](https://star-history.com/#go-skynet/LocalAI&Date)
|
||||||
|
|
||||||
## License
|
## 📖 License
|
||||||
|
|
||||||
LocalAI is a community-driven project created by [Ettore Di Giacinto](https://github.com/mudler/).
|
LocalAI is a community-driven project created by [Ettore Di Giacinto](https://github.com/mudler/).
|
||||||
|
|
||||||
MIT
|
MIT - Author Ettore Di Giacinto <mudler@localai.io>
|
||||||
|
|
||||||
## Author
|
## 🙇 Acknowledgements
|
||||||
|
|
||||||
Ettore Di Giacinto and others
|
|
||||||
|
|
||||||
## Acknowledgements
|
|
||||||
|
|
||||||
LocalAI couldn't have been built without the help of great software already available from the community. Thank you!
|
LocalAI couldn't have been built without the help of great software already available from the community. Thank you!
|
||||||
|
|
||||||
@@ -224,10 +333,11 @@ LocalAI couldn't have been built without the help of great software already avai
|
|||||||
- https://github.com/antimatter15/alpaca.cpp
|
- https://github.com/antimatter15/alpaca.cpp
|
||||||
- https://github.com/EdVince/Stable-Diffusion-NCNN
|
- https://github.com/EdVince/Stable-Diffusion-NCNN
|
||||||
- https://github.com/ggerganov/whisper.cpp
|
- https://github.com/ggerganov/whisper.cpp
|
||||||
- https://github.com/saharNooby/rwkv.cpp
|
- https://github.com/rhasspy/piper
|
||||||
|
|
||||||
## Contributors
|
## 🤗 Contributors
|
||||||
|
|
||||||
|
This is a community project, a special thanks to our contributors! 🤗
|
||||||
<a href="https://github.com/go-skynet/LocalAI/graphs/contributors">
|
<a href="https://github.com/go-skynet/LocalAI/graphs/contributors">
|
||||||
<img src="https://contrib.rocks/image?repo=go-skynet/LocalAI" />
|
<img src="https://contrib.rocks/image?repo=go-skynet/LocalAI" />
|
||||||
</a>
|
</a>
|
||||||
|
|||||||
42
SECURITY.md
Normal file
42
SECURITY.md
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
# Security Policy
|
||||||
|
|
||||||
|
## Introduction
|
||||||
|
|
||||||
|
At LocalAI, we take the security of our software seriously. We understand the importance of protecting our community from vulnerabilities and are committed to ensuring the safety and security of our users.
|
||||||
|
|
||||||
|
## Supported Versions
|
||||||
|
|
||||||
|
We provide support and updates for certain versions of our software. The following table outlines which versions are currently supported with security updates:
|
||||||
|
|
||||||
|
| Version | Supported |
|
||||||
|
| ------- | ------------------ |
|
||||||
|
| > 2.0 | :white_check_mark: |
|
||||||
|
| < 2.0 | :x: |
|
||||||
|
|
||||||
|
Please ensure that you are using a supported version to receive the latest security updates.
|
||||||
|
|
||||||
|
## Reporting a Vulnerability
|
||||||
|
|
||||||
|
We encourage the responsible disclosure of any security vulnerabilities. If you believe you've found a security issue in our software, we kindly ask you to follow the steps below to report it to us:
|
||||||
|
|
||||||
|
1. **Email Us:** Send an email to [security@localai.io](mailto:security@localai.io) with a detailed report. Please do not disclose the vulnerability publicly or to any third parties before it has been addressed by us.
|
||||||
|
|
||||||
|
2. **Expect a Response:** We aim to acknowledge receipt of vulnerability reports within 48 hours. Our security team will review your report and work closely with you to understand the impact and ensure a thorough investigation.
|
||||||
|
|
||||||
|
3. **Collaboration:** If the vulnerability is accepted, we will work with you and our community to address the issue promptly. We'll keep you informed throughout the resolution process and may request additional information or collaboration.
|
||||||
|
|
||||||
|
4. **Disclosure:** Once the vulnerability has been resolved, we encourage a coordinated disclosure. We believe in transparency and will work with you to ensure that our community is informed in a responsible manner.
|
||||||
|
|
||||||
|
## Use of Third-Party Platforms
|
||||||
|
|
||||||
|
As a Free and Open Source Software (FOSS) organization, we do not offer monetary bounties. However, researchers who wish to report vulnerabilities can also do so via [Huntr](https://huntr.dev/bounties), a platform that recognizes contributions to open source security.
|
||||||
|
|
||||||
|
## Contact
|
||||||
|
|
||||||
|
For any security-related inquiries beyond vulnerability reporting, please contact us at [security@localai.io](mailto:security@localai.io).
|
||||||
|
|
||||||
|
## Acknowledgments
|
||||||
|
|
||||||
|
We appreciate the efforts of those who contribute to the security of our project. Your responsible disclosure is invaluable to the safety and integrity of LocalAI.
|
||||||
|
|
||||||
|
Thank you for helping us keep LocalAI secure.
|
||||||
5
aio/cpu/README.md
Normal file
5
aio/cpu/README.md
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
## AIO CPU size
|
||||||
|
|
||||||
|
Use this image with CPU-only.
|
||||||
|
|
||||||
|
Please keep using only C++ backends so the base image is as small as possible (without CUDA, cuDNN, python, etc).
|
||||||
13
aio/cpu/embeddings.yaml
Normal file
13
aio/cpu/embeddings.yaml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
embeddings: true
|
||||||
|
name: text-embedding-ada-002
|
||||||
|
backend: llama-cpp
|
||||||
|
parameters:
|
||||||
|
model: huggingface://bartowski/granite-embedding-107m-multilingual-GGUF/granite-embedding-107m-multilingual-f16.gguf
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
You can test this model with curl like this:
|
||||||
|
|
||||||
|
curl http://localhost:8080/embeddings -X POST -H "Content-Type: application/json" -d '{
|
||||||
|
"input": "Your text string goes here",
|
||||||
|
"model": "text-embedding-ada-002"
|
||||||
|
}'
|
||||||
23
aio/cpu/image-gen.yaml
Normal file
23
aio/cpu/image-gen.yaml
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
name: stablediffusion
|
||||||
|
backend: stablediffusion-ggml
|
||||||
|
cfg_scale: 4.5
|
||||||
|
|
||||||
|
options:
|
||||||
|
- sampler:euler
|
||||||
|
parameters:
|
||||||
|
model: stable-diffusion-v1-5-pruned-emaonly-Q4_0.gguf
|
||||||
|
step: 25
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: "stable-diffusion-v1-5-pruned-emaonly-Q4_0.gguf"
|
||||||
|
sha256: "b8944e9fe0b69b36ae1b5bb0185b3a7b8ef14347fe0fa9af6c64c4829022261f"
|
||||||
|
uri: "huggingface://second-state/stable-diffusion-v1-5-GGUF/stable-diffusion-v1-5-pruned-emaonly-Q4_0.gguf"
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
curl http://localhost:8080/v1/images/generations \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"prompt": "<positive prompt>|<negative prompt>",
|
||||||
|
"step": 25,
|
||||||
|
"size": "512x512"
|
||||||
|
}'
|
||||||
33
aio/cpu/rerank.yaml
Normal file
33
aio/cpu/rerank.yaml
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
name: jina-reranker-v1-base-en
|
||||||
|
reranking: true
|
||||||
|
f16: true
|
||||||
|
parameters:
|
||||||
|
model: jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
backend: llama-cpp
|
||||||
|
download_files:
|
||||||
|
- filename: jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
sha256: 5f696cf0d0f3d347c4a279eee8270e5918554cdac0ed1f632f2619e4e8341407
|
||||||
|
uri: huggingface://mradermacher/jina-reranker-v1-tiny-en-GGUF/jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
You can test this model with curl like this:
|
||||||
|
|
||||||
|
curl http://localhost:8080/v1/rerank \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"model": "jina-reranker-v1-base-en",
|
||||||
|
"query": "Organic skincare products for sensitive skin",
|
||||||
|
"documents": [
|
||||||
|
"Eco-friendly kitchenware for modern homes",
|
||||||
|
"Biodegradable cleaning supplies for eco-conscious consumers",
|
||||||
|
"Organic cotton baby clothes for sensitive skin",
|
||||||
|
"Natural organic skincare range for sensitive skin",
|
||||||
|
"Tech gadgets for smart homes: 2024 edition",
|
||||||
|
"Sustainable gardening tools and compost solutions",
|
||||||
|
"Sensitive skin-friendly facial cleansers and toners",
|
||||||
|
"Organic food wraps and storage solutions",
|
||||||
|
"All-natural pet food for dogs with allergies",
|
||||||
|
"Yoga mats made from recycled materials"
|
||||||
|
],
|
||||||
|
"top_n": 3
|
||||||
|
}'
|
||||||
18
aio/cpu/speech-to-text.yaml
Normal file
18
aio/cpu/speech-to-text.yaml
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
name: whisper-1
|
||||||
|
backend: whisper
|
||||||
|
parameters:
|
||||||
|
model: ggml-whisper-base.bin
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
## example audio file
|
||||||
|
wget --quiet --show-progress -O gb1.ogg https://upload.wikimedia.org/wikipedia/commons/1/1f/George_W_Bush_Columbia_FINAL.ogg
|
||||||
|
|
||||||
|
## Send the example audio file to the transcriptions endpoint
|
||||||
|
curl http://localhost:8080/v1/audio/transcriptions \
|
||||||
|
-H "Content-Type: multipart/form-data" \
|
||||||
|
-F file="@$PWD/gb1.ogg" -F model="whisper-1"
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: "ggml-whisper-base.bin"
|
||||||
|
sha256: "60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe"
|
||||||
|
uri: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin"
|
||||||
15
aio/cpu/text-to-speech.yaml
Normal file
15
aio/cpu/text-to-speech.yaml
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
name: tts-1
|
||||||
|
download_files:
|
||||||
|
- filename: voice-en-us-amy-low.tar.gz
|
||||||
|
uri: https://github.com/rhasspy/piper/releases/download/v0.0.2/voice-en-us-amy-low.tar.gz
|
||||||
|
backend: piper
|
||||||
|
parameters:
|
||||||
|
model: en-us-amy-low.onnx
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
To test if this model works as expected, you can use the following curl command:
|
||||||
|
|
||||||
|
curl http://localhost:8080/tts -H "Content-Type: application/json" -d '{
|
||||||
|
"model":"voice-en-us-amy-low",
|
||||||
|
"input": "Hi, this is a test."
|
||||||
|
}'
|
||||||
58
aio/cpu/text-to-text.yaml
Normal file
58
aio/cpu/text-to-text.yaml
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
context_size: 8192
|
||||||
|
f16: true
|
||||||
|
backend: llama-cpp
|
||||||
|
function:
|
||||||
|
grammar:
|
||||||
|
no_mixed_free_string: true
|
||||||
|
schema_type: llama3.1 # or JSON is supported too (json)
|
||||||
|
response_regex:
|
||||||
|
- <function=(?P<name>\w+)>(?P<arguments>.*)</function>
|
||||||
|
mmap: true
|
||||||
|
name: gpt-4
|
||||||
|
parameters:
|
||||||
|
model: Hermes-3-Llama-3.2-3B-Q4_K_M.gguf
|
||||||
|
stopwords:
|
||||||
|
- <|im_end|>
|
||||||
|
- <dummy32000>
|
||||||
|
- <|eot_id|>
|
||||||
|
- <|end_of_text|>
|
||||||
|
template:
|
||||||
|
chat: |
|
||||||
|
<|begin_of_text|><|start_header_id|>system<|end_header_id|>
|
||||||
|
You are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>
|
||||||
|
{{.Input }}
|
||||||
|
<|start_header_id|>assistant<|end_header_id|>
|
||||||
|
chat_message: |
|
||||||
|
<|start_header_id|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "tool"}}tool{{else if eq .RoleName "user"}}user{{end}}<|end_header_id|>
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
{{ else if eq .RoleName "tool" -}}
|
||||||
|
The Function was executed and the response was:
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .Content -}}
|
||||||
|
{{.Content -}}
|
||||||
|
{{ else if .FunctionCall -}}
|
||||||
|
{{ range .FunctionCall }}
|
||||||
|
[{{.FunctionCall.Name}}({{.FunctionCall.Arguments}})]
|
||||||
|
{{ end }}
|
||||||
|
{{ end -}}
|
||||||
|
<|eot_id|>
|
||||||
|
completion: |
|
||||||
|
{{.Input}}
|
||||||
|
function: |
|
||||||
|
<|start_header_id|>system<|end_header_id|>
|
||||||
|
You are an expert in composing functions. You are given a question and a set of possible functions.
|
||||||
|
Based on the question, you will need to make one or more function/tool calls to achieve the purpose.
|
||||||
|
If none of the functions can be used, point it out. If the given question lacks the parameters required by the function, also point it out. You should only return the function call in tools call sections.
|
||||||
|
If you decide to invoke any of the function(s), you MUST put it in the format as follows:
|
||||||
|
[func_name1(params_name1=params_value1,params_name2=params_value2,...),func_name2(params_name1=params_value1,params_name2=params_value2,...)]
|
||||||
|
You SHOULD NOT include any other text in the response.
|
||||||
|
Here is a list of functions in JSON format that you can invoke.
|
||||||
|
{{toJson .Functions}}
|
||||||
|
<|eot_id|><|start_header_id|>user<|end_header_id|>
|
||||||
|
{{.Input}}
|
||||||
|
<|eot_id|><|start_header_id|>assistant<|end_header_id|>
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: Hermes-3-Llama-3.2-3B-Q4_K_M.gguf
|
||||||
|
sha256: 2e220a14ba4328fee38cf36c2c068261560f999fadb5725ce5c6d977cb5126b5
|
||||||
|
uri: huggingface://bartowski/Hermes-3-Llama-3.2-3B-GGUF/Hermes-3-Llama-3.2-3B-Q4_K_M.gguf
|
||||||
8
aio/cpu/vad.yaml
Normal file
8
aio/cpu/vad.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
backend: silero-vad
|
||||||
|
name: silero-vad
|
||||||
|
parameters:
|
||||||
|
model: silero-vad.onnx
|
||||||
|
download_files:
|
||||||
|
- filename: silero-vad.onnx
|
||||||
|
uri: https://huggingface.co/onnx-community/silero-vad/resolve/main/onnx/model.onnx
|
||||||
|
sha256: a4a068cd6cf1ea8355b84327595838ca748ec29a25bc91fc82e6c299ccdc5808
|
||||||
50
aio/cpu/vision.yaml
Normal file
50
aio/cpu/vision.yaml
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
context_size: 4096
|
||||||
|
f16: true
|
||||||
|
backend: llama-cpp
|
||||||
|
mmap: true
|
||||||
|
mmproj: minicpm-v-2_6-mmproj-f16.gguf
|
||||||
|
name: gpt-4o
|
||||||
|
parameters:
|
||||||
|
model: minicpm-v-2_6-Q4_K_M.gguf
|
||||||
|
stopwords:
|
||||||
|
- <|im_end|>
|
||||||
|
- <dummy32000>
|
||||||
|
- </s>
|
||||||
|
- <|endoftext|>
|
||||||
|
template:
|
||||||
|
chat: |
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
chat_message: |
|
||||||
|
<|im_start|>{{ .RoleName }}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
Function call:
|
||||||
|
{{ else if eq .RoleName "tool" -}}
|
||||||
|
Function response:
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .Content -}}
|
||||||
|
{{.Content }}
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
{{toJson .FunctionCall}}
|
||||||
|
{{ end -}}<|im_end|>
|
||||||
|
completion: |
|
||||||
|
{{.Input}}
|
||||||
|
function: |
|
||||||
|
<|im_start|>system
|
||||||
|
You are a function calling AI model. You are provided with functions to execute. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
|
||||||
|
{{range .Functions}}
|
||||||
|
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
|
||||||
|
{{end}}
|
||||||
|
For each function call return a json object with function name and arguments
|
||||||
|
<|im_end|>
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: minicpm-v-2_6-Q4_K_M.gguf
|
||||||
|
sha256: 3a4078d53b46f22989adbf998ce5a3fd090b6541f112d7e936eb4204a04100b1
|
||||||
|
uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/ggml-model-Q4_K_M.gguf
|
||||||
|
- filename: minicpm-v-2_6-mmproj-f16.gguf
|
||||||
|
uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/mmproj-model-f16.gguf
|
||||||
|
sha256: 4485f68a0f1aa404c391e788ea88ea653c100d8e98fe572698f701e5809711fd
|
||||||
138
aio/entrypoint.sh
Executable file
138
aio/entrypoint.sh
Executable file
@@ -0,0 +1,138 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
echo "===> LocalAI All-in-One (AIO) container starting..."
|
||||||
|
|
||||||
|
GPU_ACCELERATION=false
|
||||||
|
GPU_VENDOR=""
|
||||||
|
|
||||||
|
function check_intel() {
|
||||||
|
if lspci | grep -E 'VGA|3D' | grep -iq intel; then
|
||||||
|
echo "Intel GPU detected"
|
||||||
|
if [ -d /opt/intel ]; then
|
||||||
|
GPU_ACCELERATION=true
|
||||||
|
GPU_VENDOR=intel
|
||||||
|
else
|
||||||
|
echo "Intel GPU detected, but Intel GPU drivers are not installed. GPU acceleration will not be available."
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function check_nvidia_wsl() {
|
||||||
|
if lspci | grep -E 'VGA|3D' | grep -iq "Microsoft Corporation Device 008e"; then
|
||||||
|
    # We assume this WSL2 card is NVIDIA, then check for nvidia-smi
|
||||||
|
# Make sure the container was run with `--gpus all` as the only required parameter
|
||||||
|
echo "NVIDIA GPU detected via WSL2"
|
||||||
|
# nvidia-smi should be installed in the container
|
||||||
|
if nvidia-smi; then
|
||||||
|
GPU_ACCELERATION=true
|
||||||
|
GPU_VENDOR=nvidia
|
||||||
|
else
|
||||||
|
echo "NVIDIA GPU detected via WSL2, but nvidia-smi is not installed. GPU acceleration will not be available."
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function check_amd() {
|
||||||
|
if lspci | grep -E 'VGA|3D' | grep -iq amd; then
|
||||||
|
echo "AMD GPU detected"
|
||||||
|
# Check if ROCm is installed
|
||||||
|
if [ -d /opt/rocm ]; then
|
||||||
|
GPU_ACCELERATION=true
|
||||||
|
GPU_VENDOR=amd
|
||||||
|
else
|
||||||
|
echo "AMD GPU detected, but ROCm is not installed. GPU acceleration will not be available."
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function check_nvidia() {
|
||||||
|
if lspci | grep -E 'VGA|3D' | grep -iq nvidia; then
|
||||||
|
echo "NVIDIA GPU detected"
|
||||||
|
# nvidia-smi should be installed in the container
|
||||||
|
if nvidia-smi; then
|
||||||
|
GPU_ACCELERATION=true
|
||||||
|
GPU_VENDOR=nvidia
|
||||||
|
else
|
||||||
|
echo "NVIDIA GPU detected, but nvidia-smi is not installed. GPU acceleration will not be available."
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function check_metal() {
|
||||||
|
if system_profiler SPDisplaysDataType | grep -iq 'Metal'; then
|
||||||
|
echo "Apple Metal supported GPU detected"
|
||||||
|
GPU_ACCELERATION=true
|
||||||
|
GPU_VENDOR=apple
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function detect_gpu() {
|
||||||
|
case "$(uname -s)" in
|
||||||
|
Linux)
|
||||||
|
check_nvidia
|
||||||
|
check_amd
|
||||||
|
check_intel
|
||||||
|
check_nvidia_wsl
|
||||||
|
;;
|
||||||
|
Darwin)
|
||||||
|
check_metal
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
function detect_gpu_size() {
|
||||||
|
# Attempting to find GPU memory size for NVIDIA GPUs
|
||||||
|
if [ "$GPU_ACCELERATION" = true ] && [ "$GPU_VENDOR" = "nvidia" ]; then
|
||||||
|
echo "NVIDIA GPU detected. Attempting to find memory size..."
|
||||||
|
# Using head -n 1 to get the total memory of the 1st NVIDIA GPU detected.
|
||||||
|
# If handling multiple GPUs is required in the future, this is the place to do it
|
||||||
|
nvidia_sm=$(nvidia-smi --query-gpu=memory.total --format=csv,noheader,nounits | head -n 1)
|
||||||
|
if [ ! -z "$nvidia_sm" ]; then
|
||||||
|
echo "Total GPU Memory: $nvidia_sm MiB"
|
||||||
|
# if bigger than 8GB, use 16GB
|
||||||
|
#if [ "$nvidia_sm" -gt 8192 ]; then
|
||||||
|
# GPU_SIZE=gpu-16g
|
||||||
|
#else
|
||||||
|
GPU_SIZE=gpu-8g
|
||||||
|
#fi
|
||||||
|
else
|
||||||
|
echo "Unable to determine NVIDIA GPU memory size. Falling back to CPU."
|
||||||
|
GPU_SIZE=gpu-8g
|
||||||
|
fi
|
||||||
|
elif [ "$GPU_ACCELERATION" = true ] && [ "$GPU_VENDOR" = "intel" ]; then
|
||||||
|
GPU_SIZE=intel
|
||||||
|
# Default to a generic GPU size until we implement GPU size detection for non NVIDIA GPUs
|
||||||
|
elif [ "$GPU_ACCELERATION" = true ]; then
|
||||||
|
echo "Non-NVIDIA GPU detected. Specific GPU memory size detection is not implemented."
|
||||||
|
GPU_SIZE=gpu-8g
|
||||||
|
|
||||||
|
# default to cpu if GPU_SIZE is not set
|
||||||
|
else
|
||||||
|
echo "GPU acceleration is not enabled or supported. Defaulting to CPU."
|
||||||
|
GPU_SIZE=cpu
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
function check_vars() {
|
||||||
|
if [ -z "$MODELS" ]; then
|
||||||
|
echo "MODELS environment variable is not set. Please set it to a comma-separated list of model YAML files to load."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$PROFILE" ]; then
|
||||||
|
echo "PROFILE environment variable is not set. Please set it to one of the following: cpu, gpu-8g, gpu-16g, apple"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
detect_gpu
|
||||||
|
detect_gpu_size
|
||||||
|
|
||||||
|
PROFILE="${PROFILE:-$GPU_SIZE}" # default to cpu
|
||||||
|
export MODELS="${MODELS:-/aio/${PROFILE}/embeddings.yaml,/aio/${PROFILE}/rerank.yaml,/aio/${PROFILE}/text-to-speech.yaml,/aio/${PROFILE}/image-gen.yaml,/aio/${PROFILE}/text-to-text.yaml,/aio/${PROFILE}/speech-to-text.yaml,/aio/${PROFILE}/vad.yaml,/aio/${PROFILE}/vision.yaml}"
|
||||||
|
|
||||||
|
check_vars
|
||||||
|
|
||||||
|
echo "===> Starting LocalAI[$PROFILE] with the following models: $MODELS"
|
||||||
|
|
||||||
|
exec /entrypoint.sh "$@"
|
||||||
13
aio/gpu-8g/embeddings.yaml
Normal file
13
aio/gpu-8g/embeddings.yaml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
embeddings: true
|
||||||
|
name: text-embedding-ada-002
|
||||||
|
backend: llama-cpp
|
||||||
|
parameters:
|
||||||
|
model: huggingface://bartowski/granite-embedding-107m-multilingual-GGUF/granite-embedding-107m-multilingual-f16.gguf
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
You can test this model with curl like this:
|
||||||
|
|
||||||
|
curl http://localhost:8080/embeddings -X POST -H "Content-Type: application/json" -d '{
|
||||||
|
"input": "Your text string goes here",
|
||||||
|
"model": "text-embedding-ada-002"
|
||||||
|
}'
|
||||||
25
aio/gpu-8g/image-gen.yaml
Normal file
25
aio/gpu-8g/image-gen.yaml
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
name: stablediffusion
|
||||||
|
parameters:
|
||||||
|
model: DreamShaper_8_pruned.safetensors
|
||||||
|
backend: diffusers
|
||||||
|
step: 25
|
||||||
|
f16: true
|
||||||
|
|
||||||
|
diffusers:
|
||||||
|
pipeline_type: StableDiffusionPipeline
|
||||||
|
cuda: true
|
||||||
|
enable_parameters: "negative_prompt,num_inference_steps"
|
||||||
|
scheduler_type: "k_dpmpp_2m"
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: DreamShaper_8_pruned.safetensors
|
||||||
|
uri: huggingface://Lykon/DreamShaper/DreamShaper_8_pruned.safetensors
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
curl http://localhost:8080/v1/images/generations \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"prompt": "<positive prompt>|<negative prompt>",
|
||||||
|
"step": 25,
|
||||||
|
"size": "512x512"
|
||||||
|
}'
|
||||||
33
aio/gpu-8g/rerank.yaml
Normal file
33
aio/gpu-8g/rerank.yaml
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
name: jina-reranker-v1-base-en
|
||||||
|
reranking: true
|
||||||
|
f16: true
|
||||||
|
parameters:
|
||||||
|
model: jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
backend: llama-cpp
|
||||||
|
download_files:
|
||||||
|
- filename: jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
sha256: 5f696cf0d0f3d347c4a279eee8270e5918554cdac0ed1f632f2619e4e8341407
|
||||||
|
uri: huggingface://mradermacher/jina-reranker-v1-tiny-en-GGUF/jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
You can test this model with curl like this:
|
||||||
|
|
||||||
|
curl http://localhost:8080/v1/rerank \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"model": "jina-reranker-v1-base-en",
|
||||||
|
"query": "Organic skincare products for sensitive skin",
|
||||||
|
"documents": [
|
||||||
|
"Eco-friendly kitchenware for modern homes",
|
||||||
|
"Biodegradable cleaning supplies for eco-conscious consumers",
|
||||||
|
"Organic cotton baby clothes for sensitive skin",
|
||||||
|
"Natural organic skincare range for sensitive skin",
|
||||||
|
"Tech gadgets for smart homes: 2024 edition",
|
||||||
|
"Sustainable gardening tools and compost solutions",
|
||||||
|
"Sensitive skin-friendly facial cleansers and toners",
|
||||||
|
"Organic food wraps and storage solutions",
|
||||||
|
"All-natural pet food for dogs with allergies",
|
||||||
|
"Yoga mats made from recycled materials"
|
||||||
|
],
|
||||||
|
"top_n": 3
|
||||||
|
}'
|
||||||
18
aio/gpu-8g/speech-to-text.yaml
Normal file
18
aio/gpu-8g/speech-to-text.yaml
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
name: whisper-1
|
||||||
|
backend: whisper
|
||||||
|
parameters:
|
||||||
|
model: ggml-whisper-base.bin
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
## example audio file
|
||||||
|
wget --quiet --show-progress -O gb1.ogg https://upload.wikimedia.org/wikipedia/commons/1/1f/George_W_Bush_Columbia_FINAL.ogg
|
||||||
|
|
||||||
|
## Send the example audio file to the transcriptions endpoint
|
||||||
|
curl http://localhost:8080/v1/audio/transcriptions \
|
||||||
|
-H "Content-Type: multipart/form-data" \
|
||||||
|
-F file="@$PWD/gb1.ogg" -F model="whisper-1"
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: "ggml-whisper-base.bin"
|
||||||
|
sha256: "60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe"
|
||||||
|
uri: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin"
|
||||||
15
aio/gpu-8g/text-to-speech.yaml
Normal file
15
aio/gpu-8g/text-to-speech.yaml
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
name: tts-1
|
||||||
|
download_files:
|
||||||
|
- filename: voice-en-us-amy-low.tar.gz
|
||||||
|
uri: https://github.com/rhasspy/piper/releases/download/v0.0.2/voice-en-us-amy-low.tar.gz
|
||||||
|
backend: piper
|
||||||
|
parameters:
|
||||||
|
model: en-us-amy-low.onnx
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
To test if this model works as expected, you can use the following curl command:
|
||||||
|
|
||||||
|
curl http://localhost:8080/tts -H "Content-Type: application/json" -d '{
|
||||||
|
"model":"tts-1",
|
||||||
|
"input": "Hi, this is a test."
|
||||||
|
}'
|
||||||
54
aio/gpu-8g/text-to-text.yaml
Normal file
54
aio/gpu-8g/text-to-text.yaml
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
context_size: 4096
|
||||||
|
f16: true
|
||||||
|
backend: llama-cpp
|
||||||
|
function:
|
||||||
|
capture_llm_results:
|
||||||
|
- (?s)<Thought>(.*?)</Thought>
|
||||||
|
grammar:
|
||||||
|
properties_order: name,arguments
|
||||||
|
json_regex_match:
|
||||||
|
- (?s)<Output>(.*?)</Output>
|
||||||
|
replace_llm_results:
|
||||||
|
- key: (?s)<Thought>(.*?)</Thought>
|
||||||
|
value: ""
|
||||||
|
mmap: true
|
||||||
|
name: gpt-4
|
||||||
|
parameters:
|
||||||
|
model: localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
|
||||||
|
stopwords:
|
||||||
|
- <|im_end|>
|
||||||
|
- <dummy32000>
|
||||||
|
- </s>
|
||||||
|
template:
|
||||||
|
chat: |
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
chat_message: |
|
||||||
|
<|im_start|>{{ .RoleName }}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
Function call:
|
||||||
|
{{ else if eq .RoleName "tool" -}}
|
||||||
|
Function response:
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .Content -}}
|
||||||
|
{{.Content }}
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
{{toJson .FunctionCall}}
|
||||||
|
{{ end -}}<|im_end|>
|
||||||
|
completion: |
|
||||||
|
{{.Input}}
|
||||||
|
function: |
|
||||||
|
<|im_start|>system
|
||||||
|
You are an AI assistant that executes function calls, and these are the tools at your disposal:
|
||||||
|
{{range .Functions}}
|
||||||
|
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
|
||||||
|
{{end}}
|
||||||
|
<|im_end|>
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
|
||||||
|
sha256: 4e7b7fe1d54b881f1ef90799219dc6cc285d29db24f559c8998d1addb35713d4
|
||||||
|
uri: huggingface://mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF/localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
|
||||||
8
aio/gpu-8g/vad.yaml
Normal file
8
aio/gpu-8g/vad.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
backend: silero-vad
|
||||||
|
name: silero-vad
|
||||||
|
parameters:
|
||||||
|
model: silero-vad.onnx
|
||||||
|
download_files:
|
||||||
|
- filename: silero-vad.onnx
|
||||||
|
uri: https://huggingface.co/onnx-community/silero-vad/resolve/main/onnx/model.onnx
|
||||||
|
sha256: a4a068cd6cf1ea8355b84327595838ca748ec29a25bc91fc82e6c299ccdc5808
|
||||||
50
aio/gpu-8g/vision.yaml
Normal file
50
aio/gpu-8g/vision.yaml
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
context_size: 4096
|
||||||
|
backend: llama-cpp
|
||||||
|
f16: true
|
||||||
|
mmap: true
|
||||||
|
mmproj: minicpm-v-2_6-mmproj-f16.gguf
|
||||||
|
name: gpt-4o
|
||||||
|
parameters:
|
||||||
|
model: minicpm-v-2_6-Q4_K_M.gguf
|
||||||
|
stopwords:
|
||||||
|
- <|im_end|>
|
||||||
|
- <dummy32000>
|
||||||
|
- </s>
|
||||||
|
- <|endoftext|>
|
||||||
|
template:
|
||||||
|
chat: |
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
chat_message: |
|
||||||
|
<|im_start|>{{ .RoleName }}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
Function call:
|
||||||
|
{{ else if eq .RoleName "tool" -}}
|
||||||
|
Function response:
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .Content -}}
|
||||||
|
{{.Content }}
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
{{toJson .FunctionCall}}
|
||||||
|
{{ end -}}<|im_end|>
|
||||||
|
completion: |
|
||||||
|
{{.Input}}
|
||||||
|
function: |
|
||||||
|
<|im_start|>system
|
||||||
|
You are a function calling AI model. You are provided with functions to execute. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
|
||||||
|
{{range .Functions}}
|
||||||
|
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
|
||||||
|
{{end}}
|
||||||
|
For each function call return a json object with function name and arguments
|
||||||
|
<|im_end|>
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: minicpm-v-2_6-Q4_K_M.gguf
|
||||||
|
sha256: 3a4078d53b46f22989adbf998ce5a3fd090b6541f112d7e936eb4204a04100b1
|
||||||
|
uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/ggml-model-Q4_K_M.gguf
|
||||||
|
- filename: minicpm-v-2_6-mmproj-f16.gguf
|
||||||
|
uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/mmproj-model-f16.gguf
|
||||||
|
sha256: 4485f68a0f1aa404c391e788ea88ea653c100d8e98fe572698f701e5809711fd
|
||||||
13
aio/intel/embeddings.yaml
Normal file
13
aio/intel/embeddings.yaml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
embeddings: true
|
||||||
|
name: text-embedding-ada-002
|
||||||
|
backend: llama-cpp
|
||||||
|
parameters:
|
||||||
|
model: huggingface://bartowski/granite-embedding-107m-multilingual-GGUF/granite-embedding-107m-multilingual-f16.gguf
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
You can test this model with curl like this:
|
||||||
|
|
||||||
|
curl http://localhost:8080/embeddings -X POST -H "Content-Type: application/json" -d '{
|
||||||
|
"input": "Your text string goes here",
|
||||||
|
"model": "text-embedding-ada-002"
|
||||||
|
}'
|
||||||
20
aio/intel/image-gen.yaml
Normal file
20
aio/intel/image-gen.yaml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
name: stablediffusion
|
||||||
|
parameters:
|
||||||
|
model: Lykon/dreamshaper-8
|
||||||
|
backend: diffusers
|
||||||
|
step: 25
|
||||||
|
f16: true
|
||||||
|
diffusers:
|
||||||
|
pipeline_type: StableDiffusionPipeline
|
||||||
|
cuda: true
|
||||||
|
enable_parameters: "negative_prompt,num_inference_steps"
|
||||||
|
scheduler_type: "k_dpmpp_2m"
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
curl http://localhost:8080/v1/images/generations \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"prompt": "<positive prompt>|<negative prompt>",
|
||||||
|
"step": 25,
|
||||||
|
"size": "512x512"
|
||||||
|
}'
|
||||||
33
aio/intel/rerank.yaml
Normal file
33
aio/intel/rerank.yaml
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
name: jina-reranker-v1-base-en
|
||||||
|
reranking: true
|
||||||
|
f16: true
|
||||||
|
parameters:
|
||||||
|
model: jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
backend: llama-cpp
|
||||||
|
download_files:
|
||||||
|
- filename: jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
sha256: 5f696cf0d0f3d347c4a279eee8270e5918554cdac0ed1f632f2619e4e8341407
|
||||||
|
uri: huggingface://mradermacher/jina-reranker-v1-tiny-en-GGUF/jina-reranker-v1-tiny-en.f16.gguf
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
You can test this model with curl like this:
|
||||||
|
|
||||||
|
curl http://localhost:8080/v1/rerank \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"model": "jina-reranker-v1-base-en",
|
||||||
|
"query": "Organic skincare products for sensitive skin",
|
||||||
|
"documents": [
|
||||||
|
"Eco-friendly kitchenware for modern homes",
|
||||||
|
"Biodegradable cleaning supplies for eco-conscious consumers",
|
||||||
|
"Organic cotton baby clothes for sensitive skin",
|
||||||
|
"Natural organic skincare range for sensitive skin",
|
||||||
|
"Tech gadgets for smart homes: 2024 edition",
|
||||||
|
"Sustainable gardening tools and compost solutions",
|
||||||
|
"Sensitive skin-friendly facial cleansers and toners",
|
||||||
|
"Organic food wraps and storage solutions",
|
||||||
|
"All-natural pet food for dogs with allergies",
|
||||||
|
"Yoga mats made from recycled materials"
|
||||||
|
],
|
||||||
|
"top_n": 3
|
||||||
|
}'
|
||||||
18
aio/intel/speech-to-text.yaml
Normal file
18
aio/intel/speech-to-text.yaml
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
name: whisper-1
|
||||||
|
backend: whisper
|
||||||
|
parameters:
|
||||||
|
model: ggml-whisper-base.bin
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
## example audio file
|
||||||
|
wget --quiet --show-progress -O gb1.ogg https://upload.wikimedia.org/wikipedia/commons/1/1f/George_W_Bush_Columbia_FINAL.ogg
|
||||||
|
|
||||||
|
## Send the example audio file to the transcriptions endpoint
|
||||||
|
curl http://localhost:8080/v1/audio/transcriptions \
|
||||||
|
-H "Content-Type: multipart/form-data" \
|
||||||
|
-F file="@$PWD/gb1.ogg" -F model="whisper-1"
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: "ggml-whisper-base.bin"
|
||||||
|
sha256: "60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe"
|
||||||
|
uri: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin"
|
||||||
15
aio/intel/text-to-speech.yaml
Normal file
15
aio/intel/text-to-speech.yaml
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
name: tts-1
|
||||||
|
download_files:
|
||||||
|
- filename: voice-en-us-amy-low.tar.gz
|
||||||
|
uri: https://github.com/rhasspy/piper/releases/download/v0.0.2/voice-en-us-amy-low.tar.gz
|
||||||
|
backend: piper
|
||||||
|
parameters:
|
||||||
|
model: en-us-amy-low.onnx
|
||||||
|
|
||||||
|
usage: |
|
||||||
|
To test if this model works as expected, you can use the following curl command:
|
||||||
|
|
||||||
|
curl http://localhost:8080/tts -H "Content-Type: application/json" -d '{
|
||||||
|
"model":"tts-1",
|
||||||
|
"input": "Hi, this is a test."
|
||||||
|
}'
|
||||||
54
aio/intel/text-to-text.yaml
Normal file
54
aio/intel/text-to-text.yaml
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
context_size: 4096
|
||||||
|
f16: true
|
||||||
|
backend: llama-cpp
|
||||||
|
function:
|
||||||
|
capture_llm_results:
|
||||||
|
- (?s)<Thought>(.*?)</Thought>
|
||||||
|
grammar:
|
||||||
|
properties_order: name,arguments
|
||||||
|
json_regex_match:
|
||||||
|
- (?s)<Output>(.*?)</Output>
|
||||||
|
replace_llm_results:
|
||||||
|
- key: (?s)<Thought>(.*?)</Thought>
|
||||||
|
value: ""
|
||||||
|
mmap: true
|
||||||
|
name: gpt-4
|
||||||
|
parameters:
|
||||||
|
model: localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf
|
||||||
|
stopwords:
|
||||||
|
- <|im_end|>
|
||||||
|
- <dummy32000>
|
||||||
|
- </s>
|
||||||
|
template:
|
||||||
|
chat: |
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
chat_message: |
|
||||||
|
<|im_start|>{{ .RoleName }}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
Function call:
|
||||||
|
{{ else if eq .RoleName "tool" -}}
|
||||||
|
Function response:
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .Content -}}
|
||||||
|
{{.Content }}
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
{{toJson .FunctionCall}}
|
||||||
|
{{ end -}}<|im_end|>
|
||||||
|
completion: |
|
||||||
|
{{.Input}}
|
||||||
|
function: |
|
||||||
|
<|im_start|>system
|
||||||
|
You are an AI assistant that executes function calls, and these are the tools at your disposal:
|
||||||
|
{{range .Functions}}
|
||||||
|
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
|
||||||
|
{{end}}
|
||||||
|
<|im_end|>
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: localai-functioncall-phi-4-v0.3-q4_k_m.gguf
|
||||||
|
sha256: 23fee048ded2a6e2e1a7b6bbefa6cbf83068f194caa9552aecbaa00fec8a16d5
|
||||||
|
uri: huggingface://mudler/LocalAI-functioncall-phi-4-v0.3-Q4_K_M-GGUF/localai-functioncall-phi-4-v0.3-q4_k_m.gguf
|
||||||
8
aio/intel/vad.yaml
Normal file
8
aio/intel/vad.yaml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
backend: silero-vad
|
||||||
|
name: silero-vad
|
||||||
|
parameters:
|
||||||
|
model: silero-vad.onnx
|
||||||
|
download_files:
|
||||||
|
- filename: silero-vad.onnx
|
||||||
|
uri: https://huggingface.co/onnx-community/silero-vad/resolve/main/onnx/model.onnx
|
||||||
|
sha256: a4a068cd6cf1ea8355b84327595838ca748ec29a25bc91fc82e6c299ccdc5808
|
||||||
51
aio/intel/vision.yaml
Normal file
51
aio/intel/vision.yaml
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
context_size: 4096
|
||||||
|
backend: llama-cpp
|
||||||
|
f16: true
|
||||||
|
mmap: true
|
||||||
|
mmproj: minicpm-v-2_6-mmproj-f16.gguf
|
||||||
|
name: gpt-4o
|
||||||
|
parameters:
|
||||||
|
model: minicpm-v-2_6-Q4_K_M.gguf
|
||||||
|
stopwords:
|
||||||
|
- <|im_end|>
|
||||||
|
- <dummy32000>
|
||||||
|
- </s>
|
||||||
|
- <|endoftext|>
|
||||||
|
template:
|
||||||
|
chat: |
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
chat_message: |
|
||||||
|
<|im_start|>{{ .RoleName }}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
Function call:
|
||||||
|
{{ else if eq .RoleName "tool" -}}
|
||||||
|
Function response:
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .Content -}}
|
||||||
|
{{.Content }}
|
||||||
|
{{ end -}}
|
||||||
|
{{ if .FunctionCall -}}
|
||||||
|
{{toJson .FunctionCall}}
|
||||||
|
{{ end -}}<|im_end|>
|
||||||
|
completion: |
|
||||||
|
{{.Input}}
|
||||||
|
function: |
|
||||||
|
<|im_start|>system
|
||||||
|
You are a function calling AI model. You are provided with functions to execute. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
|
||||||
|
{{range .Functions}}
|
||||||
|
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
|
||||||
|
{{end}}
|
||||||
|
For each function call return a json object with function name and arguments
|
||||||
|
<|im_end|>
|
||||||
|
{{.Input -}}
|
||||||
|
<|im_start|>assistant
|
||||||
|
|
||||||
|
|
||||||
|
download_files:
|
||||||
|
- filename: minicpm-v-2_6-Q4_K_M.gguf
|
||||||
|
sha256: 3a4078d53b46f22989adbf998ce5a3fd090b6541f112d7e936eb4204a04100b1
|
||||||
|
uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/ggml-model-Q4_K_M.gguf
|
||||||
|
- filename: minicpm-v-2_6-mmproj-f16.gguf
|
||||||
|
uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/mmproj-model-f16.gguf
|
||||||
|
sha256: 4485f68a0f1aa404c391e788ea88ea653c100d8e98fe572698f701e5809711fd
|
||||||
167
api/api.go
167
api/api.go
@@ -1,167 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
|
|
||||||
"github.com/go-skynet/LocalAI/internal"
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/assets"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
"github.com/gofiber/fiber/v2/middleware/cors"
|
|
||||||
"github.com/gofiber/fiber/v2/middleware/logger"
|
|
||||||
"github.com/gofiber/fiber/v2/middleware/recover"
|
|
||||||
"github.com/rs/zerolog"
|
|
||||||
"github.com/rs/zerolog/log"
|
|
||||||
)
|
|
||||||
|
|
||||||
func App(opts ...AppOption) (*fiber.App, error) {
|
|
||||||
options := newOptions(opts...)
|
|
||||||
|
|
||||||
zerolog.SetGlobalLevel(zerolog.InfoLevel)
|
|
||||||
if options.debug {
|
|
||||||
zerolog.SetGlobalLevel(zerolog.DebugLevel)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return errors as JSON responses
|
|
||||||
app := fiber.New(fiber.Config{
|
|
||||||
BodyLimit: options.uploadLimitMB * 1024 * 1024, // this is the default limit of 4MB
|
|
||||||
DisableStartupMessage: options.disableMessage,
|
|
||||||
// Override default error handler
|
|
||||||
ErrorHandler: func(ctx *fiber.Ctx, err error) error {
|
|
||||||
// Status code defaults to 500
|
|
||||||
code := fiber.StatusInternalServerError
|
|
||||||
|
|
||||||
// Retrieve the custom status code if it's a *fiber.Error
|
|
||||||
var e *fiber.Error
|
|
||||||
if errors.As(err, &e) {
|
|
||||||
code = e.Code
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send custom error page
|
|
||||||
return ctx.Status(code).JSON(
|
|
||||||
ErrorResponse{
|
|
||||||
Error: &APIError{Message: err.Error(), Code: code},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if options.debug {
|
|
||||||
app.Use(logger.New(logger.Config{
|
|
||||||
Format: "[${ip}]:${port} ${status} - ${method} ${path}\n",
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
cm := NewConfigMerger()
|
|
||||||
if err := cm.LoadConfigs(options.loader.ModelPath); err != nil {
|
|
||||||
log.Error().Msgf("error loading config files: %s", err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
if options.configFile != "" {
|
|
||||||
if err := cm.LoadConfigFile(options.configFile); err != nil {
|
|
||||||
log.Error().Msgf("error loading config file: %s", err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if options.debug {
|
|
||||||
for _, v := range cm.ListConfigs() {
|
|
||||||
cfg, _ := cm.GetConfig(v)
|
|
||||||
log.Debug().Msgf("Model: %s (config: %+v)", v, cfg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if options.assetsDestination != "" {
|
|
||||||
// Extract files from the embedded FS
|
|
||||||
err := assets.ExtractFiles(options.backendAssets, options.assetsDestination)
|
|
||||||
if err != nil {
|
|
||||||
log.Warn().Msgf("Failed extracting backend assets files: %s (might be required for some backends to work properly, like gpt4all)", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Default middleware config
|
|
||||||
app.Use(recover.New())
|
|
||||||
|
|
||||||
if options.preloadJSONModels != "" {
|
|
||||||
if err := ApplyGalleryFromString(options.loader.ModelPath, options.preloadJSONModels, cm, options.galleries); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if options.preloadModelsFromPath != "" {
|
|
||||||
if err := ApplyGalleryFromFile(options.loader.ModelPath, options.preloadModelsFromPath, cm, options.galleries); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if options.cors {
|
|
||||||
if options.corsAllowOrigins == "" {
|
|
||||||
app.Use(cors.New())
|
|
||||||
} else {
|
|
||||||
app.Use(cors.New(cors.Config{
|
|
||||||
AllowOrigins: options.corsAllowOrigins,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// LocalAI API endpoints
|
|
||||||
applier := newGalleryApplier(options.loader.ModelPath)
|
|
||||||
applier.start(options.context, cm)
|
|
||||||
|
|
||||||
app.Get("/version", func(c *fiber.Ctx) error {
|
|
||||||
return c.JSON(struct {
|
|
||||||
Version string `json:"version"`
|
|
||||||
}{Version: internal.PrintableVersion()})
|
|
||||||
})
|
|
||||||
|
|
||||||
app.Post("/models/apply", applyModelGallery(options.loader.ModelPath, cm, applier.C, options.galleries))
|
|
||||||
app.Get("/models/available", listModelFromGallery(options.galleries, options.loader.ModelPath))
|
|
||||||
app.Get("/models/jobs/:uuid", getOpStatus(applier))
|
|
||||||
|
|
||||||
// openAI compatible API endpoint
|
|
||||||
|
|
||||||
// chat
|
|
||||||
app.Post("/v1/chat/completions", chatEndpoint(cm, options))
|
|
||||||
app.Post("/chat/completions", chatEndpoint(cm, options))
|
|
||||||
|
|
||||||
// edit
|
|
||||||
app.Post("/v1/edits", editEndpoint(cm, options))
|
|
||||||
app.Post("/edits", editEndpoint(cm, options))
|
|
||||||
|
|
||||||
// completion
|
|
||||||
app.Post("/v1/completions", completionEndpoint(cm, options))
|
|
||||||
app.Post("/completions", completionEndpoint(cm, options))
|
|
||||||
app.Post("/v1/engines/:model/completions", completionEndpoint(cm, options))
|
|
||||||
|
|
||||||
// embeddings
|
|
||||||
app.Post("/v1/embeddings", embeddingsEndpoint(cm, options))
|
|
||||||
app.Post("/embeddings", embeddingsEndpoint(cm, options))
|
|
||||||
app.Post("/v1/engines/:model/embeddings", embeddingsEndpoint(cm, options))
|
|
||||||
|
|
||||||
// audio
|
|
||||||
app.Post("/v1/audio/transcriptions", transcriptEndpoint(cm, options))
|
|
||||||
app.Post("/tts", ttsEndpoint(cm, options))
|
|
||||||
|
|
||||||
// images
|
|
||||||
app.Post("/v1/images/generations", imageEndpoint(cm, options))
|
|
||||||
|
|
||||||
if options.imageDir != "" {
|
|
||||||
app.Static("/generated-images", options.imageDir)
|
|
||||||
}
|
|
||||||
|
|
||||||
if options.audioDir != "" {
|
|
||||||
app.Static("/generated-audio", options.audioDir)
|
|
||||||
}
|
|
||||||
|
|
||||||
ok := func(c *fiber.Ctx) error {
|
|
||||||
return c.SendStatus(200)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Kubernetes health checks
|
|
||||||
app.Get("/healthz", ok)
|
|
||||||
app.Get("/readyz", ok)
|
|
||||||
|
|
||||||
// models
|
|
||||||
app.Get("/v1/models", listModels(options.loader, cm))
|
|
||||||
app.Get("/models", listModels(options.loader, cm))
|
|
||||||
|
|
||||||
return app, nil
|
|
||||||
}
|
|
||||||
514
api/api_test.go
514
api/api_test.go
@@ -1,514 +0,0 @@
|
|||||||
package api_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"embed"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
|
||||||
|
|
||||||
. "github.com/go-skynet/LocalAI/api"
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/gallery"
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/model"
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/utils"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
. "github.com/onsi/ginkgo/v2"
|
|
||||||
. "github.com/onsi/gomega"
|
|
||||||
"gopkg.in/yaml.v3"
|
|
||||||
|
|
||||||
openaigo "github.com/otiai10/openaigo"
|
|
||||||
"github.com/sashabaranov/go-openai"
|
|
||||||
)
|
|
||||||
|
|
||||||
type modelApplyRequest struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
URL string `json:"url"`
|
|
||||||
Name string `json:"name"`
|
|
||||||
Overrides map[string]string `json:"overrides"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func getModelStatus(url string) (response map[string]interface{}) {
|
|
||||||
// Create the HTTP request
|
|
||||||
resp, err := http.Get(url)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error creating request:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
body, err := ioutil.ReadAll(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error reading response body:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unmarshal the response into a map[string]interface{}
|
|
||||||
err = json.Unmarshal(body, &response)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error unmarshaling JSON response:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func getModels(url string) (response []gallery.GalleryModel) {
|
|
||||||
utils.GetURI(url, func(url string, i []byte) error {
|
|
||||||
// Unmarshal YAML data into a struct
|
|
||||||
return json.Unmarshal(i, &response)
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func postModelApplyRequest(url string, request modelApplyRequest) (response map[string]interface{}) {
|
|
||||||
|
|
||||||
//url := "http://localhost:AI/models/apply"
|
|
||||||
|
|
||||||
// Create the request payload
|
|
||||||
|
|
||||||
payload, err := json.Marshal(request)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error marshaling JSON:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create the HTTP request
|
|
||||||
req, err := http.NewRequest("POST", url, bytes.NewBuffer(payload))
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error creating request:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
req.Header.Set("Content-Type", "application/json")
|
|
||||||
|
|
||||||
// Make the request
|
|
||||||
client := &http.Client{}
|
|
||||||
resp, err := client.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error making request:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
body, err := ioutil.ReadAll(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error reading response body:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unmarshal the response into a map[string]interface{}
|
|
||||||
err = json.Unmarshal(body, &response)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println("Error unmarshaling JSON response:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
//go:embed backend-assets/*
|
|
||||||
var backendAssets embed.FS
|
|
||||||
|
|
||||||
var _ = Describe("API test", func() {
|
|
||||||
|
|
||||||
var app *fiber.App
|
|
||||||
var modelLoader *model.ModelLoader
|
|
||||||
var client *openai.Client
|
|
||||||
var client2 *openaigo.Client
|
|
||||||
var c context.Context
|
|
||||||
var cancel context.CancelFunc
|
|
||||||
var tmpdir string
|
|
||||||
|
|
||||||
Context("API with ephemeral models", func() {
|
|
||||||
BeforeEach(func() {
|
|
||||||
var err error
|
|
||||||
tmpdir, err = os.MkdirTemp("", "")
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
|
|
||||||
modelLoader = model.NewModelLoader(tmpdir)
|
|
||||||
c, cancel = context.WithCancel(context.Background())
|
|
||||||
|
|
||||||
g := []gallery.GalleryModel{
|
|
||||||
{
|
|
||||||
Name: "bert",
|
|
||||||
URL: "https://raw.githubusercontent.com/go-skynet/model-gallery/main/bert-embeddings.yaml",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Name: "bert2",
|
|
||||||
URL: "https://raw.githubusercontent.com/go-skynet/model-gallery/main/bert-embeddings.yaml",
|
|
||||||
Overrides: map[string]interface{}{"foo": "bar"},
|
|
||||||
AdditionalFiles: []gallery.File{gallery.File{Filename: "foo.yaml", URI: "https://raw.githubusercontent.com/go-skynet/model-gallery/main/bert-embeddings.yaml"}},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
out, err := yaml.Marshal(g)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
err = ioutil.WriteFile(filepath.Join(tmpdir, "gallery_simple.yaml"), out, 0644)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
|
|
||||||
galleries := []gallery.Gallery{
|
|
||||||
{
|
|
||||||
Name: "test",
|
|
||||||
URL: "file://" + filepath.Join(tmpdir, "gallery_simple.yaml"),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
app, err = App(WithContext(c),
|
|
||||||
WithGalleries(galleries),
|
|
||||||
WithModelLoader(modelLoader), WithBackendAssets(backendAssets), WithBackendAssetsOutput(tmpdir))
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
go app.Listen("127.0.0.1:9090")
|
|
||||||
|
|
||||||
defaultConfig := openai.DefaultConfig("")
|
|
||||||
defaultConfig.BaseURL = "http://127.0.0.1:9090/v1"
|
|
||||||
|
|
||||||
client2 = openaigo.NewClient("")
|
|
||||||
client2.BaseURL = defaultConfig.BaseURL
|
|
||||||
|
|
||||||
// Wait for API to be ready
|
|
||||||
client = openai.NewClientWithConfig(defaultConfig)
|
|
||||||
Eventually(func() error {
|
|
||||||
_, err := client.ListModels(context.TODO())
|
|
||||||
return err
|
|
||||||
}, "2m").ShouldNot(HaveOccurred())
|
|
||||||
})
|
|
||||||
|
|
||||||
AfterEach(func() {
|
|
||||||
cancel()
|
|
||||||
app.Shutdown()
|
|
||||||
os.RemoveAll(tmpdir)
|
|
||||||
})
|
|
||||||
|
|
||||||
Context("Applying models", func() {
|
|
||||||
It("applies models from a gallery", func() {
|
|
||||||
|
|
||||||
models := getModels("http://127.0.0.1:9090/models/available")
|
|
||||||
Expect(len(models)).To(Equal(2), fmt.Sprint(models))
|
|
||||||
Expect(models[0].Installed).To(BeFalse(), fmt.Sprint(models))
|
|
||||||
Expect(models[1].Installed).To(BeFalse(), fmt.Sprint(models))
|
|
||||||
|
|
||||||
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
||||||
ID: "test@bert2",
|
|
||||||
})
|
|
||||||
|
|
||||||
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
||||||
|
|
||||||
uuid := response["uuid"].(string)
|
|
||||||
resp := map[string]interface{}{}
|
|
||||||
Eventually(func() bool {
|
|
||||||
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
||||||
fmt.Println(response)
|
|
||||||
resp = response
|
|
||||||
return response["processed"].(bool)
|
|
||||||
}, "360s").Should(Equal(true))
|
|
||||||
Expect(resp["message"]).ToNot(ContainSubstring("error"))
|
|
||||||
|
|
||||||
dat, err := os.ReadFile(filepath.Join(tmpdir, "bert2.yaml"))
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
|
|
||||||
_, err = os.ReadFile(filepath.Join(tmpdir, "foo.yaml"))
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
|
|
||||||
content := map[string]interface{}{}
|
|
||||||
err = yaml.Unmarshal(dat, &content)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(content["backend"]).To(Equal("bert-embeddings"))
|
|
||||||
Expect(content["foo"]).To(Equal("bar"))
|
|
||||||
|
|
||||||
models = getModels("http://127.0.0.1:9090/models/available")
|
|
||||||
Expect(len(models)).To(Equal(2), fmt.Sprint(models))
|
|
||||||
Expect(models[0].Name).To(Or(Equal("bert"), Equal("bert2")))
|
|
||||||
Expect(models[1].Name).To(Or(Equal("bert"), Equal("bert2")))
|
|
||||||
for _, m := range models {
|
|
||||||
if m.Name == "bert2" {
|
|
||||||
Expect(m.Installed).To(BeTrue())
|
|
||||||
} else {
|
|
||||||
Expect(m.Installed).To(BeFalse())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
It("overrides models", func() {
|
|
||||||
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
||||||
URL: "https://raw.githubusercontent.com/go-skynet/model-gallery/main/bert-embeddings.yaml",
|
|
||||||
Name: "bert",
|
|
||||||
Overrides: map[string]string{
|
|
||||||
"backend": "llama",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
||||||
|
|
||||||
uuid := response["uuid"].(string)
|
|
||||||
|
|
||||||
Eventually(func() bool {
|
|
||||||
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
||||||
fmt.Println(response)
|
|
||||||
return response["processed"].(bool)
|
|
||||||
}, "360s").Should(Equal(true))
|
|
||||||
|
|
||||||
dat, err := os.ReadFile(filepath.Join(tmpdir, "bert.yaml"))
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
|
|
||||||
content := map[string]interface{}{}
|
|
||||||
err = yaml.Unmarshal(dat, &content)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(content["backend"]).To(Equal("llama"))
|
|
||||||
})
|
|
||||||
It("apply models without overrides", func() {
|
|
||||||
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
||||||
URL: "https://raw.githubusercontent.com/go-skynet/model-gallery/main/bert-embeddings.yaml",
|
|
||||||
Name: "bert",
|
|
||||||
Overrides: map[string]string{},
|
|
||||||
})
|
|
||||||
|
|
||||||
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
||||||
|
|
||||||
uuid := response["uuid"].(string)
|
|
||||||
|
|
||||||
Eventually(func() bool {
|
|
||||||
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
||||||
fmt.Println(response)
|
|
||||||
return response["processed"].(bool)
|
|
||||||
}, "360s").Should(Equal(true))
|
|
||||||
|
|
||||||
dat, err := os.ReadFile(filepath.Join(tmpdir, "bert.yaml"))
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
|
|
||||||
content := map[string]interface{}{}
|
|
||||||
err = yaml.Unmarshal(dat, &content)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(content["backend"]).To(Equal("bert-embeddings"))
|
|
||||||
})
|
|
||||||
|
|
||||||
It("runs openllama", Label("llama"), func() {
|
|
||||||
if runtime.GOOS != "linux" {
|
|
||||||
Skip("test supported only on linux")
|
|
||||||
}
|
|
||||||
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
||||||
URL: "github:go-skynet/model-gallery/openllama_3b.yaml",
|
|
||||||
Name: "openllama_3b",
|
|
||||||
Overrides: map[string]string{},
|
|
||||||
})
|
|
||||||
|
|
||||||
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
||||||
|
|
||||||
uuid := response["uuid"].(string)
|
|
||||||
|
|
||||||
Eventually(func() bool {
|
|
||||||
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
||||||
fmt.Println(response)
|
|
||||||
return response["processed"].(bool)
|
|
||||||
}, "360s").Should(Equal(true))
|
|
||||||
|
|
||||||
resp, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "openllama_3b", Prompt: "Count up to five: one, two, three, four, "})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Text).To(ContainSubstring("five"))
|
|
||||||
})
|
|
||||||
|
|
||||||
It("runs gpt4all", Label("gpt4all"), func() {
|
|
||||||
if runtime.GOOS != "linux" {
|
|
||||||
Skip("test supported only on linux")
|
|
||||||
}
|
|
||||||
|
|
||||||
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
||||||
URL: "github:go-skynet/model-gallery/gpt4all-j.yaml",
|
|
||||||
Name: "gpt4all-j",
|
|
||||||
Overrides: map[string]string{},
|
|
||||||
})
|
|
||||||
|
|
||||||
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
||||||
|
|
||||||
uuid := response["uuid"].(string)
|
|
||||||
|
|
||||||
Eventually(func() bool {
|
|
||||||
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
||||||
fmt.Println(response)
|
|
||||||
return response["processed"].(bool)
|
|
||||||
}, "360s").Should(Equal(true))
|
|
||||||
|
|
||||||
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "gpt4all-j", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: "How are you?"}}})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Message.Content).To(ContainSubstring("well"))
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
Context("API query", func() {
|
|
||||||
BeforeEach(func() {
|
|
||||||
modelLoader = model.NewModelLoader(os.Getenv("MODELS_PATH"))
|
|
||||||
c, cancel = context.WithCancel(context.Background())
|
|
||||||
|
|
||||||
var err error
|
|
||||||
app, err = App(WithContext(c), WithModelLoader(modelLoader))
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
go app.Listen("127.0.0.1:9090")
|
|
||||||
|
|
||||||
defaultConfig := openai.DefaultConfig("")
|
|
||||||
defaultConfig.BaseURL = "http://127.0.0.1:9090/v1"
|
|
||||||
|
|
||||||
client2 = openaigo.NewClient("")
|
|
||||||
client2.BaseURL = defaultConfig.BaseURL
|
|
||||||
|
|
||||||
// Wait for API to be ready
|
|
||||||
client = openai.NewClientWithConfig(defaultConfig)
|
|
||||||
Eventually(func() error {
|
|
||||||
_, err := client.ListModels(context.TODO())
|
|
||||||
return err
|
|
||||||
}, "2m").ShouldNot(HaveOccurred())
|
|
||||||
})
|
|
||||||
AfterEach(func() {
|
|
||||||
cancel()
|
|
||||||
app.Shutdown()
|
|
||||||
})
|
|
||||||
It("returns the models list", func() {
|
|
||||||
models, err := client.ListModels(context.TODO())
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(models.Models)).To(Equal(10))
|
|
||||||
})
|
|
||||||
It("can generate completions", func() {
|
|
||||||
resp, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "testmodel", Prompt: "abcdedfghikl"})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Text).ToNot(BeEmpty())
|
|
||||||
})
|
|
||||||
|
|
||||||
It("can generate chat completions ", func() {
|
|
||||||
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "testmodel", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: "abcdedfghikl"}}})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
|
|
||||||
})
|
|
||||||
|
|
||||||
It("can generate completions from model configs", func() {
|
|
||||||
resp, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "gpt4all", Prompt: "abcdedfghikl"})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Text).ToNot(BeEmpty())
|
|
||||||
})
|
|
||||||
|
|
||||||
It("can generate chat completions from model configs", func() {
|
|
||||||
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "gpt4all-2", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: "abcdedfghikl"}}})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
|
|
||||||
})
|
|
||||||
|
|
||||||
It("returns errors", func() {
|
|
||||||
_, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "foomodel", Prompt: "abcdedfghikl"})
|
|
||||||
Expect(err).To(HaveOccurred())
|
|
||||||
Expect(err.Error()).To(ContainSubstring("error, status code: 500, message: could not load model - all backends returned error: 11 errors occurred:"))
|
|
||||||
})
|
|
||||||
It("transcribes audio", func() {
|
|
||||||
if runtime.GOOS != "linux" {
|
|
||||||
Skip("test supported only on linux")
|
|
||||||
}
|
|
||||||
resp, err := client.CreateTranscription(
|
|
||||||
context.Background(),
|
|
||||||
openai.AudioRequest{
|
|
||||||
Model: openai.Whisper1,
|
|
||||||
FilePath: filepath.Join(os.Getenv("TEST_DIR"), "audio.wav"),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(resp.Text).To(ContainSubstring("This is the Micro Machine Man presenting"))
|
|
||||||
})
|
|
||||||
|
|
||||||
It("calculate embeddings", func() {
|
|
||||||
if runtime.GOOS != "linux" {
|
|
||||||
Skip("test supported only on linux")
|
|
||||||
}
|
|
||||||
resp, err := client.CreateEmbeddings(
|
|
||||||
context.Background(),
|
|
||||||
openai.EmbeddingRequest{
|
|
||||||
Model: openai.AdaEmbeddingV2,
|
|
||||||
Input: []string{"sun", "cat"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Data[0].Embedding)).To(BeNumerically("==", 384))
|
|
||||||
Expect(len(resp.Data[1].Embedding)).To(BeNumerically("==", 384))
|
|
||||||
|
|
||||||
sunEmbedding := resp.Data[0].Embedding
|
|
||||||
resp2, err := client.CreateEmbeddings(
|
|
||||||
context.Background(),
|
|
||||||
openai.EmbeddingRequest{
|
|
||||||
Model: openai.AdaEmbeddingV2,
|
|
||||||
Input: []string{"sun"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(resp2.Data[0].Embedding).To(Equal(sunEmbedding))
|
|
||||||
})
|
|
||||||
|
|
||||||
Context("backends", func() {
|
|
||||||
It("runs rwkv", func() {
|
|
||||||
if runtime.GOOS != "linux" {
|
|
||||||
Skip("test supported only on linux")
|
|
||||||
}
|
|
||||||
resp, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "rwkv_test", Prompt: "Count up to five: one, two, three, four,"})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices) > 0).To(BeTrue())
|
|
||||||
Expect(resp.Choices[0].Text).To(Equal(" five."))
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
Context("Config file", func() {
|
|
||||||
BeforeEach(func() {
|
|
||||||
modelLoader = model.NewModelLoader(os.Getenv("MODELS_PATH"))
|
|
||||||
c, cancel = context.WithCancel(context.Background())
|
|
||||||
|
|
||||||
var err error
|
|
||||||
app, err = App(WithContext(c), WithModelLoader(modelLoader), WithConfigFile(os.Getenv("CONFIG_FILE")))
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
go app.Listen("127.0.0.1:9090")
|
|
||||||
|
|
||||||
defaultConfig := openai.DefaultConfig("")
|
|
||||||
defaultConfig.BaseURL = "http://127.0.0.1:9090/v1"
|
|
||||||
client2 = openaigo.NewClient("")
|
|
||||||
client2.BaseURL = defaultConfig.BaseURL
|
|
||||||
// Wait for API to be ready
|
|
||||||
client = openai.NewClientWithConfig(defaultConfig)
|
|
||||||
Eventually(func() error {
|
|
||||||
_, err := client.ListModels(context.TODO())
|
|
||||||
return err
|
|
||||||
}, "2m").ShouldNot(HaveOccurred())
|
|
||||||
})
|
|
||||||
AfterEach(func() {
|
|
||||||
cancel()
|
|
||||||
app.Shutdown()
|
|
||||||
})
|
|
||||||
It("can generate chat completions from config file", func() {
|
|
||||||
models, err := client.ListModels(context.TODO())
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(models.Models)).To(Equal(12))
|
|
||||||
})
|
|
||||||
It("can generate chat completions from config file", func() {
|
|
||||||
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "list1", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: "abcdedfghikl"}}})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
|
|
||||||
})
|
|
||||||
It("can generate chat completions from config file", func() {
|
|
||||||
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "list2", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: "abcdedfghikl"}}})
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
|
|
||||||
})
|
|
||||||
It("can generate edit completions from config file", func() {
|
|
||||||
request := openaigo.EditCreateRequestBody{
|
|
||||||
Model: "list2",
|
|
||||||
Instruction: "foo",
|
|
||||||
Input: "bar",
|
|
||||||
}
|
|
||||||
resp, err := client2.CreateEdit(context.Background(), request)
|
|
||||||
Expect(err).ToNot(HaveOccurred())
|
|
||||||
Expect(len(resp.Choices)).To(Equal(1))
|
|
||||||
Expect(resp.Choices[0].Text).ToNot(BeEmpty())
|
|
||||||
})
|
|
||||||
|
|
||||||
})
|
|
||||||
})
|
|
||||||
368
api/config.go
368
api/config.go
@@ -1,368 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"io/fs"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
model "github.com/go-skynet/LocalAI/pkg/model"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
"github.com/rs/zerolog/log"
|
|
||||||
"gopkg.in/yaml.v3"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Config struct {
|
|
||||||
OpenAIRequest `yaml:"parameters"`
|
|
||||||
Name string `yaml:"name"`
|
|
||||||
StopWords []string `yaml:"stopwords"`
|
|
||||||
Cutstrings []string `yaml:"cutstrings"`
|
|
||||||
TrimSpace []string `yaml:"trimspace"`
|
|
||||||
ContextSize int `yaml:"context_size"`
|
|
||||||
F16 bool `yaml:"f16"`
|
|
||||||
NUMA bool `yaml:"numa"`
|
|
||||||
Threads int `yaml:"threads"`
|
|
||||||
Debug bool `yaml:"debug"`
|
|
||||||
Roles map[string]string `yaml:"roles"`
|
|
||||||
Embeddings bool `yaml:"embeddings"`
|
|
||||||
Backend string `yaml:"backend"`
|
|
||||||
TemplateConfig TemplateConfig `yaml:"template"`
|
|
||||||
MirostatETA float64 `yaml:"mirostat_eta"`
|
|
||||||
MirostatTAU float64 `yaml:"mirostat_tau"`
|
|
||||||
Mirostat int `yaml:"mirostat"`
|
|
||||||
NGPULayers int `yaml:"gpu_layers"`
|
|
||||||
MMap bool `yaml:"mmap"`
|
|
||||||
MMlock bool `yaml:"mmlock"`
|
|
||||||
LowVRAM bool `yaml:"low_vram"`
|
|
||||||
|
|
||||||
TensorSplit string `yaml:"tensor_split"`
|
|
||||||
MainGPU string `yaml:"main_gpu"`
|
|
||||||
ImageGenerationAssets string `yaml:"asset_dir"`
|
|
||||||
|
|
||||||
PromptCachePath string `yaml:"prompt_cache_path"`
|
|
||||||
PromptCacheAll bool `yaml:"prompt_cache_all"`
|
|
||||||
PromptCacheRO bool `yaml:"prompt_cache_ro"`
|
|
||||||
|
|
||||||
PromptStrings, InputStrings []string
|
|
||||||
InputToken [][]int
|
|
||||||
}
|
|
||||||
|
|
||||||
type TemplateConfig struct {
|
|
||||||
Completion string `yaml:"completion"`
|
|
||||||
Chat string `yaml:"chat"`
|
|
||||||
Edit string `yaml:"edit"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ConfigMerger struct {
|
|
||||||
configs map[string]Config
|
|
||||||
sync.Mutex
|
|
||||||
}
|
|
||||||
|
|
||||||
func defaultConfig(modelFile string) *Config {
|
|
||||||
return &Config{
|
|
||||||
OpenAIRequest: defaultRequest(modelFile),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewConfigMerger() *ConfigMerger {
|
|
||||||
return &ConfigMerger{
|
|
||||||
configs: make(map[string]Config),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
func ReadConfigFile(file string) ([]*Config, error) {
|
|
||||||
c := &[]*Config{}
|
|
||||||
f, err := os.ReadFile(file)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("cannot read config file: %w", err)
|
|
||||||
}
|
|
||||||
if err := yaml.Unmarshal(f, c); err != nil {
|
|
||||||
return nil, fmt.Errorf("cannot unmarshal config file: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return *c, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func ReadConfig(file string) (*Config, error) {
|
|
||||||
c := &Config{}
|
|
||||||
f, err := os.ReadFile(file)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("cannot read config file: %w", err)
|
|
||||||
}
|
|
||||||
if err := yaml.Unmarshal(f, c); err != nil {
|
|
||||||
return nil, fmt.Errorf("cannot unmarshal config file: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cm *ConfigMerger) LoadConfigFile(file string) error {
|
|
||||||
cm.Lock()
|
|
||||||
defer cm.Unlock()
|
|
||||||
c, err := ReadConfigFile(file)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot load config file: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, cc := range c {
|
|
||||||
cm.configs[cc.Name] = *cc
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cm *ConfigMerger) LoadConfig(file string) error {
|
|
||||||
cm.Lock()
|
|
||||||
defer cm.Unlock()
|
|
||||||
c, err := ReadConfig(file)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot read config file: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
cm.configs[c.Name] = *c
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cm *ConfigMerger) GetConfig(m string) (Config, bool) {
|
|
||||||
cm.Lock()
|
|
||||||
defer cm.Unlock()
|
|
||||||
v, exists := cm.configs[m]
|
|
||||||
return v, exists
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cm *ConfigMerger) ListConfigs() []string {
|
|
||||||
cm.Lock()
|
|
||||||
defer cm.Unlock()
|
|
||||||
var res []string
|
|
||||||
for k := range cm.configs {
|
|
||||||
res = append(res, k)
|
|
||||||
}
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cm *ConfigMerger) LoadConfigs(path string) error {
|
|
||||||
cm.Lock()
|
|
||||||
defer cm.Unlock()
|
|
||||||
entries, err := os.ReadDir(path)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
files := make([]fs.FileInfo, 0, len(entries))
|
|
||||||
for _, entry := range entries {
|
|
||||||
info, err := entry.Info()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
files = append(files, info)
|
|
||||||
}
|
|
||||||
for _, file := range files {
|
|
||||||
// Skip templates, YAML and .keep files
|
|
||||||
if !strings.Contains(file.Name(), ".yaml") {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
c, err := ReadConfig(filepath.Join(path, file.Name()))
|
|
||||||
if err == nil {
|
|
||||||
cm.configs[c.Name] = *c
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func updateConfig(config *Config, input *OpenAIRequest) {
|
|
||||||
if input.Echo {
|
|
||||||
config.Echo = input.Echo
|
|
||||||
}
|
|
||||||
if input.TopK != 0 {
|
|
||||||
config.TopK = input.TopK
|
|
||||||
}
|
|
||||||
if input.TopP != 0 {
|
|
||||||
config.TopP = input.TopP
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Temperature != 0 {
|
|
||||||
config.Temperature = input.Temperature
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Maxtokens != 0 {
|
|
||||||
config.Maxtokens = input.Maxtokens
|
|
||||||
}
|
|
||||||
|
|
||||||
switch stop := input.Stop.(type) {
|
|
||||||
case string:
|
|
||||||
if stop != "" {
|
|
||||||
config.StopWords = append(config.StopWords, stop)
|
|
||||||
}
|
|
||||||
case []interface{}:
|
|
||||||
for _, pp := range stop {
|
|
||||||
if s, ok := pp.(string); ok {
|
|
||||||
config.StopWords = append(config.StopWords, s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.RepeatPenalty != 0 {
|
|
||||||
config.RepeatPenalty = input.RepeatPenalty
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Keep != 0 {
|
|
||||||
config.Keep = input.Keep
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Batch != 0 {
|
|
||||||
config.Batch = input.Batch
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.F16 {
|
|
||||||
config.F16 = input.F16
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.IgnoreEOS {
|
|
||||||
config.IgnoreEOS = input.IgnoreEOS
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Seed != 0 {
|
|
||||||
config.Seed = input.Seed
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Mirostat != 0 {
|
|
||||||
config.Mirostat = input.Mirostat
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.MirostatETA != 0 {
|
|
||||||
config.MirostatETA = input.MirostatETA
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.MirostatTAU != 0 {
|
|
||||||
config.MirostatTAU = input.MirostatTAU
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.TypicalP != 0 {
|
|
||||||
config.TypicalP = input.TypicalP
|
|
||||||
}
|
|
||||||
|
|
||||||
switch inputs := input.Input.(type) {
|
|
||||||
case string:
|
|
||||||
if inputs != "" {
|
|
||||||
config.InputStrings = append(config.InputStrings, inputs)
|
|
||||||
}
|
|
||||||
case []interface{}:
|
|
||||||
for _, pp := range inputs {
|
|
||||||
switch i := pp.(type) {
|
|
||||||
case string:
|
|
||||||
config.InputStrings = append(config.InputStrings, i)
|
|
||||||
case []interface{}:
|
|
||||||
tokens := []int{}
|
|
||||||
for _, ii := range i {
|
|
||||||
tokens = append(tokens, int(ii.(float64)))
|
|
||||||
}
|
|
||||||
config.InputToken = append(config.InputToken, tokens)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
switch p := input.Prompt.(type) {
|
|
||||||
case string:
|
|
||||||
config.PromptStrings = append(config.PromptStrings, p)
|
|
||||||
case []interface{}:
|
|
||||||
for _, pp := range p {
|
|
||||||
if s, ok := pp.(string); ok {
|
|
||||||
config.PromptStrings = append(config.PromptStrings, s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
func readInput(c *fiber.Ctx, loader *model.ModelLoader, randomModel bool) (string, *OpenAIRequest, error) {
|
|
||||||
input := new(OpenAIRequest)
|
|
||||||
// Get input data from the request body
|
|
||||||
if err := c.BodyParser(input); err != nil {
|
|
||||||
return "", nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
modelFile := input.Model
|
|
||||||
|
|
||||||
if c.Params("model") != "" {
|
|
||||||
modelFile = c.Params("model")
|
|
||||||
}
|
|
||||||
|
|
||||||
received, _ := json.Marshal(input)
|
|
||||||
|
|
||||||
log.Debug().Msgf("Request received: %s", string(received))
|
|
||||||
|
|
||||||
// Set model from bearer token, if available
|
|
||||||
bearer := strings.TrimLeft(c.Get("authorization"), "Bearer ")
|
|
||||||
bearerExists := bearer != "" && loader.ExistsInModelPath(bearer)
|
|
||||||
|
|
||||||
// If no model was specified, take the first available
|
|
||||||
if modelFile == "" && !bearerExists && randomModel {
|
|
||||||
models, _ := loader.ListModels()
|
|
||||||
if len(models) > 0 {
|
|
||||||
modelFile = models[0]
|
|
||||||
log.Debug().Msgf("No model specified, using: %s", modelFile)
|
|
||||||
} else {
|
|
||||||
log.Debug().Msgf("No model specified, returning error")
|
|
||||||
return "", nil, fmt.Errorf("no model specified")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If a model is found in bearer token takes precedence
|
|
||||||
if bearerExists {
|
|
||||||
log.Debug().Msgf("Using model from bearer token: %s", bearer)
|
|
||||||
modelFile = bearer
|
|
||||||
}
|
|
||||||
return modelFile, input, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func readConfig(modelFile string, input *OpenAIRequest, cm *ConfigMerger, loader *model.ModelLoader, debug bool, threads, ctx int, f16 bool) (*Config, *OpenAIRequest, error) {
|
|
||||||
// Load a config file if present after the model name
|
|
||||||
modelConfig := filepath.Join(loader.ModelPath, modelFile+".yaml")
|
|
||||||
|
|
||||||
var config *Config
|
|
||||||
|
|
||||||
defaults := func() {
|
|
||||||
config = defaultConfig(modelFile)
|
|
||||||
config.ContextSize = ctx
|
|
||||||
config.Threads = threads
|
|
||||||
config.F16 = f16
|
|
||||||
config.Debug = debug
|
|
||||||
}
|
|
||||||
|
|
||||||
cfg, exists := cm.GetConfig(modelFile)
|
|
||||||
if !exists {
|
|
||||||
if _, err := os.Stat(modelConfig); err == nil {
|
|
||||||
if err := cm.LoadConfig(modelConfig); err != nil {
|
|
||||||
return nil, nil, fmt.Errorf("failed loading model config (%s) %s", modelConfig, err.Error())
|
|
||||||
}
|
|
||||||
cfg, exists = cm.GetConfig(modelFile)
|
|
||||||
if exists {
|
|
||||||
config = &cfg
|
|
||||||
} else {
|
|
||||||
defaults()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
defaults()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
config = &cfg
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set the parameters for the language model prediction
|
|
||||||
updateConfig(config, input)
|
|
||||||
|
|
||||||
// Don't allow 0 as setting
|
|
||||||
if config.Threads == 0 {
|
|
||||||
if threads != 0 {
|
|
||||||
config.Threads = threads
|
|
||||||
} else {
|
|
||||||
config.Threads = 4
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Enforce debug flag if passed from CLI
|
|
||||||
if debug {
|
|
||||||
config.Debug = true
|
|
||||||
}
|
|
||||||
|
|
||||||
return config, input, nil
|
|
||||||
}
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/model"
|
|
||||||
. "github.com/onsi/ginkgo/v2"
|
|
||||||
. "github.com/onsi/gomega"
|
|
||||||
)
|
|
||||||
|
|
||||||
var _ = Describe("Test cases for config related functions", func() {
|
|
||||||
|
|
||||||
var (
|
|
||||||
configFile string
|
|
||||||
)
|
|
||||||
|
|
||||||
Context("Test Read configuration functions", func() {
|
|
||||||
configFile = os.Getenv("CONFIG_FILE")
|
|
||||||
It("Test ReadConfigFile", func() {
|
|
||||||
config, err := ReadConfigFile(configFile)
|
|
||||||
Expect(err).To(BeNil())
|
|
||||||
Expect(config).ToNot(BeNil())
|
|
||||||
// two configs in config.yaml
|
|
||||||
Expect(config[0].Name).To(Equal("list1"))
|
|
||||||
Expect(config[1].Name).To(Equal("list2"))
|
|
||||||
})
|
|
||||||
|
|
||||||
It("Test LoadConfigs", func() {
|
|
||||||
cm := NewConfigMerger()
|
|
||||||
options := newOptions()
|
|
||||||
modelLoader := model.NewModelLoader(os.Getenv("MODELS_PATH"))
|
|
||||||
WithModelLoader(modelLoader)(options)
|
|
||||||
|
|
||||||
err := cm.LoadConfigs(options.loader.ModelPath)
|
|
||||||
Expect(err).To(BeNil())
|
|
||||||
Expect(cm.configs).ToNot(BeNil())
|
|
||||||
|
|
||||||
// config should includes gpt4all models's api.config
|
|
||||||
Expect(cm.configs).To(HaveKey("gpt4all"))
|
|
||||||
|
|
||||||
// config should includes gpt2 models's api.config
|
|
||||||
Expect(cm.configs).To(HaveKey("gpt4all-2"))
|
|
||||||
|
|
||||||
// config should includes text-embedding-ada-002 models's api.config
|
|
||||||
Expect(cm.configs).To(HaveKey("text-embedding-ada-002"))
|
|
||||||
|
|
||||||
// config should includes rwkv_test models's api.config
|
|
||||||
Expect(cm.configs).To(HaveKey("rwkv_test"))
|
|
||||||
|
|
||||||
// config should includes whisper-1 models's api.config
|
|
||||||
Expect(cm.configs).To(HaveKey("whisper-1"))
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
237
api/gallery.go
237
api/gallery.go
@@ -1,237 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
json "github.com/json-iterator/go"
|
|
||||||
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/gallery"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
"github.com/google/uuid"
|
|
||||||
"github.com/rs/zerolog/log"
|
|
||||||
)
|
|
||||||
|
|
||||||
type galleryOp struct {
|
|
||||||
req gallery.GalleryModel
|
|
||||||
id string
|
|
||||||
galleries []gallery.Gallery
|
|
||||||
galleryName string
|
|
||||||
}
|
|
||||||
|
|
||||||
type galleryOpStatus struct {
|
|
||||||
Error error `json:"error"`
|
|
||||||
Processed bool `json:"processed"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
Progress float64 `json:"progress"`
|
|
||||||
TotalFileSize string `json:"file_size"`
|
|
||||||
DownloadedFileSize string `json:"downloaded_size"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type galleryApplier struct {
|
|
||||||
modelPath string
|
|
||||||
sync.Mutex
|
|
||||||
C chan galleryOp
|
|
||||||
statuses map[string]*galleryOpStatus
|
|
||||||
}
|
|
||||||
|
|
||||||
func newGalleryApplier(modelPath string) *galleryApplier {
|
|
||||||
return &galleryApplier{
|
|
||||||
modelPath: modelPath,
|
|
||||||
C: make(chan galleryOp),
|
|
||||||
statuses: make(map[string]*galleryOpStatus),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// prepareModel applies a
|
|
||||||
func prepareModel(modelPath string, req gallery.GalleryModel, cm *ConfigMerger, downloadStatus func(string, string, string, float64)) error {
|
|
||||||
|
|
||||||
config, err := gallery.GetGalleryConfigFromURL(req.URL)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
config.Files = append(config.Files, req.AdditionalFiles...)
|
|
||||||
|
|
||||||
return gallery.InstallModel(modelPath, req.Name, &config, req.Overrides, downloadStatus)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *galleryApplier) updateStatus(s string, op *galleryOpStatus) {
|
|
||||||
g.Lock()
|
|
||||||
defer g.Unlock()
|
|
||||||
g.statuses[s] = op
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *galleryApplier) getStatus(s string) *galleryOpStatus {
|
|
||||||
g.Lock()
|
|
||||||
defer g.Unlock()
|
|
||||||
|
|
||||||
return g.statuses[s]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *galleryApplier) start(c context.Context, cm *ConfigMerger) {
|
|
||||||
go func() {
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case <-c.Done():
|
|
||||||
return
|
|
||||||
case op := <-g.C:
|
|
||||||
g.updateStatus(op.id, &galleryOpStatus{Message: "processing", Progress: 0})
|
|
||||||
|
|
||||||
// updates the status with an error
|
|
||||||
updateError := func(e error) {
|
|
||||||
g.updateStatus(op.id, &galleryOpStatus{Error: e, Processed: true, Message: "error: " + e.Error()})
|
|
||||||
}
|
|
||||||
|
|
||||||
// displayDownload displays the download progress
|
|
||||||
progressCallback := func(fileName string, current string, total string, percentage float64) {
|
|
||||||
g.updateStatus(op.id, &galleryOpStatus{Message: "processing", Progress: percentage, TotalFileSize: total, DownloadedFileSize: current})
|
|
||||||
displayDownload(fileName, current, total, percentage)
|
|
||||||
}
|
|
||||||
|
|
||||||
var err error
|
|
||||||
// if the request contains a gallery name, we apply the gallery from the gallery list
|
|
||||||
if op.galleryName != "" {
|
|
||||||
err = gallery.InstallModelFromGallery(op.galleries, op.galleryName, g.modelPath, op.req, progressCallback)
|
|
||||||
} else {
|
|
||||||
err = prepareModel(g.modelPath, op.req, cm, progressCallback)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
updateError(err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reload models
|
|
||||||
err = cm.LoadConfigs(g.modelPath)
|
|
||||||
if err != nil {
|
|
||||||
updateError(err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
g.updateStatus(op.id, &galleryOpStatus{Processed: true, Message: "completed", Progress: 100})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
var lastProgress time.Time = time.Now()
|
|
||||||
var startTime time.Time = time.Now()
|
|
||||||
|
|
||||||
func displayDownload(fileName string, current string, total string, percentage float64) {
|
|
||||||
currentTime := time.Now()
|
|
||||||
|
|
||||||
if currentTime.Sub(lastProgress) >= 5*time.Second {
|
|
||||||
|
|
||||||
lastProgress = currentTime
|
|
||||||
|
|
||||||
// calculate ETA based on percentage and elapsed time
|
|
||||||
var eta time.Duration
|
|
||||||
if percentage > 0 {
|
|
||||||
elapsed := currentTime.Sub(startTime)
|
|
||||||
eta = time.Duration(float64(elapsed)*(100/percentage) - float64(elapsed))
|
|
||||||
}
|
|
||||||
|
|
||||||
if total != "" {
|
|
||||||
log.Debug().Msgf("Downloading %s: %s/%s (%.2f%%) ETA: %s", fileName, current, total, percentage, eta)
|
|
||||||
} else {
|
|
||||||
log.Debug().Msgf("Downloading: %s", current)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type galleryModel struct {
|
|
||||||
gallery.GalleryModel
|
|
||||||
ID string `json:"id"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func ApplyGalleryFromFile(modelPath, s string, cm *ConfigMerger, galleries []gallery.Gallery) error {
|
|
||||||
dat, err := os.ReadFile(s)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return ApplyGalleryFromString(modelPath, string(dat), cm, galleries)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ApplyGalleryFromString(modelPath, s string, cm *ConfigMerger, galleries []gallery.Gallery) error {
|
|
||||||
var requests []galleryModel
|
|
||||||
err := json.Unmarshal([]byte(s), &requests)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, r := range requests {
|
|
||||||
if r.ID == "" {
|
|
||||||
err = prepareModel(modelPath, r.GalleryModel, cm, displayDownload)
|
|
||||||
} else {
|
|
||||||
err = gallery.InstallModelFromGallery(galleries, r.ID, modelPath, r.GalleryModel, displayDownload)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func getOpStatus(g *galleryApplier) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
|
|
||||||
status := g.getStatus(c.Params("uuid"))
|
|
||||||
if status == nil {
|
|
||||||
return fmt.Errorf("could not find any status for ID")
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.JSON(status)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type GalleryModel struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
gallery.GalleryModel
|
|
||||||
}
|
|
||||||
|
|
||||||
func applyModelGallery(modelPath string, cm *ConfigMerger, g chan galleryOp, galleries []gallery.Gallery) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
input := new(GalleryModel)
|
|
||||||
// Get input data from the request body
|
|
||||||
if err := c.BodyParser(input); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
uuid, err := uuid.NewUUID()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
g <- galleryOp{
|
|
||||||
req: input.GalleryModel,
|
|
||||||
id: uuid.String(),
|
|
||||||
galleryName: input.ID,
|
|
||||||
galleries: galleries,
|
|
||||||
}
|
|
||||||
return c.JSON(struct {
|
|
||||||
ID string `json:"uuid"`
|
|
||||||
StatusURL string `json:"status"`
|
|
||||||
}{ID: uuid.String(), StatusURL: c.BaseURL() + "/models/jobs/" + uuid.String()})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func listModelFromGallery(galleries []gallery.Gallery, basePath string) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
log.Debug().Msgf("Listing models from galleries: %+v", galleries)
|
|
||||||
|
|
||||||
models, err := gallery.AvailableGalleryModels(galleries, basePath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
log.Debug().Msgf("Models found from galleries: %+v", models)
|
|
||||||
for _, m := range models {
|
|
||||||
log.Debug().Msgf("Model found from galleries: %+v", m)
|
|
||||||
}
|
|
||||||
dat, err := json.Marshal(models)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return c.Send(dat)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
|
|
||||||
model "github.com/go-skynet/LocalAI/pkg/model"
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/tts"
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/utils"
|
|
||||||
llama "github.com/go-skynet/go-llama.cpp"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
type TTSRequest struct {
|
|
||||||
Model string `json:"model" yaml:"model"`
|
|
||||||
Input string `json:"input" yaml:"input"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func generateUniqueFileName(dir, baseName, ext string) string {
|
|
||||||
counter := 1
|
|
||||||
fileName := baseName + ext
|
|
||||||
|
|
||||||
for {
|
|
||||||
filePath := filepath.Join(dir, fileName)
|
|
||||||
_, err := os.Stat(filePath)
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
return fileName
|
|
||||||
}
|
|
||||||
|
|
||||||
counter++
|
|
||||||
fileName = fmt.Sprintf("%s_%d%s", baseName, counter, ext)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func ttsEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
|
|
||||||
input := new(TTSRequest)
|
|
||||||
// Get input data from the request body
|
|
||||||
if err := c.BodyParser(input); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
piperModel, err := o.loader.BackendLoader(model.PiperBackend, input.Model, []llama.ModelOption{}, uint32(0), o.assetsDestination)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if piperModel == nil {
|
|
||||||
return fmt.Errorf("could not load piper model")
|
|
||||||
}
|
|
||||||
|
|
||||||
w, ok := piperModel.(*tts.Piper)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("loader returned non-piper object %+v", w)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := os.MkdirAll(o.audioDir, 0755); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
fileName := generateUniqueFileName(o.audioDir, "piper", ".wav")
|
|
||||||
filePath := filepath.Join(o.audioDir, fileName)
|
|
||||||
|
|
||||||
modelPath := filepath.Join(o.loader.ModelPath, input.Model)
|
|
||||||
|
|
||||||
if err := utils.VerifyPath(modelPath, o.loader.ModelPath); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := w.TTS(input.Input, modelPath, filePath); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.Download(filePath)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
772
api/openai.go
772
api/openai.go
@@ -1,772 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"encoding/base64"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"path/filepath"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/ggerganov/whisper.cpp/bindings/go/pkg/whisper"
|
|
||||||
model "github.com/go-skynet/LocalAI/pkg/model"
|
|
||||||
whisperutil "github.com/go-skynet/LocalAI/pkg/whisper"
|
|
||||||
llama "github.com/go-skynet/go-llama.cpp"
|
|
||||||
"github.com/gofiber/fiber/v2"
|
|
||||||
"github.com/rs/zerolog/log"
|
|
||||||
"github.com/valyala/fasthttp"
|
|
||||||
)
|
|
||||||
|
|
||||||
// APIError provides error information returned by the OpenAI API.
type APIError struct {
	Code    any     `json:"code,omitempty"`  // machine-readable error code (string or number)
	Message string  `json:"message"`         // human-readable error description
	Param   *string `json:"param,omitempty"` // offending request parameter, when applicable
	Type    string  `json:"type"`            // error category
}

// ErrorResponse is the JSON envelope used for error replies.
type ErrorResponse struct {
	Error *APIError `json:"error,omitempty"`
}

// OpenAIUsage reports token accounting for a request, per the OpenAI spec.
type OpenAIUsage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

// Item is a single result element: an embedding vector for the embeddings
// API, or an image (URL or inline base64) for the images API.
type Item struct {
	Embedding []float32 `json:"embedding"`
	Index     int       `json:"index"`
	Object    string    `json:"object,omitempty"`

	// Images
	URL     string `json:"url,omitempty"`      // link to the generated image
	B64JSON string `json:"b64_json,omitempty"` // image bytes, base64-encoded
}

// OpenAIResponse is the shared response envelope for the completion, chat,
// edit, embeddings and image endpoints.
type OpenAIResponse struct {
	Created int      `json:"created,omitempty"`
	Object  string   `json:"object,omitempty"` // e.g. "text_completion", "chat.completion", "list"
	ID      string   `json:"id,omitempty"`
	Model   string   `json:"model,omitempty"` // echoed back from the request
	Choices []Choice `json:"choices,omitempty"`
	Data    []Item   `json:"data,omitempty"` // embeddings / images payload

	Usage OpenAIUsage `json:"usage"`
}

// Choice is one candidate result. Message is used by chat responses, Delta
// by chat streaming chunks, and Text by completion/edit responses.
type Choice struct {
	Index        int      `json:"index,omitempty"`
	FinishReason string   `json:"finish_reason,omitempty"`
	Message      *Message `json:"message,omitempty"`
	Delta        *Message `json:"delta,omitempty"`
	Text         string   `json:"text,omitempty"`
}

// Message is a single chat message with its role (e.g. "user", "assistant").
type Message struct {
	Role    string `json:"role,omitempty" yaml:"role"`
	Content string `json:"content,omitempty" yaml:"content"`
}

// OpenAIModel describes one entry of the model-listing endpoint.
type OpenAIModel struct {
	ID     string `json:"id"`
	Object string `json:"object"`
}

// OpenAIRequest is the union of all request bodies accepted by the OpenAI
// compatible endpoints; each endpoint reads only the fields relevant to it.
type OpenAIRequest struct {
	Model string `json:"model" yaml:"model"`

	// whisper
	File     string `json:"file" validate:"required"`
	Language string `json:"language"`
	//whisper/image
	ResponseFormat string `json:"response_format"`
	// image
	Size string `json:"size"`
	// Prompt is read only by completion/image API calls
	Prompt interface{} `json:"prompt" yaml:"prompt"`

	// Edit endpoint
	Instruction string      `json:"instruction" yaml:"instruction"`
	Input       interface{} `json:"input" yaml:"input"`

	Stop interface{} `json:"stop" yaml:"stop"`

	// Messages is read only by chat/completion API calls
	Messages []Message `json:"messages" yaml:"messages"`

	Stream bool `json:"stream"`
	Echo   bool `json:"echo"`
	// Common options between all the API calls
	TopP        float64 `json:"top_p" yaml:"top_p"`
	TopK        int     `json:"top_k" yaml:"top_k"`
	Temperature float64 `json:"temperature" yaml:"temperature"`
	Maxtokens   int     `json:"max_tokens" yaml:"max_tokens"`

	N int `json:"n"` // number of results to generate (images)

	// Custom parameters - not present in the OpenAI API
	Batch         int     `json:"batch" yaml:"batch"`
	F16           bool    `json:"f16" yaml:"f16"`
	IgnoreEOS     bool    `json:"ignore_eos" yaml:"ignore_eos"`
	RepeatPenalty float64 `json:"repeat_penalty" yaml:"repeat_penalty"`
	Keep          int     `json:"n_keep" yaml:"n_keep"`

	MirostatETA float64 `json:"mirostat_eta" yaml:"mirostat_eta"`
	MirostatTAU float64 `json:"mirostat_tau" yaml:"mirostat_tau"`
	Mirostat    int     `json:"mirostat" yaml:"mirostat"`

	FrequencyPenalty float64 `json:"frequency_penalty" yaml:"frequency_penalty"`
	TFZ              float64 `json:"tfz" yaml:"tfz"`

	Seed int `json:"seed" yaml:"seed"`

	// Image (not supported by OpenAI)
	Mode int `json:"mode"`
	Step int `json:"step"`

	TypicalP float64 `json:"typical_p" yaml:"typical_p"`
}
|
|
||||||
|
|
||||||
func defaultRequest(modelFile string) OpenAIRequest {
|
|
||||||
return OpenAIRequest{
|
|
||||||
TopP: 0.7,
|
|
||||||
TopK: 80,
|
|
||||||
Maxtokens: 512,
|
|
||||||
Temperature: 0.9,
|
|
||||||
Model: modelFile,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// https://platform.openai.com/docs/api-reference/completions
|
|
||||||
// completionEndpoint implements the OpenAI completions API
// (https://platform.openai.com/docs/api-reference/completions).
// It supports both a buffered JSON response and server-sent-event streaming.
func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
	// process streams partial predictions for a single prompt into the
	// responses channel and closes the channel when prediction finishes.
	process := func(s string, req *OpenAIRequest, config *Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
			resp := OpenAIResponse{
				Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
				Choices: []Choice{
					{
						Index: 0,
						Text:  s,
					},
				},
				Object: "text_completion",
			}
			log.Debug().Msgf("Sending goroutine: %s", s)

			responses <- resp
			return true
		})
		close(responses)
	}

	return func(c *fiber.Ctx) error {
		model, input, err := readInput(c, o.loader, true)
		if err != nil {
			return fmt.Errorf("failed reading parameters from request:%w", err)
		}

		log.Debug().Msgf("`input`: %+v", input)

		config, input, err := readConfig(model, input, cm, o.loader, o.debug, o.threads, o.ctxSize, o.f16)
		if err != nil {
			return fmt.Errorf("failed reading parameters from request:%w", err)
		}

		log.Debug().Msgf("Parameter Config: %+v", config)

		if input.Stream {
			log.Debug().Msgf("Stream request received")
			// SSE headers must be set before the body stream writer starts.
			c.Context().SetContentType("text/event-stream")
			//c.Response().Header.SetContentType(fiber.MIMETextHTMLCharsetUTF8)
			//c.Set("Content-Type", "text/event-stream")
			c.Set("Cache-Control", "no-cache")
			c.Set("Connection", "keep-alive")
			c.Set("Transfer-Encoding", "chunked")
		}

		// The model name doubles as the prompt-template name unless the
		// config declares a dedicated completion template.
		templateFile := config.Model

		if config.TemplateConfig.Completion != "" {
			templateFile = config.TemplateConfig.Completion
		}

		if input.Stream {
			// Streaming supports exactly one prompt.
			if len(config.PromptStrings) > 1 {
				return errors.New("cannot handle more than 1 `PromptStrings` when `Stream`ing")
			}

			predInput := config.PromptStrings[0]

			// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
			templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
				Input string
			}{Input: predInput})
			if err == nil {
				predInput = templatedInput
				log.Debug().Msgf("Template found, input modified to: %s", predInput)
			}

			responses := make(chan OpenAIResponse)

			go process(predInput, input, config, o.loader, responses)

			c.Context().SetBodyStreamWriter(fasthttp.StreamWriter(func(w *bufio.Writer) {

				// Relay each partial prediction as an SSE "data:" event.
				for ev := range responses {
					var buf bytes.Buffer
					enc := json.NewEncoder(&buf)
					enc.Encode(ev)

					log.Debug().Msgf("Sending chunk: %s", buf.String())
					fmt.Fprintf(w, "data: %v\n", buf.String())
					w.Flush()
				}

				// Emit a final chunk carrying the finish reason, then the
				// [DONE] sentinel expected by OpenAI streaming clients.
				resp := &OpenAIResponse{
					Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
					Choices: []Choice{
						{
							Index:        0,
							FinishReason: "stop",
						},
					},
					Object: "text_completion",
				}
				respData, _ := json.Marshal(resp)

				w.WriteString(fmt.Sprintf("data: %s\n\n", respData))
				w.WriteString("data: [DONE]\n\n")
				w.Flush()
			}))
			return nil
		}

		// Non-streaming path: evaluate every prompt and collect all choices.
		var result []Choice
		for _, i := range config.PromptStrings {
			// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
			templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
				Input string
			}{Input: i})
			if err == nil {
				i = templatedInput
				log.Debug().Msgf("Template found, input modified to: %s", i)
			}

			r, err := ComputeChoices(i, input, config, o, o.loader, func(s string, c *[]Choice) {
				*c = append(*c, Choice{Text: s})
			}, nil)
			if err != nil {
				return err
			}

			result = append(result, r...)
		}

		resp := &OpenAIResponse{
			Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
			Choices: result,
			Object:  "text_completion",
		}

		jsonResult, _ := json.Marshal(resp)
		log.Debug().Msgf("Response: %s", jsonResult)

		// Return the prediction in the response body
		return c.JSON(resp)
	}
}
|
|
||||||
|
|
||||||
// https://platform.openai.com/docs/api-reference/embeddings
|
|
||||||
func embeddingsEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
model, input, err := readInput(c, o.loader, true)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
config, input, err := readConfig(model, input, cm, o.loader, o.debug, o.threads, o.ctxSize, o.f16)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Debug().Msgf("Parameter Config: %+v", config)
|
|
||||||
items := []Item{}
|
|
||||||
|
|
||||||
for i, s := range config.InputToken {
|
|
||||||
// get the model function to call for the result
|
|
||||||
embedFn, err := ModelEmbedding("", s, o.loader, *config, o)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
embeddings, err := embedFn()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
items = append(items, Item{Embedding: embeddings, Index: i, Object: "embedding"})
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, s := range config.InputStrings {
|
|
||||||
// get the model function to call for the result
|
|
||||||
embedFn, err := ModelEmbedding(s, []int{}, o.loader, *config, o)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
embeddings, err := embedFn()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
items = append(items, Item{Embedding: embeddings, Index: i, Object: "embedding"})
|
|
||||||
}
|
|
||||||
|
|
||||||
resp := &OpenAIResponse{
|
|
||||||
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
|
|
||||||
Data: items,
|
|
||||||
Object: "list",
|
|
||||||
}
|
|
||||||
|
|
||||||
jsonResult, _ := json.Marshal(resp)
|
|
||||||
log.Debug().Msgf("Response: %s", jsonResult)
|
|
||||||
|
|
||||||
// Return the prediction in the response body
|
|
||||||
return c.JSON(resp)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// chatEndpoint implements the OpenAI chat completions API. The chat history
// is flattened into a single newline-joined prompt (each message optionally
// prefixed with the role label mapped through config.Roles) and evaluated
// like a completion; streaming replies are delivered as server-sent events.
func chatEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {

	// process streams chat.completion.chunk responses for the prompt into
	// the responses channel and closes it when the prediction terminates.
	// The first chunk only announces the assistant role, per OpenAI
	// streaming convention.
	process := func(s string, req *OpenAIRequest, config *Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
		initialMessage := OpenAIResponse{
			Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
			Choices: []Choice{{Delta: &Message{Role: "assistant"}}},
			Object:  "chat.completion.chunk",
		}
		responses <- initialMessage

		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
			resp := OpenAIResponse{
				Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
				Choices: []Choice{{Delta: &Message{Content: s}, Index: 0}},
				Object:  "chat.completion.chunk",
			}
			log.Debug().Msgf("Sending goroutine: %s", s)

			responses <- resp
			return true
		})
		close(responses)
	}
	return func(c *fiber.Ctx) error {
		model, input, err := readInput(c, o.loader, true)
		if err != nil {
			return fmt.Errorf("failed reading parameters from request:%w", err)
		}

		config, input, err := readConfig(model, input, cm, o.loader, o.debug, o.threads, o.ctxSize, o.f16)
		if err != nil {
			return fmt.Errorf("failed reading parameters from request:%w", err)
		}

		log.Debug().Msgf("Parameter Config: %+v", config)

		var predInput string

		// Flatten the chat history: prefix each message with its configured
		// role label (if any), then join everything with newlines.
		mess := []string{}
		for _, i := range input.Messages {
			var content string
			r := config.Roles[i.Role]
			if r != "" {
				content = fmt.Sprint(r, " ", i.Content)
			} else {
				content = i.Content
			}

			mess = append(mess, content)
		}

		predInput = strings.Join(mess, "\n")

		if input.Stream {
			log.Debug().Msgf("Stream request received")
			// SSE headers must be set before the body stream writer starts.
			c.Context().SetContentType("text/event-stream")
			//c.Response().Header.SetContentType(fiber.MIMETextHTMLCharsetUTF8)
			// c.Set("Content-Type", "text/event-stream")
			c.Set("Cache-Control", "no-cache")
			c.Set("Connection", "keep-alive")
			c.Set("Transfer-Encoding", "chunked")
		}

		// The model name doubles as the prompt-template name unless the
		// config declares a dedicated chat template.
		templateFile := config.Model

		if config.TemplateConfig.Chat != "" {
			templateFile = config.TemplateConfig.Chat
		}

		// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
		templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
			Input string
		}{Input: predInput})
		if err == nil {
			predInput = templatedInput
			log.Debug().Msgf("Template found, input modified to: %s", predInput)
		}

		if input.Stream {
			responses := make(chan OpenAIResponse)

			go process(predInput, input, config, o.loader, responses)

			c.Context().SetBodyStreamWriter(fasthttp.StreamWriter(func(w *bufio.Writer) {

				// Relay each partial prediction as an SSE "data:" event.
				for ev := range responses {
					var buf bytes.Buffer
					enc := json.NewEncoder(&buf)
					enc.Encode(ev)

					log.Debug().Msgf("Sending chunk: %s", buf.String())
					fmt.Fprintf(w, "data: %v\n", buf.String())
					w.Flush()
				}

				// Final chunk with the finish reason, then the [DONE]
				// sentinel expected by OpenAI streaming clients.
				resp := &OpenAIResponse{
					Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
					Choices: []Choice{
						{
							FinishReason: "stop",
							Index:        0,
							Delta:        &Message{},
						}},
					Object: "chat.completion.chunk",
				}
				respData, _ := json.Marshal(resp)

				w.WriteString(fmt.Sprintf("data: %s\n\n", respData))
				w.WriteString("data: [DONE]\n\n")
				w.Flush()
			}))
			return nil
		}

		// Non-streaming path: evaluate the prompt and wrap every result as
		// an assistant message.
		result, err := ComputeChoices(predInput, input, config, o, o.loader, func(s string, c *[]Choice) {
			*c = append(*c, Choice{Message: &Message{Role: "assistant", Content: s}})
		}, nil)
		if err != nil {
			return err
		}

		resp := &OpenAIResponse{
			Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
			Choices: result,
			Object:  "chat.completion",
		}
		respData, _ := json.Marshal(resp)
		log.Debug().Msgf("Response: %s", respData)

		// Return the prediction in the response body
		return c.JSON(resp)
	}
}
|
|
||||||
|
|
||||||
func editEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
model, input, err := readInput(c, o.loader, true)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
config, input, err := readConfig(model, input, cm, o.loader, o.debug, o.threads, o.ctxSize, o.f16)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Debug().Msgf("Parameter Config: %+v", config)
|
|
||||||
|
|
||||||
templateFile := config.Model
|
|
||||||
|
|
||||||
if config.TemplateConfig.Edit != "" {
|
|
||||||
templateFile = config.TemplateConfig.Edit
|
|
||||||
}
|
|
||||||
|
|
||||||
var result []Choice
|
|
||||||
for _, i := range config.InputStrings {
|
|
||||||
// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
|
|
||||||
templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
|
|
||||||
Input string
|
|
||||||
Instruction string
|
|
||||||
}{Input: i})
|
|
||||||
if err == nil {
|
|
||||||
i = templatedInput
|
|
||||||
log.Debug().Msgf("Template found, input modified to: %s", i)
|
|
||||||
}
|
|
||||||
|
|
||||||
r, err := ComputeChoices(i, input, config, o, o.loader, func(s string, c *[]Choice) {
|
|
||||||
*c = append(*c, Choice{Text: s})
|
|
||||||
}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
result = append(result, r...)
|
|
||||||
}
|
|
||||||
|
|
||||||
resp := &OpenAIResponse{
|
|
||||||
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
|
|
||||||
Choices: result,
|
|
||||||
Object: "edit",
|
|
||||||
}
|
|
||||||
|
|
||||||
jsonResult, _ := json.Marshal(resp)
|
|
||||||
log.Debug().Msgf("Response: %s", jsonResult)
|
|
||||||
|
|
||||||
// Return the prediction in the response body
|
|
||||||
return c.JSON(resp)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// https://platform.openai.com/docs/api-reference/images/create
|
|
||||||
|
|
||||||
/*
|
|
||||||
*
|
|
||||||
|
|
||||||
curl http://localhost:8080/v1/images/generations \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d '{
|
|
||||||
"prompt": "A cute baby sea otter",
|
|
||||||
"n": 1,
|
|
||||||
"size": "512x512"
|
|
||||||
}'
|
|
||||||
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
func imageEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
m, input, err := readInput(c, o.loader, false)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if m == "" {
|
|
||||||
m = model.StableDiffusionBackend
|
|
||||||
}
|
|
||||||
log.Debug().Msgf("Loading model: %+v", m)
|
|
||||||
|
|
||||||
config, input, err := readConfig(m, input, cm, o.loader, o.debug, 0, 0, false)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Debug().Msgf("Parameter Config: %+v", config)
|
|
||||||
|
|
||||||
// XXX: Only stablediffusion is supported for now
|
|
||||||
if config.Backend == "" {
|
|
||||||
config.Backend = model.StableDiffusionBackend
|
|
||||||
}
|
|
||||||
|
|
||||||
sizeParts := strings.Split(input.Size, "x")
|
|
||||||
if len(sizeParts) != 2 {
|
|
||||||
return fmt.Errorf("Invalid value for 'size'")
|
|
||||||
}
|
|
||||||
width, err := strconv.Atoi(sizeParts[0])
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("Invalid value for 'size'")
|
|
||||||
}
|
|
||||||
height, err := strconv.Atoi(sizeParts[1])
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("Invalid value for 'size'")
|
|
||||||
}
|
|
||||||
|
|
||||||
b64JSON := false
|
|
||||||
if input.ResponseFormat == "b64_json" {
|
|
||||||
b64JSON = true
|
|
||||||
}
|
|
||||||
|
|
||||||
var result []Item
|
|
||||||
for _, i := range config.PromptStrings {
|
|
||||||
n := input.N
|
|
||||||
if input.N == 0 {
|
|
||||||
n = 1
|
|
||||||
}
|
|
||||||
for j := 0; j < n; j++ {
|
|
||||||
prompts := strings.Split(i, "|")
|
|
||||||
positive_prompt := prompts[0]
|
|
||||||
negative_prompt := ""
|
|
||||||
if len(prompts) > 1 {
|
|
||||||
negative_prompt = prompts[1]
|
|
||||||
}
|
|
||||||
|
|
||||||
mode := 0
|
|
||||||
step := 15
|
|
||||||
|
|
||||||
if input.Mode != 0 {
|
|
||||||
mode = input.Mode
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Step != 0 {
|
|
||||||
step = input.Step
|
|
||||||
}
|
|
||||||
|
|
||||||
tempDir := ""
|
|
||||||
if !b64JSON {
|
|
||||||
tempDir = o.imageDir
|
|
||||||
}
|
|
||||||
// Create a temporary file
|
|
||||||
outputFile, err := ioutil.TempFile(tempDir, "b64")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
outputFile.Close()
|
|
||||||
output := outputFile.Name() + ".png"
|
|
||||||
// Rename the temporary file
|
|
||||||
err = os.Rename(outputFile.Name(), output)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
baseURL := c.BaseURL()
|
|
||||||
|
|
||||||
fn, err := ImageGeneration(height, width, mode, step, input.Seed, positive_prompt, negative_prompt, output, o.loader, *config, o)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := fn(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
item := &Item{}
|
|
||||||
|
|
||||||
if b64JSON {
|
|
||||||
defer os.RemoveAll(output)
|
|
||||||
data, err := os.ReadFile(output)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
item.B64JSON = base64.StdEncoding.EncodeToString(data)
|
|
||||||
} else {
|
|
||||||
base := filepath.Base(output)
|
|
||||||
item.URL = baseURL + "/generated-images/" + base
|
|
||||||
}
|
|
||||||
|
|
||||||
result = append(result, *item)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
resp := &OpenAIResponse{
|
|
||||||
Data: result,
|
|
||||||
}
|
|
||||||
|
|
||||||
jsonResult, _ := json.Marshal(resp)
|
|
||||||
log.Debug().Msgf("Response: %s", jsonResult)
|
|
||||||
|
|
||||||
// Return the prediction in the response body
|
|
||||||
return c.JSON(resp)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// https://platform.openai.com/docs/api-reference/audio/create
|
|
||||||
func transcriptEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
m, input, err := readInput(c, o.loader, false)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
config, input, err := readConfig(m, input, cm, o.loader, o.debug, o.threads, o.ctxSize, o.f16)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed reading parameters from request:%w", err)
|
|
||||||
}
|
|
||||||
// retrieve the file data from the request
|
|
||||||
file, err := c.FormFile("file")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
f, err := file.Open()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
dir, err := os.MkdirTemp("", "whisper")
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer os.RemoveAll(dir)
|
|
||||||
|
|
||||||
dst := filepath.Join(dir, path.Base(file.Filename))
|
|
||||||
dstFile, err := os.Create(dst)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := io.Copy(dstFile, f); err != nil {
|
|
||||||
log.Debug().Msgf("Audio file copying error %+v - %+v - err %+v", file.Filename, dst, err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Debug().Msgf("Audio file copied to: %+v", dst)
|
|
||||||
|
|
||||||
whisperModel, err := o.loader.BackendLoader(model.WhisperBackend, config.Model, []llama.ModelOption{}, uint32(config.Threads), o.assetsDestination)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if whisperModel == nil {
|
|
||||||
return fmt.Errorf("could not load whisper model")
|
|
||||||
}
|
|
||||||
|
|
||||||
w, ok := whisperModel.(whisper.Model)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("loader returned non-whisper object")
|
|
||||||
}
|
|
||||||
|
|
||||||
tr, err := whisperutil.Transcript(w, dst, input.Language, uint(config.Threads))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Debug().Msgf("Trascribed: %+v", tr)
|
|
||||||
// TODO: handle different outputs here
|
|
||||||
return c.Status(http.StatusOK).JSON(fiber.Map{"text": tr})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func listModels(loader *model.ModelLoader, cm *ConfigMerger) func(ctx *fiber.Ctx) error {
|
|
||||||
return func(c *fiber.Ctx) error {
|
|
||||||
models, err := loader.ListModels()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
var mm map[string]interface{} = map[string]interface{}{}
|
|
||||||
|
|
||||||
dataModels := []OpenAIModel{}
|
|
||||||
for _, m := range models {
|
|
||||||
mm[m] = nil
|
|
||||||
dataModels = append(dataModels, OpenAIModel{ID: m, Object: "model"})
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, k := range cm.ListConfigs() {
|
|
||||||
if _, exists := mm[k]; !exists {
|
|
||||||
dataModels = append(dataModels, OpenAIModel{ID: k, Object: "model"})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.JSON(struct {
|
|
||||||
Object string `json:"object"`
|
|
||||||
Data []OpenAIModel `json:"data"`
|
|
||||||
}{
|
|
||||||
Object: "list",
|
|
||||||
Data: dataModels,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
153
api/options.go
153
api/options.go
@@ -1,153 +0,0 @@
|
|||||||
package api
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"embed"
|
|
||||||
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/gallery"
|
|
||||||
model "github.com/go-skynet/LocalAI/pkg/model"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Option collects every runtime setting of the API server. Instances are
// built with newOptions and customized through the With* functional options
// below.
type Option struct {
	context    context.Context
	configFile string
	loader     *model.ModelLoader
	// uploadLimitMB caps request body size; threads/ctxSize are the backend
	// defaults used when a model config does not override them.
	uploadLimitMB, threads, ctxSize int
	f16                             bool
	debug, disableMessage           bool
	imageDir              string // where generated images are stored/served
	audioDir              string // where generated audio files are stored/served
	cors                  bool
	preloadJSONModels     string // JSON string of models to preload at startup
	preloadModelsFromPath string // path to a YAML file of models to preload
	corsAllowOrigins      string

	galleries []gallery.Gallery

	backendAssets     embed.FS // embedded backend assets
	assetsDestination string   // directory the embedded assets are extracted to
}

// AppOption mutates an Option; pass any number of them to newOptions.
type AppOption func(*Option)

// newOptions returns an Option populated with defaults (15 MB upload limit,
// 1 thread, 512 context size, debug and disableMessage enabled) and then
// applies the given overrides in order.
func newOptions(o ...AppOption) *Option {
	opt := &Option{
		context:        context.Background(),
		uploadLimitMB:  15,
		threads:        1,
		ctxSize:        512,
		debug:          true,
		disableMessage: true,
	}
	for _, oo := range o {
		oo(opt)
	}
	return opt
}

// WithCors enables or disables CORS handling.
func WithCors(b bool) AppOption {
	return func(o *Option) {
		o.cors = b
	}
}

// WithCorsAllowOrigins sets the allowed CORS origins.
func WithCorsAllowOrigins(b string) AppOption {
	return func(o *Option) {
		o.corsAllowOrigins = b
	}
}

// WithBackendAssetsOutput sets the directory embedded backend assets are
// extracted to.
func WithBackendAssetsOutput(out string) AppOption {
	return func(o *Option) {
		o.assetsDestination = out
	}
}

// WithBackendAssets sets the embedded filesystem holding backend assets.
func WithBackendAssets(f embed.FS) AppOption {
	return func(o *Option) {
		o.backendAssets = f
	}
}

// WithGalleries appends the given model galleries to the configured list.
func WithGalleries(galleries []gallery.Gallery) AppOption {
	return func(o *Option) {
		o.galleries = append(o.galleries, galleries...)
	}
}

// WithContext sets the base context used by the server.
func WithContext(ctx context.Context) AppOption {
	return func(o *Option) {
		o.context = ctx
	}
}

// WithYAMLConfigPreload sets a YAML file of models to preload at startup.
func WithYAMLConfigPreload(configFile string) AppOption {
	return func(o *Option) {
		o.preloadModelsFromPath = configFile
	}
}

// WithJSONStringPreload sets a JSON string of models to preload at startup.
func WithJSONStringPreload(configFile string) AppOption {
	return func(o *Option) {
		o.preloadJSONModels = configFile
	}
}

// WithConfigFile sets the main configuration file path.
func WithConfigFile(configFile string) AppOption {
	return func(o *Option) {
		o.configFile = configFile
	}
}

// WithModelLoader sets the model loader instance.
func WithModelLoader(loader *model.ModelLoader) AppOption {
	return func(o *Option) {
		o.loader = loader
	}
}

// WithUploadLimitMB sets the maximum request body size, in megabytes.
func WithUploadLimitMB(limit int) AppOption {
	return func(o *Option) {
		o.uploadLimitMB = limit
	}
}

// WithThreads sets the default number of backend threads.
func WithThreads(threads int) AppOption {
	return func(o *Option) {
		o.threads = threads
	}
}

// WithContextSize sets the default model context size.
func WithContextSize(ctxSize int) AppOption {
	return func(o *Option) {
		o.ctxSize = ctxSize
	}
}

// WithF16 enables or disables f16 mode by default.
func WithF16(f16 bool) AppOption {
	return func(o *Option) {
		o.f16 = f16
	}
}

// WithDebug enables or disables debug logging.
func WithDebug(debug bool) AppOption {
	return func(o *Option) {
		o.debug = debug
	}
}

// WithDisableMessage enables or disables the startup message.
func WithDisableMessage(disableMessage bool) AppOption {
	return func(o *Option) {
		o.disableMessage = disableMessage
	}
}

// WithAudioDir sets the directory generated audio files are written to.
func WithAudioDir(audioDir string) AppOption {
	return func(o *Option) {
		o.audioDir = audioDir
	}
}

// WithImageDir sets the directory generated images are written to.
func WithImageDir(imageDir string) AppOption {
	return func(o *Option) {
		o.imageDir = imageDir
	}
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user