Compare commits
1173 Commits
master
...
fix/electr
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a37e3aa39b | ||
|
|
b313ce9bcb | ||
|
|
1c0a3de4c0 | ||
|
|
34d92b3ee4 | ||
|
|
5c3d1eb541 | ||
|
|
6e8d41e042 | ||
|
|
2dba3b5e57 | ||
|
|
5e40c7bc2d | ||
|
|
d8c716f99a | ||
|
|
87524eb153 | ||
|
|
f6870ff733 | ||
|
|
bf701498de | ||
|
|
66f4369813 | ||
|
|
7c22088807 | ||
|
|
50d5199caa | ||
|
|
dc514cf5ef | ||
|
|
1c8392561d | ||
|
|
f10ad8ed69 | ||
|
|
26cdebff5a | ||
|
|
84e530b54a | ||
|
|
4e552654b9 | ||
|
|
63acfefe30 | ||
|
|
d22657fac0 | ||
|
|
e160922872 | ||
|
|
709411d533 | ||
|
|
35c21c4fdf | ||
|
|
05f52fdab5 | ||
|
|
0ce9434a09 | ||
|
|
83767cbacc | ||
|
|
070a7ffeac | ||
|
|
a4e04d0f93 | ||
|
|
848f36c371 | ||
|
|
edf43d681f | ||
|
|
877f94990e | ||
|
|
7e8dcd3e5a | ||
|
|
41b3db39b7 | ||
|
|
9fd73601c8 | ||
|
|
2e2045b53d | ||
|
|
69f6c846bf | ||
|
|
1f2a44bee2 | ||
|
|
25e0465d12 | ||
|
|
7ed4c32359 | ||
|
|
56ff9711ee | ||
|
|
2ee9efb82e | ||
|
|
f9b9e95b15 | ||
|
|
35811c837e | ||
|
|
fe96edf53f | ||
|
|
5ac14e28b2 | ||
|
|
7b7be365c0 | ||
|
|
7c48d364ab | ||
|
|
82b715304f | ||
|
|
d595e2f692 | ||
|
|
29fc58f63a | ||
|
|
312fe8121a | ||
|
|
5232e3e1d9 | ||
|
|
ea9cda1083 | ||
|
|
b31491bf38 | ||
|
|
3a8e7d6edf | ||
|
|
c09b1c0aaa | ||
|
|
2f19d5ec49 | ||
|
|
8dc4f3722b | ||
|
|
9c2f8efa70 | ||
|
|
b3887d7ddf | ||
|
|
4b45802bf7 | ||
|
|
26cda3dd6b | ||
|
|
860a646c70 | ||
|
|
02cf1a558a | ||
|
|
289b406d0d | ||
|
|
f2e23b35fc | ||
|
|
c697497e34 | ||
|
|
8ac7879d43 | ||
|
|
535f23b5c3 | ||
|
|
8a5e17d6f3 | ||
|
|
d286ecf820 | ||
|
|
e1fd9296ee | ||
|
|
4dbfd171f6 | ||
|
|
5d38843f38 | ||
|
|
cc53ba6685 | ||
|
|
1a371c9ca9 | ||
|
|
ea629a31f2 | ||
|
|
f6fdf0545b | ||
|
|
6bdcb4417c | ||
|
|
4df1487e28 | ||
|
|
37d2244070 | ||
|
|
e7c388b695 | ||
|
|
135e4ed730 | ||
|
|
978cbdc7a1 | ||
|
|
403fd2dc2b | ||
|
|
0947a928a5 | ||
|
|
7ba6e1ff83 | ||
|
|
5471be7c14 | ||
|
|
9eb0b2c2f3 | ||
|
|
5b59aee016 | ||
|
|
1162df77f2 | ||
|
|
3702d224e9 | ||
|
|
429ea06d69 | ||
|
|
0140d6920c | ||
|
|
f4997cef91 | ||
|
|
09d7684cfa | ||
|
|
696a0c5432 | ||
|
|
3141e9a585 | ||
|
|
a2d842fae2 | ||
|
|
8579ae5c07 | ||
|
|
f3999ab476 | ||
|
|
a7a3c99e80 | ||
|
|
f2ba33fce8 | ||
|
|
eefeb347b3 | ||
|
|
1c0e5d957a | ||
|
|
1bdf4abd1c | ||
|
|
e2ca22052f | ||
|
|
d689dd7e8f | ||
|
|
62fdddc690 | ||
|
|
d214ebfa1a | ||
|
|
cc13e88c8b | ||
|
|
21616689f8 | ||
|
|
ad6a10a903 | ||
|
|
426fee3568 | ||
|
|
c89fc6efa9 | ||
|
|
46d087eb8f | ||
|
|
33eca48c6f | ||
|
|
a6f25d8796 | ||
|
|
2749c77625 | ||
|
|
da2d0aee08 | ||
|
|
b0dab4ee1b | ||
|
|
426b3b01c6 | ||
|
|
4f66ecbb1d | ||
|
|
93963566d6 | ||
|
|
ede33ff9ed | ||
|
|
07ba229a46 | ||
|
|
02f6a5cb98 | ||
|
|
e8e7e787f4 | ||
|
|
39e7c4fa79 | ||
|
|
4059640436 | ||
|
|
32c6cdb5c0 | ||
|
|
8d41442fa1 | ||
|
|
49efa16418 | ||
|
|
e77d9cf8fb | ||
|
|
9e8672c435 | ||
|
|
0dcad871ab | ||
|
|
03875c6aa2 | ||
|
|
a1306384b9 | ||
|
|
312f30f25f | ||
|
|
dcfb23d711 | ||
|
|
bb8979d7a1 | ||
|
|
b89a3cd6d3 | ||
|
|
7c7ff66751 | ||
|
|
d56ad644af | ||
|
|
8c1366dc00 | ||
|
|
a300878f39 | ||
|
|
776e15e381 | ||
|
|
8f4a400b60 | ||
|
|
1e4fc3c287 | ||
|
|
0a0433bae6 | ||
|
|
0d96fcd352 | ||
|
|
6c7679c464 | ||
|
|
316e38546c | ||
|
|
d4c24f6a83 | ||
|
|
0aabe5112a | ||
|
|
fbe7a7ed35 | ||
|
|
10b7838bfa | ||
|
|
a8958ca728 | ||
|
|
b22dc4875e | ||
|
|
5a84b3a728 | ||
|
|
ffb05e7392 | ||
|
|
0453519e1c | ||
|
|
c1a230515e | ||
|
|
a62a55867d | ||
|
|
deaa6f670f | ||
|
|
b9ae04667d | ||
|
|
d80b535f5b | ||
|
|
ff8017a1f6 | ||
|
|
631ec4baf4 | ||
|
|
31ca8c2fed | ||
|
|
e946854758 | ||
|
|
1707f974e6 | ||
|
|
0eab71eb73 | ||
|
|
b21a1a91ac | ||
|
|
f4df039621 | ||
|
|
31426d66db | ||
|
|
ca2eb0d466 | ||
|
|
c37ef88d5e | ||
|
|
bdb873e743 | ||
|
|
27341a067b | ||
|
|
7382fda5e4 | ||
|
|
77bf7ff75a | ||
|
|
5cac4f873f | ||
|
|
1697cc276d | ||
|
|
fd5187b754 | ||
|
|
21b91dfea4 | ||
|
|
4443406311 | ||
|
|
04653366b2 | ||
|
|
1e6d4f17f5 | ||
|
|
53c541547d | ||
|
|
61398eb900 | ||
|
|
f18fac5b26 | ||
|
|
9a0a1155ba | ||
|
|
b316a351cf | ||
|
|
91d8abf723 | ||
|
|
4d75e84503 | ||
|
|
b171955335 | ||
|
|
a56479f15b | ||
|
|
b4087d547b | ||
|
|
ca99948993 | ||
|
|
d59b2cb13e | ||
|
|
0633ed6ce3 | ||
|
|
c4b79e98a1 | ||
|
|
ad2010ef5d | ||
|
|
4635ece80b | ||
|
|
0b5665ad9b | ||
|
|
49384b1678 | ||
|
|
b17e788211 | ||
|
|
d38a22bc3f | ||
|
|
7000b45e53 | ||
|
|
9b64c44f78 | ||
|
|
374990b351 | ||
|
|
af53996604 | ||
|
|
a746b9a3ea | ||
|
|
dd946c21c8 | ||
|
|
b47efad471 | ||
|
|
d941dcf1bf | ||
|
|
91a50d2478 | ||
|
|
e95f731d05 | ||
|
|
665f7dfb5a | ||
|
|
6a9d5a1f1d | ||
|
|
8d238c1332 | ||
|
|
182d7742d1 | ||
|
|
e99ea1a6a3 | ||
|
|
dcba116a99 | ||
|
|
5ad5a86c4f | ||
|
|
8179a5ae87 | ||
|
|
84d6f8c472 | ||
|
|
a8e1187ff3 | ||
|
|
ea7ad86a67 | ||
|
|
99f6b6664f | ||
|
|
e33af55436 | ||
|
|
8d6a425e51 | ||
|
|
acbe9d1e7c | ||
|
|
0dfdb578c1 | ||
|
|
5a42c17548 | ||
|
|
9abe9119c5 | ||
|
|
d4e13f775a | ||
|
|
4b16ac9219 | ||
|
|
7e68b84dd3 | ||
|
|
b8e5707d18 | ||
|
|
b62aee65fb | ||
|
|
e16bc37017 | ||
|
|
b1c501fa59 | ||
|
|
64a2a271c7 | ||
|
|
081d5b99cb | ||
|
|
704e481b79 | ||
|
|
036d69daa2 | ||
|
|
ab3108c248 | ||
|
|
36b047179d | ||
|
|
0cc3144db5 | ||
|
|
b0f2832b14 | ||
|
|
362a81a3e5 | ||
|
|
8441851dbc | ||
|
|
f7a3c4b3cb | ||
|
|
676eb0b201 | ||
|
|
b9ddd4a867 | ||
|
|
93010bf75b | ||
|
|
c2380f8457 | ||
|
|
dbd04574db | ||
|
|
86747c6e74 | ||
|
|
44d168e28e | ||
|
|
c695f1812e | ||
|
|
8c235ee7af | ||
|
|
f01a864481 | ||
|
|
13feef9cee | ||
|
|
ee780af047 | ||
|
|
61ee2a4664 | ||
|
|
aa85ee47cf | ||
|
|
e85cbd6f79 | ||
|
|
6a21ae6026 | ||
|
|
aa0e58aa3c | ||
|
|
f547e4d966 | ||
|
|
64ece84706 | ||
|
|
3082cee0ae | ||
|
|
3726d82f08 | ||
|
|
3f6e192b14 | ||
|
|
13790ac2b9 | ||
|
|
b145093488 | ||
|
|
21eafc5980 | ||
|
|
1a52cc078c | ||
|
|
2cc7589d1a | ||
|
|
d8f6820a4c | ||
|
|
f7a2fa76e5 | ||
|
|
75078df36e | ||
|
|
7db3a57d04 | ||
|
|
9fb0e30dac | ||
|
|
3fb11acade | ||
|
|
2052c720cc | ||
|
|
888bd4101d | ||
|
|
464f8e664b | ||
|
|
1821065159 | ||
|
|
73504154da | ||
|
|
f3205da359 | ||
|
|
cc9ff1820b | ||
|
|
b01462d7a9 | ||
|
|
87ae1c8ca6 | ||
|
|
0787a9cebe | ||
|
|
617135fcda | ||
|
|
ddf0df569e | ||
|
|
fd3944eaaa | ||
|
|
0d988e21c6 | ||
|
|
12870dfe1f | ||
|
|
51627f7f67 | ||
|
|
a0bba8ee1b | ||
|
|
5dede160a2 | ||
|
|
09d32dcd79 | ||
|
|
f7b6295e9b | ||
|
|
c479ea7b10 | ||
|
|
dd0e504db2 | ||
|
|
0226dac67f | ||
|
|
ba42935923 | ||
|
|
890b5b86a9 | ||
|
|
32172f56da | ||
|
|
058ce1d1d7 | ||
|
|
3747f190ad | ||
|
|
05b14f56f6 | ||
|
|
06f3470701 | ||
|
|
6c5c3927fb | ||
|
|
886d48ccbe | ||
|
|
3f81157156 | ||
|
|
550a856670 | ||
|
|
05407c3cb4 | ||
|
|
6a5b05fc75 | ||
|
|
561c4765e1 | ||
|
|
d024877ba8 | ||
|
|
fa2f902259 | ||
|
|
c5d84ebc59 | ||
|
|
deb13569df | ||
|
|
d800b1caf5 | ||
|
|
fe3556da58 | ||
|
|
237845f490 | ||
|
|
0e9bd0589b | ||
|
|
9bc0f24718 | ||
|
|
34adc04c7d | ||
|
|
9294d38eba | ||
|
|
c3507e8427 | ||
|
|
c1a400a859 | ||
|
|
12578d78ba | ||
|
|
4f222b953f | ||
|
|
c66b05194d | ||
|
|
404c43bbe3 | ||
|
|
b0028a05fb | ||
|
|
ccdbaf61ec | ||
|
|
dac7e06531 | ||
|
|
86c60909d0 | ||
|
|
ea938c10a3 | ||
|
|
528aed53e0 | ||
|
|
3e7eeb3447 | ||
|
|
b0b747e9bc | ||
|
|
b07e2f52a2 | ||
|
|
0ffd395745 | ||
|
|
c691820fa8 | ||
|
|
49b447982f | ||
|
|
5c0d66f967 | ||
|
|
ceb3aae654 | ||
|
|
ad58bdf99e | ||
|
|
4f87e96b01 | ||
|
|
f8fd241869 | ||
|
|
f7167ea485 | ||
|
|
5cac79cbbe | ||
|
|
e4fc97f5f2 | ||
|
|
b36a8d41a6 | ||
|
|
364ab048ac | ||
|
|
1ab6d2db41 | ||
|
|
0e54a64dfd | ||
|
|
feabd7e488 | ||
|
|
2630486ca8 | ||
|
|
37b19365c8 | ||
|
|
69fbad0381 | ||
|
|
6d25a060c1 | ||
|
|
afe615162a | ||
|
|
3b2c601e6e | ||
|
|
49a63d5e30 | ||
|
|
7c8e4d115a | ||
|
|
479b7a9043 | ||
|
|
be0f52fce7 | ||
|
|
6d8beb80be | ||
|
|
8a944fcdb0 | ||
|
|
a95f55c82b | ||
|
|
b64cae9d3d | ||
|
|
efcc4928ea | ||
|
|
8724945742 | ||
|
|
bf660f0b4c | ||
|
|
68c61564c6 | ||
|
|
82692b38d9 | ||
|
|
3cf66c281a | ||
|
|
3236c92c93 | ||
|
|
a3f22961ce | ||
|
|
758072cf6e | ||
|
|
1431e9e864 | ||
|
|
0605f65ca8 | ||
|
|
62e1a123a0 | ||
|
|
30bd2bac71 | ||
|
|
ac27788a3b | ||
|
|
1da53f154c | ||
|
|
898e102510 | ||
|
|
4756d70d95 | ||
|
|
4e70abf407 | ||
|
|
4043056332 | ||
|
|
9ef617289f | ||
|
|
20d4e1599a | ||
|
|
305d9ccb7c | ||
|
|
0683467bc1 | ||
|
|
f3c82cb13a | ||
|
|
f83c9732ca | ||
|
|
fb124b61d4 | ||
|
|
28eaef1782 | ||
|
|
1ecace23a7 | ||
|
|
0129b5da06 | ||
|
|
ddfbf3d9f8 | ||
|
|
08ce6fefd8 | ||
|
|
db3a16c86a | ||
|
|
bdcb8b6916 | ||
|
|
2af737518b | ||
|
|
9b0aa53adf | ||
|
|
6091553d12 | ||
|
|
5d181670ac | ||
|
|
05d36862c5 | ||
|
|
49a520df3e | ||
|
|
403e1e3e18 | ||
|
|
a1d51b6454 | ||
|
|
3bcce8b6fa | ||
|
|
50372c116a | ||
|
|
bcaf4c4156 | ||
|
|
250a2247cd | ||
|
|
8eeea3fca1 | ||
|
|
bfacba20cb | ||
|
|
84b43ba4b2 | ||
|
|
6fa9dd013c | ||
|
|
7ea54caff5 | ||
|
|
11498ab099 | ||
|
|
339cff20f8 | ||
|
|
236706a4ac | ||
|
|
9095a54de3 | ||
|
|
36a490388c | ||
|
|
0ccff1cd12 | ||
|
|
f677367e4b | ||
|
|
f90ac82d4c | ||
|
|
467fea87c6 | ||
|
|
52e8fd9cc3 | ||
|
|
ddb88bb021 | ||
|
|
5bc98842b7 | ||
|
|
10b12ba2cb | ||
|
|
ade0e91898 | ||
|
|
1d6afe792b | ||
|
|
c3dbd9a7a7 | ||
|
|
d9dba0c76f | ||
|
|
5b6348d103 | ||
|
|
9a16098f49 | ||
|
|
c56c33d477 | ||
|
|
cc80d51388 | ||
|
|
f1009c43a3 | ||
|
|
762ca25e19 | ||
|
|
cd3c6375d7 | ||
|
|
f594a233b0 | ||
|
|
8583f59066 | ||
|
|
0b72b45d90 | ||
|
|
c4458a3d5d | ||
|
|
39daa626b4 | ||
|
|
d9d9bedf3e | ||
|
|
df82a0ce64 | ||
|
|
02e50f3b39 | ||
|
|
09119d46ec | ||
|
|
812c2f62f8 | ||
|
|
c2b361d093 | ||
|
|
c3a6e8acfe | ||
|
|
7d6d90174f | ||
|
|
979b5fa791 | ||
|
|
7672ca9044 | ||
|
|
4f32820cde | ||
|
|
630a52b397 | ||
|
|
fd1a9b7a07 | ||
|
|
2d5c0142d2 | ||
|
|
7058b15cc4 | ||
|
|
4ce4ec5f34 | ||
|
|
728782d369 | ||
|
|
408616b34e | ||
|
|
fe688d6b1a | ||
|
|
b0a3fbd338 | ||
|
|
28b9d81464 | ||
|
|
11c34fa7e6 | ||
|
|
da54f8f85f | ||
|
|
9ffe9c381b | ||
|
|
d943f9c28c | ||
|
|
f2c7498345 | ||
|
|
d5cea40fed | ||
|
|
61d538b6d6 | ||
|
|
e1b571140f | ||
|
|
7f645e1a5c | ||
|
|
c3ff4997fd | ||
|
|
c07314bd92 | ||
|
|
b687678d9c | ||
|
|
d09bf17245 | ||
|
|
e7b4cdc91f | ||
|
|
e663ad274e | ||
|
|
1b8d747e1f | ||
|
|
579f0f3d9a | ||
|
|
f7de9cda3a | ||
|
|
52b5cf5c7e | ||
|
|
11b08d2184 | ||
|
|
470af7051c | ||
|
|
17921f24fc | ||
|
|
9a6d7e03c4 | ||
|
|
af314611af | ||
|
|
f4d06a3a73 | ||
|
|
9b9c36e84d | ||
|
|
6a24f25f91 | ||
|
|
dfeb891821 | ||
|
|
e057e17de5 | ||
|
|
276c470c1f | ||
|
|
d89e7ab415 | ||
|
|
df9ebcb3d2 | ||
|
|
5d248bf6bf | ||
|
|
f9330750f0 | ||
|
|
0321741b79 | ||
|
|
9e4ecc0ee6 | ||
|
|
bd0a12ad3c | ||
|
|
a30869112e | ||
|
|
4f32b6d8e4 | ||
|
|
991955ddce | ||
|
|
4af196ab04 | ||
|
|
12018b4a03 | ||
|
|
2d91536f92 | ||
|
|
11e68485e9 | ||
|
|
d2d979738e | ||
|
|
32150c85fb | ||
|
|
e37a53c690 | ||
|
|
4d195be713 | ||
|
|
3825eca3dc | ||
|
|
352adb5a82 | ||
|
|
9784e3bfc1 | ||
|
|
5aac1af065 | ||
|
|
aa319e71b0 | ||
|
|
20ed60d2a0 | ||
|
|
f6278373cb | ||
|
|
3f70cbbf9b | ||
|
|
0253752bc9 | ||
|
|
e3e648eea7 | ||
|
|
5cc482ebe1 | ||
|
|
cb1cd14cbb | ||
|
|
83f7399c72 | ||
|
|
8b7b0b0776 | ||
|
|
dfe9b3d02d | ||
|
|
a25ca6524f | ||
|
|
6500f048bc | ||
|
|
cc0bc49b2f | ||
|
|
6e444e0311 | ||
|
|
31b328f754 | ||
|
|
5ce11b94e0 | ||
|
|
db16188c74 | ||
|
|
376d965088 | ||
|
|
2f1380a03f | ||
|
|
9ecb8dffa6 | ||
|
|
2044e828de | ||
|
|
b287b2420a | ||
|
|
3aa1eb1fd5 | ||
|
|
74c8cae95d | ||
|
|
5ac9c3e955 | ||
|
|
d5fe47acff | ||
|
|
8c0be20422 | ||
|
|
247d89e39e | ||
|
|
d2b0338afd | ||
|
|
e854238a39 | ||
|
|
ab325e5dad | ||
|
|
cf59171937 | ||
|
|
a89f5c25be | ||
|
|
42471f4d3e | ||
|
|
be8f7efe82 | ||
|
|
5ee6024914 | ||
|
|
4cc156611c | ||
|
|
f1adc79f38 | ||
|
|
b9d9798774 | ||
|
|
bb25e5fbf6 | ||
|
|
5cced82e3f | ||
|
|
1484b238e5 | ||
|
|
c6e1324f84 | ||
|
|
69c3ac7355 | ||
|
|
3ae2e63ac5 | ||
|
|
57fd23c381 | ||
|
|
a029c720a6 | ||
|
|
3341608d52 | ||
|
|
aa401f29c3 | ||
|
|
1509cc5b69 | ||
|
|
62b719c447 | ||
|
|
4c0fa1c1d4 | ||
|
|
d9cdaa0757 | ||
|
|
51ad52d0e8 | ||
|
|
6ed02e5351 | ||
|
|
d3b81ce68b | ||
|
|
87fa327e0d | ||
|
|
f89e99b7f9 | ||
|
|
61e738287b | ||
|
|
2a865ac713 | ||
|
|
32205fb038 | ||
|
|
5ca656be07 | ||
|
|
58cf5b4cf3 | ||
|
|
cb46084111 | ||
|
|
5d2472bd56 | ||
|
|
664dcdcb82 | ||
|
|
48cba9e076 | ||
|
|
88f7d842e5 | ||
|
|
424f67d948 | ||
|
|
3b8fbcaa38 | ||
|
|
e5aacec1a5 | ||
|
|
bebb881b5b | ||
|
|
40de96ed77 | ||
|
|
f0a5bbdb1b | ||
|
|
46b50cbb49 | ||
|
|
a88d37f3cb | ||
|
|
f8eef67a03 | ||
|
|
6fd69f05e0 | ||
|
|
6b89446b46 | ||
|
|
da2bb5825f | ||
|
|
6a0b367535 | ||
|
|
733598a039 | ||
|
|
006eb4b9c2 | ||
|
|
ea9b0e7b68 | ||
|
|
84d1e43c4b | ||
|
|
ab28b02bb7 | ||
|
|
1d622b3b72 | ||
|
|
4fe18d3548 | ||
|
|
1177a83e4a | ||
|
|
9125651775 | ||
|
|
b2fc063d88 | ||
|
|
1872078ce8 | ||
|
|
13c5fa581f | ||
|
|
dea0d5e447 | ||
|
|
1ac510283f | ||
|
|
857cb3c054 | ||
|
|
7b241ec739 | ||
|
|
ab6c4cfcab | ||
|
|
0f321994c5 | ||
|
|
0373582415 | ||
|
|
0a357064d9 | ||
|
|
824ce19622 | ||
|
|
4fb784e7e1 | ||
|
|
9015eef883 | ||
|
|
4a6dc2dfc6 | ||
|
|
2a4902c3a5 | ||
|
|
fae10cd5c4 | ||
|
|
7470bded5d | ||
|
|
ad432b9981 | ||
|
|
6716391502 | ||
|
|
955c572c02 | ||
|
|
0a42329ca5 | ||
|
|
a381963623 | ||
|
|
479b6da4ce | ||
|
|
65967aedde | ||
|
|
5a2f1902c4 | ||
|
|
43e3e9b897 | ||
|
|
7b1c63cf27 | ||
|
|
4ddc4fe1e7 | ||
|
|
e620a63fee | ||
|
|
0d695746e6 | ||
|
|
f0d091d348 | ||
|
|
a2f1f09364 | ||
|
|
79f86223a5 | ||
|
|
e3ffe57162 | ||
|
|
c275663c8f | ||
|
|
9e298f8e90 | ||
|
|
cd70a18fa0 | ||
|
|
b00066b7e3 | ||
|
|
6662601a6c | ||
|
|
e57173333f | ||
|
|
9d0ea63ace | ||
|
|
da1f805cf7 | ||
|
|
ca8ef10dcf | ||
|
|
48e084dd58 | ||
|
|
6ce4192b31 | ||
|
|
1b82597eac | ||
|
|
81387f9896 | ||
|
|
b721754ead | ||
|
|
769538ad23 | ||
|
|
842c2a93fc | ||
|
|
dd88bb254e | ||
|
|
055e9b7b31 | ||
|
|
f3f44c48f4 | ||
|
|
8b5bbf9456 | ||
|
|
db47f569ce | ||
|
|
fa8a98113e | ||
|
|
32dc3a460a | ||
|
|
1a0bb175f2 | ||
|
|
684503f5fc | ||
|
|
aa4c7f0d4c | ||
|
|
7d65b290e7 | ||
|
|
b238e8fd5e | ||
|
|
9798b34f8d | ||
|
|
2e06953550 | ||
|
|
b6290e041c | ||
|
|
55cabbb70c | ||
|
|
467888b872 | ||
|
|
e0c1b67929 | ||
|
|
a5cd875f1b | ||
|
|
6da4a321b8 | ||
|
|
211bff082b | ||
|
|
498fca9d08 | ||
|
|
1a0372709d | ||
|
|
8004260e4d | ||
|
|
281236a94d | ||
|
|
5981e50514 | ||
|
|
b8de8ce8b9 | ||
|
|
a5348b60c0 | ||
|
|
a09f146145 | ||
|
|
cb67aac265 | ||
|
|
d41936ac05 | ||
|
|
80ce59fa6c | ||
|
|
e8310a7841 | ||
|
|
0d68992fb7 | ||
|
|
5619aac366 | ||
|
|
ff1f2d6c1a | ||
|
|
1ebaa673b5 | ||
|
|
eed4f0651d | ||
|
|
60b73b6cd3 | ||
|
|
af731149b5 | ||
|
|
7dbdd030a1 | ||
|
|
a66866c367 | ||
|
|
b0f3c13db1 | ||
|
|
a579cdfbec | ||
|
|
dcd712d825 | ||
|
|
dcb3aaab91 | ||
|
|
2156763c6c | ||
|
|
15457cc368 | ||
|
|
4631c344aa | ||
|
|
df6f7455e7 | ||
|
|
ffdf66b77e | ||
|
|
c8980a047a | ||
|
|
0683ca9416 | ||
|
|
7307aab103 | ||
|
|
276ff7bd42 | ||
|
|
e2a08f2536 | ||
|
|
c1f255af96 | ||
|
|
8bf7d042e5 | ||
|
|
8a1dea306e | ||
|
|
ec22b33de6 | ||
|
|
da899a3046 | ||
|
|
7f84b5508d | ||
|
|
79c4277506 | ||
|
|
943c464201 | ||
|
|
ed80ffb59a | ||
|
|
e356c42b4f | ||
|
|
93f052778f | ||
|
|
dd94cac1bd | ||
|
|
80293cf976 | ||
|
|
b2462585b7 | ||
|
|
c0e78ba2bd | ||
|
|
d1c44f71bc | ||
|
|
b63dfb8985 | ||
|
|
5ecea422c7 | ||
|
|
860e8b2442 | ||
|
|
0aa2994423 | ||
|
|
dde8b82ea0 | ||
|
|
96de49d57b | ||
|
|
e3e4878ade | ||
|
|
cec99ffacb | ||
|
|
bfe3e4295d | ||
|
|
6e8b95d709 | ||
|
|
2f250bfbf7 | ||
|
|
90c82dc6b1 | ||
|
|
21e13c8ae5 | ||
|
|
bde9d45ead | ||
|
|
96d941f83a | ||
|
|
d1eccd4928 | ||
|
|
cd26886f15 | ||
|
|
34852919da | ||
|
|
77c6aba24c | ||
|
|
a258741e2f | ||
|
|
7f3b7302b1 | ||
|
|
4fa8206332 | ||
|
|
5f29e96187 | ||
|
|
6186b34903 | ||
|
|
970ef57f21 | ||
|
|
8180e7dc82 | ||
|
|
d63a6a8ceb | ||
|
|
6ed7248d65 | ||
|
|
992ecd9aee | ||
|
|
1fd0645fe3 | ||
|
|
b5292f54aa | ||
|
|
48cce73f88 | ||
|
|
e7e513d7ec | ||
|
|
4f8c9d2066 | ||
|
|
4196fd32a4 | ||
|
|
ffaf927690 | ||
|
|
6ce47af3d6 | ||
|
|
6aa2164d16 | ||
|
|
a851d1bd2f | ||
|
|
fb3b7b8edf | ||
|
|
a9bd880a4f | ||
|
|
7aee6d9dc7 | ||
|
|
b27b44829a | ||
|
|
b355956400 | ||
|
|
779b193de6 | ||
|
|
dedb59a4ef | ||
|
|
36d5d2f3f8 | ||
|
|
1e70c23c11 | ||
|
|
7bea2b89d4 | ||
|
|
cce80971a3 | ||
|
|
c54a30f68c | ||
|
|
e92a976226 | ||
|
|
a9e592828e | ||
|
|
26b2a70069 | ||
|
|
f836291200 | ||
|
|
3ac53297ca | ||
|
|
7b20697d93 | ||
|
|
eeaa010f1a | ||
|
|
6d4291d958 | ||
|
|
26ae6f71a1 | ||
|
|
3afbad1e89 | ||
|
|
7bf3efcf88 | ||
|
|
bf81986abc | ||
|
|
b081077b9a | ||
|
|
c9201ccfd0 | ||
|
|
0c8529fc47 | ||
|
|
0f9c86eb6d | ||
|
|
64d317ac2a | ||
|
|
bbe0e8a18c | ||
|
|
a570df7f34 | ||
|
|
102af29f97 | ||
|
|
51bf76448f | ||
|
|
f220973192 | ||
|
|
8b097ec965 | ||
|
|
9b0e70b2f2 | ||
|
|
d9b3d6f3e5 | ||
|
|
9fbab15222 | ||
|
|
829dd7af77 | ||
|
|
e07c4d29cd | ||
|
|
b0c86b47a4 | ||
|
|
44bcb4cd6b | ||
|
|
07848ac1c4 | ||
|
|
caae81b3c9 | ||
|
|
63fcd7dd54 | ||
|
|
ec16c387b5 | ||
|
|
3c3e4d3d9e | ||
|
|
390373dbcb | ||
|
|
a38d2c5676 | ||
|
|
7ca24775ac | ||
|
|
dfc8c239ef | ||
|
|
13d53f388e | ||
|
|
fbb3c6aee0 | ||
|
|
ff64980599 | ||
|
|
c993ee80c7 | ||
|
|
d3af83db63 | ||
|
|
e0f6f24a5e | ||
|
|
b25e4018ed | ||
|
|
d41ddf74ea | ||
|
|
83dfb38fe5 | ||
|
|
8d9222ebd8 | ||
|
|
c27fd2c6b3 | ||
|
|
4815d06bf9 | ||
|
|
f4420202c3 | ||
|
|
a567c7cc8d | ||
|
|
e071a9722d | ||
|
|
e846604a13 | ||
|
|
1e8c09d34a | ||
|
|
ae0159bad6 | ||
|
|
7ef075e6c9 | ||
|
|
e9352b793e | ||
|
|
762e6082ec | ||
|
|
feb1d46f41 | ||
|
|
6a228944ae | ||
|
|
8888dc6bc5 | ||
|
|
f0774d75f7 | ||
|
|
2958ff417f | ||
|
|
134850733d | ||
|
|
410ece8458 | ||
|
|
1ad2d71c9b | ||
|
|
fd86e67d67 | ||
|
|
d8a1d1d14c | ||
|
|
1fcf2df28b | ||
|
|
5ac885de7b | ||
|
|
c90853ba99 | ||
|
|
90289ccc91 | ||
|
|
067eb8a188 | ||
|
|
f47af0a850 | ||
|
|
66ee2eb17e | ||
|
|
56d4b7c25e | ||
|
|
03bf3f105d | ||
|
|
c6b9469b10 | ||
|
|
ac036a3525 | ||
|
|
6064890415 | ||
|
|
4eddc70ae4 | ||
|
|
21696e1956 | ||
|
|
4e9752f5da | ||
|
|
cc8aac5918 | ||
|
|
16961bab84 | ||
|
|
42f280abf4 | ||
|
|
a9e8526d67 | ||
|
|
11b9fe759f | ||
|
|
de6f572051 | ||
|
|
1410ca0be5 | ||
|
|
da62bd172f | ||
|
|
584af05020 | ||
|
|
938d900106 | ||
|
|
c692ff98c1 | ||
|
|
82bc66bc9b | ||
|
|
856afe8780 | ||
|
|
c52603305c | ||
|
|
c53e023b81 | ||
|
|
3d86fde6f2 | ||
|
|
163f2fb524 | ||
|
|
0b172c4554 | ||
|
|
9769822dc8 | ||
|
|
d9a81409fb | ||
|
|
7d07e46798 | ||
|
|
47ad3d010b | ||
|
|
17c606205b | ||
|
|
b1a9fbe894 | ||
|
|
4e7c3dcc13 | ||
|
|
cbbce330bb | ||
|
|
604f64f3e7 | ||
|
|
e3c9bd9189 | ||
|
|
53829623fa | ||
|
|
7bfd17e69d | ||
|
|
7849d10a69 | ||
|
|
1189ff59b8 | ||
|
|
fe48240e41 | ||
|
|
84e3e02e0a | ||
|
|
b1327ec3f1 | ||
|
|
e5d5a49857 | ||
|
|
efdd40787c | ||
|
|
cfe1e578bf | ||
|
|
268b01fcf0 | ||
|
|
0134a11697 | ||
|
|
a28b213334 | ||
|
|
fcc3d0e93a | ||
|
|
076444ce50 | ||
|
|
49b4efc6c4 | ||
|
|
629253f63e | ||
|
|
495d7717c7 | ||
|
|
b50e66731a | ||
|
|
7de007dbf9 | ||
|
|
5e91f074a8 | ||
|
|
1f257d7bf8 | ||
|
|
3b6786d0d7 | ||
|
|
38585a8e00 | ||
|
|
006a4db7a0 | ||
|
|
9e7f3cbe81 | ||
|
|
c468fea7db | ||
|
|
c2fd20cf25 | ||
|
|
667c7a4c2f | ||
|
|
26d2de7db5 | ||
|
|
14f3c2678f | ||
|
|
bf48bd9cec | ||
|
|
d579fb9c3c | ||
|
|
976e50a1cb | ||
|
|
346f58a6a1 | ||
|
|
d5cd65bc4f | ||
|
|
2ecfa0d269 | ||
|
|
1941906169 | ||
|
|
883f92409e | ||
|
|
6fdeea84f7 | ||
|
|
343bfc02cb | ||
|
|
701f293785 | ||
|
|
3aed919c47 | ||
|
|
83d5421368 | ||
|
|
7ffb91105b | ||
|
|
aa743786c7 | ||
|
|
aac87ca437 | ||
|
|
931cf40636 | ||
|
|
864684a5d0 | ||
|
|
f386f50456 | ||
|
|
d4e5cb73e3 | ||
|
|
afc49486f3 | ||
|
|
8bbf256fa9 | ||
|
|
db175c3690 | ||
|
|
9a407690b6 | ||
|
|
fa6790b35b | ||
|
|
20b9ff4602 | ||
|
|
b38fad2035 | ||
|
|
6a057bf7d7 | ||
|
|
a797b5456c | ||
|
|
97bd12c26a | ||
|
|
6f34f4e2c8 | ||
|
|
479df22ea7 | ||
|
|
dc7cf36a0f | ||
|
|
cd4d816a83 | ||
|
|
3a38c80c05 | ||
|
|
bfe87b1c55 | ||
|
|
b5ec2dce88 | ||
|
|
f750db1b6d | ||
|
|
a43cfba154 | ||
|
|
6bf8578d75 | ||
|
|
3bf5e34232 | ||
|
|
c293561be2 | ||
|
|
cae645707f | ||
|
|
0a7931e73e | ||
|
|
8541aa1bd3 | ||
|
|
9a9b73e3db | ||
|
|
9ed863584a | ||
|
|
83ef0a3cf6 | ||
|
|
ffe340f849 | ||
|
|
b4df1dc30d | ||
|
|
523fecac0f | ||
|
|
1b12f60e05 | ||
|
|
788437c15c | ||
|
|
0e14c199af | ||
|
|
ed67184c7a | ||
|
|
2dc9d081e4 | ||
|
|
a066eaaadc | ||
|
|
51073af2d7 | ||
|
|
f00db63598 | ||
|
|
0935e5620e | ||
|
|
79c3c6ac50 | ||
|
|
46c9f0fb45 | ||
|
|
09f401183d | ||
|
|
4893ffebad | ||
|
|
817f783881 | ||
|
|
b545d17ed0 | ||
|
|
432ba603c2 | ||
|
|
eb904c3625 | ||
|
|
bf1d7ac928 | ||
|
|
040bd95d84 | ||
|
|
b36dd3aa81 | ||
|
|
b556a4bdce | ||
|
|
b228800e9e | ||
|
|
a22244d266 | ||
|
|
d6824afd21 | ||
|
|
14bd06fab3 | ||
|
|
18780b27fe | ||
|
|
d6ca79a52e | ||
|
|
5baca2c38d | ||
|
|
c876a03819 | ||
|
|
56ffcd4477 | ||
|
|
30ab6c14fe | ||
|
|
225137c972 | ||
|
|
f31a8efd7b | ||
|
|
cc961ec0a8 | ||
|
|
1028b736c4 | ||
|
|
d6d32400fa | ||
|
|
bd924a90dd | ||
|
|
f218a35ee5 | ||
|
|
d9c6dc4e04 | ||
|
|
b61f7403bf | ||
|
|
011b379bec | ||
|
|
54dd7a4a9b | ||
|
|
7f2ef13da1 | ||
|
|
51d9d0d9e8 | ||
|
|
0083aece57 | ||
|
|
99bf8f29be | ||
|
|
30d8a8b33b | ||
|
|
8f263cd336 | ||
|
|
d2b0593be3 | ||
|
|
d78a6712ef | ||
|
|
cf81c15f68 | ||
|
|
8f91f956fd | ||
|
|
d3524494e5 | ||
|
|
36c4e923f1 | ||
|
|
5505465f93 | ||
|
|
b3b5055080 | ||
|
|
c2a39e78ff | ||
|
|
d2bbe5ff56 | ||
|
|
676aa6a53d | ||
|
|
3d5a5c3d3c | ||
|
|
57cbb49d65 | ||
|
|
666f1a7d10 | ||
|
|
ffb5942e60 | ||
|
|
f72c87dd26 | ||
|
|
81b4680173 | ||
|
|
57f8979df1 | ||
|
|
04e5950020 | ||
|
|
68f1ba1617 | ||
|
|
35a5815513 | ||
|
|
e2f4163ed8 | ||
|
|
fb95fc61a0 | ||
|
|
1caed16099 | ||
|
|
a1d5f2802b | ||
|
|
b0f14cd311 | ||
|
|
254f262aba | ||
|
|
72211e62d5 | ||
|
|
de6fcea363 | ||
|
|
0377a35811 | ||
|
|
8ab75fdda9 | ||
|
|
15b54670ff | ||
|
|
82c7fe8d8b | ||
|
|
ace493b32f | ||
|
|
9751433803 | ||
|
|
3157867a71 | ||
|
|
5e581eabfe | ||
|
|
752877051c | ||
|
|
705e5b5a80 | ||
|
|
f4f6f5f48a | ||
|
|
d4f5f2ce95 | ||
|
|
09b6a2db0b | ||
|
|
005cd38d27 | ||
|
|
1290b73faa | ||
|
|
59d4f7d36d | ||
|
|
fefd0a1cc8 | ||
|
|
b8e4f1f803 | ||
|
|
d80a653552 | ||
|
|
2f29ec75ef | ||
|
|
5386414666 | ||
|
|
388e168158 | ||
|
|
45636b966f | ||
|
|
9d5fecd691 | ||
|
|
5c63ec380a | ||
|
|
993ec3fba6 | ||
|
|
994e6099d8 | ||
|
|
4ea238b18b | ||
|
|
e6227d905a | ||
|
|
ad61a7fe24 | ||
|
|
dc53f46946 | ||
|
|
2bd04a53bf | ||
|
|
dd2044e45d | ||
|
|
d3f0a79fe9 | ||
|
|
a9f0668649 | ||
|
|
f1ca0c05fd | ||
|
|
1528121f67 | ||
|
|
456b53d9d3 | ||
|
|
b7a5ef9d9d | ||
|
|
99c4ae7200 | ||
|
|
e4bedd4162 | ||
|
|
359cfb46ae | ||
|
|
87ac60c71d | ||
|
|
e52a518b00 | ||
|
|
c370697b47 | ||
|
|
a8e5606650 | ||
|
|
750bb6b3b5 | ||
|
|
5ac6490bf1 | ||
|
|
a606e004e5 | ||
|
|
2d9bcaeac9 | ||
|
|
cd8ab2b35f | ||
|
|
0146bacbb3 | ||
|
|
7bea36532d | ||
|
|
1ad5416611 | ||
|
|
12a3fa707b | ||
|
|
3a4e55b68d | ||
|
|
d44efc7076 | ||
|
|
03a8ce36f3 | ||
|
|
15e136b87f | ||
|
|
6826ed5162 | ||
|
|
10973eb075 | ||
|
|
55ded3ee16 | ||
|
|
95085a34f2 | ||
|
|
91758b96bf | ||
|
|
63c7d52430 | ||
|
|
319506c8f5 | ||
|
|
1365ecc5a0 | ||
|
|
04e8eb2d8e | ||
|
|
5e2f3bf7db | ||
|
|
8af534f15f | ||
|
|
0c532affe3 | ||
|
|
74581a3aa5 | ||
|
|
e9a0801a77 | ||
|
|
8a1409135b | ||
|
|
13469f0839 | ||
|
|
19b957e915 | ||
|
|
8aab98a7d6 | ||
|
|
ff213bac68 | ||
|
|
d8eb789db4 | ||
|
|
0d24a54b90 | ||
|
|
a6e53e6fcd | ||
|
|
f47974d485 | ||
|
|
880a975744 | ||
|
|
1ee57801c9 | ||
|
|
b04bb9c19d | ||
|
|
2cefcc1908 | ||
|
|
62fef4accb | ||
|
|
2c57c89f9e | ||
|
|
09c3c2c844 | ||
|
|
241bb54c66 | ||
|
|
e30cd4ac67 | ||
|
|
f1d4d4fbaf | ||
|
|
cc849c54a7 | ||
|
|
3283231e11 | ||
|
|
a6034aef26 | ||
|
|
3baa71ca43 | ||
|
|
491b29303e | ||
|
|
fab09d15cb | ||
|
|
ec6553384a | ||
|
|
35e9ef2496 |
19
.cargo/armv6l-unknown-linux-musleabihf.json
Normal file
19
.cargo/armv6l-unknown-linux-musleabihf.json
Normal file
@ -0,0 +1,19 @@
|
||||
{
|
||||
"arch": "arm",
|
||||
"crt-static-defaults": true,
|
||||
"data-layout": "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64",
|
||||
"emit-debug-gdb-scripts": false,
|
||||
"env": "musl",
|
||||
"executables": true,
|
||||
"is-builtin": false,
|
||||
"linker": "arm-linux-gnueabihf-gcc",
|
||||
"linker-flavor": "gcc",
|
||||
"llvm-target": "armv6-unknown-linux-musleabihf",
|
||||
"max-atomic-width": 32,
|
||||
"os": "linux",
|
||||
"panic-strategy": "unwind",
|
||||
"relocation-model": "static",
|
||||
"target-endian": "little",
|
||||
"target-pointer-width": "32",
|
||||
"vendor": "unknown"
|
||||
}
|
||||
@ -1,12 +1,33 @@
|
||||
# macOS targets — pin minimum OS version so binaries run on supported releases.
|
||||
# Intel (x86_64): target macOS 10.15 Catalina and later.
|
||||
# Apple Silicon (aarch64): target macOS 11.0 Big Sur and later (no Catalina hardware exists).
|
||||
[target.x86_64-apple-darwin]
|
||||
rustflags = ["-C", "link-arg=-mmacosx-version-min=10.15"]
|
||||
|
||||
[target.aarch64-apple-darwin]
|
||||
rustflags = ["-C", "link-arg=-mmacosx-version-min=11.0"]
|
||||
|
||||
[target.x86_64-unknown-linux-musl]
|
||||
rustflags = ["-C", "link-arg=-static"]
|
||||
|
||||
[target.aarch64-unknown-linux-musl]
|
||||
rustflags = ["-C", "link-arg=-static"]
|
||||
|
||||
# Android targets (NDK toolchain)
|
||||
# ARMv6 musl (Raspberry Pi Zero W)
|
||||
[target.armv6l-unknown-linux-musleabihf]
|
||||
rustflags = ["-C", "link-arg=-static"]
|
||||
|
||||
# Android targets (Termux-native defaults).
|
||||
# CI/NDK cross builds can override these via CARGO_TARGET_*_LINKER.
|
||||
[target.armv7-linux-androideabi]
|
||||
linker = "armv7a-linux-androideabi21-clang"
|
||||
linker = "clang"
|
||||
|
||||
[target.aarch64-linux-android]
|
||||
linker = "aarch64-linux-android21-clang"
|
||||
linker = "clang"
|
||||
|
||||
# Windows targets — increase stack size for large JsonSchema derives
|
||||
[target.x86_64-pc-windows-msvc]
|
||||
rustflags = ["-C", "link-args=/STACK:8388608"]
|
||||
|
||||
[target.aarch64-pc-windows-msvc]
|
||||
rustflags = ["-C", "link-args=/STACK:8388608"]
|
||||
|
||||
@ -21,15 +21,14 @@ reviews:
|
||||
# Only review PRs targeting these branches
|
||||
base_branches:
|
||||
- main
|
||||
- develop
|
||||
- dev
|
||||
# Skip reviews for draft PRs or WIP
|
||||
drafts: false
|
||||
# Enable base branch analysis
|
||||
base_branch_analysis: true
|
||||
|
||||
# Poem configuration
|
||||
poem:
|
||||
enabled: false
|
||||
# Poem feature toggle (must be a boolean, not an object)
|
||||
poem: false
|
||||
|
||||
# Reviewer suggestions
|
||||
reviewer:
|
||||
|
||||
@ -15,6 +15,9 @@ indent_size = 4
|
||||
# Trailing whitespace is significant in Markdown (line breaks).
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[*.go]
|
||||
indent_style = tab
|
||||
|
||||
[*.{yml,yaml}]
|
||||
indent_size = 2
|
||||
|
||||
@ -23,3 +26,7 @@ indent_size = 2
|
||||
|
||||
[Dockerfile]
|
||||
indent_size = 4
|
||||
|
||||
[*.nix]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
@ -12,10 +12,11 @@
|
||||
API_KEY=your-api-key-here
|
||||
# ZEROCLAW_API_KEY=your-api-key-here
|
||||
|
||||
# Default provider/model (can be overridden by CLI flags)
|
||||
PROVIDER=openrouter
|
||||
# ZEROCLAW_PROVIDER=openrouter
|
||||
# ZEROCLAW_MODEL=anthropic/claude-sonnet-4-6
|
||||
# Default provider/model (required - choose one)
|
||||
# Options: openrouter, openai, anthropic, gemini, ollama, groq, mistral, deepseek, xai, and more
|
||||
# PROVIDER=your-provider-here
|
||||
# ZEROCLAW_PROVIDER=your-provider-here
|
||||
# ZEROCLAW_MODEL=your-model-here
|
||||
# ZEROCLAW_TEMPERATURE=0.7
|
||||
|
||||
# Workspace directory override
|
||||
|
||||
50
.github/CODEOWNERS
vendored
50
.github/CODEOWNERS
vendored
@ -1,32 +1,32 @@
|
||||
# Default owner for all files
|
||||
* @chumyin
|
||||
* @theonlyhennygod @JordanTheJet @chumyin
|
||||
|
||||
# Important functional modules
|
||||
/src/agent/** @theonlyhennygod
|
||||
/src/providers/** @theonlyhennygod
|
||||
/src/channels/** @theonlyhennygod
|
||||
/src/tools/** @theonlyhennygod
|
||||
/src/gateway/** @theonlyhennygod
|
||||
/src/runtime/** @theonlyhennygod
|
||||
/src/memory/** @theonlyhennygod
|
||||
/Cargo.toml @theonlyhennygod
|
||||
/Cargo.lock @theonlyhennygod
|
||||
/src/agent/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/src/providers/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/src/channels/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/src/tools/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/src/gateway/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/src/runtime/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/src/memory/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/Cargo.toml @theonlyhennygod @JordanTheJet @chumyin
|
||||
/Cargo.lock @theonlyhennygod @JordanTheJet @chumyin
|
||||
|
||||
# Security / tests / CI-CD ownership
|
||||
/src/security/** @chumyin
|
||||
/tests/** @chumyin
|
||||
/.github/** @chumyin
|
||||
/.github/workflows/** @chumyin
|
||||
/.github/codeql/** @chumyin
|
||||
/.github/dependabot.yml @chumyin
|
||||
/SECURITY.md @chumyin
|
||||
/docs/actions-source-policy.md @chumyin
|
||||
/docs/ci-map.md @chumyin
|
||||
/src/security/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/tests/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/.github/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/.github/workflows/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/.github/codeql/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/.github/dependabot.yml @theonlyhennygod @JordanTheJet @chumyin
|
||||
/SECURITY.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
/docs/actions-source-policy.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
/docs/ci-map.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
|
||||
# Docs & governance
|
||||
/docs/** @chumyin
|
||||
/AGENTS.md @chumyin
|
||||
/CLAUDE.md @chumyin
|
||||
/CONTRIBUTING.md @chumyin
|
||||
/docs/pr-workflow.md @chumyin
|
||||
/docs/reviewer-playbook.md @chumyin
|
||||
/docs/** @theonlyhennygod @JordanTheJet @chumyin
|
||||
/AGENTS.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
/CLAUDE.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
/CONTRIBUTING.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
/docs/pr-workflow.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
/docs/reviewer-playbook.md @theonlyhennygod @JordanTheJet @chumyin
|
||||
|
||||
6
.github/ISSUE_TEMPLATE/config.yml
vendored
6
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -3,6 +3,12 @@ contact_links:
|
||||
- name: Security vulnerability report
|
||||
url: https://github.com/zeroclaw-labs/zeroclaw/security/policy
|
||||
about: Please report security vulnerabilities privately via SECURITY.md policy.
|
||||
- name: Private vulnerability report template
|
||||
url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/security/private-vulnerability-report-template.md
|
||||
about: Use this template when filing a private vulnerability report in Security Advisories.
|
||||
- name: 私密漏洞报告模板(中文)
|
||||
url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/security/private-vulnerability-report-template.zh-CN.md
|
||||
about: 使用该中文模板通过 Security Advisories 进行私密漏洞提交。
|
||||
- name: Contribution guide
|
||||
url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/CONTRIBUTING.md
|
||||
about: Please read contribution and PR requirements before opening an issue.
|
||||
|
||||
8
.github/actionlint.yaml
vendored
8
.github/actionlint.yaml
vendored
@ -1,3 +1,11 @@
|
||||
self-hosted-runner:
|
||||
labels:
|
||||
- Linux
|
||||
- X64
|
||||
- racknerd
|
||||
- aws-india
|
||||
- light
|
||||
- cpu40
|
||||
- codeql
|
||||
- codeql-general
|
||||
- blacksmith-2vcpu-ubuntu-2404
|
||||
|
||||
70
.github/connectivity/probe-contract.json
vendored
Normal file
70
.github/connectivity/probe-contract.json
vendored
Normal file
@ -0,0 +1,70 @@
|
||||
{
|
||||
"version": 1,
|
||||
"description": "Provider/model connectivity probe contract for scheduled CI checks.",
|
||||
"consecutive_transient_failures_to_escalate": 2,
|
||||
"providers": [
|
||||
{
|
||||
"name": "OpenAI",
|
||||
"provider": "openai",
|
||||
"required": true,
|
||||
"secret_env": "OPENAI_API_KEY",
|
||||
"timeout_sec": 90,
|
||||
"retries": 2,
|
||||
"notes": "Primary reference provider; validates baseline OpenAI-compatible path."
|
||||
},
|
||||
{
|
||||
"name": "Anthropic",
|
||||
"provider": "anthropic",
|
||||
"required": true,
|
||||
"secret_env": "ANTHROPIC_API_KEY",
|
||||
"timeout_sec": 90,
|
||||
"retries": 2,
|
||||
"notes": "Checks non-OpenAI provider fetch path and account health."
|
||||
},
|
||||
{
|
||||
"name": "Gemini",
|
||||
"provider": "gemini",
|
||||
"required": true,
|
||||
"secret_env": "GEMINI_API_KEY",
|
||||
"timeout_sec": 90,
|
||||
"retries": 2,
|
||||
"notes": "Validates Google model discovery endpoint availability."
|
||||
},
|
||||
{
|
||||
"name": "OpenRouter",
|
||||
"provider": "openrouter",
|
||||
"required": true,
|
||||
"secret_env": "OPENROUTER_API_KEY",
|
||||
"timeout_sec": 90,
|
||||
"retries": 2,
|
||||
"notes": "Routes across many providers; signal for aggregator-side health."
|
||||
},
|
||||
{
|
||||
"name": "Qwen",
|
||||
"provider": "qwen",
|
||||
"required": false,
|
||||
"secret_env": "DASHSCOPE_API_KEY",
|
||||
"timeout_sec": 90,
|
||||
"retries": 2,
|
||||
"notes": "Regional provider check; optional for global deployments."
|
||||
},
|
||||
{
|
||||
"name": "NVIDIA NIM",
|
||||
"provider": "nvidia",
|
||||
"required": false,
|
||||
"secret_env": "NVIDIA_API_KEY",
|
||||
"timeout_sec": 90,
|
||||
"retries": 2,
|
||||
"notes": "Optional ecosystem endpoint check."
|
||||
},
|
||||
{
|
||||
"name": "OpenAI Codex",
|
||||
"provider": "openai-codex",
|
||||
"required": false,
|
||||
"secret_env": "OPENAI_API_KEY",
|
||||
"timeout_sec": 90,
|
||||
"retries": 2,
|
||||
"notes": "Uses OpenAI-compatible models endpoint to verify Codex-profile discovery path."
|
||||
}
|
||||
]
|
||||
}
|
||||
77
.github/connectivity/providers.json
vendored
Normal file
77
.github/connectivity/providers.json
vendored
Normal file
@ -0,0 +1,77 @@
|
||||
{
|
||||
"global_timeout_seconds": 8,
|
||||
"providers": [
|
||||
{
|
||||
"id": "openrouter",
|
||||
"url": "https://openrouter.ai/api/v1/models",
|
||||
"method": "GET",
|
||||
"critical": true
|
||||
},
|
||||
{
|
||||
"id": "openai",
|
||||
"url": "https://api.openai.com/v1/models",
|
||||
"method": "GET",
|
||||
"critical": true
|
||||
},
|
||||
{
|
||||
"id": "anthropic",
|
||||
"url": "https://api.anthropic.com/v1/messages",
|
||||
"method": "POST",
|
||||
"critical": true
|
||||
},
|
||||
{
|
||||
"id": "groq",
|
||||
"url": "https://api.groq.com/openai/v1/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "deepseek",
|
||||
"url": "https://api.deepseek.com/v1/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "moonshot",
|
||||
"url": "https://api.moonshot.ai/v1/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "qwen",
|
||||
"url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "zai",
|
||||
"url": "https://api.z.ai/api/paas/v4/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "glm",
|
||||
"url": "https://open.bigmodel.cn/api/paas/v4/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "together",
|
||||
"url": "https://api.together.xyz/v1/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "fireworks",
|
||||
"url": "https://api.fireworks.ai/inference/v1/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
},
|
||||
{
|
||||
"id": "cohere",
|
||||
"url": "https://api.cohere.com/v1/models",
|
||||
"method": "GET",
|
||||
"critical": false
|
||||
}
|
||||
]
|
||||
}
|
||||
6
.github/dependabot.yml
vendored
6
.github/dependabot.yml
vendored
@ -5,7 +5,7 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
target-branch: dev
|
||||
target-branch: main
|
||||
open-pull-requests-limit: 3
|
||||
labels:
|
||||
- "dependencies"
|
||||
@ -21,7 +21,7 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
target-branch: dev
|
||||
target-branch: main
|
||||
open-pull-requests-limit: 1
|
||||
labels:
|
||||
- "ci"
|
||||
@ -38,7 +38,7 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
target-branch: dev
|
||||
target-branch: main
|
||||
open-pull-requests-limit: 1
|
||||
labels:
|
||||
- "ci"
|
||||
|
||||
2
.github/pull_request_template.md
vendored
2
.github/pull_request_template.md
vendored
@ -2,7 +2,7 @@
|
||||
|
||||
Describe this PR in 2-5 bullets:
|
||||
|
||||
- Base branch target (`dev` for normal contributions; `main` only for `dev` promotion):
|
||||
- Base branch target (`main` or `dev`; direct `main` PRs are allowed):
|
||||
- Problem:
|
||||
- Why it matters:
|
||||
- What changed:
|
||||
|
||||
39
.github/release/canary-policy.json
vendored
Normal file
39
.github/release/canary-policy.json
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.canary-policy.v1",
|
||||
"release_channel": "stable",
|
||||
"observation_window_minutes": 60,
|
||||
"minimum_sample_size": 500,
|
||||
"cohorts": [
|
||||
{
|
||||
"name": "canary-5pct",
|
||||
"traffic_percent": 5,
|
||||
"duration_minutes": 20
|
||||
},
|
||||
{
|
||||
"name": "canary-20pct",
|
||||
"traffic_percent": 20,
|
||||
"duration_minutes": 20
|
||||
},
|
||||
{
|
||||
"name": "canary-50pct",
|
||||
"traffic_percent": 50,
|
||||
"duration_minutes": 20
|
||||
},
|
||||
{
|
||||
"name": "canary-100pct",
|
||||
"traffic_percent": 100,
|
||||
"duration_minutes": 60
|
||||
}
|
||||
],
|
||||
"observability_signals": [
|
||||
"error_rate",
|
||||
"crash_rate",
|
||||
"p95_latency_ms",
|
||||
"sample_size"
|
||||
],
|
||||
"thresholds": {
|
||||
"max_error_rate": 0.02,
|
||||
"max_crash_rate": 0.01,
|
||||
"max_p95_latency_ms": 1200
|
||||
}
|
||||
}
|
||||
10
.github/release/docs-deploy-policy.json
vendored
Normal file
10
.github/release/docs-deploy-policy.json
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.docs-deploy-policy.v1",
|
||||
"production_branch": "main",
|
||||
"allow_manual_production_dispatch": true,
|
||||
"require_preview_evidence_on_manual_production": true,
|
||||
"allow_manual_rollback_dispatch": true,
|
||||
"rollback_ref_must_be_ancestor_of_production_branch": true,
|
||||
"docs_preview_retention_days": 14,
|
||||
"docs_guard_artifact_retention_days": 21
|
||||
}
|
||||
18
.github/release/ghcr-tag-policy.json
vendored
Normal file
18
.github/release/ghcr-tag-policy.json
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.ghcr-tag-policy.v1",
|
||||
"release_tag_regex": "^v[0-9]+\\.[0-9]+\\.[0-9]+$",
|
||||
"sha_tag_prefix": "sha-",
|
||||
"sha_tag_length": 12,
|
||||
"latest_tag": "latest",
|
||||
"require_latest_on_release": true,
|
||||
"immutable_tag_classes": [
|
||||
"release",
|
||||
"sha"
|
||||
],
|
||||
"rollback_priority": [
|
||||
"sha",
|
||||
"release"
|
||||
],
|
||||
"contract_artifact_retention_days": 21,
|
||||
"scan_artifact_retention_days": 14
|
||||
}
|
||||
17
.github/release/ghcr-vulnerability-policy.json
vendored
Normal file
17
.github/release/ghcr-vulnerability-policy.json
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.ghcr-vulnerability-policy.v1",
|
||||
"required_tag_classes": [
|
||||
"release",
|
||||
"sha",
|
||||
"latest"
|
||||
],
|
||||
"blocking_severities": [
|
||||
"HIGH",
|
||||
"CRITICAL"
|
||||
],
|
||||
"max_blocking_findings_per_tag": 0,
|
||||
"require_blocking_count_parity": true,
|
||||
"require_artifact_id_parity": true,
|
||||
"scan_artifact_retention_days": 14,
|
||||
"audit_artifact_retention_days": 21
|
||||
}
|
||||
9
.github/release/nightly-owner-routing.json
vendored
Normal file
9
.github/release/nightly-owner-routing.json
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.nightly-owner-routing.v1",
|
||||
"owners": {
|
||||
"default": "@chumyin",
|
||||
"whatsapp-web": "@chumyin",
|
||||
"browser-native": "@chumyin",
|
||||
"nightly-all-features": "@chumyin"
|
||||
}
|
||||
}
|
||||
33
.github/release/prerelease-stage-gates.json
vendored
Normal file
33
.github/release/prerelease-stage-gates.json
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.prerelease-stage-gates.v1",
|
||||
"stage_order": ["alpha", "beta", "rc", "stable"],
|
||||
"required_previous_stage": {
|
||||
"beta": "alpha",
|
||||
"rc": "beta",
|
||||
"stable": "rc"
|
||||
},
|
||||
"required_checks": {
|
||||
"alpha": [
|
||||
"CI Required Gate",
|
||||
"Security Audit"
|
||||
],
|
||||
"beta": [
|
||||
"CI Required Gate",
|
||||
"Security Audit",
|
||||
"Feature Matrix Summary"
|
||||
],
|
||||
"rc": [
|
||||
"CI Required Gate",
|
||||
"Security Audit",
|
||||
"Feature Matrix Summary",
|
||||
"Nightly Summary & Routing"
|
||||
],
|
||||
"stable": [
|
||||
"CI Required Gate",
|
||||
"Security Audit",
|
||||
"Feature Matrix Summary",
|
||||
"Verify Artifact Set",
|
||||
"Nightly Summary & Routing"
|
||||
]
|
||||
}
|
||||
}
|
||||
30
.github/release/release-artifact-contract.json
vendored
Normal file
30
.github/release/release-artifact-contract.json
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.release-artifact-contract.v1",
|
||||
"release_archive_patterns": [
|
||||
"zeroclaw-x86_64-unknown-linux-gnu.tar.gz",
|
||||
"zeroclaw-x86_64-unknown-linux-musl.tar.gz",
|
||||
"zeroclaw-aarch64-unknown-linux-gnu.tar.gz",
|
||||
"zeroclaw-aarch64-unknown-linux-musl.tar.gz",
|
||||
"zeroclaw-armv7-unknown-linux-gnueabihf.tar.gz",
|
||||
"zeroclaw-armv7-linux-androideabi.tar.gz",
|
||||
"zeroclaw-aarch64-linux-android.tar.gz",
|
||||
"zeroclaw-x86_64-unknown-freebsd.tar.gz",
|
||||
"zeroclaw-x86_64-apple-darwin.tar.gz",
|
||||
"zeroclaw-aarch64-apple-darwin.tar.gz",
|
||||
"zeroclaw-x86_64-pc-windows-msvc.zip"
|
||||
],
|
||||
"required_manifest_files": [
|
||||
"release-manifest.json",
|
||||
"release-manifest.md",
|
||||
"SHA256SUMS"
|
||||
],
|
||||
"required_sbom_files": [
|
||||
"zeroclaw.cdx.json",
|
||||
"zeroclaw.spdx.json"
|
||||
],
|
||||
"required_notice_files": [
|
||||
"LICENSE-APACHE",
|
||||
"LICENSE-MIT",
|
||||
"NOTICE"
|
||||
]
|
||||
}
|
||||
33
.github/security/deny-ignore-governance.json
vendored
Normal file
33
.github/security/deny-ignore-governance.json
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.deny-governance.v1",
|
||||
"advisories": [
|
||||
{
|
||||
"id": "RUSTSEC-2025-0141",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Transitive via probe-rs in current release path; tracked for replacement when probe-rs updates.",
|
||||
"ticket": "RMN-21",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"id": "RUSTSEC-2024-0384",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Upstream rust-nostr advisory mitigation is still in progress; monitor until released fix lands.",
|
||||
"ticket": "RMN-21",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"id": "RUSTSEC-2024-0388",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Transitive via matrix-sdk indexeddb dependency chain in current matrix release line; track removal when upstream drops derivative.",
|
||||
"ticket": "RMN-21",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"id": "RUSTSEC-2024-0436",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Transitive via wasmtime dependency stack; tracked until upstream removes or replaces paste.",
|
||||
"ticket": "RMN-21",
|
||||
"expires_on": "2026-12-31"
|
||||
}
|
||||
]
|
||||
}
|
||||
56
.github/security/gitleaks-allowlist-governance.json
vendored
Normal file
56
.github/security/gitleaks-allowlist-governance.json
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.secrets-governance.v1",
|
||||
"paths": [
|
||||
{
|
||||
"pattern": "src/security/leak_detector\\.rs",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Fixture patterns are intentionally embedded for regression tests in leak detector logic.",
|
||||
"ticket": "RMN-13",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"pattern": "src/agent/loop_\\.rs",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Contains escaped template snippets used for command orchestration and parser coverage.",
|
||||
"ticket": "RMN-13",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"pattern": "src/security/secrets\\.rs",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Contains detector test vectors and redaction examples required for secret scanning tests.",
|
||||
"ticket": "RMN-13",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"pattern": "docs/(i18n/vi/|vi/)?zai-glm-setup\\.md",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Documentation contains literal environment variable placeholders for onboarding commands.",
|
||||
"ticket": "RMN-13",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"pattern": "\\.github/workflows/pub-release\\.yml",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Release workflow emits masked authorization header examples during registry smoke checks.",
|
||||
"ticket": "RMN-13",
|
||||
"expires_on": "2026-12-31"
|
||||
}
|
||||
],
|
||||
"regexes": [
|
||||
{
|
||||
"pattern": "Authorization: Bearer \\$\\{[^}]+\\}",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Intentional placeholder used in docs/workflow snippets for safe header examples.",
|
||||
"ticket": "RMN-13",
|
||||
"expires_on": "2026-12-31"
|
||||
},
|
||||
{
|
||||
"pattern": "curl -sS -o /tmp/ghcr-release-manifest\\.json -w \"%\\{http_code\\}\"",
|
||||
"owner": "repo-maintainers",
|
||||
"reason": "Release smoke command string is non-secret telemetry and should not be flagged as credential leakage.",
|
||||
"ticket": "RMN-13",
|
||||
"expires_on": "2026-12-31"
|
||||
}
|
||||
]
|
||||
}
|
||||
5
.github/security/unsafe-audit-governance.json
vendored
Normal file
5
.github/security/unsafe-audit-governance.json
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"schema_version": "zeroclaw.unsafe-audit-governance.v1",
|
||||
"ignore_paths": [],
|
||||
"ignore_pattern_ids": []
|
||||
}
|
||||
7
.github/workflows/README.md
vendored
7
.github/workflows/README.md
vendored
@ -19,7 +19,6 @@ Workflow behavior documentation in this directory:
|
||||
|
||||
Current workflow helper scripts:
|
||||
|
||||
- `.github/workflows/scripts/ci_workflow_owner_approval.js`
|
||||
- `.github/workflows/scripts/ci_license_file_owner_guard.js`
|
||||
- `.github/workflows/scripts/lint_feedback.js`
|
||||
- `.github/workflows/scripts/pr_auto_response_contributor_tier.js`
|
||||
@ -28,3 +27,9 @@ Current workflow helper scripts:
|
||||
- `.github/workflows/scripts/pr_intake_checks.js`
|
||||
- `.github/workflows/scripts/pr_labeler.js`
|
||||
- `.github/workflows/scripts/test_benchmarks_pr_comment.js`
|
||||
|
||||
Release/CI policy assets introduced for advanced delivery lanes:
|
||||
|
||||
- `.github/release/nightly-owner-routing.json`
|
||||
- `.github/release/canary-policy.json`
|
||||
- `.github/release/prerelease-stage-gates.json`
|
||||
|
||||
61
.github/workflows/ci-build-fast.yml
vendored
61
.github/workflows/ci-build-fast.yml
vendored
@ -1,61 +0,0 @@
|
||||
name: CI Build (Fast)
|
||||
|
||||
# Optional fast release build that runs alongside the normal Build (Smoke) job.
|
||||
# This workflow is informational and does not gate merges.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [dev, main]
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
|
||||
concurrency:
|
||||
group: ci-fast-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
name: Detect Change Scope
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
outputs:
|
||||
rust_changed: ${{ steps.scope.outputs.rust_changed }}
|
||||
docs_only: ${{ steps.scope.outputs.docs_only }}
|
||||
workflow_changed: ${{ steps.scope.outputs.workflow_changed }}
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Detect docs-only changes
|
||||
id: scope
|
||||
shell: bash
|
||||
env:
|
||||
EVENT_NAME: ${{ github.event_name }}
|
||||
BASE_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
|
||||
run: ./scripts/ci/detect_change_scope.sh
|
||||
|
||||
build-fast:
|
||||
name: Build (Fast)
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.rust_changed == 'true' || needs.changes.outputs.workflow_changed == 'true'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 25
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
with:
|
||||
prefix-key: fast-build
|
||||
cache-targets: true
|
||||
|
||||
- name: Build release binary
|
||||
run: cargo build --release --locked --verbose
|
||||
330
.github/workflows/ci-canary-gate.yml
vendored
Normal file
330
.github/workflows/ci-canary-gate.yml
vendored
Normal file
@ -0,0 +1,330 @@
|
||||
name: CI Canary Gate
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
mode:
|
||||
description: "dry-run computes decision only; execute enables canary dispatch"
|
||||
required: true
|
||||
default: dry-run
|
||||
type: choice
|
||||
options:
|
||||
- dry-run
|
||||
- execute
|
||||
candidate_tag:
|
||||
description: "Candidate release tag (e.g. v0.1.8-rc.1 or v0.1.8)"
|
||||
required: false
|
||||
default: ""
|
||||
type: string
|
||||
candidate_sha:
|
||||
description: "Optional explicit candidate SHA"
|
||||
required: false
|
||||
default: ""
|
||||
type: string
|
||||
error_rate:
|
||||
description: "Observed canary error rate (0.0-1.0)"
|
||||
required: true
|
||||
default: "0.0"
|
||||
type: string
|
||||
crash_rate:
|
||||
description: "Observed canary crash rate (0.0-1.0)"
|
||||
required: true
|
||||
default: "0.0"
|
||||
type: string
|
||||
p95_latency_ms:
|
||||
description: "Observed canary p95 latency in milliseconds"
|
||||
required: true
|
||||
default: "0"
|
||||
type: string
|
||||
sample_size:
|
||||
description: "Observed canary sample size"
|
||||
required: true
|
||||
default: "0"
|
||||
type: string
|
||||
emit_repository_dispatch:
|
||||
description: "Emit canary decision repository_dispatch event"
|
||||
required: true
|
||||
default: false
|
||||
type: boolean
|
||||
trigger_rollback_on_abort:
|
||||
description: "Automatically dispatch CI Rollback Guard when canary decision is abort"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
rollback_branch:
|
||||
description: "Rollback integration branch used by CI Rollback Guard dispatch"
|
||||
required: true
|
||||
default: dev
|
||||
type: choice
|
||||
options:
|
||||
- dev
|
||||
- main
|
||||
rollback_target_ref:
|
||||
description: "Optional explicit rollback target ref passed to CI Rollback Guard"
|
||||
required: false
|
||||
default: ""
|
||||
type: string
|
||||
fail_on_violation:
|
||||
description: "Fail on policy violations"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
schedule:
|
||||
- cron: "45 7 * * 1" # Weekly Monday 07:45 UTC
|
||||
|
||||
concurrency:
|
||||
group: canary-gate-${{ github.event.inputs.candidate_tag || github.ref || github.run_id }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
canary-plan:
|
||||
name: Canary Plan
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
outputs:
|
||||
mode: ${{ steps.inputs.outputs.mode }}
|
||||
candidate_tag: ${{ steps.inputs.outputs.candidate_tag }}
|
||||
candidate_sha: ${{ steps.inputs.outputs.candidate_sha }}
|
||||
trigger_rollback_on_abort: ${{ steps.inputs.outputs.trigger_rollback_on_abort }}
|
||||
rollback_branch: ${{ steps.inputs.outputs.rollback_branch }}
|
||||
rollback_target_ref: ${{ steps.inputs.outputs.rollback_target_ref }}
|
||||
decision: ${{ steps.extract.outputs.decision }}
|
||||
ready_to_execute: ${{ steps.extract.outputs.ready_to_execute }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Resolve canary inputs
|
||||
id: inputs
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
mode="dry-run"
|
||||
candidate_tag=""
|
||||
candidate_sha=""
|
||||
error_rate="0.0"
|
||||
crash_rate="0.0"
|
||||
p95_latency_ms="0"
|
||||
sample_size="0"
|
||||
trigger_rollback_on_abort="true"
|
||||
rollback_branch="dev"
|
||||
rollback_target_ref=""
|
||||
# Scheduled audits may not have live canary telemetry; report violations without failing by default.
|
||||
fail_on_violation="false"
|
||||
|
||||
if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
mode="${{ github.event.inputs.mode || 'dry-run' }}"
|
||||
candidate_tag="${{ github.event.inputs.candidate_tag || '' }}"
|
||||
candidate_sha="${{ github.event.inputs.candidate_sha || '' }}"
|
||||
error_rate="${{ github.event.inputs.error_rate || '0.0' }}"
|
||||
crash_rate="${{ github.event.inputs.crash_rate || '0.0' }}"
|
||||
p95_latency_ms="${{ github.event.inputs.p95_latency_ms || '0' }}"
|
||||
sample_size="${{ github.event.inputs.sample_size || '0' }}"
|
||||
trigger_rollback_on_abort="${{ github.event.inputs.trigger_rollback_on_abort || 'true' }}"
|
||||
rollback_branch="${{ github.event.inputs.rollback_branch || 'dev' }}"
|
||||
rollback_target_ref="${{ github.event.inputs.rollback_target_ref || '' }}"
|
||||
fail_on_violation="${{ github.event.inputs.fail_on_violation || 'true' }}"
|
||||
else
|
||||
git fetch --tags --force origin
|
||||
candidate_tag="$(git tag --list 'v*' --sort=-version:refname | head -n1)"
|
||||
if [ -n "$candidate_tag" ]; then
|
||||
candidate_sha="$(git rev-parse "${candidate_tag}^{commit}")"
|
||||
fi
|
||||
fi
|
||||
|
||||
{
|
||||
echo "mode=${mode}"
|
||||
echo "candidate_tag=${candidate_tag}"
|
||||
echo "candidate_sha=${candidate_sha}"
|
||||
echo "error_rate=${error_rate}"
|
||||
echo "crash_rate=${crash_rate}"
|
||||
echo "p95_latency_ms=${p95_latency_ms}"
|
||||
echo "sample_size=${sample_size}"
|
||||
echo "trigger_rollback_on_abort=${trigger_rollback_on_abort}"
|
||||
echo "rollback_branch=${rollback_branch}"
|
||||
echo "rollback_target_ref=${rollback_target_ref}"
|
||||
echo "fail_on_violation=${fail_on_violation}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Run canary guard
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
|
||||
args=()
|
||||
if [ "${{ steps.inputs.outputs.fail_on_violation }}" = "true" ]; then
|
||||
args+=(--fail-on-violation)
|
||||
fi
|
||||
|
||||
python3 scripts/ci/canary_guard.py \
|
||||
--policy-file .github/release/canary-policy.json \
|
||||
--candidate-tag "${{ steps.inputs.outputs.candidate_tag }}" \
|
||||
--candidate-sha "${{ steps.inputs.outputs.candidate_sha }}" \
|
||||
--mode "${{ steps.inputs.outputs.mode }}" \
|
||||
--error-rate "${{ steps.inputs.outputs.error_rate }}" \
|
||||
--crash-rate "${{ steps.inputs.outputs.crash_rate }}" \
|
||||
--p95-latency-ms "${{ steps.inputs.outputs.p95_latency_ms }}" \
|
||||
--sample-size "${{ steps.inputs.outputs.sample_size }}" \
|
||||
--output-json artifacts/canary-guard.json \
|
||||
--output-md artifacts/canary-guard.md \
|
||||
"${args[@]}"
|
||||
|
||||
- name: Extract canary decision outputs
|
||||
id: extract
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
decision="$(python3 - <<'PY'
|
||||
import json
|
||||
data = json.load(open('artifacts/canary-guard.json', encoding='utf-8'))
|
||||
print(data.get('decision', 'hold'))
|
||||
PY
|
||||
)"
|
||||
ready_to_execute="$(python3 - <<'PY'
|
||||
import json
|
||||
data = json.load(open('artifacts/canary-guard.json', encoding='utf-8'))
|
||||
print(str(bool(data.get('ready_to_execute', False))).lower())
|
||||
PY
|
||||
)"
|
||||
echo "decision=${decision}" >> "$GITHUB_OUTPUT"
|
||||
echo "ready_to_execute=${ready_to_execute}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Emit canary audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type canary_guard \
|
||||
--input-json artifacts/canary-guard.json \
|
||||
--output-json artifacts/audit-event-canary-guard.json \
|
||||
--artifact-name canary-guard \
|
||||
--retention-days 21
|
||||
|
||||
- name: Publish canary summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/canary-guard.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload canary artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: canary-guard
|
||||
path: |
|
||||
artifacts/canary-guard.json
|
||||
artifacts/canary-guard.md
|
||||
artifacts/audit-event-canary-guard.json
|
||||
if-no-files-found: error
|
||||
retention-days: 21
|
||||
|
||||
canary-execute:
|
||||
name: Canary Execute
|
||||
needs: [canary-plan]
|
||||
if: github.event_name == 'workflow_dispatch' && needs.canary-plan.outputs.mode == 'execute' && needs.canary-plan.outputs.ready_to_execute == 'true'
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 10
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Create canary marker tag
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
marker_tag="canary-${{ needs.canary-plan.outputs.candidate_tag }}-${{ github.run_id }}"
|
||||
git fetch --tags --force origin
|
||||
git tag -a "$marker_tag" "${{ needs.canary-plan.outputs.candidate_sha }}" -m "Canary decision marker from run ${{ github.run_id }}"
|
||||
git push origin "$marker_tag"
|
||||
echo "Created marker tag: $marker_tag" >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Emit canary repository dispatch
|
||||
if: github.event.inputs.emit_repository_dispatch == 'true'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
script: |
|
||||
await github.rest.repos.createDispatchEvent({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
event_type: `canary_${{ needs.canary-plan.outputs.decision }}`,
|
||||
client_payload: {
|
||||
candidate_tag: "${{ needs.canary-plan.outputs.candidate_tag }}",
|
||||
candidate_sha: "${{ needs.canary-plan.outputs.candidate_sha }}",
|
||||
decision: "${{ needs.canary-plan.outputs.decision }}",
|
||||
run_id: context.runId,
|
||||
run_attempt: process.env.GITHUB_RUN_ATTEMPT,
|
||||
source_sha: context.sha
|
||||
}
|
||||
});
|
||||
|
||||
- name: Trigger rollback guard workflow on abort
|
||||
if: needs.canary-plan.outputs.decision == 'abort' && needs.canary-plan.outputs.trigger_rollback_on_abort == 'true'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
script: |
|
||||
const rollbackBranch = "${{ needs.canary-plan.outputs.rollback_branch }}" || "dev";
|
||||
const rollbackTargetRef = `${{ needs.canary-plan.outputs.rollback_target_ref }}`.trim();
|
||||
const workflowRef = process.env.GITHUB_REF_NAME || "dev";
|
||||
|
||||
const inputs = {
|
||||
branch: rollbackBranch,
|
||||
mode: "execute",
|
||||
allow_non_ancestor: "false",
|
||||
fail_on_violation: "true",
|
||||
create_marker_tag: "true",
|
||||
emit_repository_dispatch: "true",
|
||||
};
|
||||
|
||||
if (rollbackTargetRef.length > 0) {
|
||||
inputs.target_ref = rollbackTargetRef;
|
||||
}
|
||||
|
||||
await github.rest.actions.createWorkflowDispatch({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
workflow_id: "ci-rollback.yml",
|
||||
ref: workflowRef,
|
||||
inputs,
|
||||
});
|
||||
|
||||
- name: Publish rollback trigger summary
|
||||
if: needs.canary-plan.outputs.decision == 'abort'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ "${{ needs.canary-plan.outputs.trigger_rollback_on_abort }}" = "true" ]; then
|
||||
{
|
||||
echo "### Canary Abort Rollback Trigger"
|
||||
echo "- CI Rollback Guard dispatch: triggered"
|
||||
echo "- Rollback branch: \`${{ needs.canary-plan.outputs.rollback_branch }}\`"
|
||||
if [ -n "${{ needs.canary-plan.outputs.rollback_target_ref }}" ]; then
|
||||
echo "- Rollback target ref: \`${{ needs.canary-plan.outputs.rollback_target_ref }}\`"
|
||||
else
|
||||
echo "- Rollback target ref: _auto (latest release tag strategy)_"
|
||||
fi
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
{
|
||||
echo "### Canary Abort Rollback Trigger"
|
||||
echo "- CI Rollback Guard dispatch: skipped (trigger_rollback_on_abort=false)"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
160
.github/workflows/ci-change-audit.yml
vendored
Normal file
160
.github/workflows/ci-change-audit.yml
vendored
Normal file
@ -0,0 +1,160 @@
|
||||
name: CI/CD Change Audit
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- ".github/workflows/**"
|
||||
- ".github/release/**"
|
||||
- ".github/codeql/**"
|
||||
- "scripts/ci/**"
|
||||
- ".github/dependabot.yml"
|
||||
- "deny.toml"
|
||||
- ".gitleaks.toml"
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- ".github/workflows/**"
|
||||
- ".github/release/**"
|
||||
- ".github/codeql/**"
|
||||
- "scripts/ci/**"
|
||||
- ".github/dependabot.yml"
|
||||
- "deny.toml"
|
||||
- ".gitleaks.toml"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
base_sha:
|
||||
description: "Optional base SHA (default: HEAD~1)"
|
||||
required: false
|
||||
default: ""
|
||||
type: string
|
||||
fail_on_policy:
|
||||
description: "Fail when audit policy violations are found"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: ci-change-audit-${{ github.event.pull_request.number || github.sha || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
audit:
|
||||
name: CI Change Audit
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Python
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 --version
|
||||
|
||||
- name: Resolve base/head commits
|
||||
id: refs
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
head_sha="$(git rev-parse HEAD)"
|
||||
if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then
|
||||
# For pull_request events, checkout uses refs/pull/*/merge; HEAD^1 is the
|
||||
# effective base commit for this synthesized merge and avoids stale base.sha.
|
||||
if git rev-parse --verify HEAD^1 >/dev/null 2>&1; then
|
||||
base_sha="$(git rev-parse HEAD^1)"
|
||||
else
|
||||
base_sha="${{ github.event.pull_request.base.sha }}"
|
||||
fi
|
||||
elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then
|
||||
base_sha="${{ github.event.before }}"
|
||||
else
|
||||
base_sha="${{ github.event.inputs.base_sha || '' }}"
|
||||
if [ -z "$base_sha" ]; then
|
||||
base_sha="$(git rev-parse HEAD~1)"
|
||||
fi
|
||||
fi
|
||||
echo "base_sha=$base_sha" >> "$GITHUB_OUTPUT"
|
||||
echo "head_sha=$head_sha" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Run CI helper script unit tests
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 -m unittest discover -s scripts/ci/tests -p 'test_*.py' -v
|
||||
|
||||
- name: Generate CI change audit
|
||||
shell: bash
|
||||
env:
|
||||
BASE_SHA: ${{ steps.refs.outputs.base_sha }}
|
||||
HEAD_SHA: ${{ steps.refs.outputs.head_sha }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
fail_on_policy="true"
|
||||
if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
fail_on_policy="${{ github.event.inputs.fail_on_policy || 'true' }}"
|
||||
fi
|
||||
cmd=(python3 scripts/ci/ci_change_audit.py
|
||||
--base-sha "$BASE_SHA"
|
||||
--head-sha "$HEAD_SHA"
|
||||
--output-json artifacts/ci-change-audit.json
|
||||
--output-md artifacts/ci-change-audit.md)
|
||||
if [ "$fail_on_policy" = "true" ]; then
|
||||
cmd+=(--fail-on-violations)
|
||||
fi
|
||||
"${cmd[@]}"
|
||||
|
||||
- name: Emit normalized audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/ci-change-audit.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type ci_change_audit \
|
||||
--input-json artifacts/ci-change-audit.json \
|
||||
--output-json artifacts/audit-event-ci-change-audit.json \
|
||||
--artifact-name ci-change-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Upload audit artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
if: always()
|
||||
with:
|
||||
name: ci-change-audit
|
||||
path: artifacts/ci-change-audit.*
|
||||
retention-days: 14
|
||||
|
||||
- name: Publish audit summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/ci-change-audit.md ]; then
|
||||
cat artifacts/ci-change-audit.md >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
echo "CI change audit report was not generated." >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Upload audit event artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
if: always()
|
||||
with:
|
||||
name: ci-change-audit-event
|
||||
path: artifacts/audit-event-ci-change-audit.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
88
.github/workflows/ci-post-release-validation.yml
vendored
Normal file
88
.github/workflows/ci-post-release-validation.yml
vendored
Normal file
@ -0,0 +1,88 @@
|
||||
---
|
||||
name: Post-Release Validation
|
||||
|
||||
on:
|
||||
release:
|
||||
types: ["published"]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
name: Validate Published Release
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Download and verify release assets
|
||||
shell: bash
|
||||
env:
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "Validating release: ${RELEASE_TAG}"
|
||||
|
||||
# 1. Check release exists and is not draft
|
||||
release_json="$(gh api \
|
||||
"repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}")"
|
||||
is_draft="$(echo "$release_json" \
|
||||
| python3 -c "import sys,json; print(json.load(sys.stdin)['draft'])")"
|
||||
if [ "$is_draft" = "True" ]; then
|
||||
echo "::warning::Release ${RELEASE_TAG} is still in draft."
|
||||
fi
|
||||
|
||||
# 2. Check expected assets against artifact contract
|
||||
asset_count="$(echo "$release_json" \
|
||||
| python3 -c "import sys,json; print(len(json.load(sys.stdin)['assets']))")"
|
||||
contract=".github/release/release-artifact-contract.json"
|
||||
expected_count="$(python3 -c "
|
||||
import json
|
||||
c = json.load(open('$contract'))
|
||||
total = sum(len(c[k]) for k in c if k != 'schema_version')
|
||||
print(total)
|
||||
")"
|
||||
echo "Release has ${asset_count} assets (contract expects ${expected_count})"
|
||||
if [ "$asset_count" -lt "$expected_count" ]; then
|
||||
echo "::error::Expected >=${expected_count} release assets (from ${contract}), found ${asset_count}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 3. Download checksum file and one archive
|
||||
gh release download "${RELEASE_TAG}" \
|
||||
--pattern "SHA256SUMS" \
|
||||
--dir /tmp/release-check
|
||||
gh release download "${RELEASE_TAG}" \
|
||||
--pattern "zeroclaw-x86_64-unknown-linux-gnu.tar.gz" \
|
||||
--dir /tmp/release-check
|
||||
|
||||
# 4. Verify checksum
|
||||
cd /tmp/release-check
|
||||
if sha256sum --check --ignore-missing SHA256SUMS; then
|
||||
echo "SHA256 checksum verification: passed"
|
||||
else
|
||||
echo "::error::SHA256 checksum verification failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 5. Extract binary
|
||||
tar xzf zeroclaw-x86_64-unknown-linux-gnu.tar.gz
|
||||
|
||||
- name: Smoke-test release binary
|
||||
shell: bash
|
||||
env:
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cd /tmp/release-check
|
||||
if ./zeroclaw --version | grep -Fq "${RELEASE_TAG#v}"; then
|
||||
echo "Binary version check: passed (${RELEASE_TAG})"
|
||||
else
|
||||
actual="$(./zeroclaw --version)"
|
||||
echo "::error::Binary --version mismatch: ${actual}"
|
||||
exit 1
|
||||
fi
|
||||
echo "Post-release validation: all checks passed"
|
||||
112
.github/workflows/ci-provider-connectivity.yml
vendored
Normal file
112
.github/workflows/ci-provider-connectivity.yml
vendored
Normal file
@ -0,0 +1,112 @@
|
||||
name: CI Provider Connectivity
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "30 */6 * * *" # Every 6 hours
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
fail_on_critical:
|
||||
description: "Fail run when critical endpoints are unreachable"
|
||||
required: true
|
||||
default: false
|
||||
type: boolean
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- ".github/workflows/ci-provider-connectivity.yml"
|
||||
- ".github/connectivity/providers.json"
|
||||
- "scripts/ci/provider_connectivity_matrix.py"
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- ".github/workflows/ci-provider-connectivity.yml"
|
||||
- ".github/connectivity/providers.json"
|
||||
- "scripts/ci/provider_connectivity_matrix.py"
|
||||
|
||||
concurrency:
|
||||
group: provider-connectivity-${{ github.event.pull_request.number || github.ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
probe:
|
||||
name: Provider Connectivity Probe
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Run connectivity matrix probe
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
fail_on_critical="false"
|
||||
case "${GITHUB_EVENT_NAME}" in
|
||||
schedule)
|
||||
fail_on_critical="true"
|
||||
;;
|
||||
workflow_dispatch)
|
||||
fail_on_critical="${{ github.event.inputs.fail_on_critical || 'false' }}"
|
||||
;;
|
||||
esac
|
||||
|
||||
cmd=(python3 scripts/ci/provider_connectivity_matrix.py
|
||||
--config .github/connectivity/providers.json
|
||||
--output-json artifacts/provider-connectivity-matrix.json
|
||||
--output-md artifacts/provider-connectivity-matrix.md)
|
||||
if [ "$fail_on_critical" = "true" ]; then
|
||||
cmd+=(--fail-on-critical)
|
||||
fi
|
||||
"${cmd[@]}"
|
||||
|
||||
- name: Emit normalized audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/provider-connectivity-matrix.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type provider_connectivity \
|
||||
--input-json artifacts/provider-connectivity-matrix.json \
|
||||
--output-json artifacts/audit-event-provider-connectivity.json \
|
||||
--artifact-name provider-connectivity-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Upload connectivity artifacts
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
if: always()
|
||||
with:
|
||||
name: provider-connectivity-matrix
|
||||
path: artifacts/provider-connectivity-matrix.*
|
||||
retention-days: 14
|
||||
|
||||
- name: Publish summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/provider-connectivity-matrix.md ]; then
|
||||
cat artifacts/provider-connectivity-matrix.md >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
echo "Provider connectivity report missing." >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Upload audit event artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: provider-connectivity-audit-event
|
||||
path: artifacts/audit-event-provider-connectivity.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
152
.github/workflows/ci-queue-hygiene.yml
vendored
Normal file
152
.github/workflows/ci-queue-hygiene.yml
vendored
Normal file
@ -0,0 +1,152 @@
|
||||
name: CI Queue Hygiene
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "*/5 * * * *"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
apply:
|
||||
description: "Cancel selected queued runs (false = dry-run report only)"
|
||||
required: true
|
||||
default: false
|
||||
type: boolean
|
||||
status:
|
||||
description: "Queued-run status scope"
|
||||
required: true
|
||||
default: queued
|
||||
type: choice
|
||||
options:
|
||||
- queued
|
||||
- in_progress
|
||||
- requested
|
||||
- waiting
|
||||
max_cancel:
|
||||
description: "Maximum runs to cancel in one execution"
|
||||
required: true
|
||||
default: "120"
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: ci-queue-hygiene
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
actions: write
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
jobs:
|
||||
hygiene:
|
||||
name: Queue Hygiene
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Run queue hygiene policy
|
||||
id: hygiene
|
||||
shell: bash
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
|
||||
status_scope="queued"
|
||||
max_cancel="120"
|
||||
apply_mode="true"
|
||||
if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
status_scope="${{ github.event.inputs.status || 'queued' }}"
|
||||
max_cancel="${{ github.event.inputs.max_cancel || '120' }}"
|
||||
apply_mode="${{ github.event.inputs.apply || 'false' }}"
|
||||
fi
|
||||
|
||||
cmd=(python3 scripts/ci/queue_hygiene.py
|
||||
--repo "${{ github.repository }}"
|
||||
--status "${status_scope}"
|
||||
--max-cancel "${max_cancel}"
|
||||
--dedupe-workflow "CI Run"
|
||||
--dedupe-workflow "Test E2E"
|
||||
--dedupe-workflow "Docs Deploy"
|
||||
--dedupe-workflow "PR Intake Checks"
|
||||
--dedupe-workflow "PR Labeler"
|
||||
--dedupe-workflow "PR Auto Responder"
|
||||
--dedupe-workflow "Workflow Sanity"
|
||||
--dedupe-workflow "PR Label Policy Check"
|
||||
--priority-branch-prefix "release/"
|
||||
--dedupe-include-non-pr
|
||||
--non-pr-key branch
|
||||
--output-json artifacts/queue-hygiene-report.json
|
||||
--verbose)
|
||||
|
||||
if [ "${apply_mode}" = "true" ]; then
|
||||
cmd+=(--apply)
|
||||
fi
|
||||
|
||||
"${cmd[@]}"
|
||||
|
||||
{
|
||||
echo "status_scope=${status_scope}"
|
||||
echo "max_cancel=${max_cancel}"
|
||||
echo "apply_mode=${apply_mode}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Publish queue hygiene summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ ! -f artifacts/queue-hygiene-report.json ]; then
|
||||
echo "Queue hygiene report not found." >> "$GITHUB_STEP_SUMMARY"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
python3 - <<'PY'
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
report_path = Path("artifacts/queue-hygiene-report.json")
|
||||
report = json.loads(report_path.read_text(encoding="utf-8"))
|
||||
counts = report.get("counts", {})
|
||||
results = report.get("results", {})
|
||||
reasons = report.get("reason_counts", {})
|
||||
|
||||
lines = [
|
||||
"### Queue Hygiene Report",
|
||||
f"- Mode: `{report.get('mode', 'unknown')}`",
|
||||
f"- Status scope: `{report.get('status_scope', 'queued')}`",
|
||||
f"- Runs in scope: `{counts.get('runs_in_scope', 0)}`",
|
||||
f"- Candidate runs before cap: `{counts.get('candidate_runs_before_cap', 0)}`",
|
||||
f"- Candidate runs after cap: `{counts.get('candidate_runs_after_cap', 0)}`",
|
||||
f"- Skipped by cap: `{counts.get('skipped_by_cap', 0)}`",
|
||||
f"- Canceled: `{results.get('canceled', 0)}`",
|
||||
f"- Cancel skipped (already terminal/conflict): `{results.get('skipped', 0)}`",
|
||||
f"- Cancel failed: `{results.get('failed', 0)}`",
|
||||
]
|
||||
if reasons:
|
||||
lines.append("")
|
||||
lines.append("Reason counts:")
|
||||
for reason, value in sorted(reasons.items()):
|
||||
lines.append(f"- `{reason}`: `{value}`")
|
||||
|
||||
with Path("/tmp/queue-hygiene-summary.md").open("w", encoding="utf-8") as handle:
|
||||
handle.write("\n".join(lines) + "\n")
|
||||
PY
|
||||
|
||||
cat /tmp/queue-hygiene-summary.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload queue hygiene report
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: queue-hygiene-report
|
||||
path: artifacts/queue-hygiene-report.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
149
.github/workflows/ci-reproducible-build.yml
vendored
Normal file
149
.github/workflows/ci-reproducible-build.yml
vendored
Normal file
@ -0,0 +1,149 @@
|
||||
name: CI Reproducible Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "scripts/ci/ensure_c_toolchain.sh"
|
||||
- "scripts/ci/ensure_cargo_component.sh"
|
||||
- "scripts/ci/ensure_cc.sh"
|
||||
- "scripts/ci/reproducible_build_check.sh"
|
||||
- "scripts/ci/self_heal_rust_toolchain.sh"
|
||||
- ".github/workflows/ci-reproducible-build.yml"
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "scripts/ci/ensure_c_toolchain.sh"
|
||||
- "scripts/ci/ensure_cargo_component.sh"
|
||||
- "scripts/ci/ensure_cc.sh"
|
||||
- "scripts/ci/reproducible_build_check.sh"
|
||||
- "scripts/ci/self_heal_rust_toolchain.sh"
|
||||
- ".github/workflows/ci-reproducible-build.yml"
|
||||
schedule:
|
||||
- cron: "45 5 * * 1" # Weekly Monday 05:45 UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
fail_on_drift:
|
||||
description: "Fail workflow if deterministic hash drift is detected"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
allow_build_id_drift:
|
||||
description: "Treat GNU build-id-only drift as non-blocking"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: repro-build-${{ github.event.pull_request.number || github.ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
reproducibility:
|
||||
name: Reproducible Build Probe
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 75
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- name: Run reproducible build check
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
fail_on_drift="false"
|
||||
allow_build_id_drift="true"
|
||||
if [ "${GITHUB_EVENT_NAME}" = "schedule" ]; then
|
||||
fail_on_drift="true"
|
||||
elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
fail_on_drift="${{ github.event.inputs.fail_on_drift || 'true' }}"
|
||||
allow_build_id_drift="${{ github.event.inputs.allow_build_id_drift || 'true' }}"
|
||||
fi
|
||||
FAIL_ON_DRIFT="$fail_on_drift" \
|
||||
ALLOW_BUILD_ID_DRIFT="$allow_build_id_drift" \
|
||||
OUTPUT_DIR="artifacts" \
|
||||
./scripts/ci/reproducible_build_check.sh
|
||||
|
||||
- name: Emit normalized audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/reproducible-build.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type reproducible_build \
|
||||
--input-json artifacts/reproducible-build.json \
|
||||
--output-json artifacts/audit-event-reproducible-build.json \
|
||||
--artifact-name reproducible-build-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Upload reproducibility artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: reproducible-build
|
||||
path: artifacts/reproducible-build*
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload audit event artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: reproducible-build-audit-event
|
||||
path: artifacts/audit-event-reproducible-build.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Publish summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/reproducible-build.md ]; then
|
||||
cat artifacts/reproducible-build.md >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
echo "Reproducible build report missing." >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
258
.github/workflows/ci-rollback.yml
vendored
Normal file
258
.github/workflows/ci-rollback.yml
vendored
Normal file
@ -0,0 +1,258 @@
|
||||
name: CI Rollback Guard
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: "Integration branch this rollback targets"
|
||||
required: true
|
||||
default: dev
|
||||
type: choice
|
||||
options:
|
||||
- dev
|
||||
- main
|
||||
mode:
|
||||
description: "dry-run only plans; execute enables rollback marker/dispatch actions"
|
||||
required: true
|
||||
default: dry-run
|
||||
type: choice
|
||||
options:
|
||||
- dry-run
|
||||
- execute
|
||||
target_ref:
|
||||
description: "Optional explicit rollback target (tag/sha/ref). Empty = latest matching tag."
|
||||
required: false
|
||||
default: ""
|
||||
type: string
|
||||
allow_non_ancestor:
|
||||
description: "Allow target not being ancestor of current head (warning-only)"
|
||||
required: true
|
||||
default: false
|
||||
type: boolean
|
||||
fail_on_violation:
|
||||
description: "Fail workflow when guard violations are detected"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
create_marker_tag:
|
||||
description: "In execute mode, create and push rollback marker tag"
|
||||
required: true
|
||||
default: false
|
||||
type: boolean
|
||||
emit_repository_dispatch:
|
||||
description: "In execute mode, emit repository_dispatch event `rollback_execute`"
|
||||
required: true
|
||||
default: false
|
||||
type: boolean
|
||||
schedule:
|
||||
- cron: "15 7 * * 1" # Weekly Monday 07:15 UTC
|
||||
|
||||
concurrency:
|
||||
group: ci-rollback-${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.branch || 'dev') || github.ref_name }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
rollback-plan:
|
||||
name: Rollback Guard Plan
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
outputs:
|
||||
branch: ${{ steps.plan.outputs.branch }}
|
||||
mode: ${{ steps.plan.outputs.mode }}
|
||||
target_sha: ${{ steps.plan.outputs.target_sha }}
|
||||
target_ref: ${{ steps.plan.outputs.target_ref }}
|
||||
ready_to_execute: ${{ steps.plan.outputs.ready_to_execute }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'workflow_dispatch' && (github.event.inputs.branch || 'dev') || github.ref_name }}
|
||||
|
||||
- name: Build rollback plan
|
||||
id: plan
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
|
||||
branch_input="${GITHUB_REF_NAME}"
|
||||
mode_input="dry-run"
|
||||
target_ref_input=""
|
||||
allow_non_ancestor="false"
|
||||
# Scheduled audits can surface historical rollback violations; report without blocking by default.
|
||||
fail_on_violation="false"
|
||||
|
||||
if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
branch_input="${{ github.event.inputs.branch || 'dev' }}"
|
||||
mode_input="${{ github.event.inputs.mode || 'dry-run' }}"
|
||||
target_ref_input="${{ github.event.inputs.target_ref || '' }}"
|
||||
allow_non_ancestor="${{ github.event.inputs.allow_non_ancestor || 'false' }}"
|
||||
fail_on_violation="${{ github.event.inputs.fail_on_violation || 'true' }}"
|
||||
fi
|
||||
|
||||
cmd=(python3 scripts/ci/rollback_guard.py
|
||||
--repo-root .
|
||||
--branch "$branch_input"
|
||||
--mode "$mode_input"
|
||||
--strategy latest-release-tag
|
||||
--tag-pattern "v*"
|
||||
--output-json artifacts/rollback-plan.json
|
||||
--output-md artifacts/rollback-plan.md)
|
||||
|
||||
if [ -n "$target_ref_input" ]; then
|
||||
cmd+=(--target-ref "$target_ref_input")
|
||||
fi
|
||||
if [ "$allow_non_ancestor" = "true" ]; then
|
||||
cmd+=(--allow-non-ancestor)
|
||||
fi
|
||||
if [ "$fail_on_violation" = "true" ]; then
|
||||
cmd+=(--fail-on-violation)
|
||||
fi
|
||||
|
||||
"${cmd[@]}"
|
||||
|
||||
target_sha="$(python3 - <<'PY'
|
||||
import json
|
||||
d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8"))
|
||||
print(d.get("target_sha", ""))
|
||||
PY
|
||||
)"
|
||||
target_ref="$(python3 - <<'PY'
|
||||
import json
|
||||
d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8"))
|
||||
print(d.get("target_ref", ""))
|
||||
PY
|
||||
)"
|
||||
ready_to_execute="$(python3 - <<'PY'
|
||||
import json
|
||||
d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8"))
|
||||
print(str(d.get("ready_to_execute", False)).lower())
|
||||
PY
|
||||
)"
|
||||
|
||||
{
|
||||
echo "branch=$branch_input"
|
||||
echo "mode=$mode_input"
|
||||
echo "target_sha=$target_sha"
|
||||
echo "target_ref=$target_ref"
|
||||
echo "ready_to_execute=$ready_to_execute"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Emit rollback audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/rollback-plan.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type rollback_guard \
|
||||
--input-json artifacts/rollback-plan.json \
|
||||
--output-json artifacts/audit-event-rollback-guard.json \
|
||||
--artifact-name ci-rollback-plan \
|
||||
--retention-days 21
|
||||
fi
|
||||
|
||||
- name: Upload rollback artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: ci-rollback-plan
|
||||
path: |
|
||||
artifacts/rollback-plan.*
|
||||
artifacts/audit-event-rollback-guard.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 21
|
||||
|
||||
- name: Publish rollback summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/rollback-plan.md ]; then
|
||||
cat artifacts/rollback-plan.md >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
echo "Rollback plan markdown report missing." >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
rollback-execute:
|
||||
name: Rollback Execute Actions
|
||||
needs: [rollback-plan]
|
||||
if: github.event_name == 'workflow_dispatch' && needs.rollback-plan.outputs.mode == 'execute' && needs.rollback-plan.outputs.ready_to_execute == 'true'
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: write
|
||||
actions: read
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ needs.rollback-plan.outputs.branch }}
|
||||
|
||||
- name: Fetch tags
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git fetch --tags --force origin
|
||||
|
||||
- name: Create rollback marker tag
|
||||
id: marker
|
||||
if: github.event.inputs.create_marker_tag == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
target_sha="${{ needs.rollback-plan.outputs.target_sha }}"
|
||||
if [ -z "$target_sha" ]; then
|
||||
echo "Rollback guard did not resolve target_sha."
|
||||
exit 1
|
||||
fi
|
||||
marker_tag="rollback-${{ needs.rollback-plan.outputs.branch }}-${{ github.run_id }}"
|
||||
git tag -a "$marker_tag" "$target_sha" -m "Rollback marker from run ${{ github.run_id }}"
|
||||
git push origin "$marker_tag"
|
||||
echo "marker_tag=$marker_tag" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Emit rollback repository dispatch
|
||||
if: github.event.inputs.emit_repository_dispatch == 'true'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
script: |
|
||||
await github.rest.repos.createDispatchEvent({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
event_type: "rollback_execute",
|
||||
client_payload: {
|
||||
branch: "${{ needs.rollback-plan.outputs.branch }}",
|
||||
target_ref: "${{ needs.rollback-plan.outputs.target_ref }}",
|
||||
target_sha: "${{ needs.rollback-plan.outputs.target_sha }}",
|
||||
run_id: context.runId,
|
||||
run_attempt: process.env.GITHUB_RUN_ATTEMPT,
|
||||
source_sha: context.sha
|
||||
}
|
||||
});
|
||||
|
||||
- name: Publish execute summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
{
|
||||
echo "### Rollback Execute Actions"
|
||||
echo "- Branch: \`${{ needs.rollback-plan.outputs.branch }}\`"
|
||||
echo "- Target ref: \`${{ needs.rollback-plan.outputs.target_ref }}\`"
|
||||
echo "- Target sha: \`${{ needs.rollback-plan.outputs.target_sha }}\`"
|
||||
if [ -n "${{ steps.marker.outputs.marker_tag || '' }}" ]; then
|
||||
echo "- Marker tag: \`${{ steps.marker.outputs.marker_tag }}\`"
|
||||
fi
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
340
.github/workflows/ci-run.yml
vendored
340
.github/workflows/ci-run.yml
vendored
@ -5,26 +5,32 @@ on:
|
||||
branches: [dev, main]
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
merge_group:
|
||||
branches: [dev, main]
|
||||
|
||||
concurrency:
|
||||
group: ci-${{ github.event.pull_request.number || github.sha }}
|
||||
group: ci-run-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref_name || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
name: Detect Change Scope
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
outputs:
|
||||
docs_only: ${{ steps.scope.outputs.docs_only }}
|
||||
docs_changed: ${{ steps.scope.outputs.docs_changed }}
|
||||
rust_changed: ${{ steps.scope.outputs.rust_changed }}
|
||||
workflow_changed: ${{ steps.scope.outputs.workflow_changed }}
|
||||
ci_cd_changed: ${{ steps.scope.outputs.ci_cd_changed }}
|
||||
docs_files: ${{ steps.scope.outputs.docs_files }}
|
||||
base_sha: ${{ steps.scope.outputs.base_sha }}
|
||||
steps:
|
||||
@ -37,24 +43,42 @@ jobs:
|
||||
shell: bash
|
||||
env:
|
||||
EVENT_NAME: ${{ github.event_name }}
|
||||
BASE_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
|
||||
BASE_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event_name == 'merge_group' && github.event.merge_group.base_sha || github.event.before }}
|
||||
run: ./scripts/ci/detect_change_scope.sh
|
||||
|
||||
lint:
|
||||
name: Lint Gate (Format + Clippy + Strict Delta)
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.rust_changed == 'true'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 25
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 75
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
components: rustfmt, clippy
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: ci-run-check
|
||||
cache-bin: false
|
||||
- name: Run rust quality gate
|
||||
run: ./scripts/ci/rust_quality_gate.sh
|
||||
- name: Run strict lint delta gate
|
||||
@ -62,44 +86,194 @@ jobs:
|
||||
BASE_SHA: ${{ needs.changes.outputs.base_sha }}
|
||||
run: ./scripts/ci/rust_strict_delta_gate.sh
|
||||
|
||||
test:
|
||||
name: Test
|
||||
needs: [changes, lint]
|
||||
if: needs.changes.outputs.rust_changed == 'true' && needs.lint.result == 'success'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
workspace-check:
|
||||
name: Workspace Check
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.rust_changed == 'true'
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 45
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
- name: Run tests
|
||||
run: cargo test --locked --verbose
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: ci-run-workspace-check
|
||||
cache-bin: false
|
||||
- name: Check workspace
|
||||
run: cargo check --workspace --locked
|
||||
|
||||
package-check:
|
||||
name: Package Check (${{ matrix.package }})
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.rust_changed == 'true'
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 25
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
package: [zeroclaw-types, zeroclaw-core]
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: ci-run-package-check
|
||||
cache-bin: false
|
||||
- name: Check package
|
||||
run: cargo check -p ${{ matrix.package }} --locked
|
||||
|
||||
test:
|
||||
name: Test
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.rust_changed == 'true'
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 120
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: ci-run-check
|
||||
cache-bin: false
|
||||
- name: Run tests with flake detection
|
||||
shell: bash
|
||||
env:
|
||||
BLOCK_ON_FLAKE: ${{ vars.CI_BLOCK_ON_FLAKE_SUSPECTED || 'false' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
|
||||
toolchain_bin=""
|
||||
if [ -n "${CARGO:-}" ]; then
|
||||
toolchain_bin="$(dirname "${CARGO}")"
|
||||
elif [ -n "${RUSTC:-}" ]; then
|
||||
toolchain_bin="$(dirname "${RUSTC}")"
|
||||
fi
|
||||
|
||||
if [ -n "${toolchain_bin}" ] && [ -d "${toolchain_bin}" ]; then
|
||||
case ":$PATH:" in
|
||||
*":${toolchain_bin}:"*) ;;
|
||||
*) export PATH="${toolchain_bin}:$PATH" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
if cargo test --locked --verbose; then
|
||||
echo '{"flake_suspected":false,"status":"success"}' > artifacts/flake-probe.json
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "::warning::First test run failed. Retrying for flake detection..."
|
||||
if cargo test --locked --verbose; then
|
||||
echo '{"flake_suspected":true,"status":"flake"}' > artifacts/flake-probe.json
|
||||
echo "::warning::Flake suspected — test passed on retry"
|
||||
if [ "${BLOCK_ON_FLAKE}" = "true" ]; then
|
||||
echo "BLOCK_ON_FLAKE is set; failing on suspected flake."
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo '{"flake_suspected":false,"status":"failure"}' > artifacts/flake-probe.json
|
||||
exit 1
|
||||
- name: Publish flake probe summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/flake-probe.json ]; then
|
||||
status=$(python3 -c "import json; print(json.load(open('artifacts/flake-probe.json'))['status'])")
|
||||
flake=$(python3 -c "import json; print(json.load(open('artifacts/flake-probe.json'))['flake_suspected'])")
|
||||
{
|
||||
echo "### Test Flake Probe"
|
||||
echo "- Status: \`${status}\`"
|
||||
echo "- Flake suspected: \`${flake}\`"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
- name: Upload flake probe artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: test-flake-probe
|
||||
path: artifacts/flake-probe.*
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
build:
|
||||
name: Build (Smoke)
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.rust_changed == 'true'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 20
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 90
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: ci-run-build
|
||||
cache-targets: true
|
||||
cache-bin: false
|
||||
- name: Build binary (smoke check)
|
||||
run: cargo build --profile release-fast --locked --verbose
|
||||
env:
|
||||
CARGO_BUILD_JOBS: 2
|
||||
CI_SMOKE_BUILD_ATTEMPTS: 3
|
||||
run: bash scripts/ci/smoke_build_retry.sh
|
||||
- name: Check binary size
|
||||
env:
|
||||
BINARY_SIZE_HARD_LIMIT_MB: 28
|
||||
BINARY_SIZE_ADVISORY_MB: 20
|
||||
BINARY_SIZE_TARGET_MB: 5
|
||||
run: bash scripts/ci/check_binary_size.sh target/release-fast/zeroclaw
|
||||
|
||||
docs-only:
|
||||
name: Docs-Only Fast Path
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.docs_only == 'true'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
steps:
|
||||
- name: Skip heavy jobs for docs-only change
|
||||
run: echo "Docs-only change detected. Rust lint/test/build skipped."
|
||||
@ -108,7 +282,7 @@ jobs:
|
||||
name: Non-Rust Fast Path
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.docs_only != 'true' && needs.changes.outputs.rust_changed != 'true'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
steps:
|
||||
- name: Skip Rust jobs for non-Rust change scope
|
||||
run: echo "No Rust-impacting files changed. Rust lint/test/build skipped."
|
||||
@ -117,12 +291,16 @@ jobs:
|
||||
name: Docs Quality
|
||||
needs: [changes]
|
||||
if: needs.changes.outputs.docs_changed == 'true'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup Node.js for markdown lint
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Markdown lint (changed lines only)
|
||||
env:
|
||||
@ -153,7 +331,7 @@ jobs:
|
||||
|
||||
- name: Link check (offline, added links only)
|
||||
if: steps.collect_links.outputs.count != '0'
|
||||
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
|
||||
uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 # v2
|
||||
with:
|
||||
fail: true
|
||||
args: >-
|
||||
@ -172,7 +350,7 @@ jobs:
|
||||
name: Lint Feedback
|
||||
if: github.event_name == 'pull_request'
|
||||
needs: [changes, lint, docs-quality]
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
@ -194,32 +372,11 @@ jobs:
|
||||
const script = require('./.github/workflows/scripts/lint_feedback.js');
|
||||
await script({github, context, core});
|
||||
|
||||
workflow-owner-approval:
|
||||
name: Workflow Owner Approval
|
||||
needs: [changes]
|
||||
if: github.event_name == 'pull_request' && needs.changes.outputs.workflow_changed == 'true'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Require owner approval for workflow file changes
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
env:
|
||||
WORKFLOW_OWNER_LOGINS: ${{ vars.WORKFLOW_OWNER_LOGINS }}
|
||||
with:
|
||||
script: |
|
||||
const script = require('./.github/workflows/scripts/ci_workflow_owner_approval.js');
|
||||
await script({ github, context, core });
|
||||
|
||||
license-file-owner-guard:
|
||||
name: License File Owner Guard
|
||||
needs: [changes]
|
||||
if: github.event_name == 'pull_request'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
@ -236,8 +393,8 @@ jobs:
|
||||
ci-required:
|
||||
name: CI Required Gate
|
||||
if: always()
|
||||
needs: [changes, lint, test, build, docs-only, non-rust, docs-quality, lint-feedback, workflow-owner-approval, license-file-owner-guard]
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
needs: [changes, lint, workspace-check, package-check, test, build, docs-only, non-rust, docs-quality, lint-feedback, license-file-owner-guard]
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
steps:
|
||||
- name: Enforce required status
|
||||
shell: bash
|
||||
@ -247,94 +404,63 @@ jobs:
|
||||
event_name="${{ github.event_name }}"
|
||||
rust_changed="${{ needs.changes.outputs.rust_changed }}"
|
||||
docs_changed="${{ needs.changes.outputs.docs_changed }}"
|
||||
workflow_changed="${{ needs.changes.outputs.workflow_changed }}"
|
||||
docs_result="${{ needs.docs-quality.result }}"
|
||||
workflow_owner_result="${{ needs.workflow-owner-approval.result }}"
|
||||
license_owner_result="${{ needs.license-file-owner-guard.result }}"
|
||||
|
||||
if [ "${{ needs.changes.outputs.docs_only }}" = "true" ]; then
|
||||
echo "workflow_owner_approval=${workflow_owner_result}"
|
||||
echo "license_file_owner_guard=${license_owner_result}"
|
||||
if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then
|
||||
echo "Workflow files changed but workflow owner approval gate did not pass."
|
||||
exit 1
|
||||
fi
|
||||
if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then
|
||||
# --- Helper: enforce PR governance gates ---
|
||||
check_pr_governance() {
|
||||
if [ "$event_name" != "pull_request" ]; then return 0; fi
|
||||
if [ "$license_owner_result" != "success" ]; then
|
||||
echo "License file owner guard did not pass."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
check_docs_quality() {
|
||||
if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then
|
||||
echo "Docs-only change detected, but docs-quality did not pass."
|
||||
echo "Docs changed but docs-quality did not pass."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# --- Docs-only fast path ---
|
||||
if [ "${{ needs.changes.outputs.docs_only }}" = "true" ]; then
|
||||
check_pr_governance
|
||||
check_docs_quality
|
||||
echo "Docs-only fast path passed."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# --- Non-rust fast path ---
|
||||
if [ "$rust_changed" != "true" ]; then
|
||||
echo "rust_changed=false (non-rust fast path)"
|
||||
echo "workflow_owner_approval=${workflow_owner_result}"
|
||||
echo "license_file_owner_guard=${license_owner_result}"
|
||||
if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then
|
||||
echo "Workflow files changed but workflow owner approval gate did not pass."
|
||||
exit 1
|
||||
fi
|
||||
if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then
|
||||
echo "License file owner guard did not pass."
|
||||
exit 1
|
||||
fi
|
||||
if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then
|
||||
echo "Non-rust change touched docs, but docs-quality did not pass."
|
||||
exit 1
|
||||
fi
|
||||
check_pr_governance
|
||||
check_docs_quality
|
||||
echo "Non-rust fast path passed."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# --- Rust change path ---
|
||||
lint_result="${{ needs.lint.result }}"
|
||||
lint_strict_delta_result="${{ needs.lint.result }}"
|
||||
workspace_check_result="${{ needs.workspace-check.result }}"
|
||||
package_check_result="${{ needs.package-check.result }}"
|
||||
test_result="${{ needs.test.result }}"
|
||||
build_result="${{ needs.build.result }}"
|
||||
|
||||
echo "lint=${lint_result}"
|
||||
echo "lint_strict_delta=${lint_strict_delta_result}"
|
||||
echo "workspace-check=${workspace_check_result}"
|
||||
echo "package-check=${package_check_result}"
|
||||
echo "test=${test_result}"
|
||||
echo "build=${build_result}"
|
||||
echo "docs=${docs_result}"
|
||||
echo "workflow_owner_approval=${workflow_owner_result}"
|
||||
echo "license_file_owner_guard=${license_owner_result}"
|
||||
|
||||
if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then
|
||||
echo "Workflow files changed but workflow owner approval gate did not pass."
|
||||
check_pr_governance
|
||||
|
||||
if [ "$lint_result" != "success" ] || [ "$workspace_check_result" != "success" ] || [ "$package_check_result" != "success" ] || [ "$test_result" != "success" ] || [ "$build_result" != "success" ]; then
|
||||
echo "Required CI jobs did not pass: lint=${lint_result} workspace-check=${workspace_check_result} package-check=${package_check_result} test=${test_result} build=${build_result}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then
|
||||
echo "License file owner guard did not pass."
|
||||
exit 1
|
||||
fi
|
||||
check_docs_quality
|
||||
|
||||
if [ "$event_name" = "pull_request" ]; then
|
||||
if [ "$lint_result" != "success" ] || [ "$lint_strict_delta_result" != "success" ] || [ "$test_result" != "success" ] || [ "$build_result" != "success" ]; then
|
||||
echo "Required PR CI jobs did not pass."
|
||||
exit 1
|
||||
fi
|
||||
if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then
|
||||
echo "PR changed docs, but docs-quality did not pass."
|
||||
exit 1
|
||||
fi
|
||||
echo "PR required checks passed."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ "$lint_result" != "success" ] || [ "$lint_strict_delta_result" != "success" ] || [ "$test_result" != "success" ] || [ "$build_result" != "success" ]; then
|
||||
echo "Required push CI jobs did not pass."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then
|
||||
echo "Push changed docs, but docs-quality did not pass."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Push required checks passed."
|
||||
echo "All required checks passed."
|
||||
|
||||
150
.github/workflows/ci-supply-chain-provenance.yml
vendored
Normal file
150
.github/workflows/ci-supply-chain-provenance.yml
vendored
Normal file
@ -0,0 +1,150 @@
|
||||
name: CI Supply Chain Provenance
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "scripts/ci/ensure_cc.sh"
|
||||
- "scripts/ci/generate_provenance.py"
|
||||
- ".github/workflows/ci-supply-chain-provenance.yml"
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "20 6 * * 1" # Weekly Monday 06:20 UTC
|
||||
|
||||
concurrency:
|
||||
group: supply-chain-provenance-${{ github.ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
provenance:
|
||||
name: Build + Provenance Bundle
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- name: Activate toolchain binaries on PATH
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
toolchain_bin="$(dirname "$(rustup which --toolchain 1.92.0 cargo)")"
|
||||
echo "$toolchain_bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Resolve host target
|
||||
id: rust-meta
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
host_target="$(rustup run 1.92.0 rustc -vV | sed -n 's/^host: //p')"
|
||||
if [ -z "${host_target}" ]; then
|
||||
echo "::error::Unable to resolve Rust host target."
|
||||
exit 1
|
||||
fi
|
||||
echo "host_target=${host_target}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Runner preflight (compiler + disk)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
./scripts/ci/ensure_cc.sh
|
||||
echo "Runner: ${RUNNER_NAME:-unknown} (${RUNNER_OS:-unknown}/${RUNNER_ARCH:-unknown})"
|
||||
free_kb="$(df -Pk . | awk 'NR==2 {print $4}')"
|
||||
min_kb=$((10 * 1024 * 1024))
|
||||
if [ "${free_kb}" -lt "${min_kb}" ]; then
|
||||
echo "::error::Insufficient disk space on runner (<10 GiB free)."
|
||||
df -h .
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Build release-fast artifact
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
host_target="${{ steps.rust-meta.outputs.host_target }}"
|
||||
cargo build --profile release-fast --locked --target "$host_target"
|
||||
cp "target/${host_target}/release-fast/zeroclaw" "artifacts/zeroclaw-${host_target}"
|
||||
sha256sum "artifacts/zeroclaw-${host_target}" > "artifacts/zeroclaw-${host_target}.sha256"
|
||||
|
||||
- name: Generate provenance statement
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
host_target="${{ steps.rust-meta.outputs.host_target }}"
|
||||
python3 scripts/ci/generate_provenance.py \
|
||||
--artifact "artifacts/zeroclaw-${host_target}" \
|
||||
--subject-name "zeroclaw-${host_target}" \
|
||||
--output "artifacts/provenance-${host_target}.intoto.json"
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
|
||||
- name: Sign provenance bundle
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
host_target="${{ steps.rust-meta.outputs.host_target }}"
|
||||
statement="artifacts/provenance-${host_target}.intoto.json"
|
||||
cosign sign-blob --yes \
|
||||
--bundle="${statement}.sigstore.json" \
|
||||
--output-signature="${statement}.sig" \
|
||||
--output-certificate="${statement}.pem" \
|
||||
"${statement}"
|
||||
|
||||
- name: Emit normalized audit event
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
host_target="${{ steps.rust-meta.outputs.host_target }}"
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type supply_chain_provenance \
|
||||
--input-json "artifacts/provenance-${host_target}.intoto.json" \
|
||||
--output-json "artifacts/audit-event-supply-chain-provenance.json" \
|
||||
--artifact-name supply-chain-provenance \
|
||||
--retention-days 30
|
||||
|
||||
- name: Upload provenance artifacts
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: supply-chain-provenance
|
||||
path: artifacts/*
|
||||
retention-days: 30
|
||||
|
||||
- name: Publish summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
host_target="${{ steps.rust-meta.outputs.host_target }}"
|
||||
{
|
||||
echo "### Supply Chain Provenance"
|
||||
echo "- Target: \`${host_target}\`"
|
||||
echo "- Artifact: \`artifacts/zeroclaw-${host_target}\`"
|
||||
echo "- Statement: \`artifacts/provenance-${host_target}.intoto.json\`"
|
||||
echo "- Signature: \`artifacts/provenance-${host_target}.intoto.json.sig\`"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
56
.github/workflows/deploy-web.yml
vendored
Normal file
56
.github/workflows/deploy-web.yml
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
name: Deploy Web to GitHub Pages
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'web/**'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pages: write
|
||||
id-token: write
|
||||
|
||||
concurrency:
|
||||
group: "pages"
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: ./web
|
||||
run: npm ci
|
||||
|
||||
- name: Build
|
||||
working-directory: ./web
|
||||
run: npm run build
|
||||
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4
|
||||
with:
|
||||
path: ./web/dist
|
||||
|
||||
deploy:
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
needs: build
|
||||
steps:
|
||||
- name: Deploy to GitHub Pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4
|
||||
301
.github/workflows/docs-deploy.yml
vendored
Normal file
301
.github/workflows/docs-deploy.yml
vendored
Normal file
@ -0,0 +1,301 @@
|
||||
name: Docs Deploy
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "docs/**"
|
||||
- "README*.md"
|
||||
- ".github/workflows/docs-deploy.yml"
|
||||
- "scripts/ci/docs_quality_gate.sh"
|
||||
- "scripts/ci/collect_changed_links.py"
|
||||
- ".github/release/docs-deploy-policy.json"
|
||||
- "scripts/ci/docs_deploy_guard.py"
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "docs/**"
|
||||
- "README*.md"
|
||||
- ".github/workflows/docs-deploy.yml"
|
||||
- "scripts/ci/docs_quality_gate.sh"
|
||||
- "scripts/ci/collect_changed_links.py"
|
||||
- ".github/release/docs-deploy-policy.json"
|
||||
- "scripts/ci/docs_deploy_guard.py"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
deploy_target:
|
||||
description: "preview uploads artifact only; production deploys to Pages"
|
||||
required: true
|
||||
default: preview
|
||||
type: choice
|
||||
options:
|
||||
- preview
|
||||
- production
|
||||
preview_evidence_run_url:
|
||||
description: "Required for manual production deploys when policy enforces preview promotion evidence"
|
||||
required: false
|
||||
default: ""
|
||||
rollback_ref:
|
||||
description: "Optional rollback source ref (tag/sha/ref) for manual production dispatch"
|
||||
required: false
|
||||
default: ""
|
||||
|
||||
concurrency:
|
||||
group: docs-deploy-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref_name || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
docs-quality:
|
||||
name: Docs Quality Gate
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
outputs:
|
||||
docs_files: ${{ steps.scope.outputs.docs_files }}
|
||||
base_sha: ${{ steps.scope.outputs.base_sha }}
|
||||
deploy_target: ${{ steps.deploy_guard.outputs.deploy_target }}
|
||||
deploy_mode: ${{ steps.deploy_guard.outputs.deploy_mode }}
|
||||
source_ref: ${{ steps.deploy_guard.outputs.source_ref }}
|
||||
production_branch_ref: ${{ steps.deploy_guard.outputs.production_branch_ref }}
|
||||
ready_to_deploy: ${{ steps.deploy_guard.outputs.ready_to_deploy }}
|
||||
docs_preview_retention_days: ${{ steps.deploy_guard.outputs.docs_preview_retention_days }}
|
||||
docs_guard_artifact_retention_days: ${{ steps.deploy_guard.outputs.docs_guard_artifact_retention_days }}
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Resolve docs diff scope
|
||||
id: scope
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
base_sha=""
|
||||
docs_files=""
|
||||
|
||||
if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then
|
||||
base_sha="${{ github.event.pull_request.base.sha }}"
|
||||
docs_files="$(git diff --name-only "$base_sha" HEAD | awk '/\.md$|\.mdx$|^README/ {print}')"
|
||||
elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then
|
||||
base_sha="${{ github.event.before }}"
|
||||
if [ -n "$base_sha" ] && [ "$base_sha" != "0000000000000000000000000000000000000000" ]; then
|
||||
docs_files="$(git diff --name-only "$base_sha" HEAD | awk '/\.md$|\.mdx$|^README/ {print}')"
|
||||
fi
|
||||
else
|
||||
docs_files="$(git ls-files 'docs/**/*.md' 'README*.md')"
|
||||
fi
|
||||
|
||||
{
|
||||
echo "base_sha=${base_sha}"
|
||||
echo "docs_files<<EOF"
|
||||
printf '%s\n' "$docs_files"
|
||||
echo "EOF"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Validate docs deploy contract
|
||||
id: deploy_guard
|
||||
shell: bash
|
||||
env:
|
||||
INPUT_DEPLOY_TARGET: ${{ github.event.inputs.deploy_target || '' }}
|
||||
INPUT_PREVIEW_EVIDENCE_RUN_URL: ${{ github.event.inputs.preview_evidence_run_url || '' }}
|
||||
INPUT_ROLLBACK_REF: ${{ github.event.inputs.rollback_ref || '' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
python3 scripts/ci/docs_deploy_guard.py \
|
||||
--repo-root "$PWD" \
|
||||
--event-name "${GITHUB_EVENT_NAME}" \
|
||||
--git-ref "${GITHUB_REF}" \
|
||||
--git-sha "${GITHUB_SHA}" \
|
||||
--input-deploy-target "${INPUT_DEPLOY_TARGET}" \
|
||||
--input-preview-evidence-run-url "${INPUT_PREVIEW_EVIDENCE_RUN_URL}" \
|
||||
--input-rollback-ref "${INPUT_ROLLBACK_REF}" \
|
||||
--policy-file .github/release/docs-deploy-policy.json \
|
||||
--output-json artifacts/docs-deploy-guard.json \
|
||||
--output-md artifacts/docs-deploy-guard.md \
|
||||
--github-output-file "$GITHUB_OUTPUT" \
|
||||
--fail-on-violation
|
||||
|
||||
- name: Emit docs deploy guard audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/docs-deploy-guard.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type docs_deploy_guard \
|
||||
--input-json artifacts/docs-deploy-guard.json \
|
||||
--output-json artifacts/audit-event-docs-deploy-guard.json \
|
||||
--artifact-name docs-deploy-guard \
|
||||
--retention-days 21
|
||||
fi
|
||||
|
||||
- name: Publish docs deploy guard summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/docs-deploy-guard.md ]; then
|
||||
cat artifacts/docs-deploy-guard.md >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Upload docs deploy guard artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: docs-deploy-guard
|
||||
path: |
|
||||
artifacts/docs-deploy-guard.json
|
||||
artifacts/docs-deploy-guard.md
|
||||
artifacts/audit-event-docs-deploy-guard.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: ${{ steps.deploy_guard.outputs.docs_guard_artifact_retention_days || 21 }}
|
||||
|
||||
- name: Setup Node.js for markdown lint
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Markdown quality gate
|
||||
env:
|
||||
BASE_SHA: ${{ steps.scope.outputs.base_sha }}
|
||||
DOCS_FILES: ${{ steps.scope.outputs.docs_files }}
|
||||
run: ./scripts/ci/docs_quality_gate.sh
|
||||
|
||||
- name: Collect added links
|
||||
id: links
|
||||
if: github.event_name != 'workflow_dispatch'
|
||||
shell: bash
|
||||
env:
|
||||
BASE_SHA: ${{ steps.scope.outputs.base_sha }}
|
||||
DOCS_FILES: ${{ steps.scope.outputs.docs_files }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 ./scripts/ci/collect_changed_links.py \
|
||||
--base "$BASE_SHA" \
|
||||
--docs-files "$DOCS_FILES" \
|
||||
--output .ci-added-links.txt
|
||||
count=$(wc -l < .ci-added-links.txt | tr -d ' ')
|
||||
echo "count=$count" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Link check (added links)
|
||||
if: github.event_name != 'workflow_dispatch' && steps.links.outputs.count != '0'
|
||||
uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 # v2
|
||||
with:
|
||||
fail: true
|
||||
args: >-
|
||||
--offline
|
||||
--no-progress
|
||||
--format detailed
|
||||
.ci-added-links.txt
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Skip link check (none added)
|
||||
if: github.event_name != 'workflow_dispatch' && steps.links.outputs.count == '0'
|
||||
run: echo "No added links detected in changed docs lines."
|
||||
|
||||
docs-preview:
|
||||
name: Docs Preview Artifact
|
||||
needs: [docs-quality]
|
||||
if: github.event_name == 'pull_request' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_target == 'preview')
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Build preview bundle
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
rm -rf site
|
||||
mkdir -p site/docs
|
||||
cp -R docs/. site/docs/
|
||||
cp README.md site/README.md
|
||||
cat > site/index.md <<'EOF'
|
||||
# ZeroClaw Docs Preview
|
||||
|
||||
This preview bundle is produced by `.github/workflows/docs-deploy.yml`.
|
||||
|
||||
- [Repository README](./README.md)
|
||||
- [Docs Home](./docs/README.md)
|
||||
EOF
|
||||
|
||||
- name: Upload preview artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: docs-preview
|
||||
path: site/**
|
||||
if-no-files-found: error
|
||||
retention-days: ${{ needs.docs-quality.outputs.docs_preview_retention_days || 14 }}
|
||||
|
||||
docs-deploy:
|
||||
name: Deploy Docs to GitHub Pages
|
||||
needs: [docs-quality]
|
||||
if: needs.docs-quality.outputs.deploy_target == 'production' && needs.docs-quality.outputs.ready_to_deploy == 'true'
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
permissions:
|
||||
contents: read
|
||||
pages: write
|
||||
id-token: write
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
ref: ${{ needs.docs-quality.outputs.source_ref }}
|
||||
|
||||
- name: Build deploy bundle
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
rm -rf site
|
||||
mkdir -p site/docs
|
||||
cp -R docs/. site/docs/
|
||||
cp README.md site/README.md
|
||||
cat > site/index.md <<'EOF'
|
||||
# ZeroClaw Documentation
|
||||
|
||||
This site is deployed automatically from `main` by `.github/workflows/docs-deploy.yml`.
|
||||
|
||||
- [Repository README](./README.md)
|
||||
- [Docs Home](./docs/README.md)
|
||||
EOF
|
||||
|
||||
- name: Publish deploy source summary
|
||||
shell: bash
|
||||
run: |
|
||||
{
|
||||
echo "## Docs Deploy Source"
|
||||
echo "- Deploy mode: \`${{ needs.docs-quality.outputs.deploy_mode }}\`"
|
||||
echo "- Source ref: \`${{ needs.docs-quality.outputs.source_ref }}\`"
|
||||
echo "- Production branch ref: \`${{ needs.docs-quality.outputs.production_branch_ref }}\`"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5
|
||||
|
||||
- name: Upload Pages artifact
|
||||
uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4
|
||||
with:
|
||||
path: site
|
||||
|
||||
- name: Deploy to GitHub Pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4
|
||||
363
.github/workflows/feature-matrix.yml
vendored
363
.github/workflows/feature-matrix.yml
vendored
@ -1,57 +1,380 @@
|
||||
name: Feature Matrix
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [dev]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "scripts/ci/nightly_matrix_report.py"
|
||||
- ".github/release/nightly-owner-routing.json"
|
||||
- ".github/workflows/feature-matrix.yml"
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
types: [labeled]
|
||||
merge_group:
|
||||
branches: [dev, main]
|
||||
schedule:
|
||||
- cron: "30 4 * * 1" # Weekly Monday 4:30am UTC
|
||||
- cron: "30 4 * * 1" # Weekly Monday 04:30 UTC
|
||||
- cron: "15 3 * * *" # Daily 03:15 UTC (nightly profile)
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
profile:
|
||||
description: "compile = merge-gate matrix, nightly = integration-oriented lane commands"
|
||||
required: true
|
||||
default: compile
|
||||
type: choice
|
||||
options:
|
||||
- compile
|
||||
- nightly
|
||||
fail_on_failure:
|
||||
description: "Fail summary job when any lane fails"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: feature-matrix-${{ github.event.pull_request.number || github.sha }}
|
||||
group: feature-matrix-${{ github.event.pull_request.number || github.ref || github.run_id }}-${{ github.event.inputs.profile || 'auto' }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
resolve-profile:
|
||||
name: Resolve Matrix Profile
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
outputs:
|
||||
profile: ${{ steps.resolve.outputs.profile }}
|
||||
lane_job_prefix: ${{ steps.resolve.outputs.lane_job_prefix }}
|
||||
summary_job_name: ${{ steps.resolve.outputs.summary_job_name }}
|
||||
lane_retention_days: ${{ steps.resolve.outputs.lane_retention_days }}
|
||||
lane_timeout_minutes: ${{ steps.resolve.outputs.lane_timeout_minutes }}
|
||||
max_attempts: ${{ steps.resolve.outputs.max_attempts }}
|
||||
summary_artifact_name: ${{ steps.resolve.outputs.summary_artifact_name }}
|
||||
summary_json_name: ${{ steps.resolve.outputs.summary_json_name }}
|
||||
summary_md_name: ${{ steps.resolve.outputs.summary_md_name }}
|
||||
lane_artifact_prefix: ${{ steps.resolve.outputs.lane_artifact_prefix }}
|
||||
fail_on_failure: ${{ steps.resolve.outputs.fail_on_failure }}
|
||||
collect_history: ${{ steps.resolve.outputs.collect_history }}
|
||||
steps:
|
||||
- name: Resolve effective profile
|
||||
id: resolve
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
profile="compile"
|
||||
fail_on_failure="true"
|
||||
lane_job_prefix="Matrix Lane"
|
||||
summary_job_name="Feature Matrix Summary"
|
||||
lane_retention_days="21"
|
||||
lane_timeout_minutes="55"
|
||||
max_attempts="1"
|
||||
summary_artifact_name="feature-matrix-summary"
|
||||
summary_json_name="feature-matrix-summary.json"
|
||||
summary_md_name="feature-matrix-summary.md"
|
||||
lane_artifact_prefix="feature-matrix"
|
||||
collect_history="false"
|
||||
|
||||
if [ "${GITHUB_EVENT_NAME}" = "schedule" ] && [ "${{ github.event.schedule }}" = "15 3 * * *" ]; then
|
||||
profile="nightly"
|
||||
elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
profile="${{ github.event.inputs.profile || 'compile' }}"
|
||||
fail_on_failure="${{ github.event.inputs.fail_on_failure || 'true' }}"
|
||||
fi
|
||||
|
||||
if [ "$profile" = "nightly" ]; then
|
||||
lane_job_prefix="Nightly Lane"
|
||||
summary_job_name="Nightly Summary & Routing"
|
||||
lane_retention_days="30"
|
||||
lane_timeout_minutes="70"
|
||||
max_attempts="2"
|
||||
summary_artifact_name="nightly-all-features-summary"
|
||||
summary_json_name="nightly-summary.json"
|
||||
summary_md_name="nightly-summary.md"
|
||||
lane_artifact_prefix="nightly-lane"
|
||||
collect_history="true"
|
||||
fi
|
||||
|
||||
{
|
||||
echo "profile=${profile}"
|
||||
echo "lane_job_prefix=${lane_job_prefix}"
|
||||
echo "summary_job_name=${summary_job_name}"
|
||||
echo "lane_retention_days=${lane_retention_days}"
|
||||
echo "lane_timeout_minutes=${lane_timeout_minutes}"
|
||||
echo "max_attempts=${max_attempts}"
|
||||
echo "summary_artifact_name=${summary_artifact_name}"
|
||||
echo "summary_json_name=${summary_json_name}"
|
||||
echo "summary_md_name=${summary_md_name}"
|
||||
echo "lane_artifact_prefix=${lane_artifact_prefix}"
|
||||
echo "fail_on_failure=${fail_on_failure}"
|
||||
echo "collect_history=${collect_history}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
feature-check:
|
||||
name: Check (${{ matrix.name }})
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
name: ${{ needs.resolve-profile.outputs.lane_job_prefix }} (${{ matrix.name }})
|
||||
needs: [resolve-profile]
|
||||
if: >-
|
||||
github.event_name != 'pull_request' ||
|
||||
contains(github.event.pull_request.labels.*.name, 'ci:full') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'ci:feature-matrix')
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: ${{ fromJSON(needs.resolve-profile.outputs.lane_timeout_minutes) }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- name: no-default-features
|
||||
args: --no-default-features
|
||||
- name: default
|
||||
compile_command: cargo check --locked
|
||||
nightly_command: cargo test --locked --test agent_e2e --verbose
|
||||
install_libudev: false
|
||||
- name: all-features
|
||||
args: --all-features
|
||||
install_libudev: true
|
||||
- name: hardware-only
|
||||
args: --no-default-features --features hardware
|
||||
- name: whatsapp-web
|
||||
compile_command: cargo check --locked --no-default-features --features whatsapp-web
|
||||
nightly_command: cargo check --locked --no-default-features --features whatsapp-web --verbose
|
||||
install_libudev: false
|
||||
- name: browser-native
|
||||
args: --no-default-features --features browser-native
|
||||
compile_command: cargo check --locked --no-default-features --features browser-native
|
||||
nightly_command: cargo check --locked --no-default-features --features browser-native --verbose
|
||||
install_libudev: false
|
||||
- name: nightly-all-features
|
||||
compile_command: cargo check --locked --all-features
|
||||
nightly_command: cargo test --locked --all-features --test agent_e2e --verbose
|
||||
install_libudev: true
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
key: features-${{ matrix.name }}
|
||||
prefix-key: feature-matrix-${{ matrix.name }}
|
||||
|
||||
- name: Install Linux system dependencies for all-features
|
||||
- name: Ensure Linux deps for all-features lane
|
||||
if: matrix.install_libudev
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends libudev-dev pkg-config
|
||||
set -euo pipefail
|
||||
|
||||
- name: Check feature combination
|
||||
run: cargo check --locked ${{ matrix.args }}
|
||||
if command -v pkg-config >/dev/null 2>&1 && pkg-config --exists libudev; then
|
||||
echo "libudev development headers already available; skipping apt install."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Installing missing libudev build dependencies..."
|
||||
for attempt in 1 2 3; do
|
||||
if sudo apt-get update -qq -o DPkg::Lock::Timeout=300 && \
|
||||
sudo apt-get install -y --no-install-recommends --no-upgrade -o DPkg::Lock::Timeout=300 libudev-dev pkg-config; then
|
||||
echo "Dependency installation succeeded on attempt ${attempt}."
|
||||
exit 0
|
||||
fi
|
||||
if [ "$attempt" -eq 3 ]; then
|
||||
echo "Failed to install libudev-dev/pkg-config after ${attempt} attempts." >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "Dependency installation failed on attempt ${attempt}; retrying in 10s..."
|
||||
sleep 10
|
||||
done
|
||||
|
||||
- name: Run matrix lane command
|
||||
id: lane
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
|
||||
profile="${{ needs.resolve-profile.outputs.profile }}"
|
||||
lane_command="${{ matrix.compile_command }}"
|
||||
if [ "$profile" = "nightly" ]; then
|
||||
lane_command="${{ matrix.nightly_command }}"
|
||||
fi
|
||||
|
||||
max_attempts="${{ needs.resolve-profile.outputs.max_attempts }}"
|
||||
attempt=1
|
||||
status=1
|
||||
|
||||
started_at="$(date +%s)"
|
||||
while [ "$attempt" -le "$max_attempts" ]; do
|
||||
echo "Running lane command (attempt ${attempt}/${max_attempts}): ${lane_command}"
|
||||
set +e
|
||||
bash -lc "${lane_command}"
|
||||
status=$?
|
||||
set -e
|
||||
if [ "$status" -eq 0 ]; then
|
||||
break
|
||||
fi
|
||||
if [ "$attempt" -lt "$max_attempts" ]; then
|
||||
sleep 5
|
||||
fi
|
||||
attempt="$((attempt + 1))"
|
||||
done
|
||||
finished_at="$(date +%s)"
|
||||
duration="$((finished_at - started_at))"
|
||||
|
||||
lane_status="success"
|
||||
if [ "$status" -ne 0 ]; then
|
||||
lane_status="failure"
|
||||
fi
|
||||
|
||||
cat > "artifacts/nightly-result-${{ matrix.name }}.json" <<EOF
|
||||
{
|
||||
"lane": "${{ matrix.name }}",
|
||||
"mode": "${profile}",
|
||||
"status": "${lane_status}",
|
||||
"exit_code": ${status},
|
||||
"duration_seconds": ${duration},
|
||||
"command": "${lane_command}",
|
||||
"attempts_used": ${attempt},
|
||||
"max_attempts": ${max_attempts}
|
||||
}
|
||||
EOF
|
||||
|
||||
{
|
||||
echo "### ${{ needs.resolve-profile.outputs.lane_job_prefix }}: ${{ matrix.name }}"
|
||||
echo "- Profile: \`${profile}\`"
|
||||
echo "- Command: \`${lane_command}\`"
|
||||
echo "- Status: ${lane_status}"
|
||||
echo "- Exit code: ${status}"
|
||||
echo "- Duration (s): ${duration}"
|
||||
echo "- Attempts: ${attempt}/${max_attempts}"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
echo "lane_status=${lane_status}" >> "$GITHUB_OUTPUT"
|
||||
echo "lane_exit_code=${status}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Upload lane report
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: ${{ needs.resolve-profile.outputs.lane_artifact_prefix }}-${{ matrix.name }}
|
||||
path: artifacts/nightly-result-${{ matrix.name }}.json
|
||||
if-no-files-found: error
|
||||
retention-days: ${{ fromJSON(needs.resolve-profile.outputs.lane_retention_days) }}
|
||||
|
||||
- name: Enforce lane success
|
||||
if: steps.lane.outputs.lane_status != 'success'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
code="${{ steps.lane.outputs.lane_exit_code }}"
|
||||
if [[ "$code" =~ ^[0-9]+$ ]]; then
|
||||
# shellcheck disable=SC2242
|
||||
exit "$code"
|
||||
fi
|
||||
echo "Invalid lane exit code: $code" >&2
|
||||
exit 1
|
||||
|
||||
summary:
|
||||
name: ${{ needs.resolve-profile.outputs.summary_job_name }}
|
||||
needs: [resolve-profile, feature-check]
|
||||
if: always()
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Download lane reports
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Collect recent nightly history
|
||||
if: needs.resolve-profile.outputs.collect_history == 'true'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
script: |
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
const workflowId = "feature-matrix.yml";
|
||||
const owner = context.repo.owner;
|
||||
const repo = context.repo.repo;
|
||||
|
||||
const events = ["schedule", "workflow_dispatch"];
|
||||
let runs = [];
|
||||
for (const event of events) {
|
||||
const resp = await github.rest.actions.listWorkflowRuns({
|
||||
owner,
|
||||
repo,
|
||||
workflow_id: workflowId,
|
||||
branch: "dev",
|
||||
event,
|
||||
per_page: 20,
|
||||
});
|
||||
runs = runs.concat(resp.data.workflow_runs || []);
|
||||
}
|
||||
|
||||
const currentRunId = context.runId;
|
||||
runs = runs
|
||||
.filter((run) => run.id !== currentRunId && run.status === "completed")
|
||||
.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime())
|
||||
.slice(0, 3)
|
||||
.map((run) => ({
|
||||
run_id: run.id,
|
||||
url: run.html_url,
|
||||
event: run.event,
|
||||
conclusion: run.conclusion || "unknown",
|
||||
created_at: run.created_at,
|
||||
head_sha: run.head_sha,
|
||||
display_title: run.display_title || "",
|
||||
}));
|
||||
|
||||
fs.mkdirSync("artifacts", { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join("artifacts", "nightly-history.json"),
|
||||
`${JSON.stringify(runs, null, 2)}\n`,
|
||||
{ encoding: "utf8" }
|
||||
);
|
||||
|
||||
- name: Aggregate matrix summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
args=(
|
||||
--input-dir artifacts
|
||||
--owners-file .github/release/nightly-owner-routing.json
|
||||
--output-json "artifacts/${{ needs.resolve-profile.outputs.summary_json_name }}"
|
||||
--output-md "artifacts/${{ needs.resolve-profile.outputs.summary_md_name }}"
|
||||
)
|
||||
|
||||
if [ "${{ needs.resolve-profile.outputs.collect_history }}" = "true" ] && [ -f artifacts/nightly-history.json ]; then
|
||||
args+=(--history-file artifacts/nightly-history.json)
|
||||
fi
|
||||
|
||||
if [ "${{ needs.resolve-profile.outputs.fail_on_failure }}" = "true" ]; then
|
||||
args+=(--fail-on-failure)
|
||||
fi
|
||||
|
||||
python3 scripts/ci/nightly_matrix_report.py "${args[@]}"
|
||||
|
||||
- name: Publish summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat "artifacts/${{ needs.resolve-profile.outputs.summary_md_name }}" >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload summary artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: ${{ needs.resolve-profile.outputs.summary_artifact_name }}
|
||||
path: |
|
||||
artifacts/${{ needs.resolve-profile.outputs.summary_json_name }}
|
||||
artifacts/${{ needs.resolve-profile.outputs.summary_md_name }}
|
||||
artifacts/nightly-history.json
|
||||
if-no-files-found: error
|
||||
retention-days: ${{ fromJSON(needs.resolve-profile.outputs.lane_retention_days) }}
|
||||
|
||||
132
.github/workflows/main-branch-flow.md
vendored
132
.github/workflows/main-branch-flow.md
vendored
@ -1,6 +1,6 @@
|
||||
# Main Branch Delivery Flows
|
||||
|
||||
This document explains what runs when code is proposed to `dev`, promoted to `main`, and released.
|
||||
This document explains what runs when code is proposed to `dev`/`main`, merged to `main`, and released.
|
||||
|
||||
Use this with:
|
||||
|
||||
@ -13,10 +13,10 @@ Use this with:
|
||||
| Event | Main workflows |
|
||||
| --- | --- |
|
||||
| PR activity (`pull_request_target`) | `pr-intake-checks.yml`, `pr-labeler.yml`, `pr-auto-response.yml` |
|
||||
| PR activity (`pull_request`) | `ci-run.yml`, `sec-audit.yml`, `main-promotion-gate.yml` (for `main` PRs), plus path-scoped workflows |
|
||||
| PR activity (`pull_request`) | `ci-run.yml`, `sec-audit.yml`, plus path-scoped workflows |
|
||||
| Push to `dev`/`main` | `ci-run.yml`, `sec-audit.yml`, plus path-scoped workflows |
|
||||
| Tag push (`v*`) | `pub-release.yml` publish mode, `pub-docker-img.yml` publish job |
|
||||
| Scheduled/manual | `pub-release.yml` verification mode, `pub-homebrew-core.yml` (manual), `sec-codeql.yml`, `feature-matrix.yml`, `test-fuzz.yml`, `pr-check-stale.yml`, `pr-check-status.yml`, `sync-contributors.yml`, `test-benchmarks.yml`, `test-e2e.yml` |
|
||||
| Scheduled/manual | `pub-release.yml` verification mode, `sec-codeql.yml`, `feature-matrix.yml`, `test-fuzz.yml`, `pr-check-stale.yml`, `pr-check-status.yml`, `ci-queue-hygiene.yml`, `sync-contributors.yml`, `test-benchmarks.yml`, `test-e2e.yml` |
|
||||
|
||||
## Runtime and Docker Matrix
|
||||
|
||||
@ -34,7 +34,6 @@ Observed averages below are from recent completed runs (sampled from GitHub Acti
|
||||
| `pub-docker-img.yml` (`pull_request`) | Docker build-input PR changes | 240.4s | Yes | Yes | No |
|
||||
| `pub-docker-img.yml` (`push`) | tag push `v*` | 139.9s | Yes | No | Yes |
|
||||
| `pub-release.yml` | Tag push `v*` (publish) + manual/scheduled verification (no publish) | N/A in recent sample | No | No | No |
|
||||
| `pub-homebrew-core.yml` | Manual workflow dispatch only | N/A in recent sample | No | No | No |
|
||||
|
||||
Notes:
|
||||
|
||||
@ -54,11 +53,17 @@ Notes:
|
||||
- `pr-auto-response.yml` runs first-interaction and label routes.
|
||||
3. `pull_request` CI workflows start:
|
||||
- `ci-run.yml`
|
||||
- `feature-matrix.yml` (Rust/workflow path scope)
|
||||
- `sec-audit.yml`
|
||||
- path-scoped workflows if matching files changed:
|
||||
- `pub-docker-img.yml` (Docker build-input paths only)
|
||||
- `workflow-sanity.yml` (workflow files only)
|
||||
- `sec-codeql.yml` (if Rust/codeql paths changed)
|
||||
- path-scoped workflows if matching files changed:
|
||||
- `pub-docker-img.yml` (Docker build-input paths only)
|
||||
- `docs-deploy.yml` (docs + README markdown paths; deploy contract guard enforces promotion + rollback ref policy)
|
||||
- `workflow-sanity.yml` (workflow files only)
|
||||
- `pr-label-policy-check.yml` (label-policy files only)
|
||||
- `ci-change-audit.yml` (CI/security path changes)
|
||||
- `ci-provider-connectivity.yml` (probe config/script/workflow changes)
|
||||
- `ci-reproducible-build.yml` (Rust/build reproducibility paths)
|
||||
4. In `ci-run.yml`, `changes` computes:
|
||||
- `docs_only`
|
||||
- `docs_changed`
|
||||
@ -69,13 +74,13 @@ Notes:
|
||||
- `lint`
|
||||
- `lint-strict-delta`
|
||||
- `test`
|
||||
- `flake-probe` (single-retry telemetry; optional block via `CI_BLOCK_ON_FLAKE_SUSPECTED`)
|
||||
- `docs-quality`
|
||||
7. If `.github/workflows/**` changed, `workflow-owner-approval` must pass.
|
||||
8. If root license files (`LICENSE-APACHE`, `LICENSE-MIT`) changed, `license-file-owner-guard` allows only PR author `willsarg`.
|
||||
9. `lint-feedback` posts actionable comment if lint/docs gates fail.
|
||||
10. `CI Required Gate` aggregates results to final pass/fail.
|
||||
11. Maintainer merges PR once checks and review policy are satisfied.
|
||||
12. Merge emits a `push` event on `dev` (see scenario 4).
|
||||
7. If root license files (`LICENSE-APACHE`, `LICENSE-MIT`) changed, `license-file-owner-guard` allows only PR author `willsarg`.
|
||||
8. `lint-feedback` posts actionable comment if lint/docs gates fail.
|
||||
9. `CI Required Gate` aggregates results to final pass/fail.
|
||||
10. Maintainer merges PR once checks and review policy are satisfied.
|
||||
11. Merge emits a `push` event on `dev` (see scenario 4).
|
||||
|
||||
### 2) PR from fork -> `dev`
|
||||
|
||||
@ -95,8 +100,8 @@ Notes:
|
||||
4. Approval gate possibility:
|
||||
- if Actions settings require maintainer approval for fork workflows, the `pull_request` run stays in `action_required`/waiting state until approved.
|
||||
5. Event fan-out after labeling:
|
||||
- `pr-labeler.yml` and manual label changes emit `labeled`/`unlabeled` events.
|
||||
- those events retrigger `pull_request_target` automation (`pr-labeler.yml` and `pr-auto-response.yml`), creating extra run volume/noise.
|
||||
- manual label changes emit `labeled`/`unlabeled` events.
|
||||
- those events retrigger only label-driven `pull_request_target` automation (`pr-auto-response.yml`); `pr-labeler.yml` now runs only on PR lifecycle events (`opened`/`reopened`/`synchronize`/`ready_for_review`) to reduce churn.
|
||||
6. When contributor pushes new commits to fork branch (`synchronize`):
|
||||
- reruns: `pr-intake-checks.yml`, `pr-labeler.yml`, `ci-run.yml`, `sec-audit.yml`, and matching path-scoped PR workflows.
|
||||
- does not rerun `pr-auto-response.yml` unless label/open events occur.
|
||||
@ -104,35 +109,34 @@ Notes:
|
||||
- `changes` computes `docs_only`, `docs_changed`, `rust_changed`, `workflow_changed`.
|
||||
- `build` runs for Rust-impacting changes.
|
||||
- `lint`/`lint-strict-delta`/`test`/`docs-quality` run on PR when `ci:full` label exists.
|
||||
- `workflow-owner-approval` runs when `.github/workflows/**` changed.
|
||||
- `CI Required Gate` emits final pass/fail for the PR head.
|
||||
8. Fork PR merge blockers to check first when diagnosing stalls:
|
||||
- run approval pending for fork workflows.
|
||||
- `workflow-owner-approval` failing on workflow-file changes.
|
||||
- `license-file-owner-guard` failing when root license files are modified by non-owner PR author.
|
||||
- `CI Required Gate` failure caused by upstream jobs.
|
||||
- repeated `pull_request_target` reruns from label churn causing noisy signals.
|
||||
9. After merge, normal `push` workflows on `dev` execute (scenario 4).
|
||||
|
||||
### 3) Promotion PR `dev` -> `main`
|
||||
### 3) PR to `main` (direct or from `dev`)
|
||||
|
||||
1. Maintainer opens PR with head `dev` and base `main`.
|
||||
2. `main-promotion-gate.yml` runs and fails unless PR author is `willsarg` or `theonlyhennygod`.
|
||||
3. `main-promotion-gate.yml` also fails if head repo/branch is not `<this-repo>:dev`.
|
||||
4. `ci-run.yml` and `sec-audit.yml` run on the promotion PR.
|
||||
5. Maintainer merges PR once checks and review policy pass.
|
||||
6. Merge emits a `push` event on `main`.
|
||||
1. Contributor or maintainer opens PR with base `main`.
|
||||
2. `ci-run.yml` and `sec-audit.yml` run on the PR, plus any path-scoped workflows.
|
||||
3. Maintainer merges PR once checks and review policy pass.
|
||||
4. Merge emits a `push` event on `main`.
|
||||
|
||||
### 4) Push to `dev` or `main` (including after merge)
|
||||
### 4) Push/Merge Queue to `dev` or `main` (including after merge)
|
||||
|
||||
1. Commit reaches `dev` or `main` (usually from a merged PR).
|
||||
2. `ci-run.yml` runs on `push`.
|
||||
3. `sec-audit.yml` runs on `push`.
|
||||
4. Path-filtered workflows run only if touched files match their filters.
|
||||
5. In `ci-run.yml`, push behavior differs from PR behavior:
|
||||
1. Commit reaches `dev` or `main` (usually from a merged PR), or merge queue creates a `merge_group` validation commit.
|
||||
2. `ci-run.yml` runs on `push` and `merge_group`.
|
||||
3. `feature-matrix.yml` runs on `push` to `dev` for Rust/workflow paths and on `merge_group`.
|
||||
4. `sec-audit.yml` runs on `push` and `merge_group`.
|
||||
5. `sec-codeql.yml` runs on `push`/`merge_group` when Rust/codeql paths change (path-scoped on push).
|
||||
6. `ci-supply-chain-provenance.yml` runs on push when Rust/build provenance paths change.
|
||||
7. Path-filtered workflows run only if touched files match their filters.
|
||||
8. In `ci-run.yml`, push/merge-group behavior differs from PR behavior:
|
||||
- Rust path: `lint`, `lint-strict-delta`, `test`, `build` are expected.
|
||||
- Docs/non-rust paths: fast-path behavior applies.
|
||||
6. `CI Required Gate` computes overall push result.
|
||||
9. `CI Required Gate` computes overall push/merge-group result.
|
||||
|
||||
## Docker Publish Logic
|
||||
|
||||
@ -142,7 +146,7 @@ Workflow: `.github/workflows/pub-docker-img.yml`
|
||||
|
||||
1. Triggered on `pull_request` to `dev` or `main` when Docker build-input paths change.
|
||||
2. Runs `PR Docker Smoke` job:
|
||||
- Builds local smoke image with Blacksmith builder.
|
||||
- Builds local smoke image with Buildx builder.
|
||||
- Verifies container with `docker run ... --version`.
|
||||
3. Typical runtime in recent sample: ~240.4s.
|
||||
4. No registry push happens on PR events.
|
||||
@ -152,10 +156,13 @@ Workflow: `.github/workflows/pub-docker-img.yml`
|
||||
1. `publish` job runs on tag pushes `v*` only.
|
||||
2. Workflow trigger includes semantic version tag pushes (`v*`) only.
|
||||
3. Login to `ghcr.io` uses `${{ github.actor }}` and `${{ secrets.GITHUB_TOKEN }}`.
|
||||
4. Tag computation includes semantic tag from pushed git tag (`vX.Y.Z`) + SHA tag.
|
||||
4. Tag computation includes semantic tag from pushed git tag (`vX.Y.Z`) + SHA tag (`sha-<12>`) + `latest`.
|
||||
5. Multi-platform publish is used for tag pushes (`linux/amd64,linux/arm64`).
|
||||
6. Typical runtime in recent sample: ~139.9s.
|
||||
7. Result: pushed image tags under `ghcr.io/<owner>/<repo>`.
|
||||
6. `scripts/ci/ghcr_publish_contract_guard.py` validates anonymous pullability and digest parity across `vX.Y.Z`, `sha-<12>`, and `latest`, then emits rollback candidate mapping evidence.
|
||||
7. Trivy scans are emitted for version, SHA, and latest references.
|
||||
8. `scripts/ci/ghcr_vulnerability_gate.py` validates Trivy JSON outputs against `.github/release/ghcr-vulnerability-policy.json` and emits audit-event evidence.
|
||||
9. Typical runtime in recent sample: ~139.9s.
|
||||
10. Result: pushed image tags under `ghcr.io/<owner>/<repo>` with publish-contract + vulnerability-gate + scan artifacts.
|
||||
|
||||
Important: Docker publish now requires a `v*` tag push; regular `dev`/`main` branch pushes do not publish images.
|
||||
|
||||
@ -167,26 +174,43 @@ Workflow: `.github/workflows/pub-release.yml`
|
||||
- Tag push `v*` -> publish mode.
|
||||
- Manual dispatch -> verification-only or publish mode (input-driven).
|
||||
- Weekly schedule -> verification-only mode.
|
||||
2. `prepare` resolves release context (`release_ref`, `release_tag`, publish/draft mode) and validates manual publish inputs.
|
||||
- publish mode enforces `release_tag` == `Cargo.toml` version at the tag commit.
|
||||
2. `prepare` resolves release context (`release_ref`, `release_tag`, publish/draft mode) and runs `scripts/ci/release_trigger_guard.py`.
|
||||
- publish mode enforces actor authorization, stable annotated tag policy, `origin/main` ancestry, and `release_tag` == `Cargo.toml` version at the tag commit.
|
||||
- trigger provenance is emitted as `release-trigger-guard` artifacts.
|
||||
3. `build-release` builds matrix artifacts across Linux/macOS/Windows targets.
|
||||
4. `verify-artifacts` enforces presence of all expected archives before any publish attempt.
|
||||
5. In publish mode, workflow generates SBOM (`CycloneDX` + `SPDX`), `SHA256SUMS`, keyless cosign signatures, and verifies GHCR release-tag availability.
|
||||
6. In publish mode, workflow creates/updates the GitHub Release for the resolved tag and commit-ish.
|
||||
4. `verify-artifacts` runs `scripts/ci/release_artifact_guard.py` against `.github/release/release-artifact-contract.json` in verify-stage mode (archive contract required; manifest/SBOM/notice checks intentionally skipped) and uploads `release-artifact-guard-verify` evidence.
|
||||
5. In publish mode, workflow generates SBOM (`CycloneDX` + `SPDX`), `SHA256SUMS`, and a checksum provenance statement (`zeroclaw.sha256sums.intoto.json`) plus audit-event envelope.
|
||||
6. In publish mode, after manifest generation, workflow reruns `release_artifact_guard.py` in full-contract mode and emits `release-artifact-guard.publish.json` plus `audit-event-release-artifact-guard-publish.json`.
|
||||
7. In publish mode, workflow keyless-signs release artifacts and composes a supply-chain release-notes preface via `release_notes_with_supply_chain_refs.py`.
|
||||
8. In publish mode, workflow verifies GHCR release-tag availability.
|
||||
9. In publish mode, workflow creates/updates the GitHub Release for the resolved tag and commit-ish, combining generated supply-chain preface with GitHub auto-generated commit notes.
|
||||
|
||||
Manual Homebrew formula flow:
|
||||
Pre-release path:
|
||||
|
||||
1. Run `.github/workflows/pub-homebrew-core.yml` with `release_tag=vX.Y.Z`.
|
||||
2. Use `dry_run=true` first to validate formula patch and metadata.
|
||||
3. Use `dry_run=false` to push from bot fork and open `homebrew-core` PR.
|
||||
1. Pre-release tags (`vX.Y.Z-alpha.N`, `vX.Y.Z-beta.N`, `vX.Y.Z-rc.N`) trigger `.github/workflows/pub-prerelease.yml`.
|
||||
2. `scripts/ci/prerelease_guard.py` enforces stage progression, `origin/main` ancestry, and Cargo version/tag alignment.
|
||||
3. In publish mode, prerelease assets are attached to a GitHub prerelease for the stage tag.
|
||||
|
||||
Canary policy lane:
|
||||
|
||||
1. `.github/workflows/ci-canary-gate.yml` runs weekly or manually.
|
||||
2. `scripts/ci/canary_guard.py` evaluates metrics against `.github/release/canary-policy.json`.
|
||||
3. Decision output is explicit (`promote`, `hold`, `abort`) with auditable artifacts and optional dispatch signal.
|
||||
|
||||
## Merge/Policy Notes
|
||||
|
||||
1. Workflow-file changes (`.github/workflows/**`) activate owner-approval gate in `ci-run.yml`.
|
||||
1. Workflow-file changes (`.github/workflows/**`) are validated through `pr-intake-checks.yml`, `ci-change-audit.yml`, and `CI Required Gate` without a dedicated owner-approval gate.
|
||||
2. PR lint/test strictness is intentionally controlled by `ci:full` label.
|
||||
3. `sec-audit.yml` runs on both PR and push, plus scheduled weekly.
|
||||
4. Some workflows are operational and non-merge-path (`pr-check-stale`, `pr-check-status`, `sync-contributors`, etc.).
|
||||
5. Workflow-specific JavaScript helpers are organized under `.github/workflows/scripts/`.
|
||||
4. `sec-audit.yml` runs on PR/push/merge queue (`merge_group`), plus scheduled weekly.
|
||||
5. `ci-change-audit.yml` enforces pinned `uses:` references for CI/security workflow changes.
|
||||
6. `sec-audit.yml` includes deny policy hygiene checks (`deny_policy_guard.py`) before cargo-deny.
|
||||
7. `sec-audit.yml` includes gitleaks allowlist governance checks (`secrets_governance_guard.py`) against `.github/security/gitleaks-allowlist-governance.json`.
|
||||
8. `ci-reproducible-build.yml` and `ci-supply-chain-provenance.yml` provide scheduled supply-chain assurance signals outside release-only windows.
|
||||
9. Some workflows are operational and non-merge-path (`pr-check-stale`, `pr-check-status`, `sync-contributors`, etc.).
|
||||
10. Workflow-specific JavaScript helpers are organized under `.github/workflows/scripts/`.
|
||||
11. `ci-run.yml` includes cache partitioning (`prefix-key`) across lint/test/build/flake-probe lanes to reduce cache contention.
|
||||
12. `ci-rollback.yml` provides a guarded rollback planning lane (scheduled dry-run + manual execute controls) with audit artifacts.
|
||||
13. `ci-queue-hygiene.yml` periodically deduplicates superseded queued runs for lightweight PR automation workflows to reduce queue pressure.
|
||||
|
||||
## Mermaid Diagrams
|
||||
|
||||
@ -211,29 +235,29 @@ flowchart TD
|
||||
G --> H["push event on dev"]
|
||||
```
|
||||
|
||||
### Promotion and Release
|
||||
### Main Delivery and Release
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
D0["Commit reaches dev"] --> B0["ci-run.yml"]
|
||||
D0 --> C0["sec-audit.yml"]
|
||||
P["Promotion PR dev -> main"] --> PG["main-promotion-gate.yml"]
|
||||
PG --> M["Merge to main"]
|
||||
PRM["PR to main"] --> QM["ci-run.yml + sec-audit.yml (+ path-scoped)"]
|
||||
QM --> M["Merge to main"]
|
||||
M --> A["Commit reaches main"]
|
||||
A --> B["ci-run.yml"]
|
||||
A --> C["sec-audit.yml"]
|
||||
A --> D["path-scoped workflows (if matched)"]
|
||||
T["Tag push v*"] --> R["pub-release.yml"]
|
||||
W["Manual/Scheduled release verify"] --> R
|
||||
T --> P["pub-docker-img.yml publish job"]
|
||||
T --> DP["pub-docker-img.yml publish job"]
|
||||
R --> R1["Artifacts + SBOM + checksums + signatures + GitHub Release"]
|
||||
W --> R2["Verification build only (no GitHub Release publish)"]
|
||||
P --> P1["Push ghcr image tags (version + sha)"]
|
||||
DP --> P1["Push ghcr image tags (version + sha + latest)"]
|
||||
```
|
||||
|
||||
## Quick Troubleshooting
|
||||
|
||||
1. Unexpected skipped jobs: inspect `scripts/ci/detect_change_scope.sh` outputs.
|
||||
2. Workflow-change PR blocked: verify `WORKFLOW_OWNER_LOGINS` and approvals.
|
||||
2. CI/CD-change PR blocked: verify `@chumyin` approved review is present.
|
||||
3. Fork PR appears stalled: check whether Actions run approval is pending.
|
||||
4. Docker not published: confirm a `v*` tag was pushed to the intended commit.
|
||||
|
||||
55
.github/workflows/main-promotion-gate.yml
vendored
55
.github/workflows/main-promotion-gate.yml
vendored
@ -1,55 +0,0 @@
|
||||
name: Main Promotion Gate
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
concurrency:
|
||||
group: main-promotion-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
enforce-dev-promotion:
|
||||
name: Enforce Dev -> Main Promotion
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
steps:
|
||||
- name: Validate PR source branch
|
||||
shell: bash
|
||||
env:
|
||||
HEAD_REF: ${{ github.head_ref }}
|
||||
HEAD_REPO: ${{ github.event.pull_request.head.repo.full_name }}
|
||||
BASE_REPO: ${{ github.repository }}
|
||||
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
pr_author_lc="$(echo "${PR_AUTHOR}" | tr '[:upper:]' '[:lower:]')"
|
||||
allowed_authors=("willsarg" "theonlyhennygod")
|
||||
|
||||
is_allowed_author=false
|
||||
for allowed in "${allowed_authors[@]}"; do
|
||||
if [[ "$pr_author_lc" == "$allowed" ]]; then
|
||||
is_allowed_author=true
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ "$is_allowed_author" != "true" ]]; then
|
||||
echo "::error::PRs into main are restricted to: willsarg, theonlyhennygod. PR author: ${PR_AUTHOR}. Open this PR against dev instead."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$HEAD_REPO" != "$BASE_REPO" ]]; then
|
||||
echo "::error::PRs into main must originate from ${BASE_REPO}:dev or ${BASE_REPO}:release/*. Current head repo: ${HEAD_REPO}."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$HEAD_REF" != "dev" && ! "$HEAD_REF" =~ ^release/ ]]; then
|
||||
echo "::error::PRs into main must use head branch 'dev' or 'release/*'. Current head branch: ${HEAD_REF}."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Promotion policy satisfied: author=${PR_AUTHOR}, source=${HEAD_REPO}:${HEAD_REF} -> main"
|
||||
192
.github/workflows/nightly-all-features.yml
vendored
Normal file
192
.github/workflows/nightly-all-features.yml
vendored
Normal file
@ -0,0 +1,192 @@
|
||||
name: Nightly All-Features
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "15 3 * * *" # Daily 03:15 UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
fail_on_failure:
|
||||
description: "Fail workflow when any nightly lane fails"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: nightly-all-features-${{ github.ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
nightly-lanes:
|
||||
name: Nightly Lane (${{ matrix.name }})
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 70
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- name: default
|
||||
command: cargo test --locked --test agent_e2e --verbose
|
||||
install_libudev: false
|
||||
- name: whatsapp-web
|
||||
command: cargo check --locked --no-default-features --features whatsapp-web --verbose
|
||||
install_libudev: false
|
||||
- name: browser-native
|
||||
command: cargo check --locked --no-default-features --features browser-native --verbose
|
||||
install_libudev: false
|
||||
- name: nightly-all-features
|
||||
command: cargo test --locked --all-features --test agent_e2e --verbose
|
||||
install_libudev: true
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: nightly-all-features-${{ matrix.name }}
|
||||
|
||||
- name: Ensure Linux deps for all-features lane
|
||||
if: matrix.install_libudev
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if command -v pkg-config >/dev/null 2>&1 && pkg-config --exists libudev; then
|
||||
echo "libudev development headers already available; skipping apt install."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Installing missing libudev build dependencies..."
|
||||
for attempt in 1 2 3; do
|
||||
if sudo apt-get update -qq -o DPkg::Lock::Timeout=300 && \
|
||||
sudo apt-get install -y --no-install-recommends --no-upgrade -o DPkg::Lock::Timeout=300 libudev-dev pkg-config; then
|
||||
echo "Dependency installation succeeded on attempt ${attempt}."
|
||||
exit 0
|
||||
fi
|
||||
if [ "$attempt" -eq 3 ]; then
|
||||
echo "Failed to install libudev-dev/pkg-config after ${attempt} attempts." >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "Dependency installation failed on attempt ${attempt}; retrying in 10s..."
|
||||
sleep 10
|
||||
done
|
||||
|
||||
- name: Run nightly lane command
|
||||
id: lane
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
started_at="$(date +%s)"
|
||||
set +e
|
||||
bash -lc "${{ matrix.command }}"
|
||||
status=$?
|
||||
set -e
|
||||
finished_at="$(date +%s)"
|
||||
duration="$((finished_at - started_at))"
|
||||
|
||||
lane_status="success"
|
||||
if [ "$status" -ne 0 ]; then
|
||||
lane_status="failure"
|
||||
fi
|
||||
|
||||
cat > "artifacts/nightly-result-${{ matrix.name }}.json" <<EOF
|
||||
{
|
||||
"lane": "${{ matrix.name }}",
|
||||
"status": "${lane_status}",
|
||||
"exit_code": ${status},
|
||||
"duration_seconds": ${duration},
|
||||
"command": "${{ matrix.command }}"
|
||||
}
|
||||
EOF
|
||||
|
||||
{
|
||||
echo "### Nightly Lane: ${{ matrix.name }}"
|
||||
echo "- Command: \`${{ matrix.command }}\`"
|
||||
echo "- Status: ${lane_status}"
|
||||
echo "- Exit code: ${status}"
|
||||
echo "- Duration (s): ${duration}"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
echo "lane_status=${lane_status}" >> "$GITHUB_OUTPUT"
|
||||
echo "lane_exit_code=${status}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Upload nightly lane artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: nightly-lane-${{ matrix.name }}
|
||||
path: artifacts/nightly-result-${{ matrix.name }}.json
|
||||
if-no-files-found: error
|
||||
retention-days: 30
|
||||
|
||||
nightly-summary:
|
||||
name: Nightly Summary & Routing
|
||||
needs: [nightly-lanes]
|
||||
if: always()
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Download nightly artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Aggregate nightly report
|
||||
shell: bash
|
||||
env:
|
||||
FAIL_ON_FAILURE_INPUT: ${{ github.event.inputs.fail_on_failure || 'true' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
fail_on_failure="true"
|
||||
if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
fail_on_failure="${FAIL_ON_FAILURE_INPUT}"
|
||||
fi
|
||||
|
||||
args=()
|
||||
if [ "$fail_on_failure" = "true" ]; then
|
||||
args+=(--fail-on-failure)
|
||||
fi
|
||||
|
||||
python3 scripts/ci/nightly_matrix_report.py \
|
||||
--input-dir artifacts \
|
||||
--owners-file .github/release/nightly-owner-routing.json \
|
||||
--output-json artifacts/nightly-summary.json \
|
||||
--output-md artifacts/nightly-summary.md \
|
||||
"${args[@]}"
|
||||
|
||||
- name: Publish nightly summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/nightly-summary.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload nightly summary artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: nightly-all-features-summary
|
||||
path: |
|
||||
artifacts/nightly-summary.json
|
||||
artifacts/nightly-summary.md
|
||||
if-no-files-found: error
|
||||
retention-days: 30
|
||||
64
.github/workflows/pages-deploy.yml
vendored
Normal file
64
.github/workflows/pages-deploy.yml
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
name: Deploy GitHub Pages
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- site/**
|
||||
- docs/**
|
||||
- README.md
|
||||
- .github/workflows/pages-deploy.yml
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pages: write
|
||||
id-token: write
|
||||
|
||||
concurrency:
|
||||
group: github-pages
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: npm
|
||||
cache-dependency-path: site/package-lock.json
|
||||
|
||||
- name: Install Dependencies
|
||||
working-directory: site
|
||||
run: npm ci
|
||||
|
||||
- name: Build Site
|
||||
working-directory: site
|
||||
run: npm run build
|
||||
|
||||
- name: Configure Pages
|
||||
uses: actions/configure-pages@v5
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-pages-artifact@v3
|
||||
with:
|
||||
path: gh-pages
|
||||
|
||||
deploy:
|
||||
needs: build
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
|
||||
steps:
|
||||
- name: Deploy to GitHub Pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@v4
|
||||
20
.github/workflows/pr-auto-response.yml
vendored
20
.github/workflows/pr-auto-response.yml
vendored
@ -7,19 +7,27 @@ on:
|
||||
branches: [dev, main]
|
||||
types: [opened, labeled, unlabeled]
|
||||
|
||||
concurrency:
|
||||
# Keep cancellation within the same lifecycle action to avoid `labeled`
|
||||
# events canceling an in-flight `opened` run for the same issue/PR.
|
||||
group: pr-auto-response-${{ github.event.pull_request.number || github.event.issue.number || github.run_id }}-${{ github.event.action || 'unknown' }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
LABEL_POLICY_PATH: .github/label-policy.json
|
||||
|
||||
jobs:
|
||||
contributor-tier-issues:
|
||||
# Only run for opened/reopened events to avoid duplicate runs with labeled-routes job
|
||||
if: >-
|
||||
(github.event_name == 'issues' &&
|
||||
(github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'labeled' || github.event.action == 'unlabeled')) ||
|
||||
(github.event_name == 'pull_request_target' &&
|
||||
(github.event.action == 'labeled' || github.event.action == 'unlabeled'))
|
||||
runs-on: ubuntu-latest
|
||||
(github.event.action == 'opened' || github.event.action == 'reopened'))
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
@ -38,7 +46,7 @@ jobs:
|
||||
await script({ github, context, core });
|
||||
first-interaction:
|
||||
if: github.event.action == 'opened'
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
@ -69,7 +77,7 @@ jobs:
|
||||
|
||||
labeled-routes:
|
||||
if: github.event.action == 'labeled'
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
8
.github/workflows/pr-check-stale.yml
vendored
8
.github/workflows/pr-check-stale.yml
vendored
@ -7,12 +7,18 @@ on:
|
||||
|
||||
permissions: {}
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Mark stale issues and pull requests
|
||||
uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
|
||||
|
||||
9
.github/workflows/pr-check-status.yml
vendored
9
.github/workflows/pr-check-status.yml
vendored
@ -11,9 +11,15 @@ concurrency:
|
||||
group: pr-check-status
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
jobs:
|
||||
nudge-stale-prs:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 10
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
@ -23,7 +29,6 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Nudge PRs that need rebase or CI refresh
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
|
||||
10
.github/workflows/pr-intake-checks.yml
vendored
10
.github/workflows/pr-intake-checks.yml
vendored
@ -3,7 +3,7 @@ name: PR Intake Checks
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: [dev, main]
|
||||
types: [opened, reopened, synchronize, edited, ready_for_review]
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
|
||||
concurrency:
|
||||
group: pr-intake-checks-${{ github.event.pull_request.number || github.run_id }}
|
||||
@ -14,10 +14,16 @@ permissions:
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
intake:
|
||||
name: Intake Checks
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
|
||||
9
.github/workflows/pr-label-policy-check.yml
vendored
9
.github/workflows/pr-label-policy-check.yml
vendored
@ -7,6 +7,7 @@ on:
|
||||
- ".github/workflows/pr-labeler.yml"
|
||||
- ".github/workflows/pr-auto-response.yml"
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- ".github/label-policy.json"
|
||||
- ".github/workflows/pr-labeler.yml"
|
||||
@ -19,9 +20,15 @@ concurrency:
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
contributor-tier-consistency:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
||||
7
.github/workflows/pr-labeler.yml
vendored
7
.github/workflows/pr-labeler.yml
vendored
@ -3,7 +3,7 @@ name: PR Labeler
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: [dev, main]
|
||||
types: [opened, reopened, synchronize, edited, labeled, unlabeled]
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
mode:
|
||||
@ -25,11 +25,14 @@ permissions:
|
||||
issues: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
LABEL_POLICY_PATH: .github/label-policy.json
|
||||
|
||||
jobs:
|
||||
label:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
320
.github/workflows/pub-docker-img.yml
vendored
320
.github/workflows/pub-docker-img.yml
vendored
@ -12,21 +12,33 @@ on:
|
||||
- "rust-toolchain.toml"
|
||||
- "dev/config.template.toml"
|
||||
- ".github/workflows/pub-docker-img.yml"
|
||||
- ".github/release/ghcr-tag-policy.json"
|
||||
- ".github/release/ghcr-vulnerability-policy.json"
|
||||
- "scripts/ci/ghcr_publish_contract_guard.py"
|
||||
- "scripts/ci/ghcr_vulnerability_gate.py"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
release_tag:
|
||||
description: "Existing release tag to publish (e.g. v0.2.0). Leave empty for smoke-only run."
|
||||
required: false
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: docker-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
pr-smoke:
|
||||
name: PR Docker Smoke
|
||||
if: github.event_name == 'workflow_dispatch' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository)
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || (github.event_name == 'workflow_dispatch' && inputs.release_tag == '')
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 25
|
||||
permissions:
|
||||
contents: read
|
||||
@ -34,8 +46,22 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Setup Blacksmith Builder
|
||||
uses: useblacksmith/setup-docker-builder@ef12d5b165b596e3aa44ea8198d8fde563eab402 # v1
|
||||
- name: Resolve Docker API version
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
server_api="$(docker version --format '{{.Server.APIVersion}}')"
|
||||
min_api="$(docker version --format '{{.Server.MinAPIVersion}}' 2>/dev/null || true)"
|
||||
if [[ -z "${server_api}" || "${server_api}" == "<no value>" ]]; then
|
||||
echo "::error::Unable to detect Docker server API version."
|
||||
docker version || true
|
||||
exit 1
|
||||
fi
|
||||
echo "DOCKER_API_VERSION=${server_api}" >> "$GITHUB_ENV"
|
||||
echo "Using Docker API version ${server_api} (server min: ${min_api:-unknown})"
|
||||
|
||||
- name: Setup Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
|
||||
|
||||
- name: Extract metadata (tags, labels)
|
||||
if: github.event_name == 'pull_request'
|
||||
@ -47,7 +73,7 @@ jobs:
|
||||
type=ref,event=pr
|
||||
|
||||
- name: Build smoke image
|
||||
uses: useblacksmith/build-push-action@30c71162f16ea2c27c3e21523255d209b8b538c1 # v2
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
@ -65,18 +91,35 @@ jobs:
|
||||
|
||||
publish:
|
||||
name: Build and Push Docker Image
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'zeroclaw-labs/zeroclaw'
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 45
|
||||
if: github.repository == 'zeroclaw-labs/zeroclaw' && ((github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')) || (github.event_name == 'workflow_dispatch' && inputs.release_tag != ''))
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 90
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
security-events: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
ref: ${{ github.event_name == 'workflow_dispatch' && format('refs/tags/{0}', inputs.release_tag) || github.ref }}
|
||||
|
||||
- name: Setup Blacksmith Builder
|
||||
uses: useblacksmith/setup-docker-builder@ef12d5b165b596e3aa44ea8198d8fde563eab402 # v1
|
||||
- name: Resolve Docker API version
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
server_api="$(docker version --format '{{.Server.APIVersion}}')"
|
||||
min_api="$(docker version --format '{{.Server.MinAPIVersion}}' 2>/dev/null || true)"
|
||||
if [[ -z "${server_api}" || "${server_api}" == "<no value>" ]]; then
|
||||
echo "::error::Unable to detect Docker server API version."
|
||||
docker version || true
|
||||
exit 1
|
||||
fi
|
||||
echo "DOCKER_API_VERSION=${server_api}" >> "$GITHUB_ENV"
|
||||
echo "Using Docker API version ${server_api} (server min: ${min_api:-unknown})"
|
||||
|
||||
- name: Setup Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
|
||||
|
||||
- name: Log in to Container Registry
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
|
||||
@ -91,22 +134,53 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
SHA_TAG="${IMAGE}:sha-${GITHUB_SHA::12}"
|
||||
if [[ "${GITHUB_REF}" != refs/tags/v* ]]; then
|
||||
echo "::error::Docker publish is restricted to v* tag pushes."
|
||||
if [[ "${GITHUB_EVENT_NAME}" == "push" ]]; then
|
||||
if [[ "${GITHUB_REF}" != refs/tags/v* ]]; then
|
||||
echo "::error::Docker publish is restricted to v* tag pushes."
|
||||
exit 1
|
||||
fi
|
||||
RELEASE_TAG="${GITHUB_REF#refs/tags/}"
|
||||
elif [[ "${GITHUB_EVENT_NAME}" == "workflow_dispatch" ]]; then
|
||||
RELEASE_TAG="${{ inputs.release_tag }}"
|
||||
if [[ -z "${RELEASE_TAG}" ]]; then
|
||||
echo "::error::workflow_dispatch publish requires inputs.release_tag"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! "${RELEASE_TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+([.-][0-9A-Za-z.-]+)?$ ]]; then
|
||||
echo "::error::release_tag must be vX.Y.Z or vX.Y.Z-suffix (received: ${RELEASE_TAG})"
|
||||
exit 1
|
||||
fi
|
||||
if ! git rev-parse --verify "refs/tags/${RELEASE_TAG}" >/dev/null 2>&1; then
|
||||
echo "::error::release tag not found in checkout: ${RELEASE_TAG}"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "::error::Unsupported event for publish: ${GITHUB_EVENT_NAME}"
|
||||
exit 1
|
||||
fi
|
||||
RELEASE_SHA="$(git rev-parse HEAD)"
|
||||
SHA_SUFFIX="sha-${RELEASE_SHA::12}"
|
||||
SHA_TAG="${IMAGE}:${SHA_SUFFIX}"
|
||||
LATEST_SUFFIX="latest"
|
||||
LATEST_TAG="${IMAGE}:${LATEST_SUFFIX}"
|
||||
VERSION_TAG="${IMAGE}:${RELEASE_TAG}"
|
||||
TAGS="${VERSION_TAG},${SHA_TAG},${LATEST_TAG}"
|
||||
|
||||
TAG_NAME="${GITHUB_REF#refs/tags/}"
|
||||
TAGS="${IMAGE}:${TAG_NAME},${SHA_TAG}"
|
||||
|
||||
echo "tags=${TAGS}" >> "$GITHUB_OUTPUT"
|
||||
{
|
||||
echo "tags=${TAGS}"
|
||||
echo "release_tag=${RELEASE_TAG}"
|
||||
echo "release_sha=${RELEASE_SHA}"
|
||||
echo "sha_tag=${SHA_SUFFIX}"
|
||||
echo "latest_tag=${LATEST_SUFFIX}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: useblacksmith/build-push-action@30c71162f16ea2c27c3e21523255d209b8b538c1 # v2
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
build-args: |
|
||||
ZEROCLAW_CARGO_ALL_FEATURES=true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
cache-from: type=gha
|
||||
@ -146,30 +220,204 @@ jobs:
|
||||
done
|
||||
done
|
||||
|
||||
echo "::warning::Unable to update GHCR visibility via API in this run; proceeding to direct anonymous pull verification."
|
||||
echo "::warning::Unable to update GHCR visibility via API in this run; proceeding to GHCR publish contract verification."
|
||||
|
||||
- name: Verify anonymous GHCR pull access
|
||||
- name: Validate GHCR publish contract
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TAG_NAME="${GITHUB_REF#refs/tags/}"
|
||||
token_resp="$(curl -sS "https://ghcr.io/token?scope=repository:${GITHUB_REPOSITORY}:pull")"
|
||||
token="$(echo "$token_resp" | sed -n 's/.*"token":"\([^"]*\)".*/\1/p')"
|
||||
mkdir -p artifacts
|
||||
python3 scripts/ci/ghcr_publish_contract_guard.py \
|
||||
--repository "${GITHUB_REPOSITORY,,}" \
|
||||
--release-tag "${{ steps.meta.outputs.release_tag }}" \
|
||||
--sha "${{ steps.meta.outputs.release_sha }}" \
|
||||
--policy-file .github/release/ghcr-tag-policy.json \
|
||||
--output-json artifacts/ghcr-publish-contract.json \
|
||||
--output-md artifacts/ghcr-publish-contract.md \
|
||||
--fail-on-violation
|
||||
|
||||
if [ -z "$token" ]; then
|
||||
echo "::error::Anonymous GHCR token request failed: $token_resp"
|
||||
exit 1
|
||||
- name: Emit GHCR publish contract audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/ghcr-publish-contract.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type ghcr_publish_contract \
|
||||
--input-json artifacts/ghcr-publish-contract.json \
|
||||
--output-json artifacts/audit-event-ghcr-publish-contract.json \
|
||||
--artifact-name ghcr-publish-contract \
|
||||
--retention-days 21
|
||||
fi
|
||||
|
||||
code="$(curl -sS -o /tmp/ghcr-manifest.json -w "%{http_code}" \
|
||||
-H "Authorization: Bearer ${token}" \
|
||||
-H "Accept: application/vnd.oci.image.index.v1+json, application/vnd.docker.distribution.manifest.v2+json" \
|
||||
"https://ghcr.io/v2/${GITHUB_REPOSITORY}/manifests/${TAG_NAME}")"
|
||||
|
||||
if [ "$code" != "200" ]; then
|
||||
echo "::error::Anonymous manifest pull failed with HTTP ${code}"
|
||||
cat /tmp/ghcr-manifest.json || true
|
||||
exit 1
|
||||
- name: Publish GHCR contract summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/ghcr-publish-contract.md ]; then
|
||||
cat artifacts/ghcr-publish-contract.md >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
echo "Anonymous GHCR pull access verified."
|
||||
- name: Upload GHCR publish contract artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: ghcr-publish-contract
|
||||
path: |
|
||||
artifacts/ghcr-publish-contract.json
|
||||
artifacts/ghcr-publish-contract.md
|
||||
artifacts/audit-event-ghcr-publish-contract.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 21
|
||||
|
||||
- name: Scan published image for vulnerabilities (Trivy)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
|
||||
TAG_NAME="${{ steps.meta.outputs.release_tag }}"
|
||||
SHA_TAG="${{ steps.meta.outputs.sha_tag }}"
|
||||
LATEST_TAG="${{ steps.meta.outputs.latest_tag }}"
|
||||
IMAGE_BASE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
VERSION_REF="${IMAGE_BASE}:${TAG_NAME}"
|
||||
SHA_REF="${IMAGE_BASE}:${SHA_TAG}"
|
||||
LATEST_REF="${IMAGE_BASE}:${LATEST_TAG}"
|
||||
SARIF_OUT="artifacts/trivy-${TAG_NAME}.sarif"
|
||||
TABLE_OUT="artifacts/trivy-${TAG_NAME}.txt"
|
||||
JSON_OUT="artifacts/trivy-${TAG_NAME}.json"
|
||||
SHA_TABLE_OUT="artifacts/trivy-${SHA_TAG}.txt"
|
||||
SHA_JSON_OUT="artifacts/trivy-${SHA_TAG}.json"
|
||||
LATEST_TABLE_OUT="artifacts/trivy-${LATEST_TAG}.txt"
|
||||
LATEST_JSON_OUT="artifacts/trivy-${LATEST_TAG}.json"
|
||||
|
||||
scan_trivy() {
|
||||
local image_ref="$1"
|
||||
local output_prefix="$2"
|
||||
|
||||
docker run --rm \
|
||||
-v "$PWD/artifacts:/work" \
|
||||
aquasec/trivy:0.58.2 image \
|
||||
--quiet \
|
||||
--ignore-unfixed \
|
||||
--severity HIGH,CRITICAL \
|
||||
--format json \
|
||||
--output "/work/${output_prefix}.json" \
|
||||
"${image_ref}"
|
||||
|
||||
docker run --rm \
|
||||
-v "$PWD/artifacts:/work" \
|
||||
aquasec/trivy:0.58.2 image \
|
||||
--quiet \
|
||||
--ignore-unfixed \
|
||||
--severity HIGH,CRITICAL \
|
||||
--format table \
|
||||
--output "/work/${output_prefix}.txt" \
|
||||
"${image_ref}"
|
||||
}
|
||||
|
||||
docker run --rm \
|
||||
-v "$PWD/artifacts:/work" \
|
||||
aquasec/trivy:0.58.2 image \
|
||||
--quiet \
|
||||
--ignore-unfixed \
|
||||
--severity HIGH,CRITICAL \
|
||||
--format sarif \
|
||||
--output "/work/trivy-${TAG_NAME}.sarif" \
|
||||
"${VERSION_REF}"
|
||||
|
||||
scan_trivy "${VERSION_REF}" "trivy-${TAG_NAME}"
|
||||
scan_trivy "${SHA_REF}" "trivy-${SHA_TAG}"
|
||||
scan_trivy "${LATEST_REF}" "trivy-${LATEST_TAG}"
|
||||
|
||||
echo "Generated Trivy reports:"
|
||||
ls -1 "$SARIF_OUT" "$TABLE_OUT" "$JSON_OUT" "$SHA_TABLE_OUT" "$SHA_JSON_OUT" "$LATEST_TABLE_OUT" "$LATEST_JSON_OUT"
|
||||
|
||||
- name: Validate GHCR vulnerability gate
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/ghcr_vulnerability_gate.py \
|
||||
--release-tag "${{ steps.meta.outputs.release_tag }}" \
|
||||
--sha-tag "${{ steps.meta.outputs.sha_tag }}" \
|
||||
--latest-tag "${{ steps.meta.outputs.latest_tag }}" \
|
||||
--release-report-json "artifacts/trivy-${{ steps.meta.outputs.release_tag }}.json" \
|
||||
--sha-report-json "artifacts/trivy-${{ steps.meta.outputs.sha_tag }}.json" \
|
||||
--latest-report-json "artifacts/trivy-${{ steps.meta.outputs.latest_tag }}.json" \
|
||||
--policy-file .github/release/ghcr-vulnerability-policy.json \
|
||||
--output-json artifacts/ghcr-vulnerability-gate.json \
|
||||
--output-md artifacts/ghcr-vulnerability-gate.md \
|
||||
--fail-on-violation
|
||||
|
||||
- name: Emit GHCR vulnerability gate audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/ghcr-vulnerability-gate.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type ghcr_vulnerability_gate \
|
||||
--input-json artifacts/ghcr-vulnerability-gate.json \
|
||||
--output-json artifacts/audit-event-ghcr-vulnerability-gate.json \
|
||||
--artifact-name ghcr-vulnerability-gate \
|
||||
--retention-days 21
|
||||
fi
|
||||
|
||||
- name: Publish GHCR vulnerability summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/ghcr-vulnerability-gate.md ]; then
|
||||
cat artifacts/ghcr-vulnerability-gate.md >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Upload GHCR vulnerability gate artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: ghcr-vulnerability-gate
|
||||
path: |
|
||||
artifacts/ghcr-vulnerability-gate.json
|
||||
artifacts/ghcr-vulnerability-gate.md
|
||||
artifacts/audit-event-ghcr-vulnerability-gate.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 21
|
||||
|
||||
- name: Detect Trivy SARIF report
|
||||
id: trivy-sarif
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
sarif_path="artifacts/trivy-${{ steps.meta.outputs.release_tag }}.sarif"
|
||||
if [ -f "${sarif_path}" ]; then
|
||||
echo "exists=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "exists=false" >> "$GITHUB_OUTPUT"
|
||||
echo "::notice::Trivy SARIF report not found at ${sarif_path}; skipping SARIF upload."
|
||||
fi
|
||||
|
||||
- name: Upload Trivy SARIF
|
||||
if: always() && steps.trivy-sarif.outputs.exists == 'true'
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
with:
|
||||
sarif_file: artifacts/trivy-${{ steps.meta.outputs.release_tag }}.sarif
|
||||
category: ghcr-trivy
|
||||
|
||||
- name: Upload Trivy report artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: ghcr-trivy-report
|
||||
path: |
|
||||
artifacts/trivy-${{ steps.meta.outputs.release_tag }}.sarif
|
||||
artifacts/trivy-${{ steps.meta.outputs.release_tag }}.txt
|
||||
artifacts/trivy-${{ steps.meta.outputs.release_tag }}.json
|
||||
artifacts/trivy-sha-*.txt
|
||||
artifacts/trivy-sha-*.json
|
||||
artifacts/trivy-latest.txt
|
||||
artifacts/trivy-latest.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
221
.github/workflows/pub-homebrew-core.yml
vendored
221
.github/workflows/pub-homebrew-core.yml
vendored
@ -1,221 +0,0 @@
|
||||
name: Pub Homebrew Core
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
release_tag:
|
||||
description: "Existing release tag to publish (vX.Y.Z)"
|
||||
required: true
|
||||
type: string
|
||||
dry_run:
|
||||
description: "Patch formula only (no push/PR)"
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: homebrew-core-${{ github.run_id }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
publish-homebrew-core:
|
||||
name: Publish Homebrew Core PR
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
env:
|
||||
UPSTREAM_REPO: Homebrew/homebrew-core
|
||||
FORMULA_PATH: Formula/z/zeroclaw.rb
|
||||
RELEASE_TAG: ${{ inputs.release_tag }}
|
||||
DRY_RUN: ${{ inputs.dry_run }}
|
||||
BOT_FORK_REPO: ${{ vars.HOMEBREW_CORE_BOT_FORK_REPO }}
|
||||
BOT_EMAIL: ${{ vars.HOMEBREW_CORE_BOT_EMAIL }}
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Validate release tag and version alignment
|
||||
id: release_meta
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
semver_pattern='^v[0-9]+\.[0-9]+\.[0-9]+([.-][0-9A-Za-z.-]+)?$'
|
||||
if [[ ! "$RELEASE_TAG" =~ $semver_pattern ]]; then
|
||||
echo "::error::release_tag must match semver-like format (vX.Y.Z[-suffix])."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! git rev-parse "refs/tags/${RELEASE_TAG}" >/dev/null 2>&1; then
|
||||
git fetch --tags origin
|
||||
fi
|
||||
|
||||
tag_version="${RELEASE_TAG#v}"
|
||||
cargo_version="$(git show "${RELEASE_TAG}:Cargo.toml" | sed -n 's/^version = "\([^"]*\)"/\1/p' | head -n1)"
|
||||
if [[ -z "$cargo_version" ]]; then
|
||||
echo "::error::Unable to read Cargo.toml version from tag ${RELEASE_TAG}."
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$cargo_version" != "$tag_version" ]]; then
|
||||
echo "::error::Tag ${RELEASE_TAG} does not match Cargo.toml version (${cargo_version})."
|
||||
echo "::error::Bump Cargo.toml first, then publish Homebrew."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
tarball_url="https://github.com/${GITHUB_REPOSITORY}/archive/refs/tags/${RELEASE_TAG}.tar.gz"
|
||||
tarball_sha="$(curl -fsSL "$tarball_url" | sha256sum | awk '{print $1}')"
|
||||
|
||||
{
|
||||
echo "tag_version=$tag_version"
|
||||
echo "tarball_url=$tarball_url"
|
||||
echo "tarball_sha=$tarball_sha"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
{
|
||||
echo "### Release Metadata"
|
||||
echo "- release_tag: ${RELEASE_TAG}"
|
||||
echo "- cargo_version: ${cargo_version}"
|
||||
echo "- tarball_sha256: ${tarball_sha}"
|
||||
echo "- dry_run: ${DRY_RUN}"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Patch Homebrew formula
|
||||
id: patch_formula
|
||||
shell: bash
|
||||
env:
|
||||
HOMEBREW_CORE_BOT_TOKEN: ${{ secrets.HOMEBREW_UPSTREAM_PR_TOKEN || secrets.HOMEBREW_CORE_BOT_TOKEN }}
|
||||
GH_TOKEN: ${{ secrets.HOMEBREW_UPSTREAM_PR_TOKEN || secrets.HOMEBREW_CORE_BOT_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
tmp_repo="$(mktemp -d)"
|
||||
echo "tmp_repo=$tmp_repo" >> "$GITHUB_OUTPUT"
|
||||
|
||||
if [[ "$DRY_RUN" == "true" ]]; then
|
||||
git clone --depth=1 "https://github.com/${UPSTREAM_REPO}.git" "$tmp_repo/homebrew-core"
|
||||
else
|
||||
if [[ -z "${BOT_FORK_REPO}" ]]; then
|
||||
echo "::error::Repository variable HOMEBREW_CORE_BOT_FORK_REPO is required when dry_run=false."
|
||||
exit 1
|
||||
fi
|
||||
if [[ -z "${HOMEBREW_CORE_BOT_TOKEN}" ]]; then
|
||||
echo "::error::Repository secret HOMEBREW_CORE_BOT_TOKEN is required when dry_run=false."
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$BOT_FORK_REPO" != */* ]]; then
|
||||
echo "::error::HOMEBREW_CORE_BOT_FORK_REPO must be in owner/repo format."
|
||||
exit 1
|
||||
fi
|
||||
if ! command -v gh >/dev/null 2>&1; then
|
||||
echo "::error::gh CLI is required on the runner."
|
||||
exit 1
|
||||
fi
|
||||
if [[ -z "${GH_TOKEN:-}" ]]; then
|
||||
echo "::error::Repository secret HOMEBREW_CORE_BOT_TOKEN is missing."
|
||||
exit 1
|
||||
fi
|
||||
if ! gh api "repos/${BOT_FORK_REPO}" >/dev/null 2>&1; then
|
||||
echo "::error::HOMEBREW_CORE_BOT_TOKEN cannot access ${BOT_FORK_REPO}."
|
||||
exit 1
|
||||
fi
|
||||
gh repo clone "${BOT_FORK_REPO}" "$tmp_repo/homebrew-core" -- --depth=1
|
||||
fi
|
||||
|
||||
repo_dir="$tmp_repo/homebrew-core"
|
||||
formula_file="$repo_dir/$FORMULA_PATH"
|
||||
if [[ ! -f "$formula_file" ]]; then
|
||||
echo "::error::Formula file not found: $FORMULA_PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$DRY_RUN" == "false" ]]; then
|
||||
if git -C "$repo_dir" remote get-url upstream >/dev/null 2>&1; then
|
||||
git -C "$repo_dir" remote set-url upstream "https://github.com/${UPSTREAM_REPO}.git"
|
||||
else
|
||||
git -C "$repo_dir" remote add upstream "https://github.com/${UPSTREAM_REPO}.git"
|
||||
fi
|
||||
if git -C "$repo_dir" ls-remote --exit-code --heads upstream main >/dev/null 2>&1; then
|
||||
upstream_ref="main"
|
||||
else
|
||||
upstream_ref="master"
|
||||
fi
|
||||
git -C "$repo_dir" fetch --depth=1 upstream "$upstream_ref"
|
||||
branch_name="zeroclaw-${RELEASE_TAG}-${GITHUB_RUN_ID}"
|
||||
git -C "$repo_dir" checkout -B "$branch_name" "upstream/$upstream_ref"
|
||||
echo "branch_name=$branch_name" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
tarball_url="${{ steps.release_meta.outputs.tarball_url }}"
|
||||
tarball_sha="${{ steps.release_meta.outputs.tarball_sha }}"
|
||||
|
||||
perl -0pi -e "s|^ url \".*\"| url \"${tarball_url}\"|m" "$formula_file"
|
||||
perl -0pi -e "s|^ sha256 \".*\"| sha256 \"${tarball_sha}\"|m" "$formula_file"
|
||||
perl -0pi -e "s|^ license \".*\"| license \"Apache-2.0 OR MIT\"|m" "$formula_file"
|
||||
perl -0pi -e 's|^ head "https://github\.com/zeroclaw-labs/zeroclaw\.git".*| head "https://github.com/zeroclaw-labs/zeroclaw.git"|m' "$formula_file"
|
||||
|
||||
git -C "$repo_dir" diff -- "$FORMULA_PATH" > "$tmp_repo/formula.diff"
|
||||
if [[ ! -s "$tmp_repo/formula.diff" ]]; then
|
||||
echo "::error::No formula changes generated. Nothing to publish."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
{
|
||||
echo "### Formula Diff"
|
||||
echo '```diff'
|
||||
cat "$tmp_repo/formula.diff"
|
||||
echo '```'
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Push branch and open Homebrew PR
|
||||
if: ${{ inputs.dry_run == false }}
|
||||
shell: bash
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.HOMEBREW_UPSTREAM_PR_TOKEN || secrets.HOMEBREW_CORE_BOT_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
repo_dir="${{ steps.patch_formula.outputs.tmp_repo }}/homebrew-core"
|
||||
branch_name="${{ steps.patch_formula.outputs.branch_name }}"
|
||||
tag_version="${{ steps.release_meta.outputs.tag_version }}"
|
||||
fork_owner="${BOT_FORK_REPO%%/*}"
|
||||
bot_email="${BOT_EMAIL:-${fork_owner}@users.noreply.github.com}"
|
||||
|
||||
git -C "$repo_dir" config user.name "$fork_owner"
|
||||
git -C "$repo_dir" config user.email "$bot_email"
|
||||
git -C "$repo_dir" add "$FORMULA_PATH"
|
||||
git -C "$repo_dir" commit -m "zeroclaw ${tag_version}"
|
||||
if [[ -z "${GH_TOKEN:-}" ]]; then
|
||||
echo "::error::Repository secret HOMEBREW_CORE_BOT_TOKEN is missing."
|
||||
exit 1
|
||||
fi
|
||||
gh auth setup-git
|
||||
git -C "$repo_dir" push --set-upstream origin "$branch_name"
|
||||
|
||||
pr_title="zeroclaw ${tag_version}"
|
||||
pr_body=$(cat <<EOF
|
||||
Automated formula bump from ZeroClaw release workflow.
|
||||
|
||||
- Release tag: ${RELEASE_TAG}
|
||||
- Source tarball: ${{ steps.release_meta.outputs.tarball_url }}
|
||||
- Source sha256: ${{ steps.release_meta.outputs.tarball_sha }}
|
||||
EOF
|
||||
)
|
||||
|
||||
gh pr create \
|
||||
--repo "$UPSTREAM_REPO" \
|
||||
--base main \
|
||||
--head "${fork_owner}:${branch_name}" \
|
||||
--title "$pr_title" \
|
||||
--body "$pr_body"
|
||||
|
||||
- name: Summary output
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ "$DRY_RUN" == "true" ]]; then
|
||||
echo "Dry run complete: formula diff generated, no push/PR performed."
|
||||
else
|
||||
echo "Publish complete: branch pushed and PR opened from bot fork."
|
||||
fi
|
||||
261
.github/workflows/pub-prerelease.yml
vendored
Normal file
261
.github/workflows/pub-prerelease.yml
vendored
Normal file
@ -0,0 +1,261 @@
|
||||
name: Pub Pre-release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*-alpha.*"
|
||||
- "v*-beta.*"
|
||||
- "v*-rc.*"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "Existing pre-release tag (e.g. v0.1.8-rc.1)"
|
||||
required: true
|
||||
default: ""
|
||||
type: string
|
||||
mode:
|
||||
description: "dry-run validates/builds only; publish creates prerelease"
|
||||
required: true
|
||||
default: dry-run
|
||||
type: choice
|
||||
options:
|
||||
- dry-run
|
||||
- publish
|
||||
draft:
|
||||
description: "Create prerelease as draft"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: prerelease-${{ github.ref || github.run_id }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
prerelease-guard:
|
||||
name: Pre-release Guard
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
outputs:
|
||||
release_tag: ${{ steps.vars.outputs.release_tag }}
|
||||
mode: ${{ steps.vars.outputs.mode }}
|
||||
draft: ${{ steps.vars.outputs.draft }}
|
||||
ready_to_publish: ${{ steps.extract.outputs.ready_to_publish }}
|
||||
stage: ${{ steps.extract.outputs.stage }}
|
||||
transition_outcome: ${{ steps.extract.outputs.transition_outcome }}
|
||||
latest_stage: ${{ steps.extract.outputs.latest_stage }}
|
||||
latest_stage_tag: ${{ steps.extract.outputs.latest_stage_tag }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Resolve prerelease inputs
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
|
||||
release_tag="${GITHUB_REF_NAME}"
|
||||
mode="publish"
|
||||
draft="false"
|
||||
else
|
||||
release_tag="${{ inputs.tag }}"
|
||||
mode="${{ inputs.mode }}"
|
||||
draft="${{ inputs.draft }}"
|
||||
fi
|
||||
|
||||
{
|
||||
echo "release_tag=${release_tag}"
|
||||
echo "mode=${mode}"
|
||||
echo "draft=${draft}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Validate prerelease stage gate
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
python3 scripts/ci/prerelease_guard.py \
|
||||
--repo-root . \
|
||||
--tag "${{ steps.vars.outputs.release_tag }}" \
|
||||
--stage-config-file .github/release/prerelease-stage-gates.json \
|
||||
--mode "${{ steps.vars.outputs.mode }}" \
|
||||
--output-json artifacts/prerelease-guard.json \
|
||||
--output-md artifacts/prerelease-guard.md \
|
||||
--fail-on-violation
|
||||
|
||||
- name: Extract prerelease outputs
|
||||
id: extract
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ready_to_publish="$(python3 - <<'PY'
|
||||
import json
|
||||
data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8'))
|
||||
print(str(bool(data.get('ready_to_publish', False))).lower())
|
||||
PY
|
||||
)"
|
||||
stage="$(python3 - <<'PY'
|
||||
import json
|
||||
data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8'))
|
||||
print(data.get('stage', 'unknown'))
|
||||
PY
|
||||
)"
|
||||
transition_outcome="$(python3 - <<'PY'
|
||||
import json
|
||||
data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8'))
|
||||
transition = data.get('transition') or {}
|
||||
print(transition.get('outcome', 'unknown'))
|
||||
PY
|
||||
)"
|
||||
latest_stage="$(python3 - <<'PY'
|
||||
import json
|
||||
data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8'))
|
||||
history = data.get('stage_history') or {}
|
||||
print(history.get('latest_stage', 'unknown'))
|
||||
PY
|
||||
)"
|
||||
latest_stage_tag="$(python3 - <<'PY'
|
||||
import json
|
||||
data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8'))
|
||||
history = data.get('stage_history') or {}
|
||||
print(history.get('latest_tag', 'unknown'))
|
||||
PY
|
||||
)"
|
||||
{
|
||||
echo "ready_to_publish=${ready_to_publish}"
|
||||
echo "stage=${stage}"
|
||||
echo "transition_outcome=${transition_outcome}"
|
||||
echo "latest_stage=${latest_stage}"
|
||||
echo "latest_stage_tag=${latest_stage_tag}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Emit prerelease audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type prerelease_guard \
|
||||
--input-json artifacts/prerelease-guard.json \
|
||||
--output-json artifacts/audit-event-prerelease-guard.json \
|
||||
--artifact-name prerelease-guard \
|
||||
--retention-days 21
|
||||
|
||||
- name: Publish prerelease summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/prerelease-guard.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload prerelease guard artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: prerelease-guard
|
||||
path: |
|
||||
artifacts/prerelease-guard.json
|
||||
artifacts/prerelease-guard.md
|
||||
artifacts/audit-event-prerelease-guard.json
|
||||
if-no-files-found: error
|
||||
retention-days: 21
|
||||
|
||||
build-prerelease:
|
||||
name: Build Pre-release Artifact
|
||||
needs: [prerelease-guard]
|
||||
# Keep GNU Linux prerelease artifacts on Ubuntu 22.04 so runtime GLIBC
|
||||
# symbols remain compatible with Debian 12 / Ubuntu 22.04 hosts.
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 45
|
||||
steps:
|
||||
- name: Checkout tag
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
ref: ${{ needs.prerelease-guard.outputs.release_tag }}
|
||||
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: prerelease-${{ needs.prerelease-guard.outputs.release_tag }}
|
||||
cache-targets: true
|
||||
|
||||
- name: Build release-fast binary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cargo build --profile release-fast --locked --target x86_64-unknown-linux-gnu
|
||||
|
||||
- name: Package prerelease artifact
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
cp target/x86_64-unknown-linux-gnu/release-fast/zeroclaw artifacts/zeroclaw
|
||||
tar czf artifacts/zeroclaw-x86_64-unknown-linux-gnu.tar.gz -C artifacts zeroclaw
|
||||
rm artifacts/zeroclaw
|
||||
|
||||
- name: Generate manifest + checksums
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/release_manifest.py \
|
||||
--artifacts-dir artifacts \
|
||||
--release-tag "${{ needs.prerelease-guard.outputs.release_tag }}" \
|
||||
--output-json artifacts/prerelease-manifest.json \
|
||||
--output-md artifacts/prerelease-manifest.md \
|
||||
--checksums-path artifacts/SHA256SUMS \
|
||||
--fail-empty
|
||||
|
||||
- name: Publish prerelease build summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/prerelease-manifest.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload prerelease build artifacts
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: prerelease-artifacts
|
||||
path: artifacts/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
publish-prerelease:
|
||||
name: Publish GitHub Pre-release
|
||||
needs: [prerelease-guard, build-prerelease]
|
||||
if: needs.prerelease-guard.outputs.ready_to_publish == 'true'
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- name: Download prerelease artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
name: prerelease-artifacts
|
||||
path: artifacts
|
||||
|
||||
- name: Create or update GitHub pre-release
|
||||
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2
|
||||
with:
|
||||
tag_name: ${{ needs.prerelease-guard.outputs.release_tag }}
|
||||
prerelease: true
|
||||
draft: ${{ needs.prerelease-guard.outputs.draft == 'true' }}
|
||||
generate_release_notes: true
|
||||
files: |
|
||||
artifacts/**/*
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
455
.github/workflows/pub-release.yml
vendored
455
.github/workflows/pub-release.yml
vendored
@ -39,12 +39,16 @@ permissions:
|
||||
id-token: write # Required for cosign keyless signing via OIDC
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
name: Prepare Release Context
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
if: github.event_name != 'push' || !contains(github.ref_name, '-')
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
outputs:
|
||||
release_ref: ${{ steps.vars.outputs.release_ref }}
|
||||
release_tag: ${{ steps.vars.outputs.release_tag }}
|
||||
@ -60,7 +64,6 @@ jobs:
|
||||
event_name="${GITHUB_EVENT_NAME}"
|
||||
publish_release="false"
|
||||
draft_release="false"
|
||||
semver_pattern='^v[0-9]+\.[0-9]+\.[0-9]+([.-][0-9A-Za-z.-]+)?$'
|
||||
|
||||
if [[ "$event_name" == "push" ]]; then
|
||||
release_ref="${GITHUB_REF_NAME}"
|
||||
@ -87,41 +90,6 @@ jobs:
|
||||
release_tag="verify-${GITHUB_SHA::12}"
|
||||
fi
|
||||
|
||||
if [[ "$publish_release" == "true" ]]; then
|
||||
if [[ ! "$release_tag" =~ $semver_pattern ]]; then
|
||||
echo "::error::release_tag must match semver-like format (vX.Y.Z[-suffix])"
|
||||
exit 1
|
||||
fi
|
||||
if ! git ls-remote --exit-code --tags "https://github.com/${GITHUB_REPOSITORY}.git" "refs/tags/${release_tag}" >/dev/null; then
|
||||
echo "::error::Tag ${release_tag} does not exist on origin. Push the tag first, then rerun manual publish."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Guardrail: release tags must resolve to commits already reachable from main.
|
||||
tmp_repo="$(mktemp -d)"
|
||||
trap 'rm -rf "$tmp_repo"' EXIT
|
||||
git -C "$tmp_repo" init -q
|
||||
git -C "$tmp_repo" remote add origin "https://github.com/${GITHUB_REPOSITORY}.git"
|
||||
git -C "$tmp_repo" fetch --quiet --filter=blob:none origin main "refs/tags/${release_tag}:refs/tags/${release_tag}"
|
||||
if ! git -C "$tmp_repo" merge-base --is-ancestor "refs/tags/${release_tag}" "origin/main"; then
|
||||
echo "::error::Tag ${release_tag} is not reachable from origin/main. Release tags must be cut from main."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Guardrail: release tag and Cargo package version must stay aligned.
|
||||
tag_version="${release_tag#v}"
|
||||
cargo_version="$(git -C "$tmp_repo" show "refs/tags/${release_tag}:Cargo.toml" | sed -n 's/^version = "\([^"]*\)"/\1/p' | head -n1)"
|
||||
if [[ -z "$cargo_version" ]]; then
|
||||
echo "::error::Unable to read Cargo package version from ${release_tag}:Cargo.toml"
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$cargo_version" != "$tag_version" ]]; then
|
||||
echo "::error::Tag ${release_tag} does not match Cargo.toml version (${cargo_version})."
|
||||
echo "::error::Bump Cargo.toml version first, then create/publish the matching tag."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
{
|
||||
echo "release_ref=${release_ref}"
|
||||
echo "release_tag=${release_tag}"
|
||||
@ -138,37 +106,143 @@ jobs:
|
||||
echo "- draft_release: ${draft_release}"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Install gh CLI
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if command -v gh &>/dev/null; then
|
||||
echo "gh already available: $(gh --version | head -1)"
|
||||
exit 0
|
||||
fi
|
||||
echo "Installing gh CLI..."
|
||||
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \
|
||||
| sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \
|
||||
| sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
|
||||
for i in {1..60}; do
|
||||
if sudo fuser /var/lib/apt/lists/lock >/dev/null 2>&1 \
|
||||
|| sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 \
|
||||
|| sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1; then
|
||||
echo "apt/dpkg locked; waiting ($i/60)..."
|
||||
sleep 5
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 update -qq
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 install -y gh
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
- name: Validate release trigger and authorization guard
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
python3 scripts/ci/release_trigger_guard.py \
|
||||
--repo-root . \
|
||||
--repository "${GITHUB_REPOSITORY}" \
|
||||
--event-name "${GITHUB_EVENT_NAME}" \
|
||||
--actor "${GITHUB_ACTOR}" \
|
||||
--release-ref "${{ steps.vars.outputs.release_ref }}" \
|
||||
--release-tag "${{ steps.vars.outputs.release_tag }}" \
|
||||
--publish-release "${{ steps.vars.outputs.publish_release }}" \
|
||||
--authorized-actors "${{ vars.RELEASE_AUTHORIZED_ACTORS || 'willsarg,theonlyhennygod,chumyin' }}" \
|
||||
--authorized-tagger-emails "${{ vars.RELEASE_AUTHORIZED_TAGGER_EMAILS || '' }}" \
|
||||
--require-annotated-tag true \
|
||||
--output-json artifacts/release-trigger-guard.json \
|
||||
--output-md artifacts/release-trigger-guard.md \
|
||||
--fail-on-violation
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
- name: Emit release trigger audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type release_trigger_guard \
|
||||
--input-json artifacts/release-trigger-guard.json \
|
||||
--output-json artifacts/audit-event-release-trigger-guard.json \
|
||||
--artifact-name release-trigger-guard \
|
||||
--retention-days 30
|
||||
|
||||
- name: Publish release trigger guard summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/release-trigger-guard.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload release trigger guard artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: release-trigger-guard
|
||||
path: |
|
||||
artifacts/release-trigger-guard.json
|
||||
artifacts/release-trigger-guard.md
|
||||
artifacts/audit-event-release-trigger-guard.json
|
||||
if-no-files-found: error
|
||||
retention-days: 30
|
||||
|
||||
build-release:
|
||||
name: Build ${{ matrix.target }}
|
||||
needs: [prepare]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 40
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}-${{ matrix.target }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}-${{ matrix.target }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/target
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
# Keep GNU Linux release artifacts on Ubuntu 22.04 to preserve
|
||||
# a broadly compatible GLIBC baseline for user distributions.
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: x86_64-unknown-linux-gnu
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
cross_compiler: ""
|
||||
linker_env: ""
|
||||
linker: ""
|
||||
- os: ubuntu-latest
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: x86_64-unknown-linux-musl
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
cross_compiler: ""
|
||||
linker_env: ""
|
||||
linker: ""
|
||||
use_cross: true
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: aarch64-unknown-linux-gnu
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
cross_compiler: gcc-aarch64-linux-gnu
|
||||
linker_env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER
|
||||
linker: aarch64-linux-gnu-gcc
|
||||
- os: ubuntu-latest
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: aarch64-unknown-linux-musl
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
cross_compiler: ""
|
||||
linker_env: ""
|
||||
linker: ""
|
||||
use_cross: true
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: armv7-unknown-linux-gnueabihf
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
cross_compiler: gcc-arm-linux-gnueabihf
|
||||
linker_env: CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER
|
||||
linker: arm-linux-gnueabihf-gcc
|
||||
- os: ubuntu-latest
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: armv7-linux-androideabi
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
@ -177,7 +251,7 @@ jobs:
|
||||
linker: ""
|
||||
android_ndk: true
|
||||
android_api: 21
|
||||
- os: ubuntu-latest
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: aarch64-linux-android
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
@ -186,6 +260,14 @@ jobs:
|
||||
linker: ""
|
||||
android_ndk: true
|
||||
android_api: 21
|
||||
- os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
target: x86_64-unknown-freebsd
|
||||
artifact: zeroclaw
|
||||
archive_ext: tar.gz
|
||||
cross_compiler: ""
|
||||
linker_env: ""
|
||||
linker: ""
|
||||
use_cross: true
|
||||
- os: macos-15-intel
|
||||
target: x86_64-apple-darwin
|
||||
artifact: zeroclaw
|
||||
@ -213,33 +295,94 @@ jobs:
|
||||
with:
|
||||
ref: ${{ needs.prepare.outputs.release_ref }}
|
||||
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
if: runner.os != 'Windows'
|
||||
|
||||
- name: Install cross for cross-built targets
|
||||
if: matrix.use_cross
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> "$GITHUB_PATH"
|
||||
cargo install cross --locked --version 0.2.5
|
||||
command -v cross
|
||||
cross --version
|
||||
|
||||
- name: Install cross-compilation toolchain (Linux)
|
||||
if: runner.os == 'Linux' && matrix.cross_compiler != ''
|
||||
run: |
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -y ${{ matrix.cross_compiler }}
|
||||
set -euo pipefail
|
||||
for i in {1..60}; do
|
||||
if sudo fuser /var/lib/apt/lists/lock >/dev/null 2>&1 \
|
||||
|| sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 \
|
||||
|| sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1; then
|
||||
echo "apt/dpkg locked; waiting ($i/60)..."
|
||||
sleep 5
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 update -qq
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 install -y "${{ matrix.cross_compiler }}"
|
||||
# Install matching libc dev headers for cross targets
|
||||
# (required by ring/aws-lc-sys C compilation)
|
||||
case "${{ matrix.target }}" in
|
||||
armv7-unknown-linux-gnueabihf)
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 install -y libc6-dev-armhf-cross ;;
|
||||
aarch64-unknown-linux-gnu)
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 install -y libc6-dev-arm64-cross ;;
|
||||
esac
|
||||
|
||||
- name: Setup Android NDK
|
||||
if: matrix.android_ndk
|
||||
uses: nttld/setup-ndk@v1
|
||||
id: setup-ndk
|
||||
with:
|
||||
ndk-version: r26d
|
||||
add-to-path: true
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
NDK_VERSION="r26d"
|
||||
NDK_ZIP="android-ndk-${NDK_VERSION}-linux.zip"
|
||||
NDK_URL="https://dl.google.com/android/repository/${NDK_ZIP}"
|
||||
NDK_ROOT="${RUNNER_TEMP}/android-ndk"
|
||||
NDK_HOME="${NDK_ROOT}/android-ndk-${NDK_VERSION}"
|
||||
|
||||
for i in {1..60}; do
|
||||
if sudo fuser /var/lib/apt/lists/lock >/dev/null 2>&1 \
|
||||
|| sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 \
|
||||
|| sudo fuser /var/lib/dpkg/lock >/dev/null 2>&1; then
|
||||
echo "apt/dpkg locked; waiting ($i/60)..."
|
||||
sleep 5
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 update -qq
|
||||
sudo apt-get -o DPkg::Lock::Timeout=600 -o Acquire::Retries=3 install -y unzip
|
||||
|
||||
mkdir -p "${NDK_ROOT}"
|
||||
curl -fsSL "${NDK_URL}" -o "${RUNNER_TEMP}/${NDK_ZIP}"
|
||||
unzip -q "${RUNNER_TEMP}/${NDK_ZIP}" -d "${NDK_ROOT}"
|
||||
|
||||
echo "ANDROID_NDK_HOME=${NDK_HOME}" >> "$GITHUB_ENV"
|
||||
echo "${NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Configure Android toolchain
|
||||
if: matrix.android_ndk
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Setting up Android NDK toolchain for ${{ matrix.target }}"
|
||||
NDK_HOME="${{ steps.setup-ndk.outputs.ndk-path }}"
|
||||
NDK_HOME="${ANDROID_NDK_HOME:-}"
|
||||
if [[ -z "$NDK_HOME" ]]; then
|
||||
echo "::error::ANDROID_NDK_HOME was not configured."
|
||||
exit 1
|
||||
fi
|
||||
TOOLCHAIN="$NDK_HOME/toolchains/llvm/prebuilt/linux-x86_64/bin"
|
||||
|
||||
# Add to path for linker resolution
|
||||
@ -247,9 +390,29 @@ jobs:
|
||||
|
||||
# Set linker environment variables
|
||||
if [[ "${{ matrix.target }}" == "armv7-linux-androideabi" ]]; then
|
||||
echo "CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_LINKER=${TOOLCHAIN}/armv7a-linux-androideabi${{ matrix.android_api }}-clang" >> "$GITHUB_ENV"
|
||||
ARMV7_CC="${TOOLCHAIN}/armv7a-linux-androideabi${{ matrix.android_api }}-clang"
|
||||
ARMV7_CXX="${TOOLCHAIN}/armv7a-linux-androideabi${{ matrix.android_api }}-clang++"
|
||||
|
||||
# Some crates still probe legacy compiler names (arm-linux-androideabi-clang).
|
||||
ln -sf "$ARMV7_CC" "${TOOLCHAIN}/arm-linux-androideabi-clang"
|
||||
ln -sf "$ARMV7_CXX" "${TOOLCHAIN}/arm-linux-androideabi-clang++"
|
||||
|
||||
{
|
||||
echo "CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_LINKER=${ARMV7_CC}"
|
||||
echo "CC_armv7_linux_androideabi=${ARMV7_CC}"
|
||||
echo "CXX_armv7_linux_androideabi=${ARMV7_CXX}"
|
||||
echo "AR_armv7_linux_androideabi=${TOOLCHAIN}/llvm-ar"
|
||||
} >> "$GITHUB_ENV"
|
||||
elif [[ "${{ matrix.target }}" == "aarch64-linux-android" ]]; then
|
||||
echo "CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=${TOOLCHAIN}/aarch64-linux-android${{ matrix.android_api }}-clang" >> "$GITHUB_ENV"
|
||||
AARCH64_CC="${TOOLCHAIN}/aarch64-linux-android${{ matrix.android_api }}-clang"
|
||||
AARCH64_CXX="${TOOLCHAIN}/aarch64-linux-android${{ matrix.android_api }}-clang++"
|
||||
|
||||
{
|
||||
echo "CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=${AARCH64_CC}"
|
||||
echo "CC_aarch64_linux_android=${AARCH64_CC}"
|
||||
echo "CXX_aarch64_linux_android=${AARCH64_CXX}"
|
||||
echo "AR_aarch64_linux_android=${TOOLCHAIN}/llvm-ar"
|
||||
} >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Build release
|
||||
@ -257,15 +420,25 @@ jobs:
|
||||
env:
|
||||
LINKER_ENV: ${{ matrix.linker_env }}
|
||||
LINKER: ${{ matrix.linker }}
|
||||
USE_CROSS: ${{ matrix.use_cross }}
|
||||
run: |
|
||||
if [ -n "$LINKER_ENV" ] && [ -n "$LINKER" ]; then
|
||||
echo "Using linker override: $LINKER_ENV=$LINKER"
|
||||
export "$LINKER_ENV=$LINKER"
|
||||
fi
|
||||
cargo build --profile release-fast --locked --target ${{ matrix.target }}
|
||||
if [ "$USE_CROSS" = "true" ]; then
|
||||
echo "Using cross for MUSL target"
|
||||
cross build --profile release-fast --locked --target ${{ matrix.target }}
|
||||
else
|
||||
cargo build --profile release-fast --locked --target ${{ matrix.target }}
|
||||
fi
|
||||
|
||||
- name: Check binary size (Unix)
|
||||
if: runner.os != 'Windows'
|
||||
env:
|
||||
BINARY_SIZE_HARD_LIMIT_MB: 28
|
||||
BINARY_SIZE_ADVISORY_MB: 20
|
||||
BINARY_SIZE_TARGET_MB: 5
|
||||
run: bash scripts/ci/check_binary_size.sh "target/${{ matrix.target }}/release-fast/${{ matrix.artifact }}" "${{ matrix.target }}"
|
||||
|
||||
- name: Package (Unix)
|
||||
@ -290,47 +463,68 @@ jobs:
|
||||
verify-artifacts:
|
||||
name: Verify Artifact Set
|
||||
needs: [prepare, build-release]
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
ref: ${{ needs.prepare.outputs.release_ref }}
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Validate expected archives
|
||||
- name: Validate release archive contract (verify stage)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
expected=(
|
||||
"zeroclaw-x86_64-unknown-linux-gnu.tar.gz"
|
||||
"zeroclaw-aarch64-unknown-linux-gnu.tar.gz"
|
||||
"zeroclaw-armv7-unknown-linux-gnueabihf.tar.gz"
|
||||
"zeroclaw-armv7-linux-androideabi.tar.gz"
|
||||
"zeroclaw-aarch64-linux-android.tar.gz"
|
||||
"zeroclaw-x86_64-apple-darwin.tar.gz"
|
||||
"zeroclaw-aarch64-apple-darwin.tar.gz"
|
||||
"zeroclaw-x86_64-pc-windows-msvc.zip"
|
||||
)
|
||||
python3 scripts/ci/release_artifact_guard.py \
|
||||
--artifacts-dir artifacts \
|
||||
--contract-file .github/release/release-artifact-contract.json \
|
||||
--output-json artifacts/release-artifact-guard.verify.json \
|
||||
--output-md artifacts/release-artifact-guard.verify.md \
|
||||
--allow-extra-archives \
|
||||
--skip-manifest-files \
|
||||
--skip-sbom-files \
|
||||
--skip-notice-files \
|
||||
--fail-on-violation
|
||||
|
||||
missing=0
|
||||
for file in "${expected[@]}"; do
|
||||
if ! find artifacts -type f -name "$file" -print -quit | grep -q .; then
|
||||
echo "::error::Missing release archive: $file"
|
||||
missing=1
|
||||
fi
|
||||
done
|
||||
- name: Emit verify-stage artifact guard audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type release_artifact_guard_verify \
|
||||
--input-json artifacts/release-artifact-guard.verify.json \
|
||||
--output-json artifacts/audit-event-release-artifact-guard-verify.json \
|
||||
--artifact-name release-artifact-guard-verify \
|
||||
--retention-days 21
|
||||
|
||||
if [ "$missing" -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
- name: Publish verify-stage artifact guard summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/release-artifact-guard.verify.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
echo "All expected release archives are present."
|
||||
- name: Upload verify-stage artifact guard reports
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: release-artifact-guard-verify
|
||||
path: |
|
||||
artifacts/release-artifact-guard.verify.json
|
||||
artifacts/release-artifact-guard.verify.md
|
||||
artifacts/audit-event-release-artifact-guard-verify.json
|
||||
if-no-files-found: error
|
||||
retention-days: 21
|
||||
|
||||
publish:
|
||||
name: Publish Release
|
||||
if: needs.prepare.outputs.publish_release == 'true'
|
||||
needs: [prepare, verify-artifacts]
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 45
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
@ -343,8 +537,12 @@ jobs:
|
||||
path: artifacts
|
||||
|
||||
- name: Install syft
|
||||
shell: bash
|
||||
run: |
|
||||
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin
|
||||
set -euo pipefail
|
||||
mkdir -p "${RUNNER_TEMP}/bin"
|
||||
./scripts/ci/install_syft.sh "${RUNNER_TEMP}/bin"
|
||||
echo "${RUNNER_TEMP}/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Generate SBOM (CycloneDX)
|
||||
run: |
|
||||
@ -361,12 +559,80 @@ jobs:
|
||||
cp LICENSE-MIT artifacts/LICENSE-MIT
|
||||
cp NOTICE artifacts/NOTICE
|
||||
|
||||
- name: Generate SHA256 checksums
|
||||
- name: Generate release manifest + checksums
|
||||
shell: bash
|
||||
env:
|
||||
RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }}
|
||||
run: |
|
||||
cd artifacts
|
||||
find . -type f \( -name '*.tar.gz' -o -name '*.zip' -o -name '*.cdx.json' -o -name '*.spdx.json' -o -name 'LICENSE-APACHE' -o -name 'LICENSE-MIT' -o -name 'NOTICE' \) -exec sha256sum {} + | sed 's| \./[^/]*/| |' > SHA256SUMS
|
||||
echo "Generated checksums:"
|
||||
cat SHA256SUMS
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/release_manifest.py \
|
||||
--artifacts-dir artifacts \
|
||||
--release-tag "${RELEASE_TAG}" \
|
||||
--output-json artifacts/release-manifest.json \
|
||||
--output-md artifacts/release-manifest.md \
|
||||
--checksums-path artifacts/SHA256SUMS \
|
||||
--fail-empty
|
||||
|
||||
- name: Generate SHA256SUMS provenance statement
|
||||
shell: bash
|
||||
env:
|
||||
RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/generate_provenance.py \
|
||||
--artifact artifacts/SHA256SUMS \
|
||||
--subject-name "zeroclaw-${RELEASE_TAG}-sha256sums" \
|
||||
--output artifacts/zeroclaw.sha256sums.intoto.json
|
||||
|
||||
- name: Emit SHA256SUMS provenance audit event
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type release_sha256sums_provenance \
|
||||
--input-json artifacts/zeroclaw.sha256sums.intoto.json \
|
||||
--output-json artifacts/audit-event-release-sha256sums-provenance.json \
|
||||
--artifact-name release-sha256sums-provenance \
|
||||
--retention-days 30
|
||||
|
||||
- name: Validate release artifact contract (publish stage)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/release_artifact_guard.py \
|
||||
--artifacts-dir artifacts \
|
||||
--contract-file .github/release/release-artifact-contract.json \
|
||||
--output-json artifacts/release-artifact-guard.publish.json \
|
||||
--output-md artifacts/release-artifact-guard.publish.md \
|
||||
--allow-extra-archives \
|
||||
--allow-extra-manifest-files \
|
||||
--allow-extra-sbom-files \
|
||||
--allow-extra-notice-files \
|
||||
--fail-on-violation
|
||||
|
||||
- name: Emit publish-stage artifact guard audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type release_artifact_guard_publish \
|
||||
--input-json artifacts/release-artifact-guard.publish.json \
|
||||
--output-json artifacts/audit-event-release-artifact-guard-publish.json \
|
||||
--artifact-name release-artifact-guard-publish \
|
||||
--retention-days 30
|
||||
|
||||
- name: Publish artifact guard summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/release-artifact-guard.publish.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Publish release manifest summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/release-manifest.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
@ -383,6 +649,26 @@ jobs:
|
||||
"$file"
|
||||
done < <(find artifacts -type f ! -name '*.sig' ! -name '*.pem' ! -name '*.sigstore.json' -print0)
|
||||
|
||||
- name: Compose release-notes supply-chain references
|
||||
shell: bash
|
||||
env:
|
||||
RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 scripts/ci/release_notes_with_supply_chain_refs.py \
|
||||
--artifacts-dir artifacts \
|
||||
--repository "${GITHUB_REPOSITORY}" \
|
||||
--release-tag "${RELEASE_TAG}" \
|
||||
--output-json artifacts/release-notes-supply-chain.json \
|
||||
--output-md artifacts/release-notes-supply-chain.md \
|
||||
--fail-on-missing
|
||||
|
||||
- name: Publish release-notes supply-chain summary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat artifacts/release-notes-supply-chain.md >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Verify GHCR release tag availability
|
||||
shell: bash
|
||||
env:
|
||||
@ -428,6 +714,7 @@ jobs:
|
||||
with:
|
||||
tag_name: ${{ needs.prepare.outputs.release_tag }}
|
||||
draft: ${{ needs.prepare.outputs.draft_release == 'true' }}
|
||||
body_path: artifacts/release-notes-supply-chain.md
|
||||
generate_release_notes: true
|
||||
files: |
|
||||
artifacts/**/*
|
||||
|
||||
102
.github/workflows/release-build.yml
vendored
Normal file
102
.github/workflows/release-build.yml
vendored
Normal file
@ -0,0 +1,102 @@
|
||||
name: Production Release Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
tags: ["v*"]
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: production-release-build-${{ github.ref || github.run_id }}
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
build-and-test:
|
||||
name: Build and Test (Linux x86_64)
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 120
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
components: rustfmt, clippy
|
||||
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
shell: bash
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- name: Ensure rustfmt and clippy components
|
||||
shell: bash
|
||||
run: rustup component add rustfmt clippy --toolchain 1.92.0
|
||||
|
||||
- name: Activate toolchain binaries on PATH
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
toolchain_bin="$(dirname "$(rustup which --toolchain 1.92.0 cargo)")"
|
||||
echo "$toolchain_bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Cache Cargo registry and target
|
||||
uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: production-release-build
|
||||
shared-key: ${{ runner.os }}-${{ hashFiles('Cargo.lock') }}
|
||||
cache-targets: true
|
||||
cache-bin: false
|
||||
|
||||
- name: Rust quality gates
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
./scripts/ci/rust_quality_gate.sh
|
||||
cargo test --locked --lib --bins --verbose
|
||||
|
||||
- name: Build production binary (canonical)
|
||||
shell: bash
|
||||
run: cargo build --release --locked
|
||||
|
||||
- name: Prepare artifact bundle
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
cp target/release/zeroclaw artifacts/zeroclaw
|
||||
sha256sum artifacts/zeroclaw > artifacts/zeroclaw.sha256
|
||||
|
||||
- name: Upload production artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: zeroclaw-linux-amd64
|
||||
path: |
|
||||
artifacts/zeroclaw
|
||||
artifacts/zeroclaw.sha256
|
||||
if-no-files-found: error
|
||||
retention-days: 21
|
||||
61
.github/workflows/scripts/ci_human_review_guard.js
vendored
Normal file
61
.github/workflows/scripts/ci_human_review_guard.js
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
// Enforce at least one human approval on pull requests.
|
||||
// Used by .github/workflows/ci-run.yml via actions/github-script.
|
||||
|
||||
module.exports = async ({ github, context, core }) => {
|
||||
const owner = context.repo.owner;
|
||||
const repo = context.repo.repo;
|
||||
const prNumber = context.payload.pull_request?.number;
|
||||
if (!prNumber) {
|
||||
core.setFailed("Missing pull_request context.");
|
||||
return;
|
||||
}
|
||||
|
||||
const botAllowlist = new Set(
|
||||
(process.env.HUMAN_REVIEW_BOT_LOGINS || "github-actions[bot],dependabot[bot],coderabbitai[bot]")
|
||||
.split(",")
|
||||
.map((value) => value.trim().toLowerCase())
|
||||
.filter(Boolean),
|
||||
);
|
||||
|
||||
const isBotAccount = (login, accountType) => {
|
||||
if (!login) return false;
|
||||
if ((accountType || "").toLowerCase() === "bot") return true;
|
||||
if (login.endsWith("[bot]")) return true;
|
||||
return botAllowlist.has(login);
|
||||
};
|
||||
|
||||
const reviews = await github.paginate(github.rest.pulls.listReviews, {
|
||||
owner,
|
||||
repo,
|
||||
pull_number: prNumber,
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
const latestReviewByUser = new Map();
|
||||
const decisiveStates = new Set(["APPROVED", "CHANGES_REQUESTED", "DISMISSED"]);
|
||||
for (const review of reviews) {
|
||||
const login = review.user?.login?.toLowerCase();
|
||||
if (!login) continue;
|
||||
if (!decisiveStates.has(review.state)) continue;
|
||||
latestReviewByUser.set(login, {
|
||||
state: review.state,
|
||||
type: review.user?.type || "",
|
||||
});
|
||||
}
|
||||
|
||||
const humanApprovers = [];
|
||||
for (const [login, review] of latestReviewByUser.entries()) {
|
||||
if (review.state !== "APPROVED") continue;
|
||||
if (isBotAccount(login, review.type)) continue;
|
||||
humanApprovers.push(login);
|
||||
}
|
||||
|
||||
if (humanApprovers.length === 0) {
|
||||
core.setFailed(
|
||||
"No human approving review found. At least one non-bot approval is required before merge.",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
core.info(`Human approval check passed. Approver(s): ${humanApprovers.join(", ")}`);
|
||||
};
|
||||
@ -1,83 +0,0 @@
|
||||
// Extracted from ci-run.yml step: Require owner approval for workflow file changes
|
||||
|
||||
module.exports = async ({ github, context, core }) => {
|
||||
const owner = context.repo.owner;
|
||||
const repo = context.repo.repo;
|
||||
const prNumber = context.payload.pull_request?.number;
|
||||
const prAuthor = context.payload.pull_request?.user?.login?.toLowerCase() || "";
|
||||
if (!prNumber) {
|
||||
core.setFailed("Missing pull_request context.");
|
||||
return;
|
||||
}
|
||||
|
||||
const baseOwners = ["theonlyhennygod", "willsarg", "chumyin"];
|
||||
const configuredOwners = (process.env.WORKFLOW_OWNER_LOGINS || "")
|
||||
.split(",")
|
||||
.map((login) => login.trim().toLowerCase())
|
||||
.filter(Boolean);
|
||||
const ownerAllowlist = [...new Set([...baseOwners, ...configuredOwners])];
|
||||
|
||||
if (ownerAllowlist.length === 0) {
|
||||
core.setFailed("Workflow owner allowlist is empty.");
|
||||
return;
|
||||
}
|
||||
|
||||
core.info(`Workflow owner allowlist: ${ownerAllowlist.join(", ")}`);
|
||||
|
||||
const files = await github.paginate(github.rest.pulls.listFiles, {
|
||||
owner,
|
||||
repo,
|
||||
pull_number: prNumber,
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
const workflowFiles = files
|
||||
.map((file) => file.filename)
|
||||
.filter((name) => name.startsWith(".github/workflows/"));
|
||||
|
||||
if (workflowFiles.length === 0) {
|
||||
core.info("No workflow files changed in this PR.");
|
||||
return;
|
||||
}
|
||||
|
||||
core.info(`Workflow files changed:\n- ${workflowFiles.join("\n- ")}`);
|
||||
|
||||
if (prAuthor && ownerAllowlist.includes(prAuthor)) {
|
||||
core.info(`Workflow PR authored by allowlisted owner: @${prAuthor}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const reviews = await github.paginate(github.rest.pulls.listReviews, {
|
||||
owner,
|
||||
repo,
|
||||
pull_number: prNumber,
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
const latestReviewByUser = new Map();
|
||||
for (const review of reviews) {
|
||||
const login = review.user?.login;
|
||||
if (!login) continue;
|
||||
latestReviewByUser.set(login.toLowerCase(), review.state);
|
||||
}
|
||||
|
||||
const approvedUsers = [...latestReviewByUser.entries()]
|
||||
.filter(([, state]) => state === "APPROVED")
|
||||
.map(([login]) => login);
|
||||
|
||||
if (approvedUsers.length === 0) {
|
||||
core.setFailed("Workflow files changed but no approving review is present.");
|
||||
return;
|
||||
}
|
||||
|
||||
const ownerApprover = approvedUsers.find((login) => ownerAllowlist.includes(login));
|
||||
if (!ownerApprover) {
|
||||
core.setFailed(
|
||||
`Workflow files changed. Approvals found (${approvedUsers.join(", ")}), but none match workflow owner allowlist.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
core.info(`Workflow owner approval present: @${ownerApprover}`);
|
||||
|
||||
};
|
||||
23
.github/workflows/scripts/pr_intake_checks.js
vendored
23
.github/workflows/scripts/pr_intake_checks.js
vendored
@ -6,8 +6,6 @@ module.exports = async ({ github, context, core }) => {
|
||||
const repo = context.repo.repo;
|
||||
const pr = context.payload.pull_request;
|
||||
if (!pr) return;
|
||||
const prAuthor = (pr.user?.login || "").toLowerCase();
|
||||
const prBaseRef = pr.base?.ref || "";
|
||||
|
||||
const marker = "<!-- pr-intake-checks -->";
|
||||
const legacyMarker = "<!-- pr-intake-sanity -->";
|
||||
@ -85,15 +83,6 @@ module.exports = async ({ github, context, core }) => {
|
||||
if (dangerousProblems.length > 0) {
|
||||
blockingFindings.push(`Dangerous patch markers found (${dangerousProblems.length})`);
|
||||
}
|
||||
const promotionAuthorAllowlist = new Set(["willsarg", "theonlyhennygod"]);
|
||||
const shouldRetargetToDev =
|
||||
prBaseRef === "main" && !promotionAuthorAllowlist.has(prAuthor);
|
||||
|
||||
if (shouldRetargetToDev) {
|
||||
advisoryFindings.push(
|
||||
"This PR targets `main`, but normal contributions must target `dev`. Retarget this PR to `dev` unless this is an authorized promotion PR.",
|
||||
);
|
||||
}
|
||||
|
||||
const comments = await github.paginate(github.rest.issues.listComments, {
|
||||
owner,
|
||||
@ -136,13 +125,11 @@ module.exports = async ({ github, context, core }) => {
|
||||
|
||||
const isBlocking = blockingFindings.length > 0;
|
||||
|
||||
const ownerApprovalNote = workflowFilesChanged.length > 0
|
||||
const workflowChangeNote = workflowFilesChanged.length > 0
|
||||
? [
|
||||
"",
|
||||
"Workflow files changed in this PR:",
|
||||
...workflowFilesChanged.map((name) => `- \`${name}\``),
|
||||
"",
|
||||
"Reminder: workflow changes require owner approval via `CI Required Gate`.",
|
||||
].join("\n")
|
||||
: "";
|
||||
|
||||
@ -161,13 +148,11 @@ module.exports = async ({ github, context, core }) => {
|
||||
"Action items:",
|
||||
"1. Complete required PR template sections/fields.",
|
||||
"2. Remove tabs, trailing whitespace, and merge conflict markers from added lines.",
|
||||
"3. Re-run local checks before pushing:",
|
||||
"4. Re-run local checks before pushing:",
|
||||
" - `./scripts/ci/rust_quality_gate.sh`",
|
||||
" - `./scripts/ci/rust_strict_delta_gate.sh`",
|
||||
" - `./scripts/ci/docs_quality_gate.sh`",
|
||||
...(shouldRetargetToDev
|
||||
? ["4. Retarget this PR base branch from `main` to `dev`."]
|
||||
: []),
|
||||
"",
|
||||
"",
|
||||
`Run logs: ${runUrl}`,
|
||||
"",
|
||||
@ -176,7 +161,7 @@ module.exports = async ({ github, context, core }) => {
|
||||
"",
|
||||
"Detected advisory line issues (sample):",
|
||||
...(advisoryDetails.length > 0 ? advisoryDetails : ["- none"]),
|
||||
ownerApprovalNote,
|
||||
workflowChangeNote,
|
||||
].join("\n");
|
||||
|
||||
if (existing) {
|
||||
|
||||
647
.github/workflows/sec-audit.yml
vendored
647
.github/workflows/sec-audit.yml
vendored
@ -9,16 +9,49 @@ on:
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "deny.toml"
|
||||
- ".gitleaks.toml"
|
||||
- ".github/security/gitleaks-allowlist-governance.json"
|
||||
- ".github/security/deny-ignore-governance.json"
|
||||
- ".github/security/unsafe-audit-governance.json"
|
||||
- "scripts/ci/install_gitleaks.sh"
|
||||
- "scripts/ci/install_syft.sh"
|
||||
- "scripts/ci/ensure_c_toolchain.sh"
|
||||
- "scripts/ci/ensure_cargo_component.sh"
|
||||
- "scripts/ci/self_heal_rust_toolchain.sh"
|
||||
- "scripts/ci/deny_policy_guard.py"
|
||||
- "scripts/ci/secrets_governance_guard.py"
|
||||
- "scripts/ci/unsafe_debt_audit.py"
|
||||
- "scripts/ci/unsafe_policy_guard.py"
|
||||
- "scripts/ci/config/unsafe_debt_policy.toml"
|
||||
- "scripts/ci/emit_audit_event.py"
|
||||
- "scripts/ci/security_regression_tests.sh"
|
||||
- "scripts/ci/ensure_cc.sh"
|
||||
- ".github/workflows/sec-audit.yml"
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "deny.toml"
|
||||
# Do not gate pull_request by paths: main branch protection requires
|
||||
# "Security Required Gate" to always report a status on PRs.
|
||||
merge_group:
|
||||
branches: [dev, main]
|
||||
schedule:
|
||||
- cron: "0 6 * * 1" # Weekly on Monday 6am UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
full_secret_scan:
|
||||
description: "Scan full git history for secrets"
|
||||
required: true
|
||||
default: false
|
||||
type: boolean
|
||||
fail_on_secret_leak:
|
||||
description: "Fail workflow if secret leaks are detected"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
fail_on_governance_violation:
|
||||
description: "Fail workflow if secrets governance policy violations are detected"
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: security-${{ github.event.pull_request.number || github.ref }}
|
||||
@ -31,27 +64,619 @@ permissions:
|
||||
checks: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
audit:
|
||||
name: Security Audit
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 20
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 45
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- uses: rustsec/audit-check@69366f33c96575abad1ee0dba8212993eecbe998 # v2.0.0
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
deny:
|
||||
name: License & Supply Chain
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- name: Enforce deny policy hygiene
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
python3 scripts/ci/deny_policy_guard.py \
|
||||
--deny-file deny.toml \
|
||||
--governance-file .github/security/deny-ignore-governance.json \
|
||||
--output-json artifacts/deny-policy-guard.json \
|
||||
--output-md artifacts/deny-policy-guard.md \
|
||||
--fail-on-violation
|
||||
|
||||
- name: Install cargo-deny
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version="0.19.0"
|
||||
arch="$(uname -m)"
|
||||
case "${arch}" in
|
||||
x86_64|amd64)
|
||||
target="x86_64-unknown-linux-musl"
|
||||
expected_sha256="0e8c2aa59128612c90d9e09c02204e912f29a5b8d9a64671b94608cbe09e064f"
|
||||
;;
|
||||
aarch64|arm64)
|
||||
target="aarch64-unknown-linux-musl"
|
||||
expected_sha256="2b3567a60b7491c159d1cef8b7d8479d1ad2a31e29ef49462634ad4552fcc77d"
|
||||
;;
|
||||
*)
|
||||
echo "Unsupported runner architecture for cargo-deny: ${arch}" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
install_dir="${RUNNER_TEMP}/cargo-deny-${version}"
|
||||
archive="${RUNNER_TEMP}/cargo-deny-${version}-${target}.tar.gz"
|
||||
mkdir -p "${install_dir}"
|
||||
curl --proto '=https' --tlsv1.2 --fail --location --silent --show-error \
|
||||
--output "${archive}" \
|
||||
"https://github.com/EmbarkStudios/cargo-deny/releases/download/${version}/cargo-deny-${version}-${target}.tar.gz"
|
||||
actual_sha256="$(sha256sum "${archive}" | awk '{print $1}')"
|
||||
if [ "${actual_sha256}" != "${expected_sha256}" ]; then
|
||||
echo "Checksum mismatch for cargo-deny ${version} (${target})" >&2
|
||||
echo "Expected: ${expected_sha256}" >&2
|
||||
echo "Actual: ${actual_sha256}" >&2
|
||||
exit 1
|
||||
fi
|
||||
tar -xzf "${archive}" -C "${install_dir}" --strip-components=1
|
||||
echo "${install_dir}" >> "${GITHUB_PATH}"
|
||||
"${install_dir}/cargo-deny" --version
|
||||
|
||||
- name: Run cargo-deny checks
|
||||
shell: bash
|
||||
run: cargo-deny check advisories licenses sources
|
||||
|
||||
- name: Emit deny audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/deny-policy-guard.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type deny_policy_guard \
|
||||
--input-json artifacts/deny-policy-guard.json \
|
||||
--output-json artifacts/audit-event-deny-policy-guard.json \
|
||||
--artifact-name deny-policy-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Upload deny policy artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: deny-policy-guard
|
||||
path: artifacts/deny-policy-guard.*
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload deny policy audit event
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: deny-policy-audit-event
|
||||
path: artifacts/audit-event-deny-policy-guard.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
security-regressions:
|
||||
name: Security Regression Tests
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
|
||||
- name: Self-heal Rust toolchain cache
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: sec-audit-security-regressions
|
||||
cache-bin: false
|
||||
- name: Run security regression suite
|
||||
shell: bash
|
||||
run: ./scripts/ci/security_regression_tests.sh
|
||||
|
||||
secrets:
|
||||
name: Secrets Governance (Gitleaks)
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Enforce gitleaks allowlist governance
|
||||
shell: bash
|
||||
env:
|
||||
FAIL_ON_GOVERNANCE_INPUT: ${{ github.event.inputs.fail_on_governance_violation || 'true' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
fail_on_governance="true"
|
||||
if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
fail_on_governance="${FAIL_ON_GOVERNANCE_INPUT}"
|
||||
fi
|
||||
cmd=(python3 scripts/ci/secrets_governance_guard.py
|
||||
--gitleaks-file .gitleaks.toml
|
||||
--governance-file .github/security/gitleaks-allowlist-governance.json
|
||||
--output-json artifacts/secrets-governance-guard.json
|
||||
--output-md artifacts/secrets-governance-guard.md)
|
||||
if [ "$fail_on_governance" = "true" ]; then
|
||||
cmd+=(--fail-on-violation)
|
||||
fi
|
||||
"${cmd[@]}"
|
||||
|
||||
- name: Publish secrets governance summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/secrets-governance-guard.md ]; then
|
||||
cat artifacts/secrets-governance-guard.md >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
echo "Secrets governance report missing." >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Emit secrets governance audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/secrets-governance-guard.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type secrets_governance_guard \
|
||||
--input-json artifacts/secrets-governance-guard.json \
|
||||
--output-json artifacts/audit-event-secrets-governance-guard.json \
|
||||
--artifact-name secrets-governance-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Upload secrets governance artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: secrets-governance-guard
|
||||
path: artifacts/secrets-governance-guard.*
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload secrets governance audit event
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: secrets-governance-audit-event
|
||||
path: artifacts/audit-event-secrets-governance-guard.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Install gitleaks
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p "${RUNNER_TEMP}/bin"
|
||||
./scripts/ci/install_gitleaks.sh "${RUNNER_TEMP}/bin"
|
||||
echo "${RUNNER_TEMP}/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Run gitleaks scan
|
||||
shell: bash
|
||||
env:
|
||||
FULL_SECRET_SCAN_INPUT: ${{ github.event.inputs.full_secret_scan || 'false' }}
|
||||
FAIL_ON_SECRET_LEAK_INPUT: ${{ github.event.inputs.fail_on_secret_leak || 'true' }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
log_opts=""
|
||||
scan_scope="full-history"
|
||||
fail_on_leak="true"
|
||||
|
||||
if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then
|
||||
log_opts="${{ github.event.pull_request.base.sha }}..${GITHUB_SHA}"
|
||||
scan_scope="diff-range"
|
||||
elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then
|
||||
base_sha="${{ github.event.before }}"
|
||||
if [ -n "$base_sha" ] && [ "$base_sha" != "0000000000000000000000000000000000000000" ]; then
|
||||
log_opts="${base_sha}..${GITHUB_SHA}"
|
||||
scan_scope="diff-range"
|
||||
fi
|
||||
elif [ "${GITHUB_EVENT_NAME}" = "merge_group" ]; then
|
||||
base_sha="${{ github.event.merge_group.base_sha }}"
|
||||
if [ -n "$base_sha" ]; then
|
||||
log_opts="${base_sha}..${GITHUB_SHA}"
|
||||
scan_scope="diff-range"
|
||||
fi
|
||||
elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then
|
||||
if [ "${FULL_SECRET_SCAN_INPUT}" != "true" ]; then
|
||||
if [ -n "${{ github.sha }}" ]; then
|
||||
log_opts="${{ github.sha }}~1..${{ github.sha }}"
|
||||
scan_scope="latest-commit"
|
||||
fi
|
||||
fi
|
||||
fail_on_leak="${FAIL_ON_SECRET_LEAK_INPUT}"
|
||||
fi
|
||||
|
||||
cmd=(gitleaks git
|
||||
--config .gitleaks.toml
|
||||
--redact
|
||||
--report-format sarif
|
||||
--report-path artifacts/gitleaks.sarif
|
||||
--verbose)
|
||||
if [ -n "$log_opts" ]; then
|
||||
cmd+=(--log-opts="$log_opts")
|
||||
fi
|
||||
|
||||
set +e
|
||||
"${cmd[@]}"
|
||||
status=$?
|
||||
set -e
|
||||
|
||||
echo "### Gitleaks scan" >> "$GITHUB_STEP_SUMMARY"
|
||||
echo "- Scope: ${scan_scope}" >> "$GITHUB_STEP_SUMMARY"
|
||||
if [ -n "$log_opts" ]; then
|
||||
echo "- Log range: \`${log_opts}\`" >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
echo "- Exit code: ${status}" >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
cat > artifacts/gitleaks-summary.json <<EOF
|
||||
{
|
||||
"schema_version": "zeroclaw.audit.v1",
|
||||
"event_type": "gitleaks_scan",
|
||||
"event_name": "${GITHUB_EVENT_NAME}",
|
||||
"scope": "${scan_scope}",
|
||||
"log_opts": "${log_opts}",
|
||||
"result_code": "${status}",
|
||||
"fail_on_leak": "${fail_on_leak}"
|
||||
}
|
||||
EOF
|
||||
|
||||
if [ "$status" -ne 0 ] && [ "$fail_on_leak" = "true" ]; then
|
||||
exit "$status"
|
||||
fi
|
||||
|
||||
- name: Upload gitleaks SARIF
|
||||
if: always()
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
with:
|
||||
sarif_file: artifacts/gitleaks.sarif
|
||||
category: gitleaks
|
||||
|
||||
- name: Upload gitleaks artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: gitleaks-report
|
||||
path: artifacts/gitleaks.sarif
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Emit gitleaks audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/gitleaks-summary.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type gitleaks_scan \
|
||||
--input-json artifacts/gitleaks-summary.json \
|
||||
--output-json artifacts/audit-event-gitleaks-scan.json \
|
||||
--artifact-name gitleaks-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Upload gitleaks audit event
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: gitleaks-audit-event
|
||||
path: artifacts/audit-event-gitleaks-scan.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
sbom:
|
||||
name: SBOM Snapshot
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- uses: EmbarkStudios/cargo-deny-action@3fd3802e88374d3fe9159b834c7714ec57d6c979 # v2
|
||||
- name: Install syft
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p "${RUNNER_TEMP}/bin"
|
||||
./scripts/ci/install_syft.sh "${RUNNER_TEMP}/bin"
|
||||
echo "${RUNNER_TEMP}/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Generate CycloneDX + SPDX SBOM
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
syft dir:. --source-name zeroclaw \
|
||||
-o cyclonedx-json=artifacts/zeroclaw.cdx.json \
|
||||
-o spdx-json=artifacts/zeroclaw.spdx.json
|
||||
{
|
||||
echo "### SBOM snapshot"
|
||||
echo "- CycloneDX: artifacts/zeroclaw.cdx.json"
|
||||
echo "- SPDX: artifacts/zeroclaw.spdx.json"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Upload SBOM artifacts
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
command: check advisories licenses sources
|
||||
name: sbom-snapshot
|
||||
path: artifacts/zeroclaw.*.json
|
||||
retention-days: 14
|
||||
|
||||
- name: Emit SBOM audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cat > artifacts/sbom-summary.json <<EOF
|
||||
{
|
||||
"schema_version": "zeroclaw.audit.v1",
|
||||
"event_type": "sbom_snapshot",
|
||||
"cyclonedx_path": "artifacts/zeroclaw.cdx.json",
|
||||
"spdx_path": "artifacts/zeroclaw.spdx.json"
|
||||
}
|
||||
EOF
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type sbom_snapshot \
|
||||
--input-json artifacts/sbom-summary.json \
|
||||
--output-json artifacts/audit-event-sbom-snapshot.json \
|
||||
--artifact-name sbom-audit-event \
|
||||
--retention-days 14
|
||||
|
||||
- name: Upload SBOM audit event
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: sbom-audit-event
|
||||
path: artifacts/audit-event-sbom-snapshot.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
unsafe-debt:
|
||||
name: Unsafe Debt Audit
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Setup Python 3.11
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 --version
|
||||
|
||||
- name: Enforce unsafe policy governance
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
python3 scripts/ci/unsafe_policy_guard.py \
|
||||
--policy-file scripts/ci/config/unsafe_debt_policy.toml \
|
||||
--governance-file .github/security/unsafe-audit-governance.json \
|
||||
--output-json artifacts/unsafe-policy-guard.json \
|
||||
--output-md artifacts/unsafe-policy-guard.md \
|
||||
--fail-on-violation
|
||||
|
||||
- name: Publish unsafe governance summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/unsafe-policy-guard.md ]; then
|
||||
cat artifacts/unsafe-policy-guard.md >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
echo "Unsafe policy governance report missing." >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Run unsafe debt audit
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts
|
||||
python3 scripts/ci/unsafe_debt_audit.py \
|
||||
--repo-root . \
|
||||
--policy-file scripts/ci/config/unsafe_debt_policy.toml \
|
||||
--output-json artifacts/unsafe-debt-audit.json \
|
||||
--fail-on-findings \
|
||||
--fail-on-excluded-crate-roots
|
||||
|
||||
- name: Publish unsafe debt summary
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/unsafe-debt-audit.json ]; then
|
||||
python3 - <<'PY' >> "$GITHUB_STEP_SUMMARY"
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
report = json.loads(Path("artifacts/unsafe-debt-audit.json").read_text(encoding="utf-8"))
|
||||
summary = report.get("summary", {})
|
||||
source = report.get("source", {})
|
||||
by_pattern = summary.get("by_pattern", {})
|
||||
|
||||
print("### Unsafe debt audit")
|
||||
print(f"- Total findings: `{summary.get('total_findings', 0)}`")
|
||||
print(f"- Files scanned: `{source.get('files_scanned', 0)}`")
|
||||
print(f"- Crate roots scanned: `{source.get('crate_roots_scanned', 0)}`")
|
||||
print(f"- Crate roots excluded: `{source.get('crate_roots_excluded', 0)}`")
|
||||
if by_pattern:
|
||||
print("- Findings by pattern:")
|
||||
for pattern_id, count in sorted(by_pattern.items()):
|
||||
print(f" - `{pattern_id}`: `{count}`")
|
||||
else:
|
||||
print("- Findings by pattern: none")
|
||||
PY
|
||||
else
|
||||
echo "Unsafe debt audit JSON report missing." >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Emit unsafe policy governance audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/unsafe-policy-guard.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type unsafe_policy_guard \
|
||||
--input-json artifacts/unsafe-policy-guard.json \
|
||||
--output-json artifacts/audit-event-unsafe-policy-guard.json \
|
||||
--artifact-name unsafe-policy-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Emit unsafe debt audit event
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -f artifacts/unsafe-debt-audit.json ]; then
|
||||
python3 scripts/ci/emit_audit_event.py \
|
||||
--event-type unsafe_debt_audit \
|
||||
--input-json artifacts/unsafe-debt-audit.json \
|
||||
--output-json artifacts/audit-event-unsafe-debt-audit.json \
|
||||
--artifact-name unsafe-debt-audit-event \
|
||||
--retention-days 14
|
||||
fi
|
||||
|
||||
- name: Upload unsafe policy guard artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: unsafe-policy-guard
|
||||
path: artifacts/unsafe-policy-guard.*
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload unsafe debt audit artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: unsafe-debt-audit
|
||||
path: artifacts/unsafe-debt-audit.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload unsafe policy audit event
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: unsafe-policy-audit-event
|
||||
path: artifacts/audit-event-unsafe-policy-guard.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload unsafe debt audit event
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: unsafe-debt-audit-event
|
||||
path: artifacts/audit-event-unsafe-debt-audit.json
|
||||
if-no-files-found: ignore
|
||||
retention-days: 14
|
||||
|
||||
security-required:
|
||||
name: Security Required Gate
|
||||
if: always() && (github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'merge_group')
|
||||
needs: [audit, deny, security-regressions, secrets, sbom, unsafe-debt]
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
steps:
|
||||
- name: Enforce security gate
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
results=(
|
||||
"audit=${{ needs.audit.result }}"
|
||||
"deny=${{ needs.deny.result }}"
|
||||
"security-regressions=${{ needs.security-regressions.result }}"
|
||||
"secrets=${{ needs.secrets.result }}"
|
||||
"sbom=${{ needs.sbom.result }}"
|
||||
"unsafe-debt=${{ needs['unsafe-debt'].result }}"
|
||||
)
|
||||
for item in "${results[@]}"; do
|
||||
echo "$item"
|
||||
done
|
||||
for item in "${results[@]}"; do
|
||||
result="${item#*=}"
|
||||
if [ "$result" != "success" ]; then
|
||||
echo "Security gate failed: $item"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
106
.github/workflows/sec-codeql.yml
vendored
106
.github/workflows/sec-codeql.yml
vendored
@ -1,12 +1,40 @@
|
||||
name: Sec CodeQL
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "scripts/ci/ensure_c_toolchain.sh"
|
||||
- "scripts/ci/ensure_cargo_component.sh"
|
||||
- ".github/codeql/**"
|
||||
- "scripts/ci/self_heal_rust_toolchain.sh"
|
||||
- "scripts/ci/ensure_cc.sh"
|
||||
- ".github/workflows/sec-codeql.yml"
|
||||
pull_request:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "scripts/ci/ensure_c_toolchain.sh"
|
||||
- "scripts/ci/ensure_cargo_component.sh"
|
||||
- ".github/codeql/**"
|
||||
- "scripts/ci/self_heal_rust_toolchain.sh"
|
||||
- "scripts/ci/ensure_cc.sh"
|
||||
- ".github/workflows/sec-codeql.yml"
|
||||
merge_group:
|
||||
branches: [dev, main]
|
||||
schedule:
|
||||
- cron: "0 6 * * 1" # Weekly Monday 6am UTC
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: codeql-${{ github.ref }}
|
||||
group: codeql-${{ github.event.pull_request.number || github.ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
@ -14,26 +42,96 @@ permissions:
|
||||
security-events: write
|
||||
actions: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
select-runner:
|
||||
name: Select CodeQL Runner Lane
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
outputs:
|
||||
labels: ${{ steps.lane.outputs.labels }}
|
||||
lane: ${{ steps.lane.outputs.lane }}
|
||||
steps:
|
||||
- name: Resolve branch lane
|
||||
id: lane
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
branch="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
|
||||
if [[ "$branch" == release/* ]]; then
|
||||
echo 'labels=["self-hosted","Linux","X64","hetzner","codeql"]' >> "$GITHUB_OUTPUT"
|
||||
echo 'lane=release' >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo 'labels=["self-hosted","Linux","X64","hetzner","codeql","codeql-general"]' >> "$GITHUB_OUTPUT"
|
||||
echo 'lane=general' >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
codeql:
|
||||
name: CodeQL Analysis
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
needs: [select-runner]
|
||||
runs-on: ${{ fromJSON(needs.select-runner.outputs.labels) }}
|
||||
timeout-minutes: 120
|
||||
env:
|
||||
CARGO_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/cargo
|
||||
RUSTUP_HOME: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/rustup
|
||||
CARGO_TARGET_DIR: ${{ github.workspace }}/.ci-rust/${{ github.run_id }}-${{ github.run_attempt }}-${{ github.job }}/target
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Ensure C toolchain
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_c_toolchain.sh
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
with:
|
||||
languages: rust
|
||||
config-file: ./.github/codeql/codeql-config.yml
|
||||
queries: security-and-quality
|
||||
|
||||
- name: Set up Rust
|
||||
shell: bash
|
||||
run: ./scripts/ci/self_heal_rust_toolchain.sh 1.92.0
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
with:
|
||||
prefix-key: sec-codeql-build
|
||||
cache-targets: true
|
||||
cache-bin: false
|
||||
|
||||
- name: Build
|
||||
run: cargo build --workspace --all-targets
|
||||
run: cargo build --workspace --all-targets --locked
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
with:
|
||||
category: "/language:rust"
|
||||
|
||||
- name: Summarize lane
|
||||
if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
{
|
||||
echo "### CodeQL Runner Lane"
|
||||
echo "- Branch: \`${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}\`"
|
||||
echo "- Lane: \`${{ needs.select-runner.outputs.lane }}\`"
|
||||
echo "- Labels: \`${{ needs.select-runner.outputs.labels }}\`"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
8
.github/workflows/sec-vorpal-reviewdog.yml
vendored
8
.github/workflows/sec-vorpal-reviewdog.yml
vendored
@ -82,10 +82,16 @@ permissions:
|
||||
checks: write
|
||||
pull-requests: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
vorpal:
|
||||
name: Vorpal Reviewdog Scan
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
||||
3
.github/workflows/sync-contributors.yml
vendored
3
.github/workflows/sync-contributors.yml
vendored
@ -17,7 +17,8 @@ permissions:
|
||||
jobs:
|
||||
update-notice:
|
||||
name: Update NOTICE with new contributors
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
9
.github/workflows/test-benchmarks.yml
vendored
9
.github/workflows/test-benchmarks.yml
vendored
@ -14,26 +14,29 @@ permissions:
|
||||
pull-requests: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
benchmarks:
|
||||
name: Criterion Benchmarks
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
|
||||
- name: Run benchmarks
|
||||
run: cargo bench --locked 2>&1 | tee benchmark_output.txt
|
||||
|
||||
- name: Upload benchmark results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: benchmark-results
|
||||
path: |
|
||||
|
||||
42
.github/workflows/test-e2e.yml
vendored
42
.github/workflows/test-e2e.yml
vendored
@ -3,28 +3,64 @@ name: Test E2E
|
||||
on:
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- "Cargo.toml"
|
||||
- "Cargo.lock"
|
||||
- "src/**"
|
||||
- "crates/**"
|
||||
- "tests/**"
|
||||
- "scripts/**"
|
||||
- "scripts/ci/ensure_cc.sh"
|
||||
- ".github/workflows/test-e2e.yml"
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: e2e-${{ github.event.pull_request.number || github.sha }}
|
||||
group: test-e2e-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref_name || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
integration-tests:
|
||||
name: Integration / E2E Tests
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92.0
|
||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
- name: Ensure cargo component
|
||||
shell: bash
|
||||
env:
|
||||
ENSURE_CARGO_COMPONENT_STRICT: "true"
|
||||
run: bash ./scripts/ci/ensure_cargo_component.sh 1.92.0
|
||||
- name: Ensure C toolchain for Rust builds
|
||||
run: ./scripts/ci/ensure_cc.sh
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v3
|
||||
- name: Runner preflight (compiler + disk)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Runner: ${RUNNER_NAME:-unknown} (${RUNNER_OS:-unknown}/${RUNNER_ARCH:-unknown})"
|
||||
if ! command -v cc >/dev/null 2>&1; then
|
||||
echo "::error::Missing 'cc' compiler on runner. Install build-essential (Debian/Ubuntu) or equivalent."
|
||||
exit 1
|
||||
fi
|
||||
cc --version | head -n1
|
||||
free_kb="$(df -Pk . | awk 'NR==2 {print $4}')"
|
||||
min_kb=$((10 * 1024 * 1024))
|
||||
if [ "${free_kb}" -lt "${min_kb}" ]; then
|
||||
echo "::error::Insufficient disk space on runner (<10 GiB free)."
|
||||
df -h .
|
||||
exit 1
|
||||
fi
|
||||
- name: Run integration / E2E tests
|
||||
run: cargo test --test agent_e2e --locked --verbose
|
||||
|
||||
5
.github/workflows/test-fuzz.yml
vendored
5
.github/workflows/test-fuzz.yml
vendored
@ -19,12 +19,15 @@ permissions:
|
||||
issues: write
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
fuzz:
|
||||
name: Fuzz (${{ matrix.target }})
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 60
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
||||
62
.github/workflows/test-rust-build.yml
vendored
62
.github/workflows/test-rust-build.yml
vendored
@ -1,62 +0,0 @@
|
||||
name: Test Rust Build
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
run_command:
|
||||
description: "Shell command(s) to execute."
|
||||
required: true
|
||||
type: string
|
||||
timeout_minutes:
|
||||
description: "Job timeout in minutes."
|
||||
required: false
|
||||
default: 20
|
||||
type: number
|
||||
toolchain:
|
||||
description: "Rust toolchain channel/version."
|
||||
required: false
|
||||
default: "stable"
|
||||
type: string
|
||||
components:
|
||||
description: "Optional rustup components."
|
||||
required: false
|
||||
default: ""
|
||||
type: string
|
||||
targets:
|
||||
description: "Optional rustup targets."
|
||||
required: false
|
||||
default: ""
|
||||
type: string
|
||||
use_cache:
|
||||
description: "Whether to enable rust-cache."
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
run:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: ${{ inputs.timeout_minutes }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Setup Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: ${{ inputs.toolchain }}
|
||||
components: ${{ inputs.components }}
|
||||
targets: ${{ inputs.targets }}
|
||||
|
||||
- name: Restore Rust cache
|
||||
if: inputs.use_cache
|
||||
uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
||||
|
||||
- name: Run command
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
${{ inputs.run_command }}
|
||||
90
.github/workflows/test-self-hosted.yml
vendored
Normal file
90
.github/workflows/test-self-hosted.yml
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
name: Test Self-Hosted Runner
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "30 2 * * *"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
runner-health:
|
||||
name: Runner Health / self-hosted aws-india
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404]
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Check runner info
|
||||
run: |
|
||||
echo "Runner: $(hostname)"
|
||||
echo "OS: $(uname -a)"
|
||||
echo "User: $(whoami)"
|
||||
if command -v rustc >/dev/null 2>&1; then
|
||||
echo "Rust: $(rustc --version)"
|
||||
else
|
||||
echo "Rust: <not installed>"
|
||||
fi
|
||||
if command -v cargo >/dev/null 2>&1; then
|
||||
echo "Cargo: $(cargo --version)"
|
||||
else
|
||||
echo "Cargo: <not installed>"
|
||||
fi
|
||||
if command -v cc >/dev/null 2>&1; then
|
||||
echo "CC: $(cc --version | head -n1)"
|
||||
else
|
||||
echo "CC: <not installed>"
|
||||
fi
|
||||
if command -v gcc >/dev/null 2>&1; then
|
||||
echo "GCC: $(gcc --version | head -n1)"
|
||||
else
|
||||
echo "GCC: <not installed>"
|
||||
fi
|
||||
if command -v clang >/dev/null 2>&1; then
|
||||
echo "Clang: $(clang --version | head -n1)"
|
||||
else
|
||||
echo "Clang: <not installed>"
|
||||
fi
|
||||
if command -v docker >/dev/null 2>&1; then
|
||||
echo "Docker: $(docker --version)"
|
||||
else
|
||||
echo "Docker: <not installed>"
|
||||
fi
|
||||
- name: Verify compiler + disk prerequisites
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
failed=0
|
||||
|
||||
if ! command -v cc >/dev/null 2>&1; then
|
||||
echo "::error::Missing 'cc'. Install build-essential (or gcc/clang + symlink)."
|
||||
failed=1
|
||||
fi
|
||||
|
||||
free_kb="$(df -Pk . | awk 'NR==2 {print $4}')"
|
||||
min_kb=$((10 * 1024 * 1024))
|
||||
if [ "${free_kb}" -lt "${min_kb}" ]; then
|
||||
echo "::error::Disk free below 10 GiB; clean runner workspace/cache."
|
||||
df -h .
|
||||
failed=1
|
||||
fi
|
||||
|
||||
inode_used_pct="$(df -Pi . | awk 'NR==2 {gsub(/%/, "", $5); print $5}')"
|
||||
if [ "${inode_used_pct}" -ge 95 ]; then
|
||||
echo "::error::Inode usage >=95%; clean files to avoid ENOSPC."
|
||||
df -i .
|
||||
failed=1
|
||||
fi
|
||||
|
||||
if [ "${failed}" -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
- name: Test Docker
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if ! command -v docker >/dev/null 2>&1; then
|
||||
echo "::notice::Docker is not installed on this self-hosted runner. Skipping docker smoke test."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
docker run --rm hello-world
|
||||
51
.github/workflows/workflow-sanity.yml
vendored
51
.github/workflows/workflow-sanity.yml
vendored
@ -7,6 +7,7 @@ on:
|
||||
- ".github/*.yml"
|
||||
- ".github/*.yaml"
|
||||
push:
|
||||
branches: [dev, main]
|
||||
paths:
|
||||
- ".github/workflows/**"
|
||||
- ".github/*.yml"
|
||||
@ -19,11 +20,23 @@ concurrency:
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
GIT_CONFIG_COUNT: "1"
|
||||
GIT_CONFIG_KEY_0: core.hooksPath
|
||||
GIT_CONFIG_VALUE_0: /dev/null
|
||||
|
||||
|
||||
jobs:
|
||||
no-tabs:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Normalize git global hooks config
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git config --global --unset-all core.hooksPath || true
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
@ -31,7 +44,7 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python - <<'PY'
|
||||
python3 - <<'PY'
|
||||
from __future__ import annotations
|
||||
|
||||
import pathlib
|
||||
@ -54,11 +67,41 @@ jobs:
|
||||
PY
|
||||
|
||||
actionlint:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, Linux, X64, aws-india, light, cpu40]
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Normalize git global hooks config
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git config --global --unset-all core.hooksPath || true
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Install actionlint binary
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version="1.7.11"
|
||||
arch="$(uname -m)"
|
||||
case "$arch" in
|
||||
x86_64|amd64) archive="actionlint_${version}_linux_amd64.tar.gz" ;;
|
||||
aarch64|arm64) archive="actionlint_${version}_linux_arm64.tar.gz" ;;
|
||||
*)
|
||||
echo "::error::Unsupported architecture: ${arch}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
curl -fsSL \
|
||||
-o "$RUNNER_TEMP/actionlint.tgz" \
|
||||
"https://github.com/rhysd/actionlint/releases/download/v${version}/${archive}"
|
||||
tar -xzf "$RUNNER_TEMP/actionlint.tgz" -C "$RUNNER_TEMP" actionlint
|
||||
chmod +x "$RUNNER_TEMP/actionlint"
|
||||
echo "$RUNNER_TEMP" >> "$GITHUB_PATH"
|
||||
"$RUNNER_TEMP/actionlint" -version
|
||||
|
||||
- name: Lint GitHub workflows
|
||||
uses: rhysd/actionlint@393031adb9afb225ee52ae2ccd7a5af5525e03e8 # v1.7.11
|
||||
shell: bash
|
||||
run: actionlint -color
|
||||
|
||||
14
.gitignore
vendored
14
.gitignore
vendored
@ -1,4 +1,6 @@
|
||||
/target
|
||||
/target_ci
|
||||
/target_review*
|
||||
firmware/*/target
|
||||
*.db
|
||||
*.db-journal
|
||||
@ -8,6 +10,13 @@ firmware/*/target
|
||||
__pycache__/
|
||||
*.pyc
|
||||
docker-compose.override.yml
|
||||
site/node_modules/
|
||||
site/.vite/
|
||||
site/public/docs-content/
|
||||
gh-pages/
|
||||
|
||||
.idea
|
||||
.claude
|
||||
|
||||
# Environment files (may contain secrets)
|
||||
.env
|
||||
@ -25,7 +34,12 @@ venv/
|
||||
|
||||
# Secret keys and credentials
|
||||
.secret_key
|
||||
otp-secret
|
||||
*.key
|
||||
*.pem
|
||||
credentials.json
|
||||
/config.toml
|
||||
.worktrees/
|
||||
|
||||
# Nix
|
||||
result
|
||||
|
||||
15
.gitleaks.toml
Normal file
15
.gitleaks.toml
Normal file
@ -0,0 +1,15 @@
|
||||
title = "ZeroClaw gitleaks configuration"
|
||||
|
||||
[allowlist]
|
||||
description = "Known false positives in detector fixtures and documentation examples"
|
||||
paths = [
|
||||
'''src/security/leak_detector\.rs''',
|
||||
'''src/agent/loop_\.rs''',
|
||||
'''src/security/secrets\.rs''',
|
||||
'''docs/(i18n/vi/|vi/)?zai-glm-setup\.md''',
|
||||
'''\.github/workflows/pub-release\.yml'''
|
||||
]
|
||||
regexes = [
|
||||
'''Authorization: Bearer \$\{[^}]+\}''',
|
||||
'''curl -sS -o /tmp/ghcr-release-manifest\.json -w "%\{http_code\}"'''
|
||||
]
|
||||
103
AGENTS.md
103
AGENTS.md
@ -3,6 +3,22 @@
|
||||
This file defines the default working protocol for coding agents in this repository.
|
||||
Scope: entire repository.
|
||||
|
||||
## 0) Session Default Target (Mandatory)
|
||||
|
||||
- When operator intent does not explicitly specify another repository/path, treat the active coding target as this repository (`/home/ubuntu/zeroclaw`).
|
||||
- Do not switch to or implement in other repositories unless the operator explicitly requests that scope in the current conversation.
|
||||
- Ambiguous wording (for example "这个仓库", "当前项目", "the repo") is resolved to `/home/ubuntu/zeroclaw` by default.
|
||||
- Context mentioning external repositories does not authorize cross-repo edits; explicit current-turn override is required.
|
||||
- Before any repo-affecting action, verify target lock (`pwd` + git root) to prevent accidental execution in sibling repositories.
|
||||
|
||||
## 0.1) Clean Worktree First Gate (Mandatory)
|
||||
|
||||
- Before handling any repository content (analysis, debugging, coding, tests, docs, CI), create a **new clean dedicated git worktree** for the active task.
|
||||
- Do not perform substantive task work in a dirty workspace.
|
||||
- Do not reuse a previously dirty worktree for a new task track.
|
||||
- If the current location is dirty, stop and bootstrap a clean worktree/branch first.
|
||||
- If worktree bootstrap fails, stop and report the blocker; do not continue in-place.
|
||||
|
||||
## 1) Project Snapshot (Read First)
|
||||
|
||||
ZeroClaw is a Rust-first autonomous agent runtime optimized for:
|
||||
@ -153,13 +169,14 @@ Treat documentation as a first-class product surface, not a post-merge artifact.
|
||||
|
||||
Canonical entry points:
|
||||
|
||||
- root READMEs: `README.md`, `README.zh-CN.md`, `README.ja.md`, `README.ru.md`, `README.fr.md`, `README.vi.md`
|
||||
- docs hubs: `docs/README.md`, `docs/README.zh-CN.md`, `docs/README.ja.md`, `docs/README.ru.md`, `docs/README.fr.md`, `docs/i18n/vi/README.md`
|
||||
- repository landing + localized hubs: `README.md`, `docs/i18n/zh-CN/README.md`, `docs/i18n/ja/README.md`, `docs/i18n/ru/README.md`, `docs/i18n/fr/README.md`, `docs/i18n/vi/README.md`, `docs/i18n/el/README.md`
|
||||
- docs hubs: `docs/README.md`, `docs/i18n/zh-CN/README.md`, `docs/i18n/ja/README.md`, `docs/i18n/ru/README.md`, `docs/i18n/fr/README.md`, `docs/i18n/vi/README.md`, `docs/i18n/el/README.md`
|
||||
- unified TOC: `docs/SUMMARY.md`
|
||||
- i18n governance docs: `docs/i18n-guide.md`, `docs/i18n/README.md`, `docs/i18n-coverage.md`
|
||||
|
||||
Supported locales (current contract):
|
||||
|
||||
- `en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`
|
||||
- `en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`
|
||||
|
||||
Collection indexes (category navigation):
|
||||
|
||||
@ -184,14 +201,25 @@ Runtime-contract references (must track behavior changes):
|
||||
Required docs governance rules:
|
||||
|
||||
- Keep README/hub top navigation and quick routes intuitive and non-duplicative.
|
||||
- Keep entry-point parity across all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`) when changing navigation architecture.
|
||||
- Keep entry-point parity across all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`) when changing navigation architecture.
|
||||
- If a change touches docs IA, runtime-contract references, or user-facing wording in shared docs, perform i18n follow-through for currently supported locales in the same PR:
|
||||
- Update locale navigation links (`README*`, `docs/README*`, `docs/SUMMARY.md`).
|
||||
- Update localized runtime-contract docs where equivalents exist (at minimum `commands-reference`, `config-reference`, `troubleshooting` for `fr` and `vi`).
|
||||
- For Vietnamese, treat `docs/i18n/vi/**` as canonical. Keep `docs/*.<locale>.md` compatibility shims aligned if present.
|
||||
- Update canonical locale hubs and summaries under `docs/i18n/<locale>/` for every supported locale.
|
||||
- Update localized runtime-contract docs where equivalents exist (currently full trees for `vi` and `el`; do not regress `zh-CN`/`ja`/`ru`/`fr` hub parity).
|
||||
- Keep `docs/*.<locale>.md` compatibility shims aligned if present.
|
||||
- Follow `docs/i18n-guide.md` as the mandatory completion checklist when docs navigation or shared wording changes.
|
||||
- Keep proposal/roadmap docs explicitly labeled; avoid mixing proposal text into runtime-contract docs.
|
||||
- Keep project snapshots date-stamped and immutable once superseded by a newer date.
|
||||
|
||||
### 4.2 Docs i18n Completion Gate (Required)
|
||||
|
||||
For any PR that changes docs IA, locale navigation, or shared docs wording:
|
||||
|
||||
1. Complete i18n follow-through in the same PR using `docs/i18n-guide.md`.
|
||||
2. Keep all supported locale hubs/summaries navigable through canonical `docs/i18n/<locale>/` paths.
|
||||
3. Update `docs/i18n-coverage.md` when coverage status or locale topology changes.
|
||||
4. If any translation must be deferred, record explicit owner + follow-up issue/PR in the PR description.
|
||||
|
||||
## 5) Risk Tiers by Path (Review Depth Contract)
|
||||
|
||||
Use these tiers when deciding validation depth and review rigor.
|
||||
@ -216,7 +244,8 @@ When uncertain, classify as higher risk.
|
||||
5. **Document impact**
|
||||
- Update docs/PR notes for behavior, risk, side effects, and rollback.
|
||||
- If CLI/config/provider/channel behavior changed, update corresponding runtime-contract references.
|
||||
- If docs entry points changed, keep all supported locale README/docs-hub navigation aligned (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`).
|
||||
- If docs entry points changed, keep all supported locale README/docs-hub navigation aligned (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`).
|
||||
- Run through `docs/i18n-guide.md` and record any explicit i18n deferrals in the PR summary.
|
||||
6. **Respect queue hygiene**
|
||||
- If stacked PR: declare `Depends on #...`.
|
||||
- If replacing old PR: declare `Supersedes #...`.
|
||||
@ -227,20 +256,46 @@ All contributors (human or agent) must follow the same collaboration flow:
|
||||
|
||||
- Create and work from a non-`main` branch.
|
||||
- Commit changes to that branch with clear, scoped commit messages.
|
||||
- Open a PR to `dev`; do not push directly to `dev` or `main`.
|
||||
- `main` is reserved for release promotion PRs from `dev`.
|
||||
- Open a PR to `main` by default (`dev` is optional for integration batching); do not push directly to `dev` or `main`.
|
||||
- `main` accepts direct PR merges after required checks and review policy pass.
|
||||
- Wait for required checks and review outcomes before merging.
|
||||
- Merge via PR controls (squash/rebase/merge as repository policy allows).
|
||||
- Branch deletion after merge is optional; long-lived branches are allowed when intentionally maintained.
|
||||
- After merge/close, clean up task branches/worktrees that are no longer needed.
|
||||
- Keep long-lived branches only when intentionally maintained with clear owner and purpose.
|
||||
|
||||
### 6.2 Worktree Workflow (Required for Multi-Track Agent Work)
|
||||
### 6.1A PR Disposition and Workflow Authority (Required)
|
||||
|
||||
Use Git worktrees to isolate concurrent agent/human tracks safely and predictably:
|
||||
- Decide merge/close outcomes from repository-local authority in this order: `.github/workflows/**`, GitHub branch protection/rulesets, `docs/pr-workflow.md`, then this `AGENTS.md`.
|
||||
- External agent skills/templates are execution aids only; they must not override repository-local policy.
|
||||
- A normal contributor PR targeting `main` is valid under the main-first flow when required checks and review policy are satisfied; use `dev` only for explicit integration batching.
|
||||
- Direct-close the PR (do not supersede/replay) when high-confidence integrity-risk signals exist:
|
||||
- unapproved or unrelated repository rebranding attempts (for example replacing project logo/identity assets)
|
||||
- unauthorized platform-surface expansion (for example introducing `web` apps, dashboards, frontend stacks, or UI surfaces not requested by maintainers)
|
||||
- title/scope deception that hides high-risk code changes (for example `docs:` title with broad `src/**` changes)
|
||||
- spam-like or intentionally harmful payload patterns
|
||||
- multi-domain dirty-bundle changes with no safe, auditable isolation path
|
||||
- If unauthorized platform-surface expansion is detected during review/implementation, report to maintainers immediately and pause further execution until explicit direction is given.
|
||||
- Use supersede flow only when maintainers explicitly want to preserve valid work and attribution.
|
||||
- In public PR close/block comments, state only direct actionable reasons; do not include internal decision-process narration or "non-reason" qualifiers.
|
||||
|
||||
- Use one worktree per active branch/PR stream to avoid cross-task contamination.
|
||||
- Keep each worktree on a single branch; do not mix unrelated edits in one worktree.
|
||||
### 6.1B Assignee-First Gate (Required)
|
||||
|
||||
- For any GitHub issue or PR selected for active handling, the first action is to ensure `@chumyin` is an assignee.
|
||||
- This is additive ownership: keep existing assignees and add `@chumyin` if missing.
|
||||
- Do not start triage/review/implementation/merge work before assignee assignment is confirmed.
|
||||
- Queue safety rule: assign only the currently active target; do not pre-assign future queued targets.
|
||||
|
||||
### 6.2 Worktree Workflow (Required for All Task Streams)
|
||||
|
||||
Use Git worktrees to isolate every active task stream safely and predictably:
|
||||
|
||||
- Use one dedicated worktree per active branch/PR stream; do not implement directly in a shared default workspace.
|
||||
- Keep each worktree on a single branch and a single concern; do not mix unrelated edits in one worktree.
|
||||
- Before each commit/push, verify commit hygiene in that worktree (`git status --short` and `git diff --cached`) so only scoped files are included.
|
||||
- Run validation commands inside the corresponding worktree before commit/PR.
|
||||
- Name worktrees clearly by scope (for example: `wt/ci-hardening`, `wt/provider-fix`) and remove stale worktrees when no longer needed.
|
||||
- Name worktrees clearly by scope (for example: `wt/ci-hardening`, `wt/provider-fix`).
|
||||
- After PR merge/close (or task abandonment), remove stale worktrees/branches and prune refs (`git worktree prune`, `git fetch --prune`).
|
||||
- Local Codex automation may use one-command cleanup helper: `~/.codex/skills/zeroclaw-pr-issue-automation/scripts/cleanup_track.sh --repo-dir <repo_dir> --worktree <worktree_path> --branch <branch_name>`.
|
||||
- PR checkpoint rules from section 6.1 still apply to worktree-based development.
|
||||
|
||||
### 6.3 Code Naming Contract (Required)
|
||||
@ -305,11 +360,12 @@ Use these rules to keep the trait/factory architecture stable under growth.
|
||||
- Treat docs navigation as product UX: preserve clear pathing from README -> docs hub -> SUMMARY -> category index.
|
||||
- Keep top-level nav concise; avoid duplicative links across adjacent nav blocks.
|
||||
- When runtime surfaces change, update related references (`commands/providers/channels/config/runbook/troubleshooting`).
|
||||
- Keep multilingual entry-point parity for all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`) when nav or key wording changes.
|
||||
- Keep multilingual entry-point parity for all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`) when nav or key wording changes.
|
||||
- When shared docs wording changes, sync corresponding localized docs for supported locales in the same PR (or explicitly document deferral and follow-up PR).
|
||||
- Treat `docs/i18n/<locale>/**` as canonical for localized hubs/summaries; keep docs-root compatibility shims aligned when edited.
|
||||
- Apply `docs/i18n-guide.md` completion checklist before merge and include i18n status in PR notes.
|
||||
- For docs snapshots, add new date-stamped files for new sprints rather than rewriting historical context.
|
||||
|
||||
|
||||
## 8) Validation Matrix
|
||||
|
||||
Default local checks for code changes:
|
||||
@ -335,7 +391,7 @@ Additional expectations by change type:
|
||||
|
||||
- **Docs/template-only**:
|
||||
- run markdown lint and link-integrity checks
|
||||
- if touching README/docs-hub/SUMMARY/collection indexes, verify EN/ZH/JA/RU navigation parity
|
||||
- if touching README/docs-hub/SUMMARY/collection indexes, verify EN/ZH-CN/JA/RU/FR/VI/EL navigation parity
|
||||
- if touching bootstrap docs/scripts, run `bash -n bootstrap.sh scripts/bootstrap.sh scripts/install.sh`
|
||||
- **Workflow changes**: validate YAML syntax; run workflow lint/sanity checks when available.
|
||||
- **Security/runtime/gateway/tools**: include at least one boundary/failure-mode validation.
|
||||
@ -346,6 +402,12 @@ If full checks are impractical, run the most relevant subset and document what w
|
||||
|
||||
- Follow `.github/pull_request_template.md` fully (including side effects / blast radius).
|
||||
- Keep PR descriptions concrete: problem, change, non-goals, risk, rollback.
|
||||
- For issue-driven work, add explicit issue-closing keywords in the **PR body** for every resolved issue (for example `Closes #1502`).
|
||||
- Do not rely on issue comments alone for linkage visibility; comments are supplemental, not a substitute for PR-body closing references.
|
||||
- Default to one issue per clean commit/PR track. For multiple issues, split into separate clean commits/PRs unless there is clear technical coupling.
|
||||
- If multiple issues are intentionally bundled in one PR, document the coupling rationale explicitly in the PR summary.
|
||||
- Commit hygiene is mandatory: stage only task-scoped files and split unrelated changes into separate commits/worktrees.
|
||||
- Completion hygiene is mandatory: after merge/close, clean stale local branches/worktrees before starting the next track.
|
||||
- Use conventional commit titles.
|
||||
- Prefer small PRs (`size: XS/S/M`) when possible.
|
||||
- Agent-assisted PRs are welcome, **but contributors remain accountable for understanding what their code will do**.
|
||||
@ -439,6 +501,9 @@ Reference docs:
|
||||
- `CONTRIBUTING.md`
|
||||
- `docs/README.md`
|
||||
- `docs/SUMMARY.md`
|
||||
- `docs/i18n-guide.md`
|
||||
- `docs/i18n/README.md`
|
||||
- `docs/i18n-coverage.md`
|
||||
- `docs/docs-inventory.md`
|
||||
- `docs/commands-reference.md`
|
||||
- `docs/providers-reference.md`
|
||||
@ -462,6 +527,8 @@ Reference docs:
|
||||
- Do not bypass failing checks without explicit explanation.
|
||||
- Do not hide behavior-changing side effects in refactor commits.
|
||||
- Do not include personal identity or sensitive information in test data, examples, docs, or commits.
|
||||
- Do not attempt repository rebranding/identity replacement unless maintainers explicitly requested it in the current scope.
|
||||
- Do not introduce new platform surfaces (for example `web` apps, dashboards, frontend stacks, or UI portals) unless maintainers explicitly requested them in the current scope.
|
||||
|
||||
## 11) Handoff Template (Agent -> Agent / Maintainer)
|
||||
|
||||
|
||||
@ -18,6 +18,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
value if the input used the legacy `enc:` format
|
||||
- `SecretStore::needs_migration()` — Check if a value uses the legacy `enc:` format
|
||||
- `SecretStore::is_secure_encrypted()` — Check if a value uses the secure `enc2:` format
|
||||
- `feishu_doc` tool — Feishu/Lark document operations (`read`, `write`, `append`, `create`, `list_blocks`, `get_block`, `update_block`, `delete_block`, `create_table`, `write_table_cells`, `create_table_with_values`, `upload_image`, `upload_file`)
|
||||
- Agent session persistence guidance now includes explicit backend/strategy/TTL key names for rollout notes.
|
||||
- **Telegram mention_only mode** — New config option `mention_only` for Telegram channel.
|
||||
When enabled, bot only responds to messages that @-mention the bot in group chats.
|
||||
Direct messages always work regardless of this setting. Default: `false`.
|
||||
@ -27,6 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
Legacy values are still decrypted for backward compatibility but should be migrated.
|
||||
|
||||
### Fixed
|
||||
|
||||
- **Gemini thinking model support** — Responses from thinking models (e.g. `gemini-3-pro-preview`)
|
||||
are now handled correctly. The provider skips internal reasoning parts (`thought: true`) and
|
||||
signature parts (`thoughtSignature`), extracting only the final answer text. Falls back to
|
||||
@ -64,4 +67,4 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
- Workspace escape prevention
|
||||
- Forbidden system path protection (`/etc`, `/root`, `~/.ssh`)
|
||||
|
||||
[0.1.0]: https://github.com/theonlyhennygod/zeroclaw/releases/tag/v0.1.0
|
||||
[0.1.0]: https://github.com/zeroclaw-labs/zeroclaw/releases/tag/v0.1.0
|
||||
|
||||
86
CLAUDE.md
86
CLAUDE.md
@ -153,13 +153,14 @@ Treat documentation as a first-class product surface, not a post-merge artifact.
|
||||
|
||||
Canonical entry points:
|
||||
|
||||
- root READMEs: `README.md`, `README.zh-CN.md`, `README.ja.md`, `README.ru.md`, `README.fr.md`, `README.vi.md`
|
||||
- docs hubs: `docs/README.md`, `docs/README.zh-CN.md`, `docs/README.ja.md`, `docs/README.ru.md`, `docs/README.fr.md`, `docs/i18n/vi/README.md`
|
||||
- repository landing + localized hubs: `README.md`, `docs/i18n/zh-CN/README.md`, `docs/i18n/ja/README.md`, `docs/i18n/ru/README.md`, `docs/i18n/fr/README.md`, `docs/i18n/vi/README.md`, `docs/i18n/el/README.md`
|
||||
- docs hubs: `docs/README.md`, `docs/i18n/zh-CN/README.md`, `docs/i18n/ja/README.md`, `docs/i18n/ru/README.md`, `docs/i18n/fr/README.md`, `docs/i18n/vi/README.md`, `docs/i18n/el/README.md`
|
||||
- unified TOC: `docs/SUMMARY.md`
|
||||
- i18n governance docs: `docs/i18n-guide.md`, `docs/i18n/README.md`, `docs/i18n-coverage.md`
|
||||
|
||||
Supported locales (current contract):
|
||||
|
||||
- `en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`
|
||||
- `en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`
|
||||
|
||||
Collection indexes (category navigation):
|
||||
|
||||
@ -184,14 +185,25 @@ Runtime-contract references (must track behavior changes):
|
||||
Required docs governance rules:
|
||||
|
||||
- Keep README/hub top navigation and quick routes intuitive and non-duplicative.
|
||||
- Keep entry-point parity across all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`) when changing navigation architecture.
|
||||
- Keep entry-point parity across all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`) when changing navigation architecture.
|
||||
- If a change touches docs IA, runtime-contract references, or user-facing wording in shared docs, perform i18n follow-through for currently supported locales in the same PR:
|
||||
- Update locale navigation links (`README*`, `docs/README*`, `docs/SUMMARY.md`).
|
||||
- Update localized runtime-contract docs where equivalents exist (at minimum `commands-reference`, `config-reference`, `troubleshooting` for `fr` and `vi`).
|
||||
- For Vietnamese, treat `docs/i18n/vi/**` as canonical. Keep `docs/*.<locale>.md` compatibility shims aligned if present.
|
||||
- Update canonical locale hubs and summaries under `docs/i18n/<locale>/` for every supported locale.
|
||||
- Update localized runtime-contract docs where equivalents exist (currently full trees for `vi` and `el`; do not regress `zh-CN`/`ja`/`ru`/`fr` hub parity).
|
||||
- Keep `docs/*.<locale>.md` compatibility shims aligned if present.
|
||||
- Follow `docs/i18n-guide.md` as the mandatory completion checklist when docs navigation or shared wording changes.
|
||||
- Keep proposal/roadmap docs explicitly labeled; avoid mixing proposal text into runtime-contract docs.
|
||||
- Keep project snapshots date-stamped and immutable once superseded by a newer date.
|
||||
|
||||
### 4.2 Docs i18n Completion Gate (Required)
|
||||
|
||||
For any PR that changes docs IA, locale navigation, or shared docs wording:
|
||||
|
||||
1. Complete i18n follow-through in the same PR using `docs/i18n-guide.md`.
|
||||
2. Keep all supported locale hubs/summaries navigable through canonical `docs/i18n/<locale>/` paths.
|
||||
3. Update `docs/i18n-coverage.md` when coverage status or locale topology changes.
|
||||
4. If any translation must be deferred, record explicit owner + follow-up issue/PR in the PR description.
|
||||
|
||||
## 5) Risk Tiers by Path (Review Depth Contract)
|
||||
|
||||
Use these tiers when deciding validation depth and review rigor.
|
||||
@ -216,7 +228,8 @@ When uncertain, classify as higher risk.
|
||||
5. **Document impact**
|
||||
- Update docs/PR notes for behavior, risk, side effects, and rollback.
|
||||
- If CLI/config/provider/channel behavior changed, update corresponding runtime-contract references.
|
||||
- If docs entry points changed, keep all supported locale README/docs-hub navigation aligned (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`).
|
||||
- If docs entry points changed, keep all supported locale README/docs-hub navigation aligned (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`).
|
||||
- Run through `docs/i18n-guide.md` and record any explicit i18n deferrals in the PR summary.
|
||||
6. **Respect queue hygiene**
|
||||
- If stacked PR: declare `Depends on #...`.
|
||||
- If replacing old PR: declare `Supersedes #...`.
|
||||
@ -227,19 +240,46 @@ All contributors (human or agent) must follow the same collaboration flow:
|
||||
|
||||
- Create and work from a non-`main` branch.
|
||||
- Commit changes to that branch with clear, scoped commit messages.
|
||||
- Open a PR to `main`; do not push directly to `main`.
|
||||
- Open a PR to `main` by default (`dev` is optional for integration batching); do not push directly to `dev` or `main`.
|
||||
- `main` accepts direct PR merges after required checks and review policy pass.
|
||||
- Wait for required checks and review outcomes before merging.
|
||||
- Merge via PR controls (squash/rebase/merge as repository policy allows).
|
||||
- Branch deletion after merge is optional; long-lived branches are allowed when intentionally maintained.
|
||||
- After merge/close, clean up task branches/worktrees that are no longer needed.
|
||||
- Keep long-lived branches only when intentionally maintained with clear owner and purpose.
|
||||
|
||||
### 6.2 Worktree Workflow (Required for Multi-Track Agent Work)
|
||||
### 6.1A PR Disposition and Workflow Authority (Required)
|
||||
|
||||
Use Git worktrees to isolate concurrent agent/human tracks safely and predictably:
|
||||
- Decide merge/close outcomes from repository-local authority in this order: `.github/workflows/**`, GitHub branch protection/rulesets, `docs/pr-workflow.md`, then this `CLAUDE.md`.
|
||||
- External agent skills/templates are execution aids only; they must not override repository-local policy.
|
||||
- A normal contributor PR targeting `main` is valid under the main-first flow when required checks and review policy are satisfied; use `dev` only for explicit integration batching.
|
||||
- Direct-close the PR (do not supersede/replay) when high-confidence integrity-risk signals exist:
|
||||
- unapproved or unrelated repository rebranding attempts (for example replacing project logo/identity assets)
|
||||
- unauthorized platform-surface expansion (for example introducing `web` apps, dashboards, frontend stacks, or UI surfaces not requested by maintainers)
|
||||
- title/scope deception that hides high-risk code changes (for example `docs:` title with broad `src/**` changes)
|
||||
- spam-like or intentionally harmful payload patterns
|
||||
- multi-domain dirty-bundle changes with no safe, auditable isolation path
|
||||
- If unauthorized platform-surface expansion is detected during review/implementation, report to maintainers immediately and pause further execution until explicit direction is given.
|
||||
- Use supersede flow only when maintainers explicitly want to preserve valid work and attribution.
|
||||
- In public PR close/block comments, state only direct actionable reasons; do not include internal decision-process narration or "non-reason" qualifiers.
|
||||
|
||||
- Use one worktree per active branch/PR stream to avoid cross-task contamination.
|
||||
- Keep each worktree on a single branch; do not mix unrelated edits in one worktree.
|
||||
### 6.1B Assignee-First Gate (Required)
|
||||
|
||||
- For any GitHub issue or PR selected for active handling, the first action is to ensure `@chumyin` is an assignee.
|
||||
- This is additive ownership: keep existing assignees and add `@chumyin` if missing.
|
||||
- Do not start triage/review/implementation/merge work before assignee assignment is confirmed.
|
||||
- Queue safety rule: assign only the currently active target; do not pre-assign future queued targets.
|
||||
|
||||
### 6.2 Worktree Workflow (Required for All Task Streams)
|
||||
|
||||
Use Git worktrees to isolate every active task stream safely and predictably:
|
||||
|
||||
- Use one dedicated worktree per active branch/PR stream; do not implement directly in a shared default workspace.
|
||||
- Keep each worktree on a single branch and a single concern; do not mix unrelated edits in one worktree.
|
||||
- Before each commit/push, verify commit hygiene in that worktree (`git status --short` and `git diff --cached`) so only scoped files are included.
|
||||
- Run validation commands inside the corresponding worktree before commit/PR.
|
||||
- Name worktrees clearly by scope (for example: `wt/ci-hardening`, `wt/provider-fix`) and remove stale worktrees when no longer needed.
|
||||
- Name worktrees clearly by scope (for example: `wt/ci-hardening`, `wt/provider-fix`).
|
||||
- After PR merge/close (or task abandonment), remove stale worktrees/branches and prune refs (`git worktree prune`, `git fetch --prune`).
|
||||
- Local Codex automation may use one-command cleanup helper: `~/.codex/skills/zeroclaw-pr-issue-automation/scripts/cleanup_track.sh --repo-dir <repo_dir> --worktree <worktree_path> --branch <branch_name>`.
|
||||
- PR checkpoint rules from section 6.1 still apply to worktree-based development.
|
||||
|
||||
### 6.3 Code Naming Contract (Required)
|
||||
@ -304,11 +344,12 @@ Use these rules to keep the trait/factory architecture stable under growth.
|
||||
- Treat docs navigation as product UX: preserve clear pathing from README -> docs hub -> SUMMARY -> category index.
|
||||
- Keep top-level nav concise; avoid duplicative links across adjacent nav blocks.
|
||||
- When runtime surfaces change, update related references (`commands/providers/channels/config/runbook/troubleshooting`).
|
||||
- Keep multilingual entry-point parity for all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`) when nav or key wording changes.
|
||||
- Keep multilingual entry-point parity for all supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`, `el`) when nav or key wording changes.
|
||||
- When shared docs wording changes, sync corresponding localized docs for supported locales in the same PR (or explicitly document deferral and follow-up PR).
|
||||
- Treat `docs/i18n/<locale>/**` as canonical for localized hubs/summaries; keep docs-root compatibility shims aligned when edited.
|
||||
- Apply `docs/i18n-guide.md` completion checklist before merge and include i18n status in PR notes.
|
||||
- For docs snapshots, add new date-stamped files for new sprints rather than rewriting historical context.
|
||||
|
||||
|
||||
## 8) Validation Matrix
|
||||
|
||||
Default local checks for code changes:
|
||||
@ -334,7 +375,7 @@ Additional expectations by change type:
|
||||
|
||||
- **Docs/template-only**:
|
||||
- run markdown lint and link-integrity checks
|
||||
- if touching README/docs-hub/SUMMARY/collection indexes, verify EN/ZH/JA/RU navigation parity
|
||||
- if touching README/docs-hub/SUMMARY/collection indexes, verify EN/ZH-CN/JA/RU/FR/VI/EL navigation parity
|
||||
- if touching bootstrap docs/scripts, run `bash -n bootstrap.sh scripts/bootstrap.sh scripts/install.sh`
|
||||
- **Workflow changes**: validate YAML syntax; run workflow lint/sanity checks when available.
|
||||
- **Security/runtime/gateway/tools**: include at least one boundary/failure-mode validation.
|
||||
@ -345,6 +386,12 @@ If full checks are impractical, run the most relevant subset and document what w
|
||||
|
||||
- Follow `.github/pull_request_template.md` fully (including side effects / blast radius).
|
||||
- Keep PR descriptions concrete: problem, change, non-goals, risk, rollback.
|
||||
- For issue-driven work, add explicit issue-closing keywords in the **PR body** for every resolved issue (for example `Closes #1502`).
|
||||
- Do not rely on issue comments alone for linkage visibility; comments are supplemental, not a substitute for PR-body closing references.
|
||||
- Default to one issue per clean commit/PR track. For multiple issues, split into separate clean commits/PRs unless there is clear technical coupling.
|
||||
- If multiple issues are intentionally bundled in one PR, document the coupling rationale explicitly in the PR summary.
|
||||
- Commit hygiene is mandatory: stage only task-scoped files and split unrelated changes into separate commits/worktrees.
|
||||
- Completion hygiene is mandatory: after merge/close, clean stale local branches/worktrees before starting the next track.
|
||||
- Use conventional commit titles.
|
||||
- Prefer small PRs (`size: XS/S/M`) when possible.
|
||||
- Agent-assisted PRs are welcome, **but contributors remain accountable for understanding what their code will do**.
|
||||
@ -438,6 +485,9 @@ Reference docs:
|
||||
- `CONTRIBUTING.md`
|
||||
- `docs/README.md`
|
||||
- `docs/SUMMARY.md`
|
||||
- `docs/i18n-guide.md`
|
||||
- `docs/i18n/README.md`
|
||||
- `docs/i18n-coverage.md`
|
||||
- `docs/docs-inventory.md`
|
||||
- `docs/commands-reference.md`
|
||||
- `docs/providers-reference.md`
|
||||
@ -461,6 +511,8 @@ Reference docs:
|
||||
- Do not bypass failing checks without explicit explanation.
|
||||
- Do not hide behavior-changing side effects in refactor commits.
|
||||
- Do not include personal identity or sensitive information in test data, examples, docs, or commits.
|
||||
- Do not attempt repository rebranding/identity replacement unless maintainers explicitly requested it in the current scope.
|
||||
- Do not introduce new platform surfaces (for example `web` apps, dashboards, frontend stacks, or UI portals) unless maintainers explicitly requested them in the current scope.
|
||||
|
||||
## 11) Handoff Template (Agent -> Agent / Maintainer)
|
||||
|
||||
|
||||
93
CONTRIBUTING.el.md
Normal file
93
CONTRIBUTING.el.md
Normal file
@ -0,0 +1,93 @@
|
||||
# Συνεισφορά στο ZeroClaw
|
||||
|
||||
Σας ευχαριστούμε για το ενδιαφέρον σας να συνεισφέρετε στο ZeroClaw! Αυτός ο οδηγός θα σας βοηθήσει να ξεκινήσετε.
|
||||
|
||||
## Συνεισφέροντες για πρώτη φορά
|
||||
|
||||
Καλώς ήρθατε — οι συνεισφορές κάθε μεγέθους είναι πολύτιμες. Εάν αυτή είναι η πρώτη σας συνεισφορά, δείτε πώς μπορείτε να ξεκινήσετε:
|
||||
|
||||
1. **Βρείτε ένα ζήτημα.** Αναζητήστε ζητήματα με την ετικέτα [`good first issue`](https://github.com/zeroclaw-labs/zeroclaw/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) — αυτά είναι σχεδιασμένα για νεοεισερχόμενους και περιλαμβάνουν το απαραίτητο πλαίσιο για να ξεκινήσετε γρήγορα.
|
||||
|
||||
2. **Επιλέξτε ένα πεδίο.** Καλές πρώτες συνεισφορές περιλαμβάνουν:
|
||||
- Διορθώσεις τυπογραφικών λαθών και τεκμηρίωσης
|
||||
- Προσθήκες ή βελτιώσεις δοκιμών (tests)
|
||||
- Μικρές διορθώσεις σφαλμάτων με σαφή βήματα αναπαραγωγής
|
||||
|
||||
3. **Ακολουθήστε τη ροή εργασίας fork → branch → change → test → PR:**
|
||||
- Κάντε fork το αποθετήριο και κλωνοποιήστε το δικό σας fork
|
||||
- Δημιουργήστε έναν κλάδο δυνατοτήτων (feature branch) (`git checkout -b fix/my-change`)
|
||||
- Κάντε τις αλλαγές σας και εκτελέστε `cargo fmt && cargo clippy && cargo test`
|
||||
- Ανοίξτε ένα PR προς το `main` (προεπιλογή· το `dev` χρησιμοποιείται μόνο για ρητή ομαδοποίηση ενσωμάτωσης) χρησιμοποιώντας το πρότυπο PR
|
||||
|
||||
4. **Ξεκινήστε με το Track A.** Το ZeroClaw χρησιμοποιεί τρία [επίπεδα συνεργασίας](#επίπεδα-συνεργασίας-βάσει-κινδύνου) (A/B/C) βάσει κινδύνου. Οι συνεισφέροντες για πρώτη φορά θα πρέπει να στοχεύουν στο **Track A** (τεκμηρίωση, δοκιμές, μικροεργασίες) — αυτά απαιτούν ελαφρύτερη αναθεώρηση και είναι η ταχύτερη διαδρομή για την ενσωμάτωση (merge) ενός PR.
|
||||
|
||||
Εάν κολλήσετε, ανοίξτε ένα draft PR νωρίς και κάντε ερωτήσεις στην περιγραφή.
|
||||
|
||||
## Ρύθμιση Ανάπτυξης
|
||||
|
||||
```bash
|
||||
# Κλωνοποιήστε το αποθετήριο
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
|
||||
# Ενεργοποιήστε το pre-push hook (εκτελεί fmt, clippy, δοκιμές πριν από κάθε push)
|
||||
git config core.hooksPath .githooks
|
||||
|
||||
# Κατασκευή (Build)
|
||||
cargo build
|
||||
|
||||
# Εκτέλεση δοκιμών (πρέπει να περάσουν όλες)
|
||||
cargo test --locked
|
||||
|
||||
# Μορφοποίηση και έλεγχος (απαιτείται πριν το PR)
|
||||
./scripts/ci/rust_quality_gate.sh
|
||||
|
||||
# Έκδοση release
|
||||
cargo build --release --locked
|
||||
```
|
||||
|
||||
### Pre-push hook
|
||||
|
||||
Το αποθετήριο περιλαμβάνει ένα pre-push hook στο `.githooks/` που επιβάλλει το `./scripts/ci/rust_quality_gate.sh` και το `cargo test --locked` πριν από κάθε push. Ενεργοποιήστε το με την εντολή `git config core.hooksPath .githooks`.
|
||||
|
||||
## Τοπική Διαχείριση Μυστικών (Απαιτείται)
|
||||
|
||||
Το ZeroClaw υποστηρίζει κλιμακωτή διαχείριση μυστικών για την τοπική ανάπτυξη και την υγιεινή του CI.
|
||||
|
||||
### Επιλογές Αποθήκευσης Μυστικών
|
||||
|
||||
1. **Μεταβλητές περιβάλλοντος** (συνιστάται για τοπική ανάπτυξη)
|
||||
- Αντιγράψτε το `.env.example` στο `.env` και συμπληρώστε τις τιμές
|
||||
- Τα αρχεία `.env` αγνοούνται από το Git και πρέπει να παραμένουν τοπικά
|
||||
|
||||
2. **Αρχείο ρυθμίσεων** (`~/.zeroclaw/config.toml`)
|
||||
- Μόνιμη ρύθμιση για μακροχρόνια χρήση
|
||||
- Όταν `secrets.encrypt = true` (προεπιλογή), οι τιμές κρυπτογραφούνται πριν την αποθήκευση
|
||||
|
||||
### Κανόνες Επίλυσης κατά την Εκτέλεση
|
||||
|
||||
Η επίλυση του κλειδιού API ακολουθεί αυτή τη σειρά:
|
||||
|
||||
1. Ρητό κλειδί που μεταδίδεται από το config/CLI
|
||||
2. Μεταβλητές περιβάλλοντος ειδικά για τον πάροχο (`OPENROUTER_API_KEY`, `OPENAI_API_KEY`, κ.λπ.)
|
||||
3. Γενικές μεταβλητές περιβάλλοντος (`ZEROCLAW_API_KEY`, `API_KEY`)
|
||||
|
||||
### Υγιεινή Μυστικών Πριν το Commit (Υποχρεωτικό)
|
||||
|
||||
Πριν από κάθε commit, επαληθεύστε:
|
||||
|
||||
- [ ] Δεν έχουν προστεθεί αρχεία `.env` (μόνο το `.env.example` επιτρέπεται)
|
||||
- [ ] Δεν υπάρχουν κλειδιά API/tokens στον κώδικα, τις δοκιμές, τα παραδείγματα ή τα μηνύματα commit
|
||||
- [ ] Δεν υπάρχουν διαπιστευτήρια σε εξόδους αποσφαλμάτωσης (debug output)
|
||||
|
||||
## Επίπεδα Συνεργασίας (Βάσει Κινδύνου)
|
||||
|
||||
| Επίπεδο | Τυπικό πεδίο | Απαιτούμενο βάθος αναθεώρησης |
|
||||
|---|---|---|
|
||||
| **Track A (Χαμηλός κίνδυνος)** | τεκμηρίωση/δοκιμές, απομονωμένο refactoring | 1 αναθεώρηση από συντηρητή + επιτυχές CI |
|
||||
| **Track B (Μεσαίος κίνδυνος)** | αλλαγές συμπεριφοράς παρόχων/καναλιών/μνήμης | 1 αναθεώρηση με γνώση του υποσυστήματος + τεκμηρίωση επαλήθευσης |
|
||||
| **Track C (Υψηλός κίνδυνος)** | ασφάλεια, περιβάλλον εκτέλεσης, CI, όρια πρόσβασης | Αναθεώρηση 2 φάσεων + σχέδιο επαναφοράς (rollback) |
|
||||
|
||||
---
|
||||
|
||||
**ZeroClaw** — Μηδενική επιβάρυνση. Κανένας συμβιβασμός. 🦀
|
||||
2064
Cargo.lock
generated
2064
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
60
Cargo.toml
60
Cargo.toml
@ -1,11 +1,17 @@
|
||||
[workspace]
|
||||
members = [".", "crates/robot-kit"]
|
||||
members = [
|
||||
".",
|
||||
"crates/robot-kit",
|
||||
"crates/zeroclaw-types",
|
||||
"crates/zeroclaw-core",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[package]
|
||||
name = "zeroclaw"
|
||||
version = "0.1.7"
|
||||
version = "0.1.8"
|
||||
edition = "2021"
|
||||
build = "build.rs"
|
||||
authors = ["theonlyhennygod"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "Zero overhead. Zero compromise. 100% Rust. The fastest, smallest AI assistant."
|
||||
@ -34,7 +40,6 @@ matrix-sdk = { version = "0.16", optional = true, default-features = false, feat
|
||||
# Serialization
|
||||
serde = { version = "1.0", default-features = false, features = ["derive"] }
|
||||
serde_json = { version = "1.0", default-features = false, features = ["std"] }
|
||||
serde_ignored = "0.1"
|
||||
|
||||
# Config
|
||||
directories = "6.0"
|
||||
@ -46,7 +51,7 @@ schemars = "1.2"
|
||||
|
||||
# Logging - minimal
|
||||
tracing = { version = "0.1", default-features = false }
|
||||
tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "ansi", "env-filter"] }
|
||||
tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "ansi", "env-filter", "chrono"] }
|
||||
|
||||
# Observability - Prometheus metrics
|
||||
prometheus = { version = "0.14", default-features = false }
|
||||
@ -58,15 +63,26 @@ image = { version = "0.25", default-features = false, features = ["jpeg", "png"]
|
||||
# URL encoding for web search
|
||||
urlencoding = "2.1"
|
||||
|
||||
# HTML to plain text conversion (web_fetch tool)
|
||||
# HTML to plain text / markdown conversion (web_fetch tool)
|
||||
nanohtml2text = "0.2"
|
||||
html2md = { package = "fast_html2md", version = "0.0.58", optional = true }
|
||||
|
||||
# Zip archive extraction
|
||||
zip = { version = "8.1", default-features = false, features = ["deflate"] }
|
||||
|
||||
# XML parsing (DOCX text extraction)
|
||||
quick-xml = "0.37"
|
||||
|
||||
# Optional Rust-native browser automation backend
|
||||
fantoccini = { version = "0.22.0", optional = true, default-features = false, features = ["rustls-tls"] }
|
||||
|
||||
# Optional in-process WASM runtime for sandboxed tool execution
|
||||
wasmi = { version = "1.0.9", optional = true, default-features = true }
|
||||
|
||||
# Error handling
|
||||
anyhow = "1.0"
|
||||
thiserror = "2.0"
|
||||
aho-corasick = "1.1"
|
||||
|
||||
# UUID generation
|
||||
uuid = { version = "1.11", default-features = false, features = ["v4", "std"] }
|
||||
@ -100,13 +116,17 @@ prost = { version = "0.14", default-features = false, features = ["derive"], opt
|
||||
# Memory / persistence
|
||||
rusqlite = { version = "0.37", features = ["bundled"] }
|
||||
postgres = { version = "0.19", features = ["with-chrono-0_4"], optional = true }
|
||||
tokio-postgres-rustls = { version = "0.13", optional = true }
|
||||
chrono = { version = "0.4", default-features = false, features = ["clock", "std", "serde"] }
|
||||
chrono-tz = "0.10"
|
||||
cron = "0.15"
|
||||
|
||||
# Interactive CLI prompts
|
||||
dialoguer = { version = "0.12", features = ["fuzzy-select"] }
|
||||
rustyline = "17.0"
|
||||
console = "0.16"
|
||||
crossterm = "0.29"
|
||||
ratatui = { version = "0.29", default-features = false, features = ["crossterm"] }
|
||||
|
||||
# Hardware discovery (device path globbing)
|
||||
glob = "0.3"
|
||||
@ -114,6 +134,9 @@ glob = "0.3"
|
||||
# Binary discovery (init system detection)
|
||||
which = "8.0"
|
||||
|
||||
# Temporary directory creation (for self-update)
|
||||
tempfile = "3.14"
|
||||
|
||||
# WebSocket client channels (Discord/Lark/DingTalk/Nostr)
|
||||
tokio-tungstenite = { version = "0.28", features = ["rustls-tls-webpki-roots"] }
|
||||
futures-util = { version = "0.3", default-features = false, features = ["sink"] }
|
||||
@ -148,6 +171,11 @@ opentelemetry = { version = "0.31", default-features = false, features = ["trace
|
||||
opentelemetry_sdk = { version = "0.31", default-features = false, features = ["trace", "metrics"], optional = true }
|
||||
opentelemetry-otlp = { version = "0.31", default-features = false, features = ["trace", "metrics", "http-proto", "reqwest-blocking-client", "reqwest-rustls-webpki-roots"], optional = true }
|
||||
|
||||
# WASM runtime for plugin execution
|
||||
# Keep this on a RustSec-patched line that remains compatible with the
|
||||
# workspace rust-version = "1.87".
|
||||
wasmtime = { version = "36.0.6", default-features = false, features = ["runtime", "cranelift"] }
|
||||
|
||||
# Serial port for peripheral communication (STM32, etc.)
|
||||
tokio-serial = { version = "5", default-features = false, optional = true }
|
||||
|
||||
@ -161,6 +189,11 @@ probe-rs = { version = "0.31", optional = true }
|
||||
|
||||
# PDF extraction for datasheet RAG (optional, enable with --features rag-pdf)
|
||||
pdf-extract = { version = "0.10", optional = true }
|
||||
tempfile = "3.14"
|
||||
|
||||
# WASM plugin runtime (optional, enable with --features wasm-tools)
|
||||
# Uses WASI stdio protocol — tools read JSON from stdin, write JSON to stdout.
|
||||
wasmtime-wasi = { version = "36.0.6", optional = true, default-features = false, features = ["preview1"] }
|
||||
|
||||
# Terminal QR rendering for WhatsApp Web pairing flow.
|
||||
qrcode = { version = "0.14", optional = true }
|
||||
@ -184,17 +217,20 @@ landlock = { version = "0.4", optional = true }
|
||||
libc = "0.2"
|
||||
|
||||
[features]
|
||||
# Keep default minimal for widest host compatibility (including macOS 10.15).
|
||||
default = []
|
||||
hardware = ["nusb", "tokio-serial"]
|
||||
channel-matrix = ["dep:matrix-sdk"]
|
||||
channel-lark = ["dep:prost"]
|
||||
memory-postgres = ["dep:postgres"]
|
||||
memory-postgres = ["dep:postgres", "dep:tokio-postgres-rustls"]
|
||||
observability-otel = ["dep:opentelemetry", "dep:opentelemetry_sdk", "dep:opentelemetry-otlp"]
|
||||
peripheral-rpi = ["rppal"]
|
||||
# Browser backend feature alias used by cfg(feature = "browser-native")
|
||||
browser-native = ["dep:fantoccini"]
|
||||
# Backward-compatible alias for older invocations
|
||||
fantoccini = ["browser-native"]
|
||||
# In-process WASM runtime (capability-based sandbox)
|
||||
runtime-wasm = ["dep:wasmi"]
|
||||
# Sandbox feature aliases used by cfg(feature = "sandbox-*")
|
||||
sandbox-landlock = ["dep:landlock"]
|
||||
sandbox-bubblewrap = []
|
||||
@ -204,8 +240,15 @@ landlock = ["sandbox-landlock"]
|
||||
probe = ["dep:probe-rs"]
|
||||
# rag-pdf = PDF ingestion for datasheet RAG
|
||||
rag-pdf = ["dep:pdf-extract"]
|
||||
# wasm-tools = WASM plugin engine for dynamically-loaded tool packages (WASI stdio protocol)
|
||||
# Runtime implementation is active on Linux/macOS/Windows; unsupported targets use stubs.
|
||||
wasm-tools = ["dep:wasmtime-wasi"]
|
||||
# whatsapp-web = Native WhatsApp Web client with custom rusqlite storage backend
|
||||
whatsapp-web = ["dep:wa-rs", "dep:wa-rs-core", "dep:wa-rs-binary", "dep:wa-rs-proto", "dep:wa-rs-ureq-http", "dep:wa-rs-tokio-transport", "dep:serde-big-array", "dep:prost", "dep:qrcode"]
|
||||
# Optional provider feature flags used by cfg(feature = "...") guards.
|
||||
# Keep disabled by default to preserve current runtime behavior.
|
||||
firecrawl = []
|
||||
web-fetch-html2md = ["dep:html2md"]
|
||||
|
||||
[profile.release]
|
||||
opt-level = "z" # Optimize for size
|
||||
@ -217,8 +260,9 @@ panic = "abort" # Reduce binary size
|
||||
|
||||
[profile.release-fast]
|
||||
inherits = "release"
|
||||
codegen-units = 8 # Parallel codegen for faster builds on powerful machines (16GB+ RAM recommended)
|
||||
# Use: cargo build --profile release-fast
|
||||
# Keep release-fast under CI binary size safeguard (20MB hard gate).
|
||||
# Using 1 codegen unit preserves release-level size characteristics.
|
||||
codegen-units = 1
|
||||
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
39
Dockerfile
39
Dockerfile
@ -1,36 +1,51 @@
|
||||
# syntax=docker/dockerfile:1.7
|
||||
|
||||
# ── Stage 1: Build ────────────────────────────────────────────
|
||||
FROM rust:1.93-slim@sha256:9663b80a1621253d30b146454f903de48f0af925c967be48c84745537cd35d8b AS builder
|
||||
FROM rust:1.93-slim@sha256:7e6fa79cf81be23fd45d857f75f583d80cfdbb11c91fa06180fd747fda37a61d AS builder
|
||||
|
||||
WORKDIR /app
|
||||
ARG ZEROCLAW_CARGO_FEATURES=""
|
||||
ARG ZEROCLAW_CARGO_ALL_FEATURES="false"
|
||||
|
||||
# Install build dependencies
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && apt-get install -y \
|
||||
libudev-dev \
|
||||
pkg-config \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# 1. Copy manifests to cache dependencies
|
||||
COPY Cargo.toml Cargo.lock ./
|
||||
COPY build.rs build.rs
|
||||
COPY crates/robot-kit/Cargo.toml crates/robot-kit/Cargo.toml
|
||||
COPY crates/zeroclaw-types/Cargo.toml crates/zeroclaw-types/Cargo.toml
|
||||
COPY crates/zeroclaw-core/Cargo.toml crates/zeroclaw-core/Cargo.toml
|
||||
# Create dummy targets declared in Cargo.toml so manifest parsing succeeds.
|
||||
RUN mkdir -p src benches crates/robot-kit/src \
|
||||
RUN mkdir -p src benches crates/robot-kit/src crates/zeroclaw-types/src crates/zeroclaw-core/src \
|
||||
&& echo "fn main() {}" > src/main.rs \
|
||||
&& echo "fn main() {}" > benches/agent_benchmarks.rs \
|
||||
&& echo "pub fn placeholder() {}" > crates/robot-kit/src/lib.rs
|
||||
&& echo "pub fn placeholder() {}" > crates/robot-kit/src/lib.rs \
|
||||
&& echo "pub fn placeholder() {}" > crates/zeroclaw-types/src/lib.rs \
|
||||
&& echo "pub fn placeholder() {}" > crates/zeroclaw-core/src/lib.rs
|
||||
RUN --mount=type=cache,id=zeroclaw-cargo-registry,target=/usr/local/cargo/registry,sharing=locked \
|
||||
--mount=type=cache,id=zeroclaw-cargo-git,target=/usr/local/cargo/git,sharing=locked \
|
||||
--mount=type=cache,id=zeroclaw-target,target=/app/target,sharing=locked \
|
||||
cargo build --release --locked
|
||||
RUN rm -rf src benches crates/robot-kit/src
|
||||
if [ "$ZEROCLAW_CARGO_ALL_FEATURES" = "true" ]; then \
|
||||
cargo build --release --locked --all-features; \
|
||||
elif [ -n "$ZEROCLAW_CARGO_FEATURES" ]; then \
|
||||
cargo build --release --locked --features "$ZEROCLAW_CARGO_FEATURES"; \
|
||||
else \
|
||||
cargo build --release --locked; \
|
||||
fi
|
||||
RUN rm -rf src benches crates/robot-kit/src crates/zeroclaw-types/src crates/zeroclaw-core/src
|
||||
|
||||
# 2. Copy only build-relevant source paths (avoid cache-busting on docs/tests/scripts)
|
||||
COPY src/ src/
|
||||
COPY benches/ benches/
|
||||
COPY crates/ crates/
|
||||
COPY firmware/ firmware/
|
||||
COPY templates/ templates/
|
||||
COPY web/ web/
|
||||
# Keep release builds resilient when frontend dist assets are not prebuilt in Git.
|
||||
RUN mkdir -p web/dist && \
|
||||
@ -52,7 +67,13 @@ RUN mkdir -p web/dist && \
|
||||
RUN --mount=type=cache,id=zeroclaw-cargo-registry,target=/usr/local/cargo/registry,sharing=locked \
|
||||
--mount=type=cache,id=zeroclaw-cargo-git,target=/usr/local/cargo/git,sharing=locked \
|
||||
--mount=type=cache,id=zeroclaw-target,target=/app/target,sharing=locked \
|
||||
cargo build --release --locked && \
|
||||
if [ "$ZEROCLAW_CARGO_ALL_FEATURES" = "true" ]; then \
|
||||
cargo build --release --locked --all-features; \
|
||||
elif [ -n "$ZEROCLAW_CARGO_FEATURES" ]; then \
|
||||
cargo build --release --locked --features "$ZEROCLAW_CARGO_FEATURES"; \
|
||||
else \
|
||||
cargo build --release --locked; \
|
||||
fi && \
|
||||
cp target/release/zeroclaw /app/zeroclaw && \
|
||||
strip /app/zeroclaw
|
||||
|
||||
@ -69,12 +90,12 @@ default_temperature = 0.7
|
||||
|
||||
[gateway]
|
||||
port = 42617
|
||||
host = "[::]"
|
||||
allow_public_bind = true
|
||||
host = "127.0.0.1"
|
||||
allow_public_bind = false
|
||||
EOF
|
||||
|
||||
# ── Stage 2: Development Runtime (Debian) ────────────────────
|
||||
FROM debian:trixie-slim@sha256:f6e2cfac5cf956ea044b4bd75e6397b4372ad88fe00908045e9a0d21712ae3ba AS dev
|
||||
FROM debian:trixie-slim@sha256:1d3c811171a08a5adaa4a163fbafd96b61b87aa871bbc7aa15431ac275d3d430 AS dev
|
||||
|
||||
# Install essential runtime dependencies only (use docker-compose.override.yml for dev tools)
|
||||
RUN apt-get update && apt-get install -y \
|
||||
|
||||
51
PR_DESCRIPTION_UPDATE.md
Normal file
51
PR_DESCRIPTION_UPDATE.md
Normal file
@ -0,0 +1,51 @@
|
||||
## Android Phase 3 - Agent Integration
|
||||
|
||||
This PR implements the Android client for ZeroClaw with full agent integration, including foreground service, Quick Settings tile, boot receiver, and background heartbeat support.
|
||||
|
||||
### Changes
|
||||
- `ZeroClawApp.kt` - Application setup with notification channels and WorkManager
|
||||
- `SettingsRepository.kt` - DataStore + EncryptedSharedPreferences for secure settings
|
||||
- `SettingsScreen.kt` - Compose UI for configuring the agent
|
||||
- `BootReceiver.kt` - Auto-start on boot when enabled
|
||||
- `HeartbeatWorker.kt` - Background periodic tasks via WorkManager
|
||||
- `ZeroClawTileService.kt` - Quick Settings tile for agent control
|
||||
- `ShareHandler.kt` - Handle content shared from other apps
|
||||
- `ci-android.yml` - GitHub Actions workflow for Android builds
|
||||
- `proguard-rules.pro` - R8 optimization rules
|
||||
|
||||
---
|
||||
|
||||
## Validation Evidence
|
||||
|
||||
- [x] All HIGH and MEDIUM CodeRabbit issues addressed
|
||||
- [x] DataStore IOException handling added to prevent crashes on corrupted preferences
|
||||
- [x] BootReceiver double `pendingResult.finish()` call removed
|
||||
- [x] `text/uri-list` MIME type routed correctly in ShareHandler
|
||||
- [x] API 34+ PendingIntent overload added to TileService
|
||||
- [x] Kotlin Intrinsics null checks preserved in ProGuard rules
|
||||
- [x] HeartbeatWorker enforces 15-minute minimum and uses UPDATE policy
|
||||
- [x] SettingsScreen refreshes battery optimization state on resume
|
||||
- [x] ZeroClawApp listens for settings changes to update heartbeat schedule
|
||||
- [x] Trailing whitespace removed from all Kotlin files
|
||||
- [ ] Manual testing: Build and install on Android 14 device (pending)
|
||||
|
||||
## Security Impact
|
||||
|
||||
- **API Keys**: Stored in Android Keystore via EncryptedSharedPreferences (AES-256-GCM)
|
||||
- **Permissions**: RECEIVE_BOOT_COMPLETED, FOREGROUND_SERVICE, POST_NOTIFICATIONS
|
||||
- **Data in Transit**: All API calls use HTTPS
|
||||
- **No New Vulnerabilities**: No raw SQL, no WebView JavaScript, no exported components without protection
|
||||
|
||||
## Privacy and Data Hygiene
|
||||
|
||||
- **Local Storage Only**: All settings stored on-device, nothing transmitted except to configured AI provider
|
||||
- **No Analytics**: No third-party analytics or tracking SDKs
|
||||
- **User Control**: API key can be cleared via settings, auto-start is opt-in
|
||||
- **Minimal Permissions**: Only requests permissions necessary for core functionality
|
||||
|
||||
## Rollback Plan
|
||||
|
||||
1. **Feature Flag**: Not yet implemented; can be added if needed
|
||||
2. **Version Pinning**: Users can stay on previous APK version
|
||||
3. **Clean Uninstall**: All data stored in app's private directory, removed on uninstall
|
||||
4. **Server-Side**: No backend changes required; rollback is client-only
|
||||
884
README.fr.md
884
README.fr.md
@ -1,884 +0,0 @@
|
||||
<p align="center">
|
||||
<img src="zeroclaw.png" alt="ZeroClaw" width="200" />
|
||||
</p>
|
||||
|
||||
<h1 align="center">ZeroClaw 🦀</h1>
|
||||
|
||||
<p align="center">
|
||||
<strong>Zéro surcharge. Zéro compromis. 100% Rust. 100% Agnostique.</strong><br>
|
||||
  ⚡️ <strong>Fonctionne sur du matériel à 10$ avec &lt;5 Mo de RAM : C'est 99% de mémoire en moins qu'OpenClaw et 98% moins cher qu'un Mac mini !</strong>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="LICENSE-APACHE"><img src="https://img.shields.io/badge/license-MIT%20OR%20Apache%202.0-blue.svg" alt="Licence : MIT ou Apache-2.0" /></a>
|
||||
<a href="NOTICE"><img src="https://img.shields.io/badge/contributors-27+-green.svg" alt="Contributeurs" /></a>
|
||||
<a href="https://buymeacoffee.com/argenistherose"><img src="https://img.shields.io/badge/Buy%20Me%20a%20Coffee-Donate-yellow.svg?style=flat&logo=buy-me-a-coffee" alt="Offrez-moi un café" /></a>
|
||||
<a href="https://x.com/zeroclawlabs?s=21"><img src="https://img.shields.io/badge/X-%40zeroclawlabs-000000?style=flat&logo=x&logoColor=white" alt="X : @zeroclawlabs" /></a>
|
||||
<a href="https://zeroclawlabs.cn/group.jpg"><img src="https://img.shields.io/badge/WeChat-Group-B7D7A8?logo=wechat&logoColor=white" alt="WeChat Group" /></a>
|
||||
<a href="https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search"><img src="https://img.shields.io/badge/Xiaohongshu-Official-FF2442?style=flat" alt="Xiaohongshu : Officiel" /></a>
|
||||
<a href="https://t.me/zeroclawlabs"><img src="https://img.shields.io/badge/Telegram-%40zeroclawlabs-26A5E4?style=flat&logo=telegram&logoColor=white" alt="Telegram : @zeroclawlabs" /></a>
|
||||
<a href="https://www.facebook.com/groups/zeroclaw"><img src="https://img.shields.io/badge/Facebook-Group-1877F2?style=flat&logo=facebook&logoColor=white" alt="Facebook Group" /></a>
|
||||
<a href="https://www.reddit.com/r/zeroclawlabs/"><img src="https://img.shields.io/badge/Reddit-r%2Fzeroclawlabs-FF4500?style=flat&logo=reddit&logoColor=white" alt="Reddit : r/zeroclawlabs" /></a>
|
||||
</p>
|
||||
<p align="center">
|
||||
Construit par des étudiants et membres des communautés Harvard, MIT et Sundai.Club.
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
🌐 <strong>Langues :</strong> <a href="README.md">English</a> · <a href="README.zh-CN.md">简体中文</a> · <a href="README.ja.md">日本語</a> · <a href="README.ru.md">Русский</a> · <a href="README.fr.md">Français</a> · <a href="README.vi.md">Tiếng Việt</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="#démarrage-rapide">Démarrage</a> |
|
||||
<a href="bootstrap.sh">Configuration en un clic</a> |
|
||||
<a href="docs/README.md">Hub Documentation</a> |
|
||||
<a href="docs/SUMMARY.md">Table des matières Documentation</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<strong>Accès rapides :</strong>
|
||||
<a href="docs/reference/README.md">Référence</a> ·
|
||||
<a href="docs/operations/README.md">Opérations</a> ·
|
||||
<a href="docs/troubleshooting.md">Dépannage</a> ·
|
||||
<a href="docs/security/README.md">Sécurité</a> ·
|
||||
<a href="docs/hardware/README.md">Matériel</a> ·
|
||||
<a href="docs/contributing/README.md">Contribuer</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<strong>Infrastructure d'assistant IA rapide, légère et entièrement autonome</strong><br />
|
||||
Déployez n'importe où. Échangez n'importe quoi.
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
ZeroClaw est le <strong>système d'exploitation runtime</strong> pour les workflows agentiques — une infrastructure qui abstrait les modèles, outils, mémoire et exécution pour construire des agents une fois et les exécuter partout.
|
||||
</p>
|
||||
|
||||
<p align="center"><code>Architecture pilotée par traits · runtime sécurisé par défaut · fournisseur/canal/outil interchangeables · tout est pluggable</code></p>
|
||||
|
||||
### 📢 Annonces
|
||||
|
||||
Utilisez ce tableau pour les avis importants (changements incompatibles, avis de sécurité, fenêtres de maintenance et bloqueurs de version).
|
||||
|
||||
| Date (UTC) | Niveau | Avis | Action |
|
||||
| ---------- | ----------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| 2026-02-19 | _Critique_ | Nous ne sommes **pas affiliés** à `openagen/zeroclaw` ou `zeroclaw.org`. Le domaine `zeroclaw.org` pointe actuellement vers le fork `openagen/zeroclaw`, et ce domaine/dépôt usurpe l'identité de notre site web/projet officiel. | Ne faites pas confiance aux informations, binaires, levées de fonds ou annonces provenant de ces sources. Utilisez uniquement [ce dépôt](https://github.com/zeroclaw-labs/zeroclaw) et nos comptes sociaux vérifiés. |
|
||||
| 2026-02-21 | _Important_ | Notre site officiel est désormais en ligne : [zeroclawlabs.ai](https://zeroclawlabs.ai). Merci pour votre patience pendant cette attente. Nous constatons toujours des tentatives d'usurpation : ne participez à aucune activité d'investissement/financement au nom de ZeroClaw si elle n'est pas publiée via nos canaux officiels. | Utilisez [ce dépôt](https://github.com/zeroclaw-labs/zeroclaw) comme source unique de vérité. Suivez [X (@zeroclawlabs)](https://x.com/zeroclawlabs?s=21), [Telegram (@zeroclawlabs)](https://t.me/zeroclawlabs), [Facebook (groupe)](https://www.facebook.com/groups/zeroclaw), [Reddit (r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/), et [Xiaohongshu](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) pour les mises à jour officielles. |
|
||||
| 2026-02-19 | _Important_ | Anthropic a mis à jour les conditions d'utilisation de l'authentification et des identifiants le 2026-02-19. L'authentification OAuth (Free, Pro, Max) est exclusivement destinée à Claude Code et Claude.ai ; l'utilisation de tokens OAuth de Claude Free/Pro/Max dans tout autre produit, outil ou service (y compris Agent SDK) n'est pas autorisée et peut violer les Conditions d'utilisation grand public. | Veuillez temporairement éviter les intégrations OAuth de Claude Code pour prévenir toute perte potentielle. Clause originale : [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use). |
|
||||
|
||||
### ✨ Fonctionnalités
|
||||
|
||||
- 🏎️ **Runtime Léger par Défaut :** Les workflows CLI courants et de statut s'exécutent dans une enveloppe mémoire de quelques mégaoctets sur les builds de production.
|
||||
- 💰 **Déploiement Économique :** Conçu pour les cartes à faible coût et les petites instances cloud sans dépendances runtime lourdes.
|
||||
- ⚡ **Démarrages à Froid Rapides :** Le runtime Rust mono-binaire maintient le démarrage des commandes et démons quasi instantané pour les opérations quotidiennes.
|
||||
- 🌍 **Architecture Portable :** Un workflow binaire unique sur ARM, x86 et RISC-V avec fournisseurs/canaux/outils interchangeables.
|
||||
|
||||
### Pourquoi les équipes choisissent ZeroClaw
|
||||
|
||||
- **Léger par défaut :** petit binaire Rust, démarrage rapide, empreinte mémoire faible.
|
||||
- **Sécurisé par conception :** appairage, sandboxing strict, listes d'autorisation explicites, portée de workspace.
|
||||
- **Entièrement interchangeable :** les systèmes centraux sont des traits (fournisseurs, canaux, outils, mémoire, tunnels).
|
||||
- **Aucun verrouillage :** support de fournisseur compatible OpenAI + endpoints personnalisés pluggables.
|
||||
|
||||
## Instantané de Benchmark (ZeroClaw vs OpenClaw, Reproductible)
|
||||
|
||||
Benchmark rapide sur machine locale (macOS arm64, fév. 2026) normalisé pour matériel edge 0.8 GHz.
|
||||
|
||||
| | OpenClaw | NanoBot | PicoClaw | ZeroClaw 🦀 |
|
||||
| ---------------------------- | ------------- | -------------- | --------------- | --------------------- |
|
||||
| **Langage** | TypeScript | Python | Go | **Rust** |
|
||||
| **RAM** | > 1 Go | > 100 Mo | < 10 Mo | **< 5 Mo** |
|
||||
| **Démarrage (cœur 0.8 GHz)** | > 500s | > 30s | < 1s | **< 10ms** |
|
||||
| **Taille Binaire** | ~28 Mo (dist) | N/A (Scripts) | ~8 Mo | **3.4 Mo** |
|
||||
| **Coût** | Mac Mini 599$ | Linux SBC ~50$ | Carte Linux 10$ | **Tout matériel 10$** |
|
||||
|
||||
> Notes : Les résultats ZeroClaw sont mesurés sur des builds de production utilisant `/usr/bin/time -l`. OpenClaw nécessite le runtime Node.js (typiquement ~390 Mo de surcharge mémoire supplémentaire), tandis que NanoBot nécessite le runtime Python. PicoClaw et ZeroClaw sont des binaires statiques. Les chiffres RAM ci-dessus sont la mémoire runtime ; les exigences de compilation build-time sont plus élevées.
|
||||
|
||||
<p align="center">
|
||||
<img src="zero-claw.jpeg" alt="Comparaison ZeroClaw vs OpenClaw" width="800" />
|
||||
</p>
|
||||
|
||||
### Mesure locale reproductible
|
||||
|
||||
Les affirmations de benchmark peuvent dériver au fil de l'évolution du code et des toolchains, donc mesurez toujours votre build actuel localement :
|
||||
|
||||
```bash
|
||||
cargo build --release
|
||||
ls -lh target/release/zeroclaw
|
||||
|
||||
/usr/bin/time -l target/release/zeroclaw --help
|
||||
/usr/bin/time -l target/release/zeroclaw status
|
||||
```
|
||||
|
||||
Exemple d'échantillon (macOS arm64, mesuré le 18 février 2026) :
|
||||
|
||||
- Taille binaire release : `8.8M`
|
||||
- `zeroclaw --help` : environ `0.02s` de temps réel, ~`3.9 Mo` d'empreinte mémoire maximale
|
||||
- `zeroclaw status` : environ `0.01s` de temps réel, ~`4.1 Mo` d'empreinte mémoire maximale
|
||||
|
||||
## Prérequis
|
||||
|
||||
<details>
|
||||
<summary><strong>Windows</strong></summary>
|
||||
|
||||
### Windows — Requis
|
||||
|
||||
1. **Visual Studio Build Tools** (fournit le linker MSVC et le Windows SDK) :
|
||||
|
||||
```powershell
|
||||
winget install Microsoft.VisualStudio.2022.BuildTools
|
||||
```
|
||||
|
||||
Pendant l'installation (ou via le Visual Studio Installer), sélectionnez la charge de travail **"Développement Desktop en C++"**.
|
||||
|
||||
2. **Toolchain Rust :**
|
||||
|
||||
```powershell
|
||||
winget install Rustlang.Rustup
|
||||
```
|
||||
|
||||
Après l'installation, ouvrez un nouveau terminal et exécutez `rustup default stable` pour vous assurer que la toolchain stable est active.
|
||||
|
||||
3. **Vérifiez** que les deux fonctionnent :
|
||||
```powershell
|
||||
rustc --version
|
||||
cargo --version
|
||||
```
|
||||
|
||||
### Windows — Optionnel
|
||||
|
||||
- **Docker Desktop** — requis seulement si vous utilisez le [runtime sandboxé Docker](#support-runtime-actuel) (`runtime.kind = "docker"`). Installez via `winget install Docker.DockerDesktop`.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Linux / macOS</strong></summary>
|
||||
|
||||
### Linux / macOS — Requis
|
||||
|
||||
1. **Outils de build essentiels :**
|
||||
- **Linux (Debian/Ubuntu) :** `sudo apt install build-essential pkg-config`
|
||||
- **Linux (Fedora/RHEL) :** `sudo dnf group install development-tools && sudo dnf install pkg-config`
|
||||
- **macOS :** Installez les Outils de Ligne de Commande Xcode : `xcode-select --install`
|
||||
|
||||
2. **Toolchain Rust :**
|
||||
|
||||
```bash
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
|
||||
```
|
||||
|
||||
Voir [rustup.rs](https://rustup.rs) pour les détails.
|
||||
|
||||
3. **Vérifiez :**
|
||||
```bash
|
||||
rustc --version
|
||||
cargo --version
|
||||
```
|
||||
|
||||
### Linux / macOS — Optionnel
|
||||
|
||||
- **Docker** — requis seulement si vous utilisez le [runtime sandboxé Docker](#support-runtime-actuel) (`runtime.kind = "docker"`).
|
||||
- **Linux (Debian/Ubuntu) :** voir [docs.docker.com](https://docs.docker.com/engine/install/ubuntu/)
|
||||
- **Linux (Fedora/RHEL) :** voir [docs.docker.com](https://docs.docker.com/engine/install/fedora/)
|
||||
- **macOS :** installez Docker Desktop via [docker.com/products/docker-desktop](https://www.docker.com/products/docker-desktop/)
|
||||
|
||||
</details>
|
||||
|
||||
## Démarrage Rapide
|
||||
|
||||
### Option 1 : Configuration automatisée (recommandée)
|
||||
|
||||
Le script `bootstrap.sh` installe Rust, clone ZeroClaw, le compile, et configure votre environnement de développement initial :
|
||||
|
||||
```bash
|
||||
curl -fsSL https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/bootstrap.sh | bash
|
||||
```
|
||||
|
||||
Ceci va :
|
||||
|
||||
1. Installer Rust (si absent)
|
||||
2. Cloner le dépôt ZeroClaw
|
||||
3. Compiler ZeroClaw en mode release
|
||||
4. Installer `zeroclaw` dans `~/.cargo/bin/`
|
||||
5. Créer la structure de workspace par défaut dans `~/.zeroclaw/workspace/`
|
||||
6. Générer un fichier de configuration `~/.zeroclaw/workspace/config.toml` de démarrage
|
||||
|
||||
Après le bootstrap, relancez votre shell ou exécutez `source ~/.cargo/env` pour utiliser la commande `zeroclaw` globalement.
|
||||
|
||||
### Option 2 : Installation manuelle
|
||||
|
||||
<details>
|
||||
<summary><strong>Cliquez pour voir les étapes d'installation manuelle</strong></summary>
|
||||
|
||||
```bash
|
||||
# 1. Clonez le dépôt
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
|
||||
# 2. Compilez en release
|
||||
cargo build --release --locked
|
||||
|
||||
# 3. Installez le binaire
|
||||
cargo install --path . --locked
|
||||
|
||||
# 4. Initialisez le workspace
|
||||
zeroclaw init
|
||||
|
||||
# 5. Vérifiez l'installation
|
||||
zeroclaw --version
|
||||
zeroclaw status
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Après l'installation
|
||||
|
||||
Une fois installé (via bootstrap ou manuellement), vous devriez voir :
|
||||
|
||||
```
|
||||
~/.zeroclaw/workspace/
|
||||
├── config.toml # Configuration principale
|
||||
├── .pairing # Secrets de pairing (généré au premier lancement)
|
||||
├── logs/ # Journaux de daemon/agent
|
||||
├── skills/ # Compétences personnalisées
|
||||
└── memory/ # Stockage de contexte conversationnel
|
||||
```
|
||||
|
||||
**Prochaines étapes :**
|
||||
|
||||
1. Configurez vos fournisseurs d'IA dans `~/.zeroclaw/workspace/config.toml`
|
||||
2. Consultez la [référence de configuration](docs/config-reference.md) pour les options avancées
|
||||
3. Lancez l'agent : `zeroclaw agent start`
|
||||
4. Testez via votre canal préféré (voir [référence des canaux](docs/channels-reference.md))
|
||||
|
||||
## Configuration
|
||||
|
||||
Éditez `~/.zeroclaw/workspace/config.toml` pour configurer les fournisseurs, canaux et comportement du système.
|
||||
|
||||
### Référence de Configuration Rapide
|
||||
|
||||
```toml
|
||||
[providers.anthropic]
|
||||
api_key = "sk-ant-..."
|
||||
model = "claude-sonnet-4-20250514"
|
||||
|
||||
[providers.openai]
|
||||
api_key = "sk-..."
|
||||
model = "gpt-4o"
|
||||
|
||||
[channels.telegram]
|
||||
enabled = true
|
||||
bot_token = "123456:ABC-DEF..."
|
||||
|
||||
[channels.matrix]
|
||||
enabled = true
|
||||
homeserver_url = "https://matrix.org"
|
||||
username = "@bot:matrix.org"
|
||||
password = "..."
|
||||
|
||||
[memory]
|
||||
kind = "markdown" # ou "sqlite" ou "none"
|
||||
|
||||
[runtime]
|
||||
kind = "native" # ou "docker" (nécessite Docker)
|
||||
```
|
||||
|
||||
**Documents de référence complets :**
|
||||
|
||||
- [Référence de Configuration](docs/config-reference.md) — tous les paramètres, validations, valeurs par défaut
|
||||
- [Référence des Fournisseurs](docs/providers-reference.md) — configurations spécifiques aux fournisseurs d'IA
|
||||
- [Référence des Canaux](docs/channels-reference.md) — Telegram, Matrix, Slack, Discord et plus
|
||||
- [Opérations](docs/operations-runbook.md) — surveillance en production, rotation des secrets, mise à l'échelle
|
||||
|
||||
### Support Runtime (actuel)
|
||||
|
||||
ZeroClaw prend en charge deux backends d'exécution de code :
|
||||
|
||||
- **`native`** (par défaut) — exécution de processus directe, chemin le plus rapide, idéal pour les environnements de confiance
|
||||
- **`docker`** — isolation complète du conteneur, politiques de sécurité renforcées, nécessite Docker
|
||||
|
||||
Utilisez `runtime.kind = "docker"` si vous avez besoin d'un sandboxing strict ou de l'isolation réseau. Voir [référence de configuration](docs/config-reference.md#runtime) pour les détails complets.
|
||||
|
||||
## Commandes
|
||||
|
||||
```bash
|
||||
# Gestion du workspace
|
||||
zeroclaw init # Initialise un nouveau workspace
|
||||
zeroclaw status # Affiche l'état du daemon/agent
|
||||
zeroclaw config validate # Vérifie la syntaxe et les valeurs de config.toml
|
||||
|
||||
# Gestion du daemon
|
||||
zeroclaw daemon start # Démarre le daemon en arrière-plan
|
||||
zeroclaw daemon stop # Arrête le daemon en cours d'exécution
|
||||
zeroclaw daemon restart # Redémarre le daemon (rechargement de config)
|
||||
zeroclaw daemon logs # Affiche les journaux du daemon
|
||||
|
||||
# Gestion de l'agent
|
||||
zeroclaw agent start # Démarre l'agent (nécessite daemon en cours d'exécution)
|
||||
zeroclaw agent stop # Arrête l'agent
|
||||
zeroclaw agent restart # Redémarre l'agent (rechargement de config)
|
||||
|
||||
# Opérations de pairing
|
||||
zeroclaw pairing init # Génère un nouveau secret de pairing
|
||||
zeroclaw pairing rotate # Fait tourner le secret de pairing existant
|
||||
|
||||
# Tunneling (pour exposition publique)
|
||||
zeroclaw tunnel start # Démarre un tunnel vers le daemon local
|
||||
zeroclaw tunnel stop # Arrête le tunnel actif
|
||||
|
||||
# Diagnostic
|
||||
zeroclaw doctor # Exécute les vérifications de santé du système
|
||||
zeroclaw version # Affiche la version et les informations de build
|
||||
```
|
||||
|
||||
Voir [Référence des Commandes](docs/commands-reference.md) pour les options et exemples complets.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Canaux (trait) │
|
||||
│ Telegram │ Matrix │ Slack │ Discord │ Web │ CLI │ Custom │
|
||||
└─────────────────────────┬───────────────────────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Orchestrateur Agent │
|
||||
│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
|
||||
│ │ Routage │ │ Contexte │ │ Exécution │ │
|
||||
│ │ Message │ │ Mémoire │ │ Outil │ │
|
||||
│ └──────────────┘ └──────────────┘ └──────────────┘ │
|
||||
└─────────────────────────┬───────────────────────────────────────┘
|
||||
│
|
||||
┌───────────────┼───────────────┐
|
||||
▼ ▼ ▼
|
||||
┌──────────────┐ ┌──────────────┐ ┌──────────────┐
|
||||
│ Fournisseurs │ │ Mémoire │ │ Outils │
|
||||
│ (trait) │ │ (trait) │ │ (trait) │
|
||||
├──────────────┤ ├──────────────┤ ├──────────────┤
|
||||
│ Anthropic │ │ Markdown │ │ Filesystem │
|
||||
│ OpenAI │ │ SQLite │ │ Bash │
|
||||
│ Gemini │ │ None │ │ Web Fetch │
|
||||
│ Ollama │ │ Custom │ │ Custom │
|
||||
│ Custom │ └──────────────┘ └──────────────┘
|
||||
└──────────────┘
|
||||
│
|
||||
▼
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Runtime (trait) │
|
||||
│ Native │ Docker │
|
||||
└─────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
**Principes clés :**
|
||||
|
||||
- Tout est un **trait** — fournisseurs, canaux, outils, mémoire, tunnels
|
||||
- Les canaux appellent l'orchestrateur ; l'orchestrateur appelle les fournisseurs + outils
|
||||
- Le système mémoire gère le contexte conversationnel (markdown, SQLite, ou aucun)
|
||||
- Le runtime abstrait l'exécution de code (natif ou Docker)
|
||||
- Aucun verrouillage de fournisseur — échangez Anthropic ↔ OpenAI ↔ Gemini ↔ Ollama sans changement de code
|
||||
|
||||
Voir [documentation architecture](docs/architecture.svg) pour les diagrammes détaillés et les détails d'implémentation.
|
||||
|
||||
## Exemples
|
||||
|
||||
### Telegram Bot
|
||||
|
||||
```toml
|
||||
[channels.telegram]
|
||||
enabled = true
|
||||
bot_token = "123456:ABC-DEF..."
|
||||
allowed_users = [987654321] # Votre Telegram user ID
|
||||
```
|
||||
|
||||
Démarrez le daemon + agent, puis envoyez un message à votre bot sur Telegram :
|
||||
|
||||
```
|
||||
/start
|
||||
Bonjour ! Pouvez-vous m'aider à écrire un script Python ?
|
||||
```
|
||||
|
||||
Le bot répond avec le code généré par l'IA, exécute les outils si demandé, et conserve le contexte de conversation.
|
||||
|
||||
### Matrix (chiffré de bout en bout)
|
||||
|
||||
```toml
|
||||
[channels.matrix]
|
||||
enabled = true
|
||||
homeserver_url = "https://matrix.org"
|
||||
username = "@zeroclaw:matrix.org"
|
||||
password = "..."
|
||||
device_name = "zeroclaw-prod"
|
||||
e2ee_enabled = true
|
||||
```
|
||||
|
||||
Invitez `@zeroclaw:matrix.org` dans une salle chiffrée, et le bot répondra avec le chiffrement complet. Voir [Guide Matrix E2EE](docs/matrix-e2ee-guide.md) pour la configuration de vérification de dispositif.
|
||||
|
||||
### Multi-Fournisseur
|
||||
|
||||
```toml
|
||||
[providers.anthropic]
|
||||
enabled = true
|
||||
api_key = "sk-ant-..."
|
||||
model = "claude-sonnet-4-20250514"
|
||||
|
||||
[providers.openai]
|
||||
enabled = true
|
||||
api_key = "sk-..."
|
||||
model = "gpt-4o"
|
||||
|
||||
[orchestrator]
|
||||
default_provider = "anthropic"
|
||||
fallback_providers = ["openai"] # Bascule en cas d'erreur du fournisseur
|
||||
```
|
||||
|
||||
Si Anthropic échoue ou rate-limit, l'orchestrateur bascule automatiquement vers OpenAI.
|
||||
|
||||
### Mémoire Personnalisée
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
kind = "sqlite"
|
||||
path = "~/.zeroclaw/workspace/memory/conversations.db"
|
||||
retention_days = 90 # Purge automatique après 90 jours
|
||||
```
|
||||
|
||||
Ou utilisez Markdown pour un stockage lisible par l'humain :
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
kind = "markdown"
|
||||
path = "~/.zeroclaw/workspace/memory/"
|
||||
```
|
||||
|
||||
Voir [Référence de Configuration](docs/config-reference.md#memory) pour toutes les options mémoire.
|
||||
|
||||
## Support de Fournisseur
|
||||
|
||||
| Fournisseur | Statut | Clé API | Modèles Exemple |
|
||||
| ----------------- | ----------- | ------------------- | ---------------------------------------------------- |
|
||||
| **Anthropic** | ✅ Stable | `ANTHROPIC_API_KEY` | `claude-sonnet-4-20250514`, `claude-opus-4-20250514` |
|
||||
| **OpenAI** | ✅ Stable | `OPENAI_API_KEY` | `gpt-4o`, `gpt-4o-mini`, `o1`, `o1-mini` |
|
||||
| **Google Gemini** | ✅ Stable | `GOOGLE_API_KEY` | `gemini-2.0-flash-exp`, `gemini-exp-1206` |
|
||||
| **Ollama** | ✅ Stable | N/A (local) | `llama3.3`, `qwen2.5`, `phi4` |
|
||||
| **Cerebras** | ✅ Stable | `CEREBRAS_API_KEY` | `llama-3.3-70b` |
|
||||
| **Groq** | ✅ Stable | `GROQ_API_KEY` | `llama-3.3-70b-versatile` |
|
||||
| **Mistral** | 🚧 Planifié | `MISTRAL_API_KEY` | TBD |
|
||||
| **Cohere** | 🚧 Planifié | `COHERE_API_KEY` | TBD |
|
||||
|
||||
### Endpoints Personnalisés
|
||||
|
||||
ZeroClaw prend en charge les endpoints compatibles OpenAI :
|
||||
|
||||
```toml
|
||||
[providers.custom]
|
||||
enabled = true
|
||||
api_key = "..."
|
||||
base_url = "https://api.your-llm-provider.com/v1"
|
||||
model = "your-model-name"
|
||||
```
|
||||
|
||||
Exemple : utilisez [LiteLLM](https://github.com/BerriAI/litellm) comme proxy pour accéder à n'importe quel LLM via l'interface OpenAI.
|
||||
|
||||
Voir [Référence des Fournisseurs](docs/providers-reference.md) pour les détails de configuration complets.
|
||||
|
||||
## Support de Canal
|
||||
|
||||
| Canal | Statut | Authentification | Notes |
|
||||
| ------------ | ----------- | ------------------------ | --------------------------------------------------------- |
|
||||
| **Telegram** | ✅ Stable | Bot Token | Support complet incluant fichiers, images, boutons inline |
|
||||
| **Matrix** | ✅ Stable | Mot de passe ou Token | Support E2EE avec vérification de dispositif |
|
||||
| **Slack** | 🚧 Planifié | OAuth ou Bot Token | Accès workspace requis |
|
||||
| **Discord** | 🚧 Planifié | Bot Token | Permissions guild requises |
|
||||
| **WhatsApp** | 🚧 Planifié | Twilio ou API officielle | Compte business requis |
|
||||
| **CLI** | ✅ Stable | Aucun | Interface conversationnelle directe |
|
||||
| **Web** | 🚧 Planifié | Clé API ou OAuth | Interface de chat basée navigateur |
|
||||
|
||||
Voir [Référence des Canaux](docs/channels-reference.md) pour les instructions de configuration complètes.
|
||||
|
||||
## Support d'Outil
|
||||
|
||||
ZeroClaw fournit des outils intégrés pour l'exécution de code, l'accès au système de fichiers et la récupération web :
|
||||
|
||||
| Outil | Description | Runtime Requis |
|
||||
| -------------------- | --------------------------- | ----------------------------- |
|
||||
| **bash** | Exécute des commandes shell | Native ou Docker |
|
||||
| **python** | Exécute des scripts Python | Python 3.8+ (natif) ou Docker |
|
||||
| **javascript** | Exécute du code Node.js | Node.js 18+ (natif) ou Docker |
|
||||
| **filesystem_read** | Lit des fichiers | Native ou Docker |
|
||||
| **filesystem_write** | Écrit des fichiers | Native ou Docker |
|
||||
| **web_fetch** | Récupère du contenu web | Native ou Docker |
|
||||
|
||||
### Sécurité de l'Exécution
|
||||
|
||||
- **Runtime Natif** — s'exécute en tant que processus utilisateur du daemon, accès complet au système de fichiers
|
||||
- **Runtime Docker** — isolation complète du conteneur, systèmes de fichiers et réseaux séparés
|
||||
|
||||
Configurez la politique d'exécution dans `config.toml` :
|
||||
|
||||
```toml
|
||||
[runtime]
|
||||
kind = "docker"
|
||||
allowed_tools = ["bash", "python", "filesystem_read"] # Liste d'autorisation explicite
|
||||
```
|
||||
|
||||
Voir [Référence de Configuration](docs/config-reference.md#runtime) pour les options de sécurité complètes.
|
||||
|
||||
## Déploiement
|
||||
|
||||
### Déploiement Local (Développement)
|
||||
|
||||
```bash
|
||||
zeroclaw daemon start
|
||||
zeroclaw agent start
|
||||
```
|
||||
|
||||
### Déploiement Serveur (Production)
|
||||
|
||||
Utilisez systemd pour gérer le daemon et l'agent en tant que services :
|
||||
|
||||
```bash
|
||||
# Installez le binaire
|
||||
cargo install --path . --locked
|
||||
|
||||
# Configurez le workspace
|
||||
zeroclaw init
|
||||
|
||||
# Créez les fichiers de service systemd
|
||||
sudo cp deployment/systemd/zeroclaw-daemon.service /etc/systemd/system/
|
||||
sudo cp deployment/systemd/zeroclaw-agent.service /etc/systemd/system/
|
||||
|
||||
# Activez et démarrez les services
|
||||
sudo systemctl enable zeroclaw-daemon zeroclaw-agent
|
||||
sudo systemctl start zeroclaw-daemon zeroclaw-agent
|
||||
|
||||
# Vérifiez le statut
|
||||
sudo systemctl status zeroclaw-daemon
|
||||
sudo systemctl status zeroclaw-agent
|
||||
```
|
||||
|
||||
Voir [Guide de Déploiement Réseau](docs/network-deployment.md) pour les instructions de déploiement en production complètes.
|
||||
|
||||
### Docker
|
||||
|
||||
```bash
|
||||
# Compilez l'image
|
||||
docker build -t zeroclaw:latest .
|
||||
|
||||
# Exécutez le conteneur
|
||||
docker run -d \
|
||||
--name zeroclaw \
|
||||
-v ~/.zeroclaw/workspace:/workspace \
|
||||
-e ANTHROPIC_API_KEY=sk-ant-... \
|
||||
zeroclaw:latest
|
||||
```
|
||||
|
||||
Voir [`Dockerfile`](Dockerfile) pour les détails de construction et les options de configuration.
|
||||
|
||||
### Matériel Edge
|
||||
|
||||
ZeroClaw est conçu pour fonctionner sur du matériel à faible consommation d'énergie :
|
||||
|
||||
- **Raspberry Pi Zero 2 W** — ~512 Mo RAM, cœur ARMv8 simple, <5$ coût matériel
|
||||
- **Raspberry Pi 4/5** — 1 Go+ RAM, multi-cœur, idéal pour les charges de travail concurrentes
|
||||
- **Orange Pi Zero 2** — ~512 Mo RAM, quad-core ARMv8, coût ultra-faible
|
||||
- **SBCs x86 (Intel N100)** — 4-8 Go RAM, builds rapides, support Docker natif
|
||||
|
||||
Voir [Guide du Matériel](docs/hardware/README.md) pour les instructions de configuration spécifiques aux dispositifs.
|
||||
|
||||
## Tunneling (Exposition Publique)
|
||||
|
||||
Exposez votre daemon ZeroClaw local au réseau public via des tunnels sécurisés :
|
||||
|
||||
```bash
|
||||
zeroclaw tunnel start --provider cloudflare
|
||||
```
|
||||
|
||||
Fournisseurs de tunnel supportés :
|
||||
|
||||
- **Cloudflare Tunnel** — HTTPS gratuit, aucune exposition de port, support multi-domaine
|
||||
- **Ngrok** — configuration rapide, domaines personnalisés (plan payant)
|
||||
- **Tailscale** — réseau maillé privé, pas de port public
|
||||
|
||||
Voir [Référence de Configuration](docs/config-reference.md#tunnel) pour les options de configuration complètes.
|
||||
|
||||
## Sécurité
|
||||
|
||||
ZeroClaw implémente plusieurs couches de sécurité :
|
||||
|
||||
### Pairing
|
||||
|
||||
Le daemon génère un secret de pairing au premier lancement, stocké dans `~/.zeroclaw/workspace/.pairing`. Les clients (agent, CLI) doivent présenter ce secret pour se connecter.
|
||||
|
||||
```bash
|
||||
zeroclaw pairing rotate # Génère un nouveau secret et invalide l'ancien
|
||||
```
|
||||
|
||||
### Sandboxing
|
||||
|
||||
- **Runtime Docker** — isolation complète du conteneur avec systèmes de fichiers et réseaux séparés
|
||||
- **Runtime Natif** — exécute en tant que processus utilisateur, scoped au workspace par défaut
|
||||
|
||||
### Listes d'Autorisation
|
||||
|
||||
Les canaux peuvent restreindre l'accès par ID utilisateur :
|
||||
|
||||
```toml
|
||||
[channels.telegram]
|
||||
enabled = true
|
||||
allowed_users = [123456789, 987654321] # Liste d'autorisation explicite
|
||||
```
|
||||
|
||||
### Chiffrement
|
||||
|
||||
- **Matrix E2EE** — chiffrement de bout en bout complet avec vérification de dispositif
|
||||
- **Transport TLS** — tout le trafic API et tunnel utilise HTTPS/TLS
|
||||
|
||||
Voir [Documentation Sécurité](docs/security/README.md) pour les politiques et pratiques complètes.
|
||||
|
||||
## Observabilité
|
||||
|
||||
ZeroClaw journalise vers `~/.zeroclaw/workspace/logs/` par défaut. Les journaux sont stockés par composant :
|
||||
|
||||
```
|
||||
~/.zeroclaw/workspace/logs/
|
||||
├── daemon.log # Journaux du daemon (startup, requêtes API, erreurs)
|
||||
├── agent.log # Journaux de l'agent (routage message, exécution outil)
|
||||
├── telegram.log # Journaux spécifiques au canal (si activé)
|
||||
└── matrix.log # Journaux spécifiques au canal (si activé)
|
||||
```
|
||||
|
||||
### Configuration de Journalisation
|
||||
|
||||
```toml
|
||||
[logging]
|
||||
level = "info" # debug, info, warn, error
|
||||
path = "~/.zeroclaw/workspace/logs/"
|
||||
rotation = "daily" # daily, hourly, size
|
||||
max_size_mb = 100 # Pour rotation basée sur la taille
|
||||
retention_days = 30 # Purge automatique après N jours
|
||||
```
|
||||
|
||||
Voir [Référence de Configuration](docs/config-reference.md#logging) pour toutes les options de journalisation.
|
||||
|
||||
### Métriques (Planifié)
|
||||
|
||||
Support de métriques Prometheus pour la surveillance en production à venir. Suivi dans [#234](https://github.com/zeroclaw-labs/zeroclaw/issues/234).
|
||||
|
||||
## Compétences (Skills)
|
||||
|
||||
ZeroClaw prend en charge les compétences personnalisées — des modules réutilisables qui étendent les capacités du système.
|
||||
|
||||
### Définition de Compétence
|
||||
|
||||
Les compétences sont stockées dans `~/.zeroclaw/workspace/skills/<nom-compétence>/` avec cette structure :
|
||||
|
||||
```
|
||||
skills/
|
||||
└── ma-compétence/
|
||||
├── skill.toml # Métadonnées de compétence (nom, description, dépendances)
|
||||
├── prompt.md # Prompt système pour l'IA
|
||||
└── tools/ # Outils personnalisés optionnels
|
||||
└── mon_outil.py
|
||||
```
|
||||
|
||||
### Exemple de Compétence
|
||||
|
||||
```toml
|
||||
# skills/recherche-web/skill.toml
|
||||
[skill]
|
||||
name = "recherche-web"
|
||||
description = "Recherche sur le web et résume les résultats"
|
||||
version = "1.0.0"
|
||||
|
||||
[dependencies]
|
||||
tools = ["web_fetch", "bash"]
|
||||
```
|
||||
|
||||
```markdown
|
||||
<!-- skills/recherche-web/prompt.md -->
|
||||
|
||||
Tu es un assistant de recherche. Lorsqu'on te demande de rechercher quelque chose :
|
||||
|
||||
1. Utilise web_fetch pour récupérer le contenu
|
||||
2. Résume les résultats dans un format facile à lire
|
||||
3. Cite les sources avec des URLs
|
||||
```
|
||||
|
||||
### Utilisation de Compétences
|
||||
|
||||
Les compétences sont chargées automatiquement au démarrage de l'agent. Référencez-les par nom dans les conversations :
|
||||
|
||||
```
|
||||
Utilisateur : Utilise la compétence recherche-web pour trouver les dernières actualités IA
|
||||
Bot : [charge la compétence recherche-web, exécute web_fetch, résume les résultats]
|
||||
```
|
||||
|
||||
Voir la section [Définition de Compétence](#définition-de-compétence) ci-dessus pour les instructions complètes de création de compétences.
|
||||
|
||||
## Open Skills
|
||||
|
||||
ZeroClaw prend en charge les [Open Skills](https://github.com/openagents-com/open-skills) — un système modulaire et agnostique des fournisseurs pour étendre les capacités des agents IA.
|
||||
|
||||
### Activer Open Skills
|
||||
|
||||
```toml
|
||||
[skills]
|
||||
open_skills_enabled = true
|
||||
# open_skills_dir = "/path/to/open-skills" # optionnel
|
||||
```
|
||||
|
||||
Vous pouvez également surcharger au runtime avec `ZEROCLAW_OPEN_SKILLS_ENABLED` et `ZEROCLAW_OPEN_SKILLS_DIR`.
|
||||
|
||||
## Développement
|
||||
|
||||
```bash
|
||||
cargo build # Build de développement
|
||||
cargo build --release # Build release (codegen-units=1, fonctionne sur tous les dispositifs incluant Raspberry Pi)
|
||||
cargo build --profile release-fast # Build plus rapide (codegen-units=8, nécessite 16 Go+ RAM)
|
||||
cargo test # Exécute la suite de tests complète
|
||||
cargo clippy --locked --all-targets -- -D clippy::correctness
|
||||
cargo fmt # Format
|
||||
|
||||
# Exécute le benchmark de comparaison SQLite vs Markdown
|
||||
cargo test --test memory_comparison -- --nocapture
|
||||
```
|
||||
|
||||
### Hook pre-push
|
||||
|
||||
Un hook git exécute `cargo fmt --check`, `cargo clippy -- -D warnings`, et `cargo test` avant chaque push. Activez-le une fois :
|
||||
|
||||
```bash
|
||||
git config core.hooksPath .githooks
|
||||
```
|
||||
|
||||
### Dépannage de Build (erreurs OpenSSL sur Linux)
|
||||
|
||||
Si vous rencontrez une erreur de build `openssl-sys`, synchronisez les dépendances et recompilez avec le lockfile du dépôt :
|
||||
|
||||
```bash
|
||||
git pull
|
||||
cargo build --release --locked
|
||||
cargo install --path . --force --locked
|
||||
```
|
||||
|
||||
ZeroClaw est configuré pour utiliser `rustls` pour les dépendances HTTP/TLS ; `--locked` maintient le graphe transitif déterministe sur les environnements vierges.
|
||||
|
||||
Pour sauter le hook lorsque vous avez besoin d'un push rapide pendant le développement :
|
||||
|
||||
```bash
|
||||
git push --no-verify
|
||||
```
|
||||
|
||||
## Collaboration & Docs
|
||||
|
||||
Commencez par le hub de documentation pour une carte basée sur les tâches :
|
||||
|
||||
- Hub de documentation : [`docs/README.md`](docs/README.md)
|
||||
- Table des matières unifiée docs : [`docs/SUMMARY.md`](docs/SUMMARY.md)
|
||||
- Référence des commandes : [`docs/commands-reference.md`](docs/commands-reference.md)
|
||||
- Référence de configuration : [`docs/config-reference.md`](docs/config-reference.md)
|
||||
- Référence des fournisseurs : [`docs/providers-reference.md`](docs/providers-reference.md)
|
||||
- Référence des canaux : [`docs/channels-reference.md`](docs/channels-reference.md)
|
||||
- Runbook des opérations : [`docs/operations-runbook.md`](docs/operations-runbook.md)
|
||||
- Dépannage : [`docs/troubleshooting.md`](docs/troubleshooting.md)
|
||||
- Inventaire/classification docs : [`docs/docs-inventory.md`](docs/docs-inventory.md)
|
||||
- Instantané triage PR/Issue (au 18 février 2026) : [`docs/project-triage-snapshot-2026-02-18.md`](docs/project-triage-snapshot-2026-02-18.md)
|
||||
|
||||
Références de collaboration principales :
|
||||
|
||||
- Hub de documentation : [docs/README.md](docs/README.md)
|
||||
- Modèle de documentation : [docs/doc-template.md](docs/doc-template.md)
|
||||
- Checklist de modification de documentation : [docs/README.md#4-documentation-change-checklist](docs/README.md#4-documentation-change-checklist)
|
||||
- Référence de configuration des canaux : [docs/channels-reference.md](docs/channels-reference.md)
|
||||
- Opérations de salles chiffrées Matrix : [docs/matrix-e2ee-guide.md](docs/matrix-e2ee-guide.md)
|
||||
- Guide de contribution : [CONTRIBUTING.md](CONTRIBUTING.md)
|
||||
- Politique de workflow PR : [docs/pr-workflow.md](docs/pr-workflow.md)
|
||||
- Playbook du relecteur (triage + revue approfondie) : [docs/reviewer-playbook.md](docs/reviewer-playbook.md)
|
||||
- Carte de propriété et triage CI : [docs/ci-map.md](docs/ci-map.md)
|
||||
- Politique de divulgation de sécurité : [SECURITY.md](SECURITY.md)
|
||||
|
||||
Pour le déploiement et les opérations runtime :
|
||||
|
||||
- Guide de déploiement réseau : [docs/network-deployment.md](docs/network-deployment.md)
|
||||
- Playbook d'agent proxy : [docs/proxy-agent-playbook.md](docs/proxy-agent-playbook.md)
|
||||
|
||||
## Soutenir ZeroClaw
|
||||
|
||||
Si ZeroClaw aide votre travail et que vous souhaitez soutenir le développement continu, vous pouvez faire un don ici :
|
||||
|
||||
<a href="https://buymeacoffee.com/argenistherose"><img src="https://img.shields.io/badge/Buy%20Me%20a%20Coffee-Donate-yellow.svg?style=for-the-badge&logo=buy-me-a-coffee" alt="Offrez-moi un café" /></a>
|
||||
|
||||
### 🙏 Remerciements Spéciaux
|
||||
|
||||
Un remerciement sincère aux communautés et institutions qui inspirent et alimentent ce travail open-source :
|
||||
|
||||
- **Harvard University** — pour favoriser la curiosité intellectuelle et repousser les limites du possible.
|
||||
- **MIT** — pour défendre la connaissance ouverte, l'open source, et la conviction que la technologie devrait être accessible à tous.
|
||||
- **Sundai Club** — pour la communauté, l'énergie, et la volonté incessante de construire des choses qui comptent.
|
||||
- **Le Monde & Au-Delà** 🌍✨ — à chaque contributeur, rêveur, et constructeur là-bas qui fait de l'open source une force pour le bien. C'est pour vous.
|
||||
|
||||
Nous construisons en open source parce que les meilleures idées viennent de partout. Si vous lisez ceci, vous en faites partie. Bienvenue. 🦀❤️
|
||||
|
||||
## ⚠️ Dépôt Officiel & Avertissement d'Usurpation d'Identité
|
||||
|
||||
**Ceci est le seul dépôt officiel ZeroClaw :**
|
||||
|
||||
> <https://github.com/zeroclaw-labs/zeroclaw>
|
||||
|
||||
Tout autre dépôt, organisation, domaine ou package prétendant être "ZeroClaw" ou impliquant une affiliation avec ZeroClaw Labs est **non autorisé et non affilié à ce projet**. Les forks non autorisés connus seront listés dans [TRADEMARK.md](TRADEMARK.md).
|
||||
|
||||
Si vous rencontrez une usurpation d'identité ou une utilisation abusive de marque, veuillez [ouvrir une issue](https://github.com/zeroclaw-labs/zeroclaw/issues).
|
||||
|
||||
---
|
||||
|
||||
## Licence
|
||||
|
||||
ZeroClaw est sous double licence pour une ouverture maximale et la protection des contributeurs :
|
||||
|
||||
| Licence | Cas d'utilisation |
|
||||
| ---------------------------- | ------------------------------------------------------------ |
|
||||
| [MIT](LICENSE-MIT) | Open-source, recherche, académique, usage personnel |
|
||||
| [Apache 2.0](LICENSE-APACHE) | Protection de brevet, institutionnel, déploiement commercial |
|
||||
|
||||
Vous pouvez choisir l'une ou l'autre licence. **Les contributeurs accordent automatiquement des droits sous les deux** — voir [CLA.md](CLA.md) pour l'accord de contributeur complet.
|
||||
|
||||
### Marque
|
||||
|
||||
Le nom **ZeroClaw** et le logo sont des marques déposées de ZeroClaw Labs. Cette licence n'accorde pas la permission de les utiliser pour impliquer une approbation ou une affiliation. Voir [TRADEMARK.md](TRADEMARK.md) pour les utilisations permises et interdites.
|
||||
|
||||
### Protections des Contributeurs
|
||||
|
||||
- Vous **conservez les droits d'auteur** de vos contributions
|
||||
- **Concession de brevet** (Apache 2.0) vous protège contre les réclamations de brevet par d'autres contributeurs
|
||||
- Vos contributions sont **attribuées de manière permanente** dans l'historique des commits et [NOTICE](NOTICE)
|
||||
- Aucun droit de marque n'est transféré en contribuant
|
||||
|
||||
## Contribuer
|
||||
|
||||
Voir [CONTRIBUTING.md](CONTRIBUTING.md) et [CLA.md](CLA.md). Implémentez un trait, soumettez une PR :
|
||||
|
||||
- Guide de workflow CI : [docs/ci-map.md](docs/ci-map.md)
|
||||
- Nouveau `Provider` → `src/providers/`
|
||||
- Nouveau `Channel` → `src/channels/`
|
||||
- Nouveau `Observer` → `src/observability/`
|
||||
- Nouveau `Tool` → `src/tools/`
|
||||
- Nouvelle `Memory` → `src/memory/`
|
||||
- Nouveau `Tunnel` → `src/tunnel/`
|
||||
- Nouvelle `Skill` → `~/.zeroclaw/workspace/skills/<nom-compétence>/`
|
||||
|
||||
---
|
||||
|
||||
**ZeroClaw** — Zéro surcharge. Zéro compromis. Déployez n'importe où. Échangez n'importe quoi. 🦀
|
||||
|
||||
## Historique des Étoiles
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.star-history.com/#zeroclaw-labs/zeroclaw&type=date&legend=top-left">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=zeroclaw-labs/zeroclaw&type=date&theme=dark&legend=top-left" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=zeroclaw-labs/zeroclaw&type=date&legend=top-left" />
|
||||
<img alt="Graphique Historique des Étoiles" src="https://api.star-history.com/svg?repos=zeroclaw-labs/zeroclaw&type=date&legend=top-left" />
|
||||
</picture>
|
||||
</a>
|
||||
</p>
|
||||
300
README.ja.md
300
README.ja.md
@ -1,300 +0,0 @@
|
||||
<p align="center">
|
||||
<img src="zeroclaw.png" alt="ZeroClaw" width="200" />
|
||||
</p>
|
||||
|
||||
<h1 align="center">ZeroClaw 🦀(日本語)</h1>
|
||||
|
||||
<p align="center">
|
||||
<strong>Zero overhead. Zero compromise. 100% Rust. 100% Agnostic.</strong>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="LICENSE-APACHE"><img src="https://img.shields.io/badge/license-MIT%20OR%20Apache%202.0-blue.svg" alt="License: MIT OR Apache-2.0" /></a>
|
||||
<a href="NOTICE"><img src="https://img.shields.io/badge/contributors-27+-green.svg" alt="Contributors" /></a>
|
||||
<a href="https://buymeacoffee.com/argenistherose"><img src="https://img.shields.io/badge/Buy%20Me%20a%20Coffee-Donate-yellow.svg?style=flat&logo=buy-me-a-coffee" alt="Buy Me a Coffee" /></a>
|
||||
<a href="https://x.com/zeroclawlabs?s=21"><img src="https://img.shields.io/badge/X-%40zeroclawlabs-000000?style=flat&logo=x&logoColor=white" alt="X: @zeroclawlabs" /></a>
|
||||
<a href="https://zeroclawlabs.cn/group.jpg"><img src="https://img.shields.io/badge/WeChat-Group-B7D7A8?logo=wechat&logoColor=white" alt="WeChat Group" /></a>
|
||||
<a href="https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search"><img src="https://img.shields.io/badge/Xiaohongshu-Official-FF2442?style=flat" alt="Xiaohongshu: Official" /></a>
|
||||
<a href="https://t.me/zeroclawlabs"><img src="https://img.shields.io/badge/Telegram-%40zeroclawlabs-26A5E4?style=flat&logo=telegram&logoColor=white" alt="Telegram: @zeroclawlabs" /></a>
|
||||
<a href="https://www.facebook.com/groups/zeroclaw"><img src="https://img.shields.io/badge/Facebook-Group-1877F2?style=flat&logo=facebook&logoColor=white" alt="Facebook Group" /></a>
|
||||
<a href="https://www.reddit.com/r/zeroclawlabs/"><img src="https://img.shields.io/badge/Reddit-r%2Fzeroclawlabs-FF4500?style=flat&logo=reddit&logoColor=white" alt="Reddit: r/zeroclawlabs" /></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
🌐 言語: <a href="README.md">English</a> · <a href="README.zh-CN.md">简体中文</a> · <a href="README.ja.md">日本語</a> · <a href="README.ru.md">Русский</a> · <a href="README.fr.md">Français</a> · <a href="README.vi.md">Tiếng Việt</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="bootstrap.sh">ワンクリック導入</a> |
|
||||
<a href="docs/getting-started/README.md">導入ガイド</a> |
|
||||
<a href="docs/README.ja.md">ドキュメントハブ</a> |
|
||||
<a href="docs/SUMMARY.md">Docs TOC</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<strong>クイック分流:</strong>
|
||||
<a href="docs/reference/README.md">参照</a> ·
|
||||
<a href="docs/operations/README.md">運用</a> ·
|
||||
<a href="docs/troubleshooting.md">障害対応</a> ·
|
||||
<a href="docs/security/README.md">セキュリティ</a> ·
|
||||
<a href="docs/hardware/README.md">ハードウェア</a> ·
|
||||
<a href="docs/contributing/README.md">貢献・CI</a>
|
||||
</p>
|
||||
|
||||
> この文書は `README.md` の内容を、正確性と可読性を重視して日本語に整えた版です(逐語訳ではありません)。
|
||||
>
|
||||
> コマンド名、設定キー、API パス、Trait 名などの技術識別子は英語のまま維持しています。
|
||||
>
|
||||
> 最終同期日: **2026-02-19**。
|
||||
|
||||
## 📢 お知らせボード
|
||||
|
||||
重要なお知らせ(互換性破壊変更、セキュリティ告知、メンテナンス時間、リリース阻害事項など)をここに掲載します。
|
||||
|
||||
| 日付 (UTC) | レベル | お知らせ | 対応 |
|
||||
|---|---|---|---|
|
||||
| 2026-02-19 | _緊急_ | 私たちは `openagen/zeroclaw` および `zeroclaw.org` とは**一切関係ありません**。`zeroclaw.org` は現在 `openagen/zeroclaw` の fork を指しており、そのドメイン/リポジトリは当プロジェクトの公式サイト・公式プロジェクトを装っています。 | これらの情報源による案内、バイナリ、資金調達情報、公式発表は信頼しないでください。必ず[本リポジトリ](https://github.com/zeroclaw-labs/zeroclaw)と認証済み公式SNSのみを参照してください。 |
|
||||
| 2026-02-21 | _重要_ | 公式サイトを公開しました: [zeroclawlabs.ai](https://zeroclawlabs.ai)。公開までお待ちいただきありがとうございました。引き続きなりすましの試みを確認しているため、ZeroClaw 名義の投資・資金調達などの案内は、公式チャネルで確認できない限り参加しないでください。 | 情報は[本リポジトリ](https://github.com/zeroclaw-labs/zeroclaw)を最優先で確認し、[X(@zeroclawlabs)](https://x.com/zeroclawlabs?s=21)、[Telegram(@zeroclawlabs)](https://t.me/zeroclawlabs)、[Facebook(グループ)](https://www.facebook.com/groups/zeroclaw)、[Reddit(r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/) と [小紅書アカウント](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) で公式更新を確認してください。 |
|
||||
| 2026-02-19 | _重要_ | Anthropic は 2026-02-19 に Authentication and Credential Use を更新しました。条文では、OAuth authentication(Free/Pro/Max)は Claude Code と Claude.ai 専用であり、Claude Free/Pro/Max で取得した OAuth トークンを他の製品・ツール・サービス(Agent SDK を含む)で使用することは許可されず、Consumer Terms of Service 違反に該当すると明記されています。 | 損失回避のため、当面は Claude Code OAuth 連携を試さないでください。原文: [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use)。 |
|
||||
|
||||
## 概要
|
||||
|
||||
ZeroClaw は、高速・省リソース・高拡張性を重視した自律エージェント実行基盤です。ZeroClawはエージェントワークフローのための**ランタイムオペレーティングシステム**です — モデル、ツール、メモリ、実行を抽象化し、エージェントを一度構築すればどこでも実行できるインフラストラクチャです。
|
||||
|
||||
- Rust ネイティブ実装、単一バイナリで配布可能
|
||||
- Trait ベース設計(`Provider` / `Channel` / `Tool` / `Memory` など)
|
||||
- セキュアデフォルト(ペアリング、明示 allowlist、サンドボックス、スコープ制御)
|
||||
|
||||
## ZeroClaw が選ばれる理由
|
||||
|
||||
- **軽量ランタイムを標準化**: CLI や `status` などの常用操作は数MB級メモリで動作。
|
||||
- **低コスト環境に適合**: 低価格ボードや小規模クラウドでも、重い実行基盤なしで運用可能。
|
||||
- **高速コールドスタート**: Rust 単一バイナリにより、主要コマンドと daemon 起動が非常に速い。
|
||||
- **高い移植性**: ARM / x86 / RISC-V を同じ運用モデルで扱え、provider/channel/tool を差し替え可能。
|
||||
|
||||
## ベンチマークスナップショット(ZeroClaw vs OpenClaw、再現可能)
|
||||
|
||||
以下はローカルのクイック比較(macOS arm64、2026年2月)を、0.8GHz エッジ CPU 基準で正規化したものです。
|
||||
|
||||
| | OpenClaw | NanoBot | PicoClaw | ZeroClaw 🦀 |
|
||||
|---|---|---|---|---|
|
||||
| **言語** | TypeScript | Python | Go | **Rust** |
|
||||
| **RAM** | > 1GB | > 100MB | < 10MB | **< 5MB** |
|
||||
| **起動時間(0.8GHz コア)** | > 500s | > 30s | < 1s | **< 10ms** |
|
||||
| **バイナリサイズ** | ~28MB(dist) | N/A(スクリプト) | ~8MB | **~8.8 MB** |
|
||||
| **コスト** | Mac Mini $599 | Linux SBC ~$50 | Linux ボード $10 | **任意の $10 ハードウェア** |
|
||||
|
||||
> 注記: ZeroClaw の結果は release ビルドを `/usr/bin/time -l` で計測したものです。OpenClaw は Node.js ランタイムが必要で、ランタイム由来だけで通常は約390MBの追加メモリを要します。NanoBot は Python ランタイムが必要です。PicoClaw と ZeroClaw は静的バイナリです。
|
||||
|
||||
<p align="center">
|
||||
<img src="zero-claw.jpeg" alt="ZeroClaw vs OpenClaw Comparison" width="800" />
|
||||
</p>
|
||||
|
||||
### ローカルで再現可能な測定
|
||||
|
||||
ベンチマーク値はコードやツールチェーン更新で変わるため、必ず自身の環境で再測定してください。
|
||||
|
||||
```bash
|
||||
cargo build --release
|
||||
ls -lh target/release/zeroclaw
|
||||
|
||||
/usr/bin/time -l target/release/zeroclaw --help
|
||||
/usr/bin/time -l target/release/zeroclaw status
|
||||
```
|
||||
|
||||
README のサンプル値(macOS arm64, 2026-02-18):
|
||||
|
||||
- Release バイナリ: `8.8M`
|
||||
- `zeroclaw --help`: 約 `0.02s`、ピークメモリ 約 `3.9MB`
|
||||
- `zeroclaw status`: 約 `0.01s`、ピークメモリ 約 `4.1MB`
|
||||
|
||||
## ワンクリック導入
|
||||
|
||||
```bash
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
./bootstrap.sh
|
||||
```
|
||||
|
||||
環境ごと初期化する場合: `./bootstrap.sh --install-system-deps --install-rust`(システムパッケージで `sudo` が必要な場合があります)。
|
||||
|
||||
詳細は [`docs/one-click-bootstrap.md`](docs/one-click-bootstrap.md) を参照してください。
|
||||
|
||||
## クイックスタート
|
||||
|
||||
### Homebrew(macOS/Linuxbrew)
|
||||
|
||||
```bash
|
||||
brew install zeroclaw
|
||||
```
|
||||
|
||||
```bash
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
cargo build --release --locked
|
||||
cargo install --path . --force --locked
|
||||
|
||||
zeroclaw onboard --api-key sk-... --provider openrouter
|
||||
zeroclaw onboard --interactive
|
||||
|
||||
zeroclaw agent -m "Hello, ZeroClaw!"
|
||||
|
||||
# default: 127.0.0.1:42617
|
||||
zeroclaw gateway
|
||||
|
||||
zeroclaw daemon
|
||||
```
|
||||
|
||||
## Subscription Auth(OpenAI Codex / Claude Code)
|
||||
|
||||
ZeroClaw はサブスクリプションベースのネイティブ認証プロファイルをサポートしています(マルチアカウント対応、保存時暗号化)。
|
||||
|
||||
- 保存先: `~/.zeroclaw/auth-profiles.json`
|
||||
- 暗号化キー: `~/.zeroclaw/.secret_key`
|
||||
- Profile ID 形式: `<provider>:<profile_name>`(例: `openai-codex:work`)
|
||||
|
||||
OpenAI Codex OAuth(ChatGPT サブスクリプション):
|
||||
|
||||
```bash
|
||||
# サーバー/ヘッドレス環境向け推奨
|
||||
zeroclaw auth login --provider openai-codex --device-code
|
||||
|
||||
# ブラウザ/コールバックフロー(ペーストフォールバック付き)
|
||||
zeroclaw auth login --provider openai-codex --profile default
|
||||
zeroclaw auth paste-redirect --provider openai-codex --profile default
|
||||
|
||||
# 確認 / リフレッシュ / プロファイル切替
|
||||
zeroclaw auth status
|
||||
zeroclaw auth refresh --provider openai-codex --profile default
|
||||
zeroclaw auth use --provider openai-codex --profile work
|
||||
```
|
||||
|
||||
Claude Code / Anthropic setup-token:
|
||||
|
||||
```bash
|
||||
# サブスクリプション/setup token の貼り付け(Authorization header モード)
|
||||
zeroclaw auth paste-token --provider anthropic --profile default --auth-kind authorization
|
||||
|
||||
# エイリアスコマンド
|
||||
zeroclaw auth setup-token --provider anthropic --profile default
|
||||
```
|
||||
|
||||
Subscription auth で agent を実行:
|
||||
|
||||
```bash
|
||||
zeroclaw agent --provider openai-codex -m "hello"
|
||||
zeroclaw agent --provider openai-codex --auth-profile openai-codex:work -m "hello"
|
||||
|
||||
# Anthropic は API key と auth token の両方の環境変数をサポート:
|
||||
# ANTHROPIC_AUTH_TOKEN, ANTHROPIC_OAUTH_TOKEN, ANTHROPIC_API_KEY
|
||||
zeroclaw agent --provider anthropic -m "hello"
|
||||
```
|
||||
|
||||
## アーキテクチャ
|
||||
|
||||
すべてのサブシステムは **Trait** — 設定変更だけで実装を差し替え可能、コード変更不要。
|
||||
|
||||
<p align="center">
|
||||
<img src="docs/architecture.svg" alt="ZeroClaw アーキテクチャ" width="900" />
|
||||
</p>
|
||||
|
||||
| サブシステム | Trait | 内蔵実装 | 拡張方法 |
|
||||
|-------------|-------|----------|----------|
|
||||
| **AI モデル** | `Provider` | `zeroclaw providers` で確認(現在 28 個の組み込み + エイリアス、カスタムエンドポイント対応) | `custom:https://your-api.com`(OpenAI 互換)または `anthropic-custom:https://your-api.com` |
|
||||
| **チャネル** | `Channel` | CLI, Telegram, Discord, Slack, Mattermost, iMessage, Matrix, Signal, WhatsApp, Linq, Email, IRC, Lark, DingTalk, QQ, Webhook | 任意のメッセージ API |
|
||||
| **メモリ** | `Memory` | SQLite ハイブリッド検索, PostgreSQL バックエンド, Lucid ブリッジ, Markdown ファイル, 明示的 `none` バックエンド, スナップショット/復元, オプション応答キャッシュ | 任意の永続化バックエンド |
|
||||
| **ツール** | `Tool` | shell/file/memory, cron/schedule, git, pushover, browser, http_request, screenshot/image_info, composio (opt-in), delegate, ハードウェアツール | 任意の機能 |
|
||||
| **オブザーバビリティ** | `Observer` | Noop, Log, Multi | Prometheus, OTel |
|
||||
| **ランタイム** | `RuntimeAdapter` | Native, Docker(サンドボックス) | adapter 経由で追加可能;未対応の kind は即座にエラー |
|
||||
| **セキュリティ** | `SecurityPolicy` | Gateway ペアリング, サンドボックス, allowlist, レート制限, ファイルシステムスコープ, 暗号化シークレット | — |
|
||||
| **アイデンティティ** | `IdentityConfig` | OpenClaw (markdown), AIEOS v1.1 (JSON) | 任意の ID フォーマット |
|
||||
| **トンネル** | `Tunnel` | None, Cloudflare, Tailscale, ngrok, Custom | 任意のトンネルバイナリ |
|
||||
| **ハートビート** | Engine | HEARTBEAT.md 定期タスク | — |
|
||||
| **スキル** | Loader | TOML マニフェスト + SKILL.md インストラクション | コミュニティスキルパック |
|
||||
| **インテグレーション** | Registry | 9 カテゴリ、70 件以上の連携 | プラグインシステム |
|
||||
|
||||
### ランタイムサポート(現状)
|
||||
|
||||
- ✅ 現在サポート: `runtime.kind = "native"` または `runtime.kind = "docker"`
|
||||
- 🚧 計画中(未実装): WASM / エッジランタイム
|
||||
|
||||
未対応の `runtime.kind` が設定された場合、ZeroClaw は native へのサイレントフォールバックではなく、明確なエラーで終了します。
|
||||
|
||||
### メモリシステム(フルスタック検索エンジン)
|
||||
|
||||
すべて自社実装、外部依存ゼロ — Pinecone、Elasticsearch、LangChain 不要:
|
||||
|
||||
| レイヤー | 実装 |
|
||||
|---------|------|
|
||||
| **ベクトル DB** | Embeddings を SQLite に BLOB として保存、コサイン類似度検索 |
|
||||
| **キーワード検索** | FTS5 仮想テーブル、BM25 スコアリング |
|
||||
| **ハイブリッドマージ** | カスタム重み付きマージ関数(`vector.rs`) |
|
||||
| **Embeddings** | `EmbeddingProvider` trait — OpenAI、カスタム URL、または noop |
|
||||
| **チャンキング** | 行ベースの Markdown チャンカー(見出し構造保持) |
|
||||
| **キャッシュ** | SQLite `embedding_cache` テーブル、LRU エビクション |
|
||||
| **安全な再インデックス** | FTS5 再構築 + 欠落ベクトルの再埋め込みをアトミックに実行 |
|
||||
|
||||
Agent はツール経由でメモリの呼び出し・保存・管理を自動的に行います。
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
|
||||
auto_save = true
|
||||
embedding_provider = "none" # "none", "openai", "custom:https://..."
|
||||
vector_weight = 0.7
|
||||
keyword_weight = 0.3
|
||||
```
|
||||
|
||||
## セキュリティのデフォルト
|
||||
|
||||
- Gateway の既定バインド: `127.0.0.1:42617`
|
||||
- 既定でペアリング必須: `require_pairing = true`
|
||||
- 既定で公開バインド禁止: `allow_public_bind = false`
|
||||
- Channel allowlist:
|
||||
- `[]` は deny-by-default
|
||||
- `["*"]` は allow all(意図的に使う場合のみ)
|
||||
|
||||
## 設定例
|
||||
|
||||
```toml
|
||||
api_key = "sk-..."
|
||||
default_provider = "openrouter"
|
||||
default_model = "anthropic/claude-sonnet-4-6"
|
||||
default_temperature = 0.7
|
||||
|
||||
[memory]
|
||||
backend = "sqlite"
|
||||
auto_save = true
|
||||
embedding_provider = "none"
|
||||
|
||||
[gateway]
|
||||
host = "127.0.0.1"
|
||||
port = 42617
|
||||
require_pairing = true
|
||||
allow_public_bind = false
|
||||
```
|
||||
|
||||
## ドキュメント入口
|
||||
|
||||
- ドキュメントハブ(英語): [`docs/README.md`](docs/README.md)
|
||||
- 統合 TOC: [`docs/SUMMARY.md`](docs/SUMMARY.md)
|
||||
- ドキュメントハブ(日本語): [`docs/README.ja.md`](docs/README.ja.md)
|
||||
- コマンドリファレンス: [`docs/commands-reference.md`](docs/commands-reference.md)
|
||||
- 設定リファレンス: [`docs/config-reference.md`](docs/config-reference.md)
|
||||
- Provider リファレンス: [`docs/providers-reference.md`](docs/providers-reference.md)
|
||||
- Channel リファレンス: [`docs/channels-reference.md`](docs/channels-reference.md)
|
||||
- 運用ガイド(Runbook): [`docs/operations-runbook.md`](docs/operations-runbook.md)
|
||||
- トラブルシューティング: [`docs/troubleshooting.md`](docs/troubleshooting.md)
|
||||
- ドキュメント一覧 / 分類: [`docs/docs-inventory.md`](docs/docs-inventory.md)
|
||||
- プロジェクト triage スナップショット: [`docs/project-triage-snapshot-2026-02-18.md`](docs/project-triage-snapshot-2026-02-18.md)
|
||||
|
||||
## コントリビュート / ライセンス
|
||||
|
||||
- Contributing: [`CONTRIBUTING.md`](CONTRIBUTING.md)
|
||||
- PR Workflow: [`docs/pr-workflow.md`](docs/pr-workflow.md)
|
||||
- Reviewer Playbook: [`docs/reviewer-playbook.md`](docs/reviewer-playbook.md)
|
||||
- License: MIT or Apache 2.0([`LICENSE-MIT`](LICENSE-MIT), [`LICENSE-APACHE`](LICENSE-APACHE), [`NOTICE`](NOTICE))
|
||||
|
||||
---
|
||||
|
||||
詳細仕様(全コマンド、アーキテクチャ、API 仕様、開発フロー)は英語版の [`README.md`](README.md) を参照してください。
|
||||
300
README.ru.md
300
README.ru.md
@ -1,300 +0,0 @@
|
||||
<p align="center">
|
||||
<img src="zeroclaw.png" alt="ZeroClaw" width="200" />
|
||||
</p>
|
||||
|
||||
<h1 align="center">ZeroClaw 🦀(Русский)</h1>
|
||||
|
||||
<p align="center">
|
||||
<strong>Zero overhead. Zero compromise. 100% Rust. 100% Agnostic.</strong>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="LICENSE-APACHE"><img src="https://img.shields.io/badge/license-MIT%20OR%20Apache%202.0-blue.svg" alt="License: MIT OR Apache-2.0" /></a>
|
||||
<a href="NOTICE"><img src="https://img.shields.io/badge/contributors-27+-green.svg" alt="Contributors" /></a>
|
||||
<a href="https://buymeacoffee.com/argenistherose"><img src="https://img.shields.io/badge/Buy%20Me%20a%20Coffee-Donate-yellow.svg?style=flat&logo=buy-me-a-coffee" alt="Buy Me a Coffee" /></a>
|
||||
<a href="https://x.com/zeroclawlabs?s=21"><img src="https://img.shields.io/badge/X-%40zeroclawlabs-000000?style=flat&logo=x&logoColor=white" alt="X: @zeroclawlabs" /></a>
|
||||
<a href="https://zeroclawlabs.cn/group.jpg"><img src="https://img.shields.io/badge/WeChat-Group-B7D7A8?logo=wechat&logoColor=white" alt="WeChat Group" /></a>
|
||||
<a href="https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search"><img src="https://img.shields.io/badge/Xiaohongshu-Official-FF2442?style=flat" alt="Xiaohongshu: Official" /></a>
|
||||
<a href="https://t.me/zeroclawlabs"><img src="https://img.shields.io/badge/Telegram-%40zeroclawlabs-26A5E4?style=flat&logo=telegram&logoColor=white" alt="Telegram: @zeroclawlabs" /></a>
|
||||
<a href="https://www.facebook.com/groups/zeroclaw"><img src="https://img.shields.io/badge/Facebook-Group-1877F2?style=flat&logo=facebook&logoColor=white" alt="Facebook Group" /></a>
|
||||
<a href="https://www.reddit.com/r/zeroclawlabs/"><img src="https://img.shields.io/badge/Reddit-r%2Fzeroclawlabs-FF4500?style=flat&logo=reddit&logoColor=white" alt="Reddit: r/zeroclawlabs" /></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
🌐 Языки: <a href="README.md">English</a> · <a href="README.zh-CN.md">简体中文</a> · <a href="README.ja.md">日本語</a> · <a href="README.ru.md">Русский</a> · <a href="README.fr.md">Français</a> · <a href="README.vi.md">Tiếng Việt</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="bootstrap.sh">Установка в 1 клик</a> |
|
||||
<a href="docs/getting-started/README.md">Быстрый старт</a> |
|
||||
<a href="docs/README.ru.md">Хаб документации</a> |
|
||||
<a href="docs/SUMMARY.md">TOC docs</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<strong>Быстрые маршруты:</strong>
|
||||
<a href="docs/reference/README.md">Справочники</a> ·
|
||||
<a href="docs/operations/README.md">Операции</a> ·
|
||||
<a href="docs/troubleshooting.md">Диагностика</a> ·
|
||||
<a href="docs/security/README.md">Безопасность</a> ·
|
||||
<a href="docs/hardware/README.md">Аппаратная часть</a> ·
|
||||
<a href="docs/contributing/README.md">Вклад и CI</a>
|
||||
</p>
|
||||
|
||||
> Этот файл — выверенный перевод `README.md` с акцентом на точность и читаемость (не дословный перевод).
|
||||
>
|
||||
> Технические идентификаторы (команды, ключи конфигурации, API-пути, имена Trait) сохранены на английском.
|
||||
>
|
||||
> Последняя синхронизация: **2026-02-19**.
|
||||
|
||||
## 📢 Доска объявлений
|
||||
|
||||
Публикуйте здесь важные уведомления (breaking changes, security advisories, окна обслуживания и блокеры релиза).
|
||||
|
||||
| Дата (UTC) | Уровень | Объявление | Действие |
|
||||
|---|---|---|---|
|
||||
| 2026-02-19 | _Срочно_ | Мы **не аффилированы** с `openagen/zeroclaw` и `zeroclaw.org`. Домен `zeroclaw.org` сейчас указывает на fork `openagen/zeroclaw`, и этот домен/репозиторий выдают себя за наш официальный сайт и проект. | Не доверяйте информации, бинарникам, сборам средств и «официальным» объявлениям из этих источников. Используйте только [этот репозиторий](https://github.com/zeroclaw-labs/zeroclaw) и наши верифицированные соцсети. |
|
||||
| 2026-02-21 | _Важно_ | Наш официальный сайт уже запущен: [zeroclawlabs.ai](https://zeroclawlabs.ai). Спасибо, что дождались запуска. При этом попытки выдавать себя за ZeroClaw продолжаются, поэтому не участвуйте в инвестициях, сборах средств и похожих активностях, если они не подтверждены через наши официальные каналы. | Ориентируйтесь только на [этот репозиторий](https://github.com/zeroclaw-labs/zeroclaw); также следите за [X (@zeroclawlabs)](https://x.com/zeroclawlabs?s=21), [Telegram (@zeroclawlabs)](https://t.me/zeroclawlabs), [Facebook (группа)](https://www.facebook.com/groups/zeroclaw), [Reddit (r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/) и [Xiaohongshu](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) для официальных обновлений. |
|
||||
| 2026-02-19 | _Важно_ | Anthropic обновил раздел Authentication and Credential Use 2026-02-19. В нем указано, что OAuth authentication (Free/Pro/Max) предназначена только для Claude Code и Claude.ai; использование OAuth-токенов, полученных через Claude Free/Pro/Max, в любых других продуктах, инструментах или сервисах (включая Agent SDK), не допускается и может считаться нарушением Consumer Terms of Service. | Чтобы избежать потерь, временно не используйте Claude Code OAuth-интеграции. Оригинал: [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use). |
|
||||
|
||||
## О проекте
|
||||
|
||||
ZeroClaw — это производительная и расширяемая инфраструктура автономного AI-агента. ZeroClaw — это **операционная система времени выполнения** для агентных рабочих процессов — инфраструктура, абстрагирующая модели, инструменты, память и выполнение, позволяя создавать агентов один раз и запускать где угодно.
|
||||
|
||||
- Нативно на Rust, единый бинарник, переносимость между ARM / x86 / RISC-V
|
||||
- Архитектура на Trait (`Provider`, `Channel`, `Tool`, `Memory` и др.)
|
||||
- Безопасные значения по умолчанию: pairing, явные allowlist, sandbox и scope-ограничения
|
||||
|
||||
## Почему выбирают ZeroClaw
|
||||
|
||||
- **Лёгкий runtime по умолчанию**: Повседневные CLI-операции и `status` обычно укладываются в несколько МБ памяти.
|
||||
- **Оптимизирован для недорогих сред**: Подходит для бюджетных плат и небольших cloud-инстансов без тяжёлой runtime-обвязки.
|
||||
- **Быстрый cold start**: Архитектура одного Rust-бинарника ускоряет запуск основных команд и daemon-режима.
|
||||
- **Портативная модель деплоя**: Единый подход для ARM / x86 / RISC-V и возможность менять providers/channels/tools.
|
||||
|
||||
## Снимок бенчмарка (ZeroClaw vs OpenClaw, воспроизводимо)
|
||||
|
||||
Ниже — быстрый локальный сравнительный срез (macOS arm64, февраль 2026), нормализованный под 0.8GHz edge CPU.
|
||||
|
||||
| | OpenClaw | NanoBot | PicoClaw | ZeroClaw 🦀 |
|
||||
|---|---|---|---|---|
|
||||
| **Язык** | TypeScript | Python | Go | **Rust** |
|
||||
| **RAM** | > 1GB | > 100MB | < 10MB | **< 5MB** |
|
||||
| **Старт (ядро 0.8GHz)** | > 500s | > 30s | < 1s | **< 10ms** |
|
||||
| **Размер бинарника** | ~28MB (dist) | N/A (скрипты) | ~8MB | **~8.8 MB** |
|
||||
| **Стоимость** | Mac Mini $599 | Linux SBC ~$50 | Linux-плата $10 | **Любое железо за $10** |
|
||||
|
||||
> Примечание: результаты ZeroClaw получены на release-сборке с помощью `/usr/bin/time -l`. OpenClaw требует Node.js runtime; только этот runtime обычно добавляет около 390MB дополнительного потребления памяти. NanoBot требует Python runtime. PicoClaw и ZeroClaw — статические бинарники.
|
||||
|
||||
<p align="center">
|
||||
<img src="zero-claw.jpeg" alt="Сравнение ZeroClaw и OpenClaw" width="800" />
|
||||
</p>
|
||||
|
||||
### Локально воспроизводимое измерение
|
||||
|
||||
Метрики могут меняться вместе с кодом и toolchain, поэтому проверяйте результаты в своей среде:
|
||||
|
||||
```bash
|
||||
cargo build --release
|
||||
ls -lh target/release/zeroclaw
|
||||
|
||||
/usr/bin/time -l target/release/zeroclaw --help
|
||||
/usr/bin/time -l target/release/zeroclaw status
|
||||
```
|
||||
|
||||
Текущие примерные значения из README (macOS arm64, 2026-02-18):
|
||||
|
||||
- Размер release-бинарника: `8.8M`
|
||||
- `zeroclaw --help`: ~`0.02s`, пик памяти ~`3.9MB`
|
||||
- `zeroclaw status`: ~`0.01s`, пик памяти ~`4.1MB`
|
||||
|
||||
## Установка в 1 клик
|
||||
|
||||
```bash
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
./bootstrap.sh
|
||||
```
|
||||
|
||||
Для полной инициализации окружения: `./bootstrap.sh --install-system-deps --install-rust` (для системных пакетов может потребоваться `sudo`).
|
||||
|
||||
Подробности: [`docs/one-click-bootstrap.md`](docs/one-click-bootstrap.md).
|
||||
|
||||
## Быстрый старт
|
||||
|
||||
### Homebrew (macOS/Linuxbrew)
|
||||
|
||||
```bash
|
||||
brew install zeroclaw
|
||||
```
|
||||
|
||||
```bash
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
cargo build --release --locked
|
||||
cargo install --path . --force --locked
|
||||
|
||||
zeroclaw onboard --api-key sk-... --provider openrouter
|
||||
zeroclaw onboard --interactive
|
||||
|
||||
zeroclaw agent -m "Hello, ZeroClaw!"
|
||||
|
||||
# default: 127.0.0.1:42617
|
||||
zeroclaw gateway
|
||||
|
||||
zeroclaw daemon
|
||||
```
|
||||
|
||||
## Subscription Auth (OpenAI Codex / Claude Code)
|
||||
|
||||
ZeroClaw поддерживает нативные профили авторизации на основе подписки (мультиаккаунт, шифрование при хранении).
|
||||
|
||||
- Файл хранения: `~/.zeroclaw/auth-profiles.json`
|
||||
- Ключ шифрования: `~/.zeroclaw/.secret_key`
|
||||
- Формат Profile ID: `<provider>:<profile_name>` (пример: `openai-codex:work`)
|
||||
|
||||
OpenAI Codex OAuth (подписка ChatGPT):
|
||||
|
||||
```bash
|
||||
# Рекомендуется для серверов/headless-окружений
|
||||
zeroclaw auth login --provider openai-codex --device-code
|
||||
|
||||
# Браузерный/callback-поток с paste-фолбэком
|
||||
zeroclaw auth login --provider openai-codex --profile default
|
||||
zeroclaw auth paste-redirect --provider openai-codex --profile default
|
||||
|
||||
# Проверка / обновление / переключение профиля
|
||||
zeroclaw auth status
|
||||
zeroclaw auth refresh --provider openai-codex --profile default
|
||||
zeroclaw auth use --provider openai-codex --profile work
|
||||
```
|
||||
|
||||
Claude Code / Anthropic setup-token:
|
||||
|
||||
```bash
|
||||
# Вставка subscription/setup token (режим Authorization header)
|
||||
zeroclaw auth paste-token --provider anthropic --profile default --auth-kind authorization
|
||||
|
||||
# Команда-алиас
|
||||
zeroclaw auth setup-token --provider anthropic --profile default
|
||||
```
|
||||
|
||||
Запуск agent с subscription auth:
|
||||
|
||||
```bash
|
||||
zeroclaw agent --provider openai-codex -m "hello"
|
||||
zeroclaw agent --provider openai-codex --auth-profile openai-codex:work -m "hello"
|
||||
|
||||
# Anthropic поддерживает и API key, и auth token через переменные окружения:
|
||||
# ANTHROPIC_AUTH_TOKEN, ANTHROPIC_OAUTH_TOKEN, ANTHROPIC_API_KEY
|
||||
zeroclaw agent --provider anthropic -m "hello"
|
||||
```
|
||||
|
||||
## Архитектура
|
||||
|
||||
Каждая подсистема — это **Trait**: меняйте реализации через конфигурацию, без изменения кода.
|
||||
|
||||
<p align="center">
|
||||
<img src="docs/architecture.svg" alt="Архитектура ZeroClaw" width="900" />
|
||||
</p>
|
||||
|
||||
| Подсистема | Trait | Встроенные реализации | Расширение |
|
||||
|-----------|-------|---------------------|------------|
|
||||
| **AI-модели** | `Provider` | Каталог через `zeroclaw providers` (сейчас 28 встроенных + алиасы, плюс пользовательские endpoint) | `custom:https://your-api.com` (OpenAI-совместимый) или `anthropic-custom:https://your-api.com` |
|
||||
| **Каналы** | `Channel` | CLI, Telegram, Discord, Slack, Mattermost, iMessage, Matrix, Signal, WhatsApp, Linq, Email, IRC, Lark, DingTalk, QQ, Webhook | Любой messaging API |
|
||||
| **Память** | `Memory` | SQLite гибридный поиск, PostgreSQL-бэкенд, Lucid-мост, Markdown-файлы, явный `none`-бэкенд, snapshot/hydrate, опциональный кэш ответов | Любой persistence-бэкенд |
|
||||
| **Инструменты** | `Tool` | shell/file/memory, cron/schedule, git, pushover, browser, http_request, screenshot/image_info, composio (opt-in), delegate, аппаратные инструменты | Любая функциональность |
|
||||
| **Наблюдаемость** | `Observer` | Noop, Log, Multi | Prometheus, OTel |
|
||||
| **Runtime** | `RuntimeAdapter` | Native, Docker (sandbox) | Через adapter; неподдерживаемые kind завершаются с ошибкой |
|
||||
| **Безопасность** | `SecurityPolicy` | Gateway pairing, sandbox, allowlist, rate limits, scoping файловой системы, шифрование секретов | — |
|
||||
| **Идентификация** | `IdentityConfig` | OpenClaw (markdown), AIEOS v1.1 (JSON) | Любой формат идентификации |
|
||||
| **Туннели** | `Tunnel` | None, Cloudflare, Tailscale, ngrok, Custom | Любой tunnel-бинарник |
|
||||
| **Heartbeat** | Engine | HEARTBEAT.md — периодические задачи | — |
|
||||
| **Навыки** | Loader | TOML-манифесты + SKILL.md-инструкции | Пакеты навыков сообщества |
|
||||
| **Интеграции** | Registry | 70+ интеграций в 9 категориях | Плагинная система |
|
||||
|
||||
### Поддержка runtime (текущая)
|
||||
|
||||
- ✅ Поддерживается сейчас: `runtime.kind = "native"` или `runtime.kind = "docker"`
|
||||
- 🚧 Запланировано, но ещё не реализовано: WASM / edge-runtime
|
||||
|
||||
При указании неподдерживаемого `runtime.kind` ZeroClaw завершается с явной ошибкой, а не молча откатывается к native.
|
||||
|
||||
### Система памяти (полнофункциональный поисковый движок)
|
||||
|
||||
Полностью собственная реализация, ноль внешних зависимостей — без Pinecone, Elasticsearch, LangChain:
|
||||
|
||||
| Уровень | Реализация |
|
||||
|---------|-----------|
|
||||
| **Векторная БД** | Embeddings хранятся как BLOB в SQLite, поиск по косинусному сходству |
|
||||
| **Поиск по ключевым словам** | Виртуальные таблицы FTS5 со скорингом BM25 |
|
||||
| **Гибридное слияние** | Пользовательская взвешенная функция слияния (`vector.rs`) |
|
||||
| **Embeddings** | Trait `EmbeddingProvider` — OpenAI, пользовательский URL или noop |
|
||||
| **Чанкинг** | Построчный Markdown-чанкер с сохранением заголовков |
|
||||
| **Кэширование** | Таблица `embedding_cache` в SQLite с LRU-вытеснением |
|
||||
| **Безопасная переиндексация** | Атомарная перестройка FTS5 + повторное встраивание отсутствующих векторов |
|
||||
|
||||
Agent автоматически вспоминает, сохраняет и управляет памятью через инструменты.
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
|
||||
auto_save = true
|
||||
embedding_provider = "none" # "none", "openai", "custom:https://..."
|
||||
vector_weight = 0.7
|
||||
keyword_weight = 0.3
|
||||
```
|
||||
|
||||
## Важные security-дефолты
|
||||
|
||||
- Gateway по умолчанию: `127.0.0.1:42617`
|
||||
- Pairing обязателен по умолчанию: `require_pairing = true`
|
||||
- Публичный bind запрещён по умолчанию: `allow_public_bind = false`
|
||||
- Семантика allowlist каналов:
|
||||
- `[]` => deny-by-default
|
||||
- `["*"]` => allow all (используйте осознанно)
|
||||
|
||||
## Пример конфигурации
|
||||
|
||||
```toml
|
||||
api_key = "sk-..."
|
||||
default_provider = "openrouter"
|
||||
default_model = "anthropic/claude-sonnet-4-6"
|
||||
default_temperature = 0.7
|
||||
|
||||
[memory]
|
||||
backend = "sqlite"
|
||||
auto_save = true
|
||||
embedding_provider = "none"
|
||||
|
||||
[gateway]
|
||||
host = "127.0.0.1"
|
||||
port = 42617
|
||||
require_pairing = true
|
||||
allow_public_bind = false
|
||||
```
|
||||
|
||||
## Навигация по документации
|
||||
|
||||
- Хаб документации (English): [`docs/README.md`](docs/README.md)
|
||||
- Единый TOC docs: [`docs/SUMMARY.md`](docs/SUMMARY.md)
|
||||
- Хаб документации (Русский): [`docs/README.ru.md`](docs/README.ru.md)
|
||||
- Справочник команд: [`docs/commands-reference.md`](docs/commands-reference.md)
|
||||
- Справочник конфигурации: [`docs/config-reference.md`](docs/config-reference.md)
|
||||
- Справочник providers: [`docs/providers-reference.md`](docs/providers-reference.md)
|
||||
- Справочник channels: [`docs/channels-reference.md`](docs/channels-reference.md)
|
||||
- Операционный runbook: [`docs/operations-runbook.md`](docs/operations-runbook.md)
|
||||
- Устранение неполадок: [`docs/troubleshooting.md`](docs/troubleshooting.md)
|
||||
- Инвентарь и классификация docs: [`docs/docs-inventory.md`](docs/docs-inventory.md)
|
||||
- Снимок triage проекта: [`docs/project-triage-snapshot-2026-02-18.md`](docs/project-triage-snapshot-2026-02-18.md)
|
||||
|
||||
## Вклад и лицензия
|
||||
|
||||
- Contribution guide: [`CONTRIBUTING.md`](CONTRIBUTING.md)
|
||||
- PR workflow: [`docs/pr-workflow.md`](docs/pr-workflow.md)
|
||||
- Reviewer playbook: [`docs/reviewer-playbook.md`](docs/reviewer-playbook.md)
|
||||
- License: MIT or Apache 2.0 ([`LICENSE-MIT`](LICENSE-MIT), [`LICENSE-APACHE`](LICENSE-APACHE), [`NOTICE`](NOTICE))
|
||||
|
||||
---
|
||||
|
||||
Для полной и исчерпывающей информации (архитектура, все команды, API, разработка) используйте основной английский документ: [`README.md`](README.md).
|
||||
1060
README.vi.md
1060
README.vi.md
File diff suppressed because it is too large
Load Diff
305
README.zh-CN.md
305
README.zh-CN.md
@ -1,305 +0,0 @@
|
||||
<p align="center">
|
||||
<img src="zeroclaw.png" alt="ZeroClaw" width="200" />
|
||||
</p>
|
||||
|
||||
<h1 align="center">ZeroClaw 🦀(简体中文)</h1>
|
||||
|
||||
<p align="center">
|
||||
<strong>零开销、零妥协;随处部署、万物可换。</strong>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="LICENSE-APACHE"><img src="https://img.shields.io/badge/license-MIT%20OR%20Apache%202.0-blue.svg" alt="License: MIT OR Apache-2.0" /></a>
|
||||
<a href="NOTICE"><img src="https://img.shields.io/badge/contributors-27+-green.svg" alt="Contributors" /></a>
|
||||
<a href="https://buymeacoffee.com/argenistherose"><img src="https://img.shields.io/badge/Buy%20Me%20a%20Coffee-Donate-yellow.svg?style=flat&logo=buy-me-a-coffee" alt="Buy Me a Coffee" /></a>
|
||||
<a href="https://x.com/zeroclawlabs?s=21"><img src="https://img.shields.io/badge/X-%40zeroclawlabs-000000?style=flat&logo=x&logoColor=white" alt="X: @zeroclawlabs" /></a>
|
||||
<a href="https://zeroclawlabs.cn/group.jpg"><img src="https://img.shields.io/badge/WeChat-Group-B7D7A8?logo=wechat&logoColor=white" alt="WeChat Group" /></a>
|
||||
<a href="https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search"><img src="https://img.shields.io/badge/Xiaohongshu-Official-FF2442?style=flat" alt="Xiaohongshu: Official" /></a>
|
||||
<a href="https://t.me/zeroclawlabs"><img src="https://img.shields.io/badge/Telegram-%40zeroclawlabs-26A5E4?style=flat&logo=telegram&logoColor=white" alt="Telegram: @zeroclawlabs" /></a>
|
||||
<a href="https://www.facebook.com/groups/zeroclaw"><img src="https://img.shields.io/badge/Facebook-Group-1877F2?style=flat&logo=facebook&logoColor=white" alt="Facebook Group" /></a>
|
||||
<a href="https://www.reddit.com/r/zeroclawlabs/"><img src="https://img.shields.io/badge/Reddit-r%2Fzeroclawlabs-FF4500?style=flat&logo=reddit&logoColor=white" alt="Reddit: r/zeroclawlabs" /></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
🌐 语言:<a href="README.md">English</a> · <a href="README.zh-CN.md">简体中文</a> · <a href="README.ja.md">日本語</a> · <a href="README.ru.md">Русский</a> · <a href="README.fr.md">Français</a> · <a href="README.vi.md">Tiếng Việt</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="bootstrap.sh">一键部署</a> |
|
||||
<a href="docs/getting-started/README.md">安装入门</a> |
|
||||
<a href="docs/README.zh-CN.md">文档总览</a> |
|
||||
<a href="docs/SUMMARY.md">文档目录</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<strong>场景分流:</strong>
|
||||
<a href="docs/reference/README.md">参考手册</a> ·
|
||||
<a href="docs/operations/README.md">运维部署</a> ·
|
||||
<a href="docs/troubleshooting.md">故障排查</a> ·
|
||||
<a href="docs/security/README.md">安全专题</a> ·
|
||||
<a href="docs/hardware/README.md">硬件外设</a> ·
|
||||
<a href="docs/contributing/README.md">贡献与 CI</a>
|
||||
</p>
|
||||
|
||||
> 本文是对 `README.md` 的人工对齐翻译(强调可读性与准确性,不做逐字直译)。
|
||||
>
|
||||
> 技术标识(命令、配置键、API 路径、Trait 名称)保持英文,避免语义漂移。
|
||||
>
|
||||
> 最后对齐时间:**2026-02-22**。
|
||||
|
||||
## 📢 公告板
|
||||
|
||||
用于发布重要通知(破坏性变更、安全通告、维护窗口、版本阻塞问题等)。
|
||||
|
||||
| 日期(UTC) | 级别 | 通知 | 处理建议 |
|
||||
|---|---|---|---|
|
||||
| 2026-02-19 | _紧急_ | 我们与 `openagen/zeroclaw` 及 `zeroclaw.org` **没有任何关系**。`zeroclaw.org` 当前会指向 `openagen/zeroclaw` 这个 fork,并且该域名/仓库正在冒充我们的官网与官方项目。 | 请不要相信上述来源发布的任何信息、二进制、募资活动或官方声明。请仅以[本仓库](https://github.com/zeroclaw-labs/zeroclaw)和已验证官方社媒为准。 |
|
||||
| 2026-02-21 | _重要_ | 我们的官网现已上线:[zeroclawlabs.ai](https://zeroclawlabs.ai)。感谢大家一直以来的耐心等待。我们仍在持续发现冒充行为,请勿参与任何未经我们官方渠道发布、但打着 ZeroClaw 名义进行的投资、募资或类似活动。 | 一切信息请以[本仓库](https://github.com/zeroclaw-labs/zeroclaw)为准;也可关注 [X(@zeroclawlabs)](https://x.com/zeroclawlabs?s=21)、[Telegram(@zeroclawlabs)](https://t.me/zeroclawlabs)、[Facebook(群组)](https://www.facebook.com/groups/zeroclaw)、[Reddit(r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/) 与 [小红书账号](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) 获取官方最新动态。 |
|
||||
| 2026-02-19 | _重要_ | Anthropic 于 2026-02-19 更新了 Authentication and Credential Use 条款。条款明确:OAuth authentication(用于 Free、Pro、Max)仅适用于 Claude Code 与 Claude.ai;将 Claude Free/Pro/Max 账号获得的 OAuth token 用于其他任何产品、工具或服务(包括 Agent SDK)不被允许,并可能构成对 Consumer Terms of Service 的违规。 | 为避免损失,请暂时不要尝试 Claude Code OAuth 集成;原文见:[Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use)。 |
|
||||
|
||||
## 项目简介
|
||||
|
||||
ZeroClaw 是一个高性能、低资源占用、可组合的自主智能体运行时。ZeroClaw 是面向智能代理工作流的**运行时操作系统** — 它抽象了模型、工具、记忆和执行层,使代理可以一次构建、随处运行。
|
||||
|
||||
- Rust 原生实现,单二进制部署,跨 ARM / x86 / RISC-V。
|
||||
- Trait 驱动架构,`Provider` / `Channel` / `Tool` / `Memory` 可替换。
|
||||
- 安全默认值优先:配对鉴权、显式 allowlist、沙箱与作用域约束。
|
||||
|
||||
## 为什么选择 ZeroClaw
|
||||
|
||||
- **默认轻量运行时**:常见 CLI 与 `status` 工作流通常保持在几 MB 级内存范围。
|
||||
- **低成本部署友好**:面向低价板卡与小规格云主机设计,不依赖厚重运行时。
|
||||
- **冷启动速度快**:Rust 单二进制让常用命令与守护进程启动更接近“秒开”。
|
||||
- **跨架构可移植**:同一套二进制优先流程覆盖 ARM / x86 / RISC-V,并保持 provider/channel/tool 可替换。
|
||||
|
||||
## 基准快照(ZeroClaw vs OpenClaw,可复现)
|
||||
|
||||
以下是本地快速基准对比(macOS arm64,2026 年 2 月),按 0.8GHz 边缘 CPU 进行归一化展示:
|
||||
|
||||
| | OpenClaw | NanoBot | PicoClaw | ZeroClaw 🦀 |
|
||||
|---|---|---|---|---|
|
||||
| **语言** | TypeScript | Python | Go | **Rust** |
|
||||
| **RAM** | > 1GB | > 100MB | < 10MB | **< 5MB** |
|
||||
| **启动时间(0.8GHz 核)** | > 500s | > 30s | < 1s | **< 10ms** |
|
||||
| **二进制体积** | ~28MB(dist) | N/A(脚本) | ~8MB | **~8.8 MB** |
|
||||
| **成本** | Mac Mini $599 | Linux SBC ~$50 | Linux 板卡 $10 | **任意 $10 硬件** |
|
||||
|
||||
> 说明:ZeroClaw 的数据来自 release 构建,并通过 `/usr/bin/time -l` 测得。OpenClaw 需要 Node.js 运行时环境,仅该运行时通常就会带来约 390MB 的额外内存占用;NanoBot 需要 Python 运行时环境。PicoClaw 与 ZeroClaw 为静态二进制。
|
||||
|
||||
<p align="center">
|
||||
<img src="zero-claw.jpeg" alt="ZeroClaw vs OpenClaw 对比图" width="800" />
|
||||
</p>
|
||||
|
||||
### 本地可复现测量
|
||||
|
||||
基准数据会随代码与工具链变化,建议始终在你的目标环境自行复测:
|
||||
|
||||
```bash
|
||||
cargo build --release
|
||||
ls -lh target/release/zeroclaw
|
||||
|
||||
/usr/bin/time -l target/release/zeroclaw --help
|
||||
/usr/bin/time -l target/release/zeroclaw status
|
||||
```
|
||||
|
||||
当前 README 的样例数据(macOS arm64,2026-02-18):
|
||||
|
||||
- Release 二进制:`8.8M`
|
||||
- `zeroclaw --help`:约 `0.02s`,峰值内存约 `3.9MB`
|
||||
- `zeroclaw status`:约 `0.01s`,峰值内存约 `4.1MB`
|
||||
|
||||
## 一键部署
|
||||
|
||||
```bash
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
./bootstrap.sh
|
||||
```
|
||||
|
||||
可选环境初始化:`./bootstrap.sh --install-system-deps --install-rust`(可能需要 `sudo`)。
|
||||
|
||||
详细说明见:[`docs/one-click-bootstrap.md`](docs/one-click-bootstrap.md)。
|
||||
|
||||
## 快速开始
|
||||
|
||||
### Homebrew(macOS/Linuxbrew)
|
||||
|
||||
```bash
|
||||
brew install zeroclaw
|
||||
```
|
||||
|
||||
```bash
|
||||
git clone https://github.com/zeroclaw-labs/zeroclaw.git
|
||||
cd zeroclaw
|
||||
cargo build --release --locked
|
||||
cargo install --path . --force --locked
|
||||
|
||||
# 快速初始化(无交互)
|
||||
zeroclaw onboard --api-key sk-... --provider openrouter
|
||||
|
||||
# 或使用交互式向导
|
||||
zeroclaw onboard --interactive
|
||||
|
||||
# 单次对话
|
||||
zeroclaw agent -m "Hello, ZeroClaw!"
|
||||
|
||||
# 启动网关(默认: 127.0.0.1:42617)
|
||||
zeroclaw gateway
|
||||
|
||||
# 启动长期运行模式
|
||||
zeroclaw daemon
|
||||
```
|
||||
|
||||
## Subscription Auth(OpenAI Codex / Claude Code)
|
||||
|
||||
ZeroClaw 现已支持基于订阅的原生鉴权配置(多账号、静态加密存储)。
|
||||
|
||||
- 配置文件:`~/.zeroclaw/auth-profiles.json`
|
||||
- 加密密钥:`~/.zeroclaw/.secret_key`
|
||||
- Profile ID 格式:`<provider>:<profile_name>`(例:`openai-codex:work`)
|
||||
|
||||
OpenAI Codex OAuth(ChatGPT 订阅):
|
||||
|
||||
```bash
|
||||
# 推荐用于服务器/无显示器环境
|
||||
zeroclaw auth login --provider openai-codex --device-code
|
||||
|
||||
# 浏览器/回调流程,支持粘贴回退
|
||||
zeroclaw auth login --provider openai-codex --profile default
|
||||
zeroclaw auth paste-redirect --provider openai-codex --profile default
|
||||
|
||||
# 检查 / 刷新 / 切换 profile
|
||||
zeroclaw auth status
|
||||
zeroclaw auth refresh --provider openai-codex --profile default
|
||||
zeroclaw auth use --provider openai-codex --profile work
|
||||
```
|
||||
|
||||
Claude Code / Anthropic setup-token:
|
||||
|
||||
```bash
|
||||
# 粘贴订阅/setup token(Authorization header 模式)
|
||||
zeroclaw auth paste-token --provider anthropic --profile default --auth-kind authorization
|
||||
|
||||
# 别名命令
|
||||
zeroclaw auth setup-token --provider anthropic --profile default
|
||||
```
|
||||
|
||||
使用 subscription auth 运行 agent:
|
||||
|
||||
```bash
|
||||
zeroclaw agent --provider openai-codex -m "hello"
|
||||
zeroclaw agent --provider openai-codex --auth-profile openai-codex:work -m "hello"
|
||||
|
||||
# Anthropic 同时支持 API key 和 auth token 环境变量:
|
||||
# ANTHROPIC_AUTH_TOKEN, ANTHROPIC_OAUTH_TOKEN, ANTHROPIC_API_KEY
|
||||
zeroclaw agent --provider anthropic -m "hello"
|
||||
```
|
||||
|
||||
## 架构
|
||||
|
||||
每个子系统都是一个 **Trait** — 通过配置切换即可更换实现,无需修改代码。
|
||||
|
||||
<p align="center">
|
||||
<img src="docs/architecture.svg" alt="ZeroClaw 架构图" width="900" />
|
||||
</p>
|
||||
|
||||
| 子系统 | Trait | 内置实现 | 扩展方式 |
|
||||
|--------|-------|----------|----------|
|
||||
| **AI 模型** | `Provider` | 通过 `zeroclaw providers` 查看(当前 28 个内置 + 别名,以及自定义端点) | `custom:https://your-api.com`(OpenAI 兼容)或 `anthropic-custom:https://your-api.com` |
|
||||
| **通道** | `Channel` | CLI, Telegram, Discord, Slack, Mattermost, iMessage, Matrix, Signal, WhatsApp, Linq, Email, IRC, Lark, DingTalk, QQ, Webhook | 任意消息 API |
|
||||
| **记忆** | `Memory` | SQLite 混合搜索, PostgreSQL 后端, Lucid 桥接, Markdown 文件, 显式 `none` 后端, 快照/恢复, 可选响应缓存 | 任意持久化后端 |
|
||||
| **工具** | `Tool` | shell/file/memory, cron/schedule, git, pushover, browser, http_request, screenshot/image_info, composio (opt-in), delegate, 硬件工具 | 任意能力 |
|
||||
| **可观测性** | `Observer` | Noop, Log, Multi | Prometheus, OTel |
|
||||
| **运行时** | `RuntimeAdapter` | Native, Docker(沙箱) | 通过 adapter 添加;不支持的类型会快速失败 |
|
||||
| **安全** | `SecurityPolicy` | Gateway 配对, 沙箱, allowlist, 速率限制, 文件系统作用域, 加密密钥 | — |
|
||||
| **身份** | `IdentityConfig` | OpenClaw (markdown), AIEOS v1.1 (JSON) | 任意身份格式 |
|
||||
| **隧道** | `Tunnel` | None, Cloudflare, Tailscale, ngrok, Custom | 任意隧道工具 |
|
||||
| **心跳** | Engine | HEARTBEAT.md 定期任务 | — |
|
||||
| **技能** | Loader | TOML 清单 + SKILL.md 指令 | 社区技能包 |
|
||||
| **集成** | Registry | 9 个分类下 70+ 集成 | 插件系统 |
|
||||
|
||||
### 运行时支持(当前)
|
||||
|
||||
- ✅ 当前支持:`runtime.kind = "native"` 或 `runtime.kind = "docker"`
|
||||
- 🚧 计划中,尚未实现:WASM / 边缘运行时
|
||||
|
||||
配置了不支持的 `runtime.kind` 时,ZeroClaw 会以明确的错误退出,而非静默回退到 native。
|
||||
|
||||
### 记忆系统(全栈搜索引擎)
|
||||
|
||||
全部自研,零外部依赖 — 无需 Pinecone、Elasticsearch、LangChain:
|
||||
|
||||
| 层级 | 实现 |
|
||||
|------|------|
|
||||
| **向量数据库** | Embeddings 以 BLOB 存储于 SQLite,余弦相似度搜索 |
|
||||
| **关键词搜索** | FTS5 虚拟表,BM25 评分 |
|
||||
| **混合合并** | 自定义加权合并函数(`vector.rs`) |
|
||||
| **Embeddings** | `EmbeddingProvider` trait — OpenAI、自定义 URL 或 noop |
|
||||
| **分块** | 基于行的 Markdown 分块器,保留标题结构 |
|
||||
| **缓存** | SQLite `embedding_cache` 表,LRU 淘汰策略 |
|
||||
| **安全重索引** | 原子化重建 FTS5 + 重新嵌入缺失向量 |
|
||||
|
||||
Agent 通过工具自动进行记忆的回忆、保存和管理。
|
||||
|
||||
```toml
|
||||
[memory]
|
||||
backend = "sqlite" # "sqlite", "lucid", "postgres", "markdown", "none"
|
||||
auto_save = true
|
||||
embedding_provider = "none" # "none", "openai", "custom:https://..."
|
||||
vector_weight = 0.7
|
||||
keyword_weight = 0.3
|
||||
```
|
||||
|
||||
## 安全默认行为(关键)
|
||||
|
||||
- Gateway 默认绑定:`127.0.0.1:42617`
|
||||
- Gateway 默认要求配对:`require_pairing = true`
|
||||
- 默认拒绝公网绑定:`allow_public_bind = false`
|
||||
- Channel allowlist 语义:
|
||||
- 空列表 `[]` => deny-by-default
|
||||
- `["*"]` => allow all(仅在明确知道风险时使用)
|
||||
|
||||
## 常用配置片段
|
||||
|
||||
```toml
|
||||
api_key = "sk-..."
|
||||
default_provider = "openrouter"
|
||||
default_model = "anthropic/claude-sonnet-4-6"
|
||||
default_temperature = 0.7
|
||||
|
||||
[memory]
|
||||
backend = "sqlite" # sqlite | lucid | postgres | markdown | none
|
||||
auto_save = true
|
||||
embedding_provider = "none" # none | openai | custom:https://...
|
||||
|
||||
[gateway]
|
||||
host = "127.0.0.1"
|
||||
port = 42617
|
||||
require_pairing = true
|
||||
allow_public_bind = false
|
||||
```
|
||||
|
||||
## 文档导航(推荐从这里开始)
|
||||
|
||||
- 文档总览(英文):[`docs/README.md`](docs/README.md)
|
||||
- 统一目录(TOC):[`docs/SUMMARY.md`](docs/SUMMARY.md)
|
||||
- 文档总览(简体中文):[`docs/README.zh-CN.md`](docs/README.zh-CN.md)
|
||||
- 命令参考:[`docs/commands-reference.md`](docs/commands-reference.md)
|
||||
- 配置参考:[`docs/config-reference.md`](docs/config-reference.md)
|
||||
- Provider 参考:[`docs/providers-reference.md`](docs/providers-reference.md)
|
||||
- Channel 参考:[`docs/channels-reference.md`](docs/channels-reference.md)
|
||||
- 运维手册:[`docs/operations-runbook.md`](docs/operations-runbook.md)
|
||||
- 故障排查:[`docs/troubleshooting.md`](docs/troubleshooting.md)
|
||||
- 文档清单与分类:[`docs/docs-inventory.md`](docs/docs-inventory.md)
|
||||
- 项目 triage 快照(2026-02-18):[`docs/project-triage-snapshot-2026-02-18.md`](docs/project-triage-snapshot-2026-02-18.md)
|
||||
|
||||
## 贡献与许可证
|
||||
|
||||
- 贡献指南:[`CONTRIBUTING.md`](CONTRIBUTING.md)
|
||||
- PR 工作流:[`docs/pr-workflow.md`](docs/pr-workflow.md)
|
||||
- Reviewer 指南:[`docs/reviewer-playbook.md`](docs/reviewer-playbook.md)
|
||||
- 许可证:MIT 或 Apache 2.0(见 [`LICENSE-MIT`](LICENSE-MIT)、[`LICENSE-APACHE`](LICENSE-APACHE) 与 [`NOTICE`](NOTICE))
|
||||
|
||||
---
|
||||
|
||||
如果你需要完整实现细节(架构图、全部命令、完整 API、开发流程),请直接阅读英文主文档:[`README.md`](README.md)。
|
||||
@ -13,6 +13,8 @@
|
||||
cargo test telegram --lib
|
||||
```
|
||||
|
||||
Toolchain note: CI/release metadata is aligned with Rust `1.88`; use the same stable toolchain when reproducing release-facing checks locally.
|
||||
|
||||
## 📝 What Was Created For You
|
||||
|
||||
### 1. **test_telegram_integration.sh** (Main Test Suite)
|
||||
@ -298,6 +300,6 @@ If all tests pass:
|
||||
|
||||
## 📞 Support
|
||||
|
||||
- Issues: https://github.com/theonlyhennygod/zeroclaw/issues
|
||||
- Issues: https://github.com/zeroclaw-labs/zeroclaw/issues
|
||||
- Docs: `./TESTING_TELEGRAM.md`
|
||||
- Help: `zeroclaw --help`
|
||||
|
||||
227
SECURITY.md
227
SECURITY.md
@ -6,56 +6,194 @@
|
||||
| ------- | ------------------ |
|
||||
| 0.1.x | :white_check_mark: |
|
||||
|
||||
## Reporting a Vulnerability
|
||||
## Report a Vulnerability (Private)
|
||||
|
||||
**Please do NOT open a public GitHub issue for security vulnerabilities.**
|
||||
Please do not open public GitHub issues for unpatched security vulnerabilities.
|
||||
|
||||
Instead, please report them responsibly:
|
||||
ZeroClaw uses GitHub's private vulnerability reporting and advisory workflow for important security issues.
|
||||
|
||||
1. **Email**: Send details to the maintainers via GitHub private vulnerability reporting
|
||||
2. **GitHub**: Use [GitHub Security Advisories](https://github.com/theonlyhennygod/zeroclaw/security/advisories/new)
|
||||
Preferred reporting paths:
|
||||
|
||||
### What to Include
|
||||
1. If you are a researcher or user:
|
||||
- Go to `Security` -> `Report a vulnerability`.
|
||||
- Private reporting is enabled for this repository.
|
||||
- Use this report template:
|
||||
- English: [`docs/security/private-vulnerability-report-template.md`](docs/security/private-vulnerability-report-template.md)
|
||||
- 中文: [`docs/security/private-vulnerability-report-template.zh-CN.md`](docs/security/private-vulnerability-report-template.zh-CN.md)
|
||||
2. If you are a maintainer/admin opening a draft directly:
|
||||
- <https://github.com/zeroclaw-labs/zeroclaw/security/advisories/new>
|
||||
|
||||
- Description of the vulnerability
|
||||
- Steps to reproduce
|
||||
- Impact assessment
|
||||
- Suggested fix (if any)
|
||||
### What to Include in a Report
|
||||
|
||||
### Response Timeline
|
||||
- Vulnerability summary and security impact
|
||||
- Affected versions, commits, or deployment scope
|
||||
- Reproduction steps and prerequisites
|
||||
- Safe/minimized proof of concept
|
||||
- Suggested mitigation or patch direction (if known)
|
||||
- Any known workaround
|
||||
|
||||
- **Acknowledgment**: Within 48 hours
|
||||
- **Assessment**: Within 1 week
|
||||
- **Fix**: Within 2 weeks for critical issues
|
||||
## Official Channels and Anti-Fraud Notice
|
||||
|
||||
Impersonation scams are a real risk in open communities.
|
||||
|
||||
Security-critical rule:
|
||||
|
||||
- ZeroClaw maintainers will not ask for cryptocurrency, wallet seed phrases, or private financial credentials.
|
||||
- Treat direct-message payment requests as fraudulent unless independently verified in the repository.
|
||||
- Verify announcements using repository sources first.
|
||||
|
||||
Canonical statement and reporting guidance:
|
||||
|
||||
- [docs/security/official-channels-and-fraud-prevention.md](docs/security/official-channels-and-fraud-prevention.md)
|
||||
|
||||
## Maintainer Handling Workflow (GitHub-Native)
|
||||
|
||||
### 1. Intake and triage (private)
|
||||
|
||||
When a report arrives in `Security` -> `Advisories` with `Triage` status:
|
||||
|
||||
1. Confirm whether this is a security issue.
|
||||
2. Choose one path:
|
||||
- `Accept and open as draft` for likely/confirmed security issues.
|
||||
- `Start a temporary private fork` for embargoed fix collaboration.
|
||||
- Request more details in advisory comments.
|
||||
- Close only when confirmed non-security, with rationale.
|
||||
|
||||
Maintainers should run the lifecycle checklist:
|
||||
|
||||
- English: [`docs/security/advisory-maintainer-checklist.md`](docs/security/advisory-maintainer-checklist.md)
|
||||
- 中文: [`docs/security/advisory-maintainer-checklist.zh-CN.md`](docs/security/advisory-maintainer-checklist.zh-CN.md)
|
||||
- Advisory metadata template:
|
||||
- English: [`docs/security/advisory-metadata-template.md`](docs/security/advisory-metadata-template.md)
|
||||
- 中文: [`docs/security/advisory-metadata-template.zh-CN.md`](docs/security/advisory-metadata-template.zh-CN.md)
|
||||
|
||||
### 2. Private fix development and verification
|
||||
|
||||
Develop embargoed fixes in the advisory temporary private fork.
|
||||
|
||||
Important constraints in temporary private forks:
|
||||
|
||||
- Status checks do not run there.
|
||||
- Branch protection rules are not enforced there.
|
||||
- You cannot merge individual PRs one by one there.
|
||||
|
||||
Required verification before disclosure:
|
||||
|
||||
- Reproduce the vulnerability and verify the fix.
|
||||
- Run full local validation:
|
||||
- `cargo test --workspace --all-targets`
|
||||
- Run targeted security regressions:
|
||||
- `cargo test -- security`
|
||||
- `cargo test -- tools::shell`
|
||||
- `cargo test -- tools::file_read`
|
||||
- `cargo test -- tools::file_write`
|
||||
- Ensure no exploit details or secrets leak into public channels.
|
||||
|
||||
### 3. Publish advisory with actionable remediation
|
||||
|
||||
Before publishing a repository security advisory:
|
||||
|
||||
- Fill affected version ranges precisely.
|
||||
- Provide fixed version(s) whenever possible.
|
||||
- Include mitigations when no fixed release is available yet.
|
||||
|
||||
Then publish the advisory to disclose publicly and enable downstream remediation workflows.
|
||||
|
||||
### 4. CVE and post-disclosure maintenance
|
||||
|
||||
- Request a CVE from GitHub when appropriate, or attach existing CVE IDs.
|
||||
- Update affected/fixed version ranges if scope changes.
|
||||
- Backport fixes where needed and keep advisory metadata aligned.
|
||||
|
||||
## Internal Rule for Critical Security Issues
|
||||
|
||||
For high-severity security issues (for example sandbox escape, auth bypass, data exfiltration, or RCE):
|
||||
|
||||
- Do not use public issues as primary tracking before remediation.
|
||||
- Do not publish exploit details in public PRs before advisory publication.
|
||||
- Use GitHub Security Advisory workflow first, then coordinate release/disclosure.
|
||||
|
||||
## Response Timeline Targets
|
||||
|
||||
- Acknowledgment: within 48 hours
|
||||
- Initial triage: within 7 days
|
||||
- Critical fix target: within 14 days (or publish mitigation plan)
|
||||
|
||||
## Severity Levels and SLA Matrix
|
||||
|
||||
These SLAs are target windows for private security handling and may be adjusted based on complexity and dependency constraints.
|
||||
|
||||
| Severity | Typical impact examples | Acknowledgment target | Triage target | Initial mitigation target | Fix release target |
|
||||
| ------- | ----------------------- | --------------------- | ------------- | ------------------------- | ------------------ |
|
||||
| S0 Critical | Active exploitation, unauthenticated RCE, broad data exfiltration | 24 hours | 72 hours | 72 hours | 7 days |
|
||||
| S1 High | Auth bypass, privilege escalation, significant data exposure | 24 hours | 5 days | 7 days | 14 days |
|
||||
| S2 Medium | Constrained exploit path, partial data/control impact | 48 hours | 7 days | 14 days | 30 days |
|
||||
| S3 Low | Limited impact, hard-to-exploit, defense-in-depth gaps | 72 hours | 14 days | As needed | Next planned release |
|
||||
|
||||
SLA guidance notes:
|
||||
|
||||
- Severity is assigned during private triage and can be revised with new evidence.
|
||||
- If active exploitation is observed, prioritize mitigation and containment over full feature work.
|
||||
- When a fixed release is delayed, publish mitigations/workarounds in advisory notes first.
|
||||
|
||||
## Severity Assignment Guide
|
||||
|
||||
Use the S0-S3 matrix as operational severity. CVSS is an input, not the only decision factor.
|
||||
|
||||
| Severity | Typical CVSS range | Assignment guidance |
|
||||
| ------- | ------------------ | ------------------- |
|
||||
| S0 Critical | 9.0-10.0 | Active exploitation or near-term exploitability with severe impact (for example pre-auth RCE or broad data exfiltration). |
|
||||
| S1 High | 7.0-8.9 | High-impact security boundary break with practical exploit path. |
|
||||
| S2 Medium | 4.0-6.9 | Meaningful but constrained impact due to required conditions or lower blast radius. |
|
||||
| S3 Low | 0.1-3.9 | Limited impact or defense-in-depth gap with hard-to-exploit conditions. |
|
||||
|
||||
Severity override rules:
|
||||
|
||||
- Escalate one level when reliable evidence of active exploitation exists.
|
||||
- Escalate one level when affected surface includes default configurations used by most deployments.
|
||||
- De-escalate one level only with documented exploit constraints and validated compensating controls.
|
||||
|
||||
## Public Communication and Commit Hygiene (Pre-Disclosure)
|
||||
|
||||
Before advisory publication:
|
||||
|
||||
- Keep exploit-specific details in private advisory threads only.
|
||||
- Avoid explicit vulnerability naming in public branch names and PR titles.
|
||||
- Keep public commit messages neutral and fix-oriented (avoid step-by-step exploit instructions).
|
||||
- Do not include secrets or sensitive payloads in logs, snippets, or screenshots.
|
||||
|
||||
## Security Architecture
|
||||
|
||||
ZeroClaw implements defense-in-depth security:
|
||||
ZeroClaw uses defense-in-depth controls.
|
||||
|
||||
### Autonomy Levels
|
||||
- **ReadOnly** — Agent can only read, no shell or write access
|
||||
- **Supervised** — Agent can act within allowlists (default)
|
||||
- **Full** — Agent has full access within workspace sandbox
|
||||
|
||||
- `ReadOnly`: read access only, no shell/file write
|
||||
- `Supervised`: policy-constrained actions (default)
|
||||
- `Full`: broader autonomy within workspace sandbox constraints
|
||||
|
||||
### Sandboxing Layers
|
||||
1. **Workspace isolation** — All file operations confined to workspace directory
|
||||
2. **Path traversal blocking** — `..` sequences and absolute paths rejected
|
||||
3. **Command allowlisting** — Only explicitly approved commands can execute
|
||||
4. **Forbidden path list** — Critical system paths (`/etc`, `/root`, `~/.ssh`) always blocked
|
||||
5. **Rate limiting** — Max actions per hour and cost per day caps
|
||||
|
||||
### What We Protect Against
|
||||
- Path traversal attacks (`../../../etc/passwd`)
|
||||
- Command injection (`rm -rf /`, `curl | sh`)
|
||||
- Workspace escape via symlinks or absolute paths
|
||||
- Runaway cost from LLM API calls
|
||||
- Unauthorized shell command execution
|
||||
1. Workspace isolation for file operations
|
||||
2. Path traversal blocking for unsafe path patterns
|
||||
3. Command allowlisting for shell execution
|
||||
4. Forbidden path controls for critical system locations
|
||||
5. Runtime safeguards for rate/cost/safety limits
|
||||
|
||||
### Threats Addressed
|
||||
|
||||
- Path traversal (for example `../../../etc/passwd`)
|
||||
- Command injection (for example `curl | sh`)
|
||||
- Workspace escape via symlink/absolute path abuse
|
||||
- Unauthorized shell execution
|
||||
- Runaway tool/model usage
|
||||
|
||||
## Security Testing
|
||||
|
||||
All security mechanisms are covered by automated tests (129 tests):
|
||||
Core security mechanisms are validated with automated tests:
|
||||
|
||||
```bash
|
||||
cargo test --workspace --all-targets
|
||||
cargo test -- security
|
||||
cargo test -- tools::shell
|
||||
cargo test -- tools::file_read
|
||||
@ -64,14 +202,13 @@ cargo test -- tools::file_write
|
||||
|
||||
## Container Security
|
||||
|
||||
ZeroClaw Docker images follow CIS Docker Benchmark best practices:
|
||||
ZeroClaw images follow CIS Docker Benchmark-oriented hardening.
|
||||
|
||||
| Control | Implementation |
|
||||
|---------|----------------|
|
||||
| **4.1 Non-root user** | Container runs as UID 65534 (distroless nonroot) |
|
||||
| **4.2 Minimal base image** | `gcr.io/distroless/cc-debian12:nonroot` — no shell, no package manager |
|
||||
| **4.6 HEALTHCHECK** | Not applicable (stateless CLI/gateway) |
|
||||
| **5.25 Read-only filesystem** | Supported via `docker run --read-only` with `/workspace` volume |
|
||||
| ------- | -------------- |
|
||||
| 4.1 Non-root user | Container runs as UID 65534 (distroless nonroot) |
|
||||
| 4.2 Minimal base image | `gcr.io/distroless/cc-debian12:nonroot` |
|
||||
| 5.25 Read-only filesystem | Supported via `docker run --read-only` with `/workspace` volume |
|
||||
|
||||
### Verifying Container Security
|
||||
|
||||
@ -87,7 +224,19 @@ docker run --read-only -v /path/to/workspace:/workspace zeroclaw gateway
|
||||
|
||||
### CI Enforcement
|
||||
|
||||
The `docker` job in `.github/workflows/ci.yml` automatically verifies:
|
||||
The `docker` job in `.github/workflows/ci.yml` verifies:
|
||||
|
||||
1. Container does not run as root (UID 0)
|
||||
2. Runtime stage uses `:nonroot` variant
|
||||
3. Explicit `USER` directive with numeric UID exists
|
||||
2. Runtime stage uses `:nonroot` base
|
||||
3. `USER` directive with numeric UID exists
|
||||
|
||||
## References
|
||||
|
||||
- How-tos for fixing vulnerabilities:
|
||||
- <https://docs.github.com/en/enterprise-cloud@latest/code-security/how-tos/report-and-fix-vulnerabilities/fix-reported-vulnerabilities>
|
||||
- Managing privately reported vulnerabilities:
|
||||
- <https://docs.github.com/en/enterprise-cloud@latest/code-security/how-tos/report-and-fix-vulnerabilities/fix-reported-vulnerabilities/managing-privately-reported-security-vulnerabilities>
|
||||
- Collaborating in temporary private forks:
|
||||
- <https://docs.github.com/en/enterprise-cloud@latest/code-security/tutorials/fix-reported-vulnerabilities/collaborate-in-a-fork>
|
||||
- Publishing repository advisories:
|
||||
- <https://docs.github.com/en/enterprise-cloud@latest/code-security/how-tos/report-and-fix-vulnerabilities/fix-reported-vulnerabilities/publishing-a-repository-security-advisory>
|
||||
|
||||
@ -115,6 +115,9 @@ After running automated tests, perform these manual checks:
|
||||
- Send message with @botname mention
|
||||
- Verify: Bot responds and mention is stripped
|
||||
- DM/private chat should always work regardless of mention_only
|
||||
- Regression check (group non-text): verify group media without mention does not trigger bot reply
|
||||
- Regression command:
|
||||
`cargo test -q telegram_mention_only_group_photo_without_caption_is_ignored`
|
||||
|
||||
6. **Error logging**
|
||||
|
||||
@ -297,7 +300,7 @@ on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
runs-on: [self-hosted, aws-india]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
@ -349,4 +352,4 @@ zeroclaw channel doctor
|
||||
- [Telegram Bot API Documentation](https://core.telegram.org/bots/api)
|
||||
- [ZeroClaw Main README](README.md)
|
||||
- [Contributing Guide](CONTRIBUTING.md)
|
||||
- [Issue Tracker](https://github.com/theonlyhennygod/zeroclaw/issues)
|
||||
- [Issue Tracker](https://github.com/zeroclaw-labs/zeroclaw/issues)
|
||||
|
||||
@ -41,6 +41,9 @@ impl BenchProvider {
|
||||
tool_calls: vec![],
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
quota_metadata: None,
|
||||
stop_reason: None,
|
||||
raw_stop_reason: None,
|
||||
}]),
|
||||
}
|
||||
}
|
||||
@ -57,12 +60,18 @@ impl BenchProvider {
|
||||
}],
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
quota_metadata: None,
|
||||
stop_reason: None,
|
||||
raw_stop_reason: None,
|
||||
},
|
||||
ChatResponse {
|
||||
text: Some("done".into()),
|
||||
tool_calls: vec![],
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
quota_metadata: None,
|
||||
stop_reason: None,
|
||||
raw_stop_reason: None,
|
||||
},
|
||||
]),
|
||||
}
|
||||
@ -94,6 +103,9 @@ impl Provider for BenchProvider {
|
||||
tool_calls: vec![],
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
quota_metadata: None,
|
||||
stop_reason: None,
|
||||
raw_stop_reason: None,
|
||||
});
|
||||
}
|
||||
Ok(guard.remove(0))
|
||||
@ -161,6 +173,9 @@ Let me know if you need more."#
|
||||
tool_calls: vec![],
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
quota_metadata: None,
|
||||
stop_reason: None,
|
||||
raw_stop_reason: None,
|
||||
};
|
||||
|
||||
let multi_tool = ChatResponse {
|
||||
@ -179,6 +194,9 @@ Let me know if you need more."#
|
||||
tool_calls: vec![],
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
quota_metadata: None,
|
||||
stop_reason: None,
|
||||
raw_stop_reason: None,
|
||||
};
|
||||
|
||||
c.bench_function("xml_parse_single_tool_call", |b| {
|
||||
@ -213,6 +231,9 @@ fn bench_native_parsing(c: &mut Criterion) {
|
||||
],
|
||||
usage: None,
|
||||
reasoning_content: None,
|
||||
quota_metadata: None,
|
||||
stop_reason: None,
|
||||
raw_stop_reason: None,
|
||||
};
|
||||
|
||||
c.bench_function("native_parse_tool_calls", |b| {
|
||||
|
||||
80
build.rs
Normal file
80
build.rs
Normal file
@ -0,0 +1,80 @@
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
/// Return the abbreviated git SHA of `HEAD` for the repository containing
/// `manifest_dir`, or `None` when git is unavailable, the command fails,
/// the output is not UTF-8, or the trimmed output is empty.
fn git_short_sha(manifest_dir: &str) -> Option<String> {
    let out = Command::new("git")
        .args(["rev-parse", "--short", "HEAD"])
        .current_dir(manifest_dir)
        .output()
        .ok()
        .filter(|o| o.status.success())?;

    let text = String::from_utf8(out.stdout).ok()?;
    let sha = text.trim();
    // Empty output means no usable SHA.
    (!sha.is_empty()).then(|| sha.to_string())
}
|
||||
|
||||
/// Emit `cargo:rerun-if-changed` hints for the repository's `HEAD` and `refs`
/// so the embedded git SHA is refreshed when the checkout moves.
///
/// Best-effort: silently returns if git is missing, the command fails, the
/// output is not UTF-8, or the reported git dir is empty.
fn emit_git_rerun_hints(manifest_dir: &str) {
    let output = Command::new("git")
        .args(["rev-parse", "--git-dir"])
        .current_dir(manifest_dir)
        .output();

    let Ok(output) = output else {
        return;
    };
    if !output.status.success() {
        return;
    }

    let Ok(git_dir_raw) = String::from_utf8(output.stdout) else {
        return;
    };
    let git_dir_raw = git_dir_raw.trim();
    if git_dir_raw.is_empty() {
        return;
    }

    // `git rev-parse --git-dir` may print a relative path; anchor it to the
    // manifest dir. Build the PathBuf once (the original constructed it
    // twice: once for the is_absolute check and once for the value).
    let candidate = PathBuf::from(git_dir_raw);
    let git_dir = if candidate.is_absolute() {
        candidate
    } else {
        PathBuf::from(manifest_dir).join(candidate)
    };

    println!("cargo:rerun-if-changed={}", git_dir.join("HEAD").display());
    println!("cargo:rerun-if-changed={}", git_dir.join("refs").display());
}
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
println!("cargo:rerun-if-env-changed=ZEROCLAW_GIT_SHORT_SHA");
|
||||
|
||||
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string());
|
||||
emit_git_rerun_hints(&manifest_dir);
|
||||
|
||||
let package_version = env::var("CARGO_PKG_VERSION").unwrap_or_else(|_| "0.0.0".to_string());
|
||||
let short_sha = env::var("ZEROCLAW_GIT_SHORT_SHA")
|
||||
.ok()
|
||||
.filter(|v| !v.trim().is_empty())
|
||||
.or_else(|| git_short_sha(&manifest_dir));
|
||||
|
||||
let build_version = if let Some(sha) = short_sha.as_deref() {
|
||||
format!("{package_version} ({sha})")
|
||||
} else {
|
||||
package_version
|
||||
};
|
||||
|
||||
println!("cargo:rustc-env=ZEROCLAW_BUILD_VERSION={build_version}");
|
||||
println!(
|
||||
"cargo:rustc-env=ZEROCLAW_GIT_SHORT_SHA={}",
|
||||
short_sha.unwrap_or_default()
|
||||
);
|
||||
}
|
||||
19
build_and_run.sh
Normal file
19
build_and_run.sh
Normal file
@ -0,0 +1,19 @@
|
||||
#!/bin/bash
# Build ZeroClaw in release mode and print instructions for starting the
# web dashboard (gateway).

echo "Building ZeroClaw in release mode..."

# Test the command's exit status directly instead of the `[ $? -eq 0 ]`
# anti-pattern (which can be clobbered by any command inserted in between).
if cargo build --release; then
    echo "Build successful!"
    echo "To start the web dashboard, run:"
    echo "./target/release/zeroclaw gateway"
    echo ""
    echo "The dashboard will typically be available at http://127.0.0.1:3000/"
    echo "You can also specify a custom port with -p, e.g.:"
    echo "./target/release/zeroclaw gateway -p 8080"
else
    echo "Build failed!"
    exit 1
fi
|
||||
8
build_release.sh
Normal file
8
build_release.sh
Normal file
@ -0,0 +1,8 @@
|
||||
#!/bin/bash
# Build ZeroClaw in release mode
set -e
echo "Building ZeroClaw in release mode..."
# Resolve the repository root from the script's own location instead of the
# previous hardcoded per-user path (/Users/argenisdelarosa/Downloads/zeroclaw),
# which broke the script on every other machine.
cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cargo build --release
echo "Build completed successfully!"
echo "Binary location: target/release/zeroclaw"
|
||||
43
clients/android-bridge/Cargo.toml
Normal file
43
clients/android-bridge/Cargo.toml
Normal file
@ -0,0 +1,43 @@
|
||||
[package]
name = "zeroclaw-android-bridge"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Android JNI bridge for ZeroClaw"

[lib]
# cdylib produces the shared library (libzeroclaw_android.so) loaded from
# Kotlin via JNI.
crate-type = ["cdylib"]
name = "zeroclaw_android"

[dependencies]
# Note: zeroclaw dep commented out until we integrate properly
# zeroclaw = { path = "../.." }
uniffi = { version = "0.27" }
# Minimal tokio - only what we need
tokio = { version = "1", default-features = false, features = ["rt", "rt-multi-thread", "sync"] }
anyhow = "1"
serde = { version = "1", default-features = false, features = ["derive"] }
serde_json = "1"
# Minimal tracing for mobile
tracing = { version = "0.1", default-features = false }
tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "env-filter"] }

# Helper binary that generates the Kotlin bindings (see uniffi-bindgen.rs).
[[bin]]
name = "uniffi-bindgen"
path = "uniffi-bindgen.rs"

# ============================================
# BINARY SIZE OPTIMIZATION
# ============================================
# Target: <3MB native library per ABI

[profile.release]
# Optimize for size over speed
opt-level = "z"     # Smallest binary (was "3" for speed)
lto = true          # Link-time optimization - removes dead code
codegen-units = 1   # Better optimization, slower compile
panic = "abort"     # No unwinding = smaller binary
strip = true        # Strip symbols

[profile.release.package."*"]
opt-level = "z"     # Apply to all dependencies too
|
||||
305
clients/android-bridge/src/lib.rs
Normal file
305
clients/android-bridge/src/lib.rs
Normal file
@ -0,0 +1,305 @@
|
||||
#![forbid(unsafe_code)]
|
||||
|
||||
//! ZeroClaw Android Bridge
|
||||
//!
|
||||
//! This crate provides UniFFI bindings for ZeroClaw to be used from Kotlin/Android.
|
||||
//! It exposes a simplified API for:
|
||||
//! - Starting/stopping the gateway
|
||||
//! - Sending messages to the agent
|
||||
//! - Receiving responses
|
||||
//! - Managing configuration
|
||||
|
||||
use std::sync::{Arc, Mutex, OnceLock};
|
||||
use tokio::runtime::Runtime;
|
||||
|
||||
uniffi::setup_scaffolding!();
|
||||
|
||||
/// Global runtime for async operations
|
||||
static RUNTIME: OnceLock<Runtime> = OnceLock::new();
|
||||
|
||||
fn runtime() -> &'static Runtime {
|
||||
RUNTIME.get_or_init(|| {
|
||||
tokio::runtime::Builder::new_multi_thread()
|
||||
.worker_threads(2)
|
||||
.enable_all()
|
||||
.build()
|
||||
.expect("Failed to create Tokio runtime")
|
||||
})
|
||||
}
|
||||
|
||||
/// Agent status enum exposed to Kotlin.
///
/// Lifecycle driven by `ZeroClawController::start`/`stop`: controllers begin
/// `Stopped`, pass through `Starting`, and reach `Running`.
#[derive(Debug, Clone, uniffi::Enum)]
pub enum AgentStatus {
    // Gateway not running (initial state, and after `stop`).
    Stopped,
    // `start` has begun but the gateway is not yet serving.
    Starting,
    // Gateway is up.
    Running,
    // NOTE(review): nothing in this file sets `Thinking` yet — presumably
    // reserved for the real gateway integration; confirm before relying on it.
    Thinking,
    // Failure with a human-readable message (e.g. a poisoned status lock,
    // see `get_status`).
    Error { message: String },
}
|
||||
|
||||
/// Configuration for the ZeroClaw agent.
#[derive(Debug, Clone, uniffi::Record)]
pub struct ZeroClawConfig {
    // Directory where the agent stores its data; empty by default and
    // expected to be filled by the host (see `ZeroClawController::with_defaults`).
    pub data_dir: String,
    // Provider id; defaults to "anthropic".
    pub provider: String,
    // Model id; defaults to "claude-sonnet-4-5".
    pub model: String,
    // Provider API key; empty until configured (`is_configured` checks this).
    pub api_key: String,
    // Optional system prompt override; `None` by default.
    pub system_prompt: Option<String>,
}

impl Default for ZeroClawConfig {
    /// Defaults: Anthropic provider with `claude-sonnet-4-5`, and empty
    /// `data_dir`/`api_key` that callers must fill in before use.
    fn default() -> Self {
        Self {
            data_dir: String::new(),
            provider: "anthropic".to_string(),
            model: "claude-sonnet-4-5".to_string(),
            api_key: String::new(),
            system_prompt: None,
        }
    }
}
|
||||
|
||||
/// A message in the conversation.
#[derive(Debug, Clone, uniffi::Record)]
pub struct ChatMessage {
    // Process-unique identifier (produced by `uuid_v4`).
    pub id: String,
    // Message body text.
    pub content: String,
    pub role: String, // "user" | "assistant" | "system"
    // Creation time, milliseconds since the Unix epoch
    // (see `current_timestamp_ms`).
    pub timestamp_ms: i64,
}
|
||||
|
||||
/// Response from sending a message.
#[derive(Debug, Clone, uniffi::Record)]
pub struct SendResult {
    // True when the message was accepted.
    pub success: bool,
    // Id of the stored user message when `success` is true.
    pub message_id: Option<String>,
    // Error description when `success` is false.
    pub error: Option<String>,
}
|
||||
|
||||
/// Main ZeroClaw controller exposed to Android.
///
/// All shared state lives behind `Mutex`es; accessor methods clone snapshots
/// out rather than exposing guards.
#[derive(uniffi::Object)]
pub struct ZeroClawController {
    // Active configuration (replaced wholesale by `update_config`).
    config: Mutex<ZeroClawConfig>,
    // Current lifecycle state, reported via `get_status`.
    status: Mutex<AgentStatus>,
    // In-memory conversation history (user + assistant messages).
    messages: Mutex<Vec<ChatMessage>>,
    // TODO: Add actual gateway handle
    // gateway: Mutex<Option<GatewayHandle>>,
}
|
||||
|
||||
#[uniffi::export]
|
||||
impl ZeroClawController {
|
||||
/// Create a new controller with the given config
|
||||
#[uniffi::constructor]
|
||||
pub fn new(config: ZeroClawConfig) -> Arc<Self> {
|
||||
// Initialize logging
|
||||
let _ = tracing_subscriber::fmt()
|
||||
.with_env_filter("zeroclaw=info")
|
||||
.try_init();
|
||||
|
||||
Arc::new(Self {
|
||||
config: Mutex::new(config),
|
||||
status: Mutex::new(AgentStatus::Stopped),
|
||||
messages: Mutex::new(Vec::new()),
|
||||
})
|
||||
}
|
||||
|
||||
/// Create with default config
|
||||
#[uniffi::constructor]
|
||||
pub fn with_defaults(data_dir: String) -> Arc<Self> {
|
||||
let mut config = ZeroClawConfig::default();
|
||||
config.data_dir = data_dir;
|
||||
Self::new(config)
|
||||
}
|
||||
|
||||
/// Start the ZeroClaw gateway
|
||||
pub fn start(&self) -> Result<(), ZeroClawError> {
|
||||
let mut status = self.status.lock().map_err(|_| ZeroClawError::LockError)?;
|
||||
|
||||
if matches!(*status, AgentStatus::Running | AgentStatus::Starting) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
*status = AgentStatus::Starting;
|
||||
drop(status);
|
||||
|
||||
// TODO: Actually start the gateway
|
||||
// runtime().spawn(async move {
|
||||
// let config = zeroclaw::Config::load()?;
|
||||
// let gateway = zeroclaw::Gateway::new(config).await?;
|
||||
// gateway.run().await
|
||||
// });
|
||||
|
||||
// For now, simulate successful start
|
||||
let mut status = self.status.lock().map_err(|_| ZeroClawError::LockError)?;
|
||||
*status = AgentStatus::Running;
|
||||
|
||||
tracing::info!("ZeroClaw gateway started");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Stop the gateway
|
||||
pub fn stop(&self) -> Result<(), ZeroClawError> {
|
||||
let mut status = self.status.lock().map_err(|_| ZeroClawError::LockError)?;
|
||||
|
||||
// TODO: Actually stop the gateway
|
||||
// if let Some(gateway) = self.gateway.lock()?.take() {
|
||||
// gateway.shutdown();
|
||||
// }
|
||||
|
||||
*status = AgentStatus::Stopped;
|
||||
tracing::info!("ZeroClaw gateway stopped");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get current agent status
|
||||
pub fn get_status(&self) -> AgentStatus {
|
||||
self.status
|
||||
.lock()
|
||||
.map(|s| s.clone())
|
||||
.unwrap_or(AgentStatus::Error {
|
||||
message: "Failed to get status".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Send a message to the agent
|
||||
pub fn send_message(&self, content: String) -> SendResult {
|
||||
let msg_id = uuid_v4();
|
||||
|
||||
// Add user message
|
||||
if let Ok(mut messages) = self.messages.lock() {
|
||||
messages.push(ChatMessage {
|
||||
id: msg_id.clone(),
|
||||
content: content.clone(),
|
||||
role: "user".to_string(),
|
||||
timestamp_ms: current_timestamp_ms(),
|
||||
});
|
||||
}
|
||||
|
||||
// TODO: Actually send to gateway and get response
|
||||
// For now, echo back
|
||||
if let Ok(mut messages) = self.messages.lock() {
|
||||
messages.push(ChatMessage {
|
||||
id: uuid_v4(),
|
||||
content: format!("Echo: {}", content),
|
||||
role: "assistant".to_string(),
|
||||
timestamp_ms: current_timestamp_ms(),
|
||||
});
|
||||
}
|
||||
|
||||
SendResult {
|
||||
success: true,
|
||||
message_id: Some(msg_id),
|
||||
error: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get conversation history
|
||||
pub fn get_messages(&self) -> Vec<ChatMessage> {
|
||||
self.messages
|
||||
.lock()
|
||||
.map(|m| m.clone())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Clear conversation history
|
||||
pub fn clear_messages(&self) {
|
||||
if let Ok(mut messages) = self.messages.lock() {
|
||||
messages.clear();
|
||||
}
|
||||
}
|
||||
|
||||
/// Update configuration
|
||||
pub fn update_config(&self, config: ZeroClawConfig) -> Result<(), ZeroClawError> {
|
||||
let mut current = self.config.lock().map_err(|_| ZeroClawError::LockError)?;
|
||||
*current = config;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get current configuration
|
||||
pub fn get_config(&self) -> Result<ZeroClawConfig, ZeroClawError> {
|
||||
self.config
|
||||
.lock()
|
||||
.map(|c| c.clone())
|
||||
.map_err(|_| ZeroClawError::LockError)
|
||||
}
|
||||
|
||||
/// Check if API key is configured
|
||||
pub fn is_configured(&self) -> bool {
|
||||
self.config
|
||||
.lock()
|
||||
.map(|c| !c.api_key.is_empty())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// Errors that can occur in the bridge.
#[derive(Debug, Clone, uniffi::Error)]
pub enum ZeroClawError {
    // Controller used before initialization.
    // NOTE(review): not constructed anywhere in this file yet — confirm intent.
    NotInitialized,
    // Gateway already running.
    // NOTE(review): `start` currently returns Ok(()) when already running
    // instead of this variant — confirm which behavior is desired.
    AlreadyRunning,
    // Invalid or unusable configuration.
    ConfigError { message: String },
    // Failure reported by the underlying gateway.
    GatewayError { message: String },
    // A poisoned or otherwise unacquirable internal mutex.
    LockError,
}
|
||||
|
||||
impl std::fmt::Display for ZeroClawError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::NotInitialized => write!(f, "ZeroClaw not initialized"),
|
||||
Self::AlreadyRunning => write!(f, "Gateway already running"),
|
||||
Self::ConfigError { message } => write!(f, "Config error: {}", message),
|
||||
Self::GatewayError { message } => write!(f, "Gateway error: {}", message),
|
||||
Self::LockError => write!(f, "Failed to acquire lock"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for ZeroClawError {}
|
||||
|
||||
// Helper functions

/// Generate a process-unique hex identifier.
///
/// NOTE: despite the name, this is NOT an RFC 4122 UUID v4. It combines the
/// current nanosecond timestamp with a monotonically increasing counter. The
/// original timestamp-only scheme could return duplicate ids when two calls
/// landed on the same clock reading, and panicked via `.unwrap()` if the
/// system clock read earlier than the Unix epoch; both are fixed here.
fn uuid_v4() -> String {
    use std::sync::atomic::{AtomicU64, Ordering};
    use std::time::{SystemTime, UNIX_EPOCH};

    // Per-process sequence number guaranteeing uniqueness within a tick.
    static SEQ: AtomicU64 = AtomicU64::new(0);

    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_nanos())
        .unwrap_or(0);
    let seq = SEQ.fetch_add(1, Ordering::Relaxed);
    format!("{now:x}-{seq:x}")
}
|
||||
|
||||
/// Current wall-clock time in milliseconds since the Unix epoch, or 0 if
/// the system clock reads earlier than the epoch.
fn current_timestamp_ms() -> i64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis() as i64,
        Err(_) => 0,
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Controllers start in the Stopped state.
    #[test]
    fn test_controller_creation() {
        let controller = ZeroClawController::with_defaults("/tmp/zeroclaw".to_string());
        assert!(matches!(controller.get_status(), AgentStatus::Stopped));
    }

    // start/stop drive the status through Running and back to Stopped.
    #[test]
    fn test_start_stop() {
        let controller = ZeroClawController::with_defaults("/tmp/zeroclaw".to_string());
        controller.start().unwrap();
        assert!(matches!(controller.get_status(), AgentStatus::Running));
        controller.stop().unwrap();
        assert!(matches!(controller.get_status(), AgentStatus::Stopped));
    }

    // send_message records the user message plus the echoed assistant reply.
    #[test]
    fn test_send_message() {
        let controller = ZeroClawController::with_defaults("/tmp/zeroclaw".to_string());
        let result = controller.send_message("Hello".to_string());
        assert!(result.success);

        let messages = controller.get_messages();
        assert_eq!(messages.len(), 2); // User + assistant
    }
}
|
||||
5
clients/android-bridge/uniffi-bindgen.rs
Normal file
5
clients/android-bridge/uniffi-bindgen.rs
Normal file
@ -0,0 +1,5 @@
|
||||
#![forbid(unsafe_code)]

// Entry point for the `uniffi-bindgen` helper binary declared in Cargo.toml;
// delegates to UniFFI's bundled binding-generator CLI to produce the Kotlin
// bindings for the bridge library.
fn main() {
    uniffi::uniffi_bindgen_main()
}
|
||||
108
clients/android/README.md
Normal file
108
clients/android/README.md
Normal file
@ -0,0 +1,108 @@
|
||||
# ZeroClaw Android Client 🦀📱
|
||||
|
||||
Native Android client for ZeroClaw - run your autonomous AI assistant on Android.
|
||||
|
||||
## Features
|
||||
|
||||
- 🚀 **Native Performance** - Kotlin/Jetpack Compose, not a webview
|
||||
- 🔋 **Battery Efficient** - WorkManager, Doze-aware, minimal wake locks
|
||||
- 🔐 **Security First** - Android Keystore for secrets, sandboxed execution
|
||||
- 🦀 **ZeroClaw Core** - Full Rust binary via UniFFI/JNI
|
||||
- 🎨 **Material You** - Dynamic theming, modern Android UX
|
||||
|
||||
## Requirements
|
||||
|
||||
- Android 8.0+ (API 26+)
|
||||
- ~50MB storage
|
||||
- ARM64 (arm64-v8a) or ARMv7 (armeabi-v7a)
|
||||
|
||||
## Building
|
||||
|
||||
### Prerequisites
|
||||
|
||||
```bash
|
||||
# Install Rust Android targets
|
||||
rustup target add aarch64-linux-android armv7-linux-androideabi x86_64-linux-android
|
||||
|
||||
# Install cargo-ndk
|
||||
cargo install cargo-ndk
|
||||
|
||||
# Android SDK (via Android Studio or sdkman)
|
||||
# NDK r25+ required
|
||||
```
|
||||
|
||||
### Build APK
|
||||
|
||||
```bash
|
||||
cd clients/android
|
||||
./gradlew assembleDebug
|
||||
```
|
||||
|
||||
### Build with Rust
|
||||
|
||||
```bash
|
||||
# Build native library first
|
||||
cargo ndk -t arm64-v8a -o app/src/main/jniLibs build --release
|
||||
|
||||
# Then build APK
|
||||
./gradlew assembleRelease
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────┐
|
||||
│ UI (Jetpack Compose) │
|
||||
├─────────────────────────────────────┤
|
||||
│ Service Layer (Kotlin) │
|
||||
│ ├─ ZeroClawService │
|
||||
│ ├─ NotificationHandler │
|
||||
│ └─ WorkManager Jobs │
|
||||
├─────────────────────────────────────┤
|
||||
│ Bridge (UniFFI) │
|
||||
├─────────────────────────────────────┤
|
||||
│ Native (libzeroclaw.so) │
|
||||
└─────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## Status
|
||||
|
||||
✅ **Phase 1: Foundation** (Complete)
|
||||
- [x] Project setup (Kotlin/Compose/Gradle)
|
||||
- [x] Basic JNI bridge stub
|
||||
- [x] Foreground service
|
||||
- [x] Notification channels
|
||||
- [x] Boot receiver
|
||||
|
||||
✅ **Phase 2: Core Features** (Complete)
|
||||
- [x] UniFFI bridge crate
|
||||
- [x] Settings UI (provider/model/API key)
|
||||
- [x] Chat UI scaffold
|
||||
- [x] Theme system (Material 3)
|
||||
|
||||
✅ **Phase 3: Integration** (Complete)
|
||||
- [x] WorkManager for cron/heartbeat
|
||||
- [x] DataStore + encrypted preferences
|
||||
- [x] Quick Settings tile
|
||||
- [x] Share intent handling
|
||||
- [x] Battery optimization helpers
|
||||
- [x] CI workflow for Android builds
|
||||
|
||||
✅ **Phase 4: Polish** (Complete)
|
||||
- [x] Home screen widget
|
||||
- [x] Accessibility utilities (TalkBack support)
|
||||
- [x] One-liner install scripts (Termux, ADB)
|
||||
- [x] Web installer page
|
||||
|
||||
🚀 **Ready for Production**
|
||||
- [ ] Cargo NDK CI integration
|
||||
- [ ] F-Droid submission
|
||||
- [ ] Google Play submission
|
||||
|
||||
## Contributing
|
||||
|
||||
See the RFC in issue discussions for design decisions.
|
||||
|
||||
## License
|
||||
|
||||
Same as ZeroClaw (MIT/Apache-2.0)
|
||||
97
clients/android/SIZE.md
Normal file
97
clients/android/SIZE.md
Normal file
@ -0,0 +1,97 @@
|
||||
# ZeroClaw Android - Binary Size Optimization
|
||||
|
||||
## Target Sizes
|
||||
|
||||
| Component | Target | Notes |
|
||||
|-----------|--------|-------|
|
||||
| Native lib (per ABI) | <3MB | Rust, optimized for size |
|
||||
| APK (arm64-v8a) | <10MB | Single ABI, most users |
|
||||
| APK (universal) | <20MB | All ABIs, fallback |
|
||||
|
||||
## Optimization Strategy
|
||||
|
||||
### 1. Rust Native Library
|
||||
|
||||
```toml
|
||||
[profile.release]
|
||||
opt-level = "z" # Optimize for size
|
||||
lto = true # Link-time optimization
|
||||
codegen-units = 1 # Better optimization
|
||||
panic = "abort" # No unwinding overhead
|
||||
strip = true # Remove symbols
|
||||
```
|
||||
|
||||
**Expected savings:** ~40% reduction vs default release
|
||||
|
||||
### 2. Android APK
|
||||
|
||||
**Enabled:**
|
||||
- R8 minification (`isMinifyEnabled = true`)
|
||||
- Resource shrinking (`isShrinkResources = true`)
|
||||
- ABI splits (users download only their arch)
|
||||
- Aggressive ProGuard rules
|
||||
|
||||
**Removed:**
|
||||
- `material-icons-extended` (~5MB → 0MB)
|
||||
- `kotlinx-serialization` (~300KB, unused)
|
||||
- `ui-tooling-preview` (~100KB, debug only)
|
||||
- Debug symbols in release
|
||||
|
||||
### 3. Dependencies Audit
|
||||
|
||||
| Dependency | Size | Keep? |
|
||||
|------------|------|-------|
|
||||
| Compose BOM | ~3MB | ✅ Required |
|
||||
| Material3 | ~1MB | ✅ Required |
|
||||
| material-icons-extended | ~5MB | ❌ Removed |
|
||||
| Navigation | ~200KB | ✅ Required |
|
||||
| DataStore | ~100KB | ✅ Required |
|
||||
| WorkManager | ~300KB | ✅ Required |
|
||||
| Security-crypto | ~100KB | ✅ Required |
|
||||
| Coroutines | ~200KB | ✅ Required |
|
||||
| Serialization | ~300KB | ❌ Removed (unused) |
|
||||
|
||||
### 4. Split APKs
|
||||
|
||||
```kotlin
|
||||
splits {
|
||||
abi {
|
||||
isEnable = true
|
||||
include("arm64-v8a", "armeabi-v7a", "x86_64")
|
||||
isUniversalApk = true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Result:**
|
||||
- `app-arm64-v8a-release.apk` → ~10MB (90% of users)
|
||||
- `app-armeabi-v7a-release.apk` → ~9MB (older devices)
|
||||
- `app-x86_64-release.apk` → ~10MB (emulators)
|
||||
- `app-universal-release.apk` → ~18MB (fallback)
|
||||
|
||||
## Measuring Size
|
||||
|
||||
```bash
|
||||
# Build release APK
|
||||
./gradlew assembleRelease
|
||||
|
||||
# Check sizes
|
||||
ls -lh app/build/outputs/apk/release/
|
||||
|
||||
# Analyze APK contents
|
||||
$ANDROID_HOME/build-tools/34.0.0/apkanalyzer apk summary app-release.apk
|
||||
```
|
||||
|
||||
## Future Optimizations
|
||||
|
||||
1. **Baseline Profiles** - Pre-compile hot paths
|
||||
2. **R8 full mode** - More aggressive shrinking
|
||||
3. **Custom Compose compiler** - Smaller runtime
|
||||
4. **WebP images** - Smaller than PNG
|
||||
5. **Dynamic delivery** - On-demand features
|
||||
|
||||
## Philosophy
|
||||
|
||||
> "Zero overhead. Zero compromise."
|
||||
|
||||
Every KB matters. We ship what users need, nothing more.
|
||||
140
clients/android/app/build.gradle.kts
Normal file
140
clients/android/app/build.gradle.kts
Normal file
@ -0,0 +1,140 @@
|
||||
// Gradle plugins: Android application module + Kotlin for Android.
plugins {
    id("com.android.application")
    id("org.jetbrains.kotlin.android")
}
|
||||
|
||||
// Android build configuration for the ZeroClaw client.
// Size-sensitive: minification, resource shrinking, and ABI splits are all
// enabled (see SIZE.md for the rationale and measurements).
android {
    namespace = "ai.zeroclaw.android"
    compileSdk = 34

    defaultConfig {
        applicationId = "ai.zeroclaw.android"
        minSdk = 26
        targetSdk = 34
        versionCode = 1
        versionName = "0.1.0"

        testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"

        vectorDrawables {
            useSupportLibrary = true
        }

        // ABIs the Rust native library is built for.
        ndk {
            abiFilters += listOf("arm64-v8a", "armeabi-v7a", "x86_64")
        }
    }

    buildTypes {
        release {
            isMinifyEnabled = true
            isShrinkResources = true
            proguardFiles(
                getDefaultProguardFile("proguard-android-optimize.txt"),
                "proguard-rules.pro"
            )
            // Aggressive optimization: drop native debug symbols in release.
            ndk {
                debugSymbolLevel = "NONE"
            }
        }
        debug {
            isDebuggable = true
            // Distinct app id so debug and release can coexist on a device.
            applicationIdSuffix = ".debug"
        }
    }

    // Split APKs by ABI - users only download what they need
    splits {
        abi {
            isEnable = true
            reset()
            include("arm64-v8a", "armeabi-v7a", "x86_64")
            isUniversalApk = true // Also build universal for fallback
        }
    }

    compileOptions {
        sourceCompatibility = JavaVersion.VERSION_17
        targetCompatibility = JavaVersion.VERSION_17
    }

    kotlinOptions {
        jvmTarget = "17"
    }

    buildFeatures {
        compose = true
        buildConfig = true
    }

    composeOptions {
        kotlinCompilerExtensionVersion = "1.5.8"
    }

    packaging {
        resources {
            excludes += "/META-INF/{AL2.0,LGPL2.1}"
        }
    }
}

// Task to build the Rust native library before packaging the APK.
// Registered at project level — it was previously declared *inside* the
// `android { }` extension block, where it only resolved through the implicit
// outer Project receiver (confusing and fragile).
// NOTE(review): `exec { }` inside doLast is incompatible with Gradle's
// configuration cache; migrate to providers.exec or an Exec task before
// enabling that feature — confirm.
tasks.register("buildRustLibrary") {
    doLast {
        exec {
            workingDir = rootProject.projectDir.parentFile.parentFile // zeroclaw root
            commandLine(
                "cargo", "ndk",
                "-t", "arm64-v8a",
                "-t", "armeabi-v7a",
                "-t", "x86_64",
                "-o", "clients/android/app/src/main/jniLibs",
                "build", "--release", "-p", "zeroclaw-android-bridge"
            )
        }
    }
}
|
||||
|
||||
// Dependency set is intentionally minimal — see SIZE.md for the size audit.
dependencies {
    // Core Android
    implementation("androidx.core:core-ktx:1.12.0")
    implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.7.0")
    implementation("androidx.lifecycle:lifecycle-viewmodel-compose:2.7.0")
    implementation("androidx.activity:activity-compose:1.8.2")

    // Compose - minimal set (versions pinned by the BOM below)
    implementation(platform("androidx.compose:compose-bom:2024.02.00"))
    implementation("androidx.compose.ui:ui")
    implementation("androidx.compose.ui:ui-graphics")
    implementation("androidx.compose.material3:material3")
    // NOTE: Using material-icons-core (small) instead of extended (5MB+)
    // Add individual icons via drawable if needed

    // Navigation
    implementation("androidx.navigation:navigation-compose:2.7.7")

    // DataStore (preferences)
    implementation("androidx.datastore:datastore-preferences:1.0.0")

    // WorkManager (background tasks)
    implementation("androidx.work:work-runtime-ktx:2.9.0")

    // Security (Keystore)
    implementation("androidx.security:security-crypto:1.1.0-alpha06")

    // Coroutines
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3")

    // NOTE: Serialization removed - not used yet, saves ~300KB
    // Add back when needed: implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.6.2")

    // Testing
    testImplementation("junit:junit:4.13.2")
    androidTestImplementation("androidx.test.ext:junit:1.1.5")
    androidTestImplementation("androidx.test.espresso:espresso-core:3.5.1")
    androidTestImplementation(platform("androidx.compose:compose-bom:2024.02.00"))
    androidTestImplementation("androidx.compose.ui:ui-test-junit4")

    // Debug
    debugImplementation("androidx.compose.ui:ui-tooling")
    debugImplementation("androidx.compose.ui:ui-test-manifest")
}
|
||||
67
clients/android/app/proguard-rules.pro
vendored
Normal file
67
clients/android/app/proguard-rules.pro
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
# ZeroClaw Android ProGuard Rules
|
||||
# Goal: Smallest possible APK
|
||||
|
||||
# ============================================
|
||||
# KEEP NATIVE BRIDGE
|
||||
# ============================================
|
||||
-keep class ai.zeroclaw.android.bridge.** { *; }
|
||||
-keepclassmembers class ai.zeroclaw.android.bridge.** { *; }
|
||||
|
||||
# Keep JNI methods
|
||||
-keepclasseswithmembernames class * {
|
||||
native <methods>;
|
||||
}
|
||||
|
||||
# ============================================
|
||||
# KEEP DATA CLASSES
|
||||
# ============================================
|
||||
-keep class ai.zeroclaw.android.data.** { *; }
|
||||
-keepclassmembers class ai.zeroclaw.android.data.** { *; }
|
||||
|
||||
# ============================================
|
||||
# KOTLIN SERIALIZATION
|
||||
# ============================================
|
||||
-keepattributes *Annotation*, InnerClasses
|
||||
-dontnote kotlinx.serialization.AnnotationsKt
|
||||
-keepclassmembers class kotlinx.serialization.json.** {
|
||||
*** Companion;
|
||||
}
|
||||
-keepclasseswithmembers class kotlinx.serialization.json.** {
|
||||
kotlinx.serialization.KSerializer serializer(...);
|
||||
}
|
||||
|
||||
# ============================================
|
||||
# AGGRESSIVE OPTIMIZATIONS
|
||||
# ============================================
|
||||
|
||||
# Remove logging in release
|
||||
-assumenosideeffects class android.util.Log {
|
||||
public static int v(...);
|
||||
public static int d(...);
|
||||
public static int i(...);
|
||||
}
|
||||
|
||||
# KEEP Kotlin null checks - stripping them hides bugs and causes crashes
|
||||
# (Previously removed; CodeRabbit HIGH severity fix)
|
||||
# -assumenosideeffects class kotlin.jvm.internal.Intrinsics { ... }
|
||||
|
||||
# Optimize enums
|
||||
-optimizations !code/simplification/enum*
|
||||
|
||||
# Remove unused Compose stuff
|
||||
-dontwarn androidx.compose.**
|
||||
|
||||
# ============================================
|
||||
# SIZE OPTIMIZATIONS
|
||||
# ============================================
|
||||
|
||||
# Merge classes where possible
|
||||
-repackageclasses ''
|
||||
-allowaccessmodification
|
||||
|
||||
# Remove unused code paths
|
||||
-optimizationpasses 5
|
||||
|
||||
# Don't keep attributes we don't need
|
||||
-keepattributes SourceFile,LineNumberTable # Keep for crash reports
|
||||
-renamesourcefileattribute SourceFile
|
||||
129
clients/android/app/src/main/AndroidManifest.xml
Normal file
129
clients/android/app/src/main/AndroidManifest.xml
Normal file
@ -0,0 +1,129 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:tools="http://schemas.android.com/tools">
|
||||
|
||||
<!-- Network -->
|
||||
<uses-permission android:name="android.permission.INTERNET" />
|
||||
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
|
||||
|
||||
<!-- Background execution -->
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_DATA_SYNC" />
|
||||
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
|
||||
<uses-permission android:name="android.permission.WAKE_LOCK" />
|
||||
|
||||
<!-- Notifications -->
|
||||
<uses-permission android:name="android.permission.POST_NOTIFICATIONS" />
|
||||
|
||||
<!-- Battery optimization (optional - for requesting exemption) -->
|
||||
<uses-permission android:name="android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS" />
|
||||
|
||||
<application
|
||||
android:name=".ZeroClawApp"
|
||||
android:allowBackup="true"
|
||||
android:icon="@mipmap/ic_launcher"
|
||||
android:label="@string/app_name"
|
||||
android:roundIcon="@mipmap/ic_launcher_round"
|
||||
android:supportsRtl="true"
|
||||
android:theme="@style/Theme.ZeroClaw"
|
||||
tools:targetApi="34">
|
||||
|
||||
<!-- Main Activity -->
|
||||
<activity
|
||||
android:name=".MainActivity"
|
||||
android:exported="true"
|
||||
android:label="@string/app_name"
|
||||
android:theme="@style/Theme.ZeroClaw"
|
||||
android:launchMode="singleTop">
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.MAIN" />
|
||||
<category android:name="android.intent.category.LAUNCHER" />
|
||||
</intent-filter>
|
||||
|
||||
<!-- Handle text share intents -->
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.SEND" />
|
||||
<category android:name="android.intent.category.DEFAULT" />
|
||||
<data android:mimeType="text/plain" />
|
||||
</intent-filter>
|
||||
|
||||
<!-- Handle URL share intents -->
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.SEND" />
|
||||
<category android:name="android.intent.category.DEFAULT" />
|
||||
<data android:mimeType="text/uri-list" />
|
||||
</intent-filter>
|
||||
|
||||
<!-- Handle image share intents -->
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.SEND" />
|
||||
<category android:name="android.intent.category.DEFAULT" />
|
||||
<data android:mimeType="image/*" />
|
||||
</intent-filter>
|
||||
</activity>
|
||||
|
||||
<!-- Background Service -->
|
||||
<service
|
||||
android:name=".service.ZeroClawService"
|
||||
android:exported="false"
|
||||
android:foregroundServiceType="dataSync" />
|
||||
|
||||
<!-- Quick Settings Tile -->
|
||||
<service
|
||||
android:name=".tile.ZeroClawTileService"
|
||||
android:exported="true"
|
||||
android:icon="@drawable/ic_notification"
|
||||
android:label="@string/app_name"
|
||||
android:permission="android.permission.BIND_QUICK_SETTINGS_TILE">
|
||||
<intent-filter>
|
||||
<action android:name="android.service.quicksettings.action.QS_TILE" />
|
||||
</intent-filter>
|
||||
<meta-data
|
||||
android:name="android.service.quicksettings.ACTIVE_TILE"
|
||||
android:value="false" />
|
||||
</service>
|
||||
|
||||
<!-- Boot Receiver -->
|
||||
<receiver
|
||||
android:name=".receiver.BootReceiver"
|
||||
android:exported="true"
|
||||
android:enabled="true">
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.BOOT_COMPLETED" />
|
||||
<action android:name="android.intent.action.QUICKBOOT_POWERON" />
|
||||
<action android:name="android.intent.action.MY_PACKAGE_REPLACED" />
|
||||
</intent-filter>
|
||||
</receiver>
|
||||
|
||||
<!-- Home Screen Widget -->
|
||||
<receiver
|
||||
android:name=".widget.ZeroClawWidget"
|
||||
android:exported="true"
|
||||
android:label="@string/app_name">
|
||||
<intent-filter>
|
||||
<action android:name="android.appwidget.action.APPWIDGET_UPDATE" />
|
||||
</intent-filter>
|
||||
<intent-filter>
|
||||
<action android:name="ai.zeroclaw.widget.TOGGLE" />
|
||||
<action android:name="ai.zeroclaw.widget.QUICK_MESSAGE" />
|
||||
</intent-filter>
|
||||
<meta-data
|
||||
android:name="android.appwidget.provider"
|
||||
android:resource="@xml/widget_info" />
|
||||
</receiver>
|
||||
|
||||
<!-- WorkManager Initialization (disable default, we initialize manually) -->
|
||||
<provider
|
||||
android:name="androidx.startup.InitializationProvider"
|
||||
android:authorities="${applicationId}.androidx-startup"
|
||||
android:exported="false"
|
||||
tools:node="merge">
|
||||
<meta-data
|
||||
android:name="androidx.work.WorkManagerInitializer"
|
||||
android:value="androidx.startup"
|
||||
tools:node="remove" />
|
||||
</provider>
|
||||
|
||||
</application>
|
||||
|
||||
</manifest>
|
||||
@ -0,0 +1,212 @@
|
||||
package ai.zeroclaw.android
|
||||
|
||||
import android.os.Bundle
|
||||
import androidx.activity.ComponentActivity
|
||||
import androidx.activity.compose.setContent
|
||||
import androidx.compose.foundation.layout.*
|
||||
import androidx.compose.material3.*
|
||||
import androidx.compose.runtime.*
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.text.style.TextAlign
|
||||
import androidx.compose.ui.unit.dp
|
||||
import ai.zeroclaw.android.ui.theme.ZeroClawTheme
|
||||
|
||||
/**
 * Single-activity entry point; all UI lives in the [ZeroClawApp] composable.
 */
class MainActivity : ComponentActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContent {
            ZeroClawTheme {
                // Fill the window with the themed background before content.
                Surface(
                    modifier = Modifier.fillMaxSize(),
                    color = MaterialTheme.colorScheme.background
                ) {
                    ZeroClawApp()
                }
            }
        }
    }
}
|
||||
|
||||
/**
 * Root screen: top bar with a status badge, the chat transcript (or an empty
 * state), and a message composer pinned to the bottom.
 *
 * All state is held locally via `remember` — nothing is persisted and nothing
 * reaches the native layer yet (see the TODO on send).
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun ZeroClawApp() {
    var agentStatus by remember { mutableStateOf(AgentStatus.Stopped) }
    var messages by remember { mutableStateOf(listOf<ChatMessage>()) }
    var inputText by remember { mutableStateOf("") }

    Scaffold(
        topBar = {
            TopAppBar(
                title = { Text("ZeroClaw") },
                actions = {
                    StatusIndicator(status = agentStatus)
                }
            )
        },
        bottomBar = {
            ChatInput(
                text = inputText,
                onTextChange = { inputText = it },
                onSend = {
                    // Ignore empty/whitespace-only sends.
                    if (inputText.isNotBlank()) {
                        messages = messages + ChatMessage(
                            content = inputText,
                            isUser = true
                        )
                        inputText = ""
                        // TODO: Send to native layer
                    }
                }
            )
        }
    ) { padding ->
        Column(
            modifier = Modifier
                .fillMaxSize()
                .padding(padding)
        ) {
            if (messages.isEmpty()) {
                EmptyState(
                    status = agentStatus,
                    // NOTE(review): only flips the UI badge; actual agent
                    // start-up is not wired in yet — confirm intended.
                    onStart = { agentStatus = AgentStatus.Running }
                )
            } else {
                ChatMessageList(
                    messages = messages,
                    modifier = Modifier.weight(1f)
                )
            }
        }
    }
}
|
||||
|
||||
/**
 * Pill-shaped badge in the app bar showing the agent's current state:
 * tinted background at 20% alpha with a matching label.
 */
@Composable
fun StatusIndicator(status: AgentStatus) {
    val badgeColor = when (status) {
        AgentStatus.Running -> MaterialTheme.colorScheme.primary
        AgentStatus.Stopped -> MaterialTheme.colorScheme.outline
        AgentStatus.Error -> MaterialTheme.colorScheme.error
    }
    val label = when (status) {
        AgentStatus.Running -> "Running"
        AgentStatus.Stopped -> "Stopped"
        AgentStatus.Error -> "Error"
    }

    Surface(
        color = badgeColor.copy(alpha = 0.2f),
        shape = MaterialTheme.shapes.small
    ) {
        Text(
            text = label,
            modifier = Modifier.padding(horizontal = 12.dp, vertical = 4.dp),
            color = badgeColor,
            style = MaterialTheme.typography.labelMedium
        )
    }
}
|
||||
|
||||
/**
 * Shown while the transcript is empty: app branding plus a "Start Agent"
 * button (the button is only visible when the agent is stopped).
 */
@Composable
fun EmptyState(status: AgentStatus, onStart: () -> Unit) {
    Column(
        modifier = Modifier
            .fillMaxSize()
            .padding(32.dp),
        horizontalAlignment = Alignment.CenterHorizontally,
        verticalArrangement = Arrangement.Center
    ) {
        Text(
            text = "🦀",
            style = MaterialTheme.typography.displayLarge
        )
        Spacer(modifier = Modifier.height(16.dp))
        Text(
            text = "ZeroClaw",
            style = MaterialTheme.typography.headlineMedium
        )
        Spacer(modifier = Modifier.height(8.dp))
        Text(
            text = "Your AI assistant, running locally",
            style = MaterialTheme.typography.bodyMedium,
            color = MaterialTheme.colorScheme.onSurfaceVariant,
            textAlign = TextAlign.Center
        )
        Spacer(modifier = Modifier.height(32.dp))

        if (status == AgentStatus.Stopped) {
            Button(onClick = onStart) {
                Text("Start Agent")
            }
        }
    }
}
|
||||
|
||||
/**
 * Bottom-pinned message composer: single-line text field plus a send button.
 * State is hoisted — the caller owns [text] and reacts via [onTextChange]
 * and [onSend].
 */
@Composable
fun ChatInput(
    text: String,
    onTextChange: (String) -> Unit,
    onSend: () -> Unit
) {
    Surface(
        tonalElevation = 3.dp
    ) {
        Row(
            modifier = Modifier
                .fillMaxWidth()
                .padding(8.dp),
            verticalAlignment = Alignment.CenterVertically
        ) {
            OutlinedTextField(
                value = text,
                onValueChange = onTextChange,
                modifier = Modifier.weight(1f),
                placeholder = { Text("Message ZeroClaw...") },
                singleLine = true
            )
            Spacer(modifier = Modifier.width(8.dp))
            // Text glyph instead of an icon asset — avoids pulling in the
            // heavy material-icons-extended dependency.
            IconButton(onClick = onSend) {
                Text("→")
            }
        }
    }
}
|
||||
|
||||
/**
 * Renders the chat transcript top-to-bottom, one bubble per message.
 *
 * NOTE(review): a plain Column composes every message and does not recycle —
 * fine for this scaffold, but long histories should move to LazyColumn;
 * confirm before shipping.
 */
@Composable
fun ChatMessageList(messages: List<ChatMessage>, modifier: Modifier = Modifier) {
    Column(modifier = modifier.padding(16.dp)) {
        messages.forEach { message ->
            ChatBubble(message = message)
            Spacer(modifier = Modifier.height(8.dp))
        }
    }
}
|
||||
|
||||
/**
 * A single chat message rendered as a rounded bubble: right-aligned with the
 * primary container color for the user, left-aligned with the surface variant
 * color for the assistant.
 */
@Composable
fun ChatBubble(message: ChatMessage) {
    // (Removed an unused `alignment` local that was computed but never read.)
    val color = if (message.isUser)
        MaterialTheme.colorScheme.primaryContainer
    else
        MaterialTheme.colorScheme.surfaceVariant

    Box(
        modifier = Modifier.fillMaxWidth(),
        contentAlignment = if (message.isUser) Alignment.CenterEnd else Alignment.CenterStart
    ) {
        Surface(
            color = color,
            shape = MaterialTheme.shapes.medium
        ) {
            Text(
                text = message.content,
                modifier = Modifier.padding(12.dp)
            )
        }
    }
}
|
||||
|
||||
// Immutable transcript entry. `isUser` distinguishes user vs. assistant;
// `timestamp` defaults to creation time in epoch milliseconds.
data class ChatMessage(
    val content: String,
    val isUser: Boolean,
    val timestamp: Long = System.currentTimeMillis()
)
|
||||
|
||||
// Coarse agent lifecycle state driving the UI status badge and empty state.
enum class AgentStatus {
    Running, Stopped, Error
}
|
||||
@ -0,0 +1,104 @@
|
||||
package ai.zeroclaw.android
|
||||
|
||||
import android.content.Intent
|
||||
import android.net.Uri
|
||||
|
||||
/**
|
||||
* Handles content shared TO ZeroClaw from other apps.
|
||||
*
|
||||
* Supports:
|
||||
* - Plain text
|
||||
* - URLs
|
||||
* - Images (future)
|
||||
* - Files (future)
|
||||
*/
|
||||
object ShareHandler {

    /** Typed result of parsing an incoming share. */
    sealed class SharedContent {
        data class Text(val text: String) : SharedContent()
        data class Url(val url: String, val title: String? = null) : SharedContent()
        data class Image(val uri: Uri) : SharedContent()
        data class File(val uri: Uri, val mimeType: String) : SharedContent()
        object None : SharedContent()
    }

    /**
     * Parse incoming share intent.
     *
     * Only [Intent.ACTION_SEND] is handled; any other action (including
     * ACTION_SEND_MULTIPLE) or a missing MIME type yields [SharedContent.None].
     */
    fun parseIntent(intent: Intent): SharedContent {
        if (intent.action != Intent.ACTION_SEND) {
            return SharedContent.None
        }

        val type = intent.type ?: return SharedContent.None

        return when {
            type == "text/plain" -> parseTextIntent(intent)
            type == "text/uri-list" -> parseUriListIntent(intent)
            type.startsWith("image/") -> parseImageIntent(intent)
            else -> parseFileIntent(intent, type)
        }
    }

    // Plain text: promoted to Url when it starts with an http(s) scheme
    // (lowercase match, preserving original behavior).
    private fun parseTextIntent(intent: Intent): SharedContent {
        val text = intent.getStringExtra(Intent.EXTRA_TEXT) ?: return SharedContent.None

        if (text.startsWith("http://") || text.startsWith("https://")) {
            val title = intent.getStringExtra(Intent.EXTRA_SUBJECT)
            return SharedContent.Url(text, title)
        }

        return SharedContent.Text(text)
    }

    // text/uri-list: the payload arrives in EXTRA_TEXT with URLs separated by
    // newlines; the first http(s) line wins, otherwise fall back to raw text.
    private fun parseUriListIntent(intent: Intent): SharedContent {
        val text = intent.getStringExtra(Intent.EXTRA_TEXT) ?: return SharedContent.None
        val firstUrl = text.lines().firstOrNull { it.startsWith("http://") || it.startsWith("https://") }
        return if (firstUrl != null) {
            val title = intent.getStringExtra(Intent.EXTRA_SUBJECT)
            SharedContent.Url(firstUrl, title)
        } else {
            SharedContent.Text(text)
        }
    }

    /**
     * Version-aware EXTRA_STREAM extraction. The typed getParcelableExtra
     * overload exists from API 33 (Tiramisu); older versions use the
     * deprecated untyped one. This was previously duplicated verbatim in the
     * image and file parsers.
     */
    private fun streamUri(intent: Intent): Uri? =
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.TIRAMISU) {
            intent.getParcelableExtra(Intent.EXTRA_STREAM, Uri::class.java)
        } else {
            @Suppress("DEPRECATION")
            intent.getParcelableExtra(Intent.EXTRA_STREAM)
        }

    private fun parseImageIntent(intent: Intent): SharedContent =
        streamUri(intent)?.let { SharedContent.Image(it) } ?: SharedContent.None

    private fun parseFileIntent(intent: Intent, mimeType: String): SharedContent =
        streamUri(intent)?.let { SharedContent.File(it, mimeType) } ?: SharedContent.None

    /**
     * Generate a prompt from shared content.
     */
    fun generatePrompt(content: SharedContent): String {
        return when (content) {
            is SharedContent.Text -> "I'm sharing this text with you:\n\n${content.text}"
            is SharedContent.Url -> {
                val title = content.title?.let { "\"$it\"\n" } ?: ""
                "${title}I'm sharing this URL: ${content.url}\n\nPlease summarize or help me with this."
            }
            is SharedContent.Image -> "I'm sharing an image with you. [Image attached]"
            is SharedContent.File -> "I'm sharing a file with you. [File: ${content.mimeType}]"
            SharedContent.None -> ""
        }
    }
}
|
||||
@ -0,0 +1,116 @@
|
||||
package ai.zeroclaw.android
|
||||
|
||||
import android.app.Application
|
||||
import android.app.NotificationChannel
|
||||
import android.app.NotificationManager
|
||||
import android.os.Build
|
||||
import androidx.work.Configuration
|
||||
import androidx.work.WorkManager
|
||||
import ai.zeroclaw.android.data.SettingsRepository
|
||||
import ai.zeroclaw.android.worker.HeartbeatWorker
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.SupervisorJob
|
||||
import kotlinx.coroutines.flow.distinctUntilChanged
|
||||
import kotlinx.coroutines.flow.first
|
||||
import kotlinx.coroutines.flow.map
|
||||
import kotlinx.coroutines.launch
|
||||
|
||||
class ZeroClawApp : Application(), Configuration.Provider {
|
||||
|
||||
companion object {
|
||||
const val CHANNEL_ID = "zeroclaw_service"
|
||||
const val CHANNEL_NAME = "ZeroClaw Agent"
|
||||
const val AGENT_CHANNEL_ID = "zeroclaw_agent"
|
||||
const val AGENT_CHANNEL_NAME = "Agent Messages"
|
||||
|
||||
// Singleton instance for easy access
|
||||
lateinit var instance: ZeroClawApp
|
||||
private set
|
||||
}
|
||||
|
||||
// Application scope for coroutines
|
||||
private val applicationScope = CoroutineScope(SupervisorJob() + Dispatchers.Main)
|
||||
|
||||
// Lazy initialized repositories
|
||||
val settingsRepository by lazy { SettingsRepository(this) }
|
||||
|
||||
override fun onCreate() {
|
||||
super.onCreate()
|
||||
instance = this
|
||||
|
||||
createNotificationChannels()
|
||||
initializeWorkManager()
|
||||
|
||||
// Schedule heartbeat if auto-start is enabled
|
||||
applicationScope.launch {
|
||||
val settings = settingsRepository.settings.first()
|
||||
if (settings.autoStart && settings.isConfigured()) {
|
||||
HeartbeatWorker.scheduleHeartbeat(
|
||||
this@ZeroClawApp,
|
||||
settings.heartbeatIntervalMinutes.toLong()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Listen for settings changes and update heartbeat schedule
|
||||
applicationScope.launch {
|
||||
settingsRepository.settings
|
||||
.map { Triple(it.autoStart, it.isConfigured(), it.heartbeatIntervalMinutes) }
|
||||
.distinctUntilChanged()
|
||||
.collect { (autoStart, isConfigured, intervalMinutes) ->
|
||||
if (autoStart && isConfigured) {
|
||||
HeartbeatWorker.scheduleHeartbeat(this@ZeroClawApp, intervalMinutes.toLong())
|
||||
} else {
|
||||
HeartbeatWorker.cancelHeartbeat(this@ZeroClawApp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Initialize native library
|
||||
// System.loadLibrary("zeroclaw_android")
|
||||
}
|
||||
|
||||
private fun createNotificationChannels() {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
|
||||
val manager = getSystemService(NotificationManager::class.java)
|
||||
|
||||
// Service channel (foreground service - low priority, silent)
|
||||
val serviceChannel = NotificationChannel(
|
||||
CHANNEL_ID,
|
||||
CHANNEL_NAME,
|
||||
NotificationManager.IMPORTANCE_LOW
|
||||
).apply {
|
||||
description = "ZeroClaw background service notification"
|
||||
setShowBadge(false)
|
||||
enableVibration(false)
|
||||
setSound(null, null)
|
||||
}
|
||||
|
||||
// Agent messages channel (high priority for important messages)
|
||||
val agentChannel = NotificationChannel(
|
||||
AGENT_CHANNEL_ID,
|
||||
AGENT_CHANNEL_NAME,
|
||||
NotificationManager.IMPORTANCE_HIGH
|
||||
).apply {
|
||||
description = "Messages and alerts from your AI agent"
|
||||
enableVibration(true)
|
||||
setShowBadge(true)
|
||||
}
|
||||
|
||||
manager.createNotificationChannel(serviceChannel)
|
||||
manager.createNotificationChannel(agentChannel)
|
||||
}
|
||||
}
|
||||
|
||||
private fun initializeWorkManager() {
|
||||
// WorkManager is initialized via Configuration.Provider
|
||||
// This ensures it's ready before any work is scheduled
|
||||
}
|
||||
|
||||
/**
 * Configuration.Provider implementation for custom WorkManager setup.
 * Raises the minimum logging level to INFO to reduce WorkManager log noise.
 */
override val workManagerConfiguration: Configuration
    get() {
        val builder = Configuration.Builder()
        builder.setMinimumLoggingLevel(android.util.Log.INFO)
        return builder.build()
    }
|
||||
}
|
||||
@ -0,0 +1,123 @@
|
||||
package ai.zeroclaw.android.accessibility
|
||||
|
||||
import android.content.Context
|
||||
import android.view.accessibility.AccessibilityManager
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.remember
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.semantics.SemanticsPropertyKey
|
||||
import androidx.compose.ui.semantics.SemanticsPropertyReceiver
|
||||
|
||||
/**
 * Accessibility utilities for ZeroClaw Android.
 *
 * Ensures the app is usable with:
 * - TalkBack (screen reader)
 * - Switch Access
 * - Voice Access
 * - Large text/display size
 */
object AccessibilityUtils {

    /** Fetches the system [AccessibilityManager]; shared by the helpers below (was duplicated three times). */
    private fun accessibilityManager(context: Context): AccessibilityManager =
        context.getSystemService(Context.ACCESSIBILITY_SERVICE) as AccessibilityManager

    /**
     * Check if TalkBack or a similar screen reader is enabled.
     *
     * Uses touch exploration as the screen-reader signal: it is turned on by
     * TalkBack but not by most other accessibility services.
     */
    fun isScreenReaderEnabled(context: Context): Boolean {
        val am = accessibilityManager(context)
        return am.isEnabled && am.isTouchExplorationEnabled
    }

    /** Check if any accessibility service is enabled. */
    fun isAccessibilityEnabled(context: Context): Boolean =
        accessibilityManager(context).isEnabled

    /**
     * Get appropriate content description for agent status.
     *
     * @param isRunning whether the agent is currently running
     * @param isThinking whether the agent is processing a request; takes precedence over [isRunning]
     */
    fun getStatusDescription(isRunning: Boolean, isThinking: Boolean = false): String {
        return when {
            isThinking -> "Agent is thinking and processing your request"
            isRunning -> "Agent is running and ready to help"
            else -> "Agent is stopped. Tap to start"
        }
    }

    /**
     * Get content description for chat messages, spoken as
     * "<sender> at <timestamp>: <content>".
     */
    fun getMessageDescription(
        content: String,
        isUser: Boolean,
        timestamp: String
    ): String {
        val sender = if (isUser) "You said" else "Agent replied"
        return "$sender at $timestamp: $content"
    }

    /**
     * Announce [message] to screen readers via a TYPE_ANNOUNCEMENT event.
     * No-op when accessibility is disabled.
     *
     * NOTE(review): AccessibilityEvent.obtain() is deprecated as of API 33;
     * consider View.announceForAccessibility or the AccessibilityEvent
     * constructor (API 30+) once minSdk allows — confirm the project's minSdk.
     */
    fun announceForAccessibility(context: Context, message: String) {
        val am = accessibilityManager(context)
        if (am.isEnabled) {
            @Suppress("DEPRECATION")
            val event = android.view.accessibility.AccessibilityEvent.obtain(
                android.view.accessibility.AccessibilityEvent.TYPE_ANNOUNCEMENT
            )
            event.text.add(message)
            am.sendAccessibilityEvent(event)
        }
    }
}
|
||||
|
||||
/**
 * Custom semantic property for live regions.
 *
 * NOTE(review): Compose ships its own live-region semantics
 * (androidx.compose.ui.semantics liveRegion / LiveRegionMode); this custom
 * key is separate from that one and will not be picked up by TalkBack
 * automatically — confirm the duplication is intentional.
 */
val LiveRegion = SemanticsPropertyKey<LiveRegionMode>("LiveRegion")

// Delegated accessor so call sites can write `semantics { liveRegion = ... }`.
var SemanticsPropertyReceiver.liveRegion by LiveRegion

/** How changes inside a live region should be announced to screen-reader users. */
enum class LiveRegionMode {
    None,      // Not a live region; changes are not announced
    Polite,    // Announce when user is idle
    Assertive  // Announce immediately
}
|
||||
|
||||
/**
 * Composable that captures the current screen-reader / accessibility status
 * as an [AccessibilityState] for the composition.
 *
 * NOTE(review): the value is computed once per composition and is not
 * refreshed if the user toggles an accessibility service while the screen
 * is visible — confirm that is acceptable for current call sites.
 */
@Composable
fun rememberAccessibilityState(): AccessibilityState {
    val ctx = LocalContext.current
    val state = remember {
        val screenReaderOn = AccessibilityUtils.isScreenReaderEnabled(ctx)
        val anyServiceOn = AccessibilityUtils.isAccessibilityEnabled(ctx)
        AccessibilityState(
            isScreenReaderEnabled = screenReaderOn,
            isAccessibilityEnabled = anyServiceOn
        )
    }
    return state
}
|
||||
|
||||
/**
 * Snapshot of the device's accessibility configuration.
 *
 * @property isScreenReaderEnabled true when a screen reader is active
 *           (derived from touch-exploration being enabled)
 * @property isAccessibilityEnabled true when any accessibility service is enabled
 */
data class AccessibilityState(
    val isScreenReaderEnabled: Boolean,
    val isAccessibilityEnabled: Boolean
)
|
||||
|
||||
/**
 * Content descriptions for common UI elements.
 *
 * Centralized here so screen-reader labels stay consistent across screens
 * and can be updated in one place.
 */
object ContentDescriptions {
    const val TOGGLE_AGENT = "Toggle agent on or off"
    const val SEND_MESSAGE = "Send message"
    const val CLEAR_CHAT = "Clear conversation"
    const val OPEN_SETTINGS = "Open settings"
    const val BACK = "Go back"
    const val AGENT_STATUS = "Agent status"
    const val MESSAGE_INPUT = "Type your message here"
    const val PROVIDER_DROPDOWN = "Select AI provider"
    const val MODEL_DROPDOWN = "Select AI model"
    const val API_KEY_INPUT = "Enter your API key"
    const val SHOW_API_KEY = "Show API key"
    const val HIDE_API_KEY = "Hide API key"
}
|
||||
@ -0,0 +1,110 @@
|
||||
package ai.zeroclaw.android.bridge
|
||||
|
||||
/**
 * JNI bridge to ZeroClaw Rust library.
 *
 * This class will be replaced by UniFFI-generated bindings.
 * For now, it provides stub implementations.
 *
 * All command entry points return [Result] rather than throwing, so callers
 * can uniformly chain onSuccess/onFailure.
 *
 * Native library: libzeroclaw.so
 * Build command: cargo ndk -t arm64-v8a -o app/src/main/jniLibs build --release
 */
object ZeroClawBridge {

    // Flipped to true once initialize() succeeds; guards the other entry points.
    // NOTE(review): plain var, no synchronization — confirm all callers run on
    // one thread, or add @Volatile/locking when the native calls land.
    private var initialized = false

    /**
     * Initialize the ZeroClaw runtime.
     * Must be called before any other methods.
     *
     * @param dataDir app-private directory handed to the native runtime
     */
    fun initialize(dataDir: String): Result<Unit> = runCatching {
        // TODO: Load native library
        // System.loadLibrary("zeroclaw")
        // nativeInit(dataDir)
        initialized = true
    }

    /**
     * Start the ZeroClaw gateway.
     *
     * @param configPath Path to zeroclaw.toml config file
     * @return Result.failure(IllegalStateException) when called before
     *         [initialize]. Previously the check() sat outside runCatching and
     *         threw at the call site, violating the Result contract.
     */
    fun start(configPath: String): Result<Unit> = runCatching {
        // Precondition inside runCatching so the violation is reported
        // through the returned Result instead of being thrown.
        check(initialized) { "ZeroClawBridge not initialized" }
        // TODO: nativeStart(configPath)
    }

    /**
     * Stop the ZeroClaw gateway.
     * Deliberately callable even before [initialize] (idempotent stop).
     */
    fun stop(): Result<Unit> = runCatching {
        // TODO: nativeStop()
    }

    /**
     * Send a message to the agent.
     *
     * @return Result.failure(IllegalStateException) when called before [initialize]
     */
    fun sendMessage(message: String): Result<Unit> = runCatching {
        check(initialized) { "ZeroClawBridge not initialized" }
        // TODO: nativeSendMessage(message)
    }

    /**
     * Poll for the next message from the agent.
     * Returns null if no message available (or when not yet initialized).
     */
    fun pollMessage(): String? {
        if (!initialized) return null
        // TODO: return nativePollMessage()
        return null
    }

    /**
     * Get current agent status.
     * Stub: always [AgentStatus.Stopped] until the native bridge is wired up.
     */
    fun getStatus(): AgentStatus {
        if (!initialized) return AgentStatus.Stopped
        // TODO: return nativeGetStatus()
        return AgentStatus.Stopped
    }

    /** Check if the native library is loaded (stub: tracks [initialize] only). */
    fun isLoaded(): Boolean = initialized

    // Native method declarations (to be implemented)
    // private external fun nativeInit(dataDir: String)
    // private external fun nativeStart(configPath: String)
    // private external fun nativeStop()
    // private external fun nativeSendMessage(message: String)
    // private external fun nativePollMessage(): String?
    // private external fun nativeGetStatus(): Int
}

/** Lifecycle states reported by the agent. */
enum class AgentStatus {
    Stopped,
    Starting,
    Running,
    Thinking,
    Error
}
|
||||
|
||||
/**
 * Configuration for ZeroClaw.
 *
 * @property provider LLM provider identifier (default "anthropic")
 * @property model model identifier passed to the provider
 * @property apiKey provider API key; defaults to empty — presumably meaning
 *           "not configured", verify against consumers
 * @property systemPrompt optional system prompt; null falls through to the
 *           runtime's default behavior
 * @property maxTokens maximum tokens per completion
 * @property temperature sampling temperature
 */
data class ZeroClawConfig(
    val provider: String = "anthropic",
    val model: String = "claude-sonnet-4-5",
    val apiKey: String = "",
    val systemPrompt: String? = null,
    val maxTokens: Int = 4096,
    val temperature: Double = 0.7
)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user