Compare commits: `it` ... `update_Pro` (735 commits)

[Commit table: 735 entries listing abbreviated SHA1s only; the Author and Date columns were empty in the extracted page.]
**.github/workflows/build_docker.yml** (new file, +56 lines)

name: Build and Push Docker Image

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
      - 'book/**'
  workflow_dispatch:

concurrency: build_docker

permissions:
  packages: write
  id-token: write
  contents: write

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    steps:
      # 1. Check out the repository to get the Dockerfile
      - name: Check out code
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      # 2. Log into GitHub Container Registry
      - name: Log in to GHCR
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # 3. Build and push
      - name: Build and push Docker image
        run: |
          # Define image name
          IMAGE_NAME=ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image

          # Build Docker image
          docker build -t $IMAGE_NAME:latest .

          # Push Docker image to GHCR
          docker push $IMAGE_NAME:latest

          # Set image visibility to public
          curl -X PATCH \
            -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
            -H "Accept: application/vnd.github.v3+json" \
            https://api.github.com/user/packages/container/translator-image/visibility \
            -d '{"visibility":"public"}'
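Because the last step flips the package visibility to public, the image that the other workflows consume can also be pulled anonymously. A minimal local sketch (the image name is the one defined in the run step above; the assumption is that the image ships a shell):

```bash
# Pull the public translator image that build_docker.yml publishes to GHCR
docker pull ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image:latest

# Open a shell inside it to inspect the preinstalled toolchain (mdbook, python, etc.)
docker run --rm -it ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image:latest /bin/bash
```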
**.github/workflows/build_master.yml** (modified, 114 lines; the +/- markers below are reconstructed from context, since the extracted page dropped them)

@@ -14,12 +14,15 @@ on:
 concurrency: build_master

 permissions:
+  packages: write
   id-token: write
   contents: write

 jobs:
   run-translation:
     runs-on: ubuntu-latest
+    container:
+      image: ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image:latest
     environment: prod

     steps:
@@ -27,32 +30,99 @@ jobs:
         uses: actions/checkout@v4
         with:
           fetch-depth: 0 #Needed to download everything to be able to access the master & language branches
-
-      # Install Rust and Cargo
-      - name: Install Rust and Cargo
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
-
-      # Install mdBook and Plugins
-      - name: Install mdBook and Plugins
-        run: |
-          cargo install mdbook
-          cargo install mdbook-alerts
-          cargo install mdbook-reading-time
-          cargo install mdbook-pagetoc
-          cargo install mdbook-tabs
-          cargo install mdbook-codename
-
       # Build the mdBook
       - name: Build mdBook
-        run: mdbook build
-
-      # Cat hacktricks-preprocessor.log
-      #- name: Cat hacktricks-preprocessor.log
-      #  run: cat hacktricks-preprocessor.log
+        run: MDBOOK_BOOK__LANGUAGE=en mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && (cat hacktricks-preprocessor.log | tail -n 20) && exit 1)
+
+      - name: Push search index to hacktricks-searchindex repo
+        shell: bash
+        env:
+          PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
+        run: |
+          set -euo pipefail
+
+          ASSET="book/searchindex.js"
+          TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
+          FILENAME="searchindex-cloud-en.js"
+
+          if [ ! -f "$ASSET" ]; then
+            echo "Expected $ASSET to exist after build" >&2
+            exit 1
+          fi
+
+          TOKEN="${PAT_TOKEN}"
+          if [ -z "$TOKEN" ]; then
+            echo "No PAT_TOKEN available" >&2
+            exit 1
+          fi
+
+          # Clone the searchindex repo
+          git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo
+
+          cd /tmp/searchindex-repo
+          git config user.name "GitHub Actions"
+          git config user.email "github-actions@github.com"
+
+          # Save all current files from main branch to temp directory
+          mkdir -p /tmp/searchindex-backup
+          cp -r * /tmp/searchindex-backup/ 2>/dev/null || true
+
+          # Create a fresh orphan branch (no history)
+          git checkout --orphan new-main
+
+          # Remove all files from git index (but keep working directory)
+          git rm -rf . 2>/dev/null || true
+
+          # Restore all the files from backup (keeps all language files)
+          cp -r /tmp/searchindex-backup/* . 2>/dev/null || true
+
+          # Now update/add our English searchindex file
+          # First, compress the original file (in the build directory)
+          cd "${GITHUB_WORKSPACE}"
+          gzip -9 -k -f "$ASSET"
+
+          # Show compression stats
+          ORIGINAL_SIZE=$(wc -c < "$ASSET")
+          COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
+          RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
+          echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"
+
+          # XOR encrypt the compressed file
+          KEY='Prevent_Online_AVs_From_Flagging_HackTricks_Search_Gzip_As_Malicious_394h7gt8rf9u3rf9g'
+          cat > /tmp/xor_encrypt.py << 'EOF'
+          import sys
+          key = sys.argv[1]
+          input_file = sys.argv[2]
+          output_file = sys.argv[3]
+          with open(input_file, 'rb') as f:
+              data = f.read()
+          key_bytes = key.encode('utf-8')
+          encrypted = bytearray(len(data))
+          for i in range(len(data)):
+              encrypted[i] = data[i] ^ key_bytes[i % len(key_bytes)]
+          with open(output_file, 'wb') as f:
+              f.write(encrypted)
+          print(f"Encrypted: {len(data)} bytes")
+          EOF
+          python3 /tmp/xor_encrypt.py "$KEY" "${ASSET}.gz" "${ASSET}.gz.enc"
+
+          # Copy ONLY the encrypted .gz version to the searchindex repo (no uncompressed .js)
+          cd /tmp/searchindex-repo
+          cp "${GITHUB_WORKSPACE}/${ASSET}.gz.enc" "${FILENAME}.gz"
+
+          # Stage all files
+          git add -A
+
+          # Commit with timestamp
+          TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
+          git commit -m "Update searchindex files - ${TIMESTAMP}" --allow-empty
+
+          # Force push to replace master branch (deletes history, keeps all files)
+          git push -f origin new-main:master
+
+          echo "Successfully reset repository history and pushed all searchindex files"
+
       # Login in AWs
       - name: Configure AWS credentials using OIDC
         uses: aws-actions/configure-aws-credentials@v3
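XOR with a fixed key is symmetric, so a published index can be recovered by applying the same operation and then gunzipping. A hedged local sketch (`xor_decrypt.py` is a hypothetical helper, not part of the repo; the raw URL assumes the file sits on the `master` branch of the searchindex repo, which is where the step above pushes it):

```bash
# Fetch the encrypted, gzipped English index pushed by the step above
curl -fsSL -o searchindex-cloud-en.js.gz \
  https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-searchindex/master/searchindex-cloud-en.js.gz

# XOR with the same key undoes the encryption (the key is the literal embedded in the workflow)
KEY='Prevent_Online_AVs_From_Flagging_HackTricks_Search_Gzip_As_Malicious_394h7gt8rf9u3rf9g'
cat > /tmp/xor_decrypt.py << 'EOF'
import sys

key, input_file, output_file = sys.argv[1], sys.argv[2], sys.argv[3]
with open(input_file, 'rb') as f:
    data = f.read()
key_bytes = key.encode('utf-8')
# Same byte-wise XOR as xor_encrypt.py above; applying it twice is the identity
decrypted = bytes(b ^ key_bytes[i % len(key_bytes)] for i, b in enumerate(data))
with open(output_file, 'wb') as f:
    f.write(decrypted)
EOF
python3 /tmp/xor_decrypt.py "$KEY" searchindex-cloud-en.js.gz searchindex.js.gz

# The decrypted payload is a plain gzip stream; this yields the original searchindex.js
gunzip -f searchindex.js.gz
```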
**.github/workflows/cleanup_branches.yml** (new file, +204 lines)

name: Cleanup Merged/Closed PR Branches

on:
  schedule:
    - cron: '0 2 * * 0' # Every Sunday at 2 AM UTC
  workflow_dispatch: # Allow manual triggering
    inputs:
      dry_run:
        description: 'Dry run (show what would be deleted without actually deleting)'
        required: false
        default: 'false'
        type: boolean

permissions:
  contents: write
  pull-requests: read

jobs:
  cleanup-branches:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Need full history to see all branches
          token: ${{ secrets.PAT_TOKEN }}

      - name: Install GitHub CLI
        run: |
          curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
            && sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \
            && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
            && sudo apt update \
            && sudo apt install gh -y

      - name: Configure git
        run: |
          git config --global user.email "action@github.com"
          git config --global user.name "GitHub Action"

      - name: Cleanup merged/closed PR branches
        env:
          GH_TOKEN: ${{ secrets.PAT_TOKEN }}
        run: |
          echo "Starting branch cleanup process..."

          # Check if this is a dry run
          DRY_RUN="${{ github.event.inputs.dry_run || 'false' }}"
          if [ "$DRY_RUN" = "true" ]; then
            echo "🔍 DRY RUN MODE - No branches will actually be deleted"
            echo ""
          fi

          # Define protected branches and patterns
          protected_branches=(
            "master"
            "main"
          )

          # Translation branch patterns (any 2-letter combination)
          translation_pattern="^[a-zA-Z]{2}$"

          # Get all remote branches except protected ones
          echo "Fetching all remote branches..."
          git fetch --all --prune

          # Get list of all remote branches (excluding HEAD)
          all_branches=$(git branch -r | grep -v 'HEAD' | sed 's/origin\///' | grep -v '^$')

          # Get all open PRs to identify branches with open PRs
          echo "Getting list of open PRs..."
          open_pr_branches=$(gh pr list --state open --json headRefName --jq '.[].headRefName' | sort | uniq)

          echo "Open PR branches:"
          echo "$open_pr_branches"
          echo ""

          deleted_count=0
          skipped_count=0

          for branch in $all_branches; do
            branch=$(echo "$branch" | xargs) # Trim whitespace

            # Skip if empty
            if [ -z "$branch" ]; then
              continue
            fi

            echo "Checking branch: $branch"

            # Check if it's a protected branch
            is_protected=false
            for protected in "${protected_branches[@]}"; do
              if [ "$branch" = "$protected" ]; then
                echo "  ✓ Skipping protected branch: $branch"
                is_protected=true
                skipped_count=$((skipped_count + 1))
                break
              fi
            done

            if [ "$is_protected" = true ]; then
              continue
            fi

            # Check if it's a translation branch (any 2-letter combination)
            # Also protect any branch that starts with 2 letters followed by additional content
            if echo "$branch" | grep -Eq "$translation_pattern" || echo "$branch" | grep -Eq "^[a-zA-Z]{2}[_-]"; then
              echo "  ✓ Skipping translation/language branch: $branch"
              skipped_count=$((skipped_count + 1))
              continue
            fi

            # Check if branch has an open PR
            if echo "$open_pr_branches" | grep -Fxq "$branch"; then
              echo "  ✓ Skipping branch with open PR: $branch"
              skipped_count=$((skipped_count + 1))
              continue
            fi

            # Check if branch had a PR that was merged or closed
            echo "  → Checking PR history for branch: $branch"

            # Look for PRs from this branch (both merged and closed)
            pr_info=$(gh pr list --state all --head "$branch" --json number,state,mergedAt --limit 1)

            if [ "$pr_info" != "[]" ]; then
              pr_state=$(echo "$pr_info" | jq -r '.[0].state')
              pr_number=$(echo "$pr_info" | jq -r '.[0].number')
              merged_at=$(echo "$pr_info" | jq -r '.[0].mergedAt')

              if [ "$pr_state" = "MERGED" ] || [ "$pr_state" = "CLOSED" ]; then
                if [ "$DRY_RUN" = "true" ]; then
                  echo "  🔍 [DRY RUN] Would delete branch: $branch (PR #$pr_number was $pr_state)"
                  deleted_count=$((deleted_count + 1))
                else
                  echo "  ✗ Deleting branch: $branch (PR #$pr_number was $pr_state)"

                  # Delete the remote branch
                  if git push origin --delete "$branch" 2>/dev/null; then
                    echo "    Successfully deleted remote branch: $branch"
                    deleted_count=$((deleted_count + 1))
                  else
                    echo "    Failed to delete remote branch: $branch"
                  fi
                fi
              else
                echo "  ✓ Skipping branch with open PR: $branch (PR #$pr_number is $pr_state)"
                skipped_count=$((skipped_count + 1))
              fi
            else
              # No PR found for this branch - it might be a stale branch
              # Check if branch is older than 30 days and has no recent activity
              last_commit_date=$(git log -1 --format="%ct" origin/"$branch" 2>/dev/null || echo "0")

              if [ "$last_commit_date" != "0" ] && [ -n "$last_commit_date" ]; then
                # Calculate 30 days ago in seconds since epoch
                thirty_days_ago=$(($(date +%s) - 30 * 24 * 60 * 60))

                if [ "$last_commit_date" -lt "$thirty_days_ago" ]; then
                  if [ "$DRY_RUN" = "true" ]; then
                    echo "  🔍 [DRY RUN] Would delete stale branch (no PR, >30 days old): $branch"
                    deleted_count=$((deleted_count + 1))
                  else
                    echo "  ✗ Deleting stale branch (no PR, >30 days old): $branch"

                    if git push origin --delete "$branch" 2>/dev/null; then
                      echo "    Successfully deleted stale branch: $branch"
                      deleted_count=$((deleted_count + 1))
                    else
                      echo "    Failed to delete stale branch: $branch"
                    fi
                  fi
                else
                  echo "  ✓ Skipping recent branch (no PR, <30 days old): $branch"
                  skipped_count=$((skipped_count + 1))
                fi
              else
                echo "  ✓ Skipping branch (cannot determine age): $branch"
                skipped_count=$((skipped_count + 1))
              fi
            fi

            echo ""
          done

          echo "=================================="
          echo "Branch cleanup completed!"
          if [ "$DRY_RUN" = "true" ]; then
            echo "Branches that would be deleted: $deleted_count"
          else
            echo "Branches deleted: $deleted_count"
          fi
          echo "Branches skipped: $skipped_count"
          echo "=================================="

          # Clean up local tracking branches (only if not dry run)
          if [ "$DRY_RUN" != "true" ]; then
            echo "Cleaning up local tracking branches..."
            git remote prune origin
          fi

          echo "Cleanup process finished."
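Before letting the Sunday schedule delete anything, the `dry_run` input allows a safe preview. A hypothetical manual trigger with the GitHub CLI (assumes `gh` is authenticated and run from a clone of this repo):

```bash
# Trigger the workflow manually in preview mode; nothing is deleted
gh workflow run cleanup_branches.yml -f dry_run=true

# Tail the most recent run of this workflow to read the "Would delete ..." report
gh run watch "$(gh run list --workflow=cleanup_branches.yml --limit 1 --json databaseId --jq '.[0].databaseId')"
```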
**.github/workflows/translate_af.yml** (deleted, -119 lines; this was the template for all the per-language workflows removed in this compare)

name: Translator to AF (Afrikaans)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: af

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Afrikaans
      BRANCH: af

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 #Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch #Make sure we have last version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Login in AWs
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
**.github/workflows/translate_all.yml** (new file, +282 lines; the matrix replaces the per-language workflows deleted in this compare)

name: Translator All

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
      - Dockerfile
  workflow_dispatch:

permissions:
  packages: write
  id-token: write
  contents: write

jobs:
  translate:
    name: Translate → ${{ matrix.name }} (${{ matrix.branch }})
    runs-on: ubuntu-latest
    environment: prod

    # Run N languages in parallel (tune max-parallel if needed)
    strategy:
      fail-fast: false
      # max-parallel: 3 #Nothing to run all in parallel
      matrix:
        include:
          - { name: "Afrikaans",  language: "Afrikaans",  branch: "af" }
          - { name: "German",     language: "German",     branch: "de" }
          - { name: "Greek",      language: "Greek",      branch: "el" }
          - { name: "Spanish",    language: "Spanish",    branch: "es" }
          - { name: "French",     language: "French",     branch: "fr" }
          - { name: "Hindi",      language: "Hindi",      branch: "hi" }
          - { name: "Italian",    language: "Italian",    branch: "it" }
          - { name: "Japanese",   language: "Japanese",   branch: "ja" }
          - { name: "Korean",     language: "Korean",     branch: "ko" }
          - { name: "Polish",     language: "Polish",     branch: "pl" }
          - { name: "Portuguese", language: "Portuguese", branch: "pt" }
          - { name: "Serbian",    language: "Serbian",    branch: "sr" }
          - { name: "Swahili",    language: "Swahili",    branch: "sw" }
          - { name: "Turkish",    language: "Turkish",    branch: "tr" }
          - { name: "Ukrainian",  language: "Ukrainian",  branch: "uk" }
          - { name: "Chinese",    language: "Chinese",    branch: "zh" }

    # Ensure only one job per branch runs at a time (even across workflow runs)
    concurrency:
      group: translate-${{ matrix.branch }}
      cancel-in-progress: false

    container:
      image: ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image:latest

    env:
      LANGUAGE: ${{ matrix.language }}
      BRANCH: ${{ matrix.branch }}

    steps:
      - name: Wait for build_master to start
        run: |
          echo "Waiting 30 seconds to let build_master.yml initialize and reset the searchindex repo..."
          sleep 30
          echo "Continuing with translation workflow"

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Update and download scripts
        run: |
          sudo apt-get update
          # Install GitHub CLI properly
          curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
            && sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \
            && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
            && sudo apt update \
            && sudo apt install gh -y \
            && sudo apt-get install -y wget
          wget -O /tmp/get_and_save_refs.py https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-cloud/master/scripts/get_and_save_refs.py
          wget -O /tmp/compare_and_fix_refs.py https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-cloud/master/scripts/compare_and_fix_refs.py
          wget -O /tmp/translator.py https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-cloud/master/scripts/translator.py

      - name: Run get_and_save_refs.py
        run: |
          python /tmp/get_and_save_refs.py

      - name: Download language branch & update refs
        run: |
          pwd
          ls -la
          git config --global --add safe.directory "$GITHUB_WORKSPACE"
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git config pull.rebase false
          git checkout $BRANCH
          git pull
          python /tmp/compare_and_fix_refs.py --files-unmatched-paths /tmp/file_paths.txt
          git add .
          git commit -m "Fix unmatched refs" || echo "No changes to commit"
          git push || echo "No changes to push"

      - name: Run translation script on changed files
        run: |
          git checkout master
          cp src/SUMMARY.md /tmp/master-summary.md
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n ",$file" >> /tmp/file_paths.txt
            fi
          done

          echo "Files to translate (`wc -l < /tmp/file_paths.txt`):"
          cat /tmp/file_paths.txt
          echo ""
          echo ""
          touch /tmp/file_paths.txt

          if [ -s /tmp/file_paths.txt ]; then
            python /tmp/translator.py \
              --language "$LANGUAGE" \
              --branch "$BRANCH" \
              --api-key "$OPENAI_API_KEY" \
              -f "$(cat /tmp/file_paths.txt)" \
              -t 3
          else
            echo "No markdown files changed, skipping translation."
          fi

      - name: Sync SUMMARY.md from master
        run: |
          git checkout "$BRANCH"
          git pull
          if [ -f /tmp/master-summary.md ]; then
            cp /tmp/master-summary.md src/SUMMARY.md
            git add src/SUMMARY.md
            git commit -m "Sync SUMMARY.md with master" || echo "SUMMARY already up to date"
            git push || echo "No SUMMARY updates to push"
          else
            echo "master summary not exported; failing"
            exit 1
          fi

      - name: Build mdBook
        run: |
          git checkout "$BRANCH"
          git pull
          MDBOOK_BOOK__LANGUAGE=$BRANCH mdbook build || (echo "Error logs" && cat hacktricks-preprocessor-error.log && echo "" && echo "" && echo "Debug logs" && (cat hacktricks-preprocessor.log | tail -n 20) && exit 1)

      - name: Push search index to hacktricks-searchindex repo
        shell: bash
        env:
          PAT_TOKEN: ${{ secrets.PAT_TOKEN }}
        run: |
          set -euo pipefail

          ASSET="book/searchindex.js"
          TARGET_REPO="HackTricks-wiki/hacktricks-searchindex"
          FILENAME="searchindex-cloud-${BRANCH}.js"

          if [ ! -f "$ASSET" ]; then
            echo "Expected $ASSET to exist after build" >&2
            exit 1
          fi

          TOKEN="${PAT_TOKEN}"
          if [ -z "$TOKEN" ]; then
            echo "No PAT_TOKEN available" >&2
            exit 1
          fi

          # Clone the searchindex repo
          git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git /tmp/searchindex-repo

          # Compress the searchindex file
          gzip -9 -k -f "$ASSET"

          # Show compression stats
          ORIGINAL_SIZE=$(wc -c < "$ASSET")
          COMPRESSED_SIZE=$(wc -c < "${ASSET}.gz")
          RATIO=$(awk "BEGIN {printf \"%.1f\", ($COMPRESSED_SIZE / $ORIGINAL_SIZE) * 100}")
          echo "Compression: ${ORIGINAL_SIZE} bytes -> ${COMPRESSED_SIZE} bytes (${RATIO}%)"

          # XOR encrypt the compressed file
          KEY='Prevent_Online_AVs_From_Flagging_HackTricks_Search_Gzip_As_Malicious_394h7gt8rf9u3rf9g'
          cat > /tmp/xor_encrypt.py << 'EOF'
          import sys
          key = sys.argv[1]
          input_file = sys.argv[2]
          output_file = sys.argv[3]
          with open(input_file, 'rb') as f:
              data = f.read()
          key_bytes = key.encode('utf-8')
          encrypted = bytearray(len(data))
          for i in range(len(data)):
              encrypted[i] = data[i] ^ key_bytes[i % len(key_bytes)]
          with open(output_file, 'wb') as f:
              f.write(encrypted)
          print(f"Encrypted: {len(data)} bytes")
          EOF
          python3 /tmp/xor_encrypt.py "$KEY" "${ASSET}.gz" "${ASSET}.gz.enc"

          # Copy ONLY the encrypted .gz version to the searchindex repo (no uncompressed .js)
          cp "${ASSET}.gz.enc" "/tmp/searchindex-repo/${FILENAME}.gz"

          # Commit and push with retry logic
          cd /tmp/searchindex-repo
          git config user.name "GitHub Actions"
          git config user.email "github-actions@github.com"
          git add "${FILENAME}.gz"

          if git diff --staged --quiet; then
            echo "No changes to commit"
          else
            git commit -m "Update ${FILENAME} from hacktricks-cloud build"

            # Retry push up to 20 times with pull --rebase between attempts
            MAX_RETRIES=20
            RETRY_COUNT=0
            while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
              if git push origin master; then
                echo "Successfully pushed on attempt $((RETRY_COUNT + 1))"
                break
              else
                RETRY_COUNT=$((RETRY_COUNT + 1))
                if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then
                  echo "Push failed, attempt $RETRY_COUNT/$MAX_RETRIES. Pulling and retrying..."

                  # Try normal rebase first
                  if git pull --rebase origin master 2>&1 | tee /tmp/pull_output.txt; then
                    echo "Rebase successful, retrying push..."
                  else
                    # If rebase fails due to divergent histories (orphan branch reset), re-clone
                    if grep -q "unrelated histories\|refusing to merge\|fatal: invalid upstream\|couldn't find remote ref" /tmp/pull_output.txt; then
                      echo "Detected history rewrite, re-cloning repository..."
                      cd /tmp
                      rm -rf searchindex-repo
                      git clone https://x-access-token:${TOKEN}@github.com/${TARGET_REPO}.git searchindex-repo
                      cd searchindex-repo
                      git config user.name "GitHub Actions"
                      git config user.email "github-actions@github.com"

                      # Re-copy ONLY the encrypted .gz version (no uncompressed .js)
                      cp "${ASSET}.gz.enc" "${FILENAME}.gz"

                      git add "${FILENAME}.gz"
                      git commit -m "Update ${FILENAME}.gz from hacktricks-cloud build"
                      echo "Re-cloned and re-committed, will retry push..."
                    else
                      echo "Rebase failed for unknown reason, retrying anyway..."
                    fi
                  fi

                  sleep 1
                else
                  echo "Failed to push after $MAX_RETRIES attempts"
                  exit 1
                fi
              fi
            done
          fi

      # Login in AWs
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: |
          echo "Current branch:"
          git rev-parse --abbrev-ref HEAD
          echo "Syncing $BRANCH to S3"
          aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
          echo "Sync completed"
          echo "Cat 3 files from the book"
          find . -type f -name 'index.html' -print | head -n 3 | xargs -r cat
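Given the matrix above plus the English index pushed by build_master.yml, the searchindex repo should end up holding one `searchindex-cloud-<branch>.js.gz` per language. A hypothetical sanity check with the GitHub CLI (assumes `gh` is authenticated):

```bash
# List the files currently at the root of the searchindex repo
gh api repos/HackTricks-wiki/hacktricks-searchindex/contents --jq '.[].name'

# Compare against the set of files the matrix publishes (en comes from build_master.yml)
for branch in en af de el es fr hi it ja ko pl pt sr sw tr uk zh; do
  echo "searchindex-cloud-${branch}.js.gz"
done
```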
119
.github/workflows/translate_de.yml
vendored
@@ -1,119 +0,0 @@
|
||||
name: Translator to DE (German)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- 'scripts/**'
|
||||
- '.gitignore'
|
||||
- '.github/**'
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency: de
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
run-translation:
|
||||
runs-on: ubuntu-latest
|
||||
environment: prod
|
||||
env:
|
||||
LANGUAGE: German
|
||||
BRANCH: de
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0 #Needed to download everything to be able to access the master & language branches
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip3 install openai tqdm tiktoken
|
||||
|
||||
# Install Rust and Cargo
|
||||
- name: Install Rust and Cargo
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Install mdBook and Plugins
|
||||
- name: Install mdBook and Plugins
|
||||
run: |
|
||||
cargo install mdbook
|
||||
cargo install mdbook-alerts
|
||||
cargo install mdbook-reading-time
|
||||
cargo install mdbook-pagetoc
|
||||
cargo install mdbook-tabs
|
||||
cargo install mdbook-codename
|
||||
|
||||
|
||||
- name: Update & install wget & translator.py
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install wget -y
|
||||
cd scripts
|
||||
rm -f translator.py
|
||||
wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
|
||||
cd ..
|
||||
|
||||
- name: Download language branch #Make sure we have last version
|
||||
run: |
|
||||
git config --global user.name 'Translator'
|
||||
git config --global user.email 'github-actions@github.com'
|
||||
git checkout "$BRANCH"
|
||||
git pull
|
||||
git checkout master
|
||||
|
||||
- name: Run translation script on changed files
|
||||
run: |
|
||||
echo "Starting translations"
|
||||
echo "Commit: $GITHUB_SHA"
|
||||
|
||||
# Export the OpenAI API key as an environment variable
|
||||
export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}
|
||||
|
||||
# Run the translation script on each changed file
|
||||
git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
|
||||
if echo "$file" | grep -qE '\.md$'; then
|
||||
echo -n "$file , " >> /tmp/file_paths.txt
|
||||
else
|
||||
echo "Skipping $file"
|
||||
fi
|
||||
done
|
||||
|
||||
echo "Translating $(cat /tmp/file_paths.txt)"
|
||||
python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3
|
||||
|
||||
# Push changes to the repository
|
||||
- name: Commit and push changes
|
||||
run: |
|
||||
git checkout "$BRANCH"
|
||||
git add -A
|
||||
git commit -m "Translated $BRANCH files" || true
|
||||
git push --set-upstream origin "$BRANCH"
|
||||
|
||||
# Build the mdBook
|
||||
- name: Build mdBook
|
||||
run: mdbook build
|
||||
|
||||
# Login in AWs
|
||||
- name: Configure AWS credentials using OIDC
|
||||
uses: aws-actions/configure-aws-credentials@v3
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
|
||||
aws-region: us-east-1
|
||||
|
||||
# Sync the build to S3
|
||||
- name: Sync to S3
|
||||
run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
|
||||
119
.github/workflows/translate_el.yml
vendored
@@ -1,119 +0,0 @@
|
||||
name: Translator to EL (Greek)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- 'scripts/**'
|
||||
- '.gitignore'
|
||||
- '.github/**'
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency: el
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
run-translation:
|
||||
runs-on: ubuntu-latest
|
||||
environment: prod
|
||||
env:
|
||||
LANGUAGE: Greek
|
||||
BRANCH: el
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0 #Needed to download everything to be able to access the master & language branches
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip3 install openai tqdm tiktoken
|
||||
|
||||
# Install Rust and Cargo
|
||||
- name: Install Rust and Cargo
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Install mdBook and Plugins
|
||||
- name: Install mdBook and Plugins
|
||||
run: |
|
||||
cargo install mdbook
|
||||
cargo install mdbook-alerts
|
||||
cargo install mdbook-reading-time
|
||||
cargo install mdbook-pagetoc
|
||||
cargo install mdbook-tabs
|
||||
cargo install mdbook-codename
|
||||
|
||||
|
||||
- name: Update & install wget & translator.py
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install wget -y
|
||||
cd scripts
|
||||
rm -f translator.py
|
||||
wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
|
||||
cd ..
|
||||
|
||||
- name: Download language branch #Make sure we have last version
|
||||
run: |
|
||||
git config --global user.name 'Translator'
|
||||
git config --global user.email 'github-actions@github.com'
|
||||
git checkout "$BRANCH"
|
||||
git pull
|
||||
git checkout master
|
||||
|
||||
- name: Run translation script on changed files
|
||||
run: |
|
||||
echo "Starting translations"
|
||||
echo "Commit: $GITHUB_SHA"
|
||||
|
||||
# Export the OpenAI API key as an environment variable
|
||||
export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}
|
||||
|
||||
# Run the translation script on each changed file
|
||||
git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
|
||||
if echo "$file" | grep -qE '\.md$'; then
|
||||
echo -n "$file , " >> /tmp/file_paths.txt
|
||||
else
|
||||
echo "Skipping $file"
|
||||
fi
|
||||
done
|
||||
|
||||
echo "Translating $(cat /tmp/file_paths.txt)"
|
||||
python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3
|
||||
|
||||
# Push changes to the repository
|
||||
- name: Commit and push changes
|
||||
run: |
|
||||
git checkout "$BRANCH"
|
||||
git add -A
|
||||
git commit -m "Translated $BRANCH files" || true
|
||||
git push --set-upstream origin "$BRANCH"
|
||||
|
||||
# Build the mdBook
|
||||
- name: Build mdBook
|
||||
run: mdbook build
|
||||
|
||||
# Login in AWs
|
||||
- name: Configure AWS credentials using OIDC
|
||||
uses: aws-actions/configure-aws-credentials@v3
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
|
||||
aws-region: us-east-1
|
||||
|
||||
# Sync the build to S3
|
||||
- name: Sync to S3
|
||||
run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
|
||||
119
.github/workflows/translate_es.yml
vendored
@@ -1,119 +0,0 @@
|
||||
name: Translator to ES (Spanish)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- 'scripts/**'
|
||||
- '.gitignore'
|
||||
- '.github/**'
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency: es
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
run-translation:
|
||||
runs-on: ubuntu-latest
|
||||
environment: prod
|
||||
env:
|
||||
LANGUAGE: Spanish
|
||||
BRANCH: es
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0 #Needed to download everything to be able to access the master & language branches
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip3 install openai tqdm tiktoken
|
||||
|
||||
# Install Rust and Cargo
|
||||
- name: Install Rust and Cargo
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Install mdBook and Plugins
|
||||
- name: Install mdBook and Plugins
|
||||
run: |
|
||||
cargo install mdbook
|
||||
cargo install mdbook-alerts
|
||||
cargo install mdbook-reading-time
|
||||
cargo install mdbook-pagetoc
|
||||
cargo install mdbook-tabs
|
||||
cargo install mdbook-codename
|
||||
|
||||
|
||||
- name: Update & install wget & translator.py
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install wget -y
|
||||
cd scripts
|
||||
rm -f translator.py
|
||||
wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
|
||||
cd ..
|
||||
|
||||
- name: Download language branch #Make sure we have last version
|
||||
run: |
|
||||
git config --global user.name 'Translator'
|
||||
git config --global user.email 'github-actions@github.com'
|
||||
git checkout "$BRANCH"
|
||||
git pull
|
||||
git checkout master
|
||||
|
||||
- name: Run translation script on changed files
|
||||
run: |
|
||||
echo "Starting translations"
|
||||
echo "Commit: $GITHUB_SHA"
|
||||
|
||||
# Export the OpenAI API key as an environment variable
|
||||
export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}
|
||||
|
||||
# Run the translation script on each changed file
|
||||
git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
|
||||
if echo "$file" | grep -qE '\.md$'; then
|
||||
echo -n "$file , " >> /tmp/file_paths.txt
|
||||
else
|
||||
echo "Skipping $file"
|
||||
fi
|
||||
done
|
||||
|
||||
echo "Translating $(cat /tmp/file_paths.txt)"
|
||||
python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3
|
||||
|
||||
# Push changes to the repository
|
||||
- name: Commit and push changes
|
||||
run: |
|
||||
git checkout "$BRANCH"
|
||||
git add -A
|
||||
git commit -m "Translated $BRANCH files" || true
|
||||
git push --set-upstream origin "$BRANCH"
|
||||
|
||||
# Build the mdBook
|
||||
- name: Build mdBook
|
||||
run: mdbook build
|
||||
|
||||
# Login in AWs
|
||||
- name: Configure AWS credentials using OIDC
|
||||
uses: aws-actions/configure-aws-credentials@v3
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
|
||||
aws-region: us-east-1
|
||||
|
||||
# Sync the build to S3
|
||||
- name: Sync to S3
|
||||
run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
|
||||
119 .github/workflows/translate_fr.yml vendored
@@ -1,119 +0,0 @@
name: Translator to FR (French)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: fr

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: French
      BRANCH: fr

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
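The changed-file collection above is just a shell pipeline around `git diff`; a minimal local dry run of the same step might look like the sketch below. The language, branch, and thread count are the values from this workflow; `paste` replaces the `while` loop, so the separator is a plain comma rather than the `"$file , "` format the workflow writes.

```bash
# Local sketch of the "Run translation script on changed files" step.
# Assumes scripts/translator.py is present and OPENAI_API_KEY is exported.
git diff --name-only HEAD~1 \
  | grep -v "SUMMARY.md" \
  | grep -E '\.md$' \
  | paste -sd, - > /tmp/file_paths.txt   # comma-separated list of changed .md files

echo "Translating $(cat /tmp/file_paths.txt)"
python scripts/translator.py --language "French" --branch "fr" \
  --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3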
119 .github/workflows/translate_in.yml vendored
@@ -1,119 +0,0 @@
name: Translator to IN (Hindi)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: in

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Hindi
      BRANCH: in

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_it.yml vendored
@@ -1,119 +0,0 @@
name: Translator to IT (Italian)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: it

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Italian
      BRANCH: it

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_ja.yml vendored
@@ -1,119 +0,0 @@
name: Translator to JA (Japanese)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: ja

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Japanese
      BRANCH: ja

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_ko.yml vendored
@@ -1,119 +0,0 @@
name: Translator to KO (Korean)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: ko

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Korean
      BRANCH: ko

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_pl.yml vendored
@@ -1,119 +0,0 @@
name: Translator to PL (Polish)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: pl

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Polish
      BRANCH: pl

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_pt.yml vendored
@@ -1,119 +0,0 @@
name: Translator to PT (Portuguese)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: pt

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Portuguese
      BRANCH: pt

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_sr.yml vendored
@@ -1,119 +0,0 @@
name: Translator to SR (Serbian)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: sr

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Serbian
      BRANCH: sr

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_sw.yml vendored
@@ -1,119 +0,0 @@
name: Translator to SW (Swahili)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: sw

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Swahili
      BRANCH: sw

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_tr.yml vendored
@@ -1,119 +0,0 @@
name: Translator to TR (Turkish)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: tr

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Turkish
      BRANCH: tr

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_uk.yml vendored
@@ -1,119 +0,0 @@
name: Translator to UK (Ukrainian)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: uk

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Ukrainian
      BRANCH: uk

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
119 .github/workflows/translate_zh.yml vendored
@@ -1,119 +0,0 @@
name: Translator to ZH (Chinese)

on:
  push:
    branches:
      - master
    paths-ignore:
      - 'scripts/**'
      - '.gitignore'
      - '.github/**'
  workflow_dispatch:

concurrency: zh

permissions:
  id-token: write
  contents: write

jobs:
  run-translation:
    runs-on: ubuntu-latest
    environment: prod
    env:
      LANGUAGE: Chinese
      BRANCH: zh

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0 # Needed to download everything to be able to access the master & language branches

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Install python dependencies
        run: |
          python -m pip install --upgrade pip
          pip3 install openai tqdm tiktoken

      # Install Rust and Cargo
      - name: Install Rust and Cargo
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      # Install mdBook and Plugins
      - name: Install mdBook and Plugins
        run: |
          cargo install mdbook
          cargo install mdbook-alerts
          cargo install mdbook-reading-time
          cargo install mdbook-pagetoc
          cargo install mdbook-tabs
          cargo install mdbook-codename

      - name: Update & install wget & translator.py
        run: |
          sudo apt-get update
          sudo apt-get install wget -y
          cd scripts
          rm -f translator.py
          wget https://raw.githubusercontent.com/carlospolop/hacktricks-cloud/master/scripts/translator.py
          cd ..

      - name: Download language branch # Make sure we have the latest version
        run: |
          git config --global user.name 'Translator'
          git config --global user.email 'github-actions@github.com'
          git checkout "$BRANCH"
          git pull
          git checkout master

      - name: Run translation script on changed files
        run: |
          echo "Starting translations"
          echo "Commit: $GITHUB_SHA"

          # Export the OpenAI API key as an environment variable
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}

          # Run the translation script on each changed file
          git diff --name-only HEAD~1 | grep -v "SUMMARY.md" | while read -r file; do
            if echo "$file" | grep -qE '\.md$'; then
              echo -n "$file , " >> /tmp/file_paths.txt
            else
              echo "Skipping $file"
            fi
          done

          echo "Translating $(cat /tmp/file_paths.txt)"
          python scripts/translator.py --language "$LANGUAGE" --branch "$BRANCH" --api-key "$OPENAI_API_KEY" -f "$(cat /tmp/file_paths.txt)" -t 3

      # Push changes to the repository
      - name: Commit and push changes
        run: |
          git checkout "$BRANCH"
          git add -A
          git commit -m "Translated $BRANCH files" || true
          git push --set-upstream origin "$BRANCH"

      # Build the mdBook
      - name: Build mdBook
        run: mdbook build

      # Log in to AWS
      - name: Configure AWS credentials using OIDC
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1

      # Sync the build to S3
      - name: Sync to S3
        run: aws s3 sync ./book s3://hacktricks-cloud/$BRANCH --delete
23 .github/workflows/upload_ht_to_ai.yml vendored Normal file
@@ -0,0 +1,23 @@
name: Upload HackTricks to HackTricks AI

on:
  workflow_dispatch:
  schedule:
    - cron: "0 5 1 * *"

jobs:
  download-clean-push:
    runs-on: ubuntu-latest
    environment: prod
    steps:
      # 1. Download the script
      - name: Download script
        run: wget "https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-cloud/refs/heads/master/scripts/upload_ht_to_ai.py"

      - name: Install pip dependencies
        run: python3 -m pip install openai

      # 2. Execute the script
      - name: Execute script
        run: export MY_OPENAI_API_KEY=${{ secrets.MY_OPENAI_API_KEY }}; python3 "./upload_ht_to_ai.py"
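The same monthly job can be reproduced locally; a rough sketch, assuming a valid key is available (the key value below is a placeholder, not a real secret):

```bash
# Local sketch of the upload job above.
wget "https://raw.githubusercontent.com/HackTricks-wiki/hacktricks-cloud/refs/heads/master/scripts/upload_ht_to_ai.py"
python3 -m pip install openai
export MY_OPENAI_API_KEY="sk-..."   # placeholder; use a real key
python3 ./upload_ht_to_ai.py
```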
4 .gitignore vendored
@@ -3,8 +3,6 @@

# General
.DS_Store
.AppleDouble
@@ -36,3 +34,5 @@ Temporary Items
book
book/*
hacktricks-preprocessor.log
hacktricks-preprocessor-error.log
searchindex.js
31 Dockerfile Normal file
@@ -0,0 +1,31 @@
# Use the official Python 3.12 Bullseye image as the base
FROM python:3.12-bullseye

# Install system dependencies
RUN apt-get update && apt-get install -y \
    curl \
    wget \
    git \
    sudo \
    build-essential \
    awscli

# Install Python libraries
RUN pip install --upgrade pip && \
    pip install openai tqdm tiktoken

# Install Rust & Cargo
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"

# Install mdBook & plugins
RUN cargo install mdbook
RUN cargo install mdbook-alerts
RUN cargo install mdbook-reading-time
RUN cargo install mdbook-pagetoc
RUN cargo install mdbook-tabs
RUN cargo install mdbook-codename

# Set the working directory
WORKDIR /app
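This image bundles everything the workflows above install step by step (Python deps, Rust, mdBook plus its plugins, awscli). A usage sketch, where the image tag and the bind mount are illustrative choices rather than anything defined in the repo:

```bash
# Build the toolchain image and run an mdBook build inside it.
# "hacktricks-cloud-build" is an example tag; /app matches the WORKDIR above.
docker build -t hacktricks-cloud-build .
docker run --rm -v "$(pwd):/app" hacktricks-cloud-build mdbook build
```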
19 book.toml
@@ -1,7 +1,6 @@
[book]
authors = ["Carlos Polop"]
authors = ["HackTricks Team"]
language = "en"
multilingual = false
src = "src"
title = "HackTricks Cloud"

@@ -9,31 +8,25 @@ title = "HackTricks Cloud"
create-missing = false
extra-watch-dirs = ["translations"]

[preprocessor.alerts]
after = ["links"]

[preprocessor.reading-time]

[preprocessor.pagetoc]

[preprocessor.tabs]

[preprocessor.codename]

[preprocessor.hacktricks]
command = "python3 ./hacktricks-preprocessor.py"
env = "prod"

[output.html]
additional-css = ["theme/pagetoc.css", "theme/tabs.css"]
additional-css = ["theme/tabs.css", "theme/pagetoc.css"]
additional-js = [
    "theme/pagetoc.js",
    "theme/tabs.js",
    "theme/pagetoc.js",
    "theme/ht_searcher.js",
    "theme/sponsor.js",
    "theme/ai.js"
]
no-section-label = true
preferred-dark-theme = "hacktricks-dark"
default-theme = "hacktricks-light"
hash-files = false

[output.html.fold]
enable = true # whether or not to enable section folding
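Note that `[preprocessor.hacktricks]` is pinned to `env = "prod"`, which (per the preprocessor change below) makes `ref()` fetch the remote `<title>` of every external link at build time. For a faster local preview one can flip it to `dev`; a sketch, assuming GNU sed and the plugins above are installed:

```bash
# Local preview sketch: switch the hacktricks preprocessor to dev mode
# (ref() then skips remote title lookups), then serve the book.
sed -i 's/env = "prod"/env = "dev"/' book.toml
mdbook serve
```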
hacktricks-preprocessor.py
@@ -1,4 +1,5 @@
import json
import os
import sys
import re
import logging
@@ -6,7 +7,14 @@ from os import path
from urllib.request import urlopen, Request

logger = logging.getLogger(__name__)
logging.basicConfig(filename='hacktricks-preprocessor.log', filemode='w', encoding='utf-8', level=logging.DEBUG)
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename='hacktricks-preprocessor.log', mode='w', encoding='utf-8')
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)

handler2 = logging.FileHandler(filename='hacktricks-preprocessor-error.log', mode='w', encoding='utf-8')
handler2.setLevel(logging.ERROR)
logger.addHandler(handler2)


def findtitle(search ,obj, key, path=(),):
@@ -26,37 +34,63 @@ def findtitle(search ,obj, key, path=(),):


def ref(matchobj):
    logger.debug(f'Match: {matchobj.groups(0)[0].strip()}')
    logger.debug(f'Ref match: {matchobj.groups(0)[0].strip()}')
    href = matchobj.groups(0)[0].strip()
    title = href
    if href.startswith("http://") or href.startswith("https://"):
        # pass
        try:
            raw_html = str(urlopen(Request(href, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0'})).read())
            match = re.search('<title>(.*?)</title>', raw_html)
            title = match.group(1) if match else href
        except Exception as e:
            logger.debug(f'Error opening URL {href}: {e}')
            pass #Dont stop on broken link
        if context['config']['preprocessor']['hacktricks']['env'] == 'dev':
            pass
        else:
            try:
                raw_html = str(urlopen(Request(href, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0'})).read())
                match = re.search('<title>(.*?)</title>', raw_html)
                title = match.group(1) if match else href
            except Exception as e:
                logger.error(f'Error opening URL {href}: {e}')
                pass #Dont stop on broken link
    else:
        try:
            if href.endswith("/"):
                href = href+"README.md" # Fix if ref points to a folder
            chapter, _path = findtitle(href, book, "source_path")
            logger.debug(f'Recursive title search result: {chapter['name']}')
            title = chapter['name']
            if "#" in href:
                result = findtitle(href.split("#")[0], book, "source_path")
                if result is None or result[0] is None:
                    raise Exception(f"Chapter not found")
                chapter, _path = result
                title = " ".join(href.split("#")[1].split("-")).title()
                logger.debug(f'Ref has # using title: {title}')
            else:
                result = findtitle(href, book, "source_path")
                if result is None or result[0] is None:
                    raise Exception(f"Chapter not found")
                chapter, _path = result
                logger.debug(f'Recursive title search result: {chapter["name"]}')
                title = chapter['name']
        except Exception as e:
            try:
                dir = path.dirname(current_chapter['source_path'])
                logger.debug(f'Error getting chapter title: {href} trying with relative path {path.normpath(path.join(dir,href))}')
                chapter, _path = findtitle(path.normpath(path.join(dir,href)), book, "source_path")
                logger.debug(f'Recursive title search result: {chapter['name']}')
                title = chapter['name']
                if "#" in href:
                    result = findtitle(path.normpath(path.join(dir,href.split('#')[0])), book, "source_path")
                    if result is None or result[0] is None:
                        raise Exception(f"Chapter not found")
                    chapter, _path = result
                    title = " ".join(href.split("#")[1].split("-")).title()
                    logger.debug(f'Ref has # using title: {title}')
                else:
                    result = findtitle(path.normpath(path.join(dir,href.split('#')[0])), book, "source_path")
                    if result is None or result[0] is None:
                        raise Exception(f"Chapter not found")
                    chapter, _path = result
                    title = chapter["name"]
                    logger.debug(f'Recursive title search result: {chapter["name"]}')
            except Exception as e:
                logger.debug(f'Error getting chapter title: {path.normpath(path.join(dir,href))}')
                print(f'Error getting chapter title: {path.normpath(path.join(dir,href))}')
                logger.error(f"Error: {e}")
                logger.error(f'Error getting chapter title: {path.normpath(path.join(dir,href))}')
                sys.exit(1)

    if href.endswith("/README.md"):
        href = href.replace("/README.md", "/index.html")

    template = f"""<a class="content_ref" href="{href}"><span class="content_ref_label">{title}</span></a>"""

@@ -67,6 +101,41 @@ def ref(matchobj):
    return result


def files(matchobj):
    logger.debug(f'Files match: {matchobj.groups(0)[0].strip()}')
    href = matchobj.groups(0)[0].strip()
    title = ""

    try:
        for root, dirs, files in os.walk(os.getcwd()+'/src/files'):
            logger.debug(root)
            logger.debug(files)
            if href in files:
                title = href
                logger.debug(f'File search result: {os.path.join(root, href)}')

    except Exception as e:
        logger.error(f"Error: {e}")
        logger.error(f'Error searching file: {href}')
        sys.exit(1)

    if title=="":
        logger.error(f'Error searching file: {href}')
        sys.exit(1)

    template = f"""<a class="content_ref" href="/files/{href}"><span class="content_ref_label">{title}</span></a>"""

    result = template

    return result


def add_read_time(content):
    regex = r'(<\/style>\n# .*(?=\n))'
    new_content = re.sub(regex, lambda x: x.group(0) + "\n\nReading time: {{ #reading_time }}", content)
    return new_content


def iterate_chapters(sections):
    if isinstance(sections, dict) and "PartTitle" in sections: # Not a chapter section
        return
@@ -90,13 +159,22 @@ if __name__ == '__main__':
    context, book = json.load(sys.stdin)

    logger.debug(f"Context: {context}")
    logger.debug(f"Book keys: {book.keys()}")

    for chapter in iterate_chapters(book['sections']):
    # Handle both old (sections) and new (items) mdbook API
    book_items = book.get('sections') or book.get('items', [])

    for chapter in iterate_chapters(book_items):
        if chapter is None:
            continue
        logger.debug(f"Chapter: {chapter['path']}")
        current_chapter = chapter
        regex = r'{{[\s]*#ref[\s]*}}(?:\n)?([^\\\n]*)(?:\n)?{{[\s]*#endref[\s]*}}'
        # regex = r'{{[\s]*#ref[\s]*}}(?:\n)?([^\\\n]*)(?:\n)?{{[\s]*#endref[\s]*}}'
        regex = r'{{[\s]*#ref[\s]*}}(?:\n)?([^\\\n#]*(?:#(.*))?)(?:\n)?{{[\s]*#endref[\s]*}}'
        new_content = re.sub(regex, ref, chapter['content'])
        regex = r'{{[\s]*#file[\s]*}}(?:\n)?([^\\\n]*)(?:\n)?{{[\s]*#endfile[\s]*}}'
        new_content = re.sub(regex, files, new_content)
        new_content = add_read_time(new_content)
        chapter['content'] = new_content

    content = json.dumps(book)
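Like any mdBook preprocessor, the script answers the `supports` probe with exit code 0 and otherwise expects a `[context, book]` JSON array on stdin. A minimal smoke-test harness, where the one-chapter book is fabricated test data and `env` is set to `dev` so no network requests are made:

```bash
# Smoke-test sketch for the preprocessor protocol; the JSON below is fake test data.
python3 ./hacktricks-preprocessor.py supports html && echo "renderer supported"

echo '[{"config":{"preprocessor":{"hacktricks":{"env":"dev"}}}},
      {"sections":[{"Chapter":{"name":"Test","path":"test.md",
       "source_path":"test.md","content":"# Test\n","sub_items":[]}}]}]' \
  | python3 ./hacktricks-preprocessor.py
```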
166
hacktricks-preprocessor.py.bak
Normal file
@@ -0,0 +1,166 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import logging
|
||||
from os import path
|
||||
from urllib.request import urlopen, Request
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
handler = logging.FileHandler(filename='hacktricks-preprocessor.log', mode='w', encoding='utf-8')
|
||||
handler.setLevel(logging.DEBUG)
|
||||
logger.addHandler(handler)
|
||||
|
||||
handler2 = logging.FileHandler(filename='hacktricks-preprocessor-error.log', mode='w', encoding='utf-8')
|
||||
handler2.setLevel(logging.ERROR)
|
||||
logger.addHandler(handler2)
|
||||
|
||||
|
||||
def findtitle(search ,obj, key, path=(),):
|
||||
# logger.debug(f"Looking for {search} in {path}")
|
||||
if isinstance(obj, dict) and key in obj and obj[key] == search:
|
||||
return obj, path
|
||||
if isinstance(obj, list):
|
||||
for k, v in enumerate(obj):
|
||||
item = findtitle(search, v, key, (*path, k))
|
||||
if item is not None:
|
||||
return item
|
||||
if isinstance(obj, dict):
|
||||
for k, v in obj.items():
|
||||
item = findtitle(search, v, key, (*path, k))
|
||||
if item is not None:
|
||||
return item
|
||||
|
||||
|
||||
def ref(matchobj):
|
||||
logger.debug(f'Ref match: {matchobj.groups(0)[0].strip()}')
|
||||
href = matchobj.groups(0)[0].strip()
|
||||
title = href
|
||||
if href.startswith("http://") or href.startswith("https://"):
|
||||
if context['config']['preprocessor']['hacktricks']['env'] == 'dev':
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
raw_html = str(urlopen(Request(href, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0'})).read())
|
||||
match = re.search('<title>(.*?)</title>', raw_html)
|
||||
title = match.group(1) if match else href
|
||||
except Exception as e:
|
||||
logger.error(f'Error opening URL {href}: {e}')
|
||||
pass #Dont stop on broken link
|
||||
else:
|
||||
try:
|
||||
if href.endswith("/"):
|
||||
href = href+"README.md" # Fix if ref points to a folder
|
||||
if "#" in href:
|
||||
chapter, _path = findtitle(href.split("#")[0], book, "source_path")
|
||||
title = " ".join(href.split("#")[1].split("-")).title()
|
||||
logger.debug(f'Ref has # using title: {title}')
|
||||
else:
|
||||
chapter, _path = findtitle(href, book, "source_path")
|
||||
logger.debug(f'Recursive title search result: {chapter["name"]}')
|
||||
title = chapter['name']
|
||||
except Exception as e:
|
||||
try:
|
||||
dir = path.dirname(current_chapter['source_path'])
|
||||
logger.debug(f'Error getting chapter title: {href} trying with relative path {path.normpath(path.join(dir,href))}')
|
||||
if "#" in href:
|
||||
chapter, _path = findtitle(path.normpath(path.join(dir,href.split('#')[0])), book, "source_path")
|
||||
title = " ".join(href.split("#")[1].split("-")).title()
|
||||
logger.debug(f'Ref has # using title: {title}')
|
||||
else:
|
||||
chapter, _path = findtitle(path.normpath(path.join(dir,href.split('#')[0])), book, "source_path")
|
||||
title = chapter["name"]
|
||||
logger.debug(f'Recursive title search result: {chapter["name"]}')
|
||||
except Exception as e:
|
||||
logger.error(f"Error: {e}")
|
||||
logger.error(f'Error getting chapter title: {path.normpath(path.join(dir,href))}')
|
||||
sys.exit(1)
|
||||
|
||||
if href.endswith("/README.md"):
|
||||
href = href.replace("/README.md", "/index.html")
|
||||
|
||||
template = f"""<a class="content_ref" href="{href}"><span class="content_ref_label">{title}</span></a>"""
|
||||
|
||||
# translate_table = str.maketrans({"\"":"\\\"","\n":"\\n"})
|
||||
# translated_text = template.translate(translate_table)
|
||||
result = template
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def files(matchobj):
|
||||
logger.debug(f'Files match: {matchobj.groups(0)[0].strip()}')
|
||||
href = matchobj.groups(0)[0].strip()
|
||||
title = ""
|
||||
|
||||
try:
|
||||
for root, dirs, files in os.walk(os.getcwd()+'/src/files'):
|
||||
logger.debug(root)
|
||||
logger.debug(files)
|
||||
if href in files:
|
||||
title = href
|
||||
logger.debug(f'File search result: {os.path.join(root, href)}')
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error: {e}")
|
||||
logger.error(f'Error searching file: {href}')
|
||||
sys.exit(1)
|
||||
|
||||
if title=="":
|
||||
logger.error(f'Error searching file: {href}')
|
||||
sys.exit(1)
|
||||
|
||||
template = f"""<a class="content_ref" href="/files/{href}"><span class="content_ref_label">{title}</span></a>"""
|
||||
|
||||
result = template
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def add_read_time(content):
|
||||
regex = r'(<\/style>\n# .*(?=\n))'
|
||||
new_content = re.sub(regex, lambda x: x.group(0) + "\n\nReading time: {{ #reading_time }}", content)
|
||||
return new_content
|
||||
|
||||
|
||||
def iterate_chapters(sections):
|
||||
if isinstance(sections, dict) and "PartTitle" in sections: # Not a chapter section
|
||||
return
|
||||
elif isinstance(sections, dict) and "Chapter" in sections: # Is a chapter return it and look into sub items
|
||||
# logger.debug(f"Chapter {sections['Chapter']}")
|
||||
yield sections['Chapter']
|
||||
yield from iterate_chapters(sections['Chapter']["sub_items"])
|
||||
elif isinstance(sections, list): # Iterate through list when in sections and in sub_items
|
||||
for k, v in enumerate(sections):
|
||||
yield from iterate_chapters(v)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    global context, book, current_chapter

    if len(sys.argv) > 1:  # we check if we received any argument
        if sys.argv[1] == "supports":
            # then we are good to return an exit status code of 0, since the other argument will just be the renderer's name
            sys.exit(0)

    logger.debug('Started hacktricks preprocessor')
    # load both the context and the book representations from stdin
    context, book = json.load(sys.stdin)

    logger.debug(f"Context: {context}")

    for chapter in iterate_chapters(book['sections']):
        logger.debug(f"Chapter: {chapter['path']}")
        current_chapter = chapter
        # regex = r'{{[\s]*#ref[\s]*}}(?:\n)?([^\\\n]*)(?:\n)?{{[\s]*#endref[\s]*}}'
        regex = r'{{[\s]*#ref[\s]*}}(?:\n)?([^\\\n#]*(?:#(.*))?)(?:\n)?{{[\s]*#endref[\s]*}}'
        new_content = re.sub(regex, ref, chapter['content'])
        regex = r'{{[\s]*#file[\s]*}}(?:\n)?([^\\\n]*)(?:\n)?{{[\s]*#endfile[\s]*}}'
        new_content = re.sub(regex, files, new_content)
        new_content = add_read_time(new_content)
        chapter['content'] = new_content

    content = json.dumps(book)
    logger.debug(content)

    print(content)
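To see what the `#ref` substitution in the main loop actually does, here is a minimal standalone sketch. It reuses the same regex with a simplified callback; `fake_ref` is hypothetical (the real `ref()` also resolves the chapter title from the book tree):

```python
import re

# Same regex the preprocessor uses to find {{#ref}} ... {{#endref}} blocks.
REF_REGEX = r'{{[\s]*#ref[\s]*}}(?:\n)?([^\\\n#]*(?:#(.*))?)(?:\n)?{{[\s]*#endref[\s]*}}'

def fake_ref(matchobj):
    # Stand-in for ref(): wrap the captured path in the same template,
    # using the path itself as the label instead of looking up the title.
    href = matchobj.groups(0)[0].strip()
    return f'<a class="content_ref" href="{href}"><span class="content_ref_label">{href}</span></a>'

content = "{{#ref}}\npentesting-cloud/pentesting-cloud-methodology.md\n{{#endref}}"
print(re.sub(REF_REGEX, fake_ref, content))
# -> <a class="content_ref" href="pentesting-cloud/pentesting-cloud-methodology.md">...</a>
```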
@@ -1,145 +0,0 @@
import os
import re
import tempfile


def clean_and_merge_md_files(start_folder, exclude_keywords, output_file):
    def clean_file_content(file_path):
        """Clean the content of a single file and return the cleaned lines."""
        with open(file_path, "r", encoding="utf-8") as f:
            content = f.readlines()

        cleaned_lines = []
        inside_hint = False
        for i, line in enumerate(content):
            # Skip lines containing excluded keywords
            if any(keyword in line for keyword in exclude_keywords):
                continue

            # Detect and skip {% hint %} ... {% endhint %} blocks
            # (guard the lookahead so the last line of a file can't raise IndexError)
            if "{% hint style=\"success\" %}" in line and i + 1 < len(content) and "Learn & practice" in content[i + 1]:
                inside_hint = True
            if "{% endhint %}" in line:
                inside_hint = False
                continue
            if inside_hint:
                continue

            # Skip lines with <figure> ... </figure>
            if re.match(r"<figure>.*?</figure>", line):
                continue

            # Add the line if it passed all checks
            cleaned_lines.append(line.rstrip())

        # Remove excess consecutive empty lines
        cleaned_lines = remove_consecutive_empty_lines(cleaned_lines)
        return cleaned_lines

    def remove_consecutive_empty_lines(lines):
        """Allow no more than one consecutive empty line."""
        cleaned_lines = []
        previous_line_empty = False
        for line in lines:
            if line.strip() == "":
                if not previous_line_empty:
                    cleaned_lines.append("")
                previous_line_empty = True
            else:
                cleaned_lines.append(line)
                previous_line_empty = False
        return cleaned_lines

    def gather_files_in_order(start_folder):
        """Gather all .md files in a depth-first order."""
        files = []
        for root, _, filenames in os.walk(start_folder):
            md_files = sorted([os.path.join(root, f) for f in filenames if f.endswith(".md")])
            files.extend(md_files)
        return files

    # Gather files in depth-first order
    all_files = gather_files_in_order(start_folder)

    # Process files and merge into a single output
    with open(output_file, "w", encoding="utf-8") as output:
        for file_path in all_files:
            # Clean the content of the file
            cleaned_content = clean_file_content(file_path)

            # Skip saving if the cleaned file has fewer than 10 non-empty lines
            if len([line for line in cleaned_content if line.strip()]) < 10:
                continue

            # Get the name of the file for the header
            file_name = os.path.basename(file_path)

            # Write header, cleaned content, and 2 extra new lines
            output.write(f"# {file_name}\n\n")
            output.write("\n".join(cleaned_content))
            output.write("\n\n")


def main():
    # Specify the starting folder and output file
    start_folder = os.getcwd()
    output_file = os.path.join(tempfile.gettempdir(), "merged_output.md")

    # Keywords to exclude from lines
    exclude_keywords = [
        "STM Cyber",  # STM Cyber ads
        "offer several valuable cybersecurity services",  # STM Cyber ads
        "and hack the unhackable",  # STM Cyber ads
        "blog.stmcyber.com",  # STM Cyber ads

        "RootedCON",  # RootedCON ads
        "rootedcon.com",  # RootedCON ads
        "the mission of promoting technical knowledge",  # RootedCON ads

        "Intigriti",  # Intigriti ads
        "intigriti.com",  # Intigriti ads

        "Trickest",  # Trickest ads
        "trickest.com",  # Trickest ads
        "Get Access Today:",

        "HACKENPROOF",  # HackenProof ads
        "hackenproof.com",  # HackenProof ads
        "HackenProof",  # HackenProof ads
        "discord.com/invite/N3FrSbmwdy",  # HackenProof ads
        "Hacking Insights:",  # HackenProof ads
        "Engage with content that delves",  # HackenProof ads
        "Real-Time Hack News:",  # HackenProof ads
        "Keep up-to-date with fast-paced",  # HackenProof ads
        "Latest Announcements:",  # HackenProof ads
        "Stay informed with the newest bug",  # HackenProof ads
        "start collaborating with top hackers today!",  # HackenProof ads

        "Pentest-Tools",  # Pentest-Tools.com ads
        "pentest-tools.com",  # Pentest-Tools.com ads
        "perspective on your web apps, network, and",  # Pentest-Tools.com ads
        "report critical, exploitable vulnerabilities with real business impact",  # Pentest-Tools.com ads

        "SerpApi",  # SerpApi ads
        "serpapi.com",  # SerpApi ads
        "offers fast and easy real-time",  # SerpApi ads
        "plans includes access to over 50 different APIs for scraping",  # SerpApi ads

        "8kSec",  # 8kSec ads
        "academy.8ksec.io",  # 8kSec ads
        "Learn the technologies and skills required",  # 8kSec ads

        "WebSec",  # WebSec ads
        "websec.nl",  # WebSec ads
        "which means they do it all; Pentesting",  # WebSec ads
    ]

    # Clean and merge .md files
    clean_and_merge_md_files(start_folder, exclude_keywords, output_file)

    # Print the path to the output file
    print(f"Merged content has been saved to: {output_file}")


if __name__ == "__main__":
    # Execute this from the hacktricks folder to clean
    # It will clean all the .md files and compile them into one file in a proper order
    main()
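Note that `re.match` anchors at the start of the string, so the `<figure>` filter above only drops lines that *begin* with the tag; a figure embedded mid-line survives. A quick standalone check (made-up input, assuming that behaviour is intended):

```python
import re

FIGURE_RE = r"<figure>.*?</figure>"
print(bool(re.match(FIGURE_RE, '<figure><img src="x.png"></figure>')))       # True  - line dropped
print(bool(re.match(FIGURE_RE, 'text <figure><img src="x.png"></figure>')))  # False - line kept
```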
88
scripts/clean_unused_images.sh
Normal file
@@ -0,0 +1,88 @@
#!/bin/bash

# Define the image folder and the root of your project
IMAGE_FOLDER="./src/images"
PROJECT_ROOT="."

# Move to the project root
cd "$PROJECT_ROOT" || exit

# Loop through each image file in the folder
find "$IMAGE_FOLDER" -type f | while IFS= read -r image; do
    # Extract the filename without the path
    image_name=$(basename "$image")

    # If the image file name contains "sponsor", skip it
    if [[ "$image_name" == *"sponsor"* ]]; then
        echo "Skipping sponsor image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"arte"* ]]; then
        echo "Skipping arte image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"grte"* ]]; then
        echo "Skipping grte image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"azrte"* ]]; then
        echo "Skipping azrte image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"websec"* ]]; then
        echo "Skipping sponsor image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"venacus"* ]]; then
        echo "Skipping sponsor image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"CLOUD"* ]]; then
        echo "Skipping sponsor image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"cloud.gif"* ]]; then
        echo "Skipping sponsor image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"CH_logo"* ]]; then
        echo "Skipping sponsor image: $image_name"
        continue
    fi

    if [[ "$image_name" == *"lasttower"* ]]; then
        echo "Skipping sponsor image: $image_name"
        continue
    fi

    echo "Checking image: $image_name"

    # Search for the image name using rg and capture the result
    # (double quotes so $IMAGE_FOLDER actually expands inside the glob)
    search_result=$(rg -F --files-with-matches "$image_name" \
        --no-ignore --hidden \
        --glob '!.git/*' \
        --glob "!$IMAGE_FOLDER/*" < /dev/null)

    echo "Search result: $search_result"

    # If rg doesn't find any matches, delete the image
    if [ -z "$search_result" ]; then
        echo "Deleting unused image: $image"
        rm "$image"
    else
        echo "Image used: $image_name"
        echo "$search_result"
    fi
done

echo "Cleanup completed!"
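For environments without ripgrep, a rough pure-Python equivalent of the same check (same skip substrings, assumed identical repository layout; it only prints candidates instead of deleting them, and is much slower since it reads every file for every image):

```python
import os

IMAGE_FOLDER = "./src/images"
PROJECT_ROOT = "."
SKIP_SUBSTRINGS = ("sponsor", "arte", "grte", "azrte", "websec",
                   "venacus", "CLOUD", "cloud.gif", "CH_logo", "lasttower")

def referenced_anywhere(name, root):
    """Return True if `name` appears in any file outside the image folder."""
    for dirpath, dirnames, filenames in os.walk(root):
        dirnames[:] = [d for d in dirnames if d != ".git"]  # skip .git
        if os.path.abspath(dirpath).startswith(os.path.abspath(IMAGE_FOLDER)):
            continue  # don't count the image folder itself as a reference
        for fn in filenames:
            try:
                with open(os.path.join(dirpath, fn), encoding="utf-8", errors="ignore") as f:
                    if name in f.read():
                        return True
            except OSError:
                continue
    return False

for dirpath, _, filenames in os.walk(IMAGE_FOLDER):
    for image_name in filenames:
        if any(s in image_name for s in SKIP_SUBSTRINGS):
            continue
        if not referenced_anywhere(image_name, PROJECT_ROOT):
            print(f"Unused image: {os.path.join(dirpath, image_name)}")
```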
165
scripts/compare_and_fix_refs.py
Normal file
@@ -0,0 +1,165 @@
#!/usr/bin/env python3
import argparse
import json
import re
from pathlib import Path

SRC_DIR = Path("./src")
REFS_JSON = Path("/tmp/refs.json")

# Matches content between {{#ref}} and {{#endref}}, including newlines, lazily
REF_RE = re.compile(r"{{#ref}}\s*([\s\S]*?)\s*{{#endref}}", re.MULTILINE)


def extract_refs(text: str):
    """Return a list of refs (trimmed) in appearance order."""
    return [m.strip() for m in REF_RE.findall(text)]


def replace_refs_in_text(text: str, new_refs: list):
    """Replace all refs in text with new_refs, maintaining order."""
    matches = list(REF_RE.finditer(text))
    if len(matches) != len(new_refs):
        return text  # Can't replace if the counts don't match

    # Replace from end to beginning to avoid offset issues
    result = text
    for match, new_ref in zip(reversed(matches), reversed(new_refs)):
        # Get the full match span to replace the entire {{#ref}}...{{#endref}} block
        start, end = match.span()
        # Format the replacement with proper newlines
        formatted_replacement = f"{{{{#ref}}}}\n{new_ref}\n{{{{#endref}}}}"
        result = result[:start] + formatted_replacement + result[end:]

    return result


def main():
    parser = argparse.ArgumentParser(description="Compare and fix refs between the current branch and the master branch")
    parser.add_argument("--files-unmatched-paths", type=str,
                        help="Path to the file where unmatched file paths will be saved (comma-separated on the first line)")
    args = parser.parse_args()

    if not SRC_DIR.is_dir():
        raise SystemExit(f"Not a directory: {SRC_DIR}")

    if not REFS_JSON.exists():
        raise SystemExit(f"Reference file not found: {REFS_JSON}")

    # Load the reference refs from the master branch
    try:
        with open(REFS_JSON, 'r', encoding='utf-8') as f:
            master_refs = json.load(f)
    except (json.JSONDecodeError, UnicodeDecodeError) as e:
        raise SystemExit(f"Error reading {REFS_JSON}: {e}")

    print(f"Loaded reference data for {len(master_refs)} files from {REFS_JSON}")

    files_processed = 0
    files_modified = 0
    files_with_differences = 0
    unmatched_files = []  # Track files with unmatched refs

    # Track which files exist in the current branch
    current_files = set()

    for md_path in sorted(SRC_DIR.rglob("*.md")):
        rel = md_path.relative_to(SRC_DIR).as_posix()
        rel_with_src = f"{SRC_DIR.name}/{rel}"  # Include the src/ prefix for output
        files_processed += 1

        # Track this file as existing in the current branch
        current_files.add(rel)

        try:
            content = md_path.read_text(encoding="utf-8")
        except UnicodeDecodeError:
            # Fallback if the encoding is odd
            content = md_path.read_text(errors="replace")

        current_refs = extract_refs(content)

        # Check if the file exists in the master refs
        if rel not in master_refs:
            if current_refs:
                print(f"⚠️ NEW FILE with refs: {rel_with_src} (has {len(current_refs)} refs)")
                files_with_differences += 1
                unmatched_files.append(rel_with_src)
            continue

        master_file_refs = master_refs[rel]

        # Compare ref counts
        if len(current_refs) != len(master_file_refs):
            print(f"📊 REF COUNT MISMATCH: {rel_with_src} -- Master: {len(master_file_refs)} refs, Current: {len(current_refs)} refs")
            files_with_differences += 1
            unmatched_files.append(rel_with_src)
            continue

        # If there are no refs in either, skip
        if not current_refs and not master_file_refs:
            continue

        # Compare individual refs
        differences_found = False
        for i, (current_ref, master_ref) in enumerate(zip(current_refs, master_file_refs)):
            if current_ref != master_ref:
                if not differences_found:
                    print(f"🔍 REF DIFFERENCES in {rel_with_src}:")
                    differences_found = True
                print(f"  Ref {i+1}:")
                print(f"    Master:  {repr(master_ref)}")
                print(f"    Current: {repr(current_ref)}")

        if differences_found:
            files_with_differences += 1
            unmatched_files.append(rel_with_src)

            # Replace the current refs with the master refs
            try:
                new_content = replace_refs_in_text(content, master_file_refs)
                if new_content != content:
                    md_path.write_text(new_content, encoding="utf-8")
                    files_modified += 1
                    print(f"  ✅ Fixed refs in {rel_with_src}")
                else:
                    print(f"  ❌ Failed to replace refs in {rel_with_src}")
            except Exception as e:
                print(f"  ❌ Error fixing refs in {rel_with_src}: {e}")

    # Check for files that exist in the master refs but not in the current branch
    unexisted_files = 0
    for master_file_rel in master_refs.keys():
        if master_file_rel not in current_files:
            rel_with_src = f"{SRC_DIR.name}/{master_file_rel}"
            print(f"🗑️ {rel_with_src} (existed in master but not in the current branch)")
            unexisted_files += 1
            unmatched_files.append(rel_with_src)

    # Save the unmatched files to the specified path if requested
    if args.files_unmatched_paths and unmatched_files:
        try:
            unmatched_paths_file = Path(args.files_unmatched_paths)
            unmatched_paths_file.parent.mkdir(parents=True, exist_ok=True)
            with open(unmatched_paths_file, 'w', encoding='utf-8') as f:
                f.write(','.join(list(set(unmatched_files))))
            print(f"📝 Saved {len(unmatched_files)} unmatched file paths to: {unmatched_paths_file}")
        except Exception as e:
            print(f"❌ Error saving unmatched paths to {args.files_unmatched_paths}: {e}")
    elif args.files_unmatched_paths and not unmatched_files:
        # Create an empty file if no unmatched files were found
        try:
            unmatched_paths_file = Path(args.files_unmatched_paths)
            unmatched_paths_file.parent.mkdir(parents=True, exist_ok=True)
            unmatched_paths_file.write_text('\n', encoding='utf-8')
            print(f"✅ No unmatched files found. Created empty file: {unmatched_paths_file}")
        except Exception as e:
            print(f"❌ Error creating empty unmatched paths file {args.files_unmatched_paths}: {e}")

    print(f"\nSUMMARY:")
    print(f"  Files processed: {files_processed}")
    print(f"  Files with different refs: {files_with_differences}")
    print(f"  Files modified: {files_modified}")
    print(f"  Non-existing files: {unexisted_files}")
    if unmatched_files:
        print(f"  Unmatched files: {len(unmatched_files)}")


if __name__ == "__main__":
    main()
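The `formatted_replacement` template above is easy to misread: inside an f-string, `{{{{` renders as a literal `{{`, so the output is a normal ref block. A one-line check:

```python
new_ref = "pentesting-cloud/pentesting-cloud-methodology.md"
print(f"{{{{#ref}}}}\n{new_ref}\n{{{{#endref}}}}")
# {{#ref}}
# pentesting-cloud/pentesting-cloud-methodology.md
# {{#endref}}
```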
38
scripts/get_and_save_refs.py
Normal file
@@ -0,0 +1,38 @@
#!/usr/bin/env python3
import json
import re
from pathlib import Path

SRC_DIR = Path("./src")
REFS_JSON = Path("/tmp/refs.json")

# Matches content between {{#ref}} and {{#endref}}, including newlines, lazily
REF_RE = re.compile(r"{{#ref}}\s*([\s\S]*?)\s*{{#endref}}", re.MULTILINE)


def extract_refs(text: str):
    """Return a list of refs (trimmed) in appearance order."""
    return [m.strip() for m in REF_RE.findall(text)]


def main():
    if not SRC_DIR.is_dir():
        raise SystemExit(f"Not a directory: {SRC_DIR}")

    refs_per_path = {}  # { "relative/path.md": [ref1, ref2, ...] }

    for md_path in sorted(SRC_DIR.rglob("*.md")):
        rel = md_path.relative_to(SRC_DIR).as_posix()
        try:
            content = md_path.read_text(encoding="utf-8")
        except UnicodeDecodeError:
            # Fallback if the encoding is odd
            content = md_path.read_text(errors="replace")

        refs = extract_refs(content)
        refs_per_path[rel] = refs  # keep the order from findall

    REFS_JSON.write_text(json.dumps(refs_per_path, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")
    print(f"Wrote {REFS_JSON} with {len(refs_per_path)} files.")


if __name__ == "__main__":
    main()
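Both ref scripts share the same `REF_RE`; a standalone sketch of what it captures, using ref shapes borrowed from the translation prompt further down (sample text is made up):

```python
import re

REF_RE = re.compile(r"{{#ref}}\s*([\s\S]*?)\s*{{#endref}}", re.MULTILINE)

sample = """Intro text
{{#ref}}
macos-tcc-bypasses/
{{#endref}}
More text
{{#ref}}0.-basic-llm-concepts.md{{#endref}}
"""

print([m.strip() for m in REF_RE.findall(sample)])
# -> ['macos-tcc-bypasses/', '0.-basic-llm-concepts.md']
```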
@@ -12,15 +12,58 @@ from tqdm import tqdm #pip3 install tqdm
import traceback


MASTER_BRANCH = "master"
VERBOSE = True
MAX_TOKENS = 10000 #gpt-4-1106-preview
MAX_TOKENS = 50000 #gpt-4-1106-preview
DISALLOWED_SPECIAL = "<|endoftext|>"
REPLACEMENT_TOKEN = "<END_OF_TEXT>"

def run_git_command_with_retry(cmd, max_retries=1, delay=5, **kwargs):
    """
    Run a git command with retry logic.

    Args:
        cmd: Command to run (list or string)
        max_retries: Number of additional retries after the first failure
        delay: Delay in seconds between retries
        **kwargs: Additional arguments to pass to subprocess.run

    Returns:
        subprocess.CompletedProcess result
    """
    last_exception = None
    for attempt in range(max_retries + 1):
        try:
            result = subprocess.run(cmd, **kwargs)
            return result
        except Exception as e:
            last_exception = e
            if attempt < max_retries:
                print(f"Git command failed (attempt {attempt + 1}/{max_retries + 1}): {e}")
                print(f"Retrying in {delay} seconds...")
                time.sleep(delay)
            else:
                print(f"Git command failed after {max_retries + 1} attempts: {e}")

    # If we get here, all attempts failed; re-raise the last exception
    if last_exception:
        raise last_exception
    else:
        raise RuntimeError("Unexpected error in git command retry logic")

def _sanitize(text: str) -> str:
    """
    Replace the reserved tiktoken token with a harmless placeholder.
    Called everywhere a string can flow into tiktoken.encode() or the
    OpenAI client.
    """
    return text.replace(DISALLOWED_SPECIAL, REPLACEMENT_TOKEN)

def reportTokens(prompt, model):
    encoding = tiktoken.encoding_for_model(model)
    # print the number of tokens in light gray, with the first 50 characters of the prompt in green; if truncated, show that it is truncated
    #print("\033[37m" + str(len(encoding.encode(prompt))) + " tokens\033[0m" + " in prompt: " + "\033[92m" + prompt[:50] + "\033[0m" + ("..." if len(prompt) > 50 else ""))
    prompt = _sanitize(prompt)
    return len(encoding.encode(prompt))


@@ -32,39 +75,41 @@ def check_git_dir(path):
def get_branch_files(branch):
    """Get a list of all files in a branch."""
    command = f"git ls-tree -r --name-only {branch}"
    result = subprocess.run(command.split(), stdout=subprocess.PIPE)
    result = run_git_command_with_retry(command.split(), stdout=subprocess.PIPE)
    files = result.stdout.decode().splitlines()
    return set(files)

def delete_unique_files(branch):
def get_unused_files(branch):
    """Delete files that are unique to branch2."""
    # Get the files in each branch
    files_branch1 = get_branch_files(MASTER_BRANCH)
    files_branch2 = get_branch_files(branch)
    files_branch_master = get_branch_files(MASTER_BRANCH)
    files_branch_lang = get_branch_files(branch)

    # Find the files that are in branch2 but not in branch1
    unique_files = files_branch2 - files_branch1
    unique_files = files_branch_lang - files_branch_master

    if unique_files:
        # Switch to the second branch
        subprocess.run(["git", "checkout", branch])

        # Delete the unique files from the second branch
        for file in unique_files:
            subprocess.run(["git", "rm", file])

        subprocess.run(["git", "checkout", MASTER_BRANCH])

        print(f"[+] Deleted {len(unique_files)} files from branch: {branch}")
    return unique_files


def cp_translation_to_repo_dir_and_check_gh_branch(branch, temp_folder, translate_files):
    branch_exists = subprocess.run(['git', 'show-ref', '--verify', '--quiet', 'refs/heads/' + branch])
    """
    Get the translated files from the temp folder and copy them to the repo directory in the expected branch.
    Also remove all the files that are not in the master branch.
    """
    branch_exists = run_git_command_with_retry(['git', 'show-ref', '--verify', '--quiet', 'refs/heads/' + branch])
    # If the branch doesn't exist, create it
    if branch_exists.returncode != 0:
        subprocess.run(['git', 'checkout', '-b', branch])
        run_git_command_with_retry(['git', 'checkout', '-b', branch])
    else:
        subprocess.run(['git', 'checkout', branch])
        run_git_command_with_retry(['git', 'checkout', branch])

    # Get the files to delete
    files_to_delete = get_unused_files(branch)

    # Delete the files
    for file in files_to_delete:
        os.remove(file)
        print(f"[+] Deleted {file}")

    # Walk through the source directory
    for dirpath, dirnames, filenames in os.walk(temp_folder):
@@ -79,32 +124,72 @@ def cp_translation_to_repo_dir_and_check_gh_branch(branch, temp_folder, translat
        for file_name in filenames:
            src_file = os.path.join(dirpath, file_name)
            shutil.copy2(src_file, dest_path)

    print(f"Translated files copied to branch: {branch}")
            if "/images/" not in src_file and "/theme/" not in src_file:
                print(f"[+] Copied from {src_file} to {file_name}")

    if translate_files:
        subprocess.run(['git', 'add', "-A"])
        subprocess.run(['git', 'commit', '-m', f"Translated {translate_files} to {branch}"[:72]])
        subprocess.run(['git', 'checkout', MASTER_BRANCH])
        print("Commit created and moved to master branch")
        commit_and_push(translate_files, branch)
    else:
        print("Not committing anything, leaving in the language branch")

def commit_and_push(translate_files, branch):
    # Define the commands we want to run
    commands = [
        ['git', 'add', '-A'],
        ['git', 'commit', '-m', f"Translated {translate_files} to {branch}"[:72]],
        ['git', 'push', '--set-upstream', 'origin', branch],
    ]

    for cmd in commands:
        result = run_git_command_with_retry(cmd, capture_output=True, text=True)

        # Print stdout and stderr (if any)
        if result.stdout:
            print(f"STDOUT for {cmd}:\n{result.stdout}")
            if "nothing to commit" in result.stdout.lower():
                print("Nothing to commit, leaving")
                exit(0)

        if result.stderr:
            print(f"STDERR for {cmd}:\n{result.stderr}")

        # Check for errors
        if result.returncode != 0:
            raise RuntimeError(
                f"Command `{cmd}` failed with exit code {result.returncode}"
            )

    print("Commit created and pushed")


def translate_text(language, text, file_path, model, cont=0, slpitted=False, client=None):
    if not text:
        return text

    messages = [
        {"role": "system", "content": "You are a professional hacker, translator and writer. You write everything super clear and as concise as possible without losing information. Do not return invalid Unicode output."},
        {"role": "system", "content": f"The following is content from a hacking book about hacking techniques. The following content is from the file {file_path}. Translate the relevant English text to {language} and return the translation keeping exactly the same markdown and html syntax. Do not translate things like code, hacking technique names, hacking words, cloud/SaaS platform names (like Workspace, aws, gcp...), the word 'leak', pentesting, and markdown tags. Also don't add any extra stuff apart from the translation and markdown syntax."},
        {"role": "system", "content": "You are a professional hacker, translator and writer. You translate everything super clear and as concise as possible without losing information. Do not return invalid Unicode output and do not translate markdown or html tags or links."},
        {"role": "system", "content": f"""The following is content from a hacking book about technical hacking techniques. The following given content is from the file {file_path}.
Translate the relevant English text to {language} and return the translation keeping exactly the same markdown and html syntax and following this guidance:

- Don't translate things like code, hacking technique names, common hacking words, cloud/SaaS platform names (like Workspace, aws, gcp...), the word 'leak', pentesting, links and markdown tags.
- Don't translate links or paths, e.g. if a link or ref is to "lamda-post-exploitation.md" don't translate that path to the language.
- Don't translate or modify tags, links, refs and paths like in:
    - {{#tabs}}
    - {{#tab name="Method1"}}
    - {{#ref}}\ngeneric-methodologies-and-resources/pentesting-methodology.md\n{{#endref}}
    - {{#include ./banners/hacktricks-training.md}}
    - {{#ref}}macos-tcc-bypasses/{{#endref}}
    - {{#ref}}0.-basic-llm-concepts.md{{#endref}}
- Don't translate any other tag, just return markdown and html content as is.

Also don't add any extra stuff in your response that is not part of the translation and markdown syntax."""},
        {"role": "user", "content": text},
    ]
    try:
        response = client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=0
            temperature=1  # 1 because gpt-5 doesn't support other values
        )
    except Exception as e:
        print("Python Exception: " + str(e))
@@ -149,6 +234,9 @@ def translate_text(language, text, file_path, model, cont=0, slpitted=False, cli
        return translate_text(language, text, file_path, model, cont, False, client)

    response_message = response.choices[0].message.content.strip()
    response_message = response_message.replace("bypassy", "bypasses")  # PL translations translate that from time to time
    response_message = response_message.replace("Bypassy", "Bypasses")
    response_message = response_message.replace("-privec.md", "-privesc.md")  # PL translations translate that from time to time

    # Sometimes chatgpt modifies the number of "#" at the beginning of the text, so we need to fix that. This is especially important for the first line of the MD, which must have only 1 "#"
    cont2 = 0
@@ -170,9 +258,11 @@ def split_text(text, model):
    chunks = []
    chunk = ''
    in_code_block = False
    in_ref = False

    for line in lines:
        # If we are in a code block, just add the code to the chunk

        # Keep code blocks as one chunk
        if line.startswith('```'):

            # If we are in a code block, finish it with the "```"
@@ -188,8 +278,24 @@ def split_text(text, model):
            chunk += line + '\n'

            continue

        """
        Prevent refs using `` like:
        {{#ref}}
        ../../generic-methodologies-and-resources/pentesting-network/`spoofing-llmnr-nbt-ns-mdns-dns-and-wpad-and-relay-attacks.md`
        {{#endref}}
        """
        if line.startswith('{{#ref}}'):
            in_ref = True

        if in_ref:
            line = line.replace("`", "")

        if line.startswith('{{#endref}}'):
            in_ref = False


        # If a new section starts, see if we should split the text
        if (line.startswith('#') and reportTokens(chunk + "\n" + line.strip(), model) > MAX_TOKENS*0.8) or \
            reportTokens(chunk + "\n" + line.strip(), model) > MAX_TOKENS:
@@ -202,23 +308,30 @@ def split_text(text, model):
    return chunks


def copy_gitbook_dir(source_path, dest_path):
    folder_name = ".gitbook/"
    source_folder = os.path.join(source_path, folder_name)
    destination_folder = os.path.join(dest_path, folder_name)
    if not os.path.exists(source_folder):
        print(f"Error: {source_folder} does not exist.")
    else:
        # Copy the .gitbook folder
        shutil.copytree(source_folder, destination_folder)
        print(f"Copied .gitbook folder from {source_folder} to {destination_folder}")
def copy_dirs(source_path, dest_path, folder_names):
    for folder_name in folder_names:
        source_folder = os.path.join(source_path, folder_name)
        destination_folder = os.path.join(dest_path, folder_name)
        if not os.path.exists(source_folder):
            print(f"Error: {source_folder} does not exist.")
        else:
            # Copy the theme folder
            shutil.copytree(source_folder, destination_folder)
            print(f"Copied {folder_name} folder from {source_folder} to {destination_folder}")

def copy_summary(source_path, dest_path):
    file_name = "src/SUMMARY.md"
    source_filepath = os.path.join(source_path, file_name)
    dest_filepath = os.path.join(dest_path, file_name)
    shutil.copy2(source_filepath, dest_filepath)
    print("[+] Copied SUMMARY.md")
def move_files_to_push(source_path, dest_path, relative_file_paths):
    for file_path in relative_file_paths:
        source_filepath = os.path.join(source_path, file_path)
        dest_filepath = os.path.join(dest_path, file_path)
        if not os.path.exists(source_filepath):
            print(f"Error: {source_filepath} does not exist.")
        else:
            shutil.copy2(source_filepath, dest_filepath)
            print(f"[+] Copied {file_path}")

def copy_files(source_path, dest_path):
    file_names = ["src/SUMMARY.md", "hacktricks-preprocessor.py", "book.toml", ".gitignore", "src/robots.txt"]
    move_files_to_push(source_path, dest_path, file_names)

def translate_file(language, file_path, file_dest_path, model, client):
    global VERBOSE
@@ -234,7 +347,7 @@ def translate_file(language, file_path, file_dest_path, model, client):
    translated_content = ''
    start_time = time.time()
    for chunk in content_chunks:
        # Don't trasnlate code blocks
        # Don't translate code blocks
        if chunk.startswith('```'):
            translated_content += chunk + '\n'
        else:
@@ -248,9 +361,10 @@ def translate_file(language, file_path, file_dest_path, model, client):
        f.write(translated_content)

    #if VERBOSE:
    print(f"Page {file_path} translated in {elapsed_time:.2f} seconds")
    print(f"Page {file_path} translated into {file_dest_path} in {elapsed_time:.2f} seconds")


"""
def translate_directory(language, source_path, dest_path, model, num_threads, client):
    all_markdown_files = []
    for subdir, dirs, files in os.walk(source_path):
@@ -280,17 +394,17 @@ def translate_directory(language, source_path, dest_path, model, num_threads, cl
            tb = traceback.format_exc()
            print(f'Translation generated an exception: {exc}')
            print("Traceback:", tb)

"""

if __name__ == "__main__":
    print("- Version 1.1.1")
    print("- Version 2.0.0")
    # Set up argparse
    parser = argparse.ArgumentParser(description='Translate the gitbook and copy it to a new branch.')
    parser.add_argument('-d', '--directory', action='store_true', help='Translate a full directory.')
    #parser.add_argument('-d', '--directory', action='store_true', help='Translate a full directory.')
    parser.add_argument('-l', '--language', required=True, help='Target language for translation.')
    parser.add_argument('-b', '--branch', required=True, help='Branch name to copy translated files.')
    parser.add_argument('-k', '--api-key', required=True, help='API key to use.')
    parser.add_argument('-m', '--model', default="gpt-4o-mini", help='The openai model to use. By default: gpt-4o-mini')
    parser.add_argument('-m', '--model', default="gpt-5-mini", help='The openai model to use. By default: gpt-5-mini')
    parser.add_argument('-o', '--org-id', help='The org ID to use (if not set, the default one will be used).')
    parser.add_argument('-f', '--file-paths', help='If this is set, only the indicated files will be translated (" , " separated).')
    parser.add_argument('-n', '--dont-cd', action='store_false', help="If this is true, the script won't change the current directory.")
@@ -345,7 +459,7 @@ if __name__ == "__main__":
    translate_files = None  # Need to initialize it here to avoid an error
    if args.file_paths:
        # Translate only the indicated files
        translate_files = [f for f in args.file_paths.split(' , ') if f]
        translate_files = list(set([f.strip() for f in args.file_paths.split(',') if f]))
        for file_path in translate_files:
            #with tqdm(total=len(all_markdown_files), desc="Translating Files") as pbar:
            with concurrent.futures.ThreadPoolExecutor(max_workers=num_threads) as executor:
@@ -359,23 +473,21 @@ if __name__ == "__main__":
                    #pbar.update()
                except Exception as exc:
                    print(f'Translation generated an exception: {exc}')

        # Delete possibly removed files from the master branch
        delete_unique_files(branch)

    elif args.directory:

    #elif args.directory:
        # Translate everything
        translate_directory(language, source_folder, dest_folder, model, num_threads, client)
        #translate_directory(language, source_folder, dest_folder, model, num_threads, client)

    else:
        print("You need to indicate either a directory or a list of files to translate.")
        exit(1)
        exit(0)

    # Copy summary
    copy_summary(source_folder, dest_folder)
    # Copy Summary
    copy_files(source_folder, dest_folder)

    # Copy the .gitbook folder
    copy_gitbook_dir(source_folder, dest_folder)
    folder_names = ["theme/", "src/images/"]
    copy_dirs(source_folder, dest_folder, folder_names)

    # Create the branch and copy the translated files
    cp_translation_to_repo_dir_and_check_gh_branch(branch, dest_folder, translate_files)
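The split condition in `split_text` couples a soft budget at headings with a hard budget anywhere. A minimal standalone sketch of just that decision (assuming tiktoken is installed and a tiktoken version that knows the model name; `MAX_TOKENS` as above):

```python
import tiktoken  # pip3 install tiktoken

MAX_TOKENS = 50000
encoding = tiktoken.encoding_for_model("gpt-4")

def should_split(chunk, line):
    # Token count of the chunk if this line were appended.
    n = len(encoding.encode(chunk + "\n" + line.strip()))
    # Split early at headings (80% of the budget), or anywhere past the hard cap.
    return (line.startswith("#") and n > MAX_TOKENS * 0.8) or n > MAX_TOKENS

print(should_split("short chunk", "# New section"))  # False
```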
297
scripts/upload_ht_to_ai.py
Normal file
@@ -0,0 +1,297 @@
import os
import requests
import zipfile
import tempfile
import time
import glob
import re

from openai import OpenAI

# Initialize the OpenAI client
client = OpenAI(api_key=os.getenv("MY_OPENAI_API_KEY"))

# Vector Store ID
VECTOR_STORE_ID = "vs_67e9f92e8cc88191911be54f81492fb8"

# --------------------------------------------------
# Step 1: Download and Extract Markdown Files
# --------------------------------------------------

def download_zip(url, save_path):
    print(f"Downloading zip from: {url}")
    response = requests.get(url)
    response.raise_for_status()  # Ensure the download succeeded
    with open(save_path, "wb") as f:
        f.write(response.content)
    print(f"Downloaded zip from: {url}")

def extract_markdown_files(zip_path, extract_dir):
    print(f"Extracting zip: {zip_path} to {extract_dir}")
    with zipfile.ZipFile(zip_path, "r") as zip_ref:
        zip_ref.extractall(extract_dir)
    # Recursively find all .md files
    md_files = glob.glob(os.path.join(extract_dir, "**", "*.md"), recursive=True)

    return md_files

# Repository URLs
hacktricks_url = "https://github.com/HackTricks-wiki/hacktricks/archive/refs/heads/master.zip"
hacktricks_cloud_url = "https://github.com/HackTricks-wiki/hacktricks-cloud/archive/refs/heads/main.zip"

# Temporary directory for downloads and extraction
temp_dir = tempfile.mkdtemp()
try:
    # Download the zip archives
    print("Downloading Hacktricks repositories...")
    hacktricks_zip = os.path.join(temp_dir, "hacktricks.zip")
    hacktricks_cloud_zip = os.path.join(temp_dir, "hacktricks_cloud.zip")
    download_zip(hacktricks_url, hacktricks_zip)
    download_zip(hacktricks_cloud_url, hacktricks_cloud_zip)

    # Extract the markdown files
    hacktricks_extract_dir = os.path.join(temp_dir, "hacktricks")
    hacktricks_cloud_extract_dir = os.path.join(temp_dir, "hacktricks_cloud")

    md_files_hacktricks = extract_markdown_files(hacktricks_zip, hacktricks_extract_dir)
    md_files_hacktricks_cloud = extract_markdown_files(hacktricks_cloud_zip, hacktricks_cloud_extract_dir)

    all_md_files = md_files_hacktricks + md_files_hacktricks_cloud
    print(f"Found {len(all_md_files)} markdown files.")
finally:
    # Optional cleanup of temporary files after processing
    # shutil.rmtree(temp_dir)
    pass

# --------------------------------------------------
# Step 2: Remove All Existing Files in the Vector Store
# --------------------------------------------------
# List the current files in the vector store and delete each one.
existing_files = list(client.vector_stores.files.list(VECTOR_STORE_ID))
print(f"Found {len(existing_files)} files in the vector store. Removing them...")

for file_obj in existing_files:
    # Delete the underlying file object; this removes it from the vector store.
    try:
        client.files.delete(file_id=file_obj.id)
        print(f"Deleted file: {file_obj.id}")
        time.sleep(1)  # Give it a moment to ensure the deletion is processed
    except Exception as e:
        # Handle potential errors during deletion
        print(f"Error deleting file {file_obj.id}: {e}")

# ----------------------------------------------------
# Step 3: Clean Markdown Files
# ----------------------------------------------------
# Clean the markdown files and merge them so they are easier
# to upload to the vector store.


def clean_and_merge_md_files(start_folder, exclude_keywords, output_file):
    def clean_file_content(file_path):
        """Clean the content of a single file and return the cleaned lines."""
        with open(file_path, "r", encoding="utf-8") as f:
            content = f.readlines()

        cleaned_lines = []
        inside_hint = False
        for i, line in enumerate(content):
            # Skip lines containing excluded keywords
            if any(keyword in line for keyword in exclude_keywords):
                continue

            # Detect and skip {% hint %} ... {% endhint %} blocks
            # (guard the lookahead so the last line of a file can't raise IndexError)
            if "{% hint style=\"success\" %}" in line and i + 1 < len(content) and "Learn & practice" in content[i + 1]:
                inside_hint = True
            if "{% endhint %}" in line:
                inside_hint = False
                continue
            if inside_hint:
                continue

            if line.startswith("#") and "reference" in line.lower():  # If the references section is reached, just stop reading the file
                break

            # Skip lines with <figure> ... </figure>
            if re.match(r"<figure>.*?</figure>", line):
                continue

            # Add the line if it passed all checks
            cleaned_lines.append(line.rstrip())

        # Remove excess consecutive empty lines
        cleaned_lines = remove_consecutive_empty_lines(cleaned_lines)
        return cleaned_lines

    def remove_consecutive_empty_lines(lines):
        """Allow no more than one consecutive empty line."""
        cleaned_lines = []
        previous_line_empty = False
        for line in lines:
            if line.strip() == "":
                if not previous_line_empty:
                    cleaned_lines.append("")
                previous_line_empty = True
            else:
                cleaned_lines.append(line)
                previous_line_empty = False
        return cleaned_lines

    def gather_files_in_order(start_folder):
        """Gather all .md files in a depth-first order."""
        files = []
        for root, _, filenames in os.walk(start_folder):
            md_files = sorted([os.path.join(root, f) for f in filenames if f.endswith(".md") and f.lower() not in ["summary.md", "references.md"]])
            files.extend(md_files)
        return files

    # Gather files in depth-first order
    all_files = gather_files_in_order(start_folder)

    # Process files and merge into a single output
    with open(output_file, "w", encoding="utf-8") as output:
        for file_path in all_files:
            # Clean the content of the file
            cleaned_content = clean_file_content(file_path)

            # Skip saving if the cleaned file has fewer than 10 non-empty lines
            if len([line for line in cleaned_content if line.strip()]) < 10:
                continue

            # Get the name of the file for the header
            file_name = os.path.basename(file_path)

            # Write the header, cleaned content, and 2 extra new lines
            output.write(f"### Start file: {file_name} ###\n\n")
            output.write("\n".join(cleaned_content))
            output.write("\n\n")

# Specify the starting folder and output files
start_folder = os.getcwd()

# Keywords to exclude from lines
exclude_keywords = [
    "hacktricks-training.md",
    # ...
]

ht_file = os.path.join(tempfile.gettempdir(), "hacktricks.md")
htc_file = os.path.join(tempfile.gettempdir(), "hacktricks-cloud.md")
clean_and_merge_md_files(hacktricks_extract_dir, exclude_keywords, ht_file)
print(f"Merged content has been saved to: {ht_file}")
clean_and_merge_md_files(hacktricks_cloud_extract_dir, exclude_keywords, htc_file)
print(f"Merged content has been saved to: {htc_file}")


# ----------------------------------------------------
# Step 4: Upload All Markdown Files to the Vector Store
# ----------------------------------------------------
# Upload two files to the vector store.
# Uploading the .md hacktricks files individually can be slow,
# which is why they were merged into just 2 files beforehand.

file_streams = []

ht_stream = open(ht_file, "rb")
file_streams.append(ht_stream)
htc_stream = open(htc_file, "rb")
file_streams.append(htc_stream)

file_batch = client.vector_stores.file_batches.upload_and_poll(
    vector_store_id=VECTOR_STORE_ID,
    files=file_streams
)

time.sleep(60)  # Sleep for a minute to ensure the upload is processed
ht_stream.close()
htc_stream.close()


"""This was to upload each .md independently, which turned out to be a nightmare.
# Ensure we don't exceed the maximum number of file streams

for file_path in all_md_files:
    # Check if we have reached the maximum number of streams
    if len(file_streams) >= 300:
        print("Reached maximum number of file streams (300). Uploading current batch...")
        # Upload the current batch before adding more files
        file_batch = client.vector_stores.file_batches.upload_and_poll(
            vector_store_id=VECTOR_STORE_ID,
            files=file_streams
        )
        print("Upload status:", file_batch.status)
        print("File counts:", file_batch.file_counts)
        # Clear the list for the next batch
        file_streams = []
        time.sleep(120)  # Sleep for 2 minutes to avoid hitting API limits
    try:
        stream = open(file_path, "rb")
        file_streams.append(stream)
    except Exception as e:
        print(f"Error opening {file_path}: {e}")

if file_streams:
    # Upload the files and poll for completion
    file_batch = client.vector_stores.file_batches.upload_and_poll(
        vector_store_id=VECTOR_STORE_ID,
        files=file_streams
    )
    print("Upload status:", file_batch.status)
    print("File counts:", file_batch.file_counts)
else:
    print("No markdown files to upload.")


# Close all file streams
for stream in file_streams:
    stream.close()
"""
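The commented-out block above manages the 300-stream cap by hand; a generic helper doing the same batching could look like this. It is a sketch only: it reuses the `upload_and_poll` call exactly as the script above uses it, and `upload_in_batches` is a hypothetical name, not a tested implementation.

```python
def upload_in_batches(client, vector_store_id, paths, batch_size=300):
    """Upload files to a vector store in batches of at most batch_size streams."""
    for i in range(0, len(paths), batch_size):
        streams = [open(p, "rb") for p in paths[i:i + batch_size]]
        try:
            batch = client.vector_stores.file_batches.upload_and_poll(
                vector_store_id=vector_store_id,
                files=streams,
            )
            print("Upload status:", batch.status)
        finally:
            # Always close the streams, even if the upload fails
            for s in streams:
                s.close()
```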
@@ -1,15 +1,41 @@
# HackTricks Cloud

Reading time: {{ #reading_time }}

{{#include ./banners/hacktricks-training.md}}

<figure><img src="images/cloud.gif" alt=""><figcaption></figcaption></figure>

_Hacktricks logos & motion designed by_ [_@ppiernacho_](https://www.instagram.com/ppieranacho/)_._
_Hacktricks logos & motion designed by_ [_@ppieranacho_](https://www.instagram.com/ppieranacho/)_._

> [!TIP]
> Welcome to the page where you will find each **hacking trick/technique/whatever related to CI/CD & Cloud** I have learnt in **CTFs**, **real** life **environments**, **researching**, and **reading** research and news.

### Run HackTricks Cloud Locally

```bash
# Download latest version of hacktricks cloud
git clone https://github.com/HackTricks-wiki/hacktricks-cloud

# Select the language you want to use
export LANG="master" # Leave master for English
# "af" for Afrikaans
# "de" for German
# "el" for Greek
# "es" for Spanish
# "fr" for French
# "hi" for Hindi
# "it" for Italian
# "ja" for Japanese
# "ko" for Korean
# "pl" for Polish
# "pt" for Portuguese
# "sr" for Serbian
# "sw" for Swahili
# "tr" for Turkish
# "uk" for Ukrainian
# "zh" for Chinese

# Run the docker container indicating the path to the hacktricks-cloud folder
docker run -d --rm --platform linux/amd64 -p 3377:3000 --name hacktricks_cloud -v $(pwd)/hacktricks-cloud:/app ghcr.io/hacktricks-wiki/hacktricks-cloud/translator-image bash -c "mkdir -p ~/.ssh && ssh-keyscan -H github.com >> ~/.ssh/known_hosts && cd /app && git checkout $LANG && git pull && MDBOOK_PREPROCESSOR__HACKTRICKS__ENV=dev mdbook serve --hostname 0.0.0.0"
```

Your local copy of HackTricks Cloud will be **available at [http://localhost:3377](http://localhost:3377)** after a minute.

### **Pentesting CI/CD Methodology**

@@ -35,6 +61,3 @@ _Hacktricks logos & motion designed by_ [_@ppiernacho_](https://www.instagram.co

{{#include ./banners/hacktricks-training.md}}
368
src/SUMMARY.md
@@ -3,12 +3,15 @@
|
||||
# 👽 Welcome!
|
||||
|
||||
- [HackTricks Cloud](README.md)
|
||||
- [About the Author$$external:https://book.hacktricks.xyz/welcome/about-the-author$$]()
|
||||
- [HackTricks Values & faq$$external:https://book.hacktricks.xyz/welcome/hacktricks-values-and-faq$$]()
|
||||
- [About the Author$$external:https://book.hacktricks.wiki/en/welcome/about-the-author.html$$]()
|
||||
- [HackTricks Values & faq$$external:https://book.hacktricks.wiki/en/welcome/hacktricks-values-and-faq.html$$]()
|
||||
|
||||
# 🏭 Pentesting CI/CD
|
||||
|
||||
- [Pentesting CI/CD Methodology](pentesting-ci-cd/pentesting-ci-cd-methodology.md)
|
||||
- [Docker Build Context Abuse in Cloud Envs](pentesting-ci-cd/docker-build-context-abuse.md)
|
||||
- [Gitblit Security](pentesting-ci-cd/gitblit-security/README.md)
|
||||
- [Ssh Auth Bypass](pentesting-ci-cd/gitblit-security/gitblit-embedded-ssh-auth-bypass-cve-2024-28080.md)
|
||||
- [Github Security](pentesting-ci-cd/github-security/README.md)
|
||||
- [Abusing Github Actions](pentesting-ci-cd/github-security/abusing-github-actions/README.md)
|
||||
- [Gh Actions - Artifact Poisoning](pentesting-ci-cd/github-security/abusing-github-actions/gh-actions-artifact-poisoning.md)
|
||||
@@ -39,18 +42,22 @@
|
||||
- [Atlantis Security](pentesting-ci-cd/atlantis-security.md)
|
||||
- [Cloudflare Security](pentesting-ci-cd/cloudflare-security/README.md)
|
||||
- [Cloudflare Domains](pentesting-ci-cd/cloudflare-security/cloudflare-domains.md)
|
||||
- [Cloudflare Workers Pass Through Proxy Ip Rotation](pentesting-ci-cd/cloudflare-security/cloudflare-workers-pass-through-proxy-ip-rotation.md)
|
||||
- [Cloudflare Zero Trust Network](pentesting-ci-cd/cloudflare-security/cloudflare-zero-trust-network.md)
|
||||
- [Okta Security](pentesting-ci-cd/okta-security/README.md)
|
||||
- [Okta Hardening](pentesting-ci-cd/okta-security/okta-hardening.md)
|
||||
- [Serverless.com Security](pentesting-ci-cd/serverless.com-security.md)
|
||||
- [Supabase Security](pentesting-ci-cd/supabase-security.md)
|
||||
- [Ansible Tower / AWX / Automation controller Security](pentesting-ci-cd/ansible-tower-awx-automation-controller-security.md)
|
||||
- [Check Automate Security](pentesting-ci-cd/chef-automate-security/README.md)
|
||||
- [Chef Automate Enumeration And Attacks](pentesting-ci-cd/chef-automate-security/chef-automate-enumeration-and-attacks.md)
|
||||
- [Vercel Security](pentesting-ci-cd/vercel-security.md)
|
||||
- [Ansible Tower / AWX / Automation controller Security](pentesting-ci-cd/ansible-tower-awx-automation-controller-security.md)
|
||||
- [TODO](pentesting-ci-cd/todo.md)
|
||||
|
||||
# ⛈️ Pentesting Cloud
|
||||
|
||||
- [Pentesting Cloud Methodology](pentesting-cloud/pentesting-cloud-methodology.md)
|
||||
- [Luks2 Header Malleability Null Cipher Abuse](pentesting-cloud/confidential-computing/luks2-header-malleability-null-cipher-abuse.md)
|
||||
- [Kubernetes Pentesting](pentesting-cloud/kubernetes-security/README.md)
|
||||
- [Kubernetes Basics](pentesting-cloud/kubernetes-security/kubernetes-basics.md)
|
||||
- [Pentesting Kubernetes Services](pentesting-cloud/kubernetes-security/pentesting-kubernetes-services/README.md)
|
||||
@@ -80,6 +87,7 @@
|
||||
- [GCP - Post Exploitation](pentesting-cloud/gcp-security/gcp-post-exploitation/README.md)
|
||||
- [GCP - App Engine Post Exploitation](pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-app-engine-post-exploitation.md)
|
||||
- [GCP - Artifact Registry Post Exploitation](pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-artifact-registry-post-exploitation.md)
|
||||
- [GCP - Bigtable Post Exploitation](pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-bigtable-post-exploitation.md)
|
||||
- [GCP - Cloud Build Post Exploitation](pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-cloud-build-post-exploitation.md)
|
||||
- [GCP - Cloud Functions Post Exploitation](pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-cloud-functions-post-exploitation.md)
|
||||
- [GCP - Cloud Run Post Exploitation](pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-cloud-run-post-exploitation.md)
|
||||
@@ -102,18 +110,22 @@
|
||||
- [GCP - Artifact Registry Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-artifact-registry-privesc.md)
|
||||
- [GCP - Batch Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-batch-privesc.md)
|
||||
- [GCP - BigQuery Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-bigquery-privesc.md)
|
||||
- [GCP - Bigtable Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-bigtable-privesc.md)
|
||||
- [GCP - ClientAuthConfig Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-clientauthconfig-privesc.md)
|
||||
- [GCP - Cloudbuild Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-cloudbuild-privesc.md)
|
||||
- [GCP - Cloudfunctions Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-cloudfunctions-privesc.md)
|
||||
- [GCP - Cloudidentity Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-cloudidentity-privesc.md)
|
||||
- [GCP - Cloud Scheduler Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-cloudscheduler-privesc.md)
|
||||
- [GCP - Cloud Tasks Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-cloudtasks-privesc.md)
|
||||
- [GCP - Compute Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-compute-privesc/README.md)
|
||||
- [GCP - Add Custom SSH Metadata](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-compute-privesc/gcp-add-custom-ssh-metadata.md)
|
||||
- [GCP - Composer Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-composer-privesc.md)
|
||||
- [GCP - Container Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-container-privesc.md)
|
||||
- [GCP - Dataproc Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-dataproc-privesc.md)
|
||||
- [GCP - Deploymentmaneger Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-deploymentmaneger-privesc.md)
|
||||
- [GCP - IAM Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-iam-privesc.md)
|
||||
- [GCP - KMS Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-kms-privesc.md)
|
||||
- [GCP - Firebase Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-firebase-privesc.md)
|
||||
- [GCP - Orgpolicy Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-orgpolicy-privesc.md)
|
||||
- [GCP - Pubsub Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-pubsub-privesc.md)
|
||||
- [GCP - Resourcemanager Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-resourcemanager-privesc.md)
@@ -122,6 +134,7 @@
- [GCP - Serviceusage Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-serviceusage-privesc.md)
- [GCP - Sourcerepos Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-sourcerepos-privesc.md)
- [GCP - Storage Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-storage-privesc.md)
- [GCP - Vertex AI Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-vertex-ai-privesc.md)
- [GCP - Workflows Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-workflows-privesc.md)
- [GCP - Generic Permissions Privesc](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-misc-perms-privesc.md)
- [GCP - Network Docker Escape](pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-network-docker-escape.md)
@@ -131,6 +144,7 @@
- [GCP - App Engine Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-app-engine-persistence.md)
- [GCP - Artifact Registry Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-artifact-registry-persistence.md)
- [GCP - BigQuery Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-bigquery-persistence.md)
- [GCP - Bigtable Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-bigtable-persistence.md)
- [GCP - Cloud Functions Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-cloud-functions-persistence.md)
- [GCP - Cloud Run Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-cloud-run-persistence.md)
- [GCP - Cloud Shell Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-cloud-shell-persistence.md)
@@ -141,7 +155,7 @@
- [GCP - Logging Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-logging-persistence.md)
- [GCP - Secret Manager Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-secret-manager-persistence.md)
- [GCP - Storage Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-storage-persistence.md)
- [GCP - Token Persistance](pentesting-cloud/gcp-security/gcp-persistence/gcp-non-svc-persistance.md)
- [GCP - Token Persistence](pentesting-cloud/gcp-security/gcp-persistence/gcp-non-svc-persistence.md)
- [GCP - Services](pentesting-cloud/gcp-security/gcp-services/README.md)
- [GCP - AI Platform Enum](pentesting-cloud/gcp-security/gcp-services/gcp-ai-platform-enum.md)
- [GCP - API Keys Enum](pentesting-cloud/gcp-security/gcp-services/gcp-api-keys-enum.md)
@@ -161,6 +175,7 @@
- [GCP - VPC & Networking](pentesting-cloud/gcp-security/gcp-services/gcp-compute-instances-enum/gcp-vpc-and-networking.md)
- [GCP - Composer Enum](pentesting-cloud/gcp-security/gcp-services/gcp-composer-enum.md)
- [GCP - Containers & GKE Enum](pentesting-cloud/gcp-security/gcp-services/gcp-containers-gke-and-composer-enum.md)
- [GCP - Dataproc Enum](pentesting-cloud/gcp-security/gcp-services/gcp-dataproc-enum.md)
- [GCP - DNS Enum](pentesting-cloud/gcp-security/gcp-services/gcp-dns-enum.md)
- [GCP - Filestore Enum](pentesting-cloud/gcp-security/gcp-services/gcp-filestore-enum.md)
- [GCP - Firebase Enum](pentesting-cloud/gcp-security/gcp-services/gcp-firebase-enum.md)
@@ -177,6 +192,7 @@
- [GCP - Spanner Enum](pentesting-cloud/gcp-security/gcp-services/gcp-spanner-enum.md)
- [GCP - Stackdriver Enum](pentesting-cloud/gcp-security/gcp-services/gcp-stackdriver-enum.md)
- [GCP - Storage Enum](pentesting-cloud/gcp-security/gcp-services/gcp-storage-enum.md)
- [GCP - Vertex AI Enum](pentesting-cloud/gcp-security/gcp-services/gcp-vertex-ai-enum.md)
- [GCP - Workflows Enum](pentesting-cloud/gcp-security/gcp-services/gcp-workflows-enum.md)
- [GCP <--> Workspace Pivoting](pentesting-cloud/gcp-security/gcp-to-workspace-pivoting/README.md)
- [GCP - Understanding Domain-Wide Delegation](pentesting-cloud/gcp-security/gcp-to-workspace-pivoting/gcp-understanding-domain-wide-delegation.md)
@@ -208,105 +224,139 @@
- [AWS - Federation Abuse](pentesting-cloud/aws-security/aws-basic-information/aws-federation-abuse.md)
- [AWS - Permissions for a Pentest](pentesting-cloud/aws-security/aws-permissions-for-a-pentest.md)
- [AWS - Persistence](pentesting-cloud/aws-security/aws-persistence/README.md)
- [AWS - API Gateway Persistence](pentesting-cloud/aws-security/aws-persistence/aws-api-gateway-persistence.md)
- [AWS - Cognito Persistence](pentesting-cloud/aws-security/aws-persistence/aws-cognito-persistence.md)
- [AWS - DynamoDB Persistence](pentesting-cloud/aws-security/aws-persistence/aws-dynamodb-persistence.md)
- [AWS - EC2 Persistence](pentesting-cloud/aws-security/aws-persistence/aws-ec2-persistence.md)
- [AWS - ECR Persistence](pentesting-cloud/aws-security/aws-persistence/aws-ecr-persistence.md)
- [AWS - ECS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-ecs-persistence.md)
- [AWS - Elastic Beanstalk Persistence](pentesting-cloud/aws-security/aws-persistence/aws-elastic-beanstalk-persistence.md)
- [AWS - EFS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-efs-persistence.md)
- [AWS - IAM Persistence](pentesting-cloud/aws-security/aws-persistence/aws-iam-persistence.md)
- [AWS - KMS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-kms-persistence.md)
- [AWS - API Gateway Persistence](pentesting-cloud/aws-security/aws-persistence/aws-api-gateway-persistence/README.md)
- [AWS - Cloudformation Persistence](pentesting-cloud/aws-security/aws-persistence/aws-cloudformation-persistence/README.md)
- [AWS - Cognito Persistence](pentesting-cloud/aws-security/aws-persistence/aws-cognito-persistence/README.md)
- [AWS - DynamoDB Persistence](pentesting-cloud/aws-security/aws-persistence/aws-dynamodb-persistence/README.md)
- [AWS - EC2 Persistence](pentesting-cloud/aws-security/aws-persistence/aws-ec2-persistence/README.md)
- [AWS - EC2 ReplaceRootVolume Task (Stealth Backdoor / Persistence)](pentesting-cloud/aws-security/aws-persistence/aws-ec2-replace-root-volume-persistence/README.md)
- [AWS - ECR Persistence](pentesting-cloud/aws-security/aws-persistence/aws-ecr-persistence/README.md)
- [AWS - ECS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-ecs-persistence/README.md)
- [AWS - Elastic Beanstalk Persistence](pentesting-cloud/aws-security/aws-persistence/aws-elastic-beanstalk-persistence/README.md)
- [AWS - EFS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-efs-persistence/README.md)
- [AWS - IAM Persistence](pentesting-cloud/aws-security/aws-persistence/aws-iam-persistence/README.md)
- [AWS - KMS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-kms-persistence/README.md)
- [AWS - Lambda Persistence](pentesting-cloud/aws-security/aws-persistence/aws-lambda-persistence/README.md)
- [AWS - Abusing Lambda Extensions](pentesting-cloud/aws-security/aws-persistence/aws-lambda-persistence/aws-abusing-lambda-extensions.md)
- [AWS - Lambda Alias Version Policy Backdoor](pentesting-cloud/aws-security/aws-persistence/aws-lambda-persistence/aws-lambda-alias-version-policy-backdoor.md)
- [AWS - Lambda Async Self Loop Persistence](pentesting-cloud/aws-security/aws-persistence/aws-lambda-persistence/aws-lambda-async-self-loop-persistence.md)
- [AWS - Lambda Layers Persistence](pentesting-cloud/aws-security/aws-persistence/aws-lambda-persistence/aws-lambda-layers-persistence.md)
- [AWS - Lightsail Persistence](pentesting-cloud/aws-security/aws-persistence/aws-lightsail-persistence.md)
- [AWS - RDS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-rds-persistence.md)
- [AWS - S3 Persistence](pentesting-cloud/aws-security/aws-persistence/aws-s3-persistence.md)
- [AWS - SNS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-sns-persistence.md)
- [AWS - Secrets Manager Persistence](pentesting-cloud/aws-security/aws-persistence/aws-secrets-manager-persistence.md)
- [AWS - SQS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-sqs-persistence.md)
- [AWS - SSM Perssitence](pentesting-cloud/aws-security/aws-persistence/aws-ssm-perssitence.md)
- [AWS - Step Functions Persistence](pentesting-cloud/aws-security/aws-persistence/aws-step-functions-persistence.md)
- [AWS - STS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-sts-persistence.md)
- [AWS - Lambda Exec Wrapper Persistence](pentesting-cloud/aws-security/aws-persistence/aws-lambda-persistence/aws-lambda-exec-wrapper-persistence.md)
- [AWS - Lightsail Persistence](pentesting-cloud/aws-security/aws-persistence/aws-lightsail-persistence/README.md)
- [AWS - RDS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-rds-persistence/README.md)
- [AWS - S3 Persistence](pentesting-cloud/aws-security/aws-persistence/aws-s3-persistence/README.md)
- [AWS - SageMaker Persistence](pentesting-cloud/aws-security/aws-persistence/aws-sagemaker-persistence/README.md)
- [AWS - SNS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-sns-persistence/README.md)
- [AWS - Secrets Manager Persistence](pentesting-cloud/aws-security/aws-persistence/aws-secrets-manager-persistence/README.md)
- [AWS - SQS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-sqs-persistence/README.md)
- [AWS - SQS DLQ Backdoor Persistence via RedrivePolicy/RedriveAllowPolicy](pentesting-cloud/aws-security/aws-persistence/aws-sqs-persistence/aws-sqs-dlq-backdoor-persistence.md)
- [AWS - SQS OrgID Policy Backdoor](pentesting-cloud/aws-security/aws-persistence/aws-sqs-persistence/aws-sqs-orgid-policy-backdoor.md)
- [AWS - SSM Persistence](pentesting-cloud/aws-security/aws-persistence/aws-ssm-persistence/README.md)
- [AWS - Step Functions Persistence](pentesting-cloud/aws-security/aws-persistence/aws-step-functions-persistence/README.md)
- [AWS - STS Persistence](pentesting-cloud/aws-security/aws-persistence/aws-sts-persistence/README.md)
- [AWS - Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/README.md)
- [AWS - API Gateway Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-api-gateway-post-exploitation.md)
- [AWS - CloudFront Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-cloudfront-post-exploitation.md)
- [AWS - API Gateway Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-api-gateway-post-exploitation/README.md)
- [AWS - Bedrock Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-bedrock-post-exploitation/README.md)
- [AWS - CloudFront Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-cloudfront-post-exploitation/README.md)
- [AWS - CodeBuild Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-codebuild-post-exploitation/README.md)
- [AWS Codebuild - Token Leakage](pentesting-cloud/aws-security/aws-post-exploitation/aws-codebuild-post-exploitation/aws-codebuild-token-leakage.md)
- [AWS - Control Tower Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-control-tower-post-exploitation.md)
- [AWS - DLM Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-dlm-post-exploitation.md)
- [AWS - DynamoDB Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-dynamodb-post-exploitation.md)
- [AWS - Control Tower Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-control-tower-post-exploitation/README.md)
- [AWS - DLM Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-dlm-post-exploitation/README.md)
- [AWS - DynamoDB Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-dynamodb-post-exploitation/README.md)
- [AWS - EC2, EBS, SSM & VPC Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/README.md)
- [AWS - EBS Snapshot Dump](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-ebs-snapshot-dump.md)
- [AWS – Covert Disk Exfiltration via AMI Store-to-S3 (CreateStoreImageTask)](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-ami-store-s3-exfiltration.md)
- [AWS - Live Data Theft via EBS Multi-Attach](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-ebs-multi-attach-data-theft.md)
- [AWS - EC2 Instance Connect Endpoint backdoor + ephemeral SSH key injection](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-ec2-instance-connect-endpoint-backdoor.md)
- [AWS – EC2 ENI Secondary Private IP Hijack (Trust/Allowlist Bypass)](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-eni-secondary-ip-hijack.md)
- [AWS - Elastic IP Hijack for Ingress/Egress IP Impersonation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-eip-hijack-impersonation.md)
- [AWS - Security Group Backdoor via Managed Prefix Lists](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-managed-prefix-list-backdoor.md)
- [AWS – Egress Bypass from Isolated Subnets via VPC Endpoints](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-vpc-endpoint-egress-bypass.md)
- [AWS - VPC Flow Logs Cross-Account Exfiltration to S3](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-vpc-flow-logs-cross-account-exfiltration.md)
- [AWS - Malicious VPC Mirror](pentesting-cloud/aws-security/aws-post-exploitation/aws-ec2-ebs-ssm-and-vpc-post-exploitation/aws-malicious-vpc-mirror.md)
- [AWS - ECR Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ecr-post-exploitation.md)
- [AWS - ECS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ecs-post-exploitation.md)
- [AWS - EFS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-efs-post-exploitation.md)
- [AWS - EKS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-eks-post-exploitation.md)
- [AWS - Elastic Beanstalk Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-elastic-beanstalk-post-exploitation.md)
- [AWS - IAM Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-iam-post-exploitation.md)
- [AWS - KMS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-kms-post-exploitation.md)
- [AWS - ECR Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ecr-post-exploitation/README.md)
- [AWS - ECS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ecs-post-exploitation/README.md)
- [AWS - EFS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-efs-post-exploitation/README.md)
- [AWS - EKS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-eks-post-exploitation/README.md)
- [AWS - Elastic Beanstalk Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-elastic-beanstalk-post-exploitation/README.md)
- [AWS - IAM Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-iam-post-exploitation/README.md)
- [AWS - KMS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-kms-post-exploitation/README.md)
- [AWS - Lambda Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/README.md)
- [AWS - Steal Lambda Requests](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-warm-lambda-persistence.md)
- [AWS - Lightsail Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-lightsail-post-exploitation.md)
- [AWS - Organizations Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-organizations-post-exploitation.md)
- [AWS - RDS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-rds-post-exploitation.md)
- [AWS - S3 Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-s3-post-exploitation.md)
- [AWS - Secrets Manager Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-secrets-manager-post-exploitation.md)
- [AWS - SES Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ses-post-exploitation.md)
- [AWS - SNS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sns-post-exploitation.md)
- [AWS - SQS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sqs-post-exploitation.md)
- [AWS - SSO & identitystore Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sso-and-identitystore-post-exploitation.md)
- [AWS - Step Functions Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-stepfunctions-post-exploitation.md)
- [AWS - STS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sts-post-exploitation.md)
- [AWS - VPN Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-vpn-post-exploitation.md)
- [AWS - Lambda EFS Mount Injection](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-lambda-efs-mount-injection.md)
- [AWS - Lambda Event Source Mapping Hijack](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-lambda-event-source-mapping-hijack.md)
- [AWS - Lambda Function URL Public Exposure](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-lambda-function-url-public-exposure.md)
- [AWS - Lambda LoggingConfig Redirection](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-lambda-loggingconfig-redirection.md)
- [AWS - Lambda Runtime Pinning Abuse](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-lambda-runtime-pinning-abuse.md)
- [AWS - Lambda Steal Requests](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-warm-lambda-persistence.md)
- [AWS - Lambda VPC Egress Bypass](pentesting-cloud/aws-security/aws-post-exploitation/aws-lambda-post-exploitation/aws-lambda-vpc-egress-bypass.md)
- [AWS - Lightsail Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-lightsail-post-exploitation/README.md)
- [AWS - MWAA Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-mwaa-post-exploitation/README.md)
- [AWS - Organizations Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-organizations-post-exploitation/README.md)
- [AWS - RDS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-rds-post-exploitation/README.md)
- [AWS - SageMaker Post-Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sagemaker-post-exploitation/README.md)
- [Feature Store Poisoning](pentesting-cloud/aws-security/aws-post-exploitation/aws-sagemaker-post-exploitation/feature-store-poisoning.md)
- [AWS - S3 Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-s3-post-exploitation/README.md)
- [AWS - Secrets Manager Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-secrets-manager-post-exploitation/README.md)
- [AWS - SES Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-ses-post-exploitation/README.md)
- [AWS - SNS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sns-post-exploitation/README.md)
- [AWS - SNS Message Data Protection Bypass via Policy Downgrade](pentesting-cloud/aws-security/aws-post-exploitation/aws-sns-post-exploitation/aws-sns-data-protection-bypass.md)
- [SNS FIFO Archive Replay Exfiltration via Attacker SQS FIFO Subscription](pentesting-cloud/aws-security/aws-post-exploitation/aws-sns-post-exploitation/aws-sns-fifo-replay-exfil.md)
- [AWS - SNS to Kinesis Firehose Exfiltration (Fanout to S3)](pentesting-cloud/aws-security/aws-post-exploitation/aws-sns-post-exploitation/aws-sns-firehose-exfil.md)
- [AWS - SQS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sqs-post-exploitation/README.md)
- [AWS – SQS DLQ Redrive Exfiltration via StartMessageMoveTask](pentesting-cloud/aws-security/aws-post-exploitation/aws-sqs-post-exploitation/aws-sqs-dlq-redrive-exfiltration.md)
- [AWS – SQS Cross-/Same-Account Injection via SNS Subscription + Queue Policy](pentesting-cloud/aws-security/aws-post-exploitation/aws-sqs-post-exploitation/aws-sqs-sns-injection.md)
- [AWS - SSO & identitystore Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sso-and-identitystore-post-exploitation/README.md)
- [AWS - Step Functions Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-stepfunctions-post-exploitation/README.md)
- [AWS - STS Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-sts-post-exploitation/README.md)
- [AWS - VPN Post Exploitation](pentesting-cloud/aws-security/aws-post-exploitation/aws-vpn-post-exploitation/README.md)
- [AWS - Privilege Escalation](pentesting-cloud/aws-security/aws-privilege-escalation/README.md)
- [AWS - Apigateway Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-apigateway-privesc.md)
- [AWS - Chime Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-chime-privesc.md)
- [AWS - Codebuild Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-codebuild-privesc.md)
- [AWS - Codepipeline Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-codepipeline-privesc.md)
- [AWS - Apigateway Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-apigateway-privesc/README.md)
- [AWS - AppRunner Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-apprunner-privesc/README.md)
- [AWS - Chime Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-chime-privesc/README.md)
- [AWS - CloudFront](pentesting-cloud/aws-security/aws-privilege-escalation/aws-cloudfront-privesc/README.md)
- [AWS - Codebuild Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-codebuild-privesc/README.md)
- [AWS - Codepipeline Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-codepipeline-privesc/README.md)
- [AWS - Codestar Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-codestar-privesc/README.md)
- [codestar:CreateProject, codestar:AssociateTeamMember](pentesting-cloud/aws-security/aws-privilege-escalation/aws-codestar-privesc/codestar-createproject-codestar-associateteammember.md)
- [iam:PassRole, codestar:CreateProject](pentesting-cloud/aws-security/aws-privilege-escalation/aws-codestar-privesc/iam-passrole-codestar-createproject.md)
- [AWS - Cloudformation Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-cloudformation-privesc/README.md)
- [iam:PassRole, cloudformation:CreateStack, and cloudformation:DescribeStacks](pentesting-cloud/aws-security/aws-privilege-escalation/aws-cloudformation-privesc/iam-passrole-cloudformation-createstack-and-cloudformation-describestacks.md)
- [AWS - Cognito Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-cognito-privesc.md)
- [AWS - Datapipeline Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-datapipeline-privesc.md)
- [AWS - Directory Services Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-directory-services-privesc.md)
- [AWS - DynamoDB Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-dynamodb-privesc.md)
- [AWS - EBS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ebs-privesc.md)
- [AWS - EC2 Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ec2-privesc.md)
- [AWS - ECR Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ecr-privesc.md)
- [AWS - ECS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ecs-privesc.md)
- [AWS - EFS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-efs-privesc.md)
- [AWS - Elastic Beanstalk Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-elastic-beanstalk-privesc.md)
- [AWS - EMR Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-emr-privesc.md)
- [AWS - EventBridge Scheduler Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/eventbridgescheduler-privesc.md)
- [AWS - Gamelift](pentesting-cloud/aws-security/aws-privilege-escalation/aws-gamelift.md)
- [AWS - Glue Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-glue-privesc.md)
- [AWS - IAM Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-iam-privesc.md)
- [AWS - KMS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-kms-privesc.md)
- [AWS - Lambda Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-lambda-privesc.md)
- [AWS - Lightsail Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-lightsail-privesc.md)
- [AWS - Mediapackage Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-mediapackage-privesc.md)
- [AWS - MQ Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-mq-privesc.md)
- [AWS - MSK Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-msk-privesc.md)
- [AWS - RDS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-rds-privesc.md)
- [AWS - Redshift Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-redshift-privesc.md)
- [AWS - Route53 Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/route53-createhostedzone-route53-changeresourcerecordsets-acm-pca-issuecertificate-acm-pca-getcer.md)
- [AWS - SNS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sns-privesc.md)
- [AWS - SQS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sqs-privesc.md)
- [AWS - SSO & identitystore Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sso-and-identitystore-privesc.md)
- [AWS - Organizations Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-organizations-prinvesc.md)
- [AWS - S3 Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-s3-privesc.md)
- [AWS - Sagemaker Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sagemaker-privesc.md)
- [AWS - Secrets Manager Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-secrets-manager-privesc.md)
- [AWS - SSM Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ssm-privesc.md)
- [AWS - Step Functions Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-stepfunctions-privesc.md)
- [AWS - STS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sts-privesc.md)
- [AWS - WorkDocs Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-workdocs-privesc.md)
- [AWS - Cognito Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-cognito-privesc/README.md)
- [AWS - Datapipeline Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-datapipeline-privesc/README.md)
- [AWS - Directory Services Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-directory-services-privesc/README.md)
- [AWS - DynamoDB Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-dynamodb-privesc/README.md)
- [AWS - EBS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ebs-privesc/README.md)
- [AWS - EC2 Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ec2-privesc/README.md)
- [AWS - ECR Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ecr-privesc/README.md)
- [AWS - ECS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ecs-privesc/README.md)
- [AWS - EFS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-efs-privesc/README.md)
- [AWS - Elastic Beanstalk Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-elastic-beanstalk-privesc/README.md)
- [AWS - EMR Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-emr-privesc/README.md)
- [AWS - EventBridge Scheduler Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/eventbridgescheduler-privesc/README.md)
- [AWS - Gamelift](pentesting-cloud/aws-security/aws-privilege-escalation/aws-gamelift/README.md)
- [AWS - Glue Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-glue-privesc/README.md)
- [AWS - IAM Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-iam-privesc/README.md)
- [AWS - KMS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-kms-privesc/README.md)
- [AWS - Lambda Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-lambda-privesc/README.md)
- [AWS - Lightsail Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-lightsail-privesc/README.md)
- [AWS - Macie Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-macie-privesc/README.md)
- [AWS - Mediapackage Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-mediapackage-privesc/README.md)
- [AWS - MQ Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-mq-privesc/README.md)
- [AWS - MSK Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-msk-privesc/README.md)
- [AWS - RDS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-rds-privesc/README.md)
- [AWS - Redshift Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-redshift-privesc/README.md)
- [AWS - Route53 Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/route53-createhostedzone-route53-changeresourcerecordsets-acm-pca-issuecertificate-acm-pca-getcer/README.md)
- [AWS - SNS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sns-privesc/README.md)
- [AWS - SQS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sqs-privesc/README.md)
- [AWS - SSO & identitystore Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sso-and-identitystore-privesc/README.md)
- [AWS - Organizations Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-organizations-prinvesc/README.md)
- [AWS - S3 Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-s3-privesc/README.md)
- [AWS - Sagemaker Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sagemaker-privesc/README.md)
- [AWS - Secrets Manager Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-secrets-manager-privesc/README.md)
- [AWS - SSM Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-ssm-privesc/README.md)
- [AWS - Step Functions Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-stepfunctions-privesc/README.md)
- [AWS - STS Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-sts-privesc/README.md)
- [AWS - WorkDocs Privesc](pentesting-cloud/aws-security/aws-privilege-escalation/aws-workdocs-privesc/README.md)
- [AWS - Services](pentesting-cloud/aws-security/aws-services/README.md)
- [AWS - Security & Detection Services](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/README.md)
- [AWS - CloudTrail Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-cloudtrail-enum.md)
@@ -318,12 +368,12 @@
- [AWS - Firewall Manager Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-firewall-manager-enum.md)
- [AWS - GuardDuty Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-guardduty-enum.md)
- [AWS - Inspector Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-inspector-enum.md)
- [AWS - Macie Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-macie-enum.md)
- [AWS - Security Hub Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-security-hub-enum.md)
- [AWS - Shield Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-shield-enum.md)
- [AWS - Trusted Advisor Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-trusted-advisor-enum.md)
- [AWS - WAF Enum](pentesting-cloud/aws-security/aws-services/aws-security-and-detection-services/aws-waf-enum.md)
- [AWS - API Gateway Enum](pentesting-cloud/aws-security/aws-services/aws-api-gateway-enum.md)
- [AWS - Bedrock Enum](pentesting-cloud/aws-security/aws-services/aws-bedrock-enum.md)
- [AWS - Certificate Manager (ACM) & Private Certificate Authority (PCA)](pentesting-cloud/aws-security/aws-services/aws-certificate-manager-acm-and-private-certificate-authority-pca.md)
- [AWS - CloudFormation & Codestar Enum](pentesting-cloud/aws-security/aws-services/aws-cloudformation-and-codestar-enum.md)
- [AWS - CloudHSM Enum](pentesting-cloud/aws-security/aws-services/aws-cloudhsm-enum.md)
@@ -334,7 +384,7 @@
- [Cognito User Pools](pentesting-cloud/aws-security/aws-services/aws-cognito-enum/cognito-user-pools.md)
- [AWS - DataPipeline, CodePipeline & CodeCommit Enum](pentesting-cloud/aws-security/aws-services/aws-datapipeline-codepipeline-codebuild-and-codecommit.md)
- [AWS - Directory Services / WorkDocs Enum](pentesting-cloud/aws-security/aws-services/aws-directory-services-workdocs-enum.md)
- [AWS - DocumentDB Enum](pentesting-cloud/aws-security/aws-services/aws-documentdb-enum.md)
- [AWS - DocumentDB Enum](pentesting-cloud/aws-security/aws-services/aws-documentdb-enum/README.md)
- [AWS - DynamoDB Enum](pentesting-cloud/aws-security/aws-services/aws-dynamodb-enum.md)
- [AWS - EC2, EBS, ELB, SSM, VPC & VPN Enum](pentesting-cloud/aws-security/aws-services/aws-ec2-ebs-elb-ssm-vpc-and-vpn-enum/README.md)
- [AWS - Nitro Enum](pentesting-cloud/aws-security/aws-services/aws-ec2-ebs-elb-ssm-vpc-and-vpn-enum/aws-nitro-enum.md)
@@ -352,12 +402,14 @@
- [AWS - KMS Enum](pentesting-cloud/aws-security/aws-services/aws-kms-enum.md)
- [AWS - Lambda Enum](pentesting-cloud/aws-security/aws-services/aws-lambda-enum.md)
- [AWS - Lightsail Enum](pentesting-cloud/aws-security/aws-services/aws-lightsail-enum.md)
- [AWS - Macie Enum](pentesting-cloud/aws-security/aws-services/aws-macie-enum.md)
- [AWS - MQ Enum](pentesting-cloud/aws-security/aws-services/aws-mq-enum.md)
- [AWS - MSK Enum](pentesting-cloud/aws-security/aws-services/aws-msk-enum.md)
- [AWS - Organizations Enum](pentesting-cloud/aws-security/aws-services/aws-organizations-enum.md)
- [AWS - Redshift Enum](pentesting-cloud/aws-security/aws-services/aws-redshift-enum.md)
- [AWS - Relational Database (RDS) Enum](pentesting-cloud/aws-security/aws-services/aws-relational-database-rds-enum.md)
- [AWS - Route53 Enum](pentesting-cloud/aws-security/aws-services/aws-route53-enum.md)
- [AWS - SageMaker Enum](pentesting-cloud/aws-security/aws-services/aws-sagemaker-enum/README.md)
- [AWS - Secrets Manager Enum](pentesting-cloud/aws-security/aws-services/aws-secrets-manager-enum.md)
- [AWS - SES Enum](pentesting-cloud/aws-security/aws-services/aws-ses-enum.md)
- [AWS - SNS Enum](pentesting-cloud/aws-security/aws-services/aws-sns-enum.md)
@@ -367,38 +419,42 @@
- [AWS - STS Enum](pentesting-cloud/aws-security/aws-services/aws-sts-enum.md)
- [AWS - Other Services Enum](pentesting-cloud/aws-security/aws-services/aws-other-services-enum.md)
- [AWS - Unauthenticated Enum & Access](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/README.md)
- [AWS - Accounts Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-accounts-unauthenticated-enum.md)
- [AWS - API Gateway Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-api-gateway-unauthenticated-enum.md)
- [AWS - Cloudfront Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-cloudfront-unauthenticated-enum.md)
- [AWS - Cognito Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-cognito-unauthenticated-enum.md)
- [AWS - CodeBuild Unauthenticated Access](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-codebuild-unauthenticated-access.md)
- [AWS - DocumentDB Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-documentdb-enum.md)
- [AWS - DynamoDB Unauthenticated Access](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-dynamodb-unauthenticated-access.md)
- [AWS - EC2 Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-ec2-unauthenticated-enum.md)
- [AWS - ECR Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-ecr-unauthenticated-enum.md)
- [AWS - ECS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-ecs-unauthenticated-enum.md)
- [AWS - Elastic Beanstalk Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-elastic-beanstalk-unauthenticated-enum.md)
- [AWS - Elasticsearch Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-elasticsearch-unauthenticated-enum.md)
- [AWS - IAM & STS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-iam-and-sts-unauthenticated-enum.md)
- [AWS - Identity Center & SSO Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-identity-center-and-sso-unauthenticated-enum.md)
- [AWS - IoT Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-iot-unauthenticated-enum.md)
- [AWS - Kinesis Video Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-kinesis-video-unauthenticated-enum.md)
- [AWS - Lambda Unauthenticated Access](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-lambda-unauthenticated-access.md)
- [AWS - Media Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-media-unauthenticated-enum.md)
- [AWS - MQ Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-mq-unauthenticated-enum.md)
- [AWS - MSK Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-msk-unauthenticated-enum.md)
- [AWS - RDS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-rds-unauthenticated-enum.md)
- [AWS - Redshift Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-redshift-unauthenticated-enum.md)
- [AWS - SQS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-sqs-unauthenticated-enum.md)
- [AWS - SNS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-sns-unauthenticated-enum.md)
- [AWS - S3 Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-s3-unauthenticated-enum.md)
- [AWS - Accounts Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-accounts-unauthenticated-enum/README.md)
- [AWS - API Gateway Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-api-gateway-unauthenticated-enum/README.md)
- [AWS - Cloudfront Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-cloudfront-unauthenticated-enum/README.md)
- [AWS - Cognito Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-cognito-unauthenticated-enum/README.md)
- [AWS - CodeBuild Unauthenticated Access](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-codebuild-unauthenticated-access/README.md)
- [AWS - DocumentDB Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-documentdb-enum/README.md)
- [AWS - DynamoDB Unauthenticated Access](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-dynamodb-unauthenticated-access/README.md)
- [AWS - EC2 Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-ec2-unauthenticated-enum/README.md)
- [AWS - ECR Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-ecr-unauthenticated-enum/README.md)
- [AWS - ECS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-ecs-unauthenticated-enum/README.md)
- [AWS - Elastic Beanstalk Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-elastic-beanstalk-unauthenticated-enum/README.md)
- [AWS - Elasticsearch Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-elasticsearch-unauthenticated-enum/README.md)
- [AWS - IAM & STS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-iam-and-sts-unauthenticated-enum/README.md)
- [AWS - Identity Center & SSO Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-identity-center-and-sso-unauthenticated-enum/README.md)
- [AWS - IoT Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-iot-unauthenticated-enum/README.md)
- [AWS - Kinesis Video Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-kinesis-video-unauthenticated-enum/README.md)
- [AWS - Lambda Unauthenticated Access](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-lambda-unauthenticated-access/README.md)
- [AWS - Media Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-media-unauthenticated-enum/README.md)
- [AWS - MQ Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-mq-unauthenticated-enum/README.md)
- [AWS - MSK Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-msk-unauthenticated-enum/README.md)
- [AWS - RDS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-rds-unauthenticated-enum/README.md)
- [AWS - Redshift Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-redshift-unauthenticated-enum/README.md)
- [AWS - SageMaker Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-sagemaker-unauthenticated-enum/README.md)
- [AWS - SQS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-sqs-unauthenticated-enum/README.md)
- [AWS - SNS Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-sns-unauthenticated-enum/README.md)
- [AWS - S3 Unauthenticated Enum](pentesting-cloud/aws-security/aws-unauthenticated-enum-access/aws-s3-unauthenticated-enum/README.md)
- [Azure Pentesting](pentesting-cloud/azure-security/README.md)
- [Az - Basic Information](pentesting-cloud/azure-security/az-basic-information/README.md)
- [Az Federation Abuse](pentesting-cloud/azure-security/az-basic-information/az-federation-abuse.md)
- [Az - Tokens & Public Applications](pentesting-cloud/azure-security/az-basic-information/az-tokens-and-public-applications.md)
- [Az - Enumeration Tools](pentesting-cloud/azure-security/az-enumeration-tools.md)
- [Az - Unauthenticated Enum & Initial Entry](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/README.md)
- [Az - Container Registry Unauth](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/az-container-registry-unauth.md)
- [Az - OAuth Apps Phishing](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/az-oauth-apps-phishing.md)
- [Az - VMs Unath](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/az-vms-unath.md)
- [Az - Storage Unauth](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/az-storage-unauth.md)
- [Az - VMs Unauth](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/az-vms-unauth.md)
- [Az - Device Code Authentication Phishing](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/az-device-code-authentication-phishing.md)
- [Az - Password Spraying](pentesting-cloud/azure-security/az-unauthenticated-enum-and-initial-entry/az-password-spraying.md)
- [Az - Services](pentesting-cloud/azure-security/az-services/README.md)
@@ -406,65 +462,95 @@
- [Az - ACR](pentesting-cloud/azure-security/az-services/az-acr.md)
- [Az - Application Proxy](pentesting-cloud/azure-security/az-services/az-application-proxy.md)
- [Az - ARM Templates / Deployments](pentesting-cloud/azure-security/az-services/az-arm-templates.md)
- [Az - Automation Account](pentesting-cloud/azure-security/az-services/az-automation-account/README.md)
- [Az - State Configuration RCE](pentesting-cloud/azure-security/az-services/az-automation-account/az-state-configuration-rce.md)
- [Az - Azure App Service & Function Apps](pentesting-cloud/azure-security/az-services/az-app-service.md)
- [Az - Intune](pentesting-cloud/azure-security/az-services/intune.md)
- [Az - Automation Accounts](pentesting-cloud/azure-security/az-services/az-automation-accounts.md)
- [Az - Azure App Services](pentesting-cloud/azure-security/az-services/az-app-services.md)
- [Az - AI Foundry](pentesting-cloud/azure-security/az-services/az-ai-foundry.md)
- [Az - Cloud Shell](pentesting-cloud/azure-security/az-services/az-cloud-shell.md)
- [Az - Container Registry](pentesting-cloud/azure-security/az-services/az-container-registry.md)
- [Az - Container Instances, Apps & Jobs](pentesting-cloud/azure-security/az-services/az-container-instances-apps-jobs.md)
- [Az - CosmosDB](pentesting-cloud/azure-security/az-services/az-cosmosDB.md)
- [Az - Defender](pentesting-cloud/azure-security/az-services/az-defender.md)
- [Az - File Shares](pentesting-cloud/azure-security/az-services/az-file-shares.md)
- [Az - Front Door](pentesting-cloud/azure-security/az-services/az-front-door.md)
- [Az - Function Apps](pentesting-cloud/azure-security/az-services/az-function-apps.md)
- [Az - Key Vault](pentesting-cloud/azure-security/az-services/keyvault.md)
- [Az - Intune](pentesting-cloud/azure-security/az-services/intune.md)
- [Az - Key Vault](pentesting-cloud/azure-security/az-services/az-keyvault.md)
- [Az - Logic Apps](pentesting-cloud/azure-security/az-services/az-logic-apps.md)
- [Az - Management Groups, Subscriptions & Resource Groups](pentesting-cloud/azure-security/az-services/az-management-groups-subscriptions-and-resource-groups.md)
- [Az - Queue Storage](pentesting-cloud/azure-security/az-services/az-queue-enum.md)
- [Az - Service Bus](pentesting-cloud/azure-security/az-services/az-servicebus-enum.md)
- [Az - Misc](pentesting-cloud/azure-security/az-services/az-misc.md)
- [Az - Monitoring](pentesting-cloud/azure-security/az-services/az-monitoring.md)
- [Az - MySQL](pentesting-cloud/azure-security/az-services/az-mysql.md)
- [Az - PostgreSQL](pentesting-cloud/azure-security/az-services/az-postgresql.md)
- [Az - Queue Storage](pentesting-cloud/azure-security/az-services/az-queue.md)
- [Az - Sentinel](pentesting-cloud/azure-security/az-services/az-sentinel.md)
- [Az - Service Bus](pentesting-cloud/azure-security/az-services/az-servicebus.md)
- [Az - SQL](pentesting-cloud/azure-security/az-services/az-sql.md)
- [Az - Static Web Applications](pentesting-cloud/azure-security/az-services/az-static-web-apps.md)
- [Az - Storage Accounts & Blobs](pentesting-cloud/azure-security/az-services/az-storage.md)
- [Az - Table Storage](pentesting-cloud/azure-security/az-services/az-table-storage.md)
- [Az - Virtual Desktop](pentesting-cloud/azure-security/az-services/az-virtual-desktop.md)
- [Az - Virtual Machines & Network](pentesting-cloud/azure-security/az-services/vms/README.md)
- [Az - Azure Network](pentesting-cloud/azure-security/az-services/vms/az-azure-network.md)
- [Az - Permissions for a Pentest](pentesting-cloud/azure-security/az-permissions-for-a-pentest.md)
- [Az - Lateral Movement (Cloud - On-Prem)](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/README.md)
- [Az AD Connect - Hybrid Identity](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/README.md)
- [Az - Synchronising New Users](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/az-synchronising-new-users.md)
- [Az - Default Applications](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/az-default-applications.md)
- [Az - Cloud Kerberos Trust](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/az-cloud-kerberos-trust.md)
- [Az - Federation](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/federation.md)
- [Az - PHS - Password Hash Sync](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/phs-password-hash-sync.md)
- [Az - PTA - Pass-through Authentication](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/pta-pass-through-authentication.md)
- [Az - Seamless SSO](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/azure-ad-connect-hybrid-identity/seamless-sso.md)
- [Az - Arc vulnerable GPO Deploy Script](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-arc-vulnerable-gpo-deploy-script.md)
- [Az - Cloud Kerberos Trust](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-cloud-kerberos-trust.md)
- [Az - Cloud Sync](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-cloud-sync.md)
- [Az - Connect Sync](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-connect-sync.md)
- [Az - Domain Services](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-domain-services.md)
- [Az - Federation](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-federation.md)
- [Az - Hybrid Identity Misc Attacks](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-hybrid-identity-misc-attacks.md)
- [Az - Local Cloud Credentials](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-local-cloud-credentials.md)
- [Az - Pass the Cookie](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-pass-the-cookie.md)
- [Az - Pass the Certificate](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-pass-the-certificate.md)
- [Az - Pass the PRT](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/pass-the-prt.md)
- [Az - Phishing Primary Refresh Token (Microsoft Entra)](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-phishing-primary-refresh-token-microsoft-entra.md)
- [Az - Processes Memory Access Token](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-processes-memory-access-token.md)
- [Az - Pass the Cookie](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-pass-the-cookie.md)
- [Az - Primary Refresh Token (PRT)](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-primary-refresh-token-prt.md)
- [Az - PTA - Pass-through Authentication](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-pta-pass-through-authentication.md)
- [Az - Seamless SSO](pentesting-cloud/azure-security/az-lateral-movement-cloud-on-prem/az-seamless-sso.md)
- [Az - Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/README.md)
- [Az - Azure AI Foundry Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-azure-ai-foundry-post-exploitation.md)
- [Az - Blob Storage Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-blob-storage-post-exploitation.md)
|
||||
- [Az - CosmosDB Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-cosmosDB-post-exploitation.md)
|
||||
- [Az - File Share Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-file-share-post-exploitation.md)
|
||||
- [Az - Function Apps Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-function-apps-post-exploitation.md)
|
||||
- [Az - Key Vault Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-key-vault-post-exploitation.md)
|
||||
- [Az - Logic Apps Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-logic-apps-post-exploitation.md)
|
||||
- [Az - MySQL Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-mysql-post-exploitation.md)
|
||||
- [Az - PostgreSQL Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-postgresql-post-exploitation.md)
|
||||
- [Az - Queue Storage Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-queue-post-exploitation.md)
|
||||
- [Az - Service Bus Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-servicebus-post-exploitation.md)
|
||||
- [Az - Table Storage Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-table-storage-post-exploitation.md)
|
||||
- [Az - SQL Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-sql-post-exploitation.md)
|
||||
- [Az - Virtual Desktop Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-virtual-desktop-post-exploitation.md)
|
||||
- [Az - VMs & Network Post Exploitation](pentesting-cloud/azure-security/az-post-exploitation/az-vms-and-network-post-exploitation.md)
|
||||
- [Az - Privilege Escalation](pentesting-cloud/azure-security/az-privilege-escalation/README.md)
|
||||
- [Az - Azure IAM Privesc (Authorization)](pentesting-cloud/azure-security/az-privilege-escalation/az-authorization-privesc.md)
|
||||
- [Az - AI Foundry Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-ai-foundry-privesc.md)
|
||||
- [Az - App Services Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-app-services-privesc.md)
|
||||
- [Az - Automation Accounts Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-automation-accounts-privesc.md)
|
||||
- [Az - Container Registry Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-container-registry-privesc.md)
|
||||
- [Az - Container Instances, Apps & Jobs Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-container-instances-apps-jobs-privesc.md)
|
||||
- [Az - CosmosDB Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-cosmosDB-privesc.md)
|
||||
- [Az - EntraID Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-entraid-privesc/README.md)
|
||||
- [Az - Conditional Access Policies & MFA Bypass](pentesting-cloud/azure-security/az-privilege-escalation/az-entraid-privesc/az-conditional-access-policies-mfa-bypass.md)
|
||||
- [Az - Dynamic Groups Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-entraid-privesc/dynamic-groups.md)
|
||||
- [Az - Functions App Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-functions-app-privesc.md)
|
||||
- [Az - Key Vault Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-key-vault-privesc.md)
|
||||
- [Az - Logic Apps Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-logic-apps-privesc.md)
|
||||
- [Az - MySQL Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-mysql-privesc.md)
|
||||
- [Az - PostgreSQL Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-postgresql-privesc.md)
|
||||
- [Az - Queue Storage Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-queue-privesc.md)
|
||||
- [Az - Service Bus Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-servicebus-privesc.md)
|
||||
- [Az - Virtual Machines & Network Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-virtual-machines-and-network-privesc.md)
|
||||
- [Az - Static Web App Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-static-web-apps-privesc.md)
|
||||
- [Az - Storage Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-storage-privesc.md)
|
||||
- [Az - SQL Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-sql-privesc.md)
|
||||
- [Az - Virtual Desktop Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-virtual-desktop-privesc.md)
|
||||
- [Az - Virtual Machines & Network Privesc](pentesting-cloud/azure-security/az-privilege-escalation/az-virtual-machines-and-network-privesc.md)
|
||||
- [Az - Persistence](pentesting-cloud/azure-security/az-persistence/README.md)
|
||||
- [Az - Queue Storage Persistence](pentesting-cloud/azure-security/az-persistence/az-queue-persistance.md)
|
||||
- [Az - Automation Accounts Persistence](pentesting-cloud/azure-security/az-persistence/az-automation-accounts-persistence.md)
|
||||
- [Az - Cloud Shell Persistence](pentesting-cloud/azure-security/az-persistence/az-cloud-shell-persistence.md)
|
||||
- [Az - Logic Apps Persistence](pentesting-cloud/azure-security/az-persistence/az-logic-apps-persistence.md)
|
||||
- [Az - SQL Persistence](pentesting-cloud/azure-security/az-persistence/az-sql-persistence.md)
- [Az - Queue Storage Persistence](pentesting-cloud/azure-security/az-persistence/az-queue-persistence.md)
- [Az - VMs Persistence](pentesting-cloud/azure-security/az-persistence/az-vms-persistence.md)
- [Az - Storage Persistence](pentesting-cloud/azure-security/az-persistence/az-storage-persistence.md)
- [Az - Device Registration](pentesting-cloud/azure-security/az-device-registration.md)
@@ -499,9 +585,5 @@
# 🛫 Pentesting Network Services
- [HackTricks Pentesting Network$$external:https://book.hacktricks.xyz/generic-methodologies-and-resources/pentesting-network$$]()
- [HackTricks Pentesting Services$$external:https://book.hacktricks.xyz/network-services-pentesting/pentesting-ssh$$]()
- [HackTricks Pentesting Network$$external:https://book.hacktricks.wiki/en/generic-methodologies-and-resources/pentesting-network/index.html$$]()
- [HackTricks Pentesting Services$$external:https://book.hacktricks.wiki/en/network-services-pentesting/pentesting-ssh.html$$]()
@@ -1,6 +1,7 @@
> [!TIP]
> Learn & practice AWS Hacking:<img src="../../../../../images/arte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">[**HackTricks Training AWS Red Team Expert (ARTE)**](https://training.hacktricks.xyz/courses/arte)<img src="../../../../../images/arte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">\
> Learn & practice GCP Hacking: <img src="../../../../../images/grte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">[**HackTricks Training GCP Red Team Expert (GRTE)**](https://training.hacktricks.xyz/courses/grte)<img src="../../../../../images/grte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">
> Learn & practice GCP Hacking: <img src="../../../../../images/grte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">[**HackTricks Training GCP Red Team Expert (GRTE)**](https://training.hacktricks.xyz/courses/grte)<img src="../../../../../images/grte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">\
> Learn & practice Az Hacking: <img src="../../../../../images/azrte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">[**HackTricks Training Azure Red Team Expert (AzRTE)**](https://training.hacktricks.xyz/courses/azrte)<img src="../../../../../images/azrte.png" alt="" style="width:auto;height:24px;vertical-align:middle;">
>
> <details>
>
BIN src/files/empty.zip (Normal file)
BIN src/images/CH_logo_ads.png (Normal file, 26 KiB)
BIN src/images/azrte.png (Normal file, 1.0 MiB)
BIN src/images/azure_static_password.png (Normal file, 14 KiB)
BIN src/images/discount.jpeg (Normal file, 186 KiB)