mirror of
https://github.com/github/codeql-action.git
synced 2025-12-11 18:24:43 +08:00
Compare commits
685 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
76621b61de | ||
|
|
29ac3cefbb | ||
|
|
737cfdebe6 | ||
|
|
679a40d337 | ||
|
|
6fe50b283a | ||
|
|
6bc91d64f6 | ||
|
|
6b4fedca4f | ||
|
|
5794ffcb4a | ||
|
|
bd62bf449c | ||
|
|
2afb4e6f3c | ||
|
|
1fd38a4712 | ||
|
|
bf301d1b77 | ||
|
|
2ee230f7c4 | ||
|
|
3425bf931d | ||
|
|
ddc8e21357 | ||
|
|
afbbdf51df | ||
|
|
e1be6ef300 | ||
|
|
3c7d12c160 | ||
|
|
a4e1a019f5 | ||
|
|
4a32399f5f | ||
|
|
c587f0a77d | ||
|
|
8e6104d51e | ||
|
|
67a6ea72bf | ||
|
|
588ff737e7 | ||
|
|
239ed87059 | ||
|
|
8c8bdce638 | ||
|
|
b7beff905a | ||
|
|
6422cf7859 | ||
|
|
eddeaf42e5 | ||
|
|
739fb03359 | ||
|
|
bb56324516 | ||
|
|
bc90418e92 | ||
|
|
f28436bcbf | ||
|
|
f8c2086872 | ||
|
|
c7884c6fd8 | ||
|
|
a625e1693a | ||
|
|
5e22b5feee | ||
|
|
0d72a5b371 | ||
|
|
43638b10a0 | ||
|
|
1cfc0c2621 | ||
|
|
7b33b610d4 | ||
|
|
e2b6f0f4a3 | ||
|
|
ec8d9c637a | ||
|
|
6db9524876 | ||
|
|
ae2a79254b | ||
|
|
2d082457bf | ||
|
|
594623d72f | ||
|
|
e448add687 | ||
|
|
1b76c0b9c1 | ||
|
|
b1228d060c | ||
|
|
c87fc48ec5 | ||
|
|
9fb8f2d0c2 | ||
|
|
72770345eb | ||
|
|
f5d3601aaa | ||
|
|
06e521573a | ||
|
|
60bf7dfc0e | ||
|
|
f30d00fe8d | ||
|
|
377976a96e | ||
|
|
ea05bf27b6 | ||
|
|
e682065360 | ||
|
|
fa18cc9db4 | ||
|
|
bf692c08e7 | ||
|
|
83e92edc4b | ||
|
|
bbfff2f20a | ||
|
|
cfb8d07200 | ||
|
|
87e59d0f95 | ||
|
|
c481481d7d | ||
|
|
e37c03628f | ||
|
|
563cbbb24d | ||
|
|
443f94c758 | ||
|
|
a5cdb299bc | ||
|
|
5547ed31c9 | ||
|
|
69ccd54e34 | ||
|
|
a3810fa54b | ||
|
|
a28b9b5e2f | ||
|
|
fff9bbe33f | ||
|
|
cfa0a4e416 | ||
|
|
f9d6919415 | ||
|
|
e95a3a9768 | ||
|
|
7273f08caa | ||
|
|
b9b3b12fa2 | ||
|
|
20c7f06b9a | ||
|
|
acdac9e37d | ||
|
|
1a4f45d622 | ||
|
|
297691ddab | ||
|
|
51f77329af | ||
|
|
8e90243ddb | ||
|
|
0521b5facf | ||
|
|
84720e2ef6 | ||
|
|
80a09d7b0b | ||
|
|
8388115dc8 | ||
|
|
401ecaf503 | ||
|
|
45f48a349a | ||
|
|
ab5c0c5fa5 | ||
|
|
cd264d4dcd | ||
|
|
4599055b1e | ||
|
|
fd7ad511e6 | ||
|
|
ac0c9bfe1e | ||
|
|
88d99b3033 | ||
|
|
409486919c | ||
|
|
abbda19c1d | ||
|
|
eb90c18c83 | ||
|
|
12e4b97fba | ||
|
|
264ce42cbb | ||
|
|
d8be08468e | ||
|
|
9b6aeca680 | ||
|
|
a005f73253 | ||
|
|
701df0e49d | ||
|
|
06bb1e016c | ||
|
|
264c5cf3c9 | ||
|
|
4e828ff8d4 | ||
|
|
b3114b8965 | ||
|
|
37264dc0b3 | ||
|
|
5a29823d01 | ||
|
|
5a2327a6fd | ||
|
|
287d421cf3 | ||
|
|
43afe6ec0b | ||
|
|
0f549a757b | ||
|
|
f67ceea75b | ||
|
|
8f2e63676d | ||
|
|
76bf77db0b | ||
|
|
9e7d13dd99 | ||
|
|
2b952be91d | ||
|
|
48ce740f61 | ||
|
|
4749491b98 | ||
|
|
b7a5452764 | ||
|
|
20477a3fe1 | ||
|
|
eefe1b5db9 | ||
|
|
b6332872af | ||
|
|
8e442bc480 | ||
|
|
a7cb1b8b39 | ||
|
|
b195e1bfc6 | ||
|
|
df82387698 | ||
|
|
d6bbdef45e | ||
|
|
210cc9bfa2 | ||
|
|
39b0524b50 | ||
|
|
c3bbcab41b | ||
|
|
e37b293334 | ||
|
|
19075c4376 | ||
|
|
7710ed11e3 | ||
|
|
6a49a8cbce | ||
|
|
3aef4108d1 | ||
|
|
614b64c6ec | ||
|
|
aefb854fe5 | ||
|
|
03a2a17e75 | ||
|
|
07455ed3c3 | ||
|
|
3fb562ddcc | ||
|
|
709cf22a66 | ||
|
|
3eaefb4deb | ||
|
|
e30db30685 | ||
|
|
0d17ea4843 | ||
|
|
38fdaed818 | ||
|
|
37e3c3113a | ||
|
|
15605b194f | ||
|
|
0b8d278f47 | ||
|
|
ca53360d04 | ||
|
|
bbf184bd4c | ||
|
|
b419190c59 | ||
|
|
0c2ac60444 | ||
|
|
6f936b5c2d | ||
|
|
c6a6c1490f | ||
|
|
4e20239e7b | ||
|
|
59d67fc4bf | ||
|
|
b37e7e2c5d | ||
|
|
90d7727554 | ||
|
|
fb771764cb | ||
|
|
d799ff5e6a | ||
|
|
9f70a5fc86 | ||
|
|
55cb6b8b94 | ||
|
|
4bdb7fe04f | ||
|
|
64fce5856f | ||
|
|
fe7205c739 | ||
|
|
4cd7a721f7 | ||
|
|
f4358b38d1 | ||
|
|
f53ec7c550 | ||
|
|
624d0bca90 | ||
|
|
ec836d6b8a | ||
|
|
95a1b7e2bf | ||
|
|
8c5122ea75 | ||
|
|
aafbeb29bc | ||
|
|
6a51e635a5 | ||
|
|
42835b3971 | ||
|
|
2fc04c80cc | ||
|
|
b95402dae1 | ||
|
|
6ca06f41c4 | ||
|
|
d42ce71087 | ||
|
|
b4425372ef | ||
|
|
93e8729640 | ||
|
|
da758dc0cd | ||
|
|
60a2a7d623 | ||
|
|
a336faa497 | ||
|
|
ee8a8c4e0b | ||
|
|
9022c7382c | ||
|
|
b69421388d | ||
|
|
33f84897c3 | ||
|
|
612df8d91c | ||
|
|
dcc1a6637b | ||
|
|
144d3b8f62 | ||
|
|
6881d2cdc1 | ||
|
|
181d5eefc2 | ||
|
|
c77386a9db | ||
|
|
8d43d4ecec | ||
|
|
9281048a40 | ||
|
|
6b83dc33ed | ||
|
|
ca0540d370 | ||
|
|
e9938e34d5 | ||
|
|
4c57370d03 | ||
|
|
2830b750e5 | ||
|
|
aa72ddaead | ||
|
|
65d1e45f0b | ||
|
|
362ebf85da | ||
|
|
10a3e4b17d | ||
|
|
8593ea65e2 | ||
|
|
3e95091e3b | ||
|
|
7b3d150883 | ||
|
|
2e3a72539c | ||
|
|
baf20c9b52 | ||
|
|
39edc492db | ||
|
|
27c4fb1eef | ||
|
|
428aea55f5 | ||
|
|
973250f3d2 | ||
|
|
ad6046ff97 | ||
|
|
9ec0bb9605 | ||
|
|
8ef17824cf | ||
|
|
08955dbc0d | ||
|
|
71dd63398f | ||
|
|
27db6cb5d6 | ||
|
|
768fc170da | ||
|
|
79049d92c6 | ||
|
|
e382508853 | ||
|
|
2c76207fa4 | ||
|
|
83de9b082b | ||
|
|
f3bfb98603 | ||
|
|
2b4afc20b6 | ||
|
|
86f47e8b74 | ||
|
|
9b9286a835 | ||
|
|
af32bc6d6f | ||
|
|
51891595a7 | ||
|
|
f7fbaa019f | ||
|
|
9b02dc2f60 | ||
|
|
7ab92d0295 | ||
|
|
2cae828745 | ||
|
|
6b78c6eca2 | ||
|
|
f7258be256 | ||
|
|
35083eedc1 | ||
|
|
80e2dc47d8 | ||
|
|
2e3b93fe41 | ||
|
|
bbfc5bef5b | ||
|
|
6abacdb184 | ||
|
|
f1834221f2 | ||
|
|
45b3bec064 | ||
|
|
22444a650f | ||
|
|
320f7b0fd6 | ||
|
|
3a7544ea8f | ||
|
|
aba8788d12 | ||
|
|
3963bf423a | ||
|
|
6e22e41a25 | ||
|
|
0cec254fa1 | ||
|
|
6a3692d673 | ||
|
|
9ee60a6e32 | ||
|
|
cce0287569 | ||
|
|
e044b152ab | ||
|
|
46cafbca67 | ||
|
|
fcd0ad43d5 | ||
|
|
c55fb0ab89 | ||
|
|
37a3fcc3af | ||
|
|
7ca4105454 | ||
|
|
286556a968 | ||
|
|
e8ad3afb1e | ||
|
|
0180811a94 | ||
|
|
6b9b66d6f9 | ||
|
|
ac30a39d8c | ||
|
|
66d72553a2 | ||
|
|
65abb79a75 | ||
|
|
0b8d151adc | ||
|
|
f5304e7bf5 | ||
|
|
1764e3d1c2 | ||
|
|
ef36b69c6d | ||
|
|
4cb21ac46b | ||
|
|
dee9f91810 | ||
|
|
3de706a4a3 | ||
|
|
0fb9447fd1 | ||
|
|
6b66390454 | ||
|
|
22b1968d7c | ||
|
|
7e3bc059bb | ||
|
|
f4c96f59d9 | ||
|
|
87c547189e | ||
|
|
f10997b601 | ||
|
|
8f71d47b93 | ||
|
|
ece6bb6fe7 | ||
|
|
3f8ca3519d | ||
|
|
04b73050b2 | ||
|
|
2847b7f7ab | ||
|
|
3c60275a04 | ||
|
|
be30325fa6 | ||
|
|
429b71ea4b | ||
|
|
3d4b4d2241 | ||
|
|
bbab10229f | ||
|
|
de1f97ca1d | ||
|
|
e74e30ba7f | ||
|
|
466d6ce584 | ||
|
|
853b3397ce | ||
|
|
eaadd985c8 | ||
|
|
ce28f5bb42 | ||
|
|
bc251b7932 | ||
|
|
e8799281c8 | ||
|
|
efd43b3097 | ||
|
|
7cb9b16051 | ||
|
|
3855117ba1 | ||
|
|
f5d4e2a7ca | ||
|
|
22deae890c | ||
|
|
df2a830ca4 | ||
|
|
b1e4dc3db5 | ||
|
|
72be4b6df6 | ||
|
|
1eab40885f | ||
|
|
075e08aca6 | ||
|
|
be60d9f5f9 | ||
|
|
a28627ae8f | ||
|
|
fca7ace96b | ||
|
|
1dcd2bebbb | ||
|
|
313daefcef | ||
|
|
55ff016766 | ||
|
|
4a00331d4e | ||
|
|
c0a821da11 | ||
|
|
d6216866b4 | ||
|
|
dc138d4f51 | ||
|
|
a28197c30c | ||
|
|
1d22e8316c | ||
|
|
932be8feda | ||
|
|
e303175b83 | ||
|
|
fa0b6fff20 | ||
|
|
3b57965c44 | ||
|
|
3201e46e26 | ||
|
|
d54c5e2206 | ||
|
|
36121ec458 | ||
|
|
7419bc61b3 | ||
|
|
c7f3c79ac0 | ||
|
|
0be24c0c9a | ||
|
|
fb70a8a3d6 | ||
|
|
7fd62151d9 | ||
|
|
31eae5e821 | ||
|
|
bc02a25f64 | ||
|
|
1a67b5df99 | ||
|
|
97fbf51190 | ||
|
|
eaed21baf2 | ||
|
|
655a969b7c | ||
|
|
3934d2b758 | ||
|
|
0abe43cb59 | ||
|
|
83a4df546f | ||
|
|
7b0fb5a4ac | ||
|
|
23262aef80 | ||
|
|
5239ab193d | ||
|
|
bcaa06bbb4 | ||
|
|
b63847bb99 | ||
|
|
54a7f3b869 | ||
|
|
ba7fabd835 | ||
|
|
cae4996048 | ||
|
|
566c8dfa81 | ||
|
|
396fd27c30 | ||
|
|
57eebf61a2 | ||
|
|
4428f8e35c | ||
|
|
655a335537 | ||
|
|
ff0a06e83c | ||
|
|
a41e0844be | ||
|
|
99ec5f3dd6 | ||
|
|
c07c4ee026 | ||
|
|
b86edfc27a | ||
|
|
e93b90025f | ||
|
|
510dfa3460 | ||
|
|
492d783245 | ||
|
|
83bdf3b7f9 | ||
|
|
cffc916774 | ||
|
|
4420887272 | ||
|
|
4e178c5841 | ||
|
|
05446e4bbf | ||
|
|
bb9fc01aa6 | ||
|
|
3dce55ac70 | ||
|
|
bacf5fe7c2 | ||
|
|
15f19ac220 | ||
|
|
f7ab654551 | ||
|
|
2f70a988e7 | ||
|
|
f681ad69a7 | ||
|
|
15447f393e | ||
|
|
ded79fc5fd | ||
|
|
77ae18dc82 | ||
|
|
df7d681f04 | ||
|
|
15bce5bb14 | ||
|
|
c64095f75e | ||
|
|
07dbe6f6f7 | ||
|
|
3d97729508 | ||
|
|
d5e9ae3f8b | ||
|
|
c41b278fa8 | ||
|
|
7657741c79 | ||
|
|
5eb3ed6614 | ||
|
|
213a8a5a44 | ||
|
|
c46165d67e | ||
|
|
60168efe1c | ||
|
|
0d5a3115da | ||
|
|
97a2bfd2a3 | ||
|
|
9aba20e4c9 | ||
|
|
81a9508deb | ||
|
|
1569f4c145 | ||
|
|
62fbeb66b3 | ||
|
|
f122d1dc9e | ||
|
|
083772aae4 | ||
|
|
5db14d0471 | ||
|
|
40e16edda1 | ||
|
|
3ca9a88941 | ||
|
|
ed51cb5abd | ||
|
|
8ccb6b16a6 | ||
|
|
1817a33c8b | ||
|
|
6893d12604 | ||
|
|
83605b3ce2 | ||
|
|
6a3cfab0e9 | ||
|
|
4b7eecf8a7 | ||
|
|
018ac1a585 | ||
|
|
6ad5d99ccc | ||
|
|
f843d94177 | ||
|
|
2264a4ecc1 | ||
|
|
d3b65fcaf0 | ||
|
|
eea52ddc4e | ||
|
|
6ef9b921b1 | ||
|
|
4ffa2364a0 | ||
|
|
7e00290d34 | ||
|
|
259434501f | ||
|
|
28deaeda66 | ||
|
|
03c5d71c11 | ||
|
|
2a8cbadc02 | ||
|
|
95d52b7807 | ||
|
|
c9f0d30a86 | ||
|
|
f76eaf51a6 | ||
|
|
e63b3f5166 | ||
|
|
c0cffae534 | ||
|
|
7eaba0dbc6 | ||
|
|
d1c7d49753 | ||
|
|
4c3e536282 | ||
|
|
56dd02f26d | ||
|
|
192406dd84 | ||
|
|
c7dbb2084e | ||
|
|
9a45cd8c50 | ||
|
|
d26c46acea | ||
|
|
51c83e1588 | ||
|
|
8774e3f945 | ||
|
|
45775bd823 | ||
|
|
dd78aab407 | ||
|
|
e40af59174 | ||
|
|
a35ae8c380 | ||
|
|
5bddbeb2bf | ||
|
|
c7102cdca1 | ||
|
|
a1ca4846bc | ||
|
|
bb59df6c17 | ||
|
|
4b508f5964 | ||
|
|
ca00afb5f1 | ||
|
|
2969c78ce0 | ||
|
|
fc7e4a0fa0 | ||
|
|
be0175c800 | ||
|
|
a8be43c24e | ||
|
|
94102d99b0 | ||
|
|
fd8685f16e | ||
|
|
56feaac968 | ||
|
|
362ef4ce20 | ||
|
|
2b85c00718 | ||
|
|
41aa437638 | ||
|
|
92864f48b0 | ||
|
|
46fbf563e6 | ||
|
|
e13fe0dd2d | ||
|
|
4a19b5125b | ||
|
|
06703ce3e5 | ||
|
|
676a422916 | ||
|
|
498c7f37e8 | ||
|
|
efd29bef22 | ||
|
|
dab8a02091 | ||
|
|
10771737a9 | ||
|
|
17379bcd20 | ||
|
|
dbb232a3d8 | ||
|
|
4b72bef651 | ||
|
|
b53826d56d | ||
|
|
55ee663d5f | ||
|
|
a27e401674 | ||
|
|
a69f5113b7 | ||
|
|
b6f76bd566 | ||
|
|
01f1a1f2c9 | ||
|
|
efffb483ec | ||
|
|
f21cf0bbd7 | ||
|
|
72a2b1295e | ||
|
|
a022653e2d | ||
|
|
3c42562190 | ||
|
|
e4ca874973 | ||
|
|
e7f67e2e61 | ||
|
|
9f45e7498b | ||
|
|
73c938dbc0 | ||
|
|
2be6da694a | ||
|
|
76f9ed9cd9 | ||
|
|
71ab101d38 | ||
|
|
da967b1ade | ||
|
|
3c4533916b | ||
|
|
1994ea768e | ||
|
|
534bc63d5e | ||
|
|
3fbee52426 | ||
|
|
9bd18b486f | ||
|
|
0afd488dc1 | ||
|
|
c1fc897eb2 | ||
|
|
f88459c0a3 | ||
|
|
b22f3341fe | ||
|
|
486ab5a292 | ||
|
|
5275714183 | ||
|
|
08e5c8d618 | ||
|
|
be853de3b7 | ||
|
|
502426aa6b | ||
|
|
4cdde5c397 | ||
|
|
6ceaf4460c | ||
|
|
f15aac3db1 | ||
|
|
e149e39832 | ||
|
|
f313d62247 | ||
|
|
1b549b9259 | ||
|
|
82630c85f3 | ||
|
|
e0ea141027 | ||
|
|
b361a91508 | ||
|
|
bd1d9ab4ed | ||
|
|
b98ae6ca52 | ||
|
|
9825184a0a | ||
|
|
ac67cffe5c | ||
|
|
9c674ba4f5 | ||
|
|
d109dd5d33 | ||
|
|
3e5446c3d2 | ||
|
|
6adda79888 | ||
|
|
6be6984cc1 | ||
|
|
c50c157cc3 | ||
|
|
c74c378e29 | ||
|
|
d271bde0ec | ||
|
|
df9f80e0f0 | ||
|
|
46371933a7 | ||
|
|
ee6a063cbd | ||
|
|
5f8171a638 | ||
|
|
bb59f7707d | ||
|
|
8b0dccd066 | ||
|
|
6349095d19 | ||
|
|
d7d03fda12 | ||
|
|
4e3a5342c5 | ||
|
|
55f023701c | ||
|
|
6a151cd774 | ||
|
|
7866bcdb1b | ||
|
|
611289e0b0 | ||
|
|
4c409a5b66 | ||
|
|
70df9def86 | ||
|
|
5f98c40063 | ||
|
|
f338ec87a3 | ||
|
|
c31f6c89e8 | ||
|
|
251c7fdf5d | ||
|
|
afa3ed33bb | ||
|
|
f8367fb063 | ||
|
|
dc49dcabdb | ||
|
|
7254660adc | ||
|
|
13f2f96cdd | ||
|
|
0efe12d12c | ||
|
|
ff5f0b9efd | ||
|
|
270886f805 | ||
|
|
d3762699d1 | ||
|
|
b46b37a8a3 | ||
|
|
aecf01557d | ||
|
|
053e2184a0 | ||
|
|
248ab9b811 | ||
|
|
d76f393713 | ||
|
|
88676f2b14 | ||
|
|
b2e6519679 | ||
|
|
ff91c9db25 | ||
|
|
d1b3f740d8 | ||
|
|
6bb031afdd | ||
|
|
6bca7dd940 | ||
|
|
56b25d5d52 | ||
|
|
256aa16582 | ||
|
|
911d845ab6 | ||
|
|
7b7ed63503 | ||
|
|
608ccd6cd9 | ||
|
|
35d04d3627 | ||
|
|
ec3b22164b | ||
|
|
8dc01f6342 | ||
|
|
b378daf0bc | ||
|
|
80f9930395 | ||
|
|
f544ec5e4a | ||
|
|
d37931ae65 | ||
|
|
4b35b04661 | ||
|
|
1a69221aeb | ||
|
|
452ffd6e8e | ||
|
|
a8ade63a2f | ||
|
|
2db5b5a35f | ||
|
|
85e30fe57a | ||
|
|
c7c9a57be6 | ||
|
|
c29cab9aac | ||
|
|
83923549f6 | ||
|
|
96632630a9 | ||
|
|
97aac9bb56 | ||
|
|
d59d0eb99a | ||
|
|
0ae74e1ae0 | ||
|
|
146dd5cfb0 | ||
|
|
32505c6f2d | ||
|
|
8c69433c34 | ||
|
|
c4f2a076e5 | ||
|
|
a8849fbe63 | ||
|
|
628c1e669a | ||
|
|
e12eb8d7c1 | ||
|
|
3b348d9a54 | ||
|
|
7567eab606 | ||
|
|
a9f7529f47 | ||
|
|
5e88a178fe | ||
|
|
c0a8eb9a67 | ||
|
|
286fd68a67 | ||
|
|
d3c7d03197 | ||
|
|
03c921eac5 | ||
|
|
ff79de67cc | ||
|
|
5d1a3cb0ee | ||
|
|
2923046360 | ||
|
|
b56ba49b26 | ||
|
|
60c9c77c33 | ||
|
|
9856c48b1a | ||
|
|
9572e09da4 | ||
|
|
1a529366ac | ||
|
|
cf7e90952b | ||
|
|
b7006aab6d | ||
|
|
cfedae723e | ||
|
|
3971ed2a74 | ||
|
|
d38c6e60df | ||
|
|
c0d59dba56 | ||
|
|
c1745a9831 | ||
|
|
67e48c1eaf | ||
|
|
dbbcbe019d | ||
|
|
fb3e7cdd88 | ||
|
|
ff50469ca0 | ||
|
|
d0aab9fc20 | ||
|
|
c9ebc3bb8b | ||
|
|
a7b17782a9 | ||
|
|
f85d8b5a74 | ||
|
|
dae1626680 | ||
|
|
d99c7e8e5b | ||
|
|
eb88b40ca4 | ||
|
|
6b1da0d33e | ||
|
|
906452d251 | ||
|
|
0656d7fb91 | ||
|
|
1bb15d06a6 | ||
|
|
65a3aa1fbc | ||
|
|
acadfedea5 | ||
|
|
1930ca4359 | ||
|
|
1d4f241470 | ||
|
|
9dfa165835 | ||
|
|
47d5364431 | ||
|
|
30b1c2ae15 | ||
|
|
c4158ff890 | ||
|
|
2be5f244ff | ||
|
|
8c1551cdd4 | ||
|
|
fc4873bed7 | ||
|
|
c3ad6e9deb | ||
|
|
61c77a48ff | ||
|
|
4267fa66a2 | ||
|
|
c4a8587f45 | ||
|
|
77bc2a595e | ||
|
|
1c15a48f3f | ||
|
|
3df6d20d31 | ||
|
|
affec202b3 | ||
|
|
a963b41ebd | ||
|
|
683c0f5360 | ||
|
|
6063925771 | ||
|
|
67eb53aecb | ||
|
|
226ab86c29 | ||
|
|
078f43891a | ||
|
|
ccc5046d0b | ||
|
|
8c70d43f73 | ||
|
|
0a35e8f686 | ||
|
|
fb1a08b0c7 | ||
|
|
fc5ba27156 | ||
|
|
9e8d0789d4 | ||
|
|
43d9be6701 | ||
|
|
7b5dd253ad | ||
|
|
24e1c2d337 | ||
|
|
57a08c0c7f | ||
|
|
52189d23af | ||
|
|
08bc0cf022 | ||
|
|
cf7c687919 | ||
|
|
ad42dbd32d | ||
|
|
a8f5935da0 | ||
|
|
9660df3fcc | ||
|
|
3e913ef09d | ||
|
|
e456c53578 | ||
|
|
0d043c929c | ||
|
|
695f3263e3 | ||
|
|
7b4c9fef7d |
@@ -61,11 +61,12 @@ runs:
|
||||
- name: Check config
|
||||
working-directory: ${{ github.action_path }}
|
||||
shell: bash
|
||||
run: ts-node ./index.ts "${{ runner.temp }}/user-config.yaml" '${{ inputs.expected-config-file-contents }}'
|
||||
|
||||
env:
|
||||
EXPECTED_CONFIG_FILE_CONTENTS: '${{ inputs.expected-config-file-contents }}'
|
||||
run: ts-node ./index.ts "$RUNNER_TEMP/user-config.yaml" "$EXPECTED_CONFIG_FILE_CONTENTS"
|
||||
- name: Clean up
|
||||
shell: bash
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ${{ runner.temp }}/codescanning-config-cli-test
|
||||
rm -rf ${{ runner.temp }}/user-config.yaml
|
||||
rm -rf $RUNNER_TEMP/codescanning-config-cli-test
|
||||
rm -rf $RUNNER_TEMP/user-config.yaml
|
||||
|
||||
@@ -8,7 +8,7 @@ const actualConfig = loadActualConfig()
|
||||
|
||||
const rawExpectedConfig = process.argv[3].trim()
|
||||
if (!rawExpectedConfig) {
|
||||
core.info('No expected configuration provided')
|
||||
core.setFailed('No expected configuration provided')
|
||||
} else {
|
||||
core.startGroup('Expected generated user config')
|
||||
core.info(yaml.dump(JSON.parse(rawExpectedConfig)))
|
||||
|
||||
21
.github/actions/prepare-test/action.yml
vendored
21
.github/actions/prepare-test/action.yml
vendored
@@ -29,24 +29,27 @@ runs:
|
||||
- id: get-url
|
||||
name: Determine URL
|
||||
shell: bash
|
||||
env:
|
||||
VERSION: ${{ inputs.version }}
|
||||
USE_ALL_PLATFORM_BUNDLE: ${{ inputs.use-all-platform-bundle }}
|
||||
run: |
|
||||
set -e # Fail this Action if `gh release list` fails.
|
||||
|
||||
if [[ ${{ inputs.version }} == "linked" ]]; then
|
||||
if [[ "$VERSION" == "linked" ]]; then
|
||||
echo "tools-url=linked" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
elif [[ ${{ inputs.version }} == "default" ]]; then
|
||||
elif [[ "$VERSION" == "default" ]]; then
|
||||
echo "tools-url=" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ ${{ inputs.version }} == "nightly-latest" && "$RUNNER_OS" != "Windows" ]]; then
|
||||
if [[ "$VERSION" == "nightly-latest" && "$RUNNER_OS" != "Windows" ]]; then
|
||||
extension="tar.zst"
|
||||
else
|
||||
extension="tar.gz"
|
||||
fi
|
||||
|
||||
if [[ ${{ inputs.use-all-platform-bundle }} == "true" ]]; then
|
||||
if [[ "$USE_ALL_PLATFORM_BUNDLE" == "true" ]]; then
|
||||
artifact_name="codeql-bundle.$extension"
|
||||
elif [[ "$RUNNER_OS" == "Linux" ]]; then
|
||||
artifact_name="codeql-bundle-linux64.$extension"
|
||||
@@ -59,14 +62,14 @@ runs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
|
||||
if [[ "$VERSION" == "nightly-latest" ]]; then
|
||||
tag=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
|
||||
echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$tag/$artifact_name" >> $GITHUB_OUTPUT
|
||||
elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
|
||||
version=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
|
||||
elif [[ "$VERSION" == *"nightly"* ]]; then
|
||||
version=`echo "$VERSION" | sed -e 's/^.*\-//'`
|
||||
echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$version/$artifact_name" >> $GITHUB_OUTPUT
|
||||
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
|
||||
version=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
|
||||
elif [[ "$VERSION" == *"stable"* ]]; then
|
||||
version=`echo "$VERSION" | sed -e 's/^.*\-//'`
|
||||
echo "tools-url=https://github.com/github/codeql-action/releases/download/codeql-bundle-$version/$artifact_name" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "::error::Unrecognized version specified!"
|
||||
|
||||
7
.github/actions/release-branches/action.yml
vendored
7
.github/actions/release-branches/action.yml
vendored
@@ -18,8 +18,11 @@ runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- id: branches
|
||||
env:
|
||||
MAJOR_VERSION: ${{ inputs.major_version }}
|
||||
LATEST_TAG: ${{ inputs.latest_tag }}
|
||||
run: |
|
||||
python ${{ github.action_path }}/release-branches.py \
|
||||
--major-version ${{ inputs.major_version }} \
|
||||
--latest-tag ${{ inputs.latest_tag }}
|
||||
--major-version "$MAJOR_VERSION" \
|
||||
--latest-tag "$LATEST_TAG"
|
||||
shell: bash
|
||||
|
||||
5
.github/copilot-instructions.md
vendored
Normal file
5
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# CodeQL Action - Copilot Instructions
|
||||
|
||||
The CodeQL Action is written in TypeScript and compiled to JavaScript. Both the TypeScript sources and the **generated** JavaScript code are contained in this repository. The TypeScript sources are contained in the `src` directory and the JavaScript code is contained in the `lib` directory. A GitHub Actions workflow checks that the JavaScript code in `lib` is up-to-date. Therefore, you should not review any changes to the contents of the `lib` folder and it is expected that the JavaScript code in `lib` closely mirrors the TypeScript code it is generated from.
|
||||
|
||||
GitHub Actions workflows in the `.github/workflows` directory whose filenames start with two underscores (e.g. `__all-platform-bundle.yml`) are automatically generated using the `pr-checks/sync.sh` script from template files in the `pr-checks/checks` directory. Therefore, you do not need to review files in the `.github/workflows` directory that starts with two underscores. However, you should review changes to the `pr-checks` directory as well as workflows in the `.github/workflows` directory that do not start with underscores.
|
||||
6
.github/dependabot.yml
vendored
6
.github/dependabot.yml
vendored
@@ -2,8 +2,6 @@ version: 2
|
||||
updates:
|
||||
- package-ecosystem: npm
|
||||
directory: "/"
|
||||
reviewers:
|
||||
- "github/codeql-production-shield"
|
||||
schedule:
|
||||
interval: weekly
|
||||
labels:
|
||||
@@ -26,8 +24,6 @@ updates:
|
||||
- "*"
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/"
|
||||
reviewers:
|
||||
- "github/codeql-production-shield"
|
||||
schedule:
|
||||
interval: weekly
|
||||
groups:
|
||||
@@ -36,8 +32,6 @@ updates:
|
||||
- "*"
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/.github/actions/setup-swift/" # All subdirectories outside of "/.github/workflows" must be explicitly included.
|
||||
reviewers:
|
||||
- "github/codeql-production-shield"
|
||||
schedule:
|
||||
interval: weekly
|
||||
groups:
|
||||
|
||||
5
.github/workflows/__all-platform-bundle.yml
generated
vendored
5
.github/workflows/__all-platform-bundle.yml
generated
vendored
@@ -45,6 +45,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'true'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- id: init
|
||||
uses: ./../action/init
|
||||
with:
|
||||
|
||||
5
.github/workflows/__analyze-ref-input.yml
generated
vendored
5
.github/workflows/__analyze-ref-input.yml
generated
vendored
@@ -49,6 +49,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
5
.github/workflows/__build-mode-manual.yml
generated
vendored
5
.github/workflows/__build-mode-manual.yml
generated
vendored
@@ -45,6 +45,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
|
||||
2
.github/workflows/__cpp-deptrace-enabled-on-macos.yml
generated
vendored
2
.github/workflows/__cpp-deptrace-enabled-on-macos.yml
generated
vendored
@@ -27,6 +27,8 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-latest
|
||||
version: linked
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: 'C/C++: autoinstalling dependencies is skipped (macOS)'
|
||||
|
||||
5
.github/workflows/__export-file-baseline-information.yml
generated
vendored
5
.github/workflows/__export-file-baseline-information.yml
generated
vendored
@@ -49,6 +49,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
|
||||
4
.github/workflows/__go-custom-queries.yml
generated
vendored
4
.github/workflows/__go-custom-queries.yml
generated
vendored
@@ -47,9 +47,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
|
||||
5
.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml
generated
vendored
5
.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml
generated
vendored
@@ -45,10 +45,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
# We need a Go version that ships with statically linked binaries on Linux
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
|
||||
5
.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml
generated
vendored
5
.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml
generated
vendored
@@ -45,10 +45,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
# We need a Go version that ships with statically linked binaries on Linux
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- name: Remove `file` program
|
||||
run: |
|
||||
echo $(which file)
|
||||
|
||||
5
.github/workflows/__go-indirect-tracing-workaround.yml
generated
vendored
5
.github/workflows/__go-indirect-tracing-workaround.yml
generated
vendored
@@ -45,10 +45,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
# We need a Go version that ships with statically linked binaries on Linux
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
|
||||
23
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
23
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
@@ -27,14 +27,6 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.15.5
|
||||
- os: macos-latest
|
||||
version: stable-v2.15.5
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.16.6
|
||||
- os: macos-latest
|
||||
version: stable-v2.16.6
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.17.6
|
||||
- os: macos-latest
|
||||
@@ -47,6 +39,14 @@ jobs:
|
||||
version: stable-v2.19.4
|
||||
- os: macos-latest
|
||||
version: stable-v2.19.4
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.20.7
|
||||
- os: macos-latest
|
||||
version: stable-v2.20.7
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.21.4
|
||||
- os: macos-latest
|
||||
version: stable-v2.21.4
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
- os: macos-latest
|
||||
@@ -75,11 +75,10 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ~1.23.0
|
||||
# to avoid potentially misleading autobuilder results where we expect it to download
|
||||
# dependencies successfully, but they actually come from a warm cache
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
|
||||
23
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
23
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
@@ -27,14 +27,6 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.15.5
|
||||
- os: macos-latest
|
||||
version: stable-v2.15.5
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.16.6
|
||||
- os: macos-latest
|
||||
version: stable-v2.16.6
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.17.6
|
||||
- os: macos-latest
|
||||
@@ -47,6 +39,14 @@ jobs:
|
||||
version: stable-v2.19.4
|
||||
- os: macos-latest
|
||||
version: stable-v2.19.4
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.20.7
|
||||
- os: macos-latest
|
||||
version: stable-v2.20.7
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.21.4
|
||||
- os: macos-latest
|
||||
version: stable-v2.21.4
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
- os: macos-latest
|
||||
@@ -75,11 +75,10 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ~1.23.0
|
||||
# to avoid potentially misleading autobuilder results where we expect it to download
|
||||
# dependencies successfully, but they actually come from a warm cache
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
|
||||
23
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
23
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
@@ -27,14 +27,6 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.15.5
|
||||
- os: macos-latest
|
||||
version: stable-v2.15.5
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.16.6
|
||||
- os: macos-latest
|
||||
version: stable-v2.16.6
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.17.6
|
||||
- os: macos-latest
|
||||
@@ -47,6 +39,14 @@ jobs:
|
||||
version: stable-v2.19.4
|
||||
- os: macos-latest
|
||||
version: stable-v2.19.4
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.20.7
|
||||
- os: macos-latest
|
||||
version: stable-v2.20.7
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.21.4
|
||||
- os: macos-latest
|
||||
version: stable-v2.21.4
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
- os: macos-latest
|
||||
@@ -75,11 +75,10 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ~1.23.0
|
||||
# to avoid potentially misleading autobuilder results where we expect it to download
|
||||
# dependencies successfully, but they actually come from a warm cache
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
|
||||
21
.github/workflows/__multi-language-autodetect.yml
generated
vendored
21
.github/workflows/__multi-language-autodetect.yml
generated
vendored
@@ -27,14 +27,6 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-latest
|
||||
version: stable-v2.15.5
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.15.5
|
||||
- os: macos-latest
|
||||
version: stable-v2.16.6
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.16.6
|
||||
- os: macos-latest
|
||||
version: stable-v2.17.6
|
||||
- os: ubuntu-latest
|
||||
@@ -47,6 +39,14 @@ jobs:
|
||||
version: stable-v2.19.4
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.19.4
|
||||
- os: macos-latest
|
||||
version: stable-v2.20.7
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.20.7
|
||||
- os: macos-latest
|
||||
version: stable-v2.21.4
|
||||
- os: ubuntu-latest
|
||||
version: stable-v2.21.4
|
||||
- os: macos-latest
|
||||
version: default
|
||||
- os: ubuntu-latest
|
||||
@@ -75,10 +75,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: actions/setup-go@v5
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
|
||||
5
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
5
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
@@ -61,6 +61,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
|
||||
5
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
5
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
@@ -61,6 +61,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
|
||||
5
.github/workflows/__packaging-config-js.yml
generated
vendored
5
.github/workflows/__packaging-config-js.yml
generated
vendored
@@ -61,6 +61,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging.yml
|
||||
|
||||
5
.github/workflows/__packaging-inputs-js.yml
generated
vendored
5
.github/workflows/__packaging-inputs-js.yml
generated
vendored
@@ -61,6 +61,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging2.yml
|
||||
|
||||
117
.github/workflows/__quality-queries.yml
generated
vendored
Normal file
117
.github/workflows/__quality-queries.yml
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Quality queries input
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v*
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
schedule:
|
||||
- cron: '0 5 * * *'
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
quality-queries:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: linked
|
||||
- os: macos-latest
|
||||
version: linked
|
||||
- os: windows-latest
|
||||
version: linked
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: Quality queries input
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/actions/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
quality-queries: code-quality
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
- name: Upload security SARIF
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: quality-queries-${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||
path: ${{ runner.temp }}/results/javascript.sarif
|
||||
retention-days: 7
|
||||
- name: Upload quality SARIF
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: quality-queries-${{ matrix.os }}-${{ matrix.version }}.quality.sarif.json
|
||||
path: ${{ runner.temp }}/results/javascript.quality.sarif
|
||||
retention-days: 7
|
||||
- name: Check quality query does not appear in security SARIF
|
||||
uses: actions/github-script@v7
|
||||
env:
|
||||
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
|
||||
EXPECT_PRESENT: 'false'
|
||||
with:
|
||||
script: ${{ env.CHECK_SCRIPT }}
|
||||
- name: Check quality query appears in quality SARIF
|
||||
uses: actions/github-script@v7
|
||||
env:
|
||||
SARIF_PATH: ${{ runner.temp }}/results/javascript.quality.sarif
|
||||
EXPECT_PRESENT: 'true'
|
||||
with:
|
||||
script: ${{ env.CHECK_SCRIPT }}
|
||||
env:
|
||||
CHECK_SCRIPT: |
|
||||
const fs = require('fs');
|
||||
|
||||
const sarif = JSON.parse(fs.readFileSync(process.env['SARIF_PATH'], 'utf8'));
|
||||
const expectPresent = JSON.parse(process.env['EXPECT_PRESENT']);
|
||||
const run = sarif.runs[0];
|
||||
const extensions = run.tool.extensions;
|
||||
|
||||
if (extensions === undefined) {
|
||||
core.setFailed('`extensions` property not found in the SARIF run property bag.');
|
||||
}
|
||||
|
||||
// ID of a query we want to check the presence for
|
||||
const targetId = 'js/regex/always-matches';
|
||||
const found = extensions.find(extension => extension.rules && extension.rules.find(rule => rule.id === targetId));
|
||||
|
||||
if (found && expectPresent) {
|
||||
console.log(`Found rule with id '${targetId}'.`);
|
||||
} else if (!found && !expectPresent) {
|
||||
console.log(`Rule with id '${targetId}' was not found.`);
|
||||
} else {
|
||||
core.setFailed(`${ found ? "Found" : "Didn't find" } rule ${targetId}`);
|
||||
}
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
5
.github/workflows/__remote-config.yml
generated
vendored
5
.github/workflows/__remote-config.yml
generated
vendored
@@ -47,6 +47,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
2
.github/workflows/__rubocop-multi-language.yml
generated
vendored
2
.github/workflows/__rubocop-multi-language.yml
generated
vendored
@@ -46,7 +46,7 @@ jobs:
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Set up Ruby
|
||||
uses: ruby/setup-ruby@2654679fe7f7c29875c669398a8ec0791b8a64a1 # v1.215.0
|
||||
uses: ruby/setup-ruby@2a7b30092b0caf9c046252510f9273b4875f3db9 # v1.254.0
|
||||
with:
|
||||
ruby-version: 2.6
|
||||
- name: Install Code Scanning integration
|
||||
|
||||
71
.github/workflows/__rust.yml
generated
vendored
Normal file
71
.github/workflows/__rust.yml
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Rust analysis
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v*
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
schedule:
|
||||
- cron: '0 5 * * *'
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
rust:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: linked
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Rust analysis
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/actions/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: rust
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
env:
|
||||
CODEQL_ACTION_RUST_ANALYSIS: true
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
with:
|
||||
upload-database: false
|
||||
- name: Check database
|
||||
shell: bash
|
||||
run: |
|
||||
RUST_DB="${{ fromJson(steps.analysis.outputs.db-locations).rust }}"
|
||||
if [[ ! -d "$RUST_DB" ]]; then
|
||||
echo "Did not create a database for Rust."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
5
.github/workflows/__split-workflow.yml
generated
vendored
5
.github/workflows/__split-workflow.yml
generated
vendored
@@ -55,6 +55,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
|
||||
5
.github/workflows/__swift-custom-build.yml
generated
vendored
5
.github/workflows/__swift-custom-build.yml
generated
vendored
@@ -49,6 +49,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
|
||||
5
.github/workflows/__test-local-codeql.yml
generated
vendored
5
.github/workflows/__test-local-codeql.yml
generated
vendored
@@ -45,6 +45,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- name: Fetch a CodeQL bundle
|
||||
shell: bash
|
||||
env:
|
||||
|
||||
8
.github/workflows/__unset-environment.yml
generated
vendored
8
.github/workflows/__unset-environment.yml
generated
vendored
@@ -47,6 +47,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
@@ -54,9 +59,6 @@ jobs:
|
||||
# Swift is not supported on Ubuntu so we manually exclude it from the list here
|
||||
languages: cpp,csharp,go,java,javascript,python,ruby
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
|
||||
|
||||
78
.github/workflows/__upload-quality-sarif.yml
generated
vendored
Normal file
78
.github/workflows/__upload-quality-sarif.yml
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Upload-sarif: code quality endpoint'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v*
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
schedule:
|
||||
- cron: '0 5 * * *'
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
upload-quality-sarif:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
- os: macos-latest
|
||||
version: default
|
||||
- os: windows-latest
|
||||
version: default
|
||||
name: 'Upload-sarif: code quality endpoint'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/actions/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
github.sha }}
|
||||
quality-queries: code-quality
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
# Generate some SARIF we can upload with the upload-sarif step
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
upload: never
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
5
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
5
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
@@ -49,6 +49,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
5
.github/workflows/__with-checkout-path.yml
generated
vendored
5
.github/workflows/__with-checkout-path.yml
generated
vendored
@@ -49,6 +49,11 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.21.0'
|
||||
cache: false
|
||||
- name: Delete original checkout
|
||||
shell: bash
|
||||
run: |
|
||||
|
||||
2
.github/workflows/codeql.yml
vendored
2
.github/workflows/codeql.yml
vendored
@@ -75,7 +75,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-20.04,ubuntu-22.04,windows-2019,windows-2022,macos-13,macos-14]
|
||||
os: [ubuntu-22.04,ubuntu-24.04,windows-2022,windows-2025,macos-13,macos-14,macos-15]
|
||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
name: Code-Scanning config CLI tests
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# Diff informed queries add an additional query filter which is not yet
|
||||
# taken into account by these tests.
|
||||
CODEQL_ACTION_DIFF_INFORMED_QUERIES: false
|
||||
|
||||
on:
|
||||
push:
|
||||
|
||||
2
.github/workflows/post-release-mergeback.yml
vendored
2
.github/workflows/post-release-mergeback.yml
vendored
@@ -168,7 +168,7 @@ jobs:
|
||||
--draft
|
||||
|
||||
- name: Generate token
|
||||
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
|
||||
uses: actions/create-github-app-token@v2.0.6
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.AUTOMATION_APP_ID }}
|
||||
|
||||
91
.github/workflows/rebuild.yml
vendored
91
.github/workflows/rebuild.yml
vendored
@@ -9,7 +9,7 @@ jobs:
|
||||
rebuild:
|
||||
name: Rebuild Action
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'Rebuild'
|
||||
if: github.event.label.name == 'Rebuild' || github.event_name == 'workflow_dispatch'
|
||||
|
||||
permissions:
|
||||
contents: write # needed to push rebuilt commit
|
||||
@@ -18,9 +18,11 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event.pull_request.head.ref || github.event.ref }}
|
||||
|
||||
- name: Remove label
|
||||
if: github.event_name == 'pull_request'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
@@ -28,21 +30,35 @@ jobs:
|
||||
gh pr edit --repo github/codeql-action "$PR_NUMBER" \
|
||||
--remove-label "Rebuild"
|
||||
|
||||
- name: Configure git
|
||||
run: |
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
|
||||
- name: Merge in changes from base branch
|
||||
id: merge
|
||||
env:
|
||||
BASE_BRANCH: ${{ github.event.pull_request.base.ref }}
|
||||
BASE_BRANCH: ${{ github.event.pull_request.base.ref || 'main' }}
|
||||
run: |
|
||||
git fetch origin "$BASE_BRANCH"
|
||||
|
||||
# Allow merge conflicts in `lib`, since rebuilding should resolve them.
|
||||
git merge "origin/$BASE_BRANCH" || echo "Merge conflicts detected"
|
||||
git merge "origin/$BASE_BRANCH" || echo "Merge conflicts detected, continuing."
|
||||
MERGE_RESULT=$?
|
||||
|
||||
# Check for merge conflicts outside of `lib`. Disable git diff's trailing whitespace check
|
||||
# since `node_modules/@types/semver/README.md` fails it.
|
||||
if git -c core.whitespace=-trailing-space diff --check | grep --invert-match '^lib/'; then
|
||||
echo "Merge conflicts detected outside of lib/ directory. Please resolve them manually."
|
||||
git -c core.whitespace=-trailing-space diff --check | grep --invert-match '^lib/' || true
|
||||
exit 1
|
||||
if [ "$MERGE_RESULT" -ne 0 ]; then
|
||||
echo "merge-in-progress=true" >> $GITHUB_OUTPUT
|
||||
|
||||
# Check for merge conflicts outside of `lib`. Disable git diff's trailing whitespace check
|
||||
# since `node_modules/@types/semver/README.md` fails it.
|
||||
if git -c core.whitespace=-trailing-space diff --check | grep --invert-match '^lib/'; then
|
||||
echo "Merge conflicts were detected outside of the lib directory. Please resolve them manually."
|
||||
git -c core.whitespace=-trailing-space diff --check | grep --invert-match '^lib/' || true
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "No merge conflicts found outside the lib directory. We should be able to resolve all of" \
|
||||
"these by rebuilding the Action."
|
||||
fi
|
||||
|
||||
- name: Compile TypeScript
|
||||
@@ -63,20 +79,49 @@ jobs:
|
||||
pip install ruamel.yaml==0.17.31
|
||||
python3 sync.py
|
||||
|
||||
- name: Check for changes and push
|
||||
- name: "Merge in progress: Finish merge and push"
|
||||
if: steps.merge.outputs.merge-in-progress == 'true'
|
||||
run: |
|
||||
echo "Finishing merge and pushing changes."
|
||||
git add --all
|
||||
git commit --no-edit
|
||||
git push
|
||||
|
||||
- name: "No merge in progress: Check for changes and push"
|
||||
if: steps.merge.outputs.merge-in-progress != 'true'
|
||||
id: push
|
||||
run: |
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
echo "Changes detected, committing and pushing."
|
||||
git add --all
|
||||
# If the merge originally had conflicts, finish the merge.
|
||||
# Otherwise, just commit the changes.
|
||||
if git rev-parse --verify MERGE_HEAD >/dev/null 2>&1; then
|
||||
echo "In progress merge detected, finishing it up."
|
||||
git merge --continue
|
||||
else
|
||||
echo "No in-progress merge detected, committing changes."
|
||||
git commit -m "Rebuild"
|
||||
fi
|
||||
echo "Pushing changes"
|
||||
git push
|
||||
echo "changes=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "No changes detected, nothing to commit."
|
||||
fi
|
||||
|
||||
- name: Notify about rebuild
|
||||
if: >-
|
||||
github.event_name == 'pull_request' &&
|
||||
(
|
||||
steps.merge.outputs.merge-in-progress == 'true' ||
|
||||
steps.push.outputs.changes == 'true'
|
||||
)
|
||||
env:
|
||||
BRANCH: ${{ github.event.pull_request.head.ref }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
run: |
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git add --all
|
||||
git commit -m "Rebuild"
|
||||
git push origin "HEAD:$BRANCH"
|
||||
echo "Pushed a commit to rebuild the Action." \
|
||||
"Please mark the PR as ready for review to trigger PR checks." |
|
||||
gh pr comment --body-file - --repo github/codeql-action "$PR_NUMBER"
|
||||
gh pr ready --undo --repo github/codeql-action "$PR_NUMBER"
|
||||
fi
|
||||
echo "Pushed a commit to rebuild the Action." \
|
||||
"Please mark the PR as ready for review to trigger PR checks." |
|
||||
gh pr comment --body-file - --repo github/codeql-action "$PR_NUMBER"
|
||||
gh pr ready --undo --repo github/codeql-action "$PR_NUMBER"
|
||||
|
||||
101
.github/workflows/update-proxy-release.yml
vendored
Normal file
101
.github/workflows/update-proxy-release.yml
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
name: Update dependency proxy release assets
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "The tag of CodeQL Bundle release that contains the proxy binaries as release assets"
|
||||
type: string
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
update:
|
||||
name: Update code and create PR
|
||||
timeout-minutes: 15
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write # needed to push the updated files
|
||||
pull-requests: write # needed to create the PR
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.tag }}
|
||||
steps:
|
||||
- name: Check release tag format
|
||||
id: checks
|
||||
shell: bash
|
||||
run: |
|
||||
if ! [[ $RELEASE_TAG =~ ^codeql-bundle-v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "Invalid release tag: expected a CodeQL bundle tag in the 'codeql-bundle-vM.N.P' format."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "target_branch=dependency-proxy/$RELEASE_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check that the release exists
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||
run: |
|
||||
(gh release view --repo "$GITHUB_REPOSITORY" --json "assets" "$RELEASE_TAG" && echo "Release found.") || exit 1
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # ensure we have all tags and can push commits
|
||||
ref: main
|
||||
|
||||
- name: Update git config
|
||||
shell: bash
|
||||
run: |
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
|
||||
- name: Update release tag and version
|
||||
shell: bash
|
||||
run: |
|
||||
NOW=$(date +"%Y%m%d%H%M%S") # only used to make sure we don't fetch stale binaries from the toolcache
|
||||
sed -i "s|https://github.com/github/codeql-action/releases/download/codeql-bundle-v[0-9.]\+/|https://github.com/github/codeql-action/releases/download/$RELEASE_TAG/|g" ./src/start-proxy-action.ts
|
||||
sed -i "s/\"v2.0.[0-9]\+\"/\"v2.0.$NOW\"/g" ./src/start-proxy-action.ts
|
||||
|
||||
- name: Compile TypeScript and commit changes
|
||||
shell: bash
|
||||
env:
|
||||
TARGET_BRANCH: ${{ steps.checks.outputs.target_branch }}
|
||||
run: |
|
||||
set -exu
|
||||
git checkout -b "$TARGET_BRANCH"
|
||||
|
||||
npm run build
|
||||
git add ./src/start-proxy-action.ts
|
||||
git add ./lib
|
||||
git commit -m "Update release used by \`start-proxy\` action"
|
||||
|
||||
- name: Push changes and open PR
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||
TARGET_BRANCH: ${{ steps.checks.outputs.target_branch }}
|
||||
PR_FLAG: ${{ (github.event_name == 'workflow_dispatch' && '--draft') || '--dry-run' }}
|
||||
run: |
|
||||
set -exu
|
||||
pr_title="Update release used by \`start-proxy\` to \`$RELEASE_TAG\`"
|
||||
pr_body=$(cat << EOF
|
||||
This PR updates the \`start-proxy\` action to use the private registry proxy binaries that
|
||||
are attached as release assets to the \`$RELEASE_TAG\` release.
|
||||
|
||||
|
||||
Please do the following before merging:
|
||||
|
||||
- [ ] Verify that the changes to the code are correct.
|
||||
- [ ] Mark the PR as ready for review to trigger the CI.
|
||||
EOF
|
||||
)
|
||||
|
||||
git push origin "$TARGET_BRANCH"
|
||||
gh pr create \
|
||||
--head "$TARGET_BRANCH" \
|
||||
--base "main" \
|
||||
--title "${pr_title}" \
|
||||
--body "${pr_body}" \
|
||||
$PR_FLAG
|
||||
2
.github/workflows/update-release-branch.yml
vendored
2
.github/workflows/update-release-branch.yml
vendored
@@ -124,7 +124,7 @@ jobs:
      pull-requests: write # needed to create pull request
    steps:
      - name: Generate token
        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
        uses: actions/create-github-app-token@v2.0.6
        id: app-token
        with:
          app-id: ${{ vars.AUTOMATION_APP_ID }}

@@ -1,20 +1,20 @@
repos:
  - repo: local
    hooks:
      - id: lint-ts
        name: Lint typescript code
        files: \.ts$
        language: system
        entry: npm run lint -- --fix
      - id: compile-ts
        name: Compile typescript
        files: \.[tj]s$
        language: system
        entry: npm run build
        pass_filenames: false
      - id: lint-ts
        name: Lint typescript code
        files: \.ts$
        language: system
        entry: npm run lint -- --fix
      - id: pr-checks-sync
        name: Synchronize PR check workflows
        files: ^.github/workflows/__.*\.yml$|^pr-checks
        language: system
        entry: python3 pr-checks/sync.py
        entry: pr-checks/sync.sh
        pass_filenames: false

97  CHANGELOG.md
@@ -2,10 +2,105 @@

See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

## [UNRELEASED]
## 3.29.8 - 08 Aug 2025

- Fix an issue where the Action would autodetect unsupported languages such as HTML. [#3015](https://github.com/github/codeql-action/pull/3015)

## 3.29.7 - 07 Aug 2025

This release rolls back 3.29.6 to address issues with language autodetection. It is identical to 3.29.5.

## 3.29.6 - 07 Aug 2025

- The `cleanup-level` input to the `analyze` Action is now deprecated. The CodeQL Action has written a limited amount of intermediate results to the database since version 2.2.5, and now automatically manages cleanup. [#2999](https://github.com/github/codeql-action/pull/2999)
- Update default CodeQL bundle version to 2.22.3. [#3000](https://github.com/github/codeql-action/pull/3000)

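Aside (not part of the CHANGELOG diff): with `cleanup-level` deprecated as described in the 3.29.6 entry above, a typical `analyze` step simply omits the input. The step below is a hedged sketch rather than an excerpt from this repository; the `category` value is illustrative.

```yaml
- name: Perform CodeQL Analysis
  uses: github/codeql-action/analyze@v3
  with:
    category: "/language:javascript-typescript"
    # cleanup-level: brutal   # deprecated and ignored; safe to delete from workflows
```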
## 3.29.5 - 29 Jul 2025

- Update default CodeQL bundle version to 2.22.2. [#2986](https://github.com/github/codeql-action/pull/2986)

## 3.29.4 - 23 Jul 2025

No user facing changes.

## 3.29.3 - 21 Jul 2025

No user facing changes.

## 3.29.2 - 30 Jun 2025

- Experimental: When the `quality-queries` input for the `init` action is provided with an argument, separate `.quality.sarif` files are produced and uploaded for each language with the results of the specified queries. Do not use this in production as it is part of an internal experiment and subject to change at any time. [#2935](https://github.com/github/codeql-action/pull/2935)

## 3.29.1 - 27 Jun 2025

- Fix bug in PR analysis where user-provided `include` query filter fails to exclude non-included queries. [#2938](https://github.com/github/codeql-action/pull/2938)
- Update default CodeQL bundle version to 2.22.1. [#2950](https://github.com/github/codeql-action/pull/2950)

## 3.29.0 - 11 Jun 2025

- Update default CodeQL bundle version to 2.22.0. [#2925](https://github.com/github/codeql-action/pull/2925)
- Bump minimum CodeQL bundle version to 2.16.6. [#2912](https://github.com/github/codeql-action/pull/2912)

## 3.28.21 - 28 July 2025

No user facing changes.

## 3.28.20 - 21 July 2025

- Remove support for combining SARIF files from a single upload for GHES 3.18, see [the changelog post](https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload/). [#2959](https://github.com/github/codeql-action/pull/2959)

## 3.28.19 - 03 Jun 2025

- The CodeQL Action no longer includes its own copy of the extractor for the `actions` language, which is currently in public preview.
  The `actions` extractor has been included in the CodeQL CLI since v2.20.6. If your workflow has enabled the `actions` language _and_ you have pinned
  your `tools:` property to a specific version of the CodeQL CLI earlier than v2.20.6, you will need to update to at least CodeQL v2.20.6 or disable
  `actions` analysis.
- Update default CodeQL bundle version to 2.21.4. [#2910](https://github.com/github/codeql-action/pull/2910)

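Aside (not part of the CHANGELOG diff): the 3.28.19 entry above applies to workflows that both enable the `actions` language and pin the `tools:` input. A hedged sketch of such a pin is shown below; the exact bundle asset URL and file name are assumptions modelled on the release-download URLs that appear elsewhere in this compare view.

```yaml
- uses: github/codeql-action/init@v3
  with:
    languages: actions
    # Any pinned bundle must be CodeQL v2.20.6 or later for `actions` analysis.
    tools: https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.20.6/codeql-bundle-linux64.tar.gz
```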
## 3.28.18 - 16 May 2025

- Update default CodeQL bundle version to 2.21.3. [#2893](https://github.com/github/codeql-action/pull/2893)
- Skip validating SARIF produced by CodeQL for improved performance. [#2894](https://github.com/github/codeql-action/pull/2894)
- The number of threads and amount of RAM used by CodeQL can now be set via the `CODEQL_THREADS` and `CODEQL_RAM` runner environment variables. If set, these environment variables override the `threads` and `ram` inputs respectively. [#2891](https://github.com/github/codeql-action/pull/2891)

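Aside (not part of the CHANGELOG diff): the 3.28.18 entry above adds runner-level overrides for threads and memory. A hedged sketch of setting them at the job level; the job name and values are illustrative only.

```yaml
jobs:
  analyze:
    runs-on: ubuntu-latest
    env:
      CODEQL_THREADS: "2"    # overrides the `threads` input if set
      CODEQL_RAM: "4096"     # overrides the `ram` input (in MB) if set
```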
## 3.28.17 - 02 May 2025

- Update default CodeQL bundle version to 2.21.2. [#2872](https://github.com/github/codeql-action/pull/2872)

## 3.28.16 - 23 Apr 2025

- Update default CodeQL bundle version to 2.21.1. [#2863](https://github.com/github/codeql-action/pull/2863)

## 3.28.15 - 07 Apr 2025

- Fix bug where the action would fail if it tried to produce a debug artifact with more than 65535 files. [#2842](https://github.com/github/codeql-action/pull/2842)

## 3.28.14 - 07 Apr 2025

- Update default CodeQL bundle version to 2.21.0. [#2838](https://github.com/github/codeql-action/pull/2838)

## 3.28.13 - 24 Mar 2025

No user facing changes.

## 3.28.12 - 19 Mar 2025

- Dependency caching should now cache more dependencies for Java `build-mode: none` extractions. This should speed up workflows and avoid inconsistent alerts in some cases.
- Update default CodeQL bundle version to 2.20.7. [#2810](https://github.com/github/codeql-action/pull/2810)

## 3.28.11 - 07 Mar 2025

- Update default CodeQL bundle version to 2.20.6. [#2793](https://github.com/github/codeql-action/pull/2793)

## 3.28.10 - 21 Feb 2025

- Update default CodeQL bundle version to 2.20.5. [#2772](https://github.com/github/codeql-action/pull/2772)
- Address an issue where the CodeQL Bundle would occasionally fail to decompress on macOS. [#2768](https://github.com/github/codeql-action/pull/2768)

## 3.28.9 - 07 Feb 2025

- Update default CodeQL bundle version to 2.20.4. [#2753](https://github.com/github/codeql-action/pull/2753)

## 3.28.8 - 29 Jan 2025

- Enable support for Kotlin 2.1.10 when running with CodeQL CLI v2.20.3. [#2744](https://github.com/github/codeql-action/pull/2744)

11  README.md
@@ -55,7 +55,7 @@ For compiled languages:

- `manual` build mode will typically produce the most precise results, but it is more difficult to set up and will cause the analysis to take slightly more time to run.
- `autobuild` build mode is simpler to set up, but will only work for projects with generic build steps that can be guessed by the heuristics of the autobuild scripts. If `autobuild` fails, then you must switch to `manual` or `none`. If `autobuild` succeeds, then the results and run time will be the same as `manual` mode.
- `none` build mode is also simpler to set up and is slightly faster to run, but there is a possibility that some alerts will be missed. This may happen if your repository does any code generation during compilation or if there are any dependencies downloaded from registries that the workflow does not have access to. `none` is not yet supported by C/C++, Swift, Go, or Kotlin.
- `none` build mode is also simpler to set up and is slightly faster to run, but there is a possibility that some alerts will be missed. This may happen if your repository does any code generation during compilation or if there are any dependencies downloaded from registries that the workflow does not have access to. `none` is not yet supported by Swift, Go, or Kotlin. It is in public preview for C/C++.

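Aside (not part of the README diff): build modes are selected on the `init` step. The step below is a hedged sketch; `java-kotlin` with `build-mode: none` follows the Java `build-mode: none` support mentioned in the CHANGELOG excerpt above.

```yaml
- uses: github/codeql-action/init@v3
  with:
    languages: java-kotlin
    build-mode: none   # or `autobuild` / `manual`, per the trade-offs described above
```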
## Supported versions of the CodeQL Action

@@ -70,10 +70,11 @@ We typically release new minor versions of the CodeQL Action and Bundle when a n

| Minimum CodeQL Action | Minimum CodeQL Bundle Version | GitHub Environment | Notes |
|-----------------------|-------------------------------|--------------------|-------|
| `v3.26.6` | `2.18.4` | Enterprise Server 3.15 | |
| `v3.25.11` | `2.17.6` | Enterprise Server 3.14 | |
| `v3.24.11` | `2.16.6` | Enterprise Server 3.13 | |
| `v3.22.12` | `2.15.5` | Enterprise Server 3.12 | |
| `v3.28.21` | `2.21.3` | Enterprise Server 3.18 | |
| `v3.28.12` | `2.20.7` | Enterprise Server 3.17 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.16 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.15 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.14 | |

See the full list of GHES release and deprecation dates at [GitHub Enterprise Server releases](https://docs.github.com/en/enterprise-server/admin/all-releases#releases-of-github-enterprise-server).

@@ -1,44 +0,0 @@
name: "actions"
aliases: []
display_name: "GitHub Actions"
version: 0.0.1
column_kind: "utf16"
unicode_newlines: true
build_modes:
  - none
file_coverage_languages: []
github_api_languages: []
scc_languages: []
file_types:
  - name: workflow
    display_name: GitHub Actions workflow files
    extensions:
      - .yml
      - .yaml
forwarded_extractor_name: javascript
options:
  trap:
    title: TRAP options
    description: Options about how the extractor handles TRAP files
    type: object
    visibility: 3
    properties:
      cache:
        title: TRAP cache options
        description: Options about how the extractor handles its TRAP cache
        type: object
        properties:
          dir:
            title: TRAP cache directory
            description: The directory of the TRAP cache to use
            type: string
          bound:
            title: TRAP cache bound
            description: A soft limit (in MB) on the size of the TRAP cache
            type: string
            pattern: "[0-9]+"
          write:
            title: TRAP cache writeable
            description: Whether to write to the TRAP cache as well as reading it
            type: string
            pattern: "(true|TRUE|false|FALSE)"
@@ -1,40 +0,0 @@
if (($null -ne $env:LGTM_INDEX_INCLUDE) -or ($null -ne $env:LGTM_INDEX_EXCLUDE) -or ($null -ne $env:LGTM_INDEX_FILTERS)) {
    Write-Output 'Path filters set. Passing them through to the JavaScript extractor.'
} else {
    Write-Output 'No path filters set. Using the default filters.'
    $DefaultPathFilters = @(
        'exclude:**/*',
        'include:.github/workflows/**/*.yml',
        'include:.github/workflows/**/*.yaml',
        'include:**/action.yml',
        'include:**/action.yaml'
    )

    $env:LGTM_INDEX_FILTERS = $DefaultPathFilters -join "`n"
}

# Find the JavaScript extractor directory via `codeql resolve extractor`.
$CodeQL = Join-Path $env:CODEQL_DIST 'codeql.exe'
$env:CODEQL_EXTRACTOR_JAVASCRIPT_ROOT = &$CodeQL resolve extractor --language javascript
if ($LASTEXITCODE -ne 0) {
    throw 'Failed to resolve JavaScript extractor.'
}

Write-Output "Found JavaScript extractor at '${env:CODEQL_EXTRACTOR_JAVASCRIPT_ROOT}'."

# Run the JavaScript autobuilder.
$JavaScriptAutoBuild = Join-Path $env:CODEQL_EXTRACTOR_JAVASCRIPT_ROOT 'tools\autobuild.cmd'
Write-Output "Running JavaScript autobuilder at '${JavaScriptAutoBuild}'."

# Copy the values of the Actions extractor environment variables to the JavaScript extractor environment variables.
$env:CODEQL_EXTRACTOR_JAVASCRIPT_DIAGNOSTIC_DIR = $env:CODEQL_EXTRACTOR_ACTIONS_DIAGNOSTIC_DIR
$env:CODEQL_EXTRACTOR_JAVASCRIPT_LOG_DIR = $env:CODEQL_EXTRACTOR_ACTIONS_LOG_DIR
$env:CODEQL_EXTRACTOR_JAVASCRIPT_SCRATCH_DIR = $env:CODEQL_EXTRACTOR_ACTIONS_SCRATCH_DIR
$env:CODEQL_EXTRACTOR_JAVASCRIPT_SOURCE_ARCHIVE_DIR = $env:CODEQL_EXTRACTOR_ACTIONS_SOURCE_ARCHIVE_DIR
$env:CODEQL_EXTRACTOR_JAVASCRIPT_TRAP_DIR = $env:CODEQL_EXTRACTOR_ACTIONS_TRAP_DIR
$env:CODEQL_EXTRACTOR_JAVASCRIPT_WIP_DATABASE = $env:CODEQL_EXTRACTOR_ACTIONS_WIP_DATABASE

&$JavaScriptAutoBuild
if ($LASTEXITCODE -ne 0) {
    throw "JavaScript autobuilder failed."
}
@@ -1,3 +0,0 @@
@echo off
rem All of the work is done in the PowerShell script
powershell.exe %~dp0autobuild-impl.ps1
@@ -1,39 +0,0 @@
#!/bin/sh

set -eu

DEFAULT_PATH_FILTERS=$(cat << END
exclude:**/*
include:.github/workflows/**/*.yml
include:.github/workflows/**/*.yaml
include:**/action.yml
include:**/action.yaml
END
)

if [ -n "${LGTM_INDEX_INCLUDE:-}" ] || [ -n "${LGTM_INDEX_EXCLUDE:-}" ] || [ -n "${LGTM_INDEX_FILTERS:-}" ] ; then
    echo "Path filters set. Passing them through to the JavaScript extractor."
else
    echo "No path filters set. Using the default filters."
    LGTM_INDEX_FILTERS="${DEFAULT_PATH_FILTERS}"
    export LGTM_INDEX_FILTERS
fi

# Find the JavaScript extractor directory via `codeql resolve extractor`.
CODEQL_EXTRACTOR_JAVASCRIPT_ROOT="$($CODEQL_DIST/codeql resolve extractor --language javascript)"
export CODEQL_EXTRACTOR_JAVASCRIPT_ROOT

echo "Found JavaScript extractor at '${CODEQL_EXTRACTOR_JAVASCRIPT_ROOT}'."

# Run the JavaScript autobuilder
JAVASCRIPT_AUTO_BUILD="${CODEQL_EXTRACTOR_JAVASCRIPT_ROOT}/tools/autobuild.sh"
echo "Running JavaScript autobuilder at '${JAVASCRIPT_AUTO_BUILD}'."

# Copy the values of the Actions extractor environment variables to the JavaScript extractor environment variables.
env CODEQL_EXTRACTOR_JAVASCRIPT_DIAGNOSTIC_DIR="${CODEQL_EXTRACTOR_ACTIONS_DIAGNOSTIC_DIR}" \
    CODEQL_EXTRACTOR_JAVASCRIPT_LOG_DIR="${CODEQL_EXTRACTOR_ACTIONS_LOG_DIR}" \
    CODEQL_EXTRACTOR_JAVASCRIPT_SCRATCH_DIR="${CODEQL_EXTRACTOR_ACTIONS_SCRATCH_DIR}" \
    CODEQL_EXTRACTOR_JAVASCRIPT_SOURCE_ARCHIVE_DIR="${CODEQL_EXTRACTOR_ACTIONS_SOURCE_ARCHIVE_DIR}" \
    CODEQL_EXTRACTOR_JAVASCRIPT_TRAP_DIR="${CODEQL_EXTRACTOR_ACTIONS_TRAP_DIR}" \
    CODEQL_EXTRACTOR_JAVASCRIPT_WIP_DATABASE="${CODEQL_EXTRACTOR_ACTIONS_WIP_DATABASE}" \
    ${JAVASCRIPT_AUTO_BUILD}
@@ -19,9 +19,10 @@ inputs:
    # If changing this, make sure to update workflow.ts accordingly.
    default: "always"
  cleanup-level:
    description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --cache-cleanup flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
    description: >-
      DEPRECATED. This option is ignored since, for performance reasons, the CodeQL Action no longer saves
      intermediate results during evaluation.
    required: false
    default: "brutal"
  ram:
    description: >-
      The amount of memory in MB that can be used by CodeQL for database finalization and query execution.

@@ -138,6 +138,7 @@ export default [
    rules: {
      "@typescript-eslint/no-explicit-any": "off",
      "@typescript-eslint/no-unsafe-assignment": "off",
      "@typescript-eslint/no-unsafe-enum-comparison": "off",
      "@typescript-eslint/no-unsafe-member-access": "off",
      "@typescript-eslint/no-var-requires": "off",
      "@typescript-eslint/prefer-regexp-exec": "off",

@@ -83,6 +83,9 @@ inputs:
  queries:
    description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
    required: false
  quality-queries:
    description: '[Internal] Comma-separated list of code quality queries to run.'
    required: false
  packs:
    description: >-
      Comma-separated list of packs to run. Reference a pack in the format `scope/name[@version]`. If `version` is not

30  justfile  Normal file
@@ -0,0 +1,30 @@
# Perform all working copy cleanup operations
all: lint sync

# Lint source typescript
lint:
    npm run lint-fix

# Sync generated files (javascript and PR checks)
sync: build update-pr-checks

# Perform all necessary steps to update the PR checks
update-pr-checks:
    pr-checks/sync.sh

# Transpile typescript code into javascript
build:
    npm run build

# Build then run all the tests
test: build
    npm run test

# Run the tests for a single file
test_file filename: build
    npx ava --verbose {{filename}}

[doc("Refresh the .js build artefacts in the lib directory")]
[confirm]
refresh-lib:
    rm -rf lib && npm run build
77  lib/actions-util.js  generated
@@ -49,10 +49,14 @@ exports.isDefaultSetup = isDefaultSetup;
exports.prettyPrintInvocation = prettyPrintInvocation;
exports.ensureEndsInPeriod = ensureEndsInPeriod;
exports.runTool = runTool;
exports.getPullRequestBranches = getPullRequestBranches;
exports.isAnalyzingPullRequest = isAnalyzingPullRequest;
exports.fixCodeQualityCategory = fixCodeQualityCategory;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const github = __importStar(require("@actions/github"));
const io = __importStar(require("@actions/io"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs, @typescript-eslint/no-require-imports
@@ -261,7 +265,7 @@ function prettyPrintInvocation(cmd, args) {
 * An error from a tool invocation, with associated exit code, stderr, etc.
 */
class CommandInvocationError extends Error {
    constructor(cmd, args, exitCode, stderr, stdout) {
    constructor(cmd, args, exitCode, stderr, stdout = "") {
        const prettyCommand = prettyPrintInvocation(cmd, args);
        const lastLine = ensureEndsInPeriod(stderr.trim().split("\n").pop()?.trim() || "n/a");
        super(`Failed to run "${prettyCommand}". ` +
@@ -352,4 +356,75 @@ const restoreInputs = function () {
    }
};
exports.restoreInputs = restoreInputs;
/**
 * Returns the base and head branches of the pull request being analyzed.
 *
 * @returns the base and head branches of the pull request, or undefined if
 * we are not analyzing a pull request.
 */
function getPullRequestBranches() {
    const pullRequest = github.context.payload.pull_request;
    if (pullRequest) {
        return {
            base: pullRequest.base.ref,
            // We use the head label instead of the head ref here, because the head
            // ref lacks owner information and by itself does not uniquely identify
            // the head branch (which may be in a forked repository).
            head: pullRequest.head.label,
        };
    }
    // PR analysis under Default Setup does not have the pull_request context,
    // but it should set CODE_SCANNING_REF and CODE_SCANNING_BASE_BRANCH.
    const codeScanningRef = process.env.CODE_SCANNING_REF;
    const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH;
    if (codeScanningRef && codeScanningBaseBranch) {
        return {
            base: codeScanningBaseBranch,
            // PR analysis under Default Setup analyzes the PR head commit instead of
            // the merge commit, so we can use the provided ref directly.
            head: codeScanningRef,
        };
    }
    return undefined;
}
/**
 * Returns whether we are analyzing a pull request.
 */
function isAnalyzingPullRequest() {
    return getPullRequestBranches() !== undefined;
}
/**
 * A workaround for code quality to map category names from old default setup workflows
 * to ones that the code quality service expects.
 */
const qualityCategoryMapping = {
    "c#": "csharp",
    cpp: "c-cpp",
    c: "c-cpp",
    "c++": "c-cpp",
    java: "java-kotlin",
    javascript: "javascript-typescript",
    typescript: "javascript-typescript",
    kotlin: "java-kotlin",
};
/** Adjusts the category string for a Code Quality SARIF file if an "old"
 * category identifier is used by Default Setup.
 */
function fixCodeQualityCategory(logger, category) {
    // The `category` should always be set by Default Setup. We perform this check
    // to avoid potential issues if Code Quality supports Advanced Setup in the future
    // and before this workaround is removed.
    if (category !== undefined &&
        isDefaultSetup() &&
        category.startsWith("/language:")) {
        const language = category.substring("/language:".length);
        const mappedLanguage = qualityCategoryMapping[language];
        if (mappedLanguage) {
            const newCategory = `/language:${mappedLanguage}`;
            logger.info(`Adjusted category for Code Quality from '${category}' to '${newCategory}'.`);
            return newCategory;
        }
    }
    return category;
}
//# sourceMappingURL=actions-util.js.map
File diff suppressed because one or more lines are too long
160  lib/actions-util.test.js  generated
@@ -1,14 +1,79 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const environment_1 = require("./environment");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
function withMockedContext(mockPayload, testFn) {
    const originalPayload = github.context.payload;
    github.context.payload = mockPayload;
    try {
        return testFn();
    }
    finally {
        github.context.payload = originalPayload;
    }
}
function withMockedEnv(envVars, testFn) {
    const originalEnv = { ...process.env };
    // Apply environment changes
    for (const [key, value] of Object.entries(envVars)) {
        if (value === undefined) {
            delete process.env[key];
        }
        else {
            process.env[key] = value;
        }
    }
    try {
        return testFn();
    }
    finally {
        // Restore original environment
        process.env = originalEnv;
    }
}
(0, ava_1.default)("computeAutomationID()", async (t) => {
    let actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
    t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
@@ -25,8 +90,103 @@ const util_1 = require("./util");
    actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", undefined);
    t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
});
(0, ava_1.default)("getPullRequestBranches() with pull request context", (t) => {
    withMockedContext({
        pull_request: {
            number: 123,
            base: { ref: "main" },
            head: { label: "user:feature-branch" },
        },
    }, () => {
        t.deepEqual((0, actions_util_1.getPullRequestBranches)(), {
            base: "main",
            head: "user:feature-branch",
        });
        t.is((0, actions_util_1.isAnalyzingPullRequest)(), true);
    });
});
(0, ava_1.default)("getPullRequestBranches() returns undefined with push context", (t) => {
    withMockedContext({
        push: {
            ref: "refs/heads/main",
        },
    }, () => {
        t.is((0, actions_util_1.getPullRequestBranches)(), undefined);
        t.is((0, actions_util_1.isAnalyzingPullRequest)(), false);
    });
});
(0, ava_1.default)("getPullRequestBranches() with Default Setup environment variables", (t) => {
    withMockedContext({}, () => {
        withMockedEnv({
            CODE_SCANNING_REF: "refs/heads/feature-branch",
            CODE_SCANNING_BASE_BRANCH: "main",
        }, () => {
            t.deepEqual((0, actions_util_1.getPullRequestBranches)(), {
                base: "main",
                head: "refs/heads/feature-branch",
            });
            t.is((0, actions_util_1.isAnalyzingPullRequest)(), true);
        });
    });
});
(0, ava_1.default)("getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set", (t) => {
    withMockedContext({}, () => {
        withMockedEnv({
            CODE_SCANNING_REF: "refs/heads/feature-branch",
            CODE_SCANNING_BASE_BRANCH: undefined,
        }, () => {
            t.is((0, actions_util_1.getPullRequestBranches)(), undefined);
            t.is((0, actions_util_1.isAnalyzingPullRequest)(), false);
        });
    });
});
(0, ava_1.default)("getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set", (t) => {
    withMockedContext({}, () => {
        withMockedEnv({
            CODE_SCANNING_REF: undefined,
            CODE_SCANNING_BASE_BRANCH: "main",
        }, () => {
            t.is((0, actions_util_1.getPullRequestBranches)(), undefined);
            t.is((0, actions_util_1.isAnalyzingPullRequest)(), false);
        });
    });
});
(0, ava_1.default)("getPullRequestBranches() returns undefined when no PR context", (t) => {
    withMockedContext({}, () => {
        withMockedEnv({
            CODE_SCANNING_REF: undefined,
            CODE_SCANNING_BASE_BRANCH: undefined,
        }, () => {
            t.is((0, actions_util_1.getPullRequestBranches)(), undefined);
            t.is((0, actions_util_1.isAnalyzingPullRequest)(), false);
        });
    });
});
(0, ava_1.default)("initializeEnvironment", (t) => {
    (0, util_1.initializeEnvironment)("1.2.3");
    t.deepEqual(process.env[environment_1.EnvVar.VERSION], "1.2.3");
});
(0, ava_1.default)("fixCodeQualityCategory", (t) => {
    withMockedEnv({
        GITHUB_EVENT_NAME: "dynamic",
    }, () => {
        const logger = (0, logging_1.getRunnerLogger)(true);
        // Categories that should get adjusted.
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:c#"), "/language:csharp");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:cpp"), "/language:c-cpp");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:c"), "/language:c-cpp");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:java"), "/language:java-kotlin");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:javascript"), "/language:javascript-typescript");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:typescript"), "/language:javascript-typescript");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:kotlin"), "/language:java-kotlin");
        // Categories that should not get adjusted.
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:csharp"), "/language:csharp");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:go"), "/language:go");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "/language:actions"), "/language:actions");
        // Other cases.
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, undefined), undefined);
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "random string"), "random string");
        t.is((0, actions_util_1.fixCodeQualityCategory)(logger, "kotlin"), "kotlin");
    });
});
//# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
5  lib/analyze-action-env.test.js  generated
@@ -68,6 +68,7 @@ const util = __importStar(require("./util"));
    };
    sinon.stub(configUtils, "getConfig").resolves({
        gitHubVersion,
        augmentationProperties: {},
        languages: [],
        packs: [],
        trapCaches: {},
@@ -75,8 +76,8 @@ const util = __importStar(require("./util"));
    const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
    requiredInputStub.withArgs("token").returns("fake-token");
    requiredInputStub.withArgs("upload-database").returns("false");
    requiredInputStub.withArgs("output").returns("out");
    const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
    optionalInputStub.withArgs("cleanup-level").returns("none");
    optionalInputStub.withArgs("expect-error").returns("false");
    sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
    (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -95,8 +96,10 @@ const util = __importStar(require("./util"));
    // runFinalize and runQueries are correctly captured by spies, we explicitly
    // wait for the action promise to complete before starting verification.
    await analyzeAction.runPromise;
    t.assert(runFinalizeStub.calledOnce);
    t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
    t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
    t.assert(runQueriesStub.calledOnce);
    t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
    t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
});

@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEhE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEhE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,sBAAsB,EAAE,EAAE;YAC1B,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACpD,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,MAAM,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC;QACrC,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC;QACpC,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
5  lib/analyze-action-input.test.js  generated
@@ -67,6 +67,7 @@ const util = __importStar(require("./util"));
    };
    sinon.stub(configUtils, "getConfig").resolves({
        gitHubVersion,
        augmentationProperties: {},
        languages: [],
        packs: [],
        trapCaches: {},
@@ -74,8 +75,8 @@ const util = __importStar(require("./util"));
    const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
    requiredInputStub.withArgs("token").returns("fake-token");
    requiredInputStub.withArgs("upload-database").returns("false");
    requiredInputStub.withArgs("output").returns("out");
    const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
    optionalInputStub.withArgs("cleanup-level").returns("none");
    optionalInputStub.withArgs("expect-error").returns("false");
    sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
@@ -95,8 +96,10 @@ const util = __importStar(require("./util"));
    // runFinalize and runQueries are correctly captured by spies, we explicitly
    // wait for the action promise to complete before starting verification.
    await analyzeAction.runPromise;
    t.assert(runFinalizeStub.calledOnce);
    t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
    t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
    t.assert(runQueriesStub.calledOnce);
    t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
    t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
});

@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAChE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,sBAAsB,EAAE,EAAE;YAC1B,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACpD,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAChE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,MAAM,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC;QACrC,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC;QACpC,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
16  lib/analyze-action-post.js  generated
@@ -38,12 +38,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
 * It will run after the all steps in this job, in reverse order in relation to
 * other `post:` hooks.
 */
const fs = __importStar(require("fs"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const dependency_caching_1 = require("./dependency-caching");
const environment_1 = require("./environment");
const logging_1 = require("./logging");
const util_1 = require("./util");
@@ -60,7 +62,19 @@ async function runWrapper() {
        if (config !== undefined) {
            const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
            const version = await codeql.getVersion();
            await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, version.version));
            await debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, version.version);
        }
    }
    // If we analysed Java in build-mode: none, we may have downloaded dependencies
    // to the temp directory. Clean these up so they don't persist unnecessarily
    // long on self-hosted runners.
    const javaTempDependencyDir = (0, dependency_caching_1.getJavaTempDependencyDir)();
    if (fs.existsSync(javaTempDependencyDir)) {
        try {
            fs.rmSync(javaTempDependencyDir, { recursive: true });
        }
        catch (error) {
            logger.info(`Failed to remove temporary Java dependencies directory: ${(0, util_1.getErrorMessage)(error)}`);
        }
    }
}

@@ -1 +1 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,qCAAqC;AACrC,iDAA2C;AAC3C,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;gBACjD,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;gBAC1C,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CACzC,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,EACzB,OAAO,CAAC,OAAO,CAChB,CACF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,uCAAyB;AAEzB,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,qCAAqC;AACrC,iDAA2C;AAC3C,kEAAoD;AACpD,6DAAgE;AAChE,+CAAuC;AACvC,uCAA6C;AAC7C,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;gBACjD,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;gBAC1C,MAAM,cAAc,CAAC,4BAA4B,CAC/C,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,EACzB,OAAO,CAAC,OAAO,CAChB,CAAC;YACJ,CAAC;QACH,CAAC;QAED,+EAA+E;QAC/E,4EAA4E;QAC5E,+BAA+B;QAC/B,MAAM,qBAAqB,GAAG,IAAA,6CAAwB,GAAE,CAAC;QACzD,IAAI,EAAE,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE,CAAC;YACzC,IAAI,CAAC;gBACH,EAAE,CAAC,MAAM,CAAC,qBAAqB,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YACxD,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,MAAM,CAAC,IAAI,CACT,2DAA2D,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACpF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
47  lib/analyze-action.js  generated
@@ -41,7 +41,6 @@ const fs = __importStar(require("fs"));
const path_1 = __importDefault(require("path"));
const perf_hooks_1 = require("perf_hooks");
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const api_client_1 = require("./api-client");
@@ -51,10 +50,12 @@ const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const dependency_caching_1 = require("./dependency-caching");
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const overlay_database_utils_1 = require("./overlay-database-utils");
const repository_1 = require("./repository");
const statusReport = __importStar(require("./status-report"));
const status_report_1 = require("./status-report");
@@ -94,8 +95,8 @@ function hasBadExpectErrorInput() {
 * indicating whether Go extraction has extracted at least one file.
 */
function doesGoExtractionOutputExist(config) {
    const golangDbDirectory = util.getCodeQLDatabasePath(config, languages_1.Language.go);
    const trapDirectory = path_1.default.join(golangDbDirectory, "trap", languages_1.Language.go);
    const golangDbDirectory = util.getCodeQLDatabasePath(config, languages_1.KnownLanguage.go);
    const trapDirectory = path_1.default.join(golangDbDirectory, "trap", languages_1.KnownLanguage.go);
    return (fs.existsSync(trapDirectory) &&
        fs
            .readdirSync(trapDirectory)
@@ -122,7 +123,7 @@ function doesGoExtractionOutputExist(config) {
 * whether any extraction output already exists for Go.
 */
async function runAutobuildIfLegacyGoWorkflow(config, logger) {
    if (!config.languages.includes(languages_1.Language.go)) {
    if (!config.languages.includes(languages_1.KnownLanguage.go)) {
        return;
    }
    if (config.buildMode) {
@@ -133,7 +134,7 @@ async function runAutobuildIfLegacyGoWorkflow(config, logger) {
        logger.debug("Won't run Go autobuild since it has already been run.");
        return;
    }
    if ((0, analyze_1.dbIsFinalized)(config, languages_1.Language.go, logger)) {
    if ((0, analyze_1.dbIsFinalized)(config, languages_1.KnownLanguage.go, logger)) {
        logger.debug("Won't run Go autobuild since there is already a finalized database for Go.");
        return;
    }
@@ -148,7 +149,7 @@ async function runAutobuildIfLegacyGoWorkflow(config, logger) {
        return;
    }
    logger.debug("Running Go autobuild because extraction output (TRAP files) for Go code has not been found.");
    await (0, autobuild_1.runAutobuild)(config, languages_1.Language.go, logger);
    await (0, autobuild_1.runAutobuild)(config, languages_1.KnownLanguage.go, logger);
}
async function run() {
    const startedAt = new Date();
@@ -185,26 +186,28 @@ async function run() {
        if (hasBadExpectErrorInput()) {
            throw new util.ConfigurationError("`expect-error` input parameter is for internal use only. It should only be set by codeql-action or a fork.");
        }
        if (actionsUtil.getOptionalInput("cleanup-level") !== "") {
            logger.info("The 'cleanup-level' input is ignored since the CodeQL Action now automatically " +
                "manages database cleanup. This input can safely be removed from your workflow.");
        }
        const apiDetails = (0, api_client_1.getApiDetails)();
        const outputDir = actionsUtil.getRequiredInput("output");
        core.exportVariable(environment_1.EnvVar.SARIF_RESULTS_OUTPUT_DIR, outputDir);
        const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
        const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
        const repositoryNwo = (0, repository_1.getRepositoryNwo)();
        const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
        util.checkActionVersion(actionsUtil.getActionVersion(), gitHubVersion);
        const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
        const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], logger);
        const pull_request = github.context.payload.pull_request;
        const diffRangePackDir = pull_request &&
            (await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.label, codeql, logger, features));
        const branches = await (0, diff_informed_analysis_utils_1.getDiffInformedAnalysisBranches)(codeql, features, logger);
        const diffRangePackDir = branches
            ? await (0, analyze_1.setupDiffInformedQueryRun)(branches, logger)
            : undefined;
        await (0, analyze_1.warnIfGoInstalledAfterInit)(config, logger);
        await runAutobuildIfLegacyGoWorkflow(config, logger);
        dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, codeql, config, logger);
        if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
            runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, diffRangePackDir, actionsUtil.getOptionalInput("category"), config, logger, features);
        }
        if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
            await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
            runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, diffRangePackDir, actionsUtil.getOptionalInput("category"), codeql, config, logger, features);
        }
        const dbLocations = {};
        for (const language of config.languages) {
@@ -214,14 +217,22 @@ async function run() {
        core.setOutput("sarif-output", path_1.default.resolve(outputDir));
        const uploadInput = actionsUtil.getOptionalInput("upload");
        if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
            uploadResult = await uploadLib.uploadFiles(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), features, logger);
            uploadResult = await uploadLib.uploadFiles(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), features, logger, uploadLib.CodeScanningTarget);
            core.setOutput("sarif-id", uploadResult.sarifID);
            if (config.augmentationProperties.qualityQueriesInput !== undefined) {
                const qualityUploadResult = await uploadLib.uploadFiles(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.fixCodeQualityCategory(logger, actionsUtil.getOptionalInput("category")), features, logger, uploadLib.CodeQualityTarget);
                core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
            }
        }
        else {
            logger.info("Not uploading results");
        }
        // Possibly upload the database bundles for remote queries
        await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
        // Possibly upload the overlay-base database to actions cache.
        // If databases are to be uploaded, they will first be cleaned up at the overlay level.
        await (0, overlay_database_utils_1.uploadOverlayBaseDatabaseToCache)(codeql, config, logger);
        // Possibly upload the database bundles for remote queries.
        // If databases are to be uploaded, they will first be cleaned up at the clear level.
        await (0, database_upload_1.uploadDatabases)(repositoryNwo, codeql, config, apiDetails, logger);
        // Possibly upload the TRAP caches for later re-use
        const trapCacheUploadStartTime = perf_hooks_1.performance.now();
        didUploadTrapCaches = await (0, trap_caching_1.uploadTrapCaches)(codeql, config, logger);
@@ -238,7 +249,7 @@ async function run() {
        }
        else if (uploadResult !== undefined &&
            actionsUtil.getRequiredInput("wait-for-processing") === "true") {
            await uploadLib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
            await uploadLib.waitForProcessing((0, repository_1.getRepositoryNwo)(), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
        }
        // If we did not throw an error yet here, but we expect one, throw it.
        if (actionsUtil.getOptionalInput("expect-error") === "true") {

File diff suppressed because one or more lines are too long
lib/analyze.js (generated): 194 changed lines
@@ -36,32 +36,33 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.exportedForTesting = exports.CodeQLAnalysisError = void 0;
|
||||
exports.exportedForTesting = exports.defaultSuites = exports.CodeQLAnalysisError = void 0;
|
||||
exports.runExtraction = runExtraction;
|
||||
exports.dbIsFinalized = dbIsFinalized;
|
||||
exports.setupDiffInformedQueryRun = setupDiffInformedQueryRun;
|
||||
exports.resolveQuerySuiteAlias = resolveQuerySuiteAlias;
|
||||
exports.runQueries = runQueries;
|
||||
exports.runFinalize = runFinalize;
|
||||
exports.warnIfGoInstalledAfterInit = warnIfGoInstalledAfterInit;
|
||||
exports.runCleanup = runCleanup;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const perf_hooks_1 = require("perf_hooks");
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api_client_1 = require("./api-client");
|
||||
const autobuild_1 = require("./autobuild");
|
||||
const codeql_1 = require("./codeql");
|
||||
const dependency_caching_1 = require("./dependency-caching");
|
||||
const diagnostics_1 = require("./diagnostics");
|
||||
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
|
||||
const environment_1 = require("./environment");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const tools_features_1 = require("./tools-features");
|
||||
const overlay_database_utils_1 = require("./overlay-database-utils");
|
||||
const repository_1 = require("./repository");
|
||||
const tracer_config_1 = require("./tracer-config");
|
||||
const upload_lib_1 = require("./upload-lib");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
class CodeQLAnalysisError extends Error {
|
||||
@@ -90,17 +91,25 @@ async function runExtraction(codeql, config, logger) {
|
||||
logger.debug(`Database for ${language} has already been finalized, skipping extraction.`);
|
||||
continue;
|
||||
}
|
||||
if (shouldExtractLanguage(config, language)) {
|
||||
if (await shouldExtractLanguage(codeql, config, language)) {
|
||||
logger.startGroup(`Extracting ${language}`);
|
||||
if (language === languages_1.Language.python) {
|
||||
if (language === languages_1.KnownLanguage.python) {
|
||||
await setupPythonExtractor(logger);
|
||||
}
|
||||
if (config.buildMode &&
|
||||
(await codeql.supportsFeature(tools_features_1.ToolsFeature.TraceCommandUseBuildMode))) {
|
||||
if (language === languages_1.Language.cpp &&
|
||||
if (config.buildMode) {
|
||||
if (language === languages_1.KnownLanguage.cpp &&
|
||||
config.buildMode === util_1.BuildMode.Autobuild) {
|
||||
await (0, autobuild_1.setupCppAutobuild)(codeql, logger);
|
||||
}
|
||||
// The Java `build-mode: none` extractor places dependencies (.jar files) in the
|
||||
// database scratch directory by default. For dependency caching purposes, we want
|
||||
// a stable path that caches can be restored into and that we can cache at the
|
||||
// end of the workflow (i.e. that does not get removed when the scratch directory is).
|
||||
if (language === languages_1.KnownLanguage.java &&
|
||||
config.buildMode === util_1.BuildMode.None) {
|
||||
process.env["CODEQL_EXTRACTOR_JAVA_OPTION_BUILDLESS_DEPENDENCY_DIR"] =
|
||||
(0, dependency_caching_1.getJavaTempDependencyDir)();
|
||||
}
|
||||
await codeql.extractUsingBuildMode(config, language);
|
||||
}
|
||||
else {
|
||||
@@ -110,11 +119,11 @@ async function runExtraction(codeql, config, logger) {
|
||||
}
|
||||
}
|
||||
}
|
||||
function shouldExtractLanguage(config, language) {
|
||||
async function shouldExtractLanguage(codeql, config, language) {
|
||||
return (config.buildMode === util_1.BuildMode.None ||
|
||||
(config.buildMode === util_1.BuildMode.Autobuild &&
|
||||
process.env[environment_1.EnvVar.AUTOBUILD_DID_COMPLETE_SUCCESSFULLY] !== "true") ||
|
||||
(!config.buildMode && (0, languages_1.isScannedLanguage)(language)));
|
||||
(!config.buildMode && (await codeql.isScannedLanguage(language))));
|
||||
}
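A minimal standalone sketch of the extraction decision above, with string literals standing in for the BuildMode enum and the scanned-language check:
// Extraction runs for build-mode "none", for an autobuild that did not complete,
// and for scanned (non-compiled) languages when no build mode is set.
function shouldExtractSketch(buildMode, autobuildCompleted, isScannedLanguage) {
    return (buildMode === "none" ||
        (buildMode === "autobuild" && !autobuildCompleted) ||
        (!buildMode && isScannedLanguage));
}
console.log(shouldExtractSketch("none", false, false));      // true
console.log(shouldExtractSketch("autobuild", true, false));   // false: the autobuilder already extracted
console.log(shouldExtractSketch(undefined, false, true));     // true: legacy workflow, scanned language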
|
||||
function dbIsFinalized(config, language, logger) {
|
||||
const dbPath = util.getCodeQLDatabasePath(config, language);
|
||||
@@ -151,21 +160,13 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag,
|
||||
/**
|
||||
* Set up the diff-informed analysis feature.
|
||||
*
|
||||
* @param baseRef The base branch name, used for calculating the diff range.
|
||||
* @param headLabel The label that uniquely identifies the head branch across
|
||||
* repositories, used for calculating the diff range.
|
||||
* @param codeql
|
||||
* @param logger
|
||||
* @param features
|
||||
* @returns Absolute path to the directory containing the extension pack for
|
||||
* the diff range information, or `undefined` if the feature is disabled.
|
||||
*/
|
||||
async function setupDiffInformedQueryRun(baseRef, headLabel, codeql, logger, features) {
|
||||
if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
|
||||
return undefined;
|
||||
}
|
||||
async function setupDiffInformedQueryRun(branches, logger) {
|
||||
return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
|
||||
const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headLabel, logger);
|
||||
logger.info(`Calculating diff ranges for ${branches.base}...${branches.head}`);
|
||||
const diffRanges = await getPullRequestEditedDiffRanges(branches, logger);
|
||||
const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
|
||||
if (packDir === undefined) {
|
||||
logger.warning("Cannot create diff range extension pack for diff-informed queries; " +
|
||||
@@ -180,17 +181,15 @@ async function setupDiffInformedQueryRun(baseRef, headLabel, codeql, logger, fea
|
||||
/**
|
||||
* Return the file line ranges that were added or modified in the pull request.
|
||||
*
|
||||
* @param baseRef The base branch name, used for calculating the diff range.
|
||||
* @param headLabel The label that uniquely identifies the head branch across
|
||||
* repositories, used for calculating the diff range.
|
||||
* @param branches The base and head branches of the pull request.
|
||||
* @param logger
|
||||
* @returns An array of tuples, where each tuple contains the absolute path of a
|
||||
* file, the start line and the end line (both 1-based and inclusive) of an
|
||||
* added or modified range in that file. Returns `undefined` if the action was
|
||||
* not triggered by a pull request or if there was an error.
|
||||
*/
|
||||
async function getPullRequestEditedDiffRanges(baseRef, headLabel, logger) {
|
||||
const fileDiffs = await getFileDiffsWithBasehead(baseRef, headLabel, logger);
|
||||
async function getPullRequestEditedDiffRanges(branches, logger) {
|
||||
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
|
||||
if (fileDiffs === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
@@ -213,15 +212,15 @@ async function getPullRequestEditedDiffRanges(baseRef, headLabel, logger) {
|
||||
}
|
||||
return results;
|
||||
}
|
||||
async function getFileDiffsWithBasehead(baseRef, headLabel, logger) {
|
||||
const ownerRepo = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
||||
const owner = ownerRepo[0];
|
||||
const repo = ownerRepo[1];
|
||||
const basehead = `${baseRef}...${headLabel}`;
|
||||
async function getFileDiffsWithBasehead(branches, logger) {
|
||||
// Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
|
||||
// to GITHUB_REPOSITORY.
|
||||
const repositoryNwo = (0, repository_1.getRepositoryNwoFromEnv)("CODE_SCANNING_REPOSITORY", "GITHUB_REPOSITORY");
|
||||
const basehead = `${branches.base}...${branches.head}`;
|
||||
try {
|
||||
const response = await (0, api_client_1.getApiClient)().rest.repos.compareCommitsWithBasehead({
|
||||
owner,
|
||||
repo,
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
basehead,
|
||||
per_page: 1,
|
||||
});
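The repository fallback used above can be approximated as follows; the real getRepositoryNwoFromEnv lives in repository.ts, so this is only a sketch of its assumed behaviour:
// Approximate behaviour: take the first non-empty env var, then split "owner/repo".
function getRepositoryNwoFromEnvSketch(...envVarNames) {
    const value = envVarNames.map((name) => process.env[name]).find((v) => v);
    if (!value) {
        throw new Error(`None of ${envVarNames.join(", ")} are set`);
    }
    const [owner, repo] = value.split("/");
    return { owner, repo };
}
// getRepositoryNwoFromEnvSketch("CODE_SCANNING_REPOSITORY", "GITHUB_REPOSITORY")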
|
||||
@@ -247,7 +246,7 @@ function getDiffRanges(fileDiff, logger) {
|
||||
// uses forward slashes as the path separator, so on Windows we need to
|
||||
// replace any backslashes with forward slashes.
|
||||
const filename = path
|
||||
.join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
|
||||
.join((0, actions_util_1.getRequiredInput)("checkout_path"), fileDiff.filename)
|
||||
.replaceAll(path.sep, "/");
|
||||
if (fileDiff.patch === undefined) {
|
||||
if (fileDiff.changes === 0) {
|
||||
@@ -333,8 +332,21 @@ function writeDiffRangeDataExtensionPack(logger, ranges) {
|
||||
if (ranges === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
const diffRangeDir = path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range");
|
||||
fs.mkdirSync(diffRangeDir);
|
||||
if (ranges.length === 0) {
|
||||
// An empty diff range means that there are no added or modified lines in
|
||||
// the pull request. But the `restrictAlertsTo` extensible predicate
|
||||
// interprets an empty data extension differently, as an indication that
|
||||
// all alerts should be included. So we need to specifically set the diff
|
||||
// range to a non-empty list that cannot match any alert location.
|
||||
ranges = [{ path: "", startLine: 0, endLine: 0 }];
|
||||
}
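A small sketch of why the sentinel is needed, assuming each data-extension row is a [path, startLine, endLine] tuple as in the extension pack generated below:
// An empty row set would tell restrictAlertsTo to keep every alert, so an
// impossible location is emitted instead when the PR adds or modifies nothing.
const emptyRanges = [];
const rows = (emptyRanges.length === 0
    ? [{ path: "", startLine: 0, endLine: 0 }]
    : emptyRanges).map((r) => [r.path, r.startLine, r.endLine]);
console.log(rows); // [ [ "", 0, 0 ] ] rather than [], which would mean "keep all alerts"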
|
||||
const diffRangeDir = path.join((0, actions_util_1.getTemporaryDirectory)(), "pr-diff-range");
|
||||
// We expect the Actions temporary directory to already exist, so we are mainly
|
||||
// using `recursive: true` to avoid errors if the directory already exists,
|
||||
// for example if the analyze Action is run multiple times in the same job.
|
||||
// This is not really something that is supported, but we make use of it in
|
||||
// tests.
|
||||
fs.mkdirSync(diffRangeDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(diffRangeDir, "qlpack.yml"), `
|
||||
name: codeql-action/pr-diff-range
|
||||
version: 0.0.0
|
||||
@@ -349,6 +361,7 @@ extensions:
|
||||
- addsTo:
|
||||
pack: codeql/util
|
||||
extensible: restrictAlertsTo
|
||||
checkPresence: false
|
||||
data:
|
||||
`;
|
||||
let data = ranges
|
||||
@@ -368,26 +381,73 @@ extensions:
|
||||
const extensionFilePath = path.join(diffRangeDir, "pr-diff-range.yml");
|
||||
fs.writeFileSync(extensionFilePath, extensionContents);
|
||||
logger.debug(`Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`);
|
||||
// Write the diff ranges to a JSON file, for action-side alert filtering by the
|
||||
// upload-lib module.
|
||||
(0, diff_informed_analysis_utils_1.writeDiffRangesJsonFile)(logger, ranges);
|
||||
return diffRangeDir;
|
||||
}
|
||||
// A set of default query suite names that are understood by the CLI.
|
||||
exports.defaultSuites = new Set([
|
||||
"security-experimental",
|
||||
"security-extended",
|
||||
"security-and-quality",
|
||||
"code-quality",
|
||||
"code-scanning",
|
||||
]);
|
||||
/**
|
||||
* If `maybeSuite` is the name of a default query suite, it is resolved into the corresponding
|
||||
* query suite name for the given `language`. Otherwise, `maybeSuite` is returned as is.
|
||||
*
|
||||
* @param language The language for which to resolve the default query suite name.
|
||||
* @param maybeSuite The string that potentially contains the name of a default query suite.
|
||||
* @returns Returns the resolved query suite name, or the unmodified input.
|
||||
*/
|
||||
function resolveQuerySuiteAlias(language, maybeSuite) {
|
||||
if (exports.defaultSuites.has(maybeSuite)) {
|
||||
return `${language}-${maybeSuite}.qls`;
|
||||
}
|
||||
return maybeSuite;
|
||||
}
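A short usage sketch of the alias resolution above, with defaultSuites mirroring the set defined earlier in this file:
// Default suite names become language-specific .qls files; anything else passes through.
const defaultSuites = new Set([
    "security-experimental", "security-extended", "security-and-quality",
    "code-quality", "code-scanning",
]);
const resolve = (language, maybeSuite) =>
    defaultSuites.has(maybeSuite) ? `${language}-${maybeSuite}.qls` : maybeSuite;
console.log(resolve("go", "security-extended"));     // "go-security-extended.qls"
console.log(resolve("go", "codeql/go-queries@1.0")); // unchanged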
|
||||
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, diffRangePackDir, automationDetailsId, config, logger, features) {
|
||||
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, diffRangePackDir, automationDetailsId, codeql, config, logger, features) {
|
||||
const statusReport = {};
|
||||
const queryFlags = [memoryFlag, threadsFlag];
|
||||
const incrementalMode = [];
|
||||
// Preserve cached intermediate results for overlay-base databases.
|
||||
if (config.augmentationProperties.overlayDatabaseMode !==
|
||||
overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
|
||||
queryFlags.push("--expect-discarded-cache");
|
||||
}
|
||||
statusReport.analysis_is_diff_informed = diffRangePackDir !== undefined;
|
||||
const dataExtensionFlags = diffRangePackDir
|
||||
? [
|
||||
`--additional-packs=${diffRangePackDir}`,
|
||||
"--extension-packs=codeql-action/pr-diff-range",
|
||||
]
|
||||
: [];
|
||||
const sarifRunPropertyFlag = diffRangePackDir
|
||||
? "--sarif-run-property=incrementalMode=diff-informed"
|
||||
if (diffRangePackDir) {
|
||||
queryFlags.push(`--additional-packs=${diffRangePackDir}`);
|
||||
queryFlags.push("--extension-packs=codeql-action/pr-diff-range");
|
||||
incrementalMode.push("diff-informed");
|
||||
}
|
||||
statusReport.analysis_is_overlay =
|
||||
config.augmentationProperties.overlayDatabaseMode ===
|
||||
overlay_database_utils_1.OverlayDatabaseMode.Overlay;
|
||||
statusReport.analysis_builds_overlay_base_database =
|
||||
config.augmentationProperties.overlayDatabaseMode ===
|
||||
overlay_database_utils_1.OverlayDatabaseMode.OverlayBase;
|
||||
if (config.augmentationProperties.overlayDatabaseMode ===
|
||||
overlay_database_utils_1.OverlayDatabaseMode.Overlay) {
|
||||
incrementalMode.push("overlay");
|
||||
}
|
||||
const sarifRunPropertyFlag = incrementalMode.length > 0
|
||||
? `--sarif-run-property=incrementalMode=${incrementalMode.join(",")}`
|
||||
: undefined;
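For example, when both incremental modes are active, the assembled run property flag looks like this:
// Sketch: diff-informed and overlay analysis both enabled.
const incrementalModeExample = ["diff-informed", "overlay"];
const flagExample = incrementalModeExample.length > 0
    ? `--sarif-run-property=incrementalMode=${incrementalModeExample.join(",")}`
    : undefined;
console.log(flagExample); // --sarif-run-property=incrementalMode=diff-informed,overlay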
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const queryFlags = [memoryFlag, threadsFlag, ...dataExtensionFlags];
|
||||
for (const language of config.languages) {
|
||||
try {
|
||||
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
|
||||
const queries = [];
|
||||
if (config.augmentationProperties.qualityQueriesInput !== undefined) {
|
||||
queries.push(path.join(util.getCodeQLDatabasePath(config, language), "temp", "config-queries.qls"));
|
||||
for (const qualityQuery of config.augmentationProperties
|
||||
.qualityQueriesInput) {
|
||||
queries.push(resolveQuerySuiteAlias(language, qualityQuery.uses));
|
||||
}
|
||||
}
|
||||
// The work needed to generate the query suites
|
||||
// is done in the CLI. We just need to make a single
|
||||
// call to run all the queries for each language and
|
||||
@@ -395,7 +455,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
||||
logger.startGroup(`Running queries for ${language}`);
|
||||
const startTimeRunQueries = new Date().getTime();
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
await codeql.databaseRunQueries(databasePath, queryFlags);
|
||||
await codeql.databaseRunQueries(databasePath, queryFlags, queries);
|
||||
logger.debug(`Finished running queries for ${language}.`);
|
||||
// TODO should not be using `builtin` here. We should be using `all` instead.
|
||||
// The status report does not support `all` yet.
|
||||
@@ -403,14 +463,24 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
||||
new Date().getTime() - startTimeRunQueries;
|
||||
logger.startGroup(`Interpreting results for ${language}`);
|
||||
const startTimeInterpretResults = new Date();
|
||||
const analysisSummary = await runInterpretResults(language, undefined, sarifFile, config.debugMode);
|
||||
const analysisSummary = await runInterpretResults(language, undefined, sarifFile, config.debugMode, automationDetailsId);
|
||||
let qualityAnalysisSummary;
|
||||
if (config.augmentationProperties.qualityQueriesInput !== undefined) {
|
||||
logger.info(`Interpreting quality results for ${language}`);
|
||||
const qualityCategory = (0, actions_util_1.fixCodeQualityCategory)(logger, automationDetailsId);
|
||||
const qualitySarifFile = path.join(sarifFolder, `${language}.quality.sarif`);
|
||||
qualityAnalysisSummary = await runInterpretResults(language, config.augmentationProperties.qualityQueriesInput.map((i) => resolveQuerySuiteAlias(language, i.uses)), qualitySarifFile, config.debugMode, qualityCategory);
|
||||
}
|
||||
const endTimeInterpretResults = new Date();
|
||||
statusReport[`interpret_results_${language}_duration_ms`] =
|
||||
endTimeInterpretResults.getTime() - startTimeInterpretResults.getTime();
|
||||
logger.endGroup();
|
||||
logger.info(analysisSummary);
|
||||
if (qualityAnalysisSummary) {
|
||||
logger.info(qualityAnalysisSummary);
|
||||
}
|
||||
if (await features.getValue(feature_flags_1.Feature.QaTelemetryEnabled)) {
|
||||
const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile, logger);
|
||||
const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile);
|
||||
const perQueryAlertCountEventReport = {
|
||||
event: "codeql database interpret-results",
|
||||
started_at: startTimeInterpretResults.toISOString(),
|
||||
@@ -433,13 +503,12 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
||||
}
|
||||
}
|
||||
return statusReport;
|
||||
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
|
||||
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging, category) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", sarifRunPropertyFlag, automationDetailsId, config, features);
|
||||
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", sarifRunPropertyFlag, category, config, features);
|
||||
}
|
||||
/** Get an object with all queries and their counts parsed from a SARIF file path. */
|
||||
function getPerQueryAlertCounts(sarifPath, log) {
|
||||
(0, upload_lib_1.validateSarifFileSchema)(sarifPath, log);
|
||||
function getPerQueryAlertCounts(sarifPath) {
|
||||
const sarifObject = JSON.parse(fs.readFileSync(sarifPath, "utf8"));
|
||||
// We do not need to compute fingerprints because we are not sending data based off of locations.
|
||||
// Generate the query: alert count object
|
||||
@@ -488,7 +557,7 @@ async function warnIfGoInstalledAfterInit(config, logger) {
|
||||
const goBinaryPath = await io.which("go", true);
|
||||
if (goInitPath !== goBinaryPath) {
|
||||
logger.warning(`Expected \`which go\` to return ${goInitPath}, but got ${goBinaryPath}: please ensure that the correct version of Go is installed before the \`codeql-action/init\` Action is used.`);
|
||||
(0, diagnostics_1.addDiagnostic)(config, languages_1.Language.go, (0, diagnostics_1.makeDiagnostic)("go/workflow/go-installed-after-codeql-init", "Go was installed after the `codeql-action/init` Action was run", {
|
||||
(0, diagnostics_1.addDiagnostic)(config, languages_1.KnownLanguage.go, (0, diagnostics_1.makeDiagnostic)("go/workflow/go-installed-after-codeql-init", "Go was installed after the `codeql-action/init` Action was run", {
|
||||
markdownMessage: "To avoid interfering with the CodeQL analysis, perform all installation steps before calling the `github/codeql-action/init` Action.",
|
||||
visibility: {
|
||||
statusPage: true,
|
||||
@@ -500,15 +569,6 @@ async function warnIfGoInstalledAfterInit(config, logger) {
|
||||
}
|
||||
}
|
||||
}
|
||||
async function runCleanup(config, cleanupLevel, logger) {
|
||||
logger.startGroup("Cleaning up databases");
|
||||
for (const language of config.languages) {
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
await codeql.databaseCleanup(databasePath, cleanupLevel);
|
||||
}
|
||||
logger.endGroup();
|
||||
}
|
||||
exports.exportedForTesting = {
|
||||
getDiffRanges,
|
||||
};
|
||||
|
||||
File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated): 21 changed lines
@@ -63,8 +63,8 @@ const util = __importStar(require("./util"));
|
||||
const addSnippetsFlag = "";
|
||||
const threadsFlag = "";
|
||||
sinon.stub(uploadLib, "validateSarifFileSchema");
|
||||
for (const language of Object.values(languages_1.Language)) {
|
||||
(0, codeql_1.setCodeQL)({
|
||||
for (const language of Object.values(languages_1.KnownLanguage)) {
|
||||
const codeql = (0, codeql_1.createStubCodeQL)({
|
||||
databaseRunQueries: async () => { },
|
||||
packDownload: async () => ({ packs: [] }),
|
||||
databaseInterpretResults: async (_db, _queriesRun, sarifFile) => {
|
||||
@@ -114,9 +114,11 @@ const util = __importStar(require("./util"));
|
||||
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||
recursive: true,
|
||||
});
|
||||
const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
|
||||
const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, undefined, codeql, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
|
||||
t.deepEqual(Object.keys(statusReport).sort(), [
|
||||
"analysis_builds_overlay_base_database",
|
||||
"analysis_is_diff_informed",
|
||||
"analysis_is_overlay",
|
||||
`analyze_builtin_queries_${language}_duration_ms`,
|
||||
"event_reports",
|
||||
`interpret_results_${language}_duration_ms`,
|
||||
@@ -313,4 +315,17 @@ function runGetDiffRanges(changes, patch) {
|
||||
const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
|
||||
t.deepEqual(diffRanges, undefined);
|
||||
});
|
||||
(0, ava_1.default)("resolveQuerySuiteAlias", (t) => {
|
||||
// default query suite names should resolve to something language-specific ending in `.qls`.
|
||||
for (const suite of analyze_1.defaultSuites) {
|
||||
const resolved = (0, analyze_1.resolveQuerySuiteAlias)(languages_1.KnownLanguage.go, suite);
|
||||
t.assert(resolved.endsWith(".qls"), "Resolved default suite doesn't end in .qls");
|
||||
t.assert(resolved.indexOf(languages_1.KnownLanguage.go) >= 0, "Resolved default suite doesn't contain language name");
|
||||
}
|
||||
// other inputs should be returned unchanged
|
||||
const names = ["foo", "bar", "codeql/go-queries@1.0"];
|
||||
for (const name of names) {
|
||||
t.deepEqual((0, analyze_1.resolveQuerySuiteAlias)(languages_1.KnownLanguage.go, name), name);
|
||||
}
|
||||
});
|
||||
//# sourceMappingURL=analyze.test.js.map
|
||||
File diff suppressed because one or more lines are too long
lib/api-client.js (generated): 21 changed lines
@@ -122,14 +122,12 @@ async function getGitHubVersion() {
|
||||
* Get the path of the currently executing workflow relative to the repository root.
|
||||
*/
|
||||
async function getWorkflowRelativePath() {
|
||||
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
||||
const owner = repo_nwo[0];
|
||||
const repo = repo_nwo[1];
|
||||
const repo_nwo = (0, repository_1.getRepositoryNwo)();
|
||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||
const apiClient = getApiClient();
|
||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||
owner,
|
||||
repo,
|
||||
owner: repo_nwo.owner,
|
||||
repo: repo_nwo.repo,
|
||||
run_id,
|
||||
});
|
||||
const workflowUrl = runsResponse.data.workflow_url;
|
||||
@@ -187,7 +185,7 @@ function computeAutomationID(analysis_key, environment) {
|
||||
}
|
||||
/** List all Actions cache entries matching the provided key and ref. */
|
||||
async function listActionsCaches(key, ref) {
|
||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
|
||||
return await getApiClient().paginate("GET /repos/{owner}/{repo}/actions/caches", {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
@@ -197,7 +195,7 @@ async function listActionsCaches(key, ref) {
|
||||
}
|
||||
/** Delete an Actions cache item by its ID. */
|
||||
async function deleteActionsCache(id) {
|
||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
|
||||
await getApiClient().rest.actions.deleteActionsCacheById({
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
@@ -206,11 +204,16 @@ async function deleteActionsCache(id) {
|
||||
}
|
||||
function wrapApiConfigurationError(e) {
|
||||
if ((0, util_1.isHTTPError)(e)) {
|
||||
if (e.message.includes("API rate limit exceeded for site ID installation") ||
|
||||
if (e.message.includes("API rate limit exceeded for installation") ||
|
||||
e.message.includes("commit not found") ||
|
||||
/^ref .* not found in this repository$/.test(e.message)) {
|
||||
e.message.includes("Resource not accessible by integration") ||
|
||||
/ref .* not found in this repository/.test(e.message)) {
|
||||
return new util_1.ConfigurationError(e.message);
|
||||
}
|
||||
else if (e.message.includes("Bad credentials") ||
|
||||
e.message.includes("Not Found")) {
|
||||
return new util_1.ConfigurationError("Please check that your token is valid and has the required permissions: contents: read, security-events: write");
|
||||
}
|
||||
}
|
||||
return e;
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
lib/api-client.test.js (generated): 36 changed lines
@@ -120,4 +120,40 @@ function mockGetMetaVersionHeader(versionHeader) {
|
||||
});
|
||||
t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
|
||||
});
|
||||
(0, ava_1.default)("wrapApiConfigurationError correctly wraps specific configuration errors", (t) => {
|
||||
// We don't reclassify arbitrary errors
|
||||
const arbitraryError = new Error("arbitrary error");
|
||||
let res = api.wrapApiConfigurationError(arbitraryError);
|
||||
t.is(res, arbitraryError);
|
||||
// Errors that are already ConfigurationErrors are likewise returned unchanged
|
||||
const configError = new util.ConfigurationError("arbitrary error");
|
||||
res = api.wrapApiConfigurationError(configError);
|
||||
t.is(res, configError);
|
||||
// If an HTTP error doesn't contain a specific error message, we don't
|
||||
// wrap it as an API error.
|
||||
const httpError = new util.HTTPError("arbitrary HTTP error", 456);
|
||||
res = api.wrapApiConfigurationError(httpError);
|
||||
t.is(res, httpError);
|
||||
// For other HTTP errors, we wrap them as Configuration errors if they contain
|
||||
// specific error messages.
|
||||
const httpNotFoundError = new util.HTTPError("commit not found", 404);
|
||||
res = api.wrapApiConfigurationError(httpNotFoundError);
|
||||
t.deepEqual(res, new util.ConfigurationError("commit not found"));
|
||||
const refNotFoundError = new util.HTTPError("ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest", 404);
|
||||
res = api.wrapApiConfigurationError(refNotFoundError);
|
||||
t.deepEqual(res, new util.ConfigurationError("ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest"));
|
||||
const apiRateLimitError = new util.HTTPError("API rate limit exceeded for installation", 403);
|
||||
res = api.wrapApiConfigurationError(apiRateLimitError);
|
||||
t.deepEqual(res, new util.ConfigurationError("API rate limit exceeded for installation"));
|
||||
const tokenSuggestionMessage = "Please check that your token is valid and has the required permissions: contents: read, security-events: write";
|
||||
const badCredentialsError = new util.HTTPError("Bad credentials", 401);
|
||||
res = api.wrapApiConfigurationError(badCredentialsError);
|
||||
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
|
||||
const notFoundError = new util.HTTPError("Not Found", 404);
|
||||
res = api.wrapApiConfigurationError(notFoundError);
|
||||
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
|
||||
const resourceNotAccessibleError = new util.HTTPError("Resource not accessible by integration", 403);
|
||||
res = api.wrapApiConfigurationError(resourceNotAccessibleError);
|
||||
t.deepEqual(res, new util.ConfigurationError("Resource not accessible by integration"));
|
||||
});
|
||||
//# sourceMappingURL=api-client.test.js.map
|
||||
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
||||
{ "maximumVersion": "3.16", "minimumVersion": "3.12" }
|
||||
{ "maximumVersion": "3.18", "minimumVersion": "3.14" }
|
||||
|
||||
lib/autobuild.js (generated): 21 changed lines
@@ -45,11 +45,9 @@ const environment_1 = require("./environment");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const repository_1 = require("./repository");
|
||||
const tools_features_1 = require("./tools-features");
|
||||
const util_1 = require("./util");
|
||||
async function determineAutobuildLanguages(codeql, config, logger) {
|
||||
if ((config.buildMode === util_1.BuildMode.None &&
|
||||
(await codeql.supportsFeature(tools_features_1.ToolsFeature.TraceCommandUseBuildMode))) ||
|
||||
if (config.buildMode === util_1.BuildMode.None ||
|
||||
config.buildMode === util_1.BuildMode.Manual) {
|
||||
logger.info(`Using build mode "${config.buildMode}", nothing to autobuild. ` +
|
||||
`See ${doc_url_1.DocUrl.CODEQL_BUILD_MODES} for more information.`);
|
||||
@@ -59,8 +57,8 @@ async function determineAutobuildLanguages(codeql, config, logger) {
|
||||
// We want pick the dominant language in the repo from the ones we're able to build
|
||||
// The languages are sorted in order specified by user or by lines of code if we got
|
||||
// them from the GitHub API, so try to build the first language on the list.
|
||||
const autobuildLanguages = config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l));
|
||||
if (!autobuildLanguages) {
|
||||
const autobuildLanguages = await (0, util_1.asyncFilter)(config.languages, async (language) => await codeql.isTracedLanguage(language));
|
||||
if (autobuildLanguages.length === 0) {
|
||||
logger.info("None of the languages in this project require extra build steps");
|
||||
return undefined;
|
||||
}
|
||||
@@ -91,7 +89,7 @@ async function determineAutobuildLanguages(codeql, config, logger) {
|
||||
* This special case behavior should be removed as part of the next major
|
||||
* version of the CodeQL Action.
|
||||
*/
|
||||
const autobuildLanguagesWithoutGo = autobuildLanguages.filter((l) => l !== languages_1.Language.go);
|
||||
const autobuildLanguagesWithoutGo = autobuildLanguages.filter((l) => l !== languages_1.KnownLanguage.go);
|
||||
const languages = [];
|
||||
// First run the autobuilder for the first non-Go traced language, if one
|
||||
// exists.
|
||||
@@ -101,7 +99,7 @@ async function determineAutobuildLanguages(codeql, config, logger) {
|
||||
// If Go is requested, run the Go autobuilder last to ensure it doesn't
|
||||
// interfere with the other autobuilder.
|
||||
if (autobuildLanguages.length !== autobuildLanguagesWithoutGo.length) {
|
||||
languages.push(languages_1.Language.go);
|
||||
languages.push(languages_1.KnownLanguage.go);
|
||||
}
|
||||
logger.debug(`Will autobuild ${languages.join(" and ")}.`);
|
||||
// In general the autobuilders for other traced languages may conflict with
|
||||
@@ -123,7 +121,7 @@ async function setupCppAutobuild(codeql, logger) {
|
||||
const envVar = feature_flags_1.featureConfig[feature_flags_1.Feature.CppDependencyInstallation].envVar;
|
||||
const featureName = "C++ automatic installation of dependencies";
|
||||
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
|
||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
|
||||
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
||||
if (await features.getValue(feature_flags_1.Feature.CppDependencyInstallation, codeql)) {
|
||||
// disable autoinstall on self-hosted runners unless explicitly requested
|
||||
@@ -147,17 +145,16 @@ async function setupCppAutobuild(codeql, logger) {
|
||||
async function runAutobuild(config, language, logger) {
|
||||
logger.startGroup(`Attempting to automatically build ${language} code`);
|
||||
const codeQL = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
if (language === languages_1.Language.cpp) {
|
||||
if (language === languages_1.KnownLanguage.cpp) {
|
||||
await setupCppAutobuild(codeQL, logger);
|
||||
}
|
||||
if (config.buildMode &&
|
||||
(await codeQL.supportsFeature(tools_features_1.ToolsFeature.TraceCommandUseBuildMode))) {
|
||||
if (config.buildMode) {
|
||||
await codeQL.extractUsingBuildMode(config, language);
|
||||
}
|
||||
else {
|
||||
await codeQL.runAutobuild(config, language);
|
||||
}
|
||||
if (language === languages_1.Language.go) {
|
||||
if (language === languages_1.KnownLanguage.go) {
|
||||
core.exportVariable(environment_1.EnvVar.DID_AUTOBUILD_GOLANG, "true");
|
||||
}
|
||||
logger.endGroup();
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAeA,kEAkGC;AAED,8CAqCC;AAED,oCAsBC;AAhLD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAyD;AAEzD,6CAAkD;AAClD,qDAAgD;AAChD,iCAAwD;AAEjD,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,CAAC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QAClC,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,CAAC;QACxE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CACT,qBAAqB,MAAM,CAAC,SAAS,2BAA2B;YAC9D,OAAO,gBAAM,CAAC,kBAAkB,wBAAwB,CAC3D,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,GAAG,EAAE,CAAC;QAC9B,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IACE,MAAM,CAAC,SAAS;QAChB,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,EACrE,CAAC;QACD,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,cAAc,C
AAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,kEAkGC;AAED,8CAmCC;AAED,oCAmBC;AA1KD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAsD;AAEtD,6CAAgD;AAChD,iCAAgD;AAEzC,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QACnC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CACT,qBAAqB,MAAM,CAAC,SAAS,2BAA2B;YAC9D,OAAO,gBAAM,CAAC,kBAAkB,wBAAwB,CAC3D,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,IAAA,kBAAW,EAC1C,MAAM,CAAC,SAAS,EAChB,KAAK,EAAE,QAAQ,EAAE,EAAE,CAAC,MAAM,MAAM,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAC5D,CAAC;IAEF,IAAI,kBAAkB,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACpC,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,yBAAa,CAAC,EAAE,CAC9B,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,yBAAa,CAAC,EAAE,CAAC,CAAC;IACnC,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,6BAAgB,GAAE,CAAC;IACzC,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,yBAAa,CAAC,GAAG,EAAE,CAAC;QACnC,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACrB,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,yBAAa,CAAC,EAAE,EAAE,CAAC;QAClC,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}
|
||||
lib/cli-errors.js (generated): 41 changed lines
@@ -1,11 +1,16 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.cliErrorsConfig = exports.CliConfigErrorCategory = exports.CliError = void 0;
|
||||
exports.getCliConfigCategoryIfExists = getCliConfigCategoryIfExists;
|
||||
exports.wrapCliConfigurationError = wrapCliConfigurationError;
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const doc_url_1 = require("./doc-url");
|
||||
const util_1 = require("./util");
|
||||
const SUPPORTED_PLATFORMS = [
|
||||
["linux", "x64"],
|
||||
["win32", "x64"],
|
||||
["darwin", "x64"],
|
||||
["darwin", "arm64"],
|
||||
];
|
||||
/**
|
||||
* An error from a CodeQL CLI invocation, with associated exit code, stderr, etc.
|
||||
*/
|
||||
@@ -110,6 +115,7 @@ function extractAutobuildErrors(error) {
|
||||
var CliConfigErrorCategory;
|
||||
(function (CliConfigErrorCategory) {
|
||||
CliConfigErrorCategory["AutobuildError"] = "AutobuildError";
|
||||
CliConfigErrorCategory["CouldNotCreateTempDir"] = "CouldNotCreateTempDir";
|
||||
CliConfigErrorCategory["ExternalRepositoryCloneFailed"] = "ExternalRepositoryCloneFailed";
|
||||
CliConfigErrorCategory["GradleBuildFailed"] = "GradleBuildFailed";
|
||||
CliConfigErrorCategory["IncompatibleWithActionVersion"] = "IncompatibleWithActionVersion";
|
||||
@@ -123,6 +129,7 @@ var CliConfigErrorCategory;
|
||||
CliConfigErrorCategory["NoSourceCodeSeen"] = "NoSourceCodeSeen";
|
||||
CliConfigErrorCategory["NoSupportedBuildCommandSucceeded"] = "NoSupportedBuildCommandSucceeded";
|
||||
CliConfigErrorCategory["NoSupportedBuildSystemDetected"] = "NoSupportedBuildSystemDetected";
|
||||
CliConfigErrorCategory["NotFoundInRegistry"] = "NotFoundInRegistry";
|
||||
CliConfigErrorCategory["OutOfMemoryOrDisk"] = "OutOfMemoryOrDisk";
|
||||
CliConfigErrorCategory["PackCannotBeFound"] = "PackCannotBeFound";
|
||||
CliConfigErrorCategory["PackMissingAuth"] = "PackMissingAuth";
|
||||
@@ -139,6 +146,9 @@ exports.cliErrorsConfig = {
|
||||
new RegExp("We were unable to automatically build your code"),
|
||||
],
|
||||
},
|
||||
[CliConfigErrorCategory.CouldNotCreateTempDir]: {
|
||||
cliErrorMessageCandidates: [new RegExp("Could not create temp directory")],
|
||||
},
|
||||
[CliConfigErrorCategory.ExternalRepositoryCloneFailed]: {
|
||||
cliErrorMessageCandidates: [
|
||||
new RegExp("Failed to clone external Git repository"),
|
||||
@@ -146,7 +156,7 @@ exports.cliErrorsConfig = {
|
||||
},
|
||||
[CliConfigErrorCategory.GradleBuildFailed]: {
|
||||
cliErrorMessageCandidates: [
|
||||
new RegExp("[autobuild] FAILURE: Build failed with an exception."),
|
||||
new RegExp("\\[autobuild\\] FAILURE: Build failed with an exception."),
|
||||
],
|
||||
},
|
||||
// Version of CodeQL CLI is incompatible with this version of the CodeQL Action
|
||||
@@ -239,6 +249,11 @@ exports.cliErrorsConfig = {
|
||||
new RegExp("does not support the .* build mode. Please try using one of the following build modes instead"),
|
||||
],
|
||||
},
|
||||
[CliConfigErrorCategory.NotFoundInRegistry]: {
|
||||
cliErrorMessageCandidates: [
|
||||
new RegExp("'.*' not found in the registry '.*'"),
|
||||
],
|
||||
},
|
||||
};
|
||||
/**
|
||||
* Check if the given CLI error or exit code, if applicable, apply to any known
|
||||
@@ -262,11 +277,29 @@ function getCliConfigCategoryIfExists(cliError) {
|
||||
return undefined;
|
||||
}
|
||||
/**
|
||||
* Changes an error received from the CLI to a ConfigurationError with optionally an extra
|
||||
* error message appended, if it exists in a known set of configuration errors. Otherwise,
|
||||
* Check if we are running on an unsupported platform/architecture combination.
|
||||
*/
|
||||
function isUnsupportedPlatform() {
|
||||
return !SUPPORTED_PLATFORMS.some(([platform, arch]) => platform === process.platform && arch === process.arch);
|
||||
}
|
||||
/**
|
||||
* Transform a CLI error into a ConfigurationError for an unsupported platform.
|
||||
*/
|
||||
function getUnsupportedPlatformError(cliError) {
|
||||
return new util_1.ConfigurationError("The CodeQL CLI does not support the platform/architecture combination of " +
|
||||
`${process.platform}/${process.arch} ` +
|
||||
`(see ${doc_url_1.DocUrl.SYSTEM_REQUIREMENTS}). ` +
|
||||
`The underlying error was: ${cliError.message}`);
|
||||
}
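A minimal sketch of the platform gate above; SUPPORTED_PLATFORMS here repeats the list from the top of this file:
// Errors are rewritten for unsupported platform/arch combinations; otherwise the
// usual category matching in wrapCliConfigurationError applies.
const supportedPlatforms = [
    ["linux", "x64"], ["win32", "x64"], ["darwin", "x64"], ["darwin", "arm64"],
];
const unsupported = !supportedPlatforms.some(([platform, arch]) => platform === process.platform && arch === process.arch);
console.log(unsupported
    ? "CLI errors will be wrapped as ConfigurationErrors"
    : "CLI errors are passed through to the usual category matching");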
|
||||
/**
|
||||
* Changes an error received from the CLI to a ConfigurationError with the message
|
||||
* optionally being transformed, if it is a known configuration error. Otherwise,
|
||||
* simply returns the original error.
|
||||
*/
|
||||
function wrapCliConfigurationError(cliError) {
|
||||
if (isUnsupportedPlatform()) {
|
||||
return getUnsupportedPlatformError(cliError);
|
||||
}
|
||||
const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError);
|
||||
if (cliConfigErrorCategory === undefined) {
|
||||
return cliError;
|
||||
|
||||
File diff suppressed because one or more lines are too long
lib/cli-errors.test.js (generated, new file): 220 changed lines
@@ -0,0 +1,220 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const cli_errors_1 = require("./cli-errors");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("CliError constructor with fatal errors", (t) => {
|
||||
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "finalize"], 32, "Running TRAP import for CodeQL database...\nA fatal error occurred: Evaluator heap must be at least 384.00 MiB\nA fatal error occurred: Dataset import failed with code 2");
|
||||
const cliError = new cli_errors_1.CliError(commandError);
|
||||
t.is(cliError.exitCode, 32);
|
||||
t.is(cliError.stderr, "Running TRAP import for CodeQL database...\nA fatal error occurred: Evaluator heap must be at least 384.00 MiB\nA fatal error occurred: Dataset import failed with code 2");
|
||||
t.true(cliError.message.includes("A fatal error occurred: Dataset import failed with code 2."));
|
||||
t.true(cliError.message.includes("Context: A fatal error occurred: Evaluator heap must be at least 384.00 MiB."));
|
||||
});
|
||||
(0, ava_1.default)("CliError constructor with single fatal error", (t) => {
|
||||
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "A fatal error occurred: Out of memory");
|
||||
const cliError = new cli_errors_1.CliError(commandError);
|
||||
t.is(cliError.exitCode, 1);
|
||||
t.true(cliError.message.includes("A fatal error occurred: Out of memory"));
|
||||
t.false(cliError.message.includes("Context:"));
|
||||
});
|
||||
(0, ava_1.default)("CliError constructor with autobuild errors", (t) => {
|
||||
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "[autobuild] [ERROR] Build failed\n[autobuild] [ERROR] Compilation error");
|
||||
const cliError = new cli_errors_1.CliError(commandError);
|
||||
t.is(cliError.exitCode, 1);
|
||||
t.true(cliError.message.includes("We were unable to automatically build your code"));
|
||||
t.true(cliError.message.includes("Build failed\nCompilation error"));
|
||||
});
|
||||
(0, ava_1.default)("CliError constructor with truncated autobuild errors", (t) => {
|
||||
const stderr = Array.from({ length: 12 }, (_, i) => `[autobuild] [ERROR] Error ${i + 1}`).join("\n");
|
||||
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, stderr);
|
||||
const cliError = new cli_errors_1.CliError(commandError);
|
||||
t.true(cliError.message.includes("(truncated)"));
|
||||
// Should only include first 10 errors plus truncation message
|
||||
const errorLines = cliError.message
|
||||
.split("Encountered the following error: ")[1]
|
||||
.split("\n");
|
||||
t.is(errorLines.length, 11); // 10 errors + "(truncated)"
|
||||
});
|
||||
(0, ava_1.default)("CliError constructor with generic error", (t) => {
|
||||
const commandError = new actions_util_1.CommandInvocationError("codeql", ["version"], 1, "Some generic error message\nLast line of error");
|
||||
const cliError = new cli_errors_1.CliError(commandError);
t.is(cliError.exitCode, 1);
t.true(cliError.message.includes('Encountered a fatal error while running "codeql version"'));
t.true(cliError.message.includes("Exit code was 1 and last log line was: Last line of error."));
});
(0, ava_1.default)("CliError constructor with empty stderr", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["version"], 1, "");
const cliError = new cli_errors_1.CliError(commandError);
t.true(cliError.message.includes("last log line was: n/a"));
});
for (const [platform, arch] of [
["weird_plat", "x64"],
["linux", "arm64"],
["win32", "arm64"],
]) {
(0, ava_1.default)(`wrapCliConfigurationError - ${platform}/${arch} unsupported`, (t) => {
sinon.stub(process, "platform").value(platform);
sinon.stub(process, "arch").value(arch);
const commandError = new actions_util_1.CommandInvocationError("codeql", ["version"], 1, "Some error");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
t.true(wrappedError.message.includes("CodeQL CLI does not support the platform/architecture combination"));
t.true(wrappedError.message.includes(`${platform}/${arch}`));
});
}
(0, ava_1.default)("wrapCliConfigurationError - supported platform", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["version"], 1, "Some error");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
// Should return the original error since platform is supported
t.is(wrappedError, cliError);
});
(0, ava_1.default)("wrapCliConfigurationError - autobuild error", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "We were unable to automatically build your code");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
t.true(wrappedError.message.includes("We were unable to automatically build your code"));
});
(0, ava_1.default)("wrapCliConfigurationError - init called twice", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "Refusing to create databases /some/path but could not process any of it");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
t.true(wrappedError.message.includes('Is the "init" action called twice in the same job?'));
});
(0, ava_1.default)("wrapCliConfigurationError - no source code seen by exit code", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "finalize"], 32, "Some other error message");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - no source code seen by message", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "finalize"], 1, "CodeQL detected code written in JavaScript but could not process any of it");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - out of memory error with additional message", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "analyze"], 1, "CodeQL is out of memory.");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
t.true(wrappedError.message.includes("For more information, see https://gh.io/troubleshooting-code-scanning/out-of-disk-or-memory"));
});
(0, ava_1.default)("wrapCliConfigurationError - gradle build failed", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "[autobuild] FAILURE: Build failed with an exception.");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - maven build failed", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "[autobuild] [ERROR] Failed to execute goal");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - swift build failed", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "[autobuilder/build] [build-command-failed] `autobuild` failed to run the build command");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - pack cannot be found", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["pack", "install"], 1, "Query pack my-pack cannot be found. Check the spelling of the pack.");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - pack missing auth", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["pack", "download"], 1, "GitHub Container registry returned 403 Forbidden");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - invalid config file", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["database", "create"], 1, "Config file .codeql/config.yml is not valid");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - incompatible CLI version", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["version"], 1, "is not compatible with this CodeQL CLI");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
t.true(wrappedError instanceof util_1.ConfigurationError);
});
(0, ava_1.default)("wrapCliConfigurationError - unknown error remains unchanged", (t) => {
const commandError = new actions_util_1.CommandInvocationError("codeql", ["version"], 1, "Some unknown error that doesn't match any patterns");
const cliError = new cli_errors_1.CliError(commandError);
const wrappedError = (0, cli_errors_1.wrapCliConfigurationError)(cliError);
// Should return the original CliError since it doesn't match any known patterns
t.is(wrappedError, cliError);
t.true(wrappedError instanceof cli_errors_1.CliError);
t.false(wrappedError instanceof util_1.ConfigurationError);
});
// Test all error categories to ensure they're properly configured
(0, ava_1.default)("all CLI config error categories have valid configurations", (t) => {
const allCategories = Object.values(cli_errors_1.CliConfigErrorCategory);
for (const category of allCategories) {
// Each category should be a string
t.is(typeof category, "string");
// Create a test error that matches this category
let testError;
switch (category) {
case cli_errors_1.CliConfigErrorCategory.NoSourceCodeSeen:
// This category matches by exit code
testError = new cli_errors_1.CliError(new actions_util_1.CommandInvocationError("codeql", [], 32, "some error"));
break;
default:
// For other categories, we'll test with a generic message that should not match
testError = new cli_errors_1.CliError(new actions_util_1.CommandInvocationError("codeql", [], 1, "generic error"));
break;
}
// The test should not throw an error when processing
t.notThrows(() => (0, cli_errors_1.wrapCliConfigurationError)(testError));
}
});
//# sourceMappingURL=cli-errors.test.js.map
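Note: the snippet below is an illustrative sketch and is not part of the diff above. It shows how calling code in the Action might use CliError and wrapCliConfigurationError, which the tests above exercise; the runCodeqlCommand helper and the runner argument are hypothetical.

// Illustrative sketch only -- `runCodeqlCommand` and `runner` are hypothetical.
const { CliError, wrapCliConfigurationError } = require("./cli-errors");

async function runCodeqlCommand(cmd, args, runner) {
    try {
        return await runner(cmd, args);
    }
    catch (e) {
        // `e` is assumed to be a CommandInvocationError carrying the exit code and stderr.
        const cliError = new CliError(e);
        // Known user-actionable failures (autobuild failures, missing packs, invalid
        // config files, unsupported platforms, ...) become ConfigurationError;
        // anything unrecognized is rethrown as the original CliError.
        throw wrapCliConfigurationError(cliError);
    }
}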
lib/cli-errors.test.js.map — 1 change (normal file)
File diff suppressed because one or more lines are too long
lib/codeql.js — 155 changes (generated)
@@ -36,7 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.setupCodeQL = setupCodeQL;
exports.getCodeQL = getCodeQL;
exports.setCodeQL = setCodeQL;
exports.getCachedCodeQL = getCachedCodeQL;
exports.createStubCodeQL = createStubCodeQL;
exports.getCodeQLForTesting = getCodeQLForTesting;
exports.getCodeQLForCmd = getCodeQLForCmd;
exports.getExtraOptions = getExtraOptions;
@@ -50,11 +50,12 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const yaml = __importStar(require("js-yaml"));
const actions_util_1 = require("./actions-util");
const cli_errors_1 = require("./cli-errors");
const config_utils_1 = require("./config-utils");
const doc_url_1 = require("./doc-url");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const git_utils_1 = require("./git-utils");
const languages_1 = require("./languages");
const overlay_database_utils_1 = require("./overlay-database-utils");
const setupCodeql = __importStar(require("./setup-codeql"));
const tools_features_1 = require("./tools-features");
const tracer_config_1 = require("./tracer-config");
@@ -62,7 +63,6 @@ const util = __importStar(require("./util"));
const util_1 = require("./util");
/**
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
* Can be overridden in tests using `setCodeQL`.
*/
let cachedCodeQL = undefined;
/**
@@ -73,19 +73,19 @@ let cachedCodeQL = undefined;
* The version flags below can be used to conditionally enable certain features
* on versions newer than this.
*/
const CODEQL_MINIMUM_VERSION = "2.15.5";
const CODEQL_MINIMUM_VERSION = "2.16.6";
/**
* This version will shortly become the oldest version of CodeQL that the Action will run with.
*/
const CODEQL_NEXT_MINIMUM_VERSION = "2.15.5";
const CODEQL_NEXT_MINIMUM_VERSION = "2.17.6";
/**
* This is the version of GHES that was most recently deprecated.
*/
const GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.11";
const GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
/**
* This is the deprecation date for the version of GHES that was most recently deprecated.
*/
const GHES_MOST_RECENT_DEPRECATION_DATE = "2024-12-19";
const GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
/** The CLI verbosity level to use for extraction in debug mode. */
const EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
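Note: the snippet below is an illustrative sketch and is not part of the diff. The hunk above bumps the minimum and next-minimum CLI versions; a version gate along these lines is one plausible way such constants are consumed. The function name and exact messages are assumptions.

// Illustrative sketch only. Assumes codeql.getVersion() resolves to { version }.
const semver = require("semver");

async function checkMinimumVersion(codeql, logger) {
    const { version } = await codeql.getVersion();
    if (semver.lt(version, CODEQL_MINIMUM_VERSION)) {
        // Too old to run at all with this release of the Action.
        throw new Error(`CodeQL CLI ${version} is older than the minimum supported version ${CODEQL_MINIMUM_VERSION}.`);
    }
    if (semver.lt(version, CODEQL_NEXT_MINIMUM_VERSION)) {
        // Still supported, but warn that support is about to be dropped.
        logger.warning(`CodeQL CLI ${version} will soon be unsupported; the next minimum version is ${CODEQL_NEXT_MINIMUM_VERSION}.`);
    }
}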
/*
@@ -133,7 +133,11 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
};
}
catch (e) {
throw new Error(`Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}${e instanceof Error && e.stack ? `\n\nDetails: ${e.stack}` : ""}`);
const ErrorClass = e instanceof util.ConfigurationError ||
(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
? util.ConfigurationError
: Error;
throw new ErrorClass(`Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}${e instanceof Error && e.stack ? `\n\nDetails: ${e.stack}` : ""}`);
}
}
/**
@@ -145,6 +149,16 @@ async function getCodeQL(cmd) {
}
return cachedCodeQL;
}
/**
* Overrides the CodeQL object. Only for use in tests that cannot override
* CodeQL via dependency injection.
*
* Accepts a partial object. Any undefined methods will be implemented
* to immediately throw an exception indicating which method is missing.
*/
function setCodeQL(codeql) {
cachedCodeQL = createStubCodeQL(codeql);
}
function resolveFunction(partialCodeql, methodName, defaultImplementation) {
if (typeof partialCodeql[methodName] !== "function") {
if (defaultImplementation !== undefined) {
@@ -158,13 +172,13 @@ function resolveFunction(partialCodeql, methodName, defaultImplementation) {
return partialCodeql[methodName];
}
/**
* Set the functionality for CodeQL methods. Only for use in tests.
* Creates a stub CodeQL object. Only for use in tests.
*
* Accepts a partial object and any undefined methods will be implemented
* Accepts a partial object. Any undefined methods will be implemented
* to immediately throw an exception indicating which method is missing.
*/
function setCodeQL(partialCodeql) {
cachedCodeQL = {
function createStubCodeQL(partialCodeql) {
return {
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
getVersion: resolveFunction(partialCodeql, "getVersion", async () => ({
version: "1.0.0",
@@ -172,6 +186,8 @@ function setCodeQL(partialCodeql) {
printVersion: resolveFunction(partialCodeql, "printVersion"),
supportsFeature: resolveFunction(partialCodeql, "supportsFeature", async (feature) => !!partialCodeql.getVersion &&
(0, tools_features_1.isSupportedToolsFeature)(await partialCodeql.getVersion(), feature)),
isTracedLanguage: resolveFunction(partialCodeql, "isTracedLanguage"),
isScannedLanguage: resolveFunction(partialCodeql, "isScannedLanguage"),
databaseInitCluster: resolveFunction(partialCodeql, "databaseInitCluster"),
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
@@ -182,7 +198,7 @@ function setCodeQL(partialCodeql) {
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
resolveBuildEnvironment: resolveFunction(partialCodeql, "resolveBuildEnvironment"),
packDownload: resolveFunction(partialCodeql, "packDownload"),
databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"),
databaseCleanupCluster: resolveFunction(partialCodeql, "databaseCleanupCluster"),
databaseBundle: resolveFunction(partialCodeql, "databaseBundle"),
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
@@ -192,20 +208,6 @@ function setCodeQL(partialCodeql) {
resolveExtractor: resolveFunction(partialCodeql, "resolveExtractor"),
mergeResults: resolveFunction(partialCodeql, "mergeResults"),
};
return cachedCodeQL;
}
/**
* Get the cached CodeQL object. Should only be used from tests.
*
* TODO: Work out a good way for tests to get this from the test context
* instead of having to have this method.
*/
function getCachedCodeQL() {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error("cachedCodeQL undefined");
}
return cachedCodeQL;
}
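Note: the snippet below is an illustrative sketch and is not part of the diff. The refactor above splits the old setCodeQL into setCodeQL plus a reusable createStubCodeQL, so a test overrides only the methods it needs and lets the stub throw for everything else. The version string and paths here are invented.

// Illustrative sketch only; setCodeQL and getCachedCodeQL are shown in the diff above.
const { setCodeQL, getCachedCodeQL } = require("./codeql");

setCodeQL({
    getPath: () => "/opt/hostedtoolcache/codeql/codeql",
    getVersion: async () => ({ version: "2.17.6" }),
    // Any method not listed here is stubbed to throw immediately, so an
    // unexpected CLI call fails loudly inside the test.
});
const codeql = getCachedCodeQL();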
/**
* Get a real, newly created CodeQL instance for testing. The instance refers to
@@ -250,6 +252,14 @@ async function getCodeQLForCmd(cmd, checkVersion) {
async supportsFeature(feature) {
return (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), feature);
},
async isTracedLanguage(language) {
const extractorPath = await this.resolveExtractor(language);
const tracingConfigPath = path.join(extractorPath, "tools", "tracing-config.lua");
return fs.existsSync(tracingConfigPath);
},
async isScannedLanguage(language) {
return !(await this.isTracedLanguage(language));
},
async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
const extraArgs = config.languages.map((language) => `--language=${language}`);
if (await (0, tracer_config_1.shouldEnableIndirectTracing)(codeql, config)) {
@@ -257,19 +267,13 @@ async function getCodeQLForCmd(cmd, checkVersion) {
extraArgs.push(...(await getTrapCachingExtractorConfigArgs(config)));
extraArgs.push(`--trace-process-name=${processName}`);
}
if (config.languages.indexOf(languages_1.Language.actions) >= 0) {
extraArgs.push("--search-path");
const extractorPath = path.resolve(__dirname, "../actions-extractor");
extraArgs.push(extractorPath);
}
const codeScanningConfigFile = await generateCodeScanningConfig(config, logger);
const codeScanningConfigFile = await writeCodeScanningConfigFile(config, logger);
const externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`);
if (externalRepositoryToken) {
extraArgs.push("--external-repository-token-stdin");
}
if (config.buildMode !== undefined &&
(await this.supportsFeature(tools_features_1.ToolsFeature.BuildModeOption))) {
if (config.buildMode !== undefined) {
extraArgs.push(`--build-mode=${config.buildMode}`);
}
if (qlconfigFile !== undefined) {
@@ -278,10 +282,20 @@ async function getCodeQLForCmd(cmd, checkVersion) {
const overwriteFlag = (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), tools_features_1.ToolsFeature.ForceOverwrite)
? "--force-overwrite"
: "--overwrite";
const overlayDatabaseMode = config.augmentationProperties.overlayDatabaseMode;
if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay) {
const overlayChangesFile = await (0, overlay_database_utils_1.writeOverlayChangesFile)(config, sourceRoot, logger);
extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
}
else if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
extraArgs.push("--overlay-base");
}
await runCli(cmd, [
"database",
"init",
overwriteFlag,
...(overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay
? []
: [overwriteFlag]),
"--db-cluster",
config.dbLocation,
`--source-root=${sourceRoot}`,
@@ -293,6 +307,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
ignoringOptions: ["--overwrite"],
}),
], { stdin: externalRepositoryToken });
if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
await (0, overlay_database_utils_1.writeBaseDatabaseOidsFile)(config, sourceRoot);
}
},
async runAutobuild(config, language) {
applyAutobuildAzurePipelinesTimeoutFix();
@@ -440,16 +457,16 @@ async function getCodeQLForCmd(cmd, checkVersion) {
throw new Error(`Unexpected output from codeql resolve build-environment: ${e} in\n${output}`);
}
},
async databaseRunQueries(databasePath, flags) {
async databaseRunQueries(databasePath, flags, queries = []) {
const codeqlArgs = [
"database",
"run-queries",
...flags,
databasePath,
"--expect-discarded-cache",
"--intra-layer-parallelism",
"--min-disk-free=1024", // Try to leave at least 1GB free
"-v",
...queries,
...getExtraOptionsFromEnv(["database", "run-queries"], {
ignoringOptions: ["--expect-discarded-cache"],
}),
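Note: the snippet below is an illustrative sketch and is not part of the diff. The hunk above adds an optional `queries` parameter to databaseRunQueries; the database path, flags, and suite name in this usage sketch are invented.

// Illustrative sketch only; argument values are made up.
const codeql = await getCodeQL("/path/to/codeql");
await codeql.databaseRunQueries(
    "/home/runner/work/_temp/codeql_databases/javascript",
    ["--threads=4", "--ram=4096"],
    // New third parameter: explicit queries to run. It defaults to [], so
    // existing callers keep their previous behaviour.
    ["codeql/javascript-queries:codeql-suites/javascript-security-extended.qls"]);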
@@ -554,18 +571,21 @@ async function getCodeQLForCmd(cmd, checkVersion) {
throw new Error(`Attempted to download specified packs but got an error:\n${output}\n${e}`);
}
},
async databaseCleanup(databasePath, cleanupLevel) {
async databaseCleanupCluster(config, cleanupLevel) {
const cacheCleanupFlag = (await util.codeQlVersionAtLeast(this, CODEQL_VERSION_CACHE_CLEANUP))
? "--cache-cleanup"
: "--mode";
const codeqlArgs = [
"database",
"cleanup",
databasePath,
`${cacheCleanupFlag}=${cleanupLevel}`,
...getExtraOptionsFromEnv(["database", "cleanup"]),
];
await runCli(cmd, codeqlArgs);
for (const language of config.languages) {
const databasePath = util.getCodeQLDatabasePath(config, language);
const codeqlArgs = [
"database",
"cleanup",
databasePath,
`${cacheCleanupFlag}=${cleanupLevel}`,
...getExtraOptionsFromEnv(["database", "cleanup"]),
];
await runCli(cmd, codeqlArgs);
}
},
async databaseBundle(databasePath, outputFilePath, databaseName) {
const args = [
@@ -745,46 +765,9 @@ async function runCli(cmd, args = [], opts = {}) {
* @param config The configuration to use.
* @returns the path to the generated user configuration file.
*/
async function generateCodeScanningConfig(config, logger) {
async function writeCodeScanningConfigFile(config, logger) {
const codeScanningConfigFile = getGeneratedCodeScanningConfigPath(config);
// make a copy so we can modify it
const augmentedConfig = (0, util_1.cloneObject)(config.originalUserInput);
// Inject the queries from the input
if (config.augmentationProperties.queriesInput) {
if (config.augmentationProperties.queriesInputCombines) {
augmentedConfig.queries = (augmentedConfig.queries || []).concat(config.augmentationProperties.queriesInput);
}
else {
augmentedConfig.queries = config.augmentationProperties.queriesInput;
}
}
if (augmentedConfig.queries?.length === 0) {
delete augmentedConfig.queries;
}
// Inject the packs from the input
if (config.augmentationProperties.packsInput) {
if (config.augmentationProperties.packsInputCombines) {
// At this point, we already know that this is a single-language analysis
if (Array.isArray(augmentedConfig.packs)) {
augmentedConfig.packs = (augmentedConfig.packs || []).concat(config.augmentationProperties.packsInput);
}
else if (!augmentedConfig.packs) {
augmentedConfig.packs = config.augmentationProperties.packsInput;
}
else {
// At this point, we know there is only one language.
// If there were more than one language, an error would already have been thrown.
const language = Object.keys(augmentedConfig.packs)[0];
augmentedConfig.packs[language] = augmentedConfig.packs[language].concat(config.augmentationProperties.packsInput);
}
}
else {
augmentedConfig.packs = config.augmentationProperties.packsInput;
}
}
if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
delete augmentedConfig.packs;
}
const augmentedConfig = (0, config_utils_1.generateCodeScanningConfig)(config.originalUserInput, config.augmentationProperties);
logger.info(`Writing augmented user configuration file to ${codeScanningConfigFile}`);
logger.startGroup("Augmented user configuration file contents");
logger.info(yaml.dump(augmentedConfig));
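Note: the snippet below is an illustrative sketch and is not part of the diff. The hunk above replaces the inline augmentation logic with a call to generateCodeScanningConfig from ./config-utils; the input values and the expected result shown here are assumptions based on the removed code.

// Illustrative sketch only; input shapes and values are invented.
const { generateCodeScanningConfig } = require("./config-utils");

const userConfig = { queries: [{ uses: "security-extended" }] };
const augmentation = {
    queriesInput: [{ uses: "./custom-queries" }],
    queriesInputCombines: true, // combine with, rather than replace, the user's queries
    packsInput: undefined,
    packsInputCombines: false,
};
// Expected (per the removed inline logic): the user's queries plus the injected ones,
// e.g. { queries: [{ uses: "security-extended" }, { uses: "./custom-queries" }] }
const augmented = generateCodeScanningConfig(userConfig, augmentation);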
File diff suppressed because one or more lines are too long
lib/codeql.test.js — 60 changes (generated)
@@ -49,6 +49,7 @@ const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const cli_errors_1 = require("./cli-errors");
const codeql = __importStar(require("./codeql"));
const config_utils_1 = require("./config-utils");
const defaults = __importStar(require("./defaults.json"));
const doc_url_1 = require("./doc-url");
const languages_1 = require("./languages");
@@ -64,7 +65,7 @@ const NO_FEATURES = (0, testing_utils_1.createFeatures)([]);
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)("1.2.3");
stubConfig = (0, testing_utils_1.createTestConfig)({
languages: [languages_1.Language.cpp],
languages: [languages_1.KnownLanguage.cpp],
});
});
async function installIntoToolcache({ apiDetails = testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, cliVersion, isPinned, tagName, tmpDir, }) {
@@ -95,6 +96,15 @@ function mockApiDetails(apiDetails) {
process.env["GITHUB_SERVER_URL"] = apiDetails.url;
process.env["GITHUB_API_URL"] = apiDetails.apiURL || "";
}
async function stubCodeql() {
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
sinon
.stub(codeqlObject, "isTracedLanguage")
.withArgs(languages_1.KnownLanguage.cpp)
.resolves(true);
return codeqlObject;
}
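Note: the snippet below is an illustrative sketch and is not part of the diff. The new stubCodeql helper above centralizes the getVersion/isTracedLanguage stubbing that individual tests previously repeated; the test body here is invented.

// Illustrative sketch only; the test name and assertions are made up.
(0, ava_1.default)("example: stubCodeql reports CLI version 2.17.6", async (t) => {
    const runnerConstructorStub = stubToolRunnerConstructor();
    const codeqlObject = await stubCodeql();
    t.deepEqual(await codeqlObject.getVersion(), (0, testing_utils_1.makeVersionInfo)("2.17.6"));
    // No CLI process should have been spawned just to read the stubbed version.
    t.false(runnerConstructorStub.called);
});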
(0, ava_1.default)("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -327,8 +337,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
exec: async (t, augmentationProperties, configOverride, expectedConfig) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("1.0.0"));
|
||||
const codeqlObject = await stubCodeql();
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
...configOverride,
|
||||
@@ -349,18 +358,16 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
title: (providedTitle = "") => `databaseInitCluster() injected config: ${providedTitle}`,
|
||||
});
|
||||
(0, ava_1.default)("basic", injectedConfigMacro, {
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
}, {}, {});
|
||||
(0, ava_1.default)("injected packs from input", injectedConfigMacro, {
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
packsInput: ["xxx", "yyy"],
|
||||
}, {}, {
|
||||
packs: ["xxx", "yyy"],
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input with existing packs combines", injectedConfigMacro, {
|
||||
queriesInputCombines: false,
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
packsInputCombines: true,
|
||||
packsInput: ["xxx", "yyy"],
|
||||
}, {
|
||||
@@ -375,8 +382,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
},
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input with existing packs overrides", injectedConfigMacro, {
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
packsInput: ["xxx", "yyy"],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
@@ -389,8 +395,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
});
|
||||
// similar, but with queries
|
||||
(0, ava_1.default)("injected queries from input", injectedConfigMacro, {
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
}, {}, {
|
||||
queries: [
|
||||
@@ -403,8 +408,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries from input overrides", injectedConfigMacro, {
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
@@ -421,8 +425,8 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries from input combines", injectedConfigMacro, {
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
queriesInputCombines: true,
|
||||
packsInputCombines: false,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
@@ -442,6 +446,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries from input combines 2", injectedConfigMacro, {
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
queriesInputCombines: true,
|
||||
packsInputCombines: true,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
@@ -456,6 +461,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries and packs, but empty", injectedConfigMacro, {
|
||||
...config_utils_1.defaultAugmentationProperties,
|
||||
queriesInputCombines: true,
|
||||
packsInputCombines: true,
|
||||
queriesInput: [],
|
||||
@@ -469,8 +475,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
(0, ava_1.default)("passes a code scanning config AND qlconfig to the CLI", async (t) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
|
||||
const codeqlObject = await stubCodeql();
|
||||
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used a config file
|
||||
@@ -484,8 +489,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
(0, ava_1.default)("does not pass a qlconfig to the CLI when it is undefined", async (t) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
|
||||
const codeqlObject = await stubCodeql();
|
||||
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, undefined, // undefined qlconfigFile
|
||||
(0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
@@ -545,8 +549,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
|
||||
const cliStderr = `Running TRAP import for CodeQL database at /home/runner/work/_temp/codeql_databases/javascript...\n` +
|
||||
`${heapError}\n${datasetImportError}.`;
|
||||
stubToolRunnerConstructor(32, cliStderr);
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
|
||||
const codeqlObject = await stubCodeql();
|
||||
// io throws because of the test CodeQL object.
|
||||
sinon.stub(io, "which").resolves("");
|
||||
await t.throwsAsync(async () => await codeqlObject.finalizeDatabase("db", "--threads=2", "--ram=2048", false), {
|
||||
@@ -572,7 +575,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
|
||||
sinon.stub(codeqlObject, "resolveExtractor").resolves("/path/to/extractor");
|
||||
// io throws because of the test CodeQL object.
|
||||
sinon.stub(io, "which").resolves("");
|
||||
await t.throwsAsync(async () => await codeqlObject.runAutobuild(stubConfig, languages_1.Language.java), {
|
||||
await t.throwsAsync(async () => await codeqlObject.runAutobuild(stubConfig, languages_1.KnownLanguage.java), {
|
||||
instanceOf: util.ConfigurationError,
|
||||
message: "We were unable to automatically build your code. Please provide manual build steps. " +
|
||||
`See ${doc_url_1.DocUrl.AUTOMATIC_BUILD_FAILED} for more information. ` +
|
||||
@@ -585,12 +588,11 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
|
||||
(0, ava_1.default)("runTool truncates long autobuilder errors", async (t) => {
|
||||
const stderr = Array.from({ length: 20 }, (_, i) => `[2019-09-18 12:00:00] [autobuild] [ERROR] line${i + 1}`).join("\n");
|
||||
stubToolRunnerConstructor(1, stderr);
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
|
||||
const codeqlObject = await stubCodeql();
|
||||
sinon.stub(codeqlObject, "resolveExtractor").resolves("/path/to/extractor");
|
||||
// io throws because of the test CodeQL object.
|
||||
sinon.stub(io, "which").resolves("");
|
||||
await t.throwsAsync(async () => await codeqlObject.runAutobuild(stubConfig, languages_1.Language.java), {
|
||||
await t.throwsAsync(async () => await codeqlObject.runAutobuild(stubConfig, languages_1.KnownLanguage.java), {
|
||||
instanceOf: util.ConfigurationError,
|
||||
message: "We were unable to automatically build your code. Please provide manual build steps. " +
|
||||
`See ${doc_url_1.DocUrl.AUTOMATIC_BUILD_FAILED} for more information. ` +
|
||||
@@ -612,7 +614,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
|
||||
sinon.stub(io, "which").resolves("");
|
||||
await t.throwsAsync(async () => await codeqlObject.databaseRunQueries(stubConfig.dbLocation, []), {
|
||||
instanceOf: cli_errors_1.CliError,
|
||||
message: `Encountered a fatal error while running "codeql-for-testing database run-queries --expect-discarded-cache --intra-layer-parallelism --min-disk-free=1024 -v". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
|
||||
message: `Encountered a fatal error while running "codeql-for-testing database run-queries --intra-layer-parallelism --min-disk-free=1024 -v". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
|
||||
com.semmle.util.exception.CatastrophicError: An error occurred while evaluating ControlFlowGraph::ControlFlow::Root.isRootOf/1#dispred#f610e6ed/2@86282cc8
|
||||
Severe disk cache trouble (corruption or out of space) at /home/runner/work/_temp/codeql_databases/go/db-go/default/cache/pages/28/33.pack: Failed to write item to disk. See the logs for more details.`,
|
||||
});
|
||||
@@ -620,8 +622,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
|
||||
(0, ava_1.default)("runTool outputs last line of stderr if fatal error could not be found", async (t) => {
|
||||
const cliStderr = "line1\nline2\nline3\nline4\nline5";
|
||||
stubToolRunnerConstructor(32, cliStderr);
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
|
||||
const codeqlObject = await stubCodeql();
|
||||
// io throws because of the test CodeQL object.
|
||||
sinon.stub(io, "which").resolves("");
|
||||
await t.throwsAsync(async () => await codeqlObject.finalizeDatabase("db", "--threads=2", "--ram=2048", false), {
|
||||
@@ -632,8 +633,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
|
||||
});
|
||||
(0, ava_1.default)("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OPTIONS", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
|
||||
const codeqlObject = await stubCodeql();
|
||||
// io throws because of the test CodeQL object.
|
||||
sinon.stub(io, "which").resolves("");
|
||||
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
|
||||
|
||||
File diff suppressed because one or more lines are too long
lib/config-utils.js — 428 changes (generated)
@@ -42,11 +42,15 @@ exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
exports.getNoLanguagesError = getNoLanguagesError;
exports.getUnknownLanguagesError = getUnknownLanguagesError;
exports.getLanguagesInRepo = getLanguagesInRepo;
exports.getSupportedLanguageMap = getSupportedLanguageMap;
exports.hasActionsWorkflows = hasActionsWorkflows;
exports.getRawLanguagesInRepo = getRawLanguagesInRepo;
exports.getLanguages = getLanguages;
exports.getRawLanguagesNoAutodetect = getRawLanguagesNoAutodetect;
exports.getRawLanguages = getRawLanguages;
exports.getDefaultConfig = getDefaultConfig;
exports.calculateAugmentation = calculateAugmentation;
exports.getOverlayDatabaseMode = getOverlayDatabaseMode;
exports.parsePacksFromInput = parsePacksFromInput;
exports.parsePacksSpecification = parsePacksSpecification;
exports.validatePackSpecification = validatePackSpecification;
@@ -57,15 +61,20 @@ exports.getConfig = getConfig;
exports.generateRegistries = generateRegistries;
exports.wrapEnvironment = wrapEnvironment;
exports.parseBuildModeInput = parseBuildModeInput;
exports.generateCodeScanningConfig = generateCodeScanningConfig;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const perf_hooks_1 = require("perf_hooks");
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const actions_util_1 = require("./actions-util");
const api = __importStar(require("./api-client"));
const caching_utils_1 = require("./caching-utils");
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
const feature_flags_1 = require("./feature-flags");
const git_utils_1 = require("./git-utils");
const languages_1 = require("./languages");
const overlay_database_utils_1 = require("./overlay-database-utils");
const trap_caching_1 = require("./trap-caching");
const util_1 = require("./util");
// Property names from the user-supplied config file.
@@ -79,6 +88,10 @@ exports.defaultAugmentationProperties = {
packsInputCombines: false,
packsInput: undefined,
queriesInput: undefined,
qualityQueriesInput: undefined,
extraQueryExclusions: [],
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
};
function getPacksStrInvalid(packStr, configFile) {
return configFile
@@ -117,29 +130,63 @@ function getNoLanguagesError() {
function getUnknownLanguagesError(languages) {
return `Did not recognize the following languages: ${languages.join(", ")}`;
}
async function getSupportedLanguageMap(codeql) {
const resolveResult = await codeql.betterResolveLanguages();
const supportedLanguages = {};
// Populate canonical language names
for (const extractor of Object.keys(resolveResult.extractors)) {
// Require the language to be a known language.
// This is a temporary workaround since we have extractors that are not
// supported languages, such as `csv`, `html`, `properties`, `xml`, and
// `yaml`. We should replace this with a more robust solution in the future.
if (languages_1.KnownLanguage[extractor] !== undefined) {
supportedLanguages[extractor] = extractor;
}
}
// Populate language aliases
if (resolveResult.aliases) {
for (const [alias, extractor] of Object.entries(resolveResult.aliases)) {
supportedLanguages[alias] = extractor;
}
}
return supportedLanguages;
}
const baseWorkflowsPath = ".github/workflows";
/**
* Gets the set of languages in the current repository that are
* scannable by CodeQL.
* Determines if there exists a `.github/workflows` directory with at least
* one file in it, which we use as an indicator that there are Actions
* workflows in the workspace. This doesn't perfectly detect whether there
* are actually workflows, but should be a good approximation.
*
* Alternatively, we could check specifically for yaml files, or call the
* API to check if it knows about workflows.
*
* @returns True if the non-empty directory exists, false if not.
*/
async function getLanguagesInRepo(repository, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
function hasActionsWorkflows(sourceRoot) {
const workflowsPath = path.resolve(sourceRoot, baseWorkflowsPath);
const stats = fs.lstatSync(workflowsPath);
return (stats !== undefined &&
stats.isDirectory() &&
fs.readdirSync(workflowsPath).length > 0);
}
/**
* Gets the set of languages in the current repository.
*/
async function getRawLanguagesInRepo(repository, sourceRoot, logger) {
logger.debug(`Automatically detecting languages (${repository.owner}/${repository.repo})`);
const response = await api.getApiClient().rest.repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
});
logger.debug(`Languages API response: ${JSON.stringify(response)}`);
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
const languages = new Set();
for (const lang of Object.keys(response.data)) {
const parsedLang = (0, languages_1.parseLanguage)(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
}
const result = Object.keys(response.data).map((language) => language.trim().toLowerCase());
if (hasActionsWorkflows(sourceRoot)) {
logger.debug(`Found a .github/workflows directory`);
result.push("actions");
}
return [...languages];
logger.debug(`Raw languages in repository: ${result.join(", ")}`);
return result;
}
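Note: the snippet below is an illustrative sketch and is not part of the diff. The new getRawLanguagesInRepo above lower-cases the language names reported by the GitHub API and appends "actions" when a non-empty .github/workflows directory exists; the repository names and API data in this sketch are invented.

// Illustrative sketch only; repository, workspace path, and API response are invented.
// Suppose the languages API reports { TypeScript: 12345, Python: 678 } and the
// checkout contains .github/workflows/ci.yml. The helper is then expected to return:
const rawLanguages = await getRawLanguagesInRepo(
    { owner: "octo-org", repo: "octo-repo" },
    process.env.GITHUB_WORKSPACE,
    logger);
// => ["typescript", "python", "actions"]
// Names that no extractor supports are filtered out later via getSupportedLanguageMap.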
/**
* Get the languages to analyse.
@@ -151,48 +198,44 @@ async function getLanguagesInRepo(repository, logger) {
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages(codeQL, languagesInput, repository, logger) {
async function getLanguages(codeql, languagesInput, repository, sourceRoot, logger) {
// Obtain languages without filtering them.
const { rawLanguages, autodetected } = await getRawLanguages(languagesInput, repository, logger);
let languages = rawLanguages;
if (autodetected) {
const supportedLanguages = Object.keys(await codeQL.resolveLanguages());
languages = languages
.map(languages_1.parseLanguage)
.filter((value) => value && supportedLanguages.includes(value))
.map((value) => value);
logger.info(`Automatically detected languages: ${languages.join(", ")}`);
}
else {
const aliases = (await codeQL.betterResolveLanguages()).aliases;
if (aliases) {
languages = languages.map((lang) => aliases[lang] || lang);
const { rawLanguages, autodetected } = await getRawLanguages(languagesInput, repository, sourceRoot, logger);
const languageMap = await getSupportedLanguageMap(codeql);
const languagesSet = new Set();
const unknownLanguages = [];
// Make sure they are supported
for (const language of rawLanguages) {
const extractorName = languageMap[language];
if (extractorName === undefined) {
unknownLanguages.push(language);
}
logger.info(`Languages from configuration: ${languages.join(", ")}`);
else {
languagesSet.add(extractorName);
}
}
const languages = Array.from(languagesSet);
if (!autodetected && unknownLanguages.length > 0) {
throw new util_1.ConfigurationError(getUnknownLanguagesError(unknownLanguages));
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new util_1.ConfigurationError(getNoLanguagesError());
}
// Make sure they are supported
const parsedLanguages = [];
const unknownLanguages = [];
for (const language of languages) {
const parsedLanguage = (0, languages_1.parseLanguage)(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
}
else if (!parsedLanguages.includes(parsedLanguage)) {
parsedLanguages.push(parsedLanguage);
}
if (autodetected) {
logger.info(`Autodetected languages: ${languages.join(", ")}`);
}
// Any unknown languages here would have come directly from the input
// since we filter unknown languages coming from the GitHub API.
if (unknownLanguages.length > 0) {
throw new util_1.ConfigurationError(getUnknownLanguagesError(unknownLanguages));
else {
logger.info(`Languages from configuration: ${languages.join(", ")}`);
}
return parsedLanguages;
return languages;
}
function getRawLanguagesNoAutodetect(languagesInput) {
return (languagesInput || "")
.split(",")
.map((x) => x.trim().toLowerCase())
.filter((x) => x.length > 0);
}
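Note: the snippet below is an illustrative sketch and is not part of the diff. In the rewritten getLanguages above, each raw language name is looked up in the map returned by getSupportedLanguageMap, which folds aliases onto extractor names; the specific alias entries shown here are assumptions, since the real set comes from the CLI's language resolution.

// Illustrative sketch only; the alias entries below are assumed for illustration.
const languageMap = await getSupportedLanguageMap(codeql);
// e.g. { javascript: "javascript", typescript: "javascript",
//        java: "java", kotlin: "java", cpp: "cpp", ... }
const languages = Array.from(new Set(
    ["typescript", "kotlin"].map((lang) => languageMap[lang])));
// => ["javascript", "java"]; a name with no entry is collected in
// unknownLanguages and reported via getUnknownLanguagesError when it came
// from the `languages` input rather than from autodetection.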
/**
* Gets the set of languages in the current repository without checking to
@@ -204,30 +247,25 @@ async function getLanguages(codeQL, languagesInput, repository, logger) {
* @returns A tuple containing a list of languages in this repository that might be
* analyzable and whether or not this list was determined automatically.
*/
async function getRawLanguages(languagesInput, repository, logger) {
// Obtain from action input 'languages' if set
let rawLanguages = (languagesInput || "")
.split(",")
.map((x) => x.trim().toLowerCase())
.filter((x) => x.length > 0);
let autodetected;
if (rawLanguages.length) {
autodetected = false;
async function getRawLanguages(languagesInput, repository, sourceRoot, logger) {
// If the user has specified languages, use those.
const languagesFromInput = getRawLanguagesNoAutodetect(languagesInput);
if (languagesFromInput.length > 0) {
return { rawLanguages: languagesFromInput, autodetected: false };
}
else {
autodetected = true;
// Obtain all languages in the repo that can be analysed
rawLanguages = (await getLanguagesInRepo(repository, logger));
}
return { rawLanguages, autodetected };
// Otherwise, autodetect languages in the repository.
return {
rawLanguages: await getRawLanguagesInRepo(repository, sourceRoot, logger),
autodetected: true,
};
}
/**
* Get the default config for when the user has not supplied one.
* Get the default config, populated without user configuration file.
*/
async function getDefaultConfig({ languagesInput, queriesInput, packsInput, buildModeInput, dbLocation, trapCachingEnabled, dependencyCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeql, githubVersion, features, logger, }) {
const languages = await getLanguages(codeql, languagesInput, repository, logger);
async function getDefaultConfig({ languagesInput, queriesInput, qualityQueriesInput, packsInput, buildModeInput, dbLocation, trapCachingEnabled, dependencyCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeql, sourceRoot, githubVersion, features, logger, }) {
const languages = await getLanguages(codeql, languagesInput, repository, sourceRoot, logger);
const buildMode = await parseBuildModeInput(buildModeInput, languages, features, logger);
const augmentationProperties = calculateAugmentation(packsInput, queriesInput, languages);
const augmentationProperties = await calculateAugmentation(packsInput, queriesInput, qualityQueriesInput, languages);
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeql, languages, logger);
return {
languages,
@@ -256,11 +294,7 @@ async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logg
}
return { trapCaches, trapCacheDownloadTime };
}
/**
* Load the config from the given file.
*/
async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeInput, configFile, dbLocation, trapCachingEnabled, dependencyCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeql, workspacePath, githubVersion, apiDetails, features, logger, }) {
let parsedYAML;
async function loadUserConfig(configFile, workspacePath, apiDetails, tempDir) {
if (isLocal(configFile)) {
if (configFile !== userConfigFromActionPath(tempDir)) {
// If the config file is not generated by the Action, it should be relative to the workspace.
@@ -270,31 +304,11 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
throw new util_1.ConfigurationError(getConfigFileOutsideWorkspaceErrorMessage(configFile));
}
}
parsedYAML = getLocalConfig(configFile);
return getLocalConfig(configFile);
}
else {
parsedYAML = await getRemoteConfig(configFile, apiDetails);
return await getRemoteConfig(configFile, apiDetails);
}
const languages = await getLanguages(codeql, languagesInput, repository, logger);
const buildMode = await parseBuildModeInput(buildModeInput, languages, features, logger);
const augmentationProperties = calculateAugmentation(packsInput, queriesInput, languages);
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeql, languages, logger);
return {
languages,
buildMode,
originalUserInput: parsedYAML,
tempDir,
codeQLCmd: codeql.getPath(),
gitHubVersion: githubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
augmentationProperties,
trapCaches,
trapCacheDownloadTime,
dependencyCachingEnabled: (0, caching_utils_1.getCachingKind)(dependencyCachingEnabled),
};
}
/**
* Calculates how the codeql config file needs to be augmented before passing
@@ -315,16 +329,21 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
* not have exactly one language.
*/
// exported for testing.
function calculateAugmentation(rawPacksInput, rawQueriesInput, languages) {
async function calculateAugmentation(rawPacksInput, rawQueriesInput, rawQualityQueriesInput, languages) {
const packsInputCombines = shouldCombine(rawPacksInput);
const packsInput = parsePacksFromInput(rawPacksInput, languages, packsInputCombines);
const queriesInputCombines = shouldCombine(rawQueriesInput);
const queriesInput = parseQueriesFromInput(rawQueriesInput, queriesInputCombines);
const qualityQueriesInput = parseQueriesFromInput(rawQualityQueriesInput, false);
return {
packsInputCombines,
packsInput: packsInput?.[languages[0]],
queriesInput,
queriesInputCombines,
qualityQueriesInput,
extraQueryExclusions: [],
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
};
}
function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
@@ -339,6 +358,143 @@ function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
}
return trimmedInput.split(",").map((query) => ({ uses: query.trim() }));
}
const OVERLAY_ANALYSIS_FEATURES = {
|
||||
actions: feature_flags_1.Feature.OverlayAnalysisActions,
|
||||
cpp: feature_flags_1.Feature.OverlayAnalysisCpp,
|
||||
csharp: feature_flags_1.Feature.OverlayAnalysisCsharp,
|
||||
go: feature_flags_1.Feature.OverlayAnalysisGo,
|
||||
java: feature_flags_1.Feature.OverlayAnalysisJava,
|
||||
javascript: feature_flags_1.Feature.OverlayAnalysisJavascript,
|
||||
python: feature_flags_1.Feature.OverlayAnalysisPython,
|
||||
ruby: feature_flags_1.Feature.OverlayAnalysisRuby,
|
||||
rust: feature_flags_1.Feature.OverlayAnalysisRust,
|
||||
swift: feature_flags_1.Feature.OverlayAnalysisSwift,
|
||||
};
|
||||
const OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
|
||||
actions: feature_flags_1.Feature.OverlayAnalysisCodeScanningActions,
|
||||
cpp: feature_flags_1.Feature.OverlayAnalysisCodeScanningCpp,
|
||||
csharp: feature_flags_1.Feature.OverlayAnalysisCodeScanningCsharp,
|
||||
go: feature_flags_1.Feature.OverlayAnalysisCodeScanningGo,
|
||||
java: feature_flags_1.Feature.OverlayAnalysisCodeScanningJava,
|
||||
javascript: feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
python: feature_flags_1.Feature.OverlayAnalysisCodeScanningPython,
|
||||
ruby: feature_flags_1.Feature.OverlayAnalysisCodeScanningRuby,
|
||||
rust: feature_flags_1.Feature.OverlayAnalysisCodeScanningRust,
|
||||
swift: feature_flags_1.Feature.OverlayAnalysisCodeScanningSwift,
|
||||
};
|
||||
async function isOverlayAnalysisFeatureEnabled(repository, features, codeql, languages, codeScanningConfig) {
|
||||
// TODO: Remove the repository owner check once support for overlay analysis
|
||||
// stabilizes, and no more backward-incompatible changes are expected.
|
||||
if (!["github", "dsp-testing"].includes(repository.owner)) {
|
||||
return false;
|
||||
}
|
||||
if (!(await features.getValue(feature_flags_1.Feature.OverlayAnalysis, codeql))) {
|
||||
return false;
|
||||
}
|
||||
let enableForCodeScanningOnly = false;
|
||||
for (const language of languages) {
|
||||
const feature = OVERLAY_ANALYSIS_FEATURES[language];
|
||||
if (feature && (await features.getValue(feature, codeql))) {
|
||||
continue;
|
||||
}
|
||||
const codeScanningFeature = OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES[language];
|
||||
if (codeScanningFeature &&
|
||||
(await features.getValue(codeScanningFeature, codeql))) {
|
||||
enableForCodeScanningOnly = true;
|
||||
continue;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
if (enableForCodeScanningOnly) {
|
||||
// A code-scanning configuration runs only the (default) code-scanning suite
|
||||
// if the default queries are not disabled, and no packs, queries, or
|
||||
// query-filters are specified.
|
||||
return (codeScanningConfig["disable-default-queries"] !== true &&
|
||||
codeScanningConfig.packs === undefined &&
|
||||
codeScanningConfig.queries === undefined &&
|
||||
codeScanningConfig["query-filters"] === undefined);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Calculate and validate the overlay database mode and caching to use.
|
||||
*
|
||||
* - If the environment variable `CODEQL_OVERLAY_DATABASE_MODE` is set, use it.
|
||||
* In this case, the workflow is responsible for managing database storage and
|
||||
* retrieval, and the action will not perform overlay database caching. Think
|
||||
* of it as a "manual control" mode where the calling workflow is responsible
|
||||
* for making sure that everything is set up correctly.
|
||||
* - Otherwise, if `Feature.OverlayAnalysis` is enabled, calculate the mode
|
||||
* based on what we are analyzing. Think of it as a "automatic control" mode
|
||||
* where the action will do the right thing by itself.
|
||||
* - If we are analyzing a pull request, use `Overlay` with caching.
|
||||
* - If we are analyzing the default branch, use `OverlayBase` with caching.
|
||||
* - Otherwise, use `None`.
|
||||
*
|
||||
* For `Overlay` and `OverlayBase`, the function performs further checks and
|
||||
* reverts to `None` if any check should fail.
|
||||
*
|
||||
* @returns An object containing the overlay database mode and whether the
|
||||
* action should perform overlay-base database caching.
|
||||
*/
|
||||
async function getOverlayDatabaseMode(codeql, repository, features, languages, sourceRoot, buildMode, codeScanningConfig, logger) {
let overlayDatabaseMode = overlay_database_utils_1.OverlayDatabaseMode.None;
let useOverlayDatabaseCaching = false;
const modeEnv = process.env.CODEQL_OVERLAY_DATABASE_MODE;
// Any unrecognized CODEQL_OVERLAY_DATABASE_MODE value will be ignored and
// treated as if the environment variable was not set.
if (modeEnv === overlay_database_utils_1.OverlayDatabaseMode.Overlay ||
modeEnv === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase ||
modeEnv === overlay_database_utils_1.OverlayDatabaseMode.None) {
overlayDatabaseMode = modeEnv;
logger.info(`Setting overlay database mode to ${overlayDatabaseMode} ` +
"from the CODEQL_OVERLAY_DATABASE_MODE environment variable.");
}
else if (await isOverlayAnalysisFeatureEnabled(repository, features, codeql, languages, codeScanningConfig)) {
if ((0, actions_util_1.isAnalyzingPullRequest)()) {
overlayDatabaseMode = overlay_database_utils_1.OverlayDatabaseMode.Overlay;
useOverlayDatabaseCaching = true;
logger.info(`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing a pull request.");
}
else if (await (0, git_utils_1.isAnalyzingDefaultBranch)()) {
overlayDatabaseMode = overlay_database_utils_1.OverlayDatabaseMode.OverlayBase;
useOverlayDatabaseCaching = true;
logger.info(`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing the default branch.");
}
}
const nonOverlayAnalysis = {
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
};
if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.None) {
return nonOverlayAnalysis;
}
if (buildMode !== util_1.BuildMode.None &&
(await Promise.all(languages.map(async (l) => await codeql.isTracedLanguage(l)))).some(Boolean)) {
logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
`build-mode is set to "${buildMode}" instead of "none". ` +
"Falling back to creating a normal full database instead.");
return nonOverlayAnalysis;
}
if (!(await (0, util_1.codeQlVersionAtLeast)(codeql, overlay_database_utils_1.CODEQL_OVERLAY_MINIMUM_VERSION))) {
logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
`the CodeQL CLI is older than ${overlay_database_utils_1.CODEQL_OVERLAY_MINIMUM_VERSION}. ` +
"Falling back to creating a normal full database instead.");
return nonOverlayAnalysis;
}
if ((await (0, git_utils_1.getGitRoot)(sourceRoot)) === undefined) {
logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
`the source root "${sourceRoot}" is not inside a git repository. ` +
"Falling back to creating a normal full database instead.");
return nonOverlayAnalysis;
}
return {
overlayDatabaseMode,
useOverlayDatabaseCaching,
};
}
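A rough sketch of the precedence implemented above (illustrative only; the expected results are paraphrased from this function's logging and return values, and the string values are the `OverlayDatabaseMode` enum values used elsewhere in this diff):

// Manual control: a workflow-provided mode is honored as-is, but the action
// never performs overlay-base database caching in this case.
process.env.CODEQL_OVERLAY_DATABASE_MODE = "overlay-base";
// -> { overlayDatabaseMode: "overlay-base", useOverlayDatabaseCaching: false }

// Automatic control: with the variable unset (or unrecognized) and
// Feature.OverlayAnalysis enabled, pull requests get "overlay" and the
// default branch gets "overlay-base", both with caching; anything else is "none".
delete process.env.CODEQL_OVERLAY_DATABASE_MODE;
// PR analysis     -> { overlayDatabaseMode: "overlay", useOverlayDatabaseCaching: true }
// Default branch  -> { overlayDatabaseMode: "overlay-base", useOverlayDatabaseCaching: true }

// In every case the traced-language/build-mode, CLI-version, and git-root
// checks above can still downgrade the result to "none".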
|
||||
/**
|
||||
* Pack names must be in the form of `scope/name`, with only alpha-numeric characters,
|
||||
* and `-` allowed as long as not the first or last char.
|
||||
@@ -475,7 +631,6 @@ function userConfigFromActionPath(tempDir) {
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
async function initConfig(inputs) {
|
||||
let config;
|
||||
const { logger, tempDir } = inputs;
|
||||
// if configInput is set, it takes precedence over configFile
|
||||
if (inputs.configInput) {
|
||||
@@ -486,14 +641,31 @@ async function initConfig(inputs) {
|
||||
fs.writeFileSync(inputs.configFile, inputs.configInput);
|
||||
logger.debug(`Using config from action input: ${inputs.configFile}`);
|
||||
}
|
||||
// If no config file was provided create an empty one
|
||||
let userConfig = {};
|
||||
if (!inputs.configFile) {
|
||||
logger.debug("No configuration file was provided");
|
||||
config = await getDefaultConfig(inputs);
|
||||
}
|
||||
else {
|
||||
// Convince the type checker that inputs.configFile is defined.
|
||||
config = await loadConfig({ ...inputs, configFile: inputs.configFile });
|
||||
logger.debug(`Using configuration file: ${inputs.configFile}`);
|
||||
userConfig = await loadUserConfig(inputs.configFile, inputs.workspacePath, inputs.apiDetails, tempDir);
|
||||
}
|
||||
const config = await getDefaultConfig(inputs);
|
||||
const augmentationProperties = config.augmentationProperties;
|
||||
config.originalUserInput = userConfig;
|
||||
// The choice of overlay database mode depends on the selection of languages
|
||||
// and queries, which in turn depends on the user config and the augmentation
|
||||
// properties. So we need to calculate the overlay database mode after the
|
||||
// rest of the config has been populated.
|
||||
const { overlayDatabaseMode, useOverlayDatabaseCaching } = await getOverlayDatabaseMode(inputs.codeql, inputs.repository, inputs.features, config.languages, inputs.sourceRoot, config.buildMode, generateCodeScanningConfig(userConfig, augmentationProperties), logger);
|
||||
logger.info(`Using overlay database mode: ${overlayDatabaseMode} ` +
|
||||
`${useOverlayDatabaseCaching ? "with" : "without"} caching.`);
|
||||
augmentationProperties.overlayDatabaseMode = overlayDatabaseMode;
|
||||
augmentationProperties.useOverlayDatabaseCaching = useOverlayDatabaseCaching;
|
||||
if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay ||
|
||||
(await (0, diff_informed_analysis_utils_1.shouldPerformDiffInformedAnalysis)(inputs.codeql, inputs.features, logger))) {
|
||||
augmentationProperties.extraQueryExclusions.push({
|
||||
exclude: { tags: "exclude-from-incremental" },
|
||||
});
|
||||
}
|
||||
// Save the config so we can easily access it again in the future
|
||||
await saveConfig(config, logger);
|
||||
@@ -684,16 +856,68 @@ async function parseBuildModeInput(input, languages, features, logger) {
|
||||
if (!Object.values(util_1.BuildMode).includes(input)) {
|
||||
throw new util_1.ConfigurationError(`Invalid build mode: '${input}'. Supported build modes are: ${Object.values(util_1.BuildMode).join(", ")}.`);
|
||||
}
|
||||
if (languages.includes(languages_1.Language.csharp) &&
|
||||
if (languages.includes(languages_1.KnownLanguage.csharp) &&
|
||||
(await features.getValue(feature_flags_1.Feature.DisableCsharpBuildless))) {
|
||||
logger.warning("Scanning C# code without a build is temporarily unavailable. Falling back to 'autobuild' build mode.");
|
||||
return util_1.BuildMode.Autobuild;
|
||||
}
|
||||
if (languages.includes(languages_1.Language.java) &&
|
||||
if (languages.includes(languages_1.KnownLanguage.java) &&
|
||||
(await features.getValue(feature_flags_1.Feature.DisableJavaBuildlessEnabled))) {
|
||||
logger.warning("Scanning Java code without a build is temporarily unavailable. Falling back to 'autobuild' build mode.");
|
||||
return util_1.BuildMode.Autobuild;
|
||||
}
|
||||
return input;
|
||||
}
|
||||
function generateCodeScanningConfig(originalUserInput, augmentationProperties) {
// make a copy so we can modify it
const augmentedConfig = (0, util_1.cloneObject)(originalUserInput);
// Inject the queries from the input
if (augmentationProperties.queriesInput) {
if (augmentationProperties.queriesInputCombines) {
augmentedConfig.queries = (augmentedConfig.queries || []).concat(augmentationProperties.queriesInput);
}
else {
augmentedConfig.queries = augmentationProperties.queriesInput;
}
}
if (augmentedConfig.queries?.length === 0) {
delete augmentedConfig.queries;
}
// Inject the packs from the input
if (augmentationProperties.packsInput) {
if (augmentationProperties.packsInputCombines) {
// At this point, we already know that this is a single-language analysis
if (Array.isArray(augmentedConfig.packs)) {
augmentedConfig.packs = (augmentedConfig.packs || []).concat(augmentationProperties.packsInput);
}
else if (!augmentedConfig.packs) {
augmentedConfig.packs = augmentationProperties.packsInput;
}
else {
// At this point, we know there is only one language.
// If there were more than one language, an error would already have been thrown.
const language = Object.keys(augmentedConfig.packs)[0];
augmentedConfig.packs[language] = augmentedConfig.packs[language].concat(augmentationProperties.packsInput);
}
}
else {
augmentedConfig.packs = augmentationProperties.packsInput;
}
}
if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
delete augmentedConfig.packs;
}
augmentedConfig["query-filters"] = [
// Ordering matters. If the first filter is an inclusion, it implicitly
// excludes all queries that are not included. If it is an exclusion,
// it implicitly includes all queries that are not excluded. So user
// filters (if any) should always be first to preserve intent.
...(augmentedConfig["query-filters"] || []),
...augmentationProperties.extraQueryExclusions,
];
if (augmentedConfig["query-filters"]?.length === 0) {
delete augmentedConfig["query-filters"];
}
return augmentedConfig;
}
//# sourceMappingURL=config-utils.js.map
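The ordering rule for query-filters can be illustrated with a small sketch (hypothetical input; `defaultAugmentationProperties` is assumed to be the exported defaults referenced by the tests below):

// User filters stay first, so an "include" filter keeps its meaning of
// restricting the run to the included queries; the action's extra exclusions
// are appended afterwards.
const userInput = {
    "query-filters": [{ include: { "security-severity": "high" } }],
};
const augmented = generateCodeScanningConfig(userInput, {
    ...defaultAugmentationProperties,
    extraQueryExclusions: [{ exclude: { tags: "exclude-from-incremental" } }],
});
// augmented["query-filters"] is now:
//   [{ include: { "security-severity": "high" } },
//    { exclude: { tags: "exclude-from-incremental" } }]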
|
||||
File diff suppressed because one or more lines are too long
605	lib/config-utils.test.js (generated)
@@ -42,13 +42,16 @@ const github = __importStar(require("@actions/github"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const caching_utils_1 = require("./caching-utils");
|
||||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const gitUtils = __importStar(require("./git-utils"));
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const overlay_database_utils_1 = require("./overlay-database-utils");
|
||||
const repository_1 = require("./repository");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
@@ -58,6 +61,7 @@ function createTestInitConfigInputs(overrides) {
|
||||
return Object.assign({}, {
|
||||
languagesInput: undefined,
|
||||
queriesInput: undefined,
|
||||
qualityQueriesInput: undefined,
|
||||
packsInput: undefined,
|
||||
configFile: undefined,
|
||||
dbLocation: undefined,
|
||||
@@ -70,8 +74,18 @@ function createTestInitConfigInputs(overrides) {
|
||||
debugDatabaseName: "",
|
||||
repository: { owner: "github", repo: "example" },
|
||||
tempDir: "",
|
||||
codeql: {},
|
||||
codeql: (0, codeql_1.createStubCodeQL)({
|
||||
async betterResolveLanguages() {
|
||||
return {
|
||||
extractors: {
|
||||
html: [{ extractor_root: "" }],
|
||||
javascript: [{ extractor_root: "" }],
|
||||
},
|
||||
};
|
||||
},
|
||||
}),
|
||||
workspacePath: "",
|
||||
sourceRoot: "",
|
||||
githubVersion,
|
||||
apiDetails: {
|
||||
auth: "token",
|
||||
@@ -121,7 +135,15 @@ function mockListLanguages(languages) {
|
||||
return await (0, util_1.withTmpDir)(async (tempDir) => {
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
const languages = "javascript,python";
|
||||
const codeql = (0, codeql_1.setCodeQL)({
|
||||
const codeql = (0, codeql_1.createStubCodeQL)({
|
||||
async betterResolveLanguages() {
|
||||
return {
|
||||
extractors: {
|
||||
javascript: [{ extractor_root: "" }],
|
||||
python: [{ extractor_root: "" }],
|
||||
},
|
||||
};
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
@@ -154,7 +176,15 @@ function mockListLanguages(languages) {
|
||||
(0, ava_1.default)("loading config saves config", async (t) => {
|
||||
return await (0, util_1.withTmpDir)(async (tempDir) => {
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
const codeql = (0, codeql_1.setCodeQL)({
|
||||
const codeql = (0, codeql_1.createStubCodeQL)({
|
||||
async betterResolveLanguages() {
|
||||
return {
|
||||
extractors: {
|
||||
javascript: [{ extractor_root: "" }],
|
||||
python: [{ extractor_root: "" }],
|
||||
},
|
||||
};
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
@@ -198,7 +228,6 @@ function mockListLanguages(languages) {
|
||||
await configUtils.initConfig(createTestInitConfigInputs({
|
||||
configFile: "../input",
|
||||
tempDir,
|
||||
codeql: (0, codeql_1.getCachedCodeQL)(),
|
||||
workspacePath: tempDir,
|
||||
}));
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -216,7 +245,6 @@ function mockListLanguages(languages) {
|
||||
await configUtils.initConfig(createTestInitConfigInputs({
|
||||
configFile,
|
||||
tempDir,
|
||||
codeql: (0, codeql_1.getCachedCodeQL)(),
|
||||
workspacePath: tempDir,
|
||||
}));
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -236,7 +264,6 @@ function mockListLanguages(languages) {
|
||||
languagesInput,
|
||||
configFile,
|
||||
tempDir,
|
||||
codeql: (0, codeql_1.getCachedCodeQL)(),
|
||||
workspacePath: tempDir,
|
||||
}));
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -248,7 +275,14 @@ function mockListLanguages(languages) {
|
||||
});
|
||||
(0, ava_1.default)("load non-empty input", async (t) => {
|
||||
return await (0, util_1.withTmpDir)(async (tempDir) => {
|
||||
const codeql = (0, codeql_1.setCodeQL)({
|
||||
const codeql = (0, codeql_1.createStubCodeQL)({
|
||||
async betterResolveLanguages() {
|
||||
return {
|
||||
extractors: {
|
||||
javascript: [{ extractor_root: "" }],
|
||||
},
|
||||
};
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
@@ -279,7 +313,7 @@ function mockListLanguages(languages) {
|
||||
fs.mkdirSync(path.join(tempDir, "foo"));
|
||||
// And the config we expect it to parse to
|
||||
const expectedConfig = {
|
||||
languages: [languages_1.Language.javascript],
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
buildMode: util_1.BuildMode.None,
|
||||
originalUserInput: {
|
||||
name: "my config",
|
||||
@@ -355,7 +389,15 @@ function queriesToResolvedQueryForm(queries) {
|
||||
`;
|
||||
fs.mkdirSync(path.join(tempDir, "foo"));
|
||||
const resolveQueriesArgs = [];
|
||||
const codeql = (0, codeql_1.setCodeQL)({
|
||||
const codeql = (0, codeql_1.createStubCodeQL)({
|
||||
async betterResolveLanguages() {
|
||||
return {
|
||||
extractors: {
|
||||
javascript: [{ extractor_root: "" }],
|
||||
python: [{ extractor_root: "" }],
|
||||
},
|
||||
};
|
||||
},
|
||||
async resolveQueries(queries, extraSearchPath) {
|
||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||
return queriesToResolvedQueryForm(queries);
|
||||
@@ -379,7 +421,14 @@ function queriesToResolvedQueryForm(queries) {
|
||||
});
|
||||
(0, ava_1.default)("API client used when reading remote config", async (t) => {
|
||||
return await (0, util_1.withTmpDir)(async (tempDir) => {
|
||||
const codeql = (0, codeql_1.setCodeQL)({
|
||||
const codeql = (0, codeql_1.createStubCodeQL)({
|
||||
async betterResolveLanguages() {
|
||||
return {
|
||||
extractors: {
|
||||
javascript: [{ extractor_root: "" }],
|
||||
},
|
||||
};
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
@@ -434,7 +483,6 @@ function queriesToResolvedQueryForm(queries) {
|
||||
await configUtils.initConfig(createTestInitConfigInputs({
|
||||
configFile: repoReference,
|
||||
tempDir,
|
||||
codeql: (0, codeql_1.getCachedCodeQL)(),
|
||||
workspacePath: tempDir,
|
||||
}));
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -455,7 +503,6 @@ function queriesToResolvedQueryForm(queries) {
|
||||
await configUtils.initConfig(createTestInitConfigInputs({
|
||||
configFile: repoReference,
|
||||
tempDir,
|
||||
codeql: (0, codeql_1.getCachedCodeQL)(),
|
||||
workspacePath: tempDir,
|
||||
}));
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -468,7 +515,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
(0, ava_1.default)("No detected languages", async (t) => {
|
||||
return await (0, util_1.withTmpDir)(async (tempDir) => {
|
||||
mockListLanguages([]);
|
||||
const codeql = (0, codeql_1.setCodeQL)({
|
||||
const codeql = (0, codeql_1.createStubCodeQL)({
|
||||
async resolveLanguages() {
|
||||
return {};
|
||||
},
|
||||
@@ -496,7 +543,6 @@ function queriesToResolvedQueryForm(queries) {
|
||||
await configUtils.initConfig(createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
tempDir,
|
||||
codeql: (0, codeql_1.getCachedCodeQL)(),
|
||||
workspacePath: tempDir,
|
||||
}));
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -526,17 +572,17 @@ const parsePacksErrorMacro = ava_1.default.macro({
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
const invalidPackNameMacro = ava_1.default.macro({
|
||||
exec: (t, name) => parsePacksErrorMacro.exec(t, name, [languages_1.Language.cpp], new RegExp(`^"${name}" is not a valid pack$`)),
|
||||
exec: (t, name) => parsePacksErrorMacro.exec(t, name, [languages_1.KnownLanguage.cpp], new RegExp(`^"${name}" is not a valid pack$`)),
|
||||
title: (_providedTitle, arg) => `Invalid pack string: ${arg}`,
|
||||
});
|
||||
(0, ava_1.default)("no packs", parsePacksMacro, "", [], undefined);
|
||||
(0, ava_1.default)("two packs", parsePacksMacro, "a/b,c/d@1.2.3", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
(0, ava_1.default)("two packs", parsePacksMacro, "a/b,c/d@1.2.3", [languages_1.KnownLanguage.cpp], {
|
||||
[languages_1.KnownLanguage.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("two packs with spaces", parsePacksMacro, " a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
(0, ava_1.default)("two packs with spaces", parsePacksMacro, " a/b , c/d@1.2.3 ", [languages_1.KnownLanguage.cpp], {
|
||||
[languages_1.KnownLanguage.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("two packs with language", parsePacksErrorMacro, "a/b,c/d@1.2.3", [languages_1.Language.cpp, languages_1.Language.java], new RegExp("Cannot specify a 'packs' input in a multi-language analysis. " +
|
||||
(0, ava_1.default)("two packs with language", parsePacksErrorMacro, "a/b,c/d@1.2.3", [languages_1.KnownLanguage.cpp, languages_1.KnownLanguage.java], new RegExp("Cannot specify a 'packs' input in a multi-language analysis. " +
|
||||
"Use a codeql-config.yml file instead and specify packs by language."));
|
||||
(0, ava_1.default)("packs with other valid names", parsePacksMacro, [
|
||||
// ranges are ok
|
||||
@@ -555,8 +601,8 @@ const invalidPackNameMacro = ava_1.default.macro({
|
||||
// this is valid, too. It will fail if it doesn't match a path
|
||||
// (globbing is not done)
|
||||
"c/d@1.2.3:+*)_(",
|
||||
].join(","), [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
].join(","), [languages_1.KnownLanguage.cpp], {
|
||||
[languages_1.KnownLanguage.cpp]: [
|
||||
"c/d@1.0",
|
||||
"c/d@~1.0.0",
|
||||
"c/d@~1.0.0:a/b",
|
||||
@@ -623,53 +669,63 @@ const packSpecPrettyPrintingMacro = ava_1.default.macro({
|
||||
});
|
||||
const mockLogger = (0, logging_1.getRunnerLogger)(true);
|
||||
const calculateAugmentationMacro = ava_1.default.macro({
|
||||
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
|
||||
const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
|
||||
exec: async (t, _title, rawPacksInput, rawQueriesInput, rawQualityQueriesInput, languages, expectedAugmentationProperties) => {
|
||||
const actualAugmentationProperties = await configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, rawQualityQueriesInput, languages);
|
||||
t.deepEqual(actualAugmentationProperties, expectedAugmentationProperties);
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation: ${title}`,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "All empty", undefined, undefined, [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
queriesInput: undefined,
|
||||
packsInputCombines: false,
|
||||
packsInput: undefined,
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "All empty", undefined, undefined, undefined, [languages_1.KnownLanguage.javascript], {
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With queries", undefined, " a, b , c, d", [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With queries", undefined, " a, b , c, d", undefined, [languages_1.KnownLanguage.javascript], {
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
packsInputCombines: false,
|
||||
packsInput: undefined,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With queries combining", undefined, " + a, b , c, d ", [languages_1.Language.javascript], {
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With queries combining", undefined, " + a, b , c, d ", undefined, [languages_1.KnownLanguage.javascript], {
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
queriesInputCombines: true,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
packsInputCombines: false,
|
||||
packsInput: undefined,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With packs", " codeql/a , codeql/b , codeql/c , codeql/d ", undefined, [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
queriesInput: undefined,
|
||||
packsInputCombines: false,
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With quality queries", undefined, undefined, " a, b , c, d", [languages_1.KnownLanguage.javascript], {
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
qualityQueriesInput: [
|
||||
{ uses: "a" },
|
||||
{ uses: "b" },
|
||||
{ uses: "c" },
|
||||
{ uses: "d" },
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With security and quality queries", undefined, " a, b , c, d", "e, f , g,h", [languages_1.KnownLanguage.javascript], {
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
qualityQueriesInput: [
|
||||
{ uses: "e" },
|
||||
{ uses: "f" },
|
||||
{ uses: "g" },
|
||||
{ uses: "h" },
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With packs", " codeql/a , codeql/b , codeql/c , codeql/d ", undefined, undefined, [languages_1.KnownLanguage.javascript], {
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With packs combining", " + codeql/a, codeql/b, codeql/c, codeql/d", undefined, [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
queriesInput: undefined,
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With packs combining", " + codeql/a, codeql/b, codeql/c, codeql/d", undefined, undefined, [languages_1.KnownLanguage.javascript], {
|
||||
...configUtils.defaultAugmentationProperties,
|
||||
packsInputCombines: true,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
});
|
||||
const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedError) => {
|
||||
t.throws(() => configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages), { message: expectedError });
|
||||
exec: async (t, _title, rawPacksInput, rawQueriesInput, rawQualityQueriesInput, languages, expectedError) => {
|
||||
await t.throwsAsync(() => configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, rawQualityQueriesInput, languages), { message: expectedError });
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation Error: ${title}`,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (queries)", undefined, " + ", [languages_1.Language.javascript], /The workflow property "queries" is invalid/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (packs)", " + ", undefined, [languages_1.Language.javascript], /The workflow property "packs" is invalid/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with multiple languages", " + a/b, c/d ", undefined, [languages_1.Language.javascript, languages_1.Language.java], /Cannot specify a 'packs' input in a multi-language analysis/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with no languages", " + a/b, c/d ", undefined, [], /No languages specified/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Invalid packs", " a-pack-without-a-scope ", undefined, [languages_1.Language.javascript], /"a-pack-without-a-scope" is not a valid pack/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (queries)", undefined, " + ", undefined, [languages_1.KnownLanguage.javascript], /The workflow property "queries" is invalid/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (packs)", " + ", undefined, undefined, [languages_1.KnownLanguage.javascript], /The workflow property "packs" is invalid/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with multiple languages", " + a/b, c/d ", undefined, undefined, [languages_1.KnownLanguage.javascript, languages_1.KnownLanguage.java], /Cannot specify a 'packs' input in a multi-language analysis/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with no languages", " + a/b, c/d ", undefined, undefined, [], /No languages specified/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Invalid packs", " a-pack-without-a-scope ", undefined, undefined, [languages_1.KnownLanguage.javascript], /"a-pack-without-a-scope" is not a valid pack/);
|
||||
(0, ava_1.default)("no generateRegistries when registries is undefined", async (t) => {
|
||||
return await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const registriesInput = undefined;
|
||||
@@ -701,7 +757,6 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
[
|
||||
{
|
||||
name: "languages from input",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "jAvAscript, \n jaVa",
|
||||
languagesInRepository: ["SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "java"],
|
||||
@@ -709,7 +764,6 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
},
|
||||
{
|
||||
name: "languages from github api",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "",
|
||||
languagesInRepository: [" jAvAscript\n \t", " jaVa", "SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "java"],
|
||||
@@ -717,7 +771,6 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
},
|
||||
{
|
||||
name: "aliases from input",
|
||||
codeqlResolvedLanguages: ["javascript", "csharp", "cpp", "java", "python"],
|
||||
languagesInput: " typEscript\n \t, C#, c , KoTlin",
|
||||
languagesInRepository: ["SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "csharp", "cpp", "java"],
|
||||
@@ -725,7 +778,6 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
},
|
||||
{
|
||||
name: "duplicate languages from input",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "jAvAscript, \n jaVa, kotlin, typescript",
|
||||
languagesInRepository: ["SwiFt", "other"],
|
||||
expectedLanguages: ["javascript", "java"],
|
||||
@@ -733,7 +785,6 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
},
|
||||
{
|
||||
name: "aliases from github api",
|
||||
codeqlResolvedLanguages: ["javascript", "csharp", "cpp", "java", "python"],
|
||||
languagesInput: "",
|
||||
languagesInRepository: [" typEscript\n \t", " C#", "c", "other"],
|
||||
expectedLanguages: ["javascript", "csharp", "cpp"],
|
||||
@@ -741,7 +792,6 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
},
|
||||
{
|
||||
name: "no languages",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: true,
|
||||
@@ -749,30 +799,56 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
},
|
||||
{
|
||||
name: "unrecognized languages from input",
|
||||
codeqlResolvedLanguages: ["javascript", "java", "python"],
|
||||
languagesInput: "a, b, c, javascript",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: false,
|
||||
expectedError: configUtils.getUnknownLanguagesError(["a", "b"]),
|
||||
},
|
||||
{
|
||||
name: "extractors that aren't languages aren't included (specified)",
|
||||
languagesInput: "html",
|
||||
languagesInRepository: [],
|
||||
expectedApiCall: false,
|
||||
expectedError: configUtils.getUnknownLanguagesError(["html"]),
|
||||
},
|
||||
{
|
||||
name: "extractors that aren't languages aren't included (autodetected)",
|
||||
languagesInput: "",
|
||||
languagesInRepository: ["html", "javascript"],
|
||||
expectedApiCall: true,
|
||||
expectedLanguages: ["javascript"],
|
||||
},
|
||||
].forEach((args) => {
|
||||
(0, ava_1.default)(`getLanguages: ${args.name}`, async (t) => {
|
||||
const mockRequest = (0, testing_utils_1.mockLanguagesInRepo)(args.languagesInRepository);
|
||||
const languages = args.codeqlResolvedLanguages.reduce((acc, lang) => ({
|
||||
...acc,
|
||||
[lang]: true,
|
||||
}), {});
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
resolveLanguages: () => Promise.resolve(languages),
|
||||
const stubExtractorEntry = {
|
||||
extractor_root: "",
|
||||
};
|
||||
const codeQL = (0, codeql_1.createStubCodeQL)({
|
||||
betterResolveLanguages: () => Promise.resolve({
|
||||
aliases: {
|
||||
"c#": languages_1.KnownLanguage.csharp,
|
||||
c: languages_1.KnownLanguage.cpp,
|
||||
kotlin: languages_1.KnownLanguage.java,
|
||||
typescript: languages_1.KnownLanguage.javascript,
|
||||
},
|
||||
extractors: {
|
||||
cpp: [stubExtractorEntry],
|
||||
csharp: [stubExtractorEntry],
|
||||
java: [stubExtractorEntry],
|
||||
javascript: [stubExtractorEntry],
|
||||
python: [stubExtractorEntry],
|
||||
},
|
||||
}),
|
||||
});
|
||||
if (args.expectedLanguages) {
|
||||
// happy path
|
||||
const actualLanguages = await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, mockLogger);
|
||||
const actualLanguages = await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, ".", mockLogger);
|
||||
t.deepEqual(actualLanguages.sort(), args.expectedLanguages.sort());
|
||||
}
|
||||
else {
|
||||
// there is an error
|
||||
await t.throwsAsync(async () => await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, mockLogger), { message: args.expectedError });
|
||||
await t.throwsAsync(async () => await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, ".", mockLogger), { message: args.expectedError });
|
||||
}
|
||||
t.deepEqual(mockRequest.called, args.expectedApiCall);
|
||||
});
|
||||
@@ -780,12 +856,12 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
for (const { displayName, language, feature } of [
|
||||
{
|
||||
displayName: "Java",
|
||||
language: languages_1.Language.java,
|
||||
language: languages_1.KnownLanguage.java,
|
||||
feature: feature_flags_1.Feature.DisableJavaBuildlessEnabled,
|
||||
},
|
||||
{
|
||||
displayName: "C#",
|
||||
language: languages_1.Language.csharp,
|
||||
language: languages_1.KnownLanguage.csharp,
|
||||
feature: feature_flags_1.Feature.DisableCsharpBuildless,
|
||||
},
|
||||
]) {
|
||||
@@ -797,7 +873,7 @@ for (const { displayName, language, feature } of [
|
||||
});
|
||||
(0, ava_1.default)(`Build mode not overridden for other languages when disable ${displayName} buildless feature flag enabled`, async (t) => {
|
||||
const messages = [];
|
||||
const buildMode = await configUtils.parseBuildModeInput("none", [languages_1.Language.python], (0, testing_utils_1.createFeatures)([feature]), (0, testing_utils_1.getRecordingLogger)(messages));
|
||||
const buildMode = await configUtils.parseBuildModeInput("none", [languages_1.KnownLanguage.python], (0, testing_utils_1.createFeatures)([feature]), (0, testing_utils_1.getRecordingLogger)(messages));
|
||||
t.is(buildMode, util_1.BuildMode.None);
|
||||
t.deepEqual(messages, []);
|
||||
});
|
||||
@@ -813,4 +889,397 @@ for (const { displayName, language, feature } of [
|
||||
]);
|
||||
});
|
||||
}
|
||||
const defaultOverlayDatabaseModeTestSetup = {
|
||||
overlayDatabaseEnvVar: undefined,
|
||||
features: [],
|
||||
isPullRequest: false,
|
||||
isDefaultBranch: false,
|
||||
repositoryOwner: "github",
|
||||
buildMode: util_1.BuildMode.None,
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
codeqlVersion: "2.21.0",
|
||||
gitRoot: "/some/git/root",
|
||||
codeScanningConfig: {},
|
||||
};
|
||||
const getOverlayDatabaseModeMacro = ava_1.default.macro({
|
||||
exec: async (t, _title, setupOverrides, expected) => {
|
||||
return await (0, util_1.withTmpDir)(async (tempDir) => {
|
||||
const messages = [];
|
||||
const logger = (0, testing_utils_1.getRecordingLogger)(messages);
|
||||
// Save the original environment
|
||||
const originalEnv = { ...process.env };
|
||||
try {
|
||||
const setup = {
|
||||
...defaultOverlayDatabaseModeTestSetup,
|
||||
...setupOverrides,
|
||||
};
|
||||
// Set up environment variable if specified
|
||||
delete process.env.CODEQL_OVERLAY_DATABASE_MODE;
|
||||
if (setup.overlayDatabaseEnvVar !== undefined) {
|
||||
process.env.CODEQL_OVERLAY_DATABASE_MODE =
|
||||
setup.overlayDatabaseEnvVar;
|
||||
}
|
||||
// Mock feature flags
|
||||
const features = (0, testing_utils_1.createFeatures)(setup.features);
|
||||
// Mock isAnalyzingPullRequest function
|
||||
sinon
|
||||
.stub(actionsUtil, "isAnalyzingPullRequest")
|
||||
.returns(setup.isPullRequest);
|
||||
// Mock repository owner
|
||||
const repository = {
|
||||
owner: setup.repositoryOwner,
|
||||
repo: "test-repo",
|
||||
};
|
||||
// Set up CodeQL mock
|
||||
const codeql = (0, testing_utils_1.mockCodeQLVersion)(setup.codeqlVersion);
|
||||
// Mock traced languages
|
||||
sinon
|
||||
.stub(codeql, "isTracedLanguage")
|
||||
.callsFake(async (lang) => {
|
||||
return [languages_1.KnownLanguage.java].includes(lang);
|
||||
});
|
||||
// Mock git root detection
|
||||
if (setup.gitRoot !== undefined) {
|
||||
sinon.stub(gitUtils, "getGitRoot").resolves(setup.gitRoot);
|
||||
}
|
||||
// Mock default branch detection
|
||||
sinon
|
||||
.stub(gitUtils, "isAnalyzingDefaultBranch")
|
||||
.resolves(setup.isDefaultBranch);
|
||||
const result = await configUtils.getOverlayDatabaseMode(codeql, repository, features, setup.languages, tempDir, // sourceRoot
|
||||
setup.buildMode, setup.codeScanningConfig, logger);
|
||||
t.deepEqual(result, expected);
|
||||
}
|
||||
finally {
|
||||
// Restore the original environment
|
||||
process.env = originalEnv;
|
||||
}
|
||||
});
|
||||
},
|
||||
title: (_, title) => `getOverlayDatabaseMode: ${title}`,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Environment variable override - Overlay", {
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.Overlay,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Environment variable override - OverlayBase", {
|
||||
overlayDatabaseEnvVar: "overlay-base",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.OverlayBase,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Environment variable override - None", {
|
||||
overlayDatabaseEnvVar: "none",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Ignore invalid environment variable", {
|
||||
overlayDatabaseEnvVar: "invalid-mode",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Ignore feature flag when analyzing non-default branch", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis, feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay-base database on default branch when feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis, feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.OverlayBase,
|
||||
useOverlayDatabaseCaching: true,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay-base database on default branch when feature enabled with custom analysis", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis, feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
codeScanningConfig: {
|
||||
packs: ["some-custom-pack@1.0.0"],
|
||||
},
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.OverlayBase,
|
||||
useOverlayDatabaseCaching: true,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay-base database on default branch when code-scanning feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.OverlayBase,
|
||||
useOverlayDatabaseCaching: true,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay-base database on default branch when code-scanning feature enabled with disable-default-queries", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
"disable-default-queries": true,
|
||||
},
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay-base database on default branch when code-scanning feature enabled with packs", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
packs: ["some-custom-pack@1.0.0"],
|
||||
},
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay-base database on default branch when code-scanning feature enabled with queries", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
queries: [{ uses: "some-query.ql" }],
|
||||
},
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay-base database on default branch when code-scanning feature enabled with query-filters", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
"query-filters": [{ include: { "security-severity": "high" } }],
|
||||
},
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay-base database on default branch when only language-specific feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay-base database on default branch when only code-scanning feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript],
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay-base database on default branch when language-specific feature disabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis],
|
||||
isDefaultBranch: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay analysis on PR when feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis, feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.Overlay,
|
||||
useOverlayDatabaseCaching: true,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay analysis on PR when feature enabled with custom analysis", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis, feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
codeScanningConfig: {
|
||||
packs: ["some-custom-pack@1.0.0"],
|
||||
},
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.Overlay,
|
||||
useOverlayDatabaseCaching: true,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay analysis on PR when code-scanning feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.Overlay,
|
||||
useOverlayDatabaseCaching: true,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay analysis on PR when code-scanning feature enabled with disable-default-queries", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
"disable-default-queries": true,
|
||||
},
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay analysis on PR when code-scanning feature enabled with packs", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
packs: ["some-custom-pack@1.0.0"],
|
||||
},
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay analysis on PR when code-scanning feature enabled with queries", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
queries: [{ uses: "some-query.ql" }],
|
||||
},
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay analysis on PR when code-scanning feature enabled with query-filters", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [
|
||||
feature_flags_1.Feature.OverlayAnalysis,
|
||||
feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript,
|
||||
],
|
||||
codeScanningConfig: {
|
||||
"query-filters": [{ include: { "security-severity": "high" } }],
|
||||
},
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay analysis on PR when only language-specific feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay analysis on PR when only code-scanning feature enabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysisCodeScanningJavascript],
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay analysis on PR when language-specific feature disabled", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis],
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay PR analysis by env for dsp-testing", {
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
repositoryOwner: "dsp-testing",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.Overlay,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay PR analysis by env for other-org", {
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
repositoryOwner: "other-org",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.Overlay,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Overlay PR analysis by feature flag for dsp-testing", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis, feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
isPullRequest: true,
|
||||
repositoryOwner: "dsp-testing",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.Overlay,
|
||||
useOverlayDatabaseCaching: true,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "No overlay PR analysis by feature flag for other-org", {
|
||||
languages: [languages_1.KnownLanguage.javascript],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis, feature_flags_1.Feature.OverlayAnalysisJavascript],
|
||||
isPullRequest: true,
|
||||
repositoryOwner: "other-org",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Fallback due to autobuild with traced language", {
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
buildMode: util_1.BuildMode.Autobuild,
|
||||
languages: [languages_1.KnownLanguage.java],
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Fallback due to no build mode with traced language", {
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
buildMode: undefined,
|
||||
languages: [languages_1.KnownLanguage.java],
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Fallback due to old CodeQL version", {
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
codeqlVersion: "2.14.0",
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, "Fallback due to missing git root", {
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
gitRoot: undefined,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
// Exercise language-specific overlay analysis features code paths
|
||||
for (const language in languages_1.KnownLanguage) {
|
||||
(0, ava_1.default)(getOverlayDatabaseModeMacro, `Check default overlay analysis feature for ${language}`, {
|
||||
languages: [language],
|
||||
features: [feature_flags_1.Feature.OverlayAnalysis],
|
||||
isPullRequest: true,
|
||||
}, {
|
||||
overlayDatabaseMode: overlay_database_utils_1.OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=config-utils.test.js.map
|
||||
File diff suppressed because one or more lines are too long
10	lib/database-upload.js (generated)
@@ -37,11 +37,11 @@ exports.uploadDatabases = uploadDatabases;
|
||||
const fs = __importStar(require("fs"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const codeql_1 = require("./codeql");
|
||||
const gitUtils = __importStar(require("./git-utils"));
|
||||
const logging_1 = require("./logging");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||
async function uploadDatabases(repositoryNwo, codeql, config, apiDetails, logger) {
|
||||
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
|
||||
logger.debug("Database upload disabled in workflow. Skipping upload.");
|
||||
return;
|
||||
@@ -61,8 +61,12 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||
logger.debug("Not analyzing default branch. Skipping upload.");
|
||||
return;
|
||||
}
|
||||
// Clean up the database, since intermediate results may still be written to the
|
||||
// database if there is high RAM pressure.
|
||||
await (0, logging_1.withGroupAsync)("Cleaning up databases", async () => {
|
||||
await codeql.databaseCleanupCluster(config, "clear");
|
||||
});
|
||||
const client = (0, api_client_1.getApiClient)();
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const uploadsUrl = new URL((0, util_1.parseGitHubUrl)(apiDetails.url));
|
||||
uploadsUrl.hostname = `uploads.${uploadsUrl.hostname}`;
|
||||
// Octokit expects the baseUrl to not have a trailing slash,
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAYA,0CAmFC;AA/FD,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAErC,sDAAwC;AAGxC,6CAA+B;AAC/B,iCAAkD;AAE3C,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE,CAAC;QAC/D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;IACT,CAAC;IAED,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;QACxD,OAAO;IACT,CAAC;IAED,iDAAiD;IACjD,IACE,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM;QACvD,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,UAAU,EAC3D,CAAC;QACD,MAAM,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAC;QAC5E,OAAO;IACT,CAAC;IAED,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC;QACjD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;IACT,CAAC;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC,IAAA,qBAAc,EAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;IAC3D,UAAU,CAAC,QAAQ,GAAG,WAAW,UAAU,CAAC,QAAQ,EAAE,CAAC;IAEvD,4DAA4D;IAC5D,0CAA0C;IAC1C,IAAI,cAAc,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC3C,IAAI,cAAc,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QACjC,cAAc,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,IAAI,CAAC;YACH,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,YAAY,CAC3C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAC9C,CAAC;YACF,IAAI,CAAC;gBACH,MAAM,MAAM,CAAC,OAAO,CAClB,qGAAqG,EACrG;oBACE,OAAO,EAAE,cAAc;oBACvB,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,UAAU,EAAE,SAAS;oBACrB,IAAI,EAAE,mBAAmB;oBACzB,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;YACjE,CAAC;oBAAS,CAAC;gBACT,mBAAmB,CAAC,KAAK,EAAE,CAAC;YAC9B,CAAC;QACH,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;QACpE,CAAC;IACH,CAAC;AACH,CAAC"}
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAYA,0CAyFC;AArGD,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAG9D,sDAAwC;AACxC,uCAAmD;AAEnD,6CAA+B;AAC/B,iCAAkD;AAE3C,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE,CAAC;QAC/D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;IACT,CAAC;IAED,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;QACxD,OAAO;IACT,CAAC;IAED,iDAAiD;IACjD,IACE,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM;QACvD,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,UAAU,EAC3D,CAAC;QACD,MAAM,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAC;QAC5E,OAAO;IACT,CAAC;IAED,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC;QACjD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;IACT,CAAC;IAED,gFAAgF;IAChF,0CAA0C;IAC1C,MAAM,IAAA,wBAAc,EAAC,uBAAuB,EAAE,KAAK,IAAI,EAAE;QACvD,MAAM,MAAM,CAAC,sBAAsB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC,CAAC,CAAC;IAEH,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAE9B,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC,IAAA,qBAAc,EAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;IAC3D,UAAU,CAAC,QAAQ,GAAG,WAAW,UAAU,CAAC,QAAQ,EAAE,CAAC;IAEvD,4DAA4D;IAC5D,0CAA0C;IAC1C,IAAI,cAAc,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC3C,IAAI,cAAc,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QACjC,cAAc,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,IAAI,CAAC;YACH,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,YAAY,CAC3C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAC9C,CAAC;YACF,IAAI,CAAC;gBACH,MAAM,MAAM,CAAC,OAAO,CAClB,qGAAqG,EACrG;oBACE,OAAO,EAAE,cAAc;oBACvB,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,UAAU,EAAE,SAAS;oBACrB,IAAI,EAAE,mBAAmB;oBACzB,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;YACjE,CAAC;oBAAS,CAAC;gBACT,mBAAmB,CAAC,KAAK,EAAE,CAAC;YAC9B,CAAC;QACH,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;QACpE,CAAC;IACH,CAAC;AACH,CAAC"}
39 lib/database-upload.test.js (generated)
@@ -60,7 +60,7 @@ const testApiDetails = {
};
function getTestConfig(tmpDir) {
return (0, testing_utils_1.createTestConfig)({
languages: [languages_1.Language.javascript],
languages: [languages_1.KnownLanguage.javascript],
dbLocation: tmpDir,
});
}
@@ -79,6 +79,16 @@ async function mockHttpRequests(databaseUploadStatusCode) {
sinon.stub(apiClient, "getApiClient").value(() => client);
return databaseUploadSpy;
}
function getCodeQL() {
return (0, codeql_1.createStubCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
async databaseCleanupCluster() {
// Do nothing, as we are not testing cleanup here.
},
});
}
(0, ava_1.default)("Abort database upload if 'upload-database' input set to false", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -88,7 +98,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("false");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getCodeQL(), getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Database upload disabled in workflow. Skipping upload.") !== undefined);
@@ -105,7 +115,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getCodeQL(), config, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Not running against github.com or GHEC-DR. Skipping upload.") !== undefined);
@@ -120,7 +130,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getCodeQL(), getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
});
@@ -134,13 +144,8 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(500);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getCodeQL(), getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: Error: some error message") !== undefined);
@@ -155,13 +160,8 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
await (0, database_upload_1.uploadDatabases)(testRepoName, getCodeQL(), getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});
@@ -175,13 +175,8 @@ async function mockHttpRequests(databaseUploadStatusCode) {
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const databaseUploadSpy = await mockHttpRequests(201);
(0, codeql_1.setCodeQL)({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), {
await (0, database_upload_1.uploadDatabases)(testRepoName, getCodeQL(), getTestConfig(tmpDir), {
auth: "1234",
url: "https://tenant.ghe.com",
apiURL: undefined,
File diff suppressed because one or more lines are too long
72 lib/debug-artifacts.js (generated)
@@ -46,17 +46,16 @@ const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const artifactLegacy = __importStar(require("@actions/artifact-legacy"));
const core = __importStar(require("@actions/core"));
const adm_zip_1 = __importDefault(require("adm-zip"));
const archiver_1 = __importDefault(require("archiver"));
const del_1 = __importDefault(require("del"));
const actions_util_1 = require("./actions-util");
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const environment_1 = require("./environment");
const logging_1 = require("./logging");
const tools_features_1 = require("./tools-features");
const util_1 = require("./util");
function sanitizeArtifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
return name.replace(/[^a-zA-Z0-9_-]+/g, "");
}
/**
* Upload Actions SARIF artifacts for debugging when CODEQL_ACTION_DEBUG_COMBINED_SARIF
@@ -66,26 +65,28 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
const tempDir = (0, actions_util_1.getTemporaryDirectory)();
// Upload Actions SARIF artifacts for debugging when environment variable is set
if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
logger.info("Uploading available combined SARIF files as Actions debugging artifact...");
const baseTempDir = path.resolve(tempDir, "combined-sarif");
const toUpload = [];
if (fs.existsSync(baseTempDir)) {
const outputDirs = fs.readdirSync(baseTempDir);
for (const outputDir of outputDirs) {
const sarifFiles = fs
.readdirSync(path.resolve(baseTempDir, outputDir))
.filter((f) => f.endsWith(".sarif"));
for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", async () => {
logger.info("Uploading available combined SARIF files as Actions debugging artifact...");
const baseTempDir = path.resolve(tempDir, "combined-sarif");
const toUpload = [];
if (fs.existsSync(baseTempDir)) {
const outputDirs = fs.readdirSync(baseTempDir);
for (const outputDir of outputDirs) {
const sarifFiles = fs
.readdirSync(path.resolve(baseTempDir, outputDir))
.filter((f) => f.endsWith(".sarif"));
for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
}
}
}
}
try {
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, codeQlVersion);
}
catch (e) {
logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
try {
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, codeQlVersion);
}
catch (e) {
logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
});
}
}
/**
@@ -118,11 +119,11 @@ function tryPrepareSarifDebugArtifact(config, language, logger) {
*
* @return The path to the database bundle, or undefined if an error occurs.
*/
async function tryBundleDatabase(config, language, logger) {
async function tryBundleDatabase(codeql, config, language, logger) {
try {
if ((0, analyze_1.dbIsFinalized)(config, language, logger)) {
try {
return await createDatabaseBundleCli(config, language);
return await createDatabaseBundleCli(codeql, config, language);
}
catch (e) {
logger.warning(`Failed to bundle database for ${language} using the CLI. ` +
@@ -141,7 +142,7 @@ async function tryBundleDatabase(config, language, logger) {
*
* Logs and suppresses any errors that occur.
*/
async function tryUploadAllAvailableDebugArtifacts(config, logger, codeQlVersion) {
async function tryUploadAllAvailableDebugArtifacts(codeql, config, logger, codeQlVersion) {
const filesToUpload = [];
try {
for (const language of config.languages) {
@@ -168,7 +169,7 @@ async function tryUploadAllAvailableDebugArtifacts(config, logger, codeQlVersion
}
// Add database bundle
logger.info("Preparing database bundle debug artifact...");
const databaseBundle = await tryBundleDatabase(config, language, logger);
const databaseBundle = await tryBundleDatabase(codeql, config, language, logger);
if (databaseBundle) {
filesToUpload.push(databaseBundle);
logger.info("Database bundle debug artifact ready for upload.");
@@ -248,16 +249,27 @@ async function createPartialDatabaseBundle(config, language) {
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
}
const zip = new adm_zip_1.default();
zip.addLocalFolder(databasePath);
zip.writeZip(databaseBundlePath);
const output = fs.createWriteStream(databaseBundlePath);
const zip = (0, archiver_1.default)("zip");
zip.on("error", (err) => {
throw err;
});
zip.on("warning", (err) => {
// Ignore ENOENT warnings. There's nothing anyone can do about it.
if (err.code !== "ENOENT") {
throw err;
}
});
zip.pipe(output);
zip.directory(databasePath, false);
await zip.finalize();
return databaseBundlePath;
}
/**
* Runs `codeql database bundle` command and returns the path.
*/
async function createDatabaseBundleCli(config, language) {
const databaseBundlePath = await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`);
async function createDatabaseBundleCli(codeql, config, language) {
const databaseBundlePath = await (0, util_1.bundleDb)(config, language, codeql, `${config.debugDatabaseName}-${language}`);
return databaseBundlePath;
}
//# sourceMappingURL=debug-artifacts.js.map
File diff suppressed because one or more lines are too long
1 lib/debug-artifacts.test.js (generated)
@@ -45,6 +45,7 @@ const util_1 = require("./util");
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello`world`"), "helloworld");
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello===123"), "hello123");
t.deepEqual(debugArtifacts.sanitizeArtifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
t.deepEqual(debugArtifacts.sanitizeArtifactName("\\foo\\bar//baz"), "foobarbaz");
});
// These next tests check the correctness of the logic to determine whether or not
// artifacts are uploaded in debug mode. Since it's not easy to mock the actual
@@ -1 +1 @@
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AACpD,uCAA6C;AAC7C,iCAAuC;AAEvC,IAAA,aAAI,EAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,EAAE;IACjC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,YAAY,CACb,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,oBAAoB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC5E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,EAC9D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,kFAAkF;AAClF,+EAA+E;AAC/E,mFAAmF;AACnF,8CAA8C;AAC9C,EAAE;AACF,oFAAoF;AACpF,8BAA8B;AAE9B,IAAA,aAAI,EAAC,gFAAgF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjG,2DAA2D;IAC3D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,EAAE,EACF,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,SAAS,CACV,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ,EACR,wBAAwB,EACxB,wCAAwC,CACzC,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,mFAAmF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpG,sCAAsC;IACtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,SAAS,CACV,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ;QACR,8FAA8F;QAC9F,eAAe,EACf,kEAAkE,CACnE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oFAAoF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrG,sCAAsC;IACtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,QAAQ,CACT,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ;QACR,8FAA8F;QAC9F,eAAe,EACf,kEAAkE,CACnE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sFAAsF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvG,0CAA0C;IAC1C,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,QAAQ,CACT,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ,EACR,sBAAsB,EACtB,sEAAsE,CACvE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AACpD,uCAA6C;AAC7C,iCAAuC;AAEvC,IAAA,aAAI,EAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,EAAE;IACjC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,cAAc,CAAC,EACnD,YAAY,CACb,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,oBAAoB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC5E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,EAC9D,aAAa,CACd,CAAC;IACF,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,oBAAoB,CAAC,iBAAiB,CAAC,EACtD,WAAW,CACZ,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,kFAAkF;AAClF,+EAA+E;AAC/E,mFAAmF;AACnF,8CAA8C;AAC9C,EAAE;AACF,oFAAoF;AACpF,8BAA8B;AAE9B,IAAA,aAAI,EAAC,gFAAgF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjG,2DAA2D;IAC3D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,EAAE,EACF,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,SAAS,CACV,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ,EACR,wBAAwB,EACxB,wCAAwC,CACzC,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,mFAAmF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpG,sCAAsC;IACtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,SAAS,CACV,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ;QACR,8FAA8F;QAC9F,eAAe,EACf,kEAAkE,CACnE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oFAAoF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrG,sCAAsC;IACtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,QAAQ,CACT,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ;QACR,8FAA8F;QAC9F,eAAe,EACf,kEAAkE,CACnE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,sFAAsF,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvG,0CAA0C;IAC1C,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,CAAC,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,oBAAoB,CACxD,MAAM,EACN,CAAC,SAAS,CAAC,EACX,cAAc,EACd,cAAc,EACd,oBAAa,CAAC,MAAM,EACpB,QAAQ,CACT,CAAC;QACF,CAAC,CAAC,EAAE,CACF,QAAQ,EACR,sBAAsB,EACtB,sEAAsE,CACvE,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.20.3",
"cliVersion": "2.20.3",
"priorBundleVersion": "codeql-bundle-v2.20.2",
"priorCliVersion": "2.20.2"
"bundleVersion": "codeql-bundle-v2.22.3",
"cliVersion": "2.22.3",
"priorBundleVersion": "codeql-bundle-v2.22.2",
"priorCliVersion": "2.22.2"
}
86 lib/dependency-caching.js (generated)
@@ -33,54 +33,68 @@ var __importStar = (this && this.__importStar) || (function () {
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.getJavaTempDependencyDir = getJavaTempDependencyDir;
exports.downloadDependencyCaches = downloadDependencyCaches;
exports.uploadDependencyCaches = uploadDependencyCaches;
const os = __importStar(require("os"));
const path_1 = require("path");
const actionsCache = __importStar(require("@actions/cache"));
const glob = __importStar(require("@actions/glob"));
const actions_util_1 = require("./actions-util");
const caching_utils_1 = require("./caching-utils");
const environment_1 = require("./environment");
const util_1 = require("./util");
const CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
const CODEQL_DEPENDENCY_CACHE_VERSION = 1;
/**
* Returns a path to a directory intended to be used to store .jar files
* for the Java `build-mode: none` extractor.
* @returns The path to the directory that should be used by the `build-mode: none` extractor.
*/
function getJavaTempDependencyDir() {
return (0, path_1.join)((0, actions_util_1.getTemporaryDirectory)(), "codeql_java", "repository");
}
/**
* Default caching configurations per language.
*/
const CODEQL_DEFAULT_CACHE_CONFIG = {
java: {
paths: [
// Maven
(0, path_1.join)(os.homedir(), ".m2", "repository"),
// Gradle
(0, path_1.join)(os.homedir(), ".gradle", "caches"),
],
hash: [
// Maven
"**/pom.xml",
// Gradle
"**/*.gradle*",
"**/gradle-wrapper.properties",
"buildSrc/**/Versions.kt",
"buildSrc/**/Dependencies.kt",
"gradle/*.versions.toml",
"**/versions.properties",
],
},
csharp: {
paths: [(0, path_1.join)(os.homedir(), ".nuget", "packages")],
hash: [
// NuGet
"**/packages.lock.json",
// Paket
"**/paket.lock",
],
},
go: {
paths: [(0, path_1.join)(os.homedir(), "go", "pkg", "mod")],
hash: ["**/go.sum"],
},
};
function getDefaultCacheConfig() {
return {
java: {
paths: [
// Maven
(0, path_1.join)(os.homedir(), ".m2", "repository"),
// Gradle
(0, path_1.join)(os.homedir(), ".gradle", "caches"),
// CodeQL Java build-mode: none
getJavaTempDependencyDir(),
],
hash: [
// Maven
"**/pom.xml",
// Gradle
"**/*.gradle*",
"**/gradle-wrapper.properties",
"buildSrc/**/Versions.kt",
"buildSrc/**/Dependencies.kt",
"gradle/*.versions.toml",
"**/versions.properties",
],
},
csharp: {
paths: [(0, path_1.join)(os.homedir(), ".nuget", "packages")],
hash: [
// NuGet
"**/packages.lock.json",
// Paket
"**/paket.lock",
],
},
go: {
paths: [(0, path_1.join)(os.homedir(), "go", "pkg", "mod")],
hash: ["**/go.sum"],
},
};
}
async function makeGlobber(patterns) {
return glob.create(patterns.join("\n"));
}
@@ -94,7 +108,7 @@ async function makeGlobber(patterns) {
async function downloadDependencyCaches(languages, logger) {
const restoredCaches = [];
for (const language of languages) {
const cacheConfig = CODEQL_DEFAULT_CACHE_CONFIG[language];
const cacheConfig = getDefaultCacheConfig()[language];
if (cacheConfig === undefined) {
logger.info(`Skipping download of dependency cache for ${language} as we have no caching configuration for it.`);
continue;
@@ -128,7 +142,7 @@ async function downloadDependencyCaches(languages, logger) {
*/
async function uploadDependencyCaches(config, logger) {
for (const language of config.languages) {
const cacheConfig = CODEQL_DEFAULT_CACHE_CONFIG[language];
const cacheConfig = getDefaultCacheConfig()[language];
if (cacheConfig === undefined) {
logger.info(`Skipping upload of dependency cache for ${language} as we have no caching configuration for it.`);
continue;
@@ -1 +1 @@
{"version":3,"file":"dependency-caching.js","sourceRoot":"","sources":["../src/dependency-caching.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+EA,4DAmDC;AAQD,wDAiEC;AA3MD,uCAAyB;AACzB,+BAA4B;AAE5B,6DAA+C;AAC/C,oDAAsC;AAEtC,mDAAoD;AAEpD,+CAAuC;AAGvC,iCAA6C;AAgB7C,MAAM,8BAA8B,GAAG,qBAAqB,CAAC;AAC7D,MAAM,+BAA+B,GAAG,CAAC,CAAC;AAE1C;;GAEG;AACH,MAAM,2BAA2B,GAAwC;IACvE,IAAI,EAAE;QACJ,KAAK,EAAE;YACL,QAAQ;YACR,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,YAAY,CAAC;YACvC,SAAS;YACT,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,QAAQ,CAAC;SACxC;QACD,IAAI,EAAE;YACJ,QAAQ;YACR,YAAY;YACZ,SAAS;YACT,cAAc;YACd,8BAA8B;YAC9B,yBAAyB;YACzB,6BAA6B;YAC7B,wBAAwB;YACxB,wBAAwB;SACzB;KACF;IACD,MAAM,EAAE;QACN,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;QACjD,IAAI,EAAE;YACJ,QAAQ;YACR,uBAAuB;YACvB,QAAQ;YACR,eAAe;SAChB;KACF;IACD,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAC/C,IAAI,EAAE,CAAC,WAAW,CAAC;KACpB;CACF,CAAC;AAEF,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,wBAAwB,CAC5C,SAAqB,EACrB,MAAc;IAEd,MAAM,cAAc,GAAe,EAAE,CAAC;IAEtC,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,MAAM,WAAW,GAAG,2BAA2B,CAAC,QAAQ,CAAC,CAAC;QAE1D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,8CAA8C,CACpG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,mDAAmD,CACzG,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,UAAU,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QACzD,MAAM,WAAW,GAAa,CAAC,MAAM,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;QAE5D,MAAM,CAAC,IAAI,CACT,yBAAyB,QAAQ,aAAa,UAAU,qBAAqB,WAAW,CAAC,IAAI,CAC3F,IAAI,CACL,EAAE,CACJ,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,YAAY,CAC5C,WAAW,CAAC,KAAK,EACjB,UAAU,EACV,WAAW,CACZ,CAAC;QAEF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,IAAI,CAAC,oBAAoB,MAAM,QAAQ,QAAQ,GAAG,CAAC,CAAC;YAC3D,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CAAC,+BAA+B,QAAQ,GAAG,CAAC,CAAC;QAC1D,CAAC;IACH,CAAC;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;GAKG;AACI,KAAK,UAAU,sBAAsB,CAAC,MAAc,EAAE,MAAc;IACzE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,2BAA2B,CAAC,QAAQ,CAAC,CAAC;QAE1D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,8CAA8C,CAClG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,mDAAmD,CACvG,CAAC;YACF,SAAS;QACX,CAAC;QAED,yGAAyG;QACzG,uGAAuG;QACvG,uCAAuC;QACvC,uGAAuG;QACvG,uGAAuG;QACvG,sCAAsC;QACtC,uGAAuG;QACvG,sGAAsG;QACtG,sGAAsG;QACtG,4CAA4C;QAC5C,MAAM,IAAI,GAAG,MAAM,IAAA,iCAAiB,EAAC,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;QAEtE,iCAAiC;QACjC,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;YACf,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,qBAAqB,CACzE,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,GAAG,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,MAAM,CAAC,IAAI,CACT,2BAA2B,IAAI,QAAQ,QAAQ,aAAa,GAAG,KAAK,CACrE,CAAC;QAEF,IAAI,CAAC;YACH,MAAM,YAAY,CAAC,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;QACvD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,yFAAyF;YACzF,uFAAuF;YACvF,gCAAgC;YAChC,IAAI,KAAK,YAAY,YAAY,CAAC,iBAAiB,EAAE,CAAC;gBACpD,MAAM,CAAC,IAAI,CACT,2BAA2B,QAAQ,aAAa,GAAG,qBAAqB,CACzE,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC9B,CAA
C;iBAAM,CAAC;gBACN,kCAAkC;gBAClC,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,QAAQ,CACrB,QAAkB,EAClB,WAAwB;IAExB,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAC/D,OAAO,GAAG,MAAM,WAAW,CAAC,QAAQ,CAAC,GAAG,IAAI,EAAE,CAAC;AACjD,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,MAAM,QAAQ,GAAG,IAAA,0BAAmB,EAAC,WAAW,CAAC,CAAC;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,yBAAyB,CAAC,CAAC;IACnE,IAAI,MAAM,GAAG,8BAA8B,CAAC;IAE5C,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC1D,MAAM,GAAG,GAAG,MAAM,IAAI,YAAY,EAAE,CAAC;IACvC,CAAC;IAED,OAAO,GAAG,MAAM,IAAI,+BAA+B,IAAI,QAAQ,IAAI,QAAQ,GAAG,CAAC;AACjF,CAAC"}
{"version":3,"file":"dependency-caching.js","sourceRoot":"","sources":["../src/dependency-caching.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoCA,4DAEC;AAuDD,4DAmDC;AAQD,wDAiEC;AAzND,uCAAyB;AACzB,+BAA4B;AAE5B,6DAA+C;AAC/C,oDAAsC;AAEtC,iDAAuD;AACvD,mDAAoD;AAEpD,+CAAuC;AAGvC,iCAA6C;AAgB7C,MAAM,8BAA8B,GAAG,qBAAqB,CAAC;AAC7D,MAAM,+BAA+B,GAAG,CAAC,CAAC;AAE1C;;;;GAIG;AACH,SAAgB,wBAAwB;IACtC,OAAO,IAAA,WAAI,EAAC,IAAA,oCAAqB,GAAE,EAAE,aAAa,EAAE,YAAY,CAAC,CAAC;AACpE,CAAC;AAED;;GAEG;AACH,SAAS,qBAAqB;IAC5B,OAAO;QACL,IAAI,EAAE;YACJ,KAAK,EAAE;gBACL,QAAQ;gBACR,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,YAAY,CAAC;gBACvC,SAAS;gBACT,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,QAAQ,CAAC;gBACvC,+BAA+B;gBAC/B,wBAAwB,EAAE;aAC3B;YACD,IAAI,EAAE;gBACJ,QAAQ;gBACR,YAAY;gBACZ,SAAS;gBACT,cAAc;gBACd,8BAA8B;gBAC9B,yBAAyB;gBACzB,6BAA6B;gBAC7B,wBAAwB;gBACxB,wBAAwB;aACzB;SACF;QACD,MAAM,EAAE;YACN,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;YACjD,IAAI,EAAE;gBACJ,QAAQ;gBACR,uBAAuB;gBACvB,QAAQ;gBACR,eAAe;aAChB;SACF;QACD,EAAE,EAAE;YACF,KAAK,EAAE,CAAC,IAAA,WAAI,EAAC,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;YAC/C,IAAI,EAAE,CAAC,WAAW,CAAC;SACpB;KACF,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,wBAAwB,CAC5C,SAAqB,EACrB,MAAc;IAEd,MAAM,cAAc,GAAe,EAAE,CAAC;IAEtC,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;QACjC,MAAM,WAAW,GAAG,qBAAqB,EAAE,CAAC,QAAQ,CAAC,CAAC;QAEtD,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,8CAA8C,CACpG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,6CAA6C,QAAQ,mDAAmD,CACzG,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,UAAU,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QACzD,MAAM,WAAW,GAAa,CAAC,MAAM,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;QAE5D,MAAM,CAAC,IAAI,CACT,yBAAyB,QAAQ,aAAa,UAAU,qBAAqB,WAAW,CAAC,IAAI,CAC3F,IAAI,CACL,EAAE,CACJ,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,YAAY,CAC5C,WAAW,CAAC,KAAK,EACjB,UAAU,EACV,WAAW,CACZ,CAAC;QAEF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,CAAC,IAAI,CAAC,oBAAoB,MAAM,QAAQ,QAAQ,GAAG,CAAC,CAAC;YAC3D,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CAAC,+BAA+B,QAAQ,GAAG,CAAC,CAAC;QAC1D,CAAC;IACH,CAAC;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;GAKG;AACI,KAAK,UAAU,sBAAsB,CAAC,MAAc,EAAE,MAAc;IACzE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,qBAAqB,EAAE,CAAC,QAAQ,CAAC,CAAC;QAEtD,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,8CAA8C,CAClG,CAAC;YACF,SAAS;QACX,CAAC;QAED,gGAAgG;QAChG,wBAAwB;QACxB,MAAM,OAAO,GAAG,MAAM,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAEpD,IAAI,CAAC,MAAM,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACxC,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,mDAAmD,CACvG,CAAC;YACF,SAAS;QACX,CAAC;QAED,yGAAyG;QACzG,uGAAuG;QACvG,uCAAuC;QACvC,uGAAuG;QACvG,uGAAuG;QACvG,sCAAsC;QACtC,uGAAuG;QACvG,sGAAsG;QACtG,sGAAsG;QACtG,4CAA4C;QAC5C,MAAM,IAAI,GAAG,MAAM,IAAA,iCAAiB,EAAC,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;QAEtE,iCAAiC;QACjC,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;YACf,MAAM,CAAC,IAAI,CACT,2CAA2C,QAAQ,qBAAqB,CACzE,CAAC;YACF,SAAS;QACX,CAAC;QAED,MAAM,GAAG,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,MAAM,CAAC,IAAI,CACT,2BAA2B,IAAI,QAAQ,QAAQ,aAAa,GAAG,KAAK,CACrE,CAAC;QAEF,IAAI,CAAC;YACH,MAAM,YAAY,CAAC,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;QACvD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC
;YACf,yFAAyF;YACzF,uFAAuF;YACvF,gCAAgC;YAChC,IAAI,KAAK,YAAY,YAAY,CAAC,iBAAiB,EAAE,CAAC;gBACpD,MAAM,CAAC,IAAI,CACT,2BAA2B,QAAQ,aAAa,GAAG,qBAAqB,CACzE,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC9B,CAAC;iBAAM,CAAC;gBACN,kCAAkC;gBAClC,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,QAAQ,CACrB,QAAkB,EAClB,WAAwB;IAExB,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAC/D,OAAO,GAAG,MAAM,WAAW,CAAC,QAAQ,CAAC,GAAG,IAAI,EAAE,CAAC;AACjD,CAAC;AAED;;;;;;GAMG;AACH,KAAK,UAAU,WAAW,CAAC,QAAkB;IAC3C,MAAM,QAAQ,GAAG,IAAA,0BAAmB,EAAC,WAAW,CAAC,CAAC;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,yBAAyB,CAAC,CAAC;IACnE,IAAI,MAAM,GAAG,8BAA8B,CAAC;IAE5C,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC1D,MAAM,GAAG,GAAG,MAAM,IAAI,YAAY,EAAE,CAAC;IACvC,CAAC;IAED,OAAO,GAAG,MAAM,IAAI,+BAA+B,IAAI,QAAQ,IAAI,QAAQ,GAAG,CAAC;AACjF,CAAC"}
95 lib/diff-informed-analysis-utils.js (generated, Normal file)
@@ -0,0 +1,95 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.shouldPerformDiffInformedAnalysis = shouldPerformDiffInformedAnalysis;
exports.getDiffInformedAnalysisBranches = getDiffInformedAnalysisBranches;
exports.writeDiffRangesJsonFile = writeDiffRangesJsonFile;
exports.readDiffRangesJsonFile = readDiffRangesJsonFile;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const feature_flags_1 = require("./feature-flags");
const util_1 = require("./util");
/**
* Check if the action should perform diff-informed analysis.
*/
async function shouldPerformDiffInformedAnalysis(codeql, features, logger) {
return ((await getDiffInformedAnalysisBranches(codeql, features, logger)) !==
undefined);
}
/**
* Get the branches to use for diff-informed analysis.
*
* @returns If the action should perform diff-informed analysis, return
* the base and head branches that should be used to compute the diff ranges.
* Otherwise return `undefined`.
*/
async function getDiffInformedAnalysisBranches(codeql, features, logger) {
if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
return undefined;
}
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
if (gitHubVersion.type === util_1.GitHubVariant.GHES &&
(0, util_1.satisfiesGHESVersion)(gitHubVersion.version, "<3.19", true)) {
return undefined;
}
const branches = actionsUtil.getPullRequestBranches();
if (!branches) {
logger.info("Not performing diff-informed analysis " +
"because we are not analyzing a pull request.");
}
return branches;
}
function getDiffRangesJsonFilePath() {
return path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range.json");
}
function writeDiffRangesJsonFile(logger, ranges) {
const jsonContents = JSON.stringify(ranges, null, 2);
const jsonFilePath = getDiffRangesJsonFilePath();
fs.writeFileSync(jsonFilePath, jsonContents);
logger.debug(`Wrote pr-diff-range JSON file to ${jsonFilePath}:\n${jsonContents}`);
}
function readDiffRangesJsonFile(logger) {
const jsonFilePath = getDiffRangesJsonFilePath();
if (!fs.existsSync(jsonFilePath)) {
logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`);
return undefined;
}
const jsonContents = fs.readFileSync(jsonFilePath, "utf8");
logger.debug(`Read pr-diff-range JSON file from ${jsonFilePath}:\n${jsonContents}`);
return JSON.parse(jsonContents);
}
//# sourceMappingURL=diff-informed-analysis-utils.js.map
1 lib/diff-informed-analysis-utils.js.map (Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"diff-informed-analysis-utils.js","sourceRoot":"","sources":["../src/diff-informed-analysis-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,8EASC;AASD,0EAyBC;AAYD,0DAUC;AAED,wDAaC;AA9FD,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAE9C,6CAAgD;AAEhD,mDAA6D;AAE7D,iCAA6D;AAE7D;;GAEG;AACI,KAAK,UAAU,iCAAiC,CACrD,MAAc,EACd,QAA2B,EAC3B,MAAc;IAEd,OAAO,CACL,CAAC,MAAM,+BAA+B,CAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;QACjE,SAAS,CACV,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,+BAA+B,CACnD,MAAc,EACd,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,mBAAmB,EAAE,MAAM,CAAC,CAAC,EAAE,CAAC;QACpE,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,IACE,aAAa,CAAC,IAAI,KAAK,oBAAa,CAAC,IAAI;QACzC,IAAA,2BAAoB,EAAC,aAAa,CAAC,OAAO,EAAE,OAAO,EAAE,IAAI,CAAC,EAC1D,CAAC;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,MAAM,QAAQ,GAAG,WAAW,CAAC,sBAAsB,EAAE,CAAC;IACtD,IAAI,CAAC,QAAQ,EAAE,CAAC;QACd,MAAM,CAAC,IAAI,CACT,wCAAwC;YACtC,8CAA8C,CACjD,CAAC;IACJ,CAAC;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAQD,SAAS,yBAAyB;IAChC,OAAO,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,oBAAoB,CAAC,CAAC;AAC9E,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAAc,EACd,MAAwB;IAExB,MAAM,YAAY,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IACrD,MAAM,YAAY,GAAG,yBAAyB,EAAE,CAAC;IACjD,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,YAAY,CAAC,CAAC;IAC7C,MAAM,CAAC,KAAK,CACV,oCAAoC,YAAY,MAAM,YAAY,EAAE,CACrE,CAAC;AACJ,CAAC;AAED,SAAgB,sBAAsB,CACpC,MAAc;IAEd,MAAM,YAAY,GAAG,yBAAyB,EAAE,CAAC;IACjD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,CAAC;QACjC,MAAM,CAAC,KAAK,CAAC,2CAA2C,YAAY,EAAE,CAAC,CAAC;QACxE,OAAO,SAAS,CAAC;IACnB,CAAC;IACD,MAAM,YAAY,GAAG,EAAE,CAAC,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC3D,MAAM,CAAC,KAAK,CACV,qCAAqC,YAAY,MAAM,YAAY,EAAE,CACtE,CAAC;IACF,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,CAAqB,CAAC;AACtD,CAAC"}
130 lib/diff-informed-analysis-utils.test.js (generated, Normal file)
@@ -0,0 +1,130 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const apiClient = __importStar(require("./api-client"));
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
const defaultTestCase = {
featureEnabled: true,
gitHubVersion: {
type: util_1.GitHubVariant.DOTCOM,
},
pullRequestBranches: {
base: "main",
head: "feature-branch",
},
codeQLVersion: "2.21.0",
};
const testShouldPerformDiffInformedAnalysis = ava_1.default.macro({
exec: async (t, _title, partialTestCase, expectedResult) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const testCase = { ...defaultTestCase, ...partialTestCase };
const logger = (0, logging_1.getRunnerLogger)(true);
const codeql = (0, testing_utils_1.mockCodeQLVersion)(testCase.codeQLVersion);
if (testCase.diffInformedQueriesEnvVar !== undefined) {
process.env.CODEQL_ACTION_DIFF_INFORMED_QUERIES =
testCase.diffInformedQueriesEnvVar.toString();
}
else {
delete process.env.CODEQL_ACTION_DIFF_INFORMED_QUERIES;
}
const features = new feature_flags_1.Features(testCase.gitHubVersion, (0, repository_1.parseRepositoryNwo)("github/example"), tmpDir, logger);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {
[feature_flags_1.Feature.DiffInformedQueries]: testCase.featureEnabled,
});
const getGitHubVersionStub = sinon
.stub(apiClient, "getGitHubVersion")
.resolves(testCase.gitHubVersion);
const getPullRequestBranchesStub = sinon
.stub(actionsUtil, "getPullRequestBranches")
.returns(testCase.pullRequestBranches);
const result = await (0, diff_informed_analysis_utils_1.shouldPerformDiffInformedAnalysis)(codeql, features, logger);
t.is(result, expectedResult);
delete process.env.CODEQL_ACTION_DIFF_INFORMED_QUERIES;
getGitHubVersionStub.restore();
getPullRequestBranchesStub.restore();
});
},
title: (_, title) => `shouldPerformDiffInformedAnalysis: ${title}`,
});
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns true in the default test case", {}, true);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns false when feature flag is disabled from the API", {
featureEnabled: false,
}, false);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns false when CODEQL_ACTION_DIFF_INFORMED_QUERIES is set to false", {
featureEnabled: true,
diffInformedQueriesEnvVar: false,
}, false);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns true when CODEQL_ACTION_DIFF_INFORMED_QUERIES is set to true", {
featureEnabled: false,
diffInformedQueriesEnvVar: true,
}, true);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns false for CodeQL version 2.20.0", {
codeQLVersion: "2.20.0",
}, false);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns false for invalid GHES version", {
gitHubVersion: {
type: util_1.GitHubVariant.GHES,
version: "invalid-version",
},
}, false);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns false for GHES version 3.18.5", {
gitHubVersion: {
type: util_1.GitHubVariant.GHES,
version: "3.18.5",
},
}, false);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns true for GHES version 3.19.0", {
gitHubVersion: {
type: util_1.GitHubVariant.GHES,
version: "3.19.0",
},
}, true);
(0, ava_1.default)(testShouldPerformDiffInformedAnalysis, "returns false when not a pull request", {
pullRequestBranches: undefined,
}, false);
//# sourceMappingURL=diff-informed-analysis-utils.test.js.map
Some files were not shown because too many files have changed in this diff.