Mirror of https://github.com/github/codeql-action.git (synced 2025-12-07 00:08:06 +08:00)

Compare commits: codeql-bun...v1.1.35 — 957 commits
[Commit table: 957 commits, listed by abbreviated SHA1 only, beginning with 54d8b0da6b and ending with 03a275bc11; the author, date, and commit-message columns are empty in the mirrored listing.]
Ignore-pattern hunk (the file name for this hunk was not captured):

```
@@ -1,5 +1,4 @@
**/webpack.config.js
lib/**
runner/dist/**
src/testdata/**
tests/**
```
.github/check-codescanning-config/action.yml (vendored, new file, 62 lines):

```yaml
name: Check Code-Scanning Config
description: |
  Checks the code scanning configuration file generated by the
  action to ensure it contains the expected contents
inputs:
  languages:
    required: false
    description: The languages field passed to the init action.

  packs:
    required: false
    description: The packs field passed to the init action.

  queries:
    required: false
    description: The queries field passed to the init action.

  config-file-test:
    required: false
    description: |
      The location of the config file to use. If empty,
      then no config file is used.

  expected-config-file-contents:
    required: true
    description: |
      A JSON string containing the exact contents of the config file.

  tools:
    required: true
    description: |
      The url of codeql to use.

runs:
  using: composite
  steps:
    - uses: ./../action/init
      with:
        languages: ${{ inputs.languages }}
        config-file: ${{ inputs.config-file-test }}
        queries: ${{ inputs.queries }}
        packs: ${{ inputs.packs }}
        tools: ${{ inputs.tools }}
        db-location: ${{ runner.temp }}/codescanning-config-cli-test
      env:
        CODEQL_ACTION_TEST_MODE: 'true'

    - name: Install dependencies
      shell: bash
      run: npm install --location=global ts-node js-yaml

    - name: Check config
      working-directory: ${{ github.action_path }}
      shell: bash
      run: ts-node ./index.ts "${{ runner.temp }}/user-config.yaml" '${{ inputs.expected-config-file-contents }}'

    - name: Clean up
      shell: bash
      if: always()
      run: |
        rm -rf ${{ runner.temp }}/codescanning-config-cli-test
        rm -rf ${{ runner.temp }}/user-config.yaml
```
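The `expected-config-file-contents` input above is a single JSON string. A minimal sketch of how a caller might assemble that string — the helper is hypothetical and not part of the repository, and the exact shape of the generated user config is an assumption here:

```ts
// Hypothetical helper: serialize the configuration we expect the init action to
// generate, so it can be passed to the `expected-config-file-contents` input.
interface ExpectedConfig {
  languages?: string[];              // assumed field names, mirroring the action inputs
  queries?: Array<{ uses: string }>;
  packs?: string[];
}

function buildExpectedConfigInput(config: ExpectedConfig): string {
  // The check compares parsed structures, so key order does not matter,
  // but every field present in the generated YAML must appear here.
  return JSON.stringify(config);
}

console.log(buildExpectedConfigInput({
  languages: ['javascript'],
  queries: [{ uses: 'security-extended' }],
}));
```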
.github/check-codescanning-config/index.ts (vendored, new file, 39 lines):

```ts

import * as core from '@actions/core'
import * as yaml from 'js-yaml'
import * as fs from 'fs'
import * as assert from 'assert'

const actualConfig = loadActualConfig()

const rawExpectedConfig = process.argv[3].trim()
if (!rawExpectedConfig) {
  core.info('No expected configuration provided')
} else {
  core.startGroup('Expected generated user config')
  core.info(yaml.dump(JSON.parse(rawExpectedConfig)))
  core.endGroup()
}

const expectedConfig = rawExpectedConfig ? JSON.parse(rawExpectedConfig) : undefined;

assert.deepStrictEqual(
  actualConfig,
  expectedConfig,
  'Expected configuration does not match actual configuration'
);


function loadActualConfig() {
  if (!fs.existsSync(process.argv[2])) {
    core.info('No configuration file found')
    return undefined
  } else {
    const rawActualConfig = fs.readFileSync(process.argv[2], 'utf8')
    core.startGroup('Actual generated user config')
    core.info(rawActualConfig)
    core.endGroup()

    return yaml.load(rawActualConfig)
  }
}
```
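A standalone sketch of the comparison technique `index.ts` uses — the path and contents below are illustrative only, and `js-yaml` is assumed to be installed:

```ts
import * as assert from 'assert';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as yaml from 'js-yaml';

// Write a stand-in for the user config that the init action would generate.
const actualYamlPath = path.join(os.tmpdir(), 'user-config.yaml');
fs.writeFileSync(actualYamlPath, 'languages:\n  - javascript\n');

// The expected contents arrive as a JSON string (argv[3] in the real script).
const expected = JSON.parse('{"languages": ["javascript"]}');
const actual = yaml.load(fs.readFileSync(actualYamlPath, 'utf8'));

// deepStrictEqual throws on any structural difference, which fails the ts-node run.
assert.deepStrictEqual(actual, expected, 'Expected configuration does not match actual configuration');
console.log('Configurations match');
```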
.github/check-sarif/action.yml (vendored, new file, 20 lines):

```yaml
name: Check SARIF
description: Checks a SARIF file to see if certain queries were run and others were not run.
inputs:
  sarif-file:
    required: true
    description: The SARIF file to check

  queries-run:
    required: true
    description: |
      Comma separated list of query ids that should be included in this SARIF file.

  queries-not-run:
    required: true
    description: |
      Comma separated list of query ids that should NOT be included in this SARIF file.

runs:
  using: node12
  main: index.js
```
.github/check-sarif/index.js (vendored, new file, 43 lines):

```js
'use strict'

const core = require('@actions/core')
const fs = require('fs')

const sarif = JSON.parse(fs.readFileSync(core.getInput('sarif-file'), 'utf8'))
const rules = sarif.runs[0].tool.extensions.flatMap(ext => ext.rules || [])
const ruleIds = rules.map(rule => rule.id)

// Check that all the expected queries ran
const expectedQueriesRun = getQueryIdsInput('queries-run')
const queriesThatShouldHaveRunButDidNot = expectedQueriesRun.filter(queryId => !ruleIds.includes(queryId))

if (queriesThatShouldHaveRunButDidNot.length > 0) {
  core.setFailed(`The following queries were expected to run but did not: ${queriesThatShouldHaveRunButDidNot.join(', ')}`)
}

// Check that all the unexpected queries did not run
const expectedQueriesNotRun = getQueryIdsInput('queries-not-run')

const queriesThatShouldNotHaveRunButDid = expectedQueriesNotRun.filter(queryId => ruleIds.includes(queryId))

if (queriesThatShouldNotHaveRunButDid.length > 0) {
  core.setFailed(`The following queries were NOT expected to have run but did: ${queriesThatShouldNotHaveRunButDid.join(', ')}`)
}


core.startGroup('All queries run')
rules.forEach(rule => {
  core.info(`${rule.id}: ${(rule.properties && rule.properties.name) || rule.name}`)
})
core.endGroup()

core.startGroup('Full SARIF')
core.info(JSON.stringify(sarif, null, 2))
core.endGroup()

function getQueryIdsInput(name) {
  return core.getInput(name)
    .split(',')
    .map(q => q.trim())
    .filter(q => q.length > 0)
}
```
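For reference, a small sketch of the SARIF shape the script above relies on: rule ids live under `runs[0].tool.extensions[].rules[].id`. The SARIF fragment here is invented purely for illustration:

```ts
// Invented SARIF fragment showing only the fields check-sarif/index.js reads.
const sarif = {
  runs: [{
    tool: {
      extensions: [
        { rules: [{ id: 'js/ml-powered/xss' }, { id: 'js/sql-injection' }] },
        { rules: undefined }, // extensions without rules are tolerated via `|| []`
      ],
    },
  }],
};

const ruleIds = sarif.runs[0].tool.extensions
  .flatMap((ext) => ext.rules || [])
  .map((rule) => rule.id);

// Same comma-separated parsing as getQueryIdsInput.
const queriesRun = 'js/ml-powered/xss, js/sql-injection'
  .split(',').map((q) => q.trim()).filter((q) => q.length > 0);

const missing = queriesRun.filter((id) => !ruleIds.includes(id));
console.log(missing.length === 0 ? 'All expected queries ran' : `Missing: ${missing.join(', ')}`);
```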
.github/prepare-test/action.yml (vendored, 10 changed lines). The change replaces the deprecated `::set-output` workflow command with writes to `$GITHUB_OUTPUT`:

```diff
@@ -22,17 +22,17 @@ runs:
 run: |
   if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
     export LATEST=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
-    echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz"
+    echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
   elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
     export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
-    echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
+    echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
   elif [[ ${{ inputs.version }} == *"stable"* ]]; then
     export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
-    echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
+    echo "tools-url=https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
   elif [[ ${{ inputs.version }} == "latest" ]]; then
-    echo "::set-output name=tools-url::latest"
+    echo "tools-url=latest" >> $GITHUB_OUTPUT
   elif [[ ${{ inputs.version }} == "cached" ]]; then
-    echo "::set-output name=tools-url::"
+    echo "tools-url=" >> $GITHUB_OUTPUT
   else
     echo "::error Unrecognized version specified!"
   fi
```
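A hedged sketch of the same migration from a Node-based step: the runner exposes an output file through the `GITHUB_OUTPUT` environment variable, and appending `name=value` lines to it is what the `>> $GITHUB_OUTPUT` redirection above does. The output name and URL below are illustrative only:

```ts
import * as fs from 'fs';
import * as os from 'os';

// Append `name=value` to the file the runner exposes via GITHUB_OUTPUT.
function setOutput(name: string, value: string): void {
  const outputFile = process.env.GITHUB_OUTPUT;
  if (!outputFile) {
    // Fall back to the legacy workflow command on runners that predate GITHUB_OUTPUT.
    process.stdout.write(`::set-output name=${name}::${value}${os.EOL}`);
    return;
  }
  fs.appendFileSync(outputFile, `${name}=${value}${os.EOL}`);
}

setOutput('tools-url', 'https://github.com/github/codeql-action/releases/download/codeql-bundle-20220401/codeql-bundle.tar.gz');
```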
.github/query-filter-test/action.yml (vendored, new file, 54 lines):

```yaml
name: Query Filter Test
description: Runs a test of query filters using the check SARIF action
inputs:
  sarif-file:
    required: true
    description: The SARIF file to check

  queries-run:
    required: true
    description: |
      Comma separated list of query ids that should be included in this SARIF file.

  queries-not-run:
    required: true
    description: |
      Comma separated list of query ids that should NOT be included in this SARIF file.

  config-file:
    required: true
    description: |
      The location of the codeql configuration file to use.

  tools:
    required: true
    description: |
      The url of codeql to use.

runs:
  using: composite
  steps:
    - uses: ./../action/init
      with:
        languages: javascript
        config-file: ${{ inputs.config-file }}
        tools: ${{ inputs.tools }}
        db-location: ${{ runner.temp }}/query-filter-test
      env:
        CODEQL_ACTION_TEST_MODE: "true"
    - uses: ./../action/analyze
      with:
        output: ${{ runner.temp }}/results
        upload-database: false
        upload: false
      env:
        CODEQL_ACTION_TEST_MODE: "true"
    - name: Check SARIF
      uses: ./../action/.github/check-sarif
      with:
        sarif-file: ${{ inputs.sarif-file }}
        queries-run: ${{ inputs.queries-run}}
        queries-not-run: ${{ inputs.queries-not-run}}
    - name: Cleanup after test
      shell: bash
      run: rm -rf "$RUNNER_TEMP/results" "$RUNNER_TEMP/query-filter-test"
```
.github/update-release-branch.py (vendored, 45 changed lines; removed and added variants of changed lines appear adjacently, without diff markers):

```
@@ -5,7 +5,7 @@ import json
import os
import subprocess

EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog
EMPTY_CHANGELOG = """# CodeQL Action Changelog

## [UNRELEASED]

@@ -67,7 +67,7 @@ def open_pr(
body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)

body.append('')
body.append('Conductor for this PR is @' + conductor)
body.append(f'Conductor for this PR is @{conductor}.')

# List all PRs merged
if len(pull_requests) > 0:

@@ -75,31 +75,40 @@ def open_pr(
body.append('Contains the following pull requests:')
for pr in pull_requests:
merger = get_merger_of_pr(repo, pr)
body.append('- #' + str(pr.number) + ' - ' + pr.title +' (@' + merger + ')')
body.append(f'- #{pr.number} (@{merger})')

# List all commits not part of a PR
if len(commits_without_pull_requests) > 0:
body.append('')
body.append('Contains the following commits not from a pull request:')
for commit in commits_without_pull_requests:
author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)
author_description = f' (@{commit.author.login})' if commit.author is not None else ''
body.append(f'- {commit.sha} - {get_truncated_commit_message(commit)}{author_description}')

body.append('')
body.append('Please review the following:')
body.append('Please do the following:')
if len(conflicted_files) > 0:
body.append(' - [ ] You have added commits to this branch that resolve the merge conflicts ' +
body.append(' - [ ] Ensure `package.json` file contains the correct version.')
body.append(' - [ ] Add commits to this branch to resolve the merge conflicts ' +
'in the following files:')
body.extend([f' - [ ] `{file}`' for file in conflicted_files])
body.append(' - [ ] Another maintainer has reviewed the additional commits you added to this ' +
body.append(' - [ ] Ensure another maintainer has reviewed the additional commits you added to this ' +
'branch to resolve the merge conflicts.')
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
body.append(' - [ ] Ensure the CHANGELOG displays the correct version and date.')
body.append(' - [ ] Ensure the CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] Check that there are not any unexpected commits being merged into the ' + target_branch + ' branch.')
body.append(' - [ ] Ensure the docs team is aware of any documentation changes that need to be released.')

if not is_v2_release:
body.append(' - [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.')
body.append(' - [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.')
body.append(' - [ ] Mark the PR as ready for review to trigger the full set of PR checks.')

body.append(' - [ ] Approve and merge this PR. Make sure `Create a merge commit` is selected rather than `Squash and merge` or `Rebase and merge`.')

if is_v2_release:
body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
body.append(' - [ ] The v1 release PR is merged after this PR is merged.')
body.append(' - [ ] Merge the mergeback PR that will automatically be created once this PR is merged.')
body.append(' - [ ] Merge the v1 release PR that will automatically be created once this PR is merged.')

title = 'Merge ' + source_branch + ' into ' + target_branch

@@ -292,19 +301,19 @@ def main():
conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
if len(conflicted_files) > 0:
run_git('add', '.')
run_git('commit', '--no-edit')
run_git('commit', '--no-edit')

# Migrate the package version number from a v2 version number to a v1 version number
print(f'Setting version number to {version}')
subprocess.run(['npm', 'version', version, '--no-git-tag-version'])
subprocess.check_output(['npm', 'version', version, '--no-git-tag-version'])
run_git('add', 'package.json', 'package-lock.json')

# Migrate the changelog notes from v2 version numbers to v1 version numbers
print('Migrating changelog notes from v2 to v1')
subprocess.run(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])
subprocess.check_output(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])

# Remove changelog notes from v2 that don't apply to v1
subprocess.run(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])
subprocess.check_output(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])

# Amend the commit generated by `npm version` to update the CHANGELOG
run_git('add', 'CHANGELOG.md')
```
.github/workflows/__analyze-ref-input.yml (generated, vendored, 43 changed lines; removed and added lines appear adjacently, without diff markers):

```
@@ -25,45 +25,41 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20210308
version: stable-20211005
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20210319
version: stable-20220120
- os: windows-2019
version: stable-20210319
version: stable-20220120
- os: ubuntu-latest
version: stable-20210809
version: stable-20220401
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
- os: windows-latest
version: nightly-latest
name: "Analyze: 'ref' and 'sha' from inputs"
timeout-minutes: 45

@@ -76,6 +72,11 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}

@@ -89,7 +90,5 @@ jobs:
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
```
.github/workflows/__autobuild-action.yml (generated, vendored, new file, 68 lines):

```yaml
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - autobuild-action
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  autobuild-action:
    strategy:
      matrix:
        include:
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-latest
            version: latest
    name: autobuild-action
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: csharp
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/autobuild
        env:
          # Explicitly disable the CLR tracer.
          COR_ENABLE_PROFILING: ''
          COR_PROFILER: ''
          COR_PROFILER_PATH_64: ''
          CORECLR_ENABLE_PROFILING: ''
          CORECLR_PROFILER: ''
          CORECLR_PROFILER_PATH_64: ''
      - uses: ./../action/analyze
      - name: Check database
        shell: bash
        run: |
          cd "$RUNNER_TEMP/codeql_databases"
          if [[ ! -d csharp ]]; then
            echo "Did not find a C# database"
            exit 1
          fi
        env:
          CODEQL_ACTION_TEST_MODE: true
```
.github/workflows/__export-file-baseline-information.yml (generated, vendored, new file, 90 lines):

```yaml
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Export file baseline information
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  export-file-baseline-information:
    strategy:
      matrix:
        include:
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-latest
            version: nightly-latest
    name: Export file baseline information
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
        # Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
        if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
        with:
          swift-version: '5.7'
      - uses: ./../action/init
        with:
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
        env:
          CODEQL_FILE_BASELINE_INFORMATION: true
          CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          CODEQL_FILE_BASELINE_INFORMATION: true
          CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
      - name: Upload SARIF
        uses: actions/upload-artifact@v3
        with:
          name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
          retention-days: 7
      - name: Check results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          expected_baseline_languages="cpp cs go java js py rb swift"

          for lang in ${expected_baseline_languages}; do
            rule_name="${lang}/baseline/expected-extracted-files"
            found_notification=$(jq --arg rule_name "${rule_name}" '[.runs[0].tool.driver.notifications |
              select(. != null) | flatten | .[].id] | any(. == $rule_name)' javascript.sarif)
            if [[ "${found_notification}" != "true" ]]; then
              echo "Expected SARIF output to contain notification '${rule_name}', but found no such notification."
              exit 1
            else
              echo "Found notification '${rule_name}'."
            fi
          done
        env:
          CODEQL_ACTION_TEST_MODE: true
```
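The `Check results` step above uses jq to assert that each language emitted an `<lang>/baseline/expected-extracted-files` notification. A simplified sketch of the same check in TypeScript — it assumes a flat `notifications` array (the jq query also flattens nested entries) and that the SARIF file sits in the working directory:

```ts
import * as fs from 'fs';

const sarif = JSON.parse(fs.readFileSync('javascript.sarif', 'utf8'));
const notificationIds: string[] = (sarif.runs[0].tool.driver.notifications ?? [])
  .map((notification: { id: string }) => notification.id);

const expectedBaselineLanguages = ['cpp', 'cs', 'go', 'java', 'js', 'py', 'rb', 'swift'];
for (const lang of expectedBaselineLanguages) {
  const ruleName = `${lang}/baseline/expected-extracted-files`;
  if (!notificationIds.includes(ruleName)) {
    console.error(`Expected SARIF output to contain notification '${ruleName}', but found no such notification.`);
    process.exitCode = 1;
  } else {
    console.log(`Found notification '${ruleName}'.`);
  }
}
```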
.github/workflows/__extractor-ram-threads.yml (generated, vendored, 2 changed lines; removed and added lines appear adjacently, without diff markers):

```
@@ -63,4 +63,4 @@ jobs:
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
```
.github/workflows/__go-custom-queries.yml (generated, vendored, 43 changed lines; removed and added lines appear adjacently, without diff markers):

```
@@ -25,45 +25,41 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20210308
version: stable-20211005
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20210319
version: stable-20220120
- os: windows-2019
version: stable-20210319
version: stable-20220120
- os: ubuntu-latest
version: stable-20210809
version: stable-20220401
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
- os: windows-latest
version: nightly-latest
name: 'Go: Custom queries'
timeout-minutes: 45

@@ -76,7 +72,9 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init

@@ -88,7 +86,6 @@ jobs:
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true
```
Workflow diff (file header not captured): job `go-custom-tracing` / 'PR Check - Go: Custom tracing' is renamed to `go-tracing-autobuilder` / 'PR Check - Go: tracing with autobuilder step'; removed and added lines appear adjacently, without diff markers:

```
@@ -3,7 +3,7 @@
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Custom tracing'
name: 'PR Check - Go: tracing with autobuilder step'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto

@@ -21,51 +21,35 @@ on:
- ready_for_review
workflow_dispatch: {}
jobs:
go-custom-tracing:
go-tracing-autobuilder:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20210308
- os: windows-2019
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20210319
- os: windows-2019
version: stable-20210319
version: stable-20220120
- os: ubuntu-latest
version: stable-20210809
version: stable-20220401
- os: macos-latest
version: stable-20210809
- os: windows-2019
version: stable-20210809
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-2019
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-2019
version: latest
- os: windows-2022
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-2019
version: nightly-latest
- os: windows-2022
version: nightly-latest
name: 'Go: Custom tracing'
name: 'Go: tracing with autobuilder step'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:

@@ -76,19 +60,29 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: go build main.go
- uses: ./../action/autobuild
- uses: ./../action/analyze
env:
TEST_MODE: true
- shell: bash
run: |
if [[ "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" != true ]]; then
echo "Expected the Go autobuilder to be run, but the" \
"CODEQL_ACTION_DID_AUTOBUILD_GOLANG environment variable was not true."
exit 1
fi
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d go ]]; then
echo "Did not find a Go database"
exit 1
fi
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true
```
Workflow diff (file header not captured): 'PR Check - Debug artifact upload' (`debug-artifacts`) becomes 'PR Check - Go: tracing with custom build steps' (`go-tracing-custom-build-steps`); removed and added lines appear adjacently, without diff markers:

```
@@ -3,7 +3,7 @@
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Debug artifact upload
name: 'PR Check - Go: tracing with custom build steps'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto

@@ -21,22 +21,22 @@ on:
- ready_for_review
workflow_dispatch: {}
jobs:
debug-artifacts:
go-tracing-custom-build-steps:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20210319
version: stable-20220120
- os: ubuntu-latest
version: stable-20210809
version: stable-20220401
- os: macos-latest
version: stable-20210809
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest

@@ -49,7 +49,7 @@ jobs:
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: Debug artifact upload
name: 'Go: tracing with custom build steps'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:

@@ -60,37 +60,33 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- name: Build code
shell: bash
run: ./build.sh
run: go build main.go
- uses: ./../action/analyze
id: analysis
- uses: actions/download-artifact@v3
with:
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
- shell: bash
run: |
LANGUAGES="cpp csharp go java javascript python"
for language in $LANGUAGES; do
echo "Checking $language"
if [[ ! -f "$language.sarif" ]] ; then
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "my-db-$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
if [[ ! -d "$language/log" ]] ; then
echo "Missing logs for $language"
exit 1
fi
done
# Once we start running Bash 4.2 in all environments, we can replace the
# `! -z` flag with the more elegant `-v` which confirms that the variable
# is actually unset and not potentially set to a blank value.
if [[ ! -z "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" ]]; then
echo "Expected the Go autobuilder not to be run, but the" \
"CODEQL_ACTION_DID_AUTOBUILD_GOLANG environment variable was set."
exit 1
fi
cd "$RUNNER_TEMP/codeql_databases"
if [[ ! -d go ]]; then
echo "Did not find a Go database"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
```
Workflow diff (file header not captured): 'PR Check - Go: Autobuild custom tracing' (`go-custom-tracing-autobuild`) becomes 'PR Check - Go: tracing with legacy workflow' (`go-tracing-legacy-workflow`); removed and added lines appear adjacently, without diff markers:

```
@@ -3,7 +3,7 @@
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Autobuild custom tracing'
name: 'PR Check - Go: tracing with legacy workflow'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto

@@ -21,22 +21,22 @@ on:
- ready_for_review
workflow_dispatch: {}
jobs:
go-custom-tracing-autobuild:
go-tracing-legacy-workflow:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-20.04
version: stable-20211005
- os: macos-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
version: stable-20211005
- os: ubuntu-20.04
version: stable-20220120
- os: macos-latest
version: stable-20210319
version: stable-20220120
- os: ubuntu-latest
version: stable-20210809
version: stable-20220401
- os: macos-latest
version: stable-20210809
version: stable-20220401
- os: ubuntu-latest
version: cached
- os: macos-latest

@@ -49,7 +49,7 @@ jobs:
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: 'Go: Autobuild custom tracing'
name: 'Go: tracing with legacy workflow'
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:

@@ -60,17 +60,16 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
with:
go-version: ^1.13.1
- uses: ./../action/init
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
- uses: ./../action/analyze
env:
TEST_MODE: true
- shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"

@@ -79,5 +78,5 @@ jobs:
exit 1
fi
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true
```
.github/workflows/__init-with-registries.yml (generated, vendored, new file, 79 lines):

```yaml
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Packaging: Download using registries'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  init-with-registries:
    strategy:
      matrix:
        include:
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-latest
            version: nightly-latest
    name: 'Packaging: Download using registries'
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - name: Init with registries
        uses: ./../action/init
        with:
          db-location: ${{ runner.temp }}/customDbLocation
          tools: ${{ steps.prepare-test.outputs.tools-url }}
          config-file: ./.github/codeql/codeql-config-registries.yml
          languages: javascript
          registries: |
            - url: "https://ghcr.io/v2/"
              packages: "*/*"
              token: "${{ secrets.GITHUB_TOKEN }}"

      - name: Verify packages installed
        shell: bash
        run: |
          PRIVATE_PACK="$HOME/.codeql/packages/dsp-testing/private-pack"
          CODEQL_PACK1="$HOME/.codeql/packages/dsp-testing/codeql-pack1"

          if [[ -d $PRIVATE_PACK ]]
          then
            echo "$PRIVATE_PACK was installed."
          else
            echo "::error $PRIVATE_PACK pack was not installed."
            exit 1
          fi

          if [[ -d $CODEQL_PACK1 ]]
          then
            echo "$CODEQL_PACK1 was installed."
          else
            echo "::error $CODEQL_PACK1 pack was not installed."
            exit 1
          fi
        env:
          CODEQL_ACTION_TEST_MODE: true
```
2
.github/workflows/__javascript-source-root.yml
generated
vendored
@@ -65,4 +65,4 @@ jobs:
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
36
.github/workflows/__ml-powered-queries.yml
generated
vendored
@@ -25,11 +25,11 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20220120
|
||||
- os: macos-latest
|
||||
version: stable-20220120
|
||||
- os: windows-latest
|
||||
- os: windows-2019
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
@@ -60,6 +60,11 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Set up Go
|
||||
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
@@ -71,8 +76,6 @@ jobs:
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v3
|
||||
@@ -81,11 +84,24 @@ jobs:
|
||||
path: ${{ runner.temp }}/results/javascript.sarif
|
||||
retention-days: 7
|
||||
|
||||
- name: Check sarif
|
||||
uses: ./../action/.github/check-sarif
|
||||
# Running on Windows requires CodeQL CLI 2.9.0+.
|
||||
if: "!(matrix.version == 'stable-20220120' && runner.os == 'Windows')"
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: js/ml-powered/nosql-injection,js/ml-powered/path-injection,js/ml-powered/sql-injection,js/ml-powered/xss
|
||||
queries-not-run: foo,bar
|
||||
|
||||
- name: Check results
|
||||
env:
|
||||
IS_WINDOWS: ${{ matrix.os == 'windows-latest' }}
|
||||
# Running on Windows requires CodeQL CLI 2.9.0+.
|
||||
SHOULD_RUN_ML_POWERED_QUERIES: ${{ !(matrix.version == 'stable-20220120' &&
|
||||
runner.os == 'Windows') }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"
|
||||
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should run at least the ML-powered queries in `expected_rules`.
|
||||
expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
|
||||
@@ -94,10 +110,10 @@ jobs:
|
||||
found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
|
||||
flatten | .[].id] | any(. == $rule)' javascript.sarif)
|
||||
echo "Did find rule '${rule}': ${found_rule}"
|
||||
if [[ "${found_rule}" != "true" && "${IS_WINDOWS}" != "true" ]]; then
|
||||
if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
|
||||
echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
|
||||
exit 1
|
||||
elif [[ "${found_rule}" == "true" && "${IS_WINDOWS}" == "true" ]]; then
|
||||
elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
|
||||
echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
|
||||
exit 1
|
||||
fi
|
||||
@@ -108,12 +124,12 @@ jobs:
|
||||
select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
|
||||
javascript.sarif)
|
||||
echo "Found ${num_alerts} alerts from ML-powered queries.";
|
||||
if [[ "${num_alerts}" -eq 0 && "${IS_WINDOWS}" != "true" ]]; then
|
||||
if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
|
||||
echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
|
||||
exit 1
|
||||
elif [[ "${num_alerts}" -ne 0 && "${IS_WINDOWS}" == "true" ]]; then
|
||||
elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
|
||||
echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
60
.github/workflows/__multi-language-autodetect.yml
generated
vendored
@@ -25,18 +25,18 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20211005
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
version: stable-20211005
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20220120
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
@@ -60,18 +60,31 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Set up Go
|
||||
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||
with:
|
||||
swift-version: '5.7'
|
||||
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: ${{ runner.temp }}/customDbLocation
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- shell: bash
|
||||
|
||||
- name: Check language autodetect for all languages excluding Ruby, Swift
|
||||
shell: bash
|
||||
run: |
|
||||
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
|
||||
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
@@ -103,5 +116,28 @@ jobs:
|
||||
echo "Did not create a database for Python, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check language autodetect for Ruby
|
||||
if: (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version
|
||||
== 'nightly-latest')
|
||||
shell: bash
|
||||
run: |
|
||||
RUBY_DB=${{ fromJson(steps.analysis.outputs.db-locations).ruby }}
|
||||
if [[ ! -d $RUBY_DB ]] || [[ ! $RUBY_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Ruby, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check language autodetect for Swift
|
||||
if: "!startsWith(matrix.os, 'windows') && (matrix.version == 'cached' || matrix.version\
|
||||
\ == 'latest' || matrix.version == 'nightly-latest')"
|
||||
shell: bash
|
||||
run: |
|
||||
SWIFT_DB=${{ fromJson(steps.analysis.outputs.db-locations).swift }}
|
||||
if [[ ! -d $SWIFT_DB ]] || [[ ! $SWIFT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Swift, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true' # Remove when Swift is GA.
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
94
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Packaging: Config and input passed to the CLI'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
packaging-codescanning-config-inputs-js:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Config and input passed to the CLI'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
packs: +dsp-testing/codeql-pack1@1.0.0
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
|
||||
- name: Check results
|
||||
uses: ./../action/.github/check-sarif
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
|
||||
queries-not-run: foo,bar
|
||||
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should have 4 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
|
||||
|
||||
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
|
||||
echo "Found matching rules '$RULES'"
|
||||
if [ "$RULES" != "$EXPECTED_RULES" ]; then
|
||||
echo "Did not match expected rules '$EXPECTED_RULES'."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_PASS_CONFIG_TO_CLI: true
|
||||
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
22
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
@@ -29,23 +29,19 @@ jobs:
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Config and input'
|
||||
timeout-minutes: 45
|
||||
@@ -70,8 +66,14 @@ jobs:
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Check results
|
||||
uses: ./../action/.github/check-sarif
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
|
||||
queries-not-run: foo,bar
|
||||
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -87,4 +89,4 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
22
.github/workflows/__packaging-config-js.yml
generated
vendored
@@ -29,23 +29,19 @@ jobs:
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Config file'
|
||||
timeout-minutes: 45
|
||||
@@ -69,8 +65,14 @@ jobs:
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Check results
|
||||
uses: ./../action/.github/check-sarif
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
|
||||
queries-not-run: foo,bar
|
||||
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -86,4 +88,4 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
22
.github/workflows/__packaging-inputs-js.yml
generated
vendored
@@ -29,23 +29,19 @@ jobs:
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Action input'
|
||||
timeout-minutes: 45
|
||||
@@ -70,8 +66,14 @@ jobs:
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Check results
|
||||
uses: ./../action/.github/check-sarif
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
|
||||
queries-not-run: foo,bar
|
||||
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -87,4 +89,4 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
43
.github/workflows/__remote-config.yml
generated
vendored
@@ -25,45 +25,41 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20211005
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
version: stable-20211005
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
version: stable-20211005
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20220120
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
version: stable-20220120
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: windows-latest
|
||||
version: stable-20220401
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: Remote config file
|
||||
timeout-minutes: 45
|
||||
@@ -76,6 +72,11 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Set up Go
|
||||
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
@@ -86,7 +87,5 @@ jobs:
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
14
.github/workflows/__rubocop-multi-language.yml
generated
vendored
@@ -25,18 +25,8 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
name: RuboCop multi-language
timeout-minutes: 45
runs-on: ${{ matrix.os }}
@@ -68,7 +58,5 @@ jobs:
- uses: ./../action/upload-sarif
with:
sarif_file: rubocop.sarif
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
7
.github/workflows/__test-ruby.yml → .github/workflows/__ruby.yml
generated
vendored
@@ -21,7 +21,7 @@ on:
- ready_for_review
workflow_dispatch: {}
jobs:
test-ruby:
ruby:
strategy:
matrix:
include:
@@ -54,8 +54,6 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- name: Check database
shell: bash
run: |
@@ -65,5 +63,4 @@ jobs:
exit 1
fi
env:
CODEQL_ENABLE_EXPERIMENTAL_FEATURES: 'true'
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
7
.github/workflows/__split-workflow.yml
generated
vendored
@@ -61,8 +61,7 @@ jobs:
with:
skip-queries: true
output: ${{ runner.temp }}/results
env:
TEST_MODE: true

- name: Assert No Results
shell: bash
run: |
@@ -74,8 +73,6 @@ jobs:
with:
output: ${{ runner.temp }}/results
upload-database: false
env:
TEST_MODE: true
- name: Assert Results
shell: bash
run: |
@@ -91,4 +88,4 @@ jobs:
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
67
.github/workflows/__swift-autobuild.yml
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Swift analysis using autobuild
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
swift-autobuild:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Swift analysis using autobuild
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||
with:
|
||||
swift-version: '5.7'
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: swift
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/autobuild
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
- name: Check database
|
||||
shell: bash
|
||||
run: |
|
||||
SWIFT_DB="${{ fromJson(steps.analysis.outputs.db-locations).swift }}"
|
||||
if [[ ! -d "$SWIFT_DB" ]]; then
|
||||
echo "Did not create a database for Swift."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
76
.github/workflows/__swift-custom-build.yml
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Swift analysis using a custom build command
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
swift-custom-build:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Swift analysis using a custom build command
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||
with:
|
||||
swift-version: '5.7'
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: swift
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
- name: Check database
|
||||
shell: bash
|
||||
run: |
|
||||
SWIFT_DB="${{ fromJson(steps.analysis.outputs.db-locations).swift }}"
|
||||
if [[ ! -d "$SWIFT_DB" ]]; then
|
||||
echo "Did not create a database for Swift."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: 'true'
|
||||
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
4
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
@@ -53,8 +53,6 @@ jobs:
with:
working-directory: autobuild-dir
- uses: ./../action/analyze
env:
TEST_MODE: true
- name: Check database
shell: bash
run: |
@@ -64,4 +62,4 @@ jobs:
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
4
.github/workflows/__test-local-codeql.yml
generated
vendored
@@ -51,7 +51,5 @@ jobs:
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
8
.github/workflows/__test-proxy.yml
generated
vendored
@@ -43,16 +43,14 @@ jobs:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
env:
TEST_MODE: true
env:
https_proxy: http://squid-proxy:3128
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
CODEQL_ACTION_TEST_MODE: true
container:
image: ubuntu:18.04
image: ubuntu:22.04
options: --dns 127.0.0.1
services:
squid-proxy:
image: datadog/squid:latest
image: ubuntu/squid:latest
ports:
- 3128:3128
61
.github/workflows/__unset-environment.yml
generated
vendored
@@ -25,12 +25,12 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20211005
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
@@ -48,6 +48,11 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Set up Go
|
||||
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: ${{ runner.temp }}/customDbLocation
|
||||
@@ -57,39 +62,43 @@ jobs:
|
||||
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- shell: bash
|
||||
run: |
|
||||
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
|
||||
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for CPP, or created it in the wrong location."
|
||||
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
|
||||
if [[ ! -d "$CPP_DB" ]] || [[ ! "$CPP_DB" == "${RUNNER_TEMP}/customDbLocation/cpp" ]]; then
|
||||
echo "::error::Did not create a database for CPP, or created it in the wrong location." \
|
||||
"Expected location was '${RUNNER_TEMP}/customDbLocation/cpp' but actual was '${CPP_DB}'"
|
||||
exit 1
|
||||
fi
|
||||
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
|
||||
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for C Sharp, or created it in the wrong location."
|
||||
CSHARP_DB="${{ fromJson(steps.analysis.outputs.db-locations).csharp }}"
|
||||
if [[ ! -d "$CSHARP_DB" ]] || [[ ! "$CSHARP_DB" == "${RUNNER_TEMP}/customDbLocation/csharp" ]]; then
|
||||
echo "::error::Did not create a database for C Sharp, or created it in the wrong location." \
|
||||
"Expected location was '${RUNNER_TEMP}/customDbLocation/csharp' but actual was '${CSHARP_DB}'"
|
||||
exit 1
|
||||
fi
|
||||
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
|
||||
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Go, or created it in the wrong location."
|
||||
GO_DB="${{ fromJson(steps.analysis.outputs.db-locations).go }}"
|
||||
if [[ ! -d "$GO_DB" ]] || [[ ! "$GO_DB" == "${RUNNER_TEMP}/customDbLocation/go" ]]; then
|
||||
echo "::error::Did not create a database for Go, or created it in the wrong location." \
|
||||
"Expected location was '${RUNNER_TEMP}/customDbLocation/go' but actual was '${GO_DB}'"
|
||||
exit 1
|
||||
fi
|
||||
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
|
||||
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Java, or created it in the wrong location."
|
||||
JAVA_DB="${{ fromJson(steps.analysis.outputs.db-locations).java }}"
|
||||
if [[ ! -d "$JAVA_DB" ]] || [[ ! "$JAVA_DB" == "${RUNNER_TEMP}/customDbLocation/java" ]]; then
|
||||
echo "::error::Did not create a database for Java, or created it in the wrong location." \
|
||||
"Expected location was '${RUNNER_TEMP}/customDbLocation/java' but actual was '${JAVA_DB}'"
|
||||
exit 1
|
||||
fi
|
||||
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
|
||||
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Javascript, or created it in the wrong location."
|
||||
JAVASCRIPT_DB="${{ fromJson(steps.analysis.outputs.db-locations).javascript }}"
|
||||
if [[ ! -d "$JAVASCRIPT_DB" ]] || [[ ! "$JAVASCRIPT_DB" == "${RUNNER_TEMP}/customDbLocation/javascript" ]]; then
|
||||
echo "::error::Did not create a database for Javascript, or created it in the wrong location." \
|
||||
"Expected location was '${RUNNER_TEMP}/customDbLocation/javascript' but actual was '${JAVASCRIPT_DB}'"
|
||||
exit 1
|
||||
fi
|
||||
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
|
||||
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Python, or created it in the wrong location."
|
||||
PYTHON_DB="${{ fromJson(steps.analysis.outputs.db-locations).python }}"
|
||||
if [[ ! -d "$PYTHON_DB" ]] || [[ ! "$PYTHON_DB" == "${RUNNER_TEMP}/customDbLocation/python" ]]; then
|
||||
echo "::error::Did not create a database for Python, or created it in the wrong location." \
|
||||
"Expected location was '${RUNNER_TEMP}/customDbLocation/python' but actual was '${PYTHON_DB}'"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
45
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
@@ -25,45 +25,41 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20211005
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
version: stable-20211005
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
version: stable-20211005
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20220120
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
version: stable-20220120
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: windows-latest
|
||||
version: stable-20220401
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
timeout-minutes: 45
|
||||
@@ -76,6 +72,11 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Set up Go
|
||||
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
@@ -90,13 +91,9 @@ jobs:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
45
.github/workflows/__with-checkout-path.yml
generated
vendored
@@ -25,45 +25,41 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20211005
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
version: stable-20211005
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
version: stable-20211005
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20220120
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
version: stable-20220120
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
version: stable-20220401
|
||||
- os: windows-latest
|
||||
version: stable-20220401
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: Use a custom `checkout_path`
|
||||
timeout-minutes: 45
|
||||
@@ -76,6 +72,11 @@ jobs:
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Set up Go
|
||||
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
@@ -103,16 +104,12 @@ jobs:
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
shell: bash
|
||||
@@ -143,4 +140,4 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
31
.github/workflows/check-for-conflicts.yml
vendored
@@ -1,31 +0,0 @@
# Checks for any conflict markers created by git. This check is primarily intended to validate that
# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
name: Check for conflicts

on:
pull_request:
branches: [main, v1, v2]
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]

jobs:
check-for-conflicts:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: Check for conflicts
run: |
# Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
# this to fail the workflow.
FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
'^(<<<<<<<|>>>>>>>)' . || true)
if [[ "${FILES_WITH_CONFLICTS}" ]]; then
echo "Fail: Found merge conflict markers in the following files:"
echo ""
echo "${FILES_WITH_CONFLICTS}"
exit 1
else
echo "Success: Found no merge conflict markers."
fi
5
.github/workflows/codeql.yml
vendored
@@ -9,6 +9,9 @@ on:
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]

env:
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks

jobs:
# Identify the CodeQL tool versions to use in the analysis job.
check-codeql-versions:
@@ -61,7 +64,7 @@ jobs:

# Output a JSON-encoded list with the distinct versions to test against.
echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
echo "::set-output name=versions::${VERSIONS_JSON}"
echo "versions=${VERSIONS_JSON}" >> $GITHUB_OUTPUT

build:
needs: [check-codeql-versions]
219
.github/workflows/codescanning-config-cli.yml
vendored
Normal file
@@ -0,0 +1,219 @@
|
||||
# Tests that the generated code scanning config file contains the expected contents
|
||||
|
||||
name: Code-Scanning config CLI tests
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CODEQL_PASS_CONFIG_TO_CLI: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
|
||||
jobs:
|
||||
code-scanning-config-tests:
|
||||
continue-on-error: true
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
|
||||
# Code-Scanning config not created because environment variable is not set
|
||||
name: Code Scanning Configuration tests
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
|
||||
- name: Empty file
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: "{}"
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Packs from input
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
|
||||
}
|
||||
languages: javascript
|
||||
packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Packs from input with +
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
|
||||
}
|
||||
languages: javascript
|
||||
packs: + dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Queries from input
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }]
|
||||
}
|
||||
languages: javascript
|
||||
queries: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Queries from input with +
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }]
|
||||
}
|
||||
languages: javascript
|
||||
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Queries and packs from input with +
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }],
|
||||
"packs": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
|
||||
}
|
||||
languages: javascript
|
||||
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
|
||||
packs: + dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Queries and packs from config
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" }],
|
||||
"packs": {
|
||||
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ]
|
||||
}
|
||||
}
|
||||
languages: javascript
|
||||
config-file-test: .github/codeql/queries-and-packs-config.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Queries and packs from config overriden by input
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"queries": [{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }],
|
||||
"packs": ["codeql/javascript-queries"]
|
||||
}
|
||||
languages: javascript
|
||||
queries: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
|
||||
packs: codeql/javascript-queries
|
||||
config-file-test: .github/codeql/queries-and-packs-config.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Queries and packs from config merging with input
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"queries": [
|
||||
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" },
|
||||
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql" }
|
||||
],
|
||||
"packs": {
|
||||
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2", "codeql/javascript-queries" ]
|
||||
}
|
||||
}
|
||||
languages: javascript
|
||||
queries: + ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
|
||||
packs: + codeql/javascript-queries
|
||||
config-file-test: .github/codeql/queries-and-packs-config.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Multi-language packs from config
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"packs": {
|
||||
"javascript": ["dsp-testing/codeql-pack1@1.0.0", "dsp-testing/codeql-pack2" ],
|
||||
"ruby": ["codeql/ruby-queries"]
|
||||
},
|
||||
"queries": [
|
||||
{ "uses": "./codeql-qlpacks/complex-javascript-qlpack/foo2/show_ifs.ql" }
|
||||
]
|
||||
}
|
||||
languages: javascript,ruby
|
||||
config-file-test: .github/codeql/multi-language-packs-config.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Other config properties
|
||||
if: success() || failure()
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: |
|
||||
{
|
||||
"name": "Config using all properties",
|
||||
"packs": ["codeql/javascript-queries" ],
|
||||
"disable-default-queries": true,
|
||||
"paths-ignore": ["xxx"],
|
||||
"paths": ["yyy"]
|
||||
}
|
||||
languages: javascript
|
||||
packs: + codeql/javascript-queries
|
||||
config-file-test: .github/codeql/other-config-properties.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Config not generated when env var is not set
|
||||
if: success() || failure()
|
||||
env:
|
||||
CODEQL_PASS_CONFIG_TO_CLI: false
|
||||
uses: ./../action/.github/check-codescanning-config
|
||||
with:
|
||||
expected-config-file-contents: ""
|
||||
languages: javascript
|
||||
packs: + codeql/javascript-queries
|
||||
config-file-test: .github/codeql/other-config-properties.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
91
.github/workflows/debug-artifacts-failure.yml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
# Checks logs, SARIF, and database bundle debug artifacts exist
|
||||
# when the analyze step fails.
|
||||
name: PR Check - Debug artifacts after failure
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
upload-artifacts:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
name: Upload debug artifacts after failure in analyze
|
||||
continue-on-error: true
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Dump GitHub event
|
||||
run: cat "${GITHUB_EVENT_PATH}"
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: latest
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
debug: true
|
||||
debug-artifact-name: my-debug-artifacts
|
||||
debug-database-name: my-db
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
with:
|
||||
expect-error: true
|
||||
ram: 1
|
||||
download-and-check-artifacts:
|
||||
name: Download and check debug artifacts after failure in analyze
|
||||
needs: upload-artifacts
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
- name: Check expected artifacts exist
|
||||
shell: bash
|
||||
run: |
|
||||
OPERATING_SYSTEMS="ubuntu-latest macos-latest"
|
||||
LANGUAGES="cpp csharp go java javascript python"
|
||||
for os in $OPERATING_SYSTEMS; do
|
||||
pushd "./my-debug-artifacts-$os"
|
||||
echo "Artifacts from run on $os:"
|
||||
for language in $LANGUAGES; do
|
||||
echo "- Checking $language"
|
||||
if [[ ! -f "my-db-$language-partial.zip" ]] ; then
|
||||
echo "Missing a partial database bundle for $language"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -d "log" ]] ; then
|
||||
echo "Missing database initialization logs"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! "$language" == "go" ]] && [[ ! -d "$language/log" ]] ; then
|
||||
echo "Missing logs for $language"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
popd
|
||||
done
|
||||
env:
|
||||
GO111MODULE: auto
|
||||
117
.github/workflows/debug-artifacts.yml
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
# Checks logs, SARIF, and database bundle debug artifacts exist.
|
||||
name: PR Check - Debug artifact upload
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
upload-artifacts:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20211005
|
||||
- os: macos-latest
|
||||
version: stable-20211005
|
||||
- os: ubuntu-20.04
|
||||
version: stable-20220120
|
||||
- os: macos-latest
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: stable-20220401
|
||||
- os: macos-latest
|
||||
version: stable-20220401
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Upload debug artifacts
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
debug: true
|
||||
debug-artifact-name: my-debug-artifacts
|
||||
debug-database-name: my-db
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
download-and-check-artifacts:
|
||||
name: Download and check debug artifacts
|
||||
needs: upload-artifacts
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
- name: Check expected artifacts exist
|
||||
shell: bash
|
||||
run: |
|
||||
VERSIONS="stable-20211005 stable-20220120 stable-20220401 cached latest nightly-latest"
|
||||
LANGUAGES="cpp csharp go java javascript python"
|
||||
for version in $VERSIONS; do
|
||||
if [[ "$version" =~ stable-(20211005|20220120|20210809) ]]; then
|
||||
# Note the absence of the period in "ubuntu-2004": this is present in the image name
|
||||
# but not the artifact name
|
||||
OPERATING_SYSTEMS="ubuntu-2004 macos-latest"
|
||||
else
|
||||
OPERATING_SYSTEMS="ubuntu-latest macos-latest"
|
||||
fi
|
||||
for os in $OPERATING_SYSTEMS; do
|
||||
pushd "./my-debug-artifacts-$os-$version"
|
||||
echo "Artifacts from version $version on $os:"
|
||||
for language in $LANGUAGES; do
|
||||
echo "- Checking $language"
|
||||
if [[ ! -f "$language.sarif" ]] ; then
|
||||
echo "Missing a SARIF file for $language"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -f "my-db-$language.zip" ]] ; then
|
||||
echo "Missing a database bundle for $language"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -d "$language/log" ]] ; then
|
||||
echo "Missing logs for $language"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
popd
|
||||
done
|
||||
done
|
||||
env:
|
||||
GO111MODULE: auto
|
||||
47
.github/workflows/expected-queries-runs.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
name: Check queries that ran
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
|
||||
jobs:
|
||||
expected-queries:
|
||||
name: Expected Queries Tests
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: latest
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
upload: false
|
||||
|
||||
- name: Check Sarif
|
||||
uses: ./../action/.github/check-sarif
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: js/incomplete-hostname-regexp,js/path-injection
|
||||
queries-not-run: foo,bar
|
||||
21
.github/workflows/post-release-mergeback.yml
vendored
@@ -47,11 +47,10 @@ jobs:
id: getVersion
run: |
VERSION="v$(jq '.version' -r 'package.json')"
echo "::set-output name=version::${VERSION}"
echo "version=${VERSION}" >> $GITHUB_OUTPUT
short_sha="${GITHUB_SHA:0:8}"
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
echo "::set-output name=newBranch::${NEW_BRANCH}"

echo "newBranch=${NEW_BRANCH}" >> $GITHUB_OUTPUT

- name: Dump branches
env:
@@ -77,7 +76,7 @@ jobs:
exists="$?"
if [ "${exists}" -eq 0 ]; then
echo "Tag ${VERSION} exists. Not going to re-release."
echo "::set-output name=exists::true"
echo "exists=true" >> $GITHUB_OUTPUT
else
echo "Tag ${VERSION} does not exist yet."
fi
@@ -114,7 +113,18 @@ jobs:
run: |
set -exu
pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
pr_body="Updates version and changelog."
pr_body=$(cat << EOF
This PR bumps the version number and updates the changelog after the ${VERSION} release.

Please do the following:

- [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.
- [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.
- [ ] Mark the PR as ready for review to trigger the full set of PR checks.
- [ ] Approve and merge the PR. When merging the PR, make sure "Create a merge commit" is
selected rather than "Squash and merge" or "Rebase and merge".
EOF
)

# Update the version number ready for the next release
npm version patch --no-git-tag-version
@@ -134,4 +144,5 @@ jobs:
--title "${pr_title}" \
--label "Update dependencies" \
--body "${pr_body}" \
--assignee "${GITHUB_ACTOR}" \
--draft
438 .github/workflows/pr-checks.yml (vendored)
@@ -1,4 +1,4 @@
|
||||
name: PR Checks (Basic Checks and Runner)
|
||||
name: PR Checks
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -10,27 +10,21 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
lint-js:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run Lint
|
||||
run: npm run-script lint
|
||||
|
||||
check-js:
|
||||
name: Check JS
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
node-types-version: [12.12, current]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Lint
|
||||
run: npm run-script lint
|
||||
|
||||
- name: Update version of @types/node
|
||||
if: matrix.node-types-version != 'current'
|
||||
@@ -67,21 +61,43 @@ jobs:
|
||||
- name: Check node modules up to date
|
||||
run: .github/workflows/script/check-node-modules.sh
|
||||
|
||||
verify-pr-checks:
|
||||
name: Verify PR checks up to date
|
||||
check-file-contents:
|
||||
name: Check file contents
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Checks for any conflict markers created by git. This check is primarily intended to validate that
|
||||
# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
|
||||
- name: Check for merge conflicts
|
||||
run: |
|
||||
# Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
|
||||
# this to fail the workflow.
|
||||
FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
|
||||
'^(<<<<<<<|>>>>>>>)' . || true)
|
||||
if [[ "${FILES_WITH_CONFLICTS}" ]]; then
|
||||
echo "Fail: Found merge conflict markers in the following files:"
|
||||
echo ""
|
||||
echo "${FILES_WITH_CONFLICTS}"
|
||||
exit 1
|
||||
else
|
||||
echo "Success: Found no merge conflict markers."
|
||||
fi
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install ruamel.yaml
|
||||
|
||||
# Ensure the generated PR check workflows are up to date.
|
||||
- name: Verify PR checks up to date
|
||||
run: .github/workflows/script/verify-pr-checks.sh
|
||||
|
||||
@@ -90,393 +106,15 @@ jobs:
|
||||
needs: [check-js, check-node-modules]
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: npm run-script test
|
||||
run: npm run-script test
|
||||
|
||||
runner-analyze-javascript-ubuntu:
|
||||
name: Runner ubuntu JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
- name: npm test
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
# Pass --config-file here, but not for other jobs in this workflow.
|
||||
# This means we're testing the config file parsing in the runner
|
||||
# but not slowing down all jobs unnecessarily as it doesn't add much
|
||||
# testing the parsing on different operating systems and languages.
|
||||
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-javascript-windows:
|
||||
name: Runner windows JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-javascript-macos:
|
||||
name: Runner macos JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-ubuntu:
|
||||
name: Runner ubuntu C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
run: |
|
||||
. ./codeql-runner/codeql-env.sh
|
||||
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-windows:
|
||||
name: Runner windows C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
# `windows-latest`.
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: powershell
|
||||
run: |
|
||||
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
|
||||
$Env:CODEQL_EXTRACTOR_CSHARP_ROOT = "" # Unset an environment variable to make sure the tracer resists this
|
||||
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||
|
||||
- name: Upload tracer logs
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: tracer-logs
|
||||
path: ./codeql-runner/compound-build-tracer.log
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-macos:
|
||||
name: Runner macos C# analyze
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: |
|
||||
. ./codeql-runner/codeql-env.sh
|
||||
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-ubuntu:
|
||||
name: Runner ubuntu autobuild C# analyze
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux autobuild
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-windows:
|
||||
timeout-minutes: 45
|
||||
name: Runner windows autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
# `windows-latest`.
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: powershell
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe autobuild
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-macos:
|
||||
name: Runner macos autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos autobuild
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-upload-sarif:
|
||||
name: Runner upload sarif
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Upload with runner
|
||||
run: |
|
||||
# Deliberately don't use TEST_MODE here. This is specifically testing
|
||||
# the compatibility with the API.
|
||||
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
runner-extractor-ram-threads-options:
|
||||
name: Runner ubuntu extractor RAM and threads options
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
. ./codeql-runner/codeql-env.sh
|
||||
if [ "${CODEQL_RAM}" != "230" ]; then
|
||||
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
|
||||
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_THREADS}" != "1" ]; then
|
||||
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
|
||||
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
|
||||
exit 1
|
||||
fi
|
||||
# Run any commands referenced in package.json using Bash, otherwise
|
||||
# we won't be able to find them on Windows.
|
||||
npm config set script-shell bash
|
||||
npm test
|
||||
|
||||
40 .github/workflows/python-deps.yml (vendored)
@@ -7,6 +7,17 @@ on:
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
paths:
|
||||
# Changes to this workflow.
|
||||
- '.github/workflows/python-deps.yml'
|
||||
# Changes to the Python package installation scripts and their tests.
|
||||
- 'python-setup/**'
|
||||
# Changes to the default CodeQL bundle version.
|
||||
- '**/defaults.json'
|
||||
schedule:
|
||||
# Weekly on Monday.
|
||||
- cron: '0 0 * * 1'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
test-setup-python-scripts:
|
||||
@@ -15,9 +26,20 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
os: [ubuntu-20.04, ubuntu-22.04, macos-latest]
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
exclude:
|
||||
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
|
||||
- python_version: 2
|
||||
python_deps_type: poetry
|
||||
# Python2 and pipenv are not supported since pipenv v2021.11.5
|
||||
- python_version: 2
|
||||
python_deps_type: pipenv
|
||||
# Python2 is not available on ubuntu-22.04 by default -- see https://github.com/github/codeql-action/pull/1257
|
||||
- python_version: 2
|
||||
os: ubuntu-22.04
|
||||
|
||||
|
||||
env:
|
||||
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
|
||||
@@ -43,7 +65,8 @@ jobs:
|
||||
cd $GITHUB_WORKSPACE/python-setup/tests/${PYTHON_DEPS_TYPE}/requests-${PYTHON_VERSION}
|
||||
|
||||
case ${{ matrix.os }} in
|
||||
ubuntu-latest*) basePath="/opt";;
|
||||
ubuntu-20.04*) basePath="/opt";;
|
||||
ubuntu-22.04*) basePath="/opt";;
|
||||
macos-latest*) basePath="/Users/runner";;
|
||||
esac
|
||||
echo ${basePath}
|
||||
@@ -67,7 +90,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
os: [ubuntu-20.04, ubuntu-22.04, macos-latest]
|
||||
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
@@ -89,7 +112,8 @@ jobs:
|
||||
cd $GITHUB_WORKSPACE/python-setup/tests/requirements/non-standard-location
|
||||
|
||||
case ${{ matrix.os }} in
|
||||
ubuntu-latest*) basePath="/opt";;
|
||||
ubuntu-20.04*) basePath="/opt";;
|
||||
ubuntu-22.04*) basePath="/opt";;
|
||||
macos-latest*) basePath="/Users/runner";;
|
||||
esac
|
||||
echo ${basePath}
|
||||
@@ -115,8 +139,16 @@ jobs:
|
||||
matrix:
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
exclude:
|
||||
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
|
||||
- python_version: 2
|
||||
python_deps_type: poetry
|
||||
# Python2 and pipenv are not supported since pipenv v2021.11.5
|
||||
- python_version: 2
|
||||
python_deps_type: pipenv
|
||||
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
|
||||
PYTHON_VERSION: ${{ matrix.python_version }}
|
||||
|
||||
|
||||
56 .github/workflows/query-filters.yml (vendored, new file)
@@ -0,0 +1,56 @@
|
||||
name: Query filters tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
|
||||
jobs:
|
||||
query-filters:
|
||||
name: Query Filters Tests
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: latest
|
||||
|
||||
- name: Check SARIF for default queries with Single include, Single exclude
|
||||
uses: ./../action/.github/query-filter-test
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: js/zipslip
|
||||
queries-not-run: js/path-injection
|
||||
config-file: ./.github/codeql/codeql-config-query-filters1.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Check SARIF for query packs with Single include, Single exclude
|
||||
uses: ./../action/.github/query-filter-test
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: js/zipslip,javascript/example/empty-or-one-block
|
||||
queries-not-run: js/path-injection
|
||||
config-file: ./.github/codeql/codeql-config-query-filters2.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- name: Check SARIF for query packs and local queries with Single include, Single exclude
|
||||
uses: ./../action/.github/query-filter-test
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: js/zipslip,javascript/example/empty-or-one-block,inrepo-javascript-querypack/show-ifs
|
||||
queries-not-run: js/path-injection,complex-python-querypack/show-ifs,complex-python-querypack/foo/bar/show-ifs
|
||||
config-file: ./.github/codeql/codeql-config-query-filters3.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
4 .github/workflows/script/check-js.sh (vendored)
@@ -14,8 +14,8 @@ npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
  # If we get a fail here then the PR needs attention
  >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
  >&2 echo "Failed: JavaScript files are not up to date. Run 'rm -rf lib && npm run-script build' to update"
  git status
  exit 1
fi
echo "Success: JavaScript files are up to date"
.github/workflows/script/check-node-modules.sh (vendored)
@@ -7,7 +7,10 @@ if [ ! -z "$(git status --porcelain)" ]; then
  >&2 echo "Failed: Repo should be clean before testing!"
  exit 1
fi
sudo npm install --force -g npm@latest
# Pin npm to v8 since v9 doesn't support Node 12.
# When updating this, make sure to update the npm version in
# `.github/workflows/update-dependencies.yml` too.
sudo npm install --force -g npm@^8.19.3
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
@@ -15,8 +18,8 @@ npm run removeNPMAbsolutePaths
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
  # If we get a fail here then the PR needs attention
  >&2 echo "Failed: node_modules are not up to date. Run 'npm ci && npm run removeNPMAbsolutePaths' on a macOS machine to update. Note it is important this command is run on macOS and not any other operating system as there is one dependency (fsevents) that is needed for macOS and may not be installed if the command is run on a Windows or Linux machine."
  >&2 echo "Failed: node_modules are not up to date. Add the 'Update dependencies' label to your PR to update them. Note it is important that node modules are updated on macOS and not any other operating system as there is one dependency (fsevents) that is needed for macOS and may not be installed if dependencies are updated on a Windows or Linux machine."
  git status
  exit 1
fi
echo "Success: node_modules are up to date"
37 .github/workflows/script/update-required-checks.sh (vendored, new executable file)
@@ -0,0 +1,37 @@
#!/usr/bin/env bash
# Update the required checks based on the current branch.
# Typically, this will be main.

if ! gh auth status 2>/dev/null; then
  gh auth status
  echo "Failed: Not authorized. This script requires admin access to github/codeql-action through the gh CLI."
  exit 1
fi

if [ "$#" -eq 1 ]; then
  # If we were passed an argument, use that as the SHA
  GITHUB_SHA="$1"
elif [ "$#" -gt 1 ]; then
  echo "Usage: $0 [SHA]"
  echo "Update the required checks based on the SHA, or main."
  exit 1
elif [ -z "$GITHUB_SHA" ]; then
  # If we don't have a SHA, use main
  GITHUB_SHA="$(git rev-parse main)"
fi

echo "Getting checks for $GITHUB_SHA"

# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"

echo "$CHECKS" | jq

echo "{\"contexts\": ${CHECKS}}" > checks.json

for BRANCH in main releases/v2 releases/v1; do
  echo "Updating $BRANCH"
  gh api --silent -X "PATCH" "repos/github/codeql-action/branches/$BRANCH/protection/required_status_checks" --input checks.json
done

rm checks.json
5 .github/workflows/update-dependencies.yml (vendored)
@@ -27,7 +27,10 @@ jobs:
        run: |
          git fetch origin "$BRANCH" --depth=1
          git checkout "origin/$BRANCH"
          sudo npm install --force -g npm@latest
          # Pin npm to v8 since v9 doesn't support Node 12.
          # When updating this, make sure to update the npm version in
          # `.github/workflows/script/check-node-modules.sh` too.
          sudo npm install --force -g npm@^8.19.3
          npm install
          npm ci
          npm run removeNPMAbsolutePaths
45 .github/workflows/update-required-checks.yml (vendored)
@@ -1,45 +0,0 @@
|
||||
|
||||
# This job updates the required checks on the codeql-action repository based on the
|
||||
# checks performed on the most recent commit.
|
||||
|
||||
name: Update required checks
|
||||
on:
|
||||
schedule:
|
||||
# 23:01 on Saturdays
|
||||
- cron: "1 23 * * 6"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-required-checks:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dump environment
|
||||
run: env
|
||||
|
||||
- name: Dump GitHub context
|
||||
env:
|
||||
GITHUB_CONTEXT: '${{ toJson(github) }}'
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
|
||||
- name: Update checks
|
||||
env:
|
||||
GITHUB_TOKEN: "${{ secrets.CODEQL_CI_TOKEN }}"
|
||||
run: |
|
||||
# Update the required checks based on the current branch.
|
||||
# Typically, this will be main.
|
||||
echo "Getting checks for $GITHUB_SHA"
|
||||
|
||||
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
|
||||
CHECKS="$(gh api repos/github/codeql-action/commits/${GITHUB_SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update-") | not)] | sort')"
|
||||
|
||||
echo "::group::New Checks"
|
||||
echo "$CHECKS" | jq
|
||||
echo "::endgroup::"
|
||||
|
||||
echo "{\"contexts\": ${CHECKS}}" > checks.json
|
||||
echo "Updating main"
|
||||
gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
|
||||
echo "Updating v2"
|
||||
gh api -X "PATCH" repos/github/codeql-action/branches/releases/v2/protection/required_status_checks --input checks.json
|
||||
echo "Updating v1"
|
||||
gh api -X "PATCH" repos/github/codeql-action/branches/releases/v1/protection/required_status_checks --input checks.json
|
||||
2 .gitignore (vendored)
@@ -1,4 +1,2 @@
/runner/dist/
/runner/node_modules/
# Ignore for example failing-tests.json from AVA
node_modules/.cache
127 CHANGELOG.md
@@ -1,28 +1,143 @@
|
||||
# CodeQL Action Changelog
|
||||
|
||||
## [UNRELEASED]
|
||||
## 1.1.35 - 01 Dec 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.1.34 - 25 Nov 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.4. [#1391](https://github.com/github/codeql-action/pull/1391)
|
||||
- Fixed a bug where the `init` action and the `analyze` action would sometimes have different sets of experimental feature flags enabled. [#1384](https://github.com/github/codeql-action/pull/1384)
|
||||
|
||||
## 1.1.33 - 16 Nov 2022
|
||||
|
||||
- Go is now analyzed in the same way as other compiled languages such as C/C++, C#, and Java. This completes the rollout of the feature described in [CodeQL Action version 2.1.27](#2127---06-oct-2022). [#1322](https://github.com/github/codeql-action/pull/1322)
|
||||
- Bump the minimum CodeQL bundle version to 2.6.3. [#1358](https://github.com/github/codeql-action/pull/1358)
|
||||
|
||||
## 1.1.32 - 14 Nov 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.3. [#1348](https://github.com/github/codeql-action/pull/1348)
|
||||
- Update the ML-powered additional query pack for JavaScript to version 0.4.0. [#1351](https://github.com/github/codeql-action/pull/1351)
|
||||
|
||||
## 1.1.31 - 04 Nov 2022
|
||||
|
||||
- The `rb/weak-cryptographic-algorithm` Ruby query has been updated to no longer report uses of hash functions such as `MD5` and `SHA1` even if they are known to be weak. These hash algorithms are used very often in non-sensitive contexts, making the query too imprecise in practice. For more information, see the corresponding change in the [github/codeql repository](https://github.com/github/codeql/pull/11129). [#1344](https://github.com/github/codeql-action/pull/1344)
|
||||
|
||||
## 1.1.30 - 02 Nov 2022
|
||||
|
||||
- Improve the error message when using CodeQL bundle version 2.7.2 and earlier in a workflow that runs on a runner image such as `ubuntu-22.04` that uses glibc version 2.34 and later. [#1334](https://github.com/github/codeql-action/pull/1334)
|
||||
|
||||
## 1.1.29 - 26 Oct 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.2. [#1320](https://github.com/github/codeql-action/pull/1320)
|
||||
|
||||
## 1.1.28 - 18 Oct 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.1. [#1294](https://github.com/github/codeql-action/pull/1294)
|
||||
- Replace uses of GitHub Actions command `set-output` because it is now deprecated. See more information in the [GitHub Changelog](https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/). [#1301](https://github.com/github/codeql-action/pull/1301)
|
||||
|
||||
## 1.1.27 - 06 Oct 2022
|
||||
|
||||
- We are rolling out a feature of the CodeQL Action in October 2022 that changes the way that Go code is analyzed to be more consistent with other compiled languages like C/C++, C#, and Java. You do not need to alter your code scanning workflows. If you encounter any problems, please [file an issue](https://github.com/github/codeql-action/issues) or open a private ticket with GitHub Support and request an escalation to engineering.
|
||||
|
||||
## 1.1.26 - 29 Sep 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.11.0. [#1267](https://github.com/github/codeql-action/pull/1267)
|
||||
|
||||
## 1.1.25 - 21 Sep 2022
|
||||
|
||||
- We will soon be rolling out a feature of the CodeQL Action that stores some information used to make future runs faster in the GitHub Actions cache. Initially, this will only be enabled on JavaScript repositories, but we plan to add more languages to this soon. The new feature can be disabled by passing the `trap-caching: false` option to your workflow's `init` step, for example if you are already using the GitHub Actions cache for a different purpose and are near the storage limit for it. A minimal sketch of this option appears after this list.
|
||||
- Add support for Python automatic dependency installation with Poetry 1.2 [#1258](https://github.com/github/codeql-action/pull/1258).
|
||||
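As a rough, hedged sketch (the language and version tag below are placeholders, not taken from this changelog entry), opting out of TRAP caching would look something like this in a workflow's `init` step:

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: github/codeql-action/init@v1
    with:
      languages: javascript
      # Opt out of TRAP caching, e.g. if the Actions cache is already
      # being used for something else and is near its storage limit.
      trap-caching: false
  - uses: github/codeql-action/analyze@v1
```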
|
||||
## 1.1.24 - 16 Sep 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.1.23 - 14 Sep 2022
|
||||
|
||||
- Allow CodeQL packs to be downloaded from GitHub Enterprise Server instances, using the new `registries` input for the `init` action. [#1221](https://github.com/github/codeql-action/pull/1221)
|
||||
- Update default CodeQL bundle version to 2.10.5. [#1240](https://github.com/github/codeql-action/pull/1240)
|
||||
|
||||
## 1.1.22 - 01 Sep 2022
|
||||
|
||||
- Downloading CodeQL packs has been moved to the `init` step. Previously, CodeQL packs were downloaded during the `analyze` step. [#1218](https://github.com/github/codeql-action/pull/1218)
|
||||
- Update default CodeQL bundle version to 2.10.4. [#1224](https://github.com/github/codeql-action/pull/1224)
|
||||
- The newly released [Poetry 1.2](https://python-poetry.org/blog/announcing-poetry-1.2.0) is not yet supported. In the most common case where the CodeQL Action is automatically installing Python dependencies, it will continue to install and use Poetry 1.1 on its own. However, in certain cases such as with self-hosted runners, you may need to ensure Poetry 1.1 is installed yourself.
|
||||
|
||||
## 1.1.21 - 25 Aug 2022
|
||||
|
||||
- Improve error messages when the code scanning configuration file includes an invalid `queries` block or an invalid `query-filters` block. [#1208](https://github.com/github/codeql-action/pull/1208)
|
||||
- Fix a bug where Go build tracing could fail on Windows. [#1209](https://github.com/github/codeql-action/pull/1209)
|
||||
|
||||
## 1.1.20 - 22 Aug 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.1.19 - 17 Aug 2022
|
||||
|
||||
- Add the ability to filter queries from a code scanning run by using the `query-filters` option in the code scanning configuration file. [#1098](https://github.com/github/codeql-action/pull/1098) A sketch of such a filter block follows this list.
|
||||
- In debug mode, debug artifacts are now uploaded even if a step in the Actions workflow fails. [#1159](https://github.com/github/codeql-action/pull/1159)
|
||||
- Update default CodeQL bundle version to 2.10.3. [#1178](https://github.com/github/codeql-action/pull/1178)
|
||||
- The combination of python2 and Pipenv is no longer supported. [#1181](https://github.com/github/codeql-action/pull/1181)
|
||||
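As a hedged illustration (the query ID and tag below are placeholders), a `query-filters` block in the code scanning configuration file might look roughly like this:

```yaml
# .github/codeql/codeql-config.yml
name: "CodeQL config with query filters"
queries:
  - uses: security-and-quality
query-filters:
  # Exclude a single query by its ID.
  - exclude:
      id: js/path-injection
  # Keep only queries carrying a given tag.
  - include:
      tags: security
```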
|
||||
## 1.1.18 - 03 Aug 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.10.2. [#1156](https://github.com/github/codeql-action/pull/1156)
|
||||
|
||||
## 1.1.17 - 28 Jul 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.10.1. [#1143](https://github.com/github/codeql-action/pull/1143)
|
||||
|
||||
## 1.1.16 - 13 Jul 2022
|
||||
|
||||
- You can now quickly debug a job that uses the CodeQL Action by re-running the job from the GitHub UI and selecting the "Enable debug logging" option. [#1132](https://github.com/github/codeql-action/pull/1132)
|
||||
- You can now see diagnostic messages produced by the analysis in the logs of the `analyze` Action by enabling debug mode. To enable debug mode, pass `debug: true` to the `init` Action, or [enable step debug logging](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging). This feature is available for CodeQL CLI version 2.10.0 and later. [#1133](https://github.com/github/codeql-action/pull/1133)
|
||||
|
||||
## 1.1.15 - 28 Jun 2022
|
||||
|
||||
- CodeQL query packs listed in the `packs` configuration field will be skipped if their target language is not being analyzed in the current Actions job. Previously, this would throw an error. [#1116](https://github.com/github/codeql-action/pull/1116)
|
||||
- The combination of python2 and poetry is no longer supported. See <https://github.com/actions/setup-python/issues/374> for more details. [#1124](https://github.com/github/codeql-action/pull/1124)
|
||||
- Update default CodeQL bundle version to 2.10.0. [#1123](https://github.com/github/codeql-action/pull/1123)
|
||||
|
||||
## 1.1.14 - 22 Jun 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.1.13 - 21 Jun 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.9.4. [#1100](https://github.com/github/codeql-action/pull/1100)
|
||||
|
||||
## 1.1.12 - 01 Jun 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.9.3. [#1084](https://github.com/github/codeql-action/pull/1084)
|
||||
|
||||
## 1.1.11 - 17 May 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.9.2. [#1074](https://github.com/github/codeql-action/pull/1074)
|
||||
|
||||
## 1.1.10 - 10 May 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.9.1. [#1056](https://github.com/github/codeql-action/pull/1056)
|
||||
- When `wait-for-processing` is enabled, the workflow will now fail if any errors occurred while processing the analysis results.
|
||||
|
||||
## 2.1.9 - 27 Apr 2022
|
||||
## 1.1.9 - 27 Apr 2022
|
||||
|
||||
- Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
|
||||
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007) A sketch appears after this list.
|
||||
- Update default CodeQL bundle version to 2.9.0.
|
||||
- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)
|
||||
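For illustration only (the step name and version tag are assumptions), disabling the wait would look roughly like this:

```yaml
- name: Perform CodeQL Analysis
  uses: github/codeql-action/analyze@v1
  with:
    # Skip the up-to-2-minute wait for code scanning to finish
    # processing the uploaded results.
    wait-for-processing: "false"
```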
|
||||
## 2.1.8 - 08 Apr 2022
|
||||
## 1.1.8 - 08 Apr 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.8.5. [#1014](https://github.com/github/codeql-action/pull/1014)
|
||||
- Fix error where the init action would fail due to a GitHub API request that was taking too long to complete [#1025](https://github.com/github/codeql-action/pull/1025)
|
||||
|
||||
## 2.1.7 - 05 Apr 2022
|
||||
## 1.1.7 - 05 Apr 2022
|
||||
|
||||
- A bug where additional queries specified in the workflow file would sometimes not be respected has been fixed. [#1018](https://github.com/github/codeql-action/pull/1018)
|
||||
|
||||
## 2.1.6 - 30 Mar 2022
|
||||
## 1.1.6 - 30 Mar 2022
|
||||
|
||||
- [v2+ only] The CodeQL Action now runs on Node.js v16. [#1000](https://github.com/github/codeql-action/pull/1000)
|
||||
- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
|
||||
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)
|
||||
|
||||
|
||||
@@ -38,10 +38,6 @@ To see the effect of your changes and to test them, push your changes in a branc
|
||||
|
||||
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.
|
||||
|
||||
### Building the CodeQL runner
|
||||
|
||||
Navigate to the `runner` directory and run `npm install` to install dependencies needed only for compiling the CodeQL runner. Run `npm run build-runner` to output files to the `runner/dist` directory.
|
||||
|
||||
## Submitting a pull request
|
||||
|
||||
1. [Fork][fork] and clone the repository
|
||||
@@ -80,23 +76,11 @@ Here are a few things you can do that will increase the likelihood of your pull
|
||||
|
||||
## Keeping the PR checks up to date (admin access required)
|
||||
|
||||
Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [Update required checks](.github/workflows/update-required-checks.yml) workflow.
|
||||
Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [update-required-checks.sh](.github/workflows/script/update-required-checks.sh) script:
|
||||
|
||||
Or you can use this semi-automated approach:
|
||||
|
||||
1. In a terminal check out the `SHA` whose checks you want to use as the base. Typically, this will be `main`.
|
||||
2. From a terminal, run the following commands:
|
||||
|
||||
```sh
|
||||
SHA="$(git rev-parse HEAD)"
|
||||
CHECKS="$(gh api repos/github/codeql-action/commits/${SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "Update dependencies" or . == "Update Supported Enterprise Server Versions" | not)]')"
|
||||
echo "{\"contexts\": ${CHECKS}}" > checks.json
|
||||
gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
|
||||
gh api -X "PATCH" repos/github/codeql-action/branches/releases/v2/protection/required_status_checks --input checks.json
|
||||
gh api -X "PATCH" repos/github/codeql-action/branches/releases/v1/protection/required_status_checks --input checks.json
|
||||
```
|
||||
|
||||
3. Go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules have been updated.
|
||||
1. By default, this script retrieves the checks from the latest SHA on `main`, so make sure that your `main` branch is up to date.
|
||||
2. Run the script (a minimal invocation is sketched after this list). If there's a reason to, you can pass in a different SHA as a CLI argument.
|
||||
3. After running, go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules for `main`, `v1`, and `v2` have been updated.
|
||||
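For example, a minimal invocation might look like the following (assuming you run it from the repository root and are already authenticated with the `gh` CLI):

```sh
# Update required checks from the latest SHA on main:
.github/workflows/script/update-required-checks.sh

# Or pass a specific SHA whose checks should be used:
.github/workflows/script/update-required-checks.sh "$(git rev-parse HEAD)"
```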
|
||||
## Resources
|
||||
|
||||
|
||||
11 README.md
@@ -59,9 +59,9 @@ jobs:
|
||||
uses: github/codeql-action/init@v2
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
# with:
|
||||
# languages: go, javascript, csharp, python, cpp, java
|
||||
# languages: go, javascript, csharp, python, cpp, java, ruby
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below).
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
@@ -139,10 +139,3 @@ By default, this will override any queries specified in a config file. If you wi
|
||||
|
||||
Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
|
||||
|
||||
### Note on "missing analysis" message
|
||||
|
||||
If the very first code scanning run happens on a pull request, you will probably get a message mentioning a "missing analysis". This is expected.
|
||||
|
||||
After code scanning has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the merge commit of the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows code scanning to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add code scanning to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the "Missing analysis for base commit SHA-HASH" message.
|
||||
|
||||
For more information and other causes of this message, see [Reasons for the "Analysis not found" message](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-analysis-not-found-message)
|
||||
|
||||
@@ -66,11 +66,16 @@ inputs:
|
||||
default: ${{ github.token }}
|
||||
matrix:
|
||||
default: ${{ toJson(matrix) }}
|
||||
expect-error:
|
||||
description: "[Internal] It is an error to use this input outside of integration testing of the codeql-action."
|
||||
required: false
|
||||
default: "false"
|
||||
outputs:
|
||||
db-locations:
|
||||
description: A map from language to absolute path for each database created by CodeQL.
|
||||
sarif-id:
|
||||
description: The ID of the uploaded SARIF file.
|
||||
runs:
|
||||
using: "node16"
|
||||
using: "node12"
|
||||
main: "../lib/analyze-action.js"
|
||||
post: "../lib/analyze-action-post.js"
|
||||
|
||||
@@ -13,5 +13,5 @@ inputs:
|
||||
$GITHUB_WORKSPACE as its working directory.
|
||||
required: false
|
||||
runs:
|
||||
using: 'node16'
|
||||
using: 'node12'
|
||||
main: '../lib/autobuild-action.js'
|
||||
|
||||
@@ -10,9 +10,34 @@ inputs:
|
||||
description: The languages to be analysed
|
||||
required: false
|
||||
token:
|
||||
description: GitHub token to use for authenticating with this instance of GitHub. To download custom packs from multiple registries, use the registries input.
|
||||
default: ${{ github.token }}
|
||||
required: false
|
||||
registries:
|
||||
description: |
|
||||
Use this input only when you need to download CodeQL packages from another instance of GitHub. If you only need to download packages from this GitHub instance, use the token input instead.
|
||||
|
||||
A YAML string that defines the list of GitHub container registries to use for downloading packs. The string is in the following form (the | is required on the first line):
|
||||
|
||||
registries: |
|
||||
- url: https://containers.GHEHOSTNAME1/v2/
|
||||
packages:
|
||||
- my-company/*
|
||||
- my-company2/*
|
||||
token: \$\{{ secrets.GHEHOSTNAME1_TOKEN }}
|
||||
|
||||
- url: https://ghcr.io/v2/
|
||||
packages: */*
|
||||
token: \$\{{ secrets.GHCR_TOKEN }}
|
||||
|
||||
The `url` property contains the URL to the container registry you want to connect to.
|
||||
|
||||
The `packages` property contains a single glob string or a list of glob strings, specifying which packages should be retrieved from this particular container registry. Order is important. Earlier entries will match before later entries.
|
||||
|
||||
The `token` property contains a connection token for this registry.
required: false
|
||||
matrix:
|
||||
default: ${{ toJson(matrix) }}
|
||||
required: false
|
||||
config-file:
|
||||
description: Path of the config file to use
|
||||
required: false
|
||||
@@ -32,7 +57,7 @@ inputs:
|
||||
analyses, you must specify packs in the codeql-config.yml file.
|
||||
required: false
|
||||
external-repository-token:
|
||||
description: A token for fetching external config files and queries if they reside in a private repository.
|
||||
description: A token for fetching external config files and queries if they reside in a private repository in the same GitHub instance that is running this action.
|
||||
required: false
|
||||
setup-python-dependencies:
|
||||
description: Try to auto-install your python dependencies
|
||||
@@ -56,7 +81,10 @@ inputs:
|
||||
This input also sets the number of threads that can later be used by the "analyze" action.
|
||||
required: false
|
||||
debug:
|
||||
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
|
||||
description: >-
|
||||
Enable debugging mode.
|
||||
This will result in more output being produced which may be useful when debugging certain issues.
|
||||
Debugging mode is enabled automatically when step debug logging is turned on.
|
||||
required: false
|
||||
default: 'false'
|
||||
debug-artifact-name:
|
||||
@@ -69,9 +97,14 @@ inputs:
|
||||
The name of the database uploaded to the debugging artifact.
|
||||
This is only used when debug mode is enabled.
|
||||
required: false
|
||||
trap-caching:
|
||||
description: >-
|
||||
Explicitly enable or disable TRAP caching rather than respecting the feature flag for it.
|
||||
required: false
|
||||
outputs:
|
||||
codeql-path:
|
||||
description: The path of the CodeQL binary used for analysis
|
||||
runs:
|
||||
using: 'node16'
|
||||
using: 'node12'
|
||||
main: '../lib/init-action.js'
|
||||
post: '../lib/init-action-post.js'
|
||||
|
||||
126 lib/actions-util.js (generated)
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
@@ -32,16 +32,11 @@ const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util_1 = require("./util");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
/**
|
||||
* The utils in this module are meant to be run inside of the action only.
|
||||
* Code paths from the runner should not enter this module.
|
||||
*/
|
||||
/**
|
||||
* Wrapper around core.getInput for inputs that always have a value.
|
||||
* Also see getOptionalInput.
|
||||
*
|
||||
* This allows us to get stronger type checking of required/optional inputs
|
||||
* and make behaviour more consistent between actions and the runner.
|
||||
* This allows us to get stronger type checking of required/optional inputs.
|
||||
*/
|
||||
function getRequiredInput(name) {
|
||||
return core.getInput(name, { required: true });
|
||||
@@ -51,8 +46,7 @@ exports.getRequiredInput = getRequiredInput;
* Wrapper around core.getInput that converts empty inputs to undefined.
* Also see getRequiredInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
* This allows us to get stronger type checking of required/optional inputs.
*/
const getOptionalInput = function (name) {
const value = core.getInput(name);
@@ -66,13 +60,6 @@ function getTemporaryDirectory() {
: (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
}
exports.getTemporaryDirectory = getTemporaryDirectory;
function getToolCacheDirectory() {
const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
return value !== undefined && value !== ""
? value
: (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE");
}
exports.getToolCacheDirectory = getToolCacheDirectory;
/**
* Gets the SHA of the commit that is currently checked out.
*/
@@ -112,7 +99,7 @@ exports.getCommitOid = getCommitOid;
* Returns undefined if run by other triggers or the merge base cannot be determined.
*/
const determineMergeBaseCommitOid = async function () {
if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
if (workflowEventName() !== "pull_request") {
return undefined;
}
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
@@ -356,7 +343,7 @@ async function getWorkflowPath() {
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const apiClient = api.getApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
owner,
repo,
@@ -439,7 +426,7 @@ async function getRef() {
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
throw new Error("Both 'ref' and 'sha' are required if one of them is provided.");
}
const ref = refInput || (0, util_1.getRequiredEnvParam)("GITHUB_REF");
const ref = refInput || getRefFromEnv();
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
// If the ref is a user-provided input, we have to skip logic
// and assume that it is really where they want to upload the results.
@@ -459,7 +446,7 @@ async function getRef() {
// in actions/checkout@v1 this may not be true as it checks out the repository
// using GITHUB_REF. There is a subtle race condition where
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
const hasChangedRef = sha !== head &&
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
if (hasChangedRef) {
@@ -472,6 +459,26 @@ async function getRef() {
}
}
exports.getRef = getRef;
function getRefFromEnv() {
// To workaround a limitation of Actions dynamic workflows not setting
// the GITHUB_REF in some cases, we accept also the ref within the
// CODE_SCANNING_REF variable. When possible, however, we prefer to use
// the GITHUB_REF as that is a protected variable and cannot be overwritten.
let refEnv;
try {
refEnv = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
}
catch (e) {
// If the GITHUB_REF is not set, we try to rescue by getting the
// CODE_SCANNING_REF.
const maybeRef = process.env["CODE_SCANNING_REF"];
if (maybeRef === undefined || maybeRef.length === 0) {
throw e;
}
refEnv = maybeRef;
}
return refEnv;
}
function getActionsStatus(error, otherFailureCause) {
if (error || otherFailureCause) {
return error instanceof util_1.UserError ? "user-error" : "failure";
@@ -508,11 +515,13 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
}
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
// See https://github.com/actions/runner/issues/803
const actionRef = isRunningLocalAction()
? undefined
: process.env["GITHUB_ACTION_REF"];
const actionRef = process.env["GITHUB_ACTION_REF"];
const testingEnvironment = process.env[sharedEnv.CODEQL_ACTION_TESTING_ENVIRONMENT] || "";
// re-export the testing environment variable so that it is available to subsequent steps,
// even if it was only set for this step
if (testingEnvironment !== "") {
core.exportVariable(sharedEnv.CODEQL_ACTION_TESTING_ENVIRONMENT, testingEnvironment);
}
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
@@ -526,6 +535,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
testing_environment: testingEnvironment,
runner_os: runnerOs,
action_version: pkg.version,
};
@@ -574,13 +584,6 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
* Returns whether sending the status report was successful of not.
*/
async function sendStatusReport(statusReport) {
const gitHubVersion = await api.getGitHubVersionActionsOnly();
if ((0, util_1.isGitHubGhesVersionBelow)(gitHubVersion, "3.2.0")) {
// GHES 3.1 and earlier versions reject unexpected properties, which means
// that they will reject status reports with newly added properties.
// Inhibiting status reporting for GHES < 3.2 avoids such failures.
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
// If in test mode we don't want to upload the results
@@ -590,7 +593,7 @@ async function sendStatusReport(statusReport) {
}
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
const client = api.getApiClient();
try {
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
@@ -637,9 +640,21 @@ async function sendStatusReport(statusReport) {
}
}
exports.sendStatusReport = sendStatusReport;
function workflowEventName() {
// If the original event is dynamic CODESCANNING_EVENT_NAME will contain the right info (push/pull_request)
if (process.env["GITHUB_EVENT_NAME"] === "dynamic") {
const value = process.env["CODESCANNING_EVENT_NAME"];
if (value === undefined || value.length === 0) {
return process.env["GITHUB_EVENT_NAME"];
}
return value;
}
return process.env["GITHUB_EVENT_NAME"];
}
exports.workflowEventName = workflowEventName;
// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
function workflowIsTriggeredByPushEvent() {
return process.env["GITHUB_EVENT_NAME"] === "push";
return workflowEventName() === "push";
}
// Is dependabot the actor that triggered the current workflow run.
function isDependabotActor() {
@@ -670,22 +685,51 @@ function getWorkflowEvent() {
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
}
}
function removeRefsHeadsPrefix(ref) {
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
}
// Is the version of the repository we are currently analyzing from the default branch,
// or alternatively from another branch or a pull request.
async function isAnalyzingDefaultBranch() {
var _a;
// Get the current ref and trim and refs/heads/ prefix
let currentRef = await getRef();
currentRef = currentRef.startsWith("refs/heads/")
? currentRef.slice("refs/heads/".length)
: currentRef;
currentRef = removeRefsHeadsPrefix(currentRef);
const event = getWorkflowEvent();
const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
let defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
if (process.env.GITHUB_EVENT_NAME === "schedule") {
defaultBranch = removeRefsHeadsPrefix((0, util_1.getRequiredEnvParam)("GITHUB_REF"));
}
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
function sanitizeArifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
async function printDebugLogs(config) {
for (const language of config.languages) {
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
const logsDirectory = path.join(databaseDirectory, "log");
if (!(0, util_1.doesDirectoryExist)(logsDirectory)) {
core.info(`Directory ${logsDirectory} does not exist.`);
continue; // Skip this language database.
}
const walkLogFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
if (entries.length === 0) {
core.info(`No debug logs found at directory ${logsDirectory}.`);
}
for (const entry of entries) {
if (entry.isFile()) {
const absolutePath = path.resolve(dir, entry.name);
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name} from file at path ${absolutePath}`);
process.stdout.write(fs.readFileSync(absolutePath));
core.endGroup();
}
else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
exports.sanitizeArifactName = sanitizeArifactName;
exports.printDebugLogs = printDebugLogs;
//# sourceMappingURL=actions-util.js.map
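The hunks above replace direct reads of GITHUB_EVENT_NAME and GITHUB_REF with workflowEventName() and getRefFromEnv(), so that dynamically created code scanning workflows (where GITHUB_EVENT_NAME is "dynamic" and GITHUB_REF may be unset) still resolve to the original trigger and ref. Below is a minimal standalone sketch of that fallback pattern; it assumes only the environment variable names shown in the diff and is an illustration, not the action's actual module.

// fallback-sketch.js -- illustration only, mirrors the pattern in the diff above
function resolveEvent(env = process.env) {
  // Dynamic workflows carry the original trigger (push/pull_request) in CODESCANNING_EVENT_NAME.
  if (env["GITHUB_EVENT_NAME"] === "dynamic" && env["CODESCANNING_EVENT_NAME"]) {
    return env["CODESCANNING_EVENT_NAME"];
  }
  return env["GITHUB_EVENT_NAME"];
}
function resolveRef(env = process.env) {
  // Prefer GITHUB_REF: the runner sets it and it cannot be overwritten.
  if (env["GITHUB_REF"]) {
    return env["GITHUB_REF"];
  }
  // Rescue with CODE_SCANNING_REF when a dynamic workflow omits GITHUB_REF.
  if (env["CODE_SCANNING_REF"]) {
    return env["CODE_SCANNING_REF"];
  }
  throw new Error("Neither GITHUB_REF nor CODE_SCANNING_REF is set.");
}
console.log(resolveEvent({ GITHUB_EVENT_NAME: "dynamic", CODESCANNING_EVENT_NAME: "push" })); // "push"
console.log(resolveRef({ CODE_SCANNING_REF: "refs/heads/main" })); // "refs/heads/main"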
File diff suppressed because one or more lines are too long
35  lib/actions-util.test.js  generated
@@ -471,12 +471,8 @@ on: ["push"]
`)), []));
});
(0, ava_1.default)("initializeEnvironment", (t) => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
t.deepEqual((0, util_1.getMode)(), util_1.Mode.actions);
(0, util_1.initializeEnvironment)("1.2.3");
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
(0, util_1.initializeEnvironment)(util_1.Mode.runner, "4.5.6");
t.deepEqual((0, util_1.getMode)(), util_1.Mode.runner);
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "4.5.6");
});
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
@@ -495,12 +491,31 @@ on: ["push"]
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "feature";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
fs.writeFileSync(envFile, JSON.stringify({
schedule: "0 0 * * *",
}));
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
getAdditionalInputStub
.withArgs("ref")
.resolves("refs/heads/something-else");
getAdditionalInputStub
.withArgs("sha")
.resolves("0000000000000000000000000000000000000000");
process.env["GITHUB_EVENT_NAME"] = "schedule";
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
(0, ava_1.default)("workflowEventName()", async (t) => {
process.env["GITHUB_EVENT_NAME"] = "push";
t.deepEqual(actionsutil.workflowEventName(), "push");
process.env["GITHUB_EVENT_NAME"] = "dynamic";
t.deepEqual(actionsutil.workflowEventName(), "dynamic");
process.env["CODESCANNING_EVENT_NAME"] = "push";
t.deepEqual(actionsutil.workflowEventName(), "push");
});
//# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
7  lib/analysis-paths.js  generated
@@ -58,14 +58,11 @@ function includeAndExcludeAnalysisPaths(config) {
}
// If the temporary or tools directory is in the working directory ignore that too.
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
let pathsIgnore = config.pathsIgnore;
if (!tempRelativeToWorking.startsWith("..")) {
if (!tempRelativeToWorking.startsWith("..") &&
!path.isAbsolute(tempRelativeToWorking)) {
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
}
if (!toolsRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
}
if (pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
}
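The extra !path.isAbsolute(tempRelativeToWorking) guard added above matters because path.relative() does not always produce a ".."-prefixed path when the target lies outside the working directory: on Windows, paths on different drives come back as absolute paths, so the old check alone would append a bogus absolute path to the exclude list. A small illustration of that edge case (mine, not part of the diff), using Node's path.win32 so it behaves the same on any platform:

const path = require("path");
// Different drives: path.relative returns the absolute target, not a "../..." path.
const rel = path.win32.relative("D:\\work\\repo", "C:\\codeql-temp");
console.log(rel); // "C:\\codeql-temp"
console.log(rel.startsWith(".."));       // false -- old check alone would treat this as inside the working dir
console.log(path.win32.isAbsolute(rel)); // true  -- the added guard filters this case out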
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IACE,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC;QACvC,CAAC,IAAI,CAAC,UAAU,CAAC,qBAAqB,CAAC,EACvC;QACA,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AAjCD,wEAiCC"}
67  lib/analysis-paths.test.js  generated
@@ -37,7 +37,6 @@ const util = __importStar(require("./util"));
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
@@ -45,7 +44,13 @@ const util = __importStar(require("./util"));
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -62,7 +67,6 @@ const util = __importStar(require("./util"));
pathsIgnore: ["path4", "path5", "path6/**"],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
@@ -70,7 +74,13 @@ const util = __importStar(require("./util"));
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -79,29 +89,32 @@ const util = __importStar(require("./util"));
});
});
(0, ava_1.default)("exclude temp dir", async (t) => {
return await util.withTmpDir(async (toolCacheDir) => {
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
//# sourceMappingURL=analysis-paths.test.js.map
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;IAC/D,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;QACrB,OAAO;QACP,SAAS,EAAE,EAAE;QACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;QACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;QACrD,KAAK,EAAE,EAAE;QACT,SAAS,EAAE,KAAK;QAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,sBAAsB,EAAE;YACtB,iBAAiB,EAAE,KAAK;YACxB,kBAAkB,EAAE,KAAK;YACzB,oBAAoB,EAAE,KAAK;SAC5B;QACD,UAAU,EAAE,EAAE;QACd,qBAAqB,EAAE,CAAC;KACzB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;IAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC"}
14  lib/analyze-action-env.test.js  generated
@@ -38,22 +38,30 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
14  lib/analyze-action-input.test.js  generated
@@ -38,22 +38,30 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
41  lib/analyze-action-post-helper.js  generated  Normal file
@@ -0,0 +1,41 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.run = void 0;
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
async function run(uploadSarifDebugArtifact) {
const logger = (0, logging_1.getActionsLogger)();
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Did the 'init' action fail to start?");
}
// Upload Actions SARIF artifacts for debugging
if (config === null || config === void 0 ? void 0 : config.debugMode) {
core.info("Debug mode is on. Uploading available SARIF files as Actions debugging artifact...");
const outputDir = actionsUtil.getRequiredInput("output");
await uploadSarifDebugArtifact(config, outputDir);
}
}
exports.run = run;
//# sourceMappingURL=analyze-action-post-helper.js.map
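The new helper receives the SARIF upload function as a parameter instead of importing it, which is presumably what lets the real `post:` entry point pass debugArtifacts.uploadSarifDebugArtifact while the tests pass a sinon spy. A hedged usage sketch follows; the logOnlyUploader callback is mine, for illustration only.

const analyzeActionPostHelper = require("./analyze-action-post-helper");
// Any async (config, outputDir) function can stand in for the real uploader.
async function logOnlyUploader(config, outputDir) {
  console.log(`would upload SARIF from ${outputDir} for ${config.languages.join(", ")}`);
}
// run() reloads the config written by the init action and only calls the
// injected uploader when debug mode is enabled.
analyzeActionPostHelper.run(logOnlyUploader).catch((e) => console.error(e));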
1  lib/analyze-action-post-helper.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"analyze-action-post-helper.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CAAC,wBAAkC;IAC1D,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,+CAA+C;IAC/C,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,oFAAoF,CACrF,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,wBAAwB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;KACnD;AACH,CAAC;AAlBD,kBAkBC"}
69  lib/analyze-action-post-helper.test.js  generated  Normal file
@@ -0,0 +1,69 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("post: analyze action with debug mode off", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: false,
gitHubVersion,
languages: [],
packs: [],
});
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.notCalled);
});
});
(0, ava_1.default)("post: analyze action with debug mode on", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: true,
gitHubVersion,
languages: [],
packs: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("output").returns("fake-output-dir");
const uploadSarifSpy = sinon.spy();
await analyzeActionPostHelper.run(uploadSarifSpy);
t.assert(uploadSarifSpy.called);
});
});
//# sourceMappingURL=analyze-action-post-helper.test.js.map
1  lib/analyze-action-post-helper.test.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"analyze-action-post-helper.test.js","sourceRoot":"","sources":["../src/analyze-action-post-helper.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,sFAAwE;AACxE,4DAA8C;AAC9C,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,KAAK;YAChB,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QAEpC,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,IAAI;YACf,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QAEpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;QAEhE,MAAM,cAAc,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;QAEnC,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAElD,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;IAClC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
40  lib/analyze-action-post.js  generated  Normal file
@@ -0,0 +1,40 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
* This file is the entry point for the `post:` hook of `analyze-action.yml`.
* It will run after the all steps in this job, in reverse order in relation to
* other `post:` hooks.
*/
const core = __importStar(require("@actions/core"));
const analyzeActionPostHelper = __importStar(require("./analyze-action-post-helper"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
async function runWrapper() {
try {
await analyzeActionPostHelper.run(debugArtifacts.uploadSarifDebugArtifact);
}
catch (error) {
core.setFailed(`analyze post-action step failed: ${error}`);
console.log(error);
}
}
void runWrapper();
//# sourceMappingURL=analyze-action-post.js.map
1  lib/analyze-action-post.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,sFAAwE;AACxE,kEAAoD;AAEpD,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,uBAAuB,CAAC,GAAG,CAAC,cAAc,CAAC,wBAAwB,CAAC,CAAC;KAC5E;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,oCAAoC,KAAK,EAAE,CAAC,CAAC;QAC5D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
221  lib/analyze-action.js  generated
@@ -18,25 +18,34 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runPromise = exports.sendStatusReport = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const path_1 = __importDefault(require("path"));
// We need to import `performance` on Node 12
const perf_hooks_1 = require("perf_hooks");
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const api_client_1 = require("./api-client");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const trap_caching_1 = require("./trap-caching");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendStatusReport(startedAt, config, stats, error) {
async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger) {
const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
const statusReport = {
@@ -47,59 +56,116 @@ async function sendStatusReport(startedAt, config, stats, error) {
}
: {}),
...(stats || {}),
...(dbCreationTimings || {}),
};
await actionsUtil.sendStatusReport(statusReport);
if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport = {
...statusReport,
trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
trap_cache_upload_size_bytes: Math.round(await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches, logger)),
};
await actionsUtil.sendStatusReport(trapCacheUploadStatusReport);
}
else {
await actionsUtil.sendStatusReport(statusReport);
}
}
exports.sendStatusReport = sendStatusReport;
// `expect-error` should only be set to a non-false value by the CodeQL Action PR checks.
function hasBadExpectErrorInput() {
return (actionsUtil.getOptionalInput("expect-error") !== "false" &&
!util.isInTestMode());
}
/**
* Returns whether any TRAP files exist under the `db-go` folder,
* indicating whether Go extraction has extracted at least one file.
*/
function doesGoExtractionOutputExist(config) {
const golangDbDirectory = util.getCodeQLDatabasePath(config, languages_1.Language.go);
const trapDirectory = path_1.default.join(golangDbDirectory, "trap", languages_1.Language.go);
return (fs.existsSync(trapDirectory) &&
fs
.readdirSync(trapDirectory)
.some((fileName) => [
".trap",
".trap.gz",
".trap.br",
".trap.tar.gz",
".trap.tar.br",
".trap.tar",
].some((ext) => fileName.endsWith(ext))));
}
/**
* We attempt to autobuild Go to preserve compatibility for users who have
* set up Go using a legacy scanning style CodeQL workflow, i.e. one without
* an autobuild step or manual build steps.
*
* - We detect whether an autobuild step is present by checking the
*   `util.DID_AUTOBUILD_GO_ENV_VAR_NAME` environment variable, which is set
*   when the autobuilder is invoked.
* - We detect whether the Go database has already been finalized in case it
*   has been manually set in a prior Action step.
* - We approximate whether manual build steps are present by looking at
*   whether any extraction output already exists for Go.
*/
async function runAutobuildIfLegacyGoWorkflow(config, logger) {
if (!config.languages.includes(languages_1.Language.go)) {
return;
}
if (process.env[util.DID_AUTOBUILD_GO_ENV_VAR_NAME] === "true") {
logger.debug("Won't run Go autobuild since it has already been run.");
return;
}
if ((0, analyze_1.dbIsFinalized)(config, languages_1.Language.go, logger)) {
logger.debug("Won't run Go autobuild since there is already a finalized database for Go.");
return;
}
// This captures whether a user has added manual build steps for Go
if (doesGoExtractionOutputExist(config)) {
logger.debug("Won't run Go autobuild since at least one file of Go code has already been extracted.");
// If the user has run the manual build step, and has set the `CODEQL_EXTRACTOR_GO_BUILD_TRACING`
// variable, we suggest they remove it from their workflow.
if ("CODEQL_EXTRACTOR_GO_BUILD_TRACING" in process.env) {
logger.warning(`The CODEQL_EXTRACTOR_GO_BUILD_TRACING environment variable has no effect on workflows with manual build steps, so we recommend that you remove it from your workflow.`);
}
return;
}
await (0, autobuild_1.runAutobuild)(languages_1.Language.go, config, logger);
}
async function run() {
const startedAt = new Date();
let uploadResult = undefined;
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
let trapCacheUploadTime = undefined;
let dbCreationTimings = undefined;
let didUploadTrapCaches = false;
util.initializeEnvironment(pkg.version);
await util.checkActionVersion(pkg.version);
const logger = (0, logging_1.getActionsLogger)();
try {
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
return;
}
const logger = (0, logging_1.getActionsLogger)();
config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
await util.enrichEnvironment(util.Mode.actions, await (0, codeql_1.getCodeQL)(config.codeQLCmd));
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
if (hasBadExpectErrorInput()) {
throw new Error("`expect-error` input parameter is for internal use only. It should only be set by codeql-action or a fork.");
}
await util.enrichEnvironment(await (0, codeql_1.getCodeQL)(config.codeQLCmd));
const apiDetails = (0, api_client_1.getApiDetails)();
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
await runAutobuildIfLegacyGoWorkflow(config, logger);
dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (config.debugMode) {
// Upload the logs as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
}
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger, features);
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
|
||||
@@ -110,7 +176,7 @@ async function run() {
|
||||
}
|
||||
core.setOutput("db-locations", dbLocations);
|
||||
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
||||
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
|
||||
uploadResult = await upload_lib.uploadFromActions(outputDir, logger);
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
}
|
||||
else {
|
||||
@@ -118,99 +184,53 @@ async function run() {
|
||||
}
|
||||
// Possibly upload the database bundles for remote queries
|
||||
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
|
||||
// Possibly upload the TRAP caches for later re-use
|
||||
const trapCacheUploadStartTime = perf_hooks_1.performance.now();
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
didUploadTrapCaches = await (0, trap_caching_1.uploadTrapCaches)(codeql, config, logger);
|
||||
trapCacheUploadTime = perf_hooks_1.performance.now() - trapCacheUploadStartTime;
|
||||
// We don't upload results in test mode, so don't wait for processing
|
||||
if (util.isInTestMode()) {
|
||||
core.debug("In test mode. Waiting for processing is disabled.");
|
||||
}
|
||||
else if (uploadResult !== undefined &&
|
||||
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
|
||||
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
|
||||
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
|
||||
}
|
||||
// If we did not throw an error yet here, but we expect one, throw it.
|
||||
if (actionsUtil.getOptionalInput("expect-error") === "true") {
|
||||
core.setFailed(`expect-error input was set to true but no error was thrown.`);
|
||||
}
|
||||
}
|
||||
catch (origError) {
|
||||
const error = origError instanceof Error ? origError : new Error(String(origError));
|
||||
core.setFailed(error.message);
|
||||
if (actionsUtil.getOptionalInput("expect-error") !== "true" ||
|
||||
hasBadExpectErrorInput()) {
|
||||
core.setFailed(error.message);
|
||||
}
|
||||
console.log(error);
|
||||
if (error instanceof analyze_1.CodeQLAnalysisError) {
|
||||
const stats = { ...error.queriesStatusReport };
|
||||
await sendStatusReport(startedAt, config, stats, error);
|
||||
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
|
||||
}
|
||||
else {
|
||||
await sendStatusReport(startedAt, config, undefined, error);
|
||||
await sendStatusReport(startedAt, config, undefined, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
|
||||
}
|
||||
return;
|
||||
}
|
||||
finally {
|
||||
if (config !== undefined && config.debugMode) {
|
||||
try {
|
||||
// Upload the database bundles as an Actions artifact for debugging
|
||||
const toUpload = [];
|
||||
for (const language of config.languages) {
|
||||
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
||||
}
|
||||
catch (error) {
|
||||
console.log(`Failed to upload database debug bundles: ${error}`);
|
||||
}
|
||||
}
|
||||
if (core.isDebug() && config !== undefined) {
|
||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = util.getCodeQLDatabasePath(config, language);
|
||||
const logsDirectory = path.join(databaseDirectory, "log");
|
||||
const walkLogFiles = (dir) => {
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.isFile()) {
|
||||
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
||||
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
||||
core.endGroup();
|
||||
}
|
||||
else if (entry.isDirectory()) {
|
||||
walkLogFiles(path.resolve(dir, entry.name));
|
||||
}
|
||||
}
|
||||
};
|
||||
walkLogFiles(logsDirectory);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (runStats && uploadResult) {
|
||||
await sendStatusReport(startedAt, config, {
|
||||
...runStats,
|
||||
...uploadResult.statusReport,
|
||||
});
|
||||
}, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
|
||||
}
|
||||
else if (runStats) {
|
||||
await sendStatusReport(startedAt, config, { ...runStats });
|
||||
await sendStatusReport(startedAt, config, { ...runStats }, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
|
||||
}
|
||||
else {
|
||||
await sendStatusReport(startedAt, config, undefined);
|
||||
await sendStatusReport(startedAt, config, undefined, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
|
||||
}
|
||||
}
|
||||
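// A small sketch (not from the file above) of the precedence run() applies when
// resolving the threads and RAM settings: an explicit action input wins, otherwise
// the CODEQL_THREADS / CODEQL_RAM environment variables are used. resolveSetting is
// a hypothetical helper, not part of the action.
function resolveSetting(inputValue, envVarName) {
    return inputValue || process.env[envVarName];
}
process.env["CODEQL_THREADS"] = "2";
console.log(resolveSetting(undefined, "CODEQL_THREADS")); // "2"  (falls back to the environment variable)
console.log(resolveSetting("4", "CODEQL_THREADS"));       // "4"  (the action input takes precedence)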
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
    let suffix = "";
    const matrix = actionsUtil.getRequiredInput("matrix");
    if (matrix !== undefined && matrix !== "null") {
        for (const entry of Object.entries(JSON.parse(matrix)).sort())
            suffix += `-${entry[1]}`;
    }
    await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
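// A runnable sketch (not part of the diff) of how uploadDebugArtifacts derives the
// artifact-name suffix from the job matrix: entries are sorted and each value is
// appended after a "-". The matrix value and artifact name below are illustrative.
const exampleMatrix = JSON.stringify({ os: "ubuntu-latest", language: "cpp" });
let exampleSuffix = "";
if (exampleMatrix !== undefined && exampleMatrix !== "null") {
    for (const entry of Object.entries(JSON.parse(exampleMatrix)).sort()) {
        exampleSuffix += `-${entry[1]}`;
    }
}
console.log(`my-debug-artifacts${exampleSuffix}`); // "my-debug-artifacts-cpp-ubuntu-latest"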
function listFolder(dir) {
    const entries = fs.readdirSync(dir, { withFileTypes: true });
    const files = [];
    for (const entry of entries) {
        if (entry.isFile()) {
            files.push(path.resolve(dir, entry.name));
        }
        else if (entry.isDirectory()) {
            files.push(...listFolder(path.resolve(dir, entry.name)));
        }
    }
    return files;
}
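// A brief usage sketch (not part of the diff) for listFolder above: given a directory,
// it returns the absolute path of every file in the tree, which run() then feeds to
// uploadDebugArtifacts. The temporary directory here exists only for illustration.
const os = require("os");
const sketchDir = fs.mkdtempSync(path.join(os.tmpdir(), "listFolder-demo-"));
fs.mkdirSync(path.join(sketchDir, "nested"));
fs.writeFileSync(path.join(sketchDir, "a.log"), "");
fs.writeFileSync(path.join(sketchDir, "nested", "b.log"), "");
console.log(listFolder(sketchDir)); // [ ".../a.log", ".../nested/b.log" ]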
exports.runPromise = run();
async function runWrapper() {
    try {
@@ -220,6 +240,7 @@ async function runWrapper() {
        core.setFailed(`analyze action failed: ${error}`);
        console.log(error);
    }
    await (0, util_1.checkForTimeout)();
}
void runWrapper();
//# sourceMappingURL=analyze-action.js.map
File diff suppressed because one or more lines are too long
269  lib/analyze.js  (generated)
@@ -18,15 +18,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.CodeQLAnalysisError = void 0;
|
||||
exports.validateQueryFilters = exports.runCleanup = exports.runFinalize = exports.createQuerySuiteContents = exports.convertPackToQuerySuiteEntry = exports.runQueries = exports.dbIsFinalized = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const perf_hooks_1 = require("perf_hooks"); // We need to import `performance` on Node 12
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const count_loc_1 = require("./count-loc");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const languages_1 = require("./languages");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const tracer_config_1 = require("./tracer-config");
|
||||
@@ -64,11 +69,10 @@ async function setupPythonExtractor(logger) {
|
||||
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
|
||||
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
|
||||
}
|
||||
async function createdDBForScannedLanguages(config, logger) {
|
||||
async function createdDBForScannedLanguages(codeql, config, logger) {
|
||||
// Insert the LGTM_INDEX_X env vars at this point so they are set when
|
||||
// we extract any scanned languages.
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
if ((0, languages_1.isScannedLanguage)(language) &&
|
||||
!dbIsFinalized(config, language, logger)) {
|
||||
@@ -76,11 +80,12 @@ async function createdDBForScannedLanguages(config, logger) {
|
||||
if (language === languages_1.Language.python) {
|
||||
await setupPythonExtractor(logger);
|
||||
}
|
||||
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config, language), language);
|
||||
await codeql.extractScannedLanguage(config, language);
|
||||
logger.endGroup();
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.createdDBForScannedLanguages = createdDBForScannedLanguages;
|
||||
function dbIsFinalized(config, language, logger) {
|
||||
const dbPath = util.getCodeQLDatabasePath(config, language);
|
||||
try {
|
||||
@@ -92,9 +97,13 @@ function dbIsFinalized(config, language, logger) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
exports.dbIsFinalized = dbIsFinalized;
|
||||
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger) {
|
||||
await createdDBForScannedLanguages(config, logger);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const extractionStart = perf_hooks_1.performance.now();
|
||||
await createdDBForScannedLanguages(codeql, config, logger);
|
||||
const extractionTime = perf_hooks_1.performance.now() - extractionStart;
|
||||
const trapImportStart = perf_hooks_1.performance.now();
|
||||
for (const language of config.languages) {
|
||||
if (dbIsFinalized(config, language, logger)) {
|
||||
logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
|
||||
@@ -105,82 +114,86 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger)
|
||||
logger.endGroup();
|
||||
}
|
||||
}
|
||||
const trapImportTime = perf_hooks_1.performance.now() - trapImportStart;
|
||||
return {
|
||||
scanned_language_extraction_duration_ms: Math.round(extractionTime),
|
||||
trap_import_duration_ms: Math.round(trapImportTime),
|
||||
};
|
||||
}
|
||||
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
|
||||
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureEnablement) {
|
||||
const statusReport = {};
|
||||
let locPromise = Promise.resolve({});
|
||||
const cliCanCountBaseline = await cliCanCountLoC();
|
||||
const debugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] ||
|
||||
process.env["ACTIONS_RUNNER_DEBUG"] ||
|
||||
process.env["ACTIONS_STEP_DEBUG"];
|
||||
if (!cliCanCountBaseline || debugMode) {
|
||||
// count the number of lines in the background
|
||||
locPromise = (0, count_loc_1.countLoc)(path.resolve(),
|
||||
// config.paths specifies external directories. the current
|
||||
// directory is included in the analysis by default. Replicate
|
||||
// that here.
|
||||
config.paths, config.pathsIgnore, config.languages, logger);
|
||||
}
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
await util.logCodeScanningConfigInCli(codeql, featureEnablement, logger);
|
||||
for (const language of config.languages) {
|
||||
const queries = config.queries[language];
|
||||
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
|
||||
const packsWithVersion = config.packs[language] || [];
|
||||
const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0;
|
||||
const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0;
|
||||
const hasPackWithCustomQueries = packsWithVersion.length > 0;
|
||||
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
|
||||
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
|
||||
throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
|
||||
}
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
try {
|
||||
if (hasPackWithCustomQueries) {
|
||||
logger.info("Performing analysis with custom CodeQL Packs.");
|
||||
logger.startGroup(`Downloading custom packs for ${language}`);
|
||||
const results = await codeql.packDownload(packsWithVersion);
|
||||
logger.info(`Downloaded packs: ${results.packs
|
||||
.map((r) => `${r.name}@${r.version || "latest"}`)
|
||||
.join(", ")}`);
|
||||
logger.endGroup();
|
||||
}
|
||||
logger.startGroup(`Running queries for ${language}`);
|
||||
const querySuitePaths = [];
|
||||
if (queries["builtin"].length > 0) {
|
||||
if (await util.useCodeScanningConfigInCli(codeql, featureEnablement)) {
|
||||
// If we are using the code scanning config in the CLI,
|
||||
// much of the work needed to generate the query suites
|
||||
// is done in the CLI. We just need to make a single
|
||||
// call to run all the queries for each language and
|
||||
// another to interpret the results.
|
||||
logger.startGroup(`Running queries for ${language}`);
|
||||
const startTimeBuiltIn = new Date().getTime();
|
||||
querySuitePaths.push(await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"]), undefined));
|
||||
await runQueryGroup(language, "all", undefined, undefined);
|
||||
// TODO should not be using `builtin` here. We should be using `all` instead.
|
||||
// The status report does not support `all` yet.
|
||||
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeBuiltIn;
|
||||
logger.startGroup(`Interpreting results for ${language}`);
|
||||
const startTimeInterpretResults = new Date().getTime();
|
||||
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
|
||||
const analysisSummary = await runInterpretResults(language, undefined, sarifFile, config.debugMode);
|
||||
statusReport[`interpret_results_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeInterpretResults;
|
||||
logger.endGroup();
|
||||
logger.info(analysisSummary);
|
||||
}
|
||||
const startTimeCustom = new Date().getTime();
|
||||
let ranCustom = false;
|
||||
for (let i = 0; i < queries["custom"].length; ++i) {
|
||||
if (queries["custom"][i].queries.length > 0) {
|
||||
querySuitePaths.push(await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries), queries["custom"][i].searchPath));
|
||||
else {
|
||||
logger.startGroup(`Running queries for ${language}`);
|
||||
const querySuitePaths = [];
|
||||
if (queries["builtin"].length > 0) {
|
||||
const startTimeBuiltIn = new Date().getTime();
|
||||
querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"], queryFilters), undefined)));
|
||||
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeBuiltIn;
|
||||
}
|
||||
const startTimeCustom = new Date().getTime();
|
||||
let ranCustom = false;
|
||||
for (let i = 0; i < queries["custom"].length; ++i) {
|
||||
if (queries["custom"][i].queries.length > 0) {
|
||||
querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries, queryFilters), queries["custom"][i].searchPath)));
|
||||
ranCustom = true;
|
||||
}
|
||||
}
|
||||
if (packsWithVersion.length > 0) {
|
||||
querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters));
|
||||
ranCustom = true;
|
||||
}
|
||||
if (ranCustom) {
|
||||
statusReport[`analyze_custom_queries_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeCustom;
|
||||
}
|
||||
logger.endGroup();
|
||||
logger.startGroup(`Interpreting results for ${language}`);
|
||||
const startTimeInterpretResults = new Date().getTime();
|
||||
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
|
||||
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile, config.debugMode);
|
||||
statusReport[`interpret_results_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeInterpretResults;
|
||||
logger.endGroup();
|
||||
logger.info(analysisSummary);
|
||||
}
|
||||
if (packsWithVersion.length > 0) {
|
||||
querySuitePaths.push(...(await runQueryPacks(language, "packs", packsWithVersion, undefined)));
|
||||
ranCustom = true;
|
||||
}
|
||||
if (ranCustom) {
|
||||
statusReport[`analyze_custom_queries_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeCustom;
|
||||
}
|
||||
logger.endGroup();
|
||||
logger.startGroup(`Interpreting results for ${language}`);
|
||||
const startTimeInterpretResults = new Date().getTime();
|
||||
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
|
||||
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile);
|
||||
if (!cliCanCountBaseline)
|
||||
await injectLinesOfCode(sarifFile, language, locPromise);
|
||||
statusReport[`interpret_results_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeInterpretResults;
|
||||
logger.endGroup();
|
||||
logger.info(analysisSummary);
|
||||
if (!cliCanCountBaseline || debugMode)
|
||||
printLinesOfCodeSummary(logger, language, await locPromise);
|
||||
if (cliCanCountBaseline)
|
||||
logger.info(await runPrintLinesOfCode(language));
|
||||
logger.info(await runPrintLinesOfCode(language));
|
||||
}
|
||||
catch (e) {
|
||||
logger.info(String(e));
|
||||
@@ -192,50 +205,80 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
||||
}
|
||||
}
|
||||
return statusReport;
|
||||
async function runInterpretResults(language, queries, sarifFile) {
|
||||
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId);
|
||||
}
|
||||
async function cliCanCountLoC() {
|
||||
return await util.codeQlVersionAbove(await (0, codeql_1.getCodeQL)(config.codeQLCmd), codeql_1.CODEQL_VERSION_COUNTS_LINES);
|
||||
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, featureEnablement);
|
||||
}
|
||||
async function runPrintLinesOfCode(language) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
return await codeql.databasePrintBaseline(databasePath);
|
||||
}
|
||||
async function runQueryGroup(language, type, querySuiteContents, searchPath) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuitePath = `${databasePath}-queries-${type}.qls`;
|
||||
fs.writeFileSync(querySuitePath, querySuiteContents);
|
||||
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const querySuitePath = querySuiteContents
|
||||
? `${databasePath}-queries-${type}.qls`
|
||||
: undefined;
|
||||
if (querySuiteContents && querySuitePath) {
|
||||
fs.writeFileSync(querySuitePath, querySuiteContents);
|
||||
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
|
||||
}
|
||||
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
|
||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||
return querySuitePath;
|
||||
}
|
||||
async function runQueryPacks(language, type, packs, searchPath) {
|
||||
async function runQueryPacks(language, type, packs, queryFilters) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
// Run the queries individually instead of all at once to avoid command
|
||||
// line length restrictions, particularly on windows.
|
||||
for (const pack of packs) {
|
||||
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
await codeql.databaseRunQueries(databasePath, searchPath, pack, memoryFlag, threadsFlag);
|
||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||
}
|
||||
return packs;
|
||||
// combine the list of packs into a query suite in order to run them all simultaneously.
|
||||
const querySuite = packs.map(convertPackToQuerySuiteEntry).concat(queryFilters);
|
||||
const querySuitePath = `${databasePath}-queries-${type}.qls`;
|
||||
fs.writeFileSync(querySuitePath, yaml.dump(querySuite));
|
||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||
await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, memoryFlag, threadsFlag);
|
||||
return querySuitePath;
|
||||
}
|
||||
}
|
||||
exports.runQueries = runQueries;
|
||||
function createQuerySuiteContents(queries) {
|
||||
return queries.map((q) => `- query: ${q}`).join("\n");
|
||||
function convertPackToQuerySuiteEntry(packStr) {
|
||||
var _a, _b, _c, _d;
|
||||
const pack = configUtils.parsePacksSpecification(packStr);
|
||||
return {
|
||||
qlpack: !pack.path ? pack.name : undefined,
|
||||
from: pack.path ? pack.name : undefined,
|
||||
version: pack.version,
|
||||
query: ((_a = pack.path) === null || _a === void 0 ? void 0 : _a.endsWith(".ql")) ? pack.path : undefined,
|
||||
queries: !((_b = pack.path) === null || _b === void 0 ? void 0 : _b.endsWith(".ql")) && !((_c = pack.path) === null || _c === void 0 ? void 0 : _c.endsWith(".qls"))
|
||||
? pack.path
|
||||
: undefined,
|
||||
apply: ((_d = pack.path) === null || _d === void 0 ? void 0 : _d.endsWith(".qls")) ? pack.path : undefined,
|
||||
};
|
||||
}
|
||||
exports.convertPackToQuerySuiteEntry = convertPackToQuerySuiteEntry;
|
||||
function createQuerySuiteContents(queries, queryFilters) {
|
||||
return yaml.dump(queries.map((q) => ({ query: q })).concat(queryFilters));
|
||||
}
|
||||
exports.createQuerySuiteContents = createQuerySuiteContents;
|
||||
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
|
||||
try {
|
||||
await (0, del_1.default)(outputDir, { force: true });
|
||||
}
|
||||
catch (error) {
|
||||
if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
await fs.promises.mkdir(outputDir, { recursive: true });
|
||||
const timings = await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
// WARNING: This does not _really_ end tracing, as the tracer will restore its
|
||||
// critical environment variables and it'll still be active for all processes
|
||||
// launched from this build step.
|
||||
// However, it will stop tracing for all steps past the codeql-action/analyze
|
||||
// step.
|
||||
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Delete variables as specified by the end-tracing script
|
||||
await (0, tracer_config_1.endTracingForCluster)(config);
|
||||
@@ -244,21 +287,7 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
|
||||
// Delete the tracer config env var to avoid tracing ourselves
|
||||
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
||||
}
|
||||
// After switching to Node16, this entire block can be replaced with `await fs.promises.rm(outputDir, { recursive: true, force: true });`.
|
||||
try {
|
||||
await fs.promises.rmdir(outputDir, {
|
||||
recursive: true,
|
||||
maxRetries: 5,
|
||||
retryDelay: 2000,
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
await fs.promises.mkdir(outputDir, { recursive: true });
|
||||
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
|
||||
return timings;
|
||||
}
|
||||
exports.runFinalize = runFinalize;
|
||||
async function runCleanup(config, cleanupLevel, logger) {
|
||||
@@ -271,32 +300,28 @@ async function runCleanup(config, cleanupLevel, logger) {
|
||||
logger.endGroup();
|
||||
}
|
||||
exports.runCleanup = runCleanup;
|
||||
async function injectLinesOfCode(sarifFile, language, locPromise) {
|
||||
var _a;
|
||||
const lineCounts = await locPromise;
|
||||
if (language in lineCounts) {
|
||||
const sarif = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
|
||||
if (Array.isArray(sarif.runs)) {
|
||||
for (const run of sarif.runs) {
|
||||
run.properties = run.properties || {};
|
||||
run.properties.metricResults = run.properties.metricResults || [];
|
||||
for (const metric of run.properties.metricResults) {
|
||||
// Baseline is inserted when matching rule has tag lines-of-code
|
||||
if (metric.rule && metric.rule.toolComponent) {
|
||||
const matchingRule = run.tool.extensions[metric.rule.toolComponent.index].rules[metric.rule.index];
|
||||
if ((_a = matchingRule.properties.tags) === null || _a === void 0 ? void 0 : _a.includes("lines-of-code")) {
|
||||
metric.baseline = lineCounts[language];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// exported for testing
|
||||
function validateQueryFilters(queryFilters) {
|
||||
if (!queryFilters) {
|
||||
return [];
|
||||
}
|
||||
if (!Array.isArray(queryFilters)) {
|
||||
throw new Error(`Query filters must be an array of "include" or "exclude" entries. Found ${typeof queryFilters}`);
|
||||
}
|
||||
const errors = [];
|
||||
for (const qf of queryFilters) {
|
||||
const keys = Object.keys(qf);
|
||||
if (keys.length !== 1) {
|
||||
errors.push(`Query filter must have exactly one key: ${JSON.stringify(qf)}`);
|
||||
}
|
||||
if (!["exclude", "include"].includes(keys[0])) {
|
||||
errors.push(`Only "include" or "exclude" filters are allowed:\n${JSON.stringify(qf)}`);
|
||||
}
|
||||
fs.writeFileSync(sarifFile, JSON.stringify(sarif));
|
||||
}
|
||||
}
|
||||
function printLinesOfCodeSummary(logger, language, lineCounts) {
|
||||
if (language in lineCounts) {
|
||||
logger.info(`Counted a baseline of ${lineCounts[language]} lines of code for ${language}.`);
|
||||
if (errors.length) {
|
||||
throw new Error(`Invalid query filter.\n${errors.join("\n")}`);
|
||||
}
|
||||
return queryFilters;
|
||||
}
|
||||
exports.validateQueryFilters = validateQueryFilters;
|
||||
//# sourceMappingURL=analyze.js.map
|
||||
File diff suppressed because one or more lines are too long
183  lib/analyze.test.js  (generated)
@@ -26,10 +26,8 @@ const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const analyze_1 = require("./analyze");
|
||||
const codeql_1 = require("./codeql");
|
||||
const count = __importStar(require("./count-loc"));
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
@@ -39,12 +37,6 @@ const util = __importStar(require("./util"));
|
||||
// and correct case of builtin or custom. Also checks the correct search
|
||||
// paths are set in the database analyze invocation.
|
||||
(0, ava_1.default)("status report fields and search path setting", async (t) => {
|
||||
const mockLinesOfCode = Object.values(languages_1.Language).reduce((obj, lang, i) => {
|
||||
// use a different line count for each language
|
||||
obj[lang] = i + 1;
|
||||
return obj;
|
||||
}, {});
|
||||
sinon.stub(count, "countLoc").resolves(mockLinesOfCode);
|
||||
let searchPathsUsed = [];
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -98,6 +90,7 @@ const util = __importStar(require("./util"));
|
||||
}));
|
||||
return "";
|
||||
},
|
||||
databasePrintBaseline: async () => "",
|
||||
});
|
||||
searchPathsUsed = [];
|
||||
const config = {
|
||||
@@ -107,7 +100,6 @@ const util = __importStar(require("./util"));
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
@@ -117,7 +109,13 @@ const util = __importStar(require("./util"));
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
augmentationProperties: {
|
||||
injectedMlQueries: false,
|
||||
packsInputCombines: false,
|
||||
queriesInputCombines: false,
|
||||
},
|
||||
trapCaches: {},
|
||||
trapCacheDownloadTime: 0,
|
||||
};
|
||||
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||
recursive: true,
|
||||
@@ -126,7 +124,7 @@ const util = __importStar(require("./util"));
|
||||
builtin: ["foo.ql"],
|
||||
custom: [],
|
||||
};
|
||||
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
|
||||
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
|
||||
const hasPacks = language in packs;
|
||||
const statusReportKeys = Object.keys(builtinStatusReport).sort();
|
||||
if (hasPacks) {
|
||||
@@ -152,7 +150,7 @@ const util = __importStar(require("./util"));
|
||||
},
|
||||
],
|
||||
};
|
||||
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
|
||||
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
|
||||
t.deepEqual(Object.keys(customStatusReport).length, 2);
|
||||
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
|
||||
const expectedSearchPathsUsed = hasPacks
|
||||
@@ -161,32 +159,8 @@ const util = __importStar(require("./util"));
|
||||
t.deepEqual(searchPathsUsed, expectedSearchPathsUsed);
|
||||
t.true(`interpret_results_${language}_duration_ms` in customStatusReport);
|
||||
}
|
||||
verifyLineCounts(tmpDir);
|
||||
verifyQuerySuites(tmpDir);
|
||||
});
|
||||
function verifyLineCounts(tmpDir) {
|
||||
// eslint-disable-next-line github/array-foreach
|
||||
Object.keys(languages_1.Language).forEach((lang, i) => {
|
||||
verifyLineCountForFile(path.join(tmpDir, `${lang}.sarif`), i + 1);
|
||||
});
|
||||
}
|
||||
function verifyLineCountForFile(filePath, lineCount) {
|
||||
const sarif = JSON.parse(fs.readFileSync(filePath, "utf8"));
|
||||
t.deepEqual(sarif.runs[0].properties.metricResults, [
|
||||
{
|
||||
rule: {
|
||||
index: 0,
|
||||
toolComponent: {
|
||||
index: 0,
|
||||
},
|
||||
},
|
||||
value: 123,
|
||||
baseline: lineCount,
|
||||
},
|
||||
]);
|
||||
// when the rule doesn't exist, it should not be added
|
||||
t.deepEqual(sarif.runs[1].properties.metricResults, []);
|
||||
}
|
||||
function verifyQuerySuites(tmpDir) {
|
||||
const qlsContent = [
|
||||
{
|
||||
@@ -210,4 +184,141 @@ const util = __importStar(require("./util"));
|
||||
}
|
||||
}
|
||||
});
|
||||
(0, ava_1.default)("validateQueryFilters", (t) => {
|
||||
t.notThrows(() => (0, analyze_1.validateQueryFilters)([]));
|
||||
t.notThrows(() => (0, analyze_1.validateQueryFilters)(undefined));
|
||||
t.notThrows(() => {
|
||||
return (0, analyze_1.validateQueryFilters)([
|
||||
{
|
||||
exclude: {
|
||||
"problem.severity": "recommendation",
|
||||
},
|
||||
},
|
||||
{
|
||||
exclude: {
|
||||
"tags contain": ["foo", "bar"],
|
||||
},
|
||||
},
|
||||
{
|
||||
include: {
|
||||
"problem.severity": "something-to-think-about",
|
||||
},
|
||||
},
|
||||
{
|
||||
include: {
|
||||
"tags contain": ["baz", "bop"],
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
t.throws(() => {
|
||||
return (0, analyze_1.validateQueryFilters)([
|
||||
{
|
||||
exclude: {
|
||||
"tags contain": ["foo", "bar"],
|
||||
},
|
||||
include: {
|
||||
"tags contain": ["baz", "bop"],
|
||||
},
|
||||
},
|
||||
]);
|
||||
}, { message: /Query filter must have exactly one key/ });
|
||||
t.throws(() => {
|
||||
return (0, analyze_1.validateQueryFilters)([{ xxx: "foo" }]);
|
||||
}, { message: /Only "include" or "exclude" filters are allowed/ });
|
||||
t.throws(() => {
|
||||
return (0, analyze_1.validateQueryFilters)({ exclude: "foo" });
|
||||
}, {
|
||||
message: /Query filters must be an array of "include" or "exclude" entries/,
|
||||
});
|
||||
});
|
||||
const convertPackToQuerySuiteEntryMacro = ava_1.default.macro({
|
||||
exec: (t, packSpec, suiteEntry) => t.deepEqual((0, analyze_1.convertPackToQuerySuiteEntry)(packSpec), suiteEntry),
|
||||
title: (_providedTitle, packSpec) => `Query Suite Entry: ${packSpec}`,
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b", {
|
||||
qlpack: "a/b",
|
||||
from: undefined,
|
||||
version: undefined,
|
||||
query: undefined,
|
||||
queries: undefined,
|
||||
apply: undefined,
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3", {
|
||||
qlpack: "a/b",
|
||||
from: undefined,
|
||||
version: "~1.2.3",
|
||||
query: undefined,
|
||||
queries: undefined,
|
||||
apply: undefined,
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path", {
|
||||
qlpack: undefined,
|
||||
from: "a/b",
|
||||
version: undefined,
|
||||
query: undefined,
|
||||
queries: "my/path",
|
||||
apply: undefined,
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path", {
|
||||
qlpack: undefined,
|
||||
from: "a/b",
|
||||
version: "~1.2.3",
|
||||
query: undefined,
|
||||
queries: "my/path",
|
||||
apply: undefined,
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path/query.ql", {
|
||||
qlpack: undefined,
|
||||
from: "a/b",
|
||||
version: undefined,
|
||||
query: "my/path/query.ql",
|
||||
queries: undefined,
|
||||
apply: undefined,
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path/query.ql", {
|
||||
qlpack: undefined,
|
||||
from: "a/b",
|
||||
version: "~1.2.3",
|
||||
query: "my/path/query.ql",
|
||||
queries: undefined,
|
||||
apply: undefined,
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b:my/path/suite.qls", {
|
||||
qlpack: undefined,
|
||||
from: "a/b",
|
||||
version: undefined,
|
||||
query: undefined,
|
||||
queries: undefined,
|
||||
apply: "my/path/suite.qls",
|
||||
});
|
||||
(0, ava_1.default)(convertPackToQuerySuiteEntryMacro, "a/b@~1.2.3:my/path/suite.qls", {
|
||||
qlpack: undefined,
|
||||
from: "a/b",
|
||||
version: "~1.2.3",
|
||||
query: undefined,
|
||||
queries: undefined,
|
||||
apply: "my/path/suite.qls",
|
||||
});
|
||||
(0, ava_1.default)("convertPackToQuerySuiteEntry Failure", (t) => {
|
||||
t.throws(() => (0, analyze_1.convertPackToQuerySuiteEntry)("this-is-not-a-pack"));
|
||||
});
|
||||
(0, ava_1.default)("createQuerySuiteContents", (t) => {
|
||||
const yamlResult = (0, analyze_1.createQuerySuiteContents)(["query1.ql", "query2.ql"], [
|
||||
{
|
||||
exclude: { "problem.severity": "recommendation" },
|
||||
},
|
||||
{
|
||||
include: { "problem.severity": "recommendation" },
|
||||
},
|
||||
]);
|
||||
const expected = `- query: query1.ql
|
||||
- query: query2.ql
|
||||
- exclude:
|
||||
problem.severity: recommendation
|
||||
- include:
|
||||
problem.severity: recommendation
|
||||
`;
|
||||
t.deepEqual(yamlResult, expected);
|
||||
});
|
||||
//# sourceMappingURL=analyze.test.js.map
|
||||
File diff suppressed because one or more lines are too long
46  lib/api-client.js  (generated)
@@ -22,8 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getGitHubVersionActionsOnly = exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
const path = __importStar(require("path"));
exports.getGitHubVersion = exports.getApiClientWithExternalAuth = exports.getApiClient = exports.getApiDetails = exports.DisallowedAPIVersionReason = void 0;
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
@@ -37,57 +36,44 @@ var DisallowedAPIVersionReason;
    DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
    DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
const getApiClient = function (apiDetails, { allowExternal = false } = {}) {
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
    const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
    const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
    return new retryingOctokit(githubUtils.getOctokitOptions(auth, {
        baseUrl: getApiUrl(apiDetails.url),
        userAgent: `CodeQL-${(0, util_1.getMode)()}/${pkg.version}`,
        baseUrl: apiDetails.apiURL,
        userAgent: `CodeQL-Action/${pkg.version}`,
        log: (0, console_log_level_1.default)({ level: "debug" }),
    }));
};
exports.getApiClient = getApiClient;
function getApiUrl(githubUrl) {
    const url = new URL(githubUrl);
    // If we detect this is trying to connect to github.com
    // then return with a fixed canonical URL.
    if (url.hostname === "github.com" || url.hostname === "api.github.com") {
        return "https://api.github.com";
    }
    // Add the /api/v3 API prefix
    url.pathname = path.join(url.pathname, "api", "v3");
    return url.toString();
}
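// A small sketch (not part of the diff) of the mapping getApiUrl above performs,
// which the new code replaces with the GITHUB_API_URL environment variable:
// github.com and api.github.com map to the canonical REST endpoint, while any
// other host gets the /api/v3 prefix. The GHES hostname below is illustrative.
console.log(getApiUrl("https://github.com"));       // "https://api.github.com"
console.log(getApiUrl("https://ghes.example.com")); // "https://ghes.example.com/api/v3"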
function getApiDetails() {
    return {
        auth: (0, actions_util_1.getRequiredInput)("token"),
        url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
        apiURL: (0, util_1.getRequiredEnvParam)("GITHUB_API_URL"),
    };
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient() {
    return (0, exports.getApiClient)(getApiDetails());
exports.getApiDetails = getApiDetails;
function getApiClient() {
    return createApiClientWithDetails(getApiDetails());
}
exports.getActionsApiClient = getActionsApiClient;
exports.getApiClient = getApiClient;
function getApiClientWithExternalAuth(apiDetails) {
    return createApiClientWithDetails(apiDetails, { allowExternal: true });
}
exports.getApiClientWithExternalAuth = getApiClientWithExternalAuth;
let cachedGitHubVersion = undefined;
/**
 * Report the GitHub server version. This is a wrapper around
 * util.getGitHubVersion() that automatically supplies GitHub API details using
 * GitHub Action inputs. If you need to get the GitHub server version from the
 * Runner, please call util.getGitHubVersion() instead.
 * GitHub Action inputs.
 *
 * @returns GitHub version
 */
async function getGitHubVersionActionsOnly() {
    if (!util.isActions()) {
        throw new Error("getGitHubVersionActionsOnly() works only in an action");
    }
async function getGitHubVersion() {
    if (cachedGitHubVersion === undefined) {
        cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
    }
    return cachedGitHubVersion;
}
exports.getGitHubVersionActionsOnly = getGitHubVersionActionsOnly;
exports.getGitHubVersion = getGitHubVersion;
//# sourceMappingURL=api-client.js.map
@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAAqE;AAErE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IACpB,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,OAAO,IAAA,oBAAY,EAAC,aAAa,EAAE,CAAC,CAAC;AACvC,CAAC;AAFD,kDAEC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;;GAOG;AACI,KAAK,UAAU,2BAA2B;IAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;QACrB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;KAC1E;IACD,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AARD,kEAQC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAA4D;AAE5D,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAiBD,SAAS,0BAA0B,CACjC,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,UAAU,CAAC,MAAM;QAC1B,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;QACzC,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC;AAED,SAAgB,aAAa;IAC3B,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;QAC7C,MAAM,EAAE,IAAA,0BAAmB,EAAC,gBAAgB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAND,sCAMC;AAED,SAAgB,YAAY;IAC1B,OAAO,0BAA0B,CAAC,aAAa,EAAE,CAAC,CAAC;AACrD,CAAC;AAFD,oCAEC;AAED,SAAgB,4BAA4B,CAC1C,UAAoC;IAEpC,OAAO,0BAA0B,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;AACzE,CAAC;AAJD,oEAIC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;GAMG;AACI,KAAK,UAAU,gBAAgB;IACpC,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AALD,4CAKC"}
65  lib/api-client.test.js  (generated)
@@ -25,9 +25,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const githubUtils = __importStar(require("@actions/github/lib/utils"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
const util = __importStar(require("./util"));
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
@@ -37,55 +38,23 @@ ava_1.default.beforeEach(() => {
|
||||
pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
|
||||
githubStub = sinon.stub();
|
||||
pluginStub.returns(githubStub);
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
|
||||
util.initializeEnvironment(pkg.version);
|
||||
});
|
||||
(0, ava_1.default)("Get the client API", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
externalRepoAuth: "abc",
|
||||
url: "http://hucairz",
|
||||
}, undefined, {
|
||||
(0, ava_1.default)("getApiClient", async (t) => {
|
||||
sinon.stub(actionsUtil, "getRequiredInput").withArgs("token").returns("xyz");
|
||||
const requiredEnvParamStub = sinon.stub(util, "getRequiredEnvParam");
|
||||
requiredEnvParamStub
|
||||
.withArgs("GITHUB_SERVER_URL")
|
||||
.returns("http://github.localhost");
|
||||
requiredEnvParamStub
|
||||
.withArgs("GITHUB_API_URL")
|
||||
.returns("http://api.github.localhost");
|
||||
(0, api_client_1.getApiClient)();
|
||||
t.assert(githubStub.calledOnceWithExactly({
|
||||
auth: "token xyz",
|
||||
baseUrl: "http://hucairz/api/v3",
|
||||
baseUrl: "http://api.github.localhost",
|
||||
log: sinon.match.any,
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
}));
|
||||
});
|
||||
(0, ava_1.default)("Get the client API external", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
externalRepoAuth: "abc",
|
||||
url: "http://hucairz",
|
||||
}, { allowExternal: true }, {
|
||||
auth: "token abc",
|
||||
baseUrl: "http://hucairz/api/v3",
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Get the client API external not present", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
url: "http://hucairz",
|
||||
}, { allowExternal: true }, {
|
||||
auth: "token xyz",
|
||||
baseUrl: "http://hucairz/api/v3",
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Get the client API with github url", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
url: "https://github.com/some/invalid/url",
|
||||
}, undefined, {
|
||||
auth: "token xyz",
|
||||
baseUrl: "https://api.github.com",
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
});
|
||||
function doTest(t, clientArgs, clientOptions, expected) {
|
||||
(0, api_client_1.getApiClient)(clientArgs, clientOptions);
|
||||
const firstCallArgs = githubStub.args[0];
|
||||
// log is a function, so we don't need to test for equality of it
|
||||
delete firstCallArgs[0].log;
|
||||
t.deepEqual(firstCallArgs, [expected]);
|
||||
}
|
||||
//# sourceMappingURL=api-client.test.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,6CAA+B;AAE/B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,IAAA,yBAAY,EAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
|
||||
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,6CAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAI,CAAC,qBAAqB,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AAC1C,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,cAAc,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/B,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC7E,MAAM,oBAAoB,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,qBAAqB,CAAC,CAAC;IACrE,oBAAoB;SACjB,QAAQ,CAAC,mBAAmB,CAAC;SAC7B,OAAO,CAAC,yBAAyB,CAAC,CAAC;IACtC,oBAAoB;SACjB,QAAQ,CAAC,gBAAgB,CAAC;SAC1B,OAAO,CAAC,6BAA6B,CAAC,CAAC;IAE1C,IAAA,yBAAY,GAAE,CAAC;IAEf,CAAC,CAAC,MAAM,CACN,UAAU,CAAC,qBAAqB,CAAC;QAC/B,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,6BAA6B;QACtC,GAAG,EAAE,KAAK,CAAC,KAAK,CAAC,GAAG;QACpB,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CAAC,CACH,CAAC;AACJ,CAAC,CAAC,CAAC"}
@@ -1 +1 @@
{ "maximumVersion": "3.5", "minimumVersion": "3.1" }
{ "maximumVersion": "3.8", "minimumVersion": "3.3" }
29  lib/autobuild-action.js  (generated)
@@ -21,14 +21,16 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const autobuild_1 = require("./autobuild");
const config_utils = __importStar(require("./config-utils"));
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
    (0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
    (0, util_1.initializeEnvironment)(pkg.version);
    const status = (0, actions_util_1.getActionsStatus)(cause, failingLanguage);
    const statusReportBase = await (0, actions_util_1.createStatusReportBase)("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
    const statusReport = {
@@ -42,32 +44,41 @@ async function run() {
    const startedAt = new Date();
    const logger = (0, logging_1.getActionsLogger)();
    await (0, util_1.checkActionVersion)(pkg.version);
    let language = undefined;
    let currentLanguage = undefined;
    let languages = undefined;
    try {
        if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
            return;
        }
        const config = await config_utils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
        const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
        (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
        const config = await configUtils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
        if (config === undefined) {
            throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
        }
        language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
        if (language !== undefined) {
        languages = await (0, autobuild_1.determineAutobuildLanguages)(config, logger);
        if (languages !== undefined) {
            const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory");
            if (workingDirectory) {
                logger.info(`Changing autobuilder working directory to ${workingDirectory}`);
                process.chdir(workingDirectory);
            }
            await (0, autobuild_1.runAutobuild)(language, config, logger);
            for (const language of languages) {
                currentLanguage = language;
                await (0, autobuild_1.runAutobuild)(language, config, logger);
                if (language === languages_1.Language.go) {
                    core.exportVariable(util_1.DID_AUTOBUILD_GO_ENV_VAR_NAME, "true");
                }
            }
        }
    }
    catch (error) {
        core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error instanceof Error ? error.message : String(error)}`);
        console.log(error);
        await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error instanceof Error ? error : new Error(String(error)));
        await sendCompletedStatusReport(startedAt, languages !== null && languages !== void 0 ? languages : [], currentLanguage, error instanceof Error ? error : new Error(String(error)));
        return;
    }
    await sendCompletedStatusReport(startedAt, language ? [language] : []);
    await sendCompletedStatusReport(startedAt, languages !== null && languages !== void 0 ? languages : []);
}
async function runWrapper() {
|
||||
try {
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAyE;AAEzE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,4DAA8C;AAC9C,2CAAuC;AACvC,uCAA6C;AAC7C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IAEnC,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oCAA6B,EAAE,MAAM,CAAC,CAAC;iBAC5D;aACF;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,EACf,eAAe,EACf,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
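The autobuild-action.js hunk above changes the entry point from building a single language to iterating over the list returned by determineAutobuildLanguages, remembering the language currently being built for failure reporting and exporting an environment variable once the Go autobuilder has run. Below is a minimal TypeScript sketch of that loop; the Language enum, Config and Logger types are simplified stand-ins for the action's real types, and DID_AUTOBUILD_GO is a placeholder for the actual constant defined in util.ts.

import * as core from "@actions/core";

// Simplified stand-ins for the action's internal types (assumptions for this sketch).
enum Language { go = "go", java = "java", cpp = "cpp" }
type Config = { languages: Language[] };
type Logger = { info: (msg: string) => void };

// Placeholder name; the real constant is DID_AUTOBUILD_GO_ENV_VAR_NAME in util.ts.
const DID_AUTOBUILD_GO = "CODEQL_ACTION_DID_AUTOBUILD_GOLANG";

async function runAutobuild(lang: Language, config: Config, logger: Logger) {
  logger.info(`autobuilding ${lang}`); // the real code shells out to the CLI's autobuild script
}

async function autobuildAll(languages: Language[], config: Config, logger: Logger) {
  let currentLanguage: Language | undefined;
  for (const language of languages) {
    currentLanguage = language; // remembered so a failure report can name the failing language
    await runAutobuild(language, config, logger);
    if (language === Language.go) {
      // Tell later steps that Go was already extracted by this autobuild step.
      core.exportVariable(DID_AUTOBUILD_GO, "true");
    }
  }
  return currentLanguage;
}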
71 lib/autobuild.js (generated)
@@ -1,28 +1,75 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.runAutobuild = exports.determineAutobuildLanguage = void 0;
|
||||
exports.runAutobuild = exports.determineAutobuildLanguages = void 0;
|
||||
const codeql_1 = require("./codeql");
|
||||
const languages_1 = require("./languages");
|
||||
function determineAutobuildLanguage(config, logger) {
|
||||
async function determineAutobuildLanguages(config, logger) {
|
||||
// Attempt to find a language to autobuild
|
||||
// We want pick the dominant language in the repo from the ones we're able to build
|
||||
// The languages are sorted in order specified by user or by lines of code if we got
|
||||
// them from the GitHub API, so try to build the first language on the list.
|
||||
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
|
||||
const language = autobuildLanguages[0];
|
||||
if (!language) {
|
||||
const autobuildLanguages = config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l));
|
||||
if (!autobuildLanguages) {
|
||||
logger.info("None of the languages in this project require extra build steps");
|
||||
return undefined;
|
||||
}
|
||||
logger.debug(`Detected dominant traced language: ${language}`);
|
||||
if (autobuildLanguages.length > 1) {
|
||||
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
|
||||
.slice(1)
|
||||
.join(" and ")}, you must replace this call with custom build steps.`);
|
||||
/**
|
||||
* Additionally autobuild Go in the autobuild Action to ensure backwards
|
||||
* compatibility for users performing a multi-language build within a single
|
||||
* job.
|
||||
*
|
||||
* For example, consider a user with the following workflow file:
|
||||
*
|
||||
* ```yml
|
||||
* - uses: github/codeql-action/init@v2
|
||||
* with:
|
||||
* languages: go, java
|
||||
* - uses: github/codeql-action/autobuild@v2
|
||||
* - uses: github/codeql-action/analyze@v2
|
||||
* ```
|
||||
*
|
||||
* - With Go extraction disabled, we will run the Java autobuilder in the
|
||||
* autobuild Action, ensuring we extract both Java and Go code.
|
||||
* - With Go extraction enabled, taking the previous behavior we'd run the Go
|
||||
* autobuilder, since Go is first on the list of languages. We wouldn't run
|
||||
* the Java autobuilder at all and so we'd only extract Go code.
|
||||
*
|
||||
* We therefore introduce a special case here such that we'll autobuild Go
|
||||
* in addition to the primary non-Go traced language in the autobuild Action.
|
||||
*
|
||||
* This special case behavior should be removed as part of the next major
|
||||
* version of the CodeQL Action.
|
||||
*/
|
||||
const autobuildLanguagesWithoutGo = autobuildLanguages.filter((l) => l !== languages_1.Language.go);
|
||||
const languages = [];
|
||||
// First run the autobuilder for the first non-Go traced language, if one
|
||||
// exists.
|
||||
if (autobuildLanguagesWithoutGo[0] !== undefined) {
|
||||
languages.push(autobuildLanguagesWithoutGo[0]);
|
||||
}
|
||||
return language;
|
||||
// If Go is requested, run the Go autobuilder last to ensure it doesn't
|
||||
// interfere with the other autobuilder.
|
||||
if (autobuildLanguages.length !== autobuildLanguagesWithoutGo.length) {
|
||||
languages.push(languages_1.Language.go);
|
||||
}
|
||||
logger.debug(`Will autobuild ${languages.join(" and ")}.`);
|
||||
// In general the autobuilders for other traced languages may conflict with
|
||||
// each other. Therefore if a user has requested more than one non-Go traced
|
||||
// language, we ask for manual build steps.
|
||||
// Matrixing the build would also work, but that would change the SARIF
|
||||
// categories, potentially leading to a "stale tips" situation where alerts
|
||||
// that should be fixed remain on a repo since they are linked to SARIF
|
||||
// categories that are no longer updated.
|
||||
if (autobuildLanguagesWithoutGo.length > 1) {
|
||||
logger.warning(`We will only automatically build ${languages.join(" and ")} code. If you wish to scan ${autobuildLanguagesWithoutGo
|
||||
.slice(1)
|
||||
.join(" and ")}, you must replace the autobuild step of your workflow with custom build steps. ` +
|
||||
"For more information, see " +
|
||||
"https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-the-codeql-workflow-for-compiled-languages#adding-build-steps-for-a-compiled-language");
|
||||
}
|
||||
return languages;
|
||||
}
|
||||
exports.determineAutobuildLanguage = determineAutobuildLanguage;
|
||||
exports.determineAutobuildLanguages = determineAutobuildLanguages;
|
||||
async function runAutobuild(language, config, logger) {
|
||||
logger.startGroup(`Attempting to automatically build ${language} code`);
|
||||
const codeQL = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGlD,KAAK,UAAU,2BAA2B,CAC/C,MAA0B,EAC1B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;QAChD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;KAChD;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE;QACpE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;KAC7B;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,4BAA4B;YAC5B,0NAA0N,CAC7N,CAAC;KACH;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAtFD,kEAsFC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
|
||||
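The new determineAutobuildLanguages logic above can be summarised as: take the traced languages from the config, autobuild the first non-Go one (if any), then autobuild Go last so it cannot interfere with the other autobuilder, and warn if more than one non-Go traced language was requested. A rough TypeScript sketch of just that ordering follows; the Language type and isTracedLanguage helper are simplified stand-ins for the action's own definitions.

type Language = "go" | "java" | "cpp" | "csharp" | "swift" | "javascript" | "python" | "ruby";

// Stand-in for the action's isTracedLanguage (assumption: these are the traced languages).
const isTracedLanguage = (l: Language) =>
  ["go", "java", "cpp", "csharp", "swift"].includes(l);

function orderAutobuildLanguages(configured: Language[]): Language[] {
  const traced = configured.filter(isTracedLanguage);
  const nonGo = traced.filter((l) => l !== "go");
  const result: Language[] = [];
  if (nonGo[0] !== undefined) {
    result.push(nonGo[0]); // dominant non-Go traced language first
  }
  if (traced.includes("go")) {
    result.push("go"); // Go last, so it does not interfere with the other autobuilder
  }
  return result; // e.g. ["java", "go"] for a workflow that requested "go, java"
}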
350 lib/codeql.js (generated)
@@ -22,27 +22,32 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const query_string_1 = __importDefault(require("query-string"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const uuid_1 = require("uuid");
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api = __importStar(require("./api-client"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const error_matcher_1 = require("./error-matcher");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const toolcache = __importStar(require("./toolcache"));
|
||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
const trap_caching_1 = require("./trap-caching");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
class CommandInvocationError extends Error {
|
||||
constructor(cmd, args, exitCode, error) {
|
||||
constructor(cmd, args, exitCode, error, output) {
|
||||
super(`Failure invoking ${cmd} with arguments ${args}.\n
|
||||
Exit code ${exitCode} and error was:\n
|
||||
${error}`);
|
||||
this.output = output;
|
||||
}
|
||||
}
|
||||
exports.CommandInvocationError = CommandInvocationError;
|
||||
@@ -52,30 +57,26 @@ exports.CommandInvocationError = CommandInvocationError;
|
||||
*/
|
||||
let cachedCodeQL = undefined;
|
||||
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
||||
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
exports.CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
/**
|
||||
* The oldest version of CodeQL that the Action will run with. This should be
|
||||
* at least three minor versions behind the current version. The version flags
|
||||
* below can be used to conditionally enable certain features on versions newer
|
||||
* than this. Please record the reason we cannot support an older version.
|
||||
* at least three minor versions behind the current version and must include the
|
||||
* CLI versions shipped with each supported version of GHES.
|
||||
*
|
||||
* Reason: First version containing fix for the "We still have not reached
|
||||
* idleness" deadlock.
|
||||
* The version flags below can be used to conditionally enable certain features
|
||||
* on versions newer than this.
|
||||
*/
|
||||
const CODEQL_MINIMUM_VERSION = "2.4.5";
|
||||
const CODEQL_MINIMUM_VERSION = "2.6.3";
|
||||
/**
|
||||
* Versions of CodeQL that version-flag certain functionality in the Action.
|
||||
* For convenience, please keep these in descending order. Once a version
|
||||
* flag is older than the oldest supported version above, it may be removed.
|
||||
*/
|
||||
const CODEQL_VERSION_RAM_FINALIZE = "2.5.8";
|
||||
const CODEQL_VERSION_DIAGNOSTICS = "2.5.6";
|
||||
const CODEQL_VERSION_METRICS = "2.5.5";
|
||||
const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
||||
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
||||
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
||||
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
||||
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
* CLI. In particular, with versions above this we will use both indirect
|
||||
@@ -86,6 +87,22 @@ exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
* versions above that.
|
||||
*/
|
||||
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
|
||||
/**
|
||||
* Versions 2.7.3+ of the CodeQL CLI support build tracing with glibc 2.34 on Linux. Versions before
|
||||
* this cannot perform build tracing when running on the Actions `ubuntu-22.04` runner image.
|
||||
*/
|
||||
exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = "2.7.3";
|
||||
/**
|
||||
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
|
||||
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
|
||||
* some of their files being greater than MAX_PATH (260 characters).
|
||||
*/
|
||||
exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
|
||||
/**
|
||||
* Previous versions had the option already, but were missing the
|
||||
* --extractor-options-verbosity that we need.
|
||||
*/
|
||||
exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
|
||||
function getCodeQLBundleName() {
|
||||
let platform;
|
||||
if (process.platform === "win32") {
|
||||
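The constants in the two hunks above gate features on the version of the CodeQL CLI in use. In the action this kind of flag is typically consulted by comparing the version reported by the CLI against the threshold before adding a command-line flag or changing behaviour. A hedged sketch of that pattern, using the semver package the action already depends on; codeQlVersionAbove here is a simplified stand-in for the helper in util.ts, which takes a CodeQL object rather than a callback.

import * as semver from "semver";

const CODEQL_VERSION_TRACING_GLIBC_2_34 = "2.7.3";

// Simplified stand-in: true if the CLI is at least `requiredVersion`.
async function codeQlVersionAbove(
  getVersion: () => Promise<string>,
  requiredVersion: string
): Promise<boolean> {
  return semver.gte(await getVersion(), requiredVersion);
}

// Example: only rely on glibc 2.34 support when the CLI is new enough.
async function supportsGlibc234(getVersion: () => Promise<string>) {
  return codeQlVersionAbove(getVersion, CODEQL_VERSION_TRACING_GLIBC_2_34);
}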
@@ -103,39 +120,25 @@ function getCodeQLBundleName() {
|
||||
return `codeql-bundle-${platform}.tar.gz`;
|
||||
}
|
||||
function getCodeQLActionRepository(logger) {
|
||||
if (!util.isActions()) {
|
||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
}
|
||||
else {
|
||||
return getActionsCodeQLActionRepository(logger);
|
||||
}
|
||||
}
|
||||
exports.getCodeQLActionRepository = getCodeQLActionRepository;
|
||||
function getActionsCodeQLActionRepository(logger) {
|
||||
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
|
||||
return process.env["GITHUB_ACTION_REPOSITORY"];
|
||||
}
|
||||
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
|
||||
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
|
||||
if ((0, actions_util_1.isRunningLocalAction)()) {
|
||||
// This handles the case where the Action does not come from an Action repository,
|
||||
// e.g. our integration tests which use the Action code from the current checkout.
|
||||
// In these cases, the GITHUB_ACTION_REPOSITORY environment variable is not set.
|
||||
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
|
||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
return exports.CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
}
|
||||
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
|
||||
const relativeScriptPathParts = (0, actions_util_1.getRelativeScriptPath)().split(path.sep);
|
||||
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
|
||||
return util.getRequiredEnvParam("GITHUB_ACTION_REPOSITORY");
|
||||
}
|
||||
exports.getCodeQLActionRepository = getCodeQLActionRepository;
|
||||
async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
const codeQLActionRepository = getCodeQLActionRepository(logger);
|
||||
const potentialDownloadSources = [
|
||||
// This GitHub instance, and this Action.
|
||||
[apiDetails.url, codeQLActionRepository],
|
||||
// This GitHub instance, and the canonical Action.
|
||||
[apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
[apiDetails.url, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
// GitHub.com, and the canonical Action.
|
||||
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
[util.GITHUB_DOTCOM_URL, exports.CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
];
|
||||
// We now filter out any duplicates.
|
||||
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
|
||||
@@ -146,14 +149,14 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
if (variant === util.GitHubVariant.GHAE) {
|
||||
try {
|
||||
const release = await api
|
||||
.getApiClient(apiDetails)
|
||||
.getApiClient()
|
||||
.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
|
||||
tag: CODEQL_BUNDLE_VERSION,
|
||||
});
|
||||
const assetID = release.data.assets[codeQLBundleName];
|
||||
if (assetID !== undefined) {
|
||||
const download = await api
|
||||
.getApiClient(apiDetails)
|
||||
.getApiClient()
|
||||
.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
|
||||
const downloadURL = download.data.url;
|
||||
logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
|
||||
@@ -171,12 +174,12 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
const [apiURL, repository] = downloadSource;
|
||||
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
|
||||
if (apiURL === util.GITHUB_DOTCOM_URL &&
|
||||
repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
||||
repository === exports.CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
||||
break;
|
||||
}
|
||||
const [repositoryOwner, repositoryName] = repository.split("/");
|
||||
try {
|
||||
const release = await api.getApiClient(apiDetails).repos.getReleaseByTag({
|
||||
const release = await api.getApiClient().repos.getReleaseByTag({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
tag: CODEQL_BUNDLE_VERSION,
|
||||
@@ -192,7 +195,7 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
|
||||
}
|
||||
}
|
||||
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
||||
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
||||
}
|
||||
/**
|
||||
* Set up CodeQL CLI access.
|
||||
@@ -200,42 +203,54 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
* @param codeqlURL
|
||||
* @param apiDetails
|
||||
* @param tempDir
|
||||
* @param toolCacheDir
|
||||
* @param variant
|
||||
* @param features
|
||||
* @param logger
|
||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||
* version requirement. Must be set to true outside tests.
|
||||
* @returns
|
||||
* @returns a { CodeQL, toolsVersion } object.
|
||||
*/
|
||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
|
||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, bypassToolcache, logger, checkVersion) {
|
||||
try {
|
||||
const forceLatestReason =
|
||||
// We use the special value of 'latest' to prioritize the version in the
|
||||
// defaults over any pinned cached version.
|
||||
const forceLatest = codeqlURL === "latest";
|
||||
codeqlURL === "latest"
|
||||
? '"tools: latest" was requested'
|
||||
: // If the user hasn't requested a particular CodeQL version, then bypass
|
||||
// the toolcache when the appropriate feature is enabled. This
|
||||
// allows us to quickly rollback a broken bundle that has made its way
|
||||
// into the toolcache.
|
||||
codeqlURL === undefined && bypassToolcache
|
||||
? "a specific version of CodeQL was not requested and the bypass toolcache feature is enabled"
|
||||
: undefined;
|
||||
const forceLatest = forceLatestReason !== undefined;
|
||||
if (forceLatest) {
|
||||
logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
|
||||
codeqlURL = undefined;
|
||||
}
|
||||
let codeqlFolder;
|
||||
let codeqlURLVersion;
|
||||
if (codeqlURL && !codeqlURL.startsWith("http")) {
|
||||
codeqlFolder = await toolcache.extractTar(codeqlURL, tempDir, logger);
|
||||
codeqlFolder = await toolcache.extractTar(codeqlURL);
|
||||
codeqlURLVersion = "local";
|
||||
}
|
||||
else {
|
||||
codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
||||
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
|
||||
// If we find the specified version, we always use that.
|
||||
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer, toolCacheDir, logger);
|
||||
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
|
||||
// If we don't find the requested version, in some cases we may allow a
|
||||
// different version to save download time if the version hasn't been
|
||||
// specified explicitly (in which case we always honor it).
|
||||
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL");
|
||||
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
|
||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
||||
codeqlFolder = tmpCodeqlFolder;
|
||||
codeqlURLVersion = codeqlVersions[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
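The setupCodeQL change above introduces two reasons to ignore any cached bundle and force the latest default version: the user explicitly asked for "tools: latest", or no tools version was requested and the bypass-toolcache feature is enabled (which lets a broken bundle in the toolcache be rolled back quickly). A small sketch of just that decision, with the surrounding download logic omitted:

function getForceLatestReason(
  codeqlURL: string | undefined,
  bypassToolcache: boolean
): string | undefined {
  if (codeqlURL === "latest") {
    return '"tools: latest" was requested';
  }
  if (codeqlURL === undefined && bypassToolcache) {
    return "a specific version of CodeQL was not requested and the bypass toolcache feature is enabled";
  }
  return undefined;
}

// Usage: if a reason exists, the URL is dropped so the default bundle is downloaded.
const reason = getForceLatestReason(undefined, true);
const forceLatest = reason !== undefined; // true in this example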
@@ -248,7 +263,9 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
|
||||
}
|
||||
const parsedCodeQLURL = new URL(codeqlURL);
|
||||
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
||||
const headers = { accept: "application/octet-stream" };
|
||||
const headers = {
|
||||
accept: "application/octet-stream",
|
||||
};
|
||||
// We only want to provide an authorization header if we are downloading
|
||||
// from the same GitHub instance the Action is running on.
|
||||
// This avoids leaking Enterprise tokens to dotcom.
|
||||
@@ -262,10 +279,12 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
|
||||
logger.debug("Downloading CodeQL bundle without token.");
|
||||
}
|
||||
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, tempDir, headers);
|
||||
const dest = path.join(tempDir, (0, uuid_1.v4)());
|
||||
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
|
||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath, tempDir, logger);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer, toolCacheDir, logger);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
|
||||
}
|
||||
}
|
||||
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
||||
@@ -344,6 +363,7 @@ function setCodeQL(partialCodeql) {
|
||||
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
|
||||
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
|
||||
resolveLanguages: resolveFunction(partialCodeql, "resolveLanguages"),
|
||||
betterResolveLanguages: resolveFunction(partialCodeql, "betterResolveLanguages"),
|
||||
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
|
||||
packDownload: resolveFunction(partialCodeql, "packDownload"),
|
||||
databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"),
|
||||
@@ -374,8 +394,8 @@ exports.getCachedCodeQL = getCachedCodeQL;
|
||||
* a non-existent placeholder codeql command, so tests that use this function
|
||||
* should also stub the toolrunner.ToolRunner constructor.
|
||||
*/
|
||||
async function getCodeQLForTesting() {
|
||||
return getCodeQLForCmd("codeql-for-testing", false);
|
||||
async function getCodeQLForTesting(cmd = "codeql-for-testing") {
|
||||
return getCodeQLForCmd(cmd, false);
|
||||
}
|
||||
exports.getCodeQLForTesting = getCodeQLForTesting;
|
||||
/**
|
||||
@@ -431,15 +451,32 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
// action/runner has been implemented in `codeql database trace-command`
|
||||
// _and_ is present in the latest supported CLI release.)
|
||||
const envFile = path.resolve(databasePath, "working", "env.tmp");
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"trace-command",
|
||||
databasePath,
|
||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
||||
process.execPath,
|
||||
tracerEnvJs,
|
||||
envFile,
|
||||
]);
|
||||
try {
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"trace-command",
|
||||
databasePath,
|
||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
||||
process.execPath,
|
||||
tracerEnvJs,
|
||||
envFile,
|
||||
]);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof CommandInvocationError &&
|
||||
e.output.includes("undefined symbol: __libc_dlopen_mode, version GLIBC_PRIVATE") &&
|
||||
process.platform === "linux" &&
|
||||
!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_TRACING_GLIBC_2_34))) {
|
||||
throw new util.UserError("The CodeQL CLI is incompatible with the version of glibc on your system. " +
|
||||
`Please upgrade to CodeQL CLI version ${exports.CODEQL_VERSION_TRACING_GLIBC_2_34} or ` +
|
||||
"later. If you cannot upgrade to a newer version of the CodeQL CLI, you can " +
|
||||
`alternatively run your workflow on another runner image such as "ubuntu-20.04" ` +
|
||||
"that has glibc 2.33 or earlier installed.");
|
||||
}
|
||||
else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
|
||||
},
|
||||
async databaseInit(databasePath, language, sourceRoot) {
|
||||
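CommandInvocationError now carries the tool's output, which the tracing hunk above inspects to detect a known failure mode: on Linux runners with glibc 2.34 (for example ubuntu-22.04), CLIs older than 2.7.3 fail with an "undefined symbol: __libc_dlopen_mode" error. A hedged TypeScript sketch of that detection; the error classes here are minimal stand-ins for the action's own, and the version check is reduced to a boolean.

class CommandInvocationError extends Error {
  constructor(public output: string) {
    super("command failed");
  }
}
class UserError extends Error {}

function rethrowWithGlibcHint(e: unknown, cliSupportsGlibc234: boolean): never {
  if (
    e instanceof CommandInvocationError &&
    e.output.includes("undefined symbol: __libc_dlopen_mode, version GLIBC_PRIVATE") &&
    process.platform === "linux" &&
    !cliSupportsGlibc234
  ) {
    // Surface an actionable message instead of the raw tracer failure.
    throw new UserError(
      "The CodeQL CLI is incompatible with the version of glibc on your system. " +
        "Upgrade the CLI to 2.7.3 or later, or run on a runner image with glibc 2.33 or earlier."
    );
  }
  throw e;
}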
@@ -452,20 +489,28 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
]);
|
||||
},
|
||||
async databaseInitCluster(config, sourceRoot, processName, processLevel) {
|
||||
async databaseInitCluster(config, sourceRoot, processName, featureEnablement) {
|
||||
const extraArgs = config.languages.map((language) => `--language=${language}`);
|
||||
if (config.languages.filter(languages_1.isTracedLanguage).length > 0) {
|
||||
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
|
||||
extraArgs.push("--begin-tracing");
|
||||
if (processName !== undefined) {
|
||||
extraArgs.push(`--trace-process-name=${processName}`);
|
||||
}
|
||||
else {
|
||||
// We default to 3 if no other arguments are provided since this was the default
|
||||
// behaviour of the Runner. Note this path never happens in the CodeQL Action
|
||||
// because that always passes in a process name.
|
||||
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
|
||||
extraArgs.push(...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgs)(config)));
|
||||
extraArgs.push(`--trace-process-name=${processName}`);
|
||||
if (
|
||||
// There's a bug in Lua tracing for Go on Windows in versions earlier than
|
||||
// `CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED`, so don't use Lua tracing
|
||||
// when tracing Go on Windows on these CodeQL versions.
|
||||
(await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACER_CONFIG)) &&
|
||||
config.languages.includes(languages_1.Language.go) &&
|
||||
(0, languages_1.isTracedLanguage)(languages_1.Language.go) &&
|
||||
process.platform === "win32" &&
|
||||
!(await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED))) {
|
||||
extraArgs.push("--no-internal-use-lua-tracing");
|
||||
}
|
||||
}
|
||||
const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement);
|
||||
if (configLocation) {
|
||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
||||
}
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"init",
|
||||
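databaseInitCluster now always passes --trace-process-name, adds the TRAP caching extractor arguments, and disables Lua tracing in one narrow case: tracing Go on Windows with a CLI that has the Lua tracer config (2.10.0+) but not the fix for the Go-on-Windows bug (2.10.4+). A sketch of that flag computation under simplified assumptions, with the version checks reduced to booleans and the TRAP cache arguments supplied by the caller:

function traceInitArgs(opts: {
  languages: string[];
  processName: string;
  trapCacheArgs: string[]; // would come from getTrapCachingExtractorConfigArgs (assumption)
  hasLuaTracerConfig: boolean; // CLI >= 2.10.0
  hasGoWindowsLuaFix: boolean; // CLI >= 2.10.4
}): string[] {
  const args = [
    "--begin-tracing",
    ...opts.trapCacheArgs,
    `--trace-process-name=${opts.processName}`,
  ];
  const tracingGoOnWindows =
    opts.languages.includes("go") && process.platform === "win32";
  if (opts.hasLuaTracerConfig && tracingGoOnWindows && !opts.hasGoWindowsLuaFix) {
    args.push("--no-internal-use-lua-tracing"); // work around the Lua tracing bug for Go on Windows
  }
  return args;
}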
@@ -478,7 +523,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
},
|
||||
async runAutobuild(language) {
|
||||
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
|
||||
const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName);
|
||||
// The autobuilder for Swift is located in the experimental/ directory.
|
||||
const possibleExperimentalDir = language === languages_1.Language.swift ? "experimental" : "";
|
||||
const autobuildCmd = path.join(path.dirname(cmd), possibleExperimentalDir, language, "tools", cmdName);
|
||||
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
|
||||
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
|
||||
// and Maven not properly handling closed connections
|
||||
@@ -490,9 +537,23 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
"-Dhttp.keepAlive=false",
|
||||
"-Dmaven.wagon.http.pool=false",
|
||||
].join(" ");
|
||||
// On macOS, System Integrity Protection (SIP) typically interferes with
|
||||
// CodeQL build tracing of protected binaries.
|
||||
// The usual workaround is to prefix `$CODEQL_RUNNER` to build commands:
|
||||
// `$CODEQL_RUNNER` (not to be confused with the deprecated CodeQL Runner tool)
|
||||
// points to a simple wrapper binary included with the CLI, and the extra layer of
|
||||
// process indirection helps the tracer bypass SIP.
|
||||
// The above SIP workaround is *not* needed here.
|
||||
// At the `autobuild` step in the Actions workflow, we assume the `init` step
|
||||
// has successfully run, and will have exported `DYLD_INSERT_LIBRARIES`
|
||||
// into the environment of subsequent steps, to activate the tracer.
|
||||
// When `DYLD_INSERT_LIBRARIES` is set in the environment for a step,
|
||||
// the Actions runtime introduces its own workaround for SIP
|
||||
// (https://github.com/actions/runner/pull/416).
|
||||
await runTool(autobuildCmd);
|
||||
},
|
||||
async extractScannedLanguage(databasePath, language) {
|
||||
async extractScannedLanguage(config, language) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
// Get extractor location
|
||||
let extractorPath = "";
|
||||
await new toolrunner.ToolRunner(cmd, [
|
||||
@@ -519,6 +580,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, [
|
||||
"database",
|
||||
"trace-command",
|
||||
...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgsForLang)(config, language)),
|
||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
||||
databasePath,
|
||||
"--",
|
||||
@@ -531,11 +593,10 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
"finalize",
|
||||
"--finalize-dataset",
|
||||
threadsFlag,
|
||||
memoryFlag,
|
||||
...getExtraOptionsFromEnv(["database", "finalize"]),
|
||||
databasePath,
|
||||
];
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_RAM_FINALIZE))
|
||||
args.push(memoryFlag);
|
||||
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, args, error_matcher_1.errorMatchers);
|
||||
},
|
||||
async resolveLanguages() {
|
||||
@@ -553,6 +614,22 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
throw new Error(`Unexpected output from codeql resolve languages: ${e}`);
|
||||
}
|
||||
},
|
||||
async betterResolveLanguages() {
|
||||
const codeqlArgs = [
|
||||
"resolve",
|
||||
"languages",
|
||||
"--format=betterjson",
|
||||
"--extractor-options-verbosity=4",
|
||||
...getExtraOptionsFromEnv(["resolve", "languages"]),
|
||||
];
|
||||
const output = await runTool(cmd, codeqlArgs);
|
||||
try {
|
||||
return JSON.parse(output);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Unexpected output from codeql resolve languages with --format=betterjson: ${e}`);
|
||||
}
|
||||
},
|
||||
async resolveQueries(queries, extraSearchPath) {
|
||||
const codeqlArgs = [
|
||||
"resolve",
|
||||
@@ -586,34 +663,37 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
if (extraSearchPath !== undefined) {
|
||||
codeqlArgs.push("--additional-packs", extraSearchPath);
|
||||
}
|
||||
codeqlArgs.push(querySuitePath);
|
||||
if (querySuitePath) {
|
||||
codeqlArgs.push(querySuitePath);
|
||||
}
|
||||
await runTool(cmd, codeqlArgs);
|
||||
},
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, featureEnablement) {
|
||||
const codeqlArgs = [
|
||||
"database",
|
||||
"interpret-results",
|
||||
threadsFlag,
|
||||
"--format=sarif-latest",
|
||||
"-v",
|
||||
verbosityFlag,
|
||||
`--output=${sarifFile}`,
|
||||
addSnippetsFlag,
|
||||
"--print-diagnostics-summary",
|
||||
"--print-metrics-summary",
|
||||
"--sarif-group-rules-by-pack",
|
||||
...getExtraOptionsFromEnv(["database", "interpret-results"]),
|
||||
];
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_DIAGNOSTICS))
|
||||
codeqlArgs.push("--print-diagnostics-summary");
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_METRICS))
|
||||
codeqlArgs.push("--print-metrics-summary");
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_GROUP_RULES))
|
||||
codeqlArgs.push("--sarif-group-rules-by-pack");
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
|
||||
codeqlArgs.push("--sarif-add-query-help");
|
||||
if (automationDetailsId !== undefined &&
|
||||
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
|
||||
if (automationDetailsId !== undefined) {
|
||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
if (await featureEnablement.getValue(feature_flags_1.Feature.FileBaselineInformationEnabled, this)) {
|
||||
codeqlArgs.push("--sarif-add-baseline-file-info");
|
||||
}
|
||||
codeqlArgs.push(databasePath);
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
if (querySuitePaths) {
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
}
|
||||
// capture stdout, which contains analysis summaries
|
||||
return await runTool(cmd, codeqlArgs);
|
||||
},
|
||||
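databaseInterpretResults now takes the verbosity flag from the caller, passes the diagnostics, metrics and rule-grouping flags unconditionally (the raised minimum CLI version makes the old per-flag version checks unnecessary), and adds --sarif-add-baseline-file-info only when the corresponding feature is enabled. A rough sketch of how that argument list could be assembled, with the feature check reduced to a boolean and some details omitted:

function interpretResultsArgs(opts: {
  threadsFlag: string;
  verbosityFlag: string; // supplied by the caller, e.g. "-v"
  sarifFile: string;
  addSnippetsFlag: string;
  automationDetailsId?: string;
  addBaselineFileInfo: boolean; // stands in for the FileBaselineInformationEnabled feature
  databasePath: string;
  querySuitePaths?: string[];
}): string[] {
  const args = [
    "database",
    "interpret-results",
    opts.threadsFlag,
    "--format=sarif-latest",
    opts.verbosityFlag,
    `--output=${opts.sarifFile}`,
    opts.addSnippetsFlag,
    "--print-diagnostics-summary",
    "--print-metrics-summary",
    "--sarif-group-rules-by-pack",
  ];
  if (opts.automationDetailsId !== undefined) {
    args.push("--sarif-category", opts.automationDetailsId);
  }
  if (opts.addBaselineFileInfo) {
    args.push("--sarif-add-baseline-file-info");
  }
  args.push(opts.databasePath, ...(opts.querySuitePaths ?? []));
  return args;
}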
@@ -635,11 +715,18 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
* If no version is specified, then the latest version is
|
||||
* downloaded. The check to determine what the latest version is is done
|
||||
* each time this package is requested.
|
||||
*
|
||||
* Optionally, a `qlconfigFile` is included. If used, then this file
|
||||
* is used to determine which registry each pack is downloaded from.
|
||||
*/
|
||||
async packDownload(packs) {
|
||||
async packDownload(packs, qlconfigFile) {
|
||||
const qlconfigArg = qlconfigFile
|
||||
? [`--qlconfig-file=${qlconfigFile}`]
|
||||
: [];
|
||||
const codeqlArgs = [
|
||||
"pack",
|
||||
"download",
|
||||
...qlconfigArg,
|
||||
"--format=json",
|
||||
"--resolve-query-specs",
|
||||
...getExtraOptionsFromEnv(["pack", "download"]),
|
||||
@@ -685,8 +772,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
};
|
||||
// To ensure that status reports include the CodeQL CLI version whereever
|
||||
// possbile, we want to call getVersion(), which populates the version value
|
||||
// To ensure that status reports include the CodeQL CLI version wherever
|
||||
// possible, we want to call getVersion(), which populates the version value
|
||||
// used by status reporting, at the earliest opportunity. But invoking
|
||||
// getVersion() directly here breaks tests that only pretend to create a
|
||||
// CodeQL object. So instead we rely on the assumption that all non-test
|
||||
@@ -769,7 +856,80 @@ async function runTool(cmd, args = []) {
|
||||
ignoreReturnCode: true,
|
||||
}).exec();
|
||||
if (exitCode !== 0)
|
||||
throw new CommandInvocationError(cmd, args, exitCode, error);
|
||||
throw new CommandInvocationError(cmd, args, exitCode, error, output);
|
||||
return output;
|
||||
}
|
||||
/**
|
||||
* If appropriate, generates a code scanning configuration that is to be used for a scan.
|
||||
* If the configuration is not to be generated, returns undefined.
|
||||
*
|
||||
* @param codeql The CodeQL object to use.
|
||||
* @param config The configuration to use.
|
||||
* @returns the path to the generated user configuration file.
|
||||
*/
|
||||
async function generateCodeScanningConfig(codeql, config, featureEnablement) {
|
||||
var _a;
|
||||
if (!(await util.useCodeScanningConfigInCli(codeql, featureEnablement))) {
|
||||
return;
|
||||
}
|
||||
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
|
||||
// make a copy so we can modify it
|
||||
const augmentedConfig = cloneObject(config.originalUserInput);
|
||||
// Inject the queries from the input
|
||||
if (config.augmentationProperties.queriesInput) {
|
||||
if (config.augmentationProperties.queriesInputCombines) {
|
||||
augmentedConfig.queries = (augmentedConfig.queries || []).concat(config.augmentationProperties.queriesInput);
|
||||
}
|
||||
else {
|
||||
augmentedConfig.queries = config.augmentationProperties.queriesInput;
|
||||
}
|
||||
}
|
||||
if (((_a = augmentedConfig.queries) === null || _a === void 0 ? void 0 : _a.length) === 0) {
|
||||
delete augmentedConfig.queries;
|
||||
}
|
||||
// Inject the packs from the input
|
||||
if (config.augmentationProperties.packsInput) {
|
||||
if (config.augmentationProperties.packsInputCombines) {
|
||||
// At this point, we already know that this is a single-language analysis
|
||||
if (Array.isArray(augmentedConfig.packs)) {
|
||||
augmentedConfig.packs = (augmentedConfig.packs || []).concat(config.augmentationProperties.packsInput);
|
||||
}
|
||||
else if (!augmentedConfig.packs) {
|
||||
augmentedConfig.packs = config.augmentationProperties.packsInput;
|
||||
}
|
||||
else {
|
||||
// At this point, we know there is only one language.
|
||||
// If there were more than one language, an error would already have been thrown.
|
||||
const language = Object.keys(augmentedConfig.packs)[0];
|
||||
augmentedConfig.packs[language] = augmentedConfig.packs[language].concat(config.augmentationProperties.packsInput);
|
||||
}
|
||||
}
|
||||
else {
|
||||
augmentedConfig.packs = config.augmentationProperties.packsInput;
|
||||
}
|
||||
}
|
||||
if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
|
||||
delete augmentedConfig.packs;
|
||||
}
|
||||
if (config.augmentationProperties.injectedMlQueries) {
|
||||
// We need to inject the ML queries into the original user input before
|
||||
// we pass this on to the CLI, to make sure these get run.
|
||||
const packString = await util.getMlPoweredJsQueriesPack(codeql);
|
||||
if (augmentedConfig.packs === undefined)
|
||||
augmentedConfig.packs = [];
|
||||
if (Array.isArray(augmentedConfig.packs)) {
|
||||
augmentedConfig.packs.push(packString);
|
||||
}
|
||||
else {
|
||||
if (!augmentedConfig.packs.javascript)
|
||||
augmentedConfig.packs["javascript"] = [];
|
||||
augmentedConfig.packs["javascript"].push(packString);
|
||||
}
|
||||
}
|
||||
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
|
||||
return configLocation;
|
||||
}
|
||||
function cloneObject(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
//# sourceMappingURL=codeql.js.map
|
||||
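generateCodeScanningConfig, added at the end of codeql.js above, writes an augmented copy of the user's configuration to user-config.yaml in the temp directory so it can be handed to the CLI via --codescanning-config. The core of the augmentation is merging the queries and packs from the workflow inputs into the original config, either replacing or extending them depending on the combine flags (in the action these correspond to inputs that start with "+"). A simplified sketch of the queries part of that merge and the final write:

import * as fs from "fs";
import * as path from "path";
import * as yaml from "js-yaml";

type UserConfig = { queries?: Array<{ uses: string }>; [key: string]: unknown };

function mergeQueries(
  original: UserConfig,
  queriesInput: Array<{ uses: string }> | undefined,
  queriesInputCombines: boolean // true when the input should extend rather than replace
): UserConfig {
  const augmented: UserConfig = JSON.parse(JSON.stringify(original)); // copy before modifying
  if (queriesInput) {
    augmented.queries = queriesInputCombines
      ? [...(augmented.queries ?? []), ...queriesInput]
      : queriesInput;
  }
  if (augmented.queries?.length === 0) {
    delete augmented.queries; // avoid emitting an empty list to the CLI
  }
  return augmented;
}

// Writing the result out, as the real function does for the CLI to consume:
function writeConfig(tempDir: string, config: UserConfig): string {
  const configLocation = path.resolve(tempDir, "user-config.yaml");
  fs.writeFileSync(configLocation, yaml.dump(config));
  return configLocation;
}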
File diff suppressed because one or more lines are too long
458 lib/codeql.test.js (generated)
@@ -22,14 +22,21 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.stubToolRunnerConstructor = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const nock_1 = __importDefault(require("nock"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const codeql = __importStar(require("./codeql"));
|
||||
const defaults = __importStar(require("./defaults.json"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
@@ -38,54 +45,102 @@ const util_1 = require("./util");
|
||||
const sampleApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://github.com",
|
||||
apiURL: undefined,
|
||||
registriesAuthTokens: undefined,
|
||||
};
|
||||
const sampleGHAEApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://example.githubenterprise.com",
|
||||
apiURL: undefined,
|
||||
registriesAuthTokens: undefined,
|
||||
};
|
||||
let stubConfig;
|
||||
ava_1.default.beforeEach(() => {
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||
(0, util_1.initializeEnvironment)("1.2.3");
|
||||
stubConfig = {
|
||||
languages: [languages_1.Language.cpp],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: "",
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
dbLocation: "",
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
augmentationProperties: {
|
||||
injectedMlQueries: false,
|
||||
packsInputCombines: false,
|
||||
queriesInputCombines: false,
|
||||
},
|
||||
trapCaches: {},
|
||||
trapCacheDownloadTime: 0,
|
||||
};
|
||||
});
|
||||
async function mockApiAndSetupCodeQL({ apiDetails, bypassToolcache, isPinned, tmpDir, toolsInput, version, }) {
|
||||
var _a;
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
: process.platform === "linux"
|
||||
? "linux64"
|
||||
: "osx64";
|
||||
const baseUrl = (_a = apiDetails === null || apiDetails === void 0 ? void 0 : apiDetails.url) !== null && _a !== void 0 ? _a : "https://example.com";
|
||||
const relativeUrl = apiDetails
|
||||
? `/github/codeql-action/releases/download/${version}/codeql-bundle-${platform}.tar.gz`
|
||||
: `/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
|
||||
(0, nock_1.default)(baseUrl)
|
||||
.get(relativeUrl)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle${isPinned ? "-pinned" : ""}.tar.gz`));
|
||||
return await codeql.setupCodeQL(toolsInput ? toolsInput.input : `${baseUrl}${relativeUrl}`, apiDetails !== null && apiDetails !== void 0 ? apiDetails : sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, !!bypassToolcache, (0, logging_1.getRunnerLogger)(true), false);
|
||||
}
|
||||
(0, ava_1.default)("download codeql bundle cache", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const versions = ["20200601", "20200610"];
|
||||
for (let i = 0; i < versions.length; i++) {
|
||||
const version = versions[i];
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const codeQLConfig = await mockApiAndSetupCodeQL({ version, tmpDir });
|
||||
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
||||
t.deepEqual(codeQLConfig.toolsVersion, version);
|
||||
}
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
t.is(toolcache.findAllVersions("CodeQL").length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const unpinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200610",
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
||||
t.deepEqual(unpinnedCodeQLConfig.toolsVersion, "20200610");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const codeQLConfig = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(codeQLConfig.toolsVersion, "0.0.0-20200601");
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
@@ -93,20 +148,19 @@ ava_1.default.beforeEach(() => {
|
||||
(0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
: process.platform === "linux"
|
||||
? "linux64"
|
||||
: "osx64";
|
||||
(0, nock_1.default)("https://github.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(cachedCodeQLConfig.toolsVersion, "20200601");
|
||||
const codeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
tmpDir,
|
||||
apiDetails: sampleApiDetails,
|
||||
toolsInput: { input: undefined },
|
||||
});
|
||||
t.deepEqual(codeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
@@ -114,24 +168,56 @@ ava_1.default.beforeEach(() => {
|
||||
(0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const pinnedCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: "20200601",
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
: process.platform === "linux"
|
||||
? "linux64"
|
||||
: "osx64";
|
||||
(0, nock_1.default)("https://github.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const latestCodeQLConfig = await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
apiDetails: sampleApiDetails,
|
||||
toolsInput: { input: "latest" },
|
||||
tmpDir,
|
||||
});
|
||||
t.deepEqual(latestCodeQLConfig.toolsVersion, defaults.bundleVersion.replace("codeql-bundle-", ""));
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
const TOOLCACHE_BYPASS_TEST_CASES = [
|
||||
[true, undefined, true],
|
||||
[false, undefined, false],
|
||||
[
|
||||
true,
|
||||
"https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
|
||||
false,
|
||||
],
|
||||
];
|
||||
for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCACHE_BYPASS_TEST_CASES) {
|
||||
(0, ava_1.default)(`download codeql bundle ${shouldToolcacheBeBypassed ? "bypasses" : "does not bypass"} toolcache when feature ${isFeatureEnabled ? "enabled" : "disabled"} and tools: ${toolsInput} passed`, async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
await mockApiAndSetupCodeQL({
|
||||
version: "codeql-bundle-20200601",
|
||||
apiDetails: sampleApiDetails,
|
||||
isPinned: true,
|
||||
tmpDir,
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
apiDetails: sampleApiDetails,
|
||||
bypassToolcache: isFeatureEnabled,
|
||||
toolsInput: { input: toolsInput },
|
||||
tmpDir,
|
||||
});
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, shouldToolcacheBeBypassed ? 2 : 1);
|
||||
});
|
||||
});
|
||||
}
|
||||
(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -155,7 +241,7 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, false, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
@@ -206,15 +292,14 @@ ava_1.default.beforeEach(() => {
|
||||
});
|
||||
(0, ava_1.default)("getCodeQLActionRepository", (t) => {
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.runner, "1.2.3");
|
||||
const repoActions = codeql.getCodeQLActionRepository(logger);
|
||||
t.deepEqual(repoActions, "github/codeql-action");
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||
(0, util_1.initializeEnvironment)("1.2.3");
|
||||
// isRunningLocalAction() === true
|
||||
delete process.env["GITHUB_ACTION_REPOSITORY"];
|
||||
process.env["RUNNER_TEMP"] = path.dirname(__dirname);
|
||||
const repoLocalRunner = codeql.getCodeQLActionRepository(logger);
|
||||
t.deepEqual(repoLocalRunner, "github/codeql-action");
|
||||
// isRunningLocalAction() === false
|
||||
sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
|
||||
process.env["GITHUB_ACTION_REPOSITORY"] = "xxx/yyy";
|
||||
const repoEnv = codeql.getCodeQLActionRepository(logger);
|
||||
t.deepEqual(repoEnv, "xxx/yyy");
|
||||
@@ -223,16 +308,282 @@ ava_1.default.beforeEach(() => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
tempDir,
|
||||
augmentationProperties: {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
},
|
||||
};
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should NOT have used a config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.falsy(configArg, "Should have injected a codescanning config");
|
||||
});
|
||||
});
|
||||
// Test macro for ensuring different variants of injected augmented configurations
|
||||
const injectedConfigMacro = ava_1.default.macro({
|
||||
exec: async (t, augmentationProperties, configOverride, expectedConfig) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
...configOverride,
|
||||
tempDir,
|
||||
augmentationProperties,
|
||||
};
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used a config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.truthy(configArg, "Should have injected a codescanning config");
|
||||
const configFile = configArg.split("=")[1];
|
||||
const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
|
||||
t.deepEqual(augmentedConfig, expectedConfig);
|
||||
await (0, del_1.default)(configFile, { force: true });
|
||||
});
|
||||
},
|
||||
title: (providedTitle = "") => `databaseInitCluster() injected config: ${providedTitle}`,
|
||||
});
|
||||
(0, ava_1.default)("basic", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
}, {}, {});
|
||||
(0, ava_1.default)("injected ML queries", injectedConfigMacro, {
|
||||
injectedMlQueries: true,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
}, {}, {
|
||||
packs: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
});
|
||||
(0, ava_1.default)("injected ML queries with existing packs", injectedConfigMacro, {
|
||||
injectedMlQueries: true,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
}, {
|
||||
originalUserInput: {
|
||||
packs: { javascript: ["codeql/something-else"] },
|
||||
},
|
||||
}, {
|
||||
packs: {
|
||||
javascript: [
|
||||
"codeql/something-else",
|
||||
"codeql/javascript-experimental-atm-queries@~0.3.0",
|
||||
],
|
||||
},
|
||||
});
|
||||
(0, ava_1.default)("injected ML queries with existing packs of different language", injectedConfigMacro, {
|
||||
injectedMlQueries: true,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
}, {
|
||||
originalUserInput: {
|
||||
packs: { cpp: ["codeql/something-else"] },
|
||||
},
|
||||
}, {
|
||||
packs: {
|
||||
cpp: ["codeql/something-else"],
|
||||
javascript: ["codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
},
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
packsInput: ["xxx", "yyy"],
|
||||
}, {}, {
|
||||
packs: ["xxx", "yyy"],
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input with existing packs combines", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: true,
|
||||
packsInput: ["xxx", "yyy"],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
packs: {
|
||||
cpp: ["codeql/something-else"],
|
||||
},
|
||||
},
|
||||
}, {
|
||||
packs: {
|
||||
cpp: ["codeql/something-else", "xxx", "yyy"],
|
||||
},
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input with existing packs overrides", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
packsInput: ["xxx", "yyy"],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
packs: {
|
||||
cpp: ["codeql/something-else"],
|
||||
},
|
||||
},
|
||||
}, {
|
||||
packs: ["xxx", "yyy"],
|
||||
});
|
||||
(0, ava_1.default)("injected packs from input with existing packs overrides and ML model inject", injectedConfigMacro, {
|
||||
injectedMlQueries: true,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
packsInput: ["xxx", "yyy"],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
packs: {
|
||||
cpp: ["codeql/something-else"],
|
||||
},
|
||||
},
|
||||
}, {
|
||||
packs: ["xxx", "yyy", "codeql/javascript-experimental-atm-queries@~0.3.0"],
|
||||
});
|
||||
// similar, but with queries
|
||||
(0, ava_1.default)("injected queries from input", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
}, {}, {
|
||||
queries: [
|
||||
{
|
||||
uses: "xxx",
|
||||
},
|
||||
{
|
||||
uses: "yyy",
|
||||
},
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries from input overrides", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
queries: [{ uses: "zzz" }],
|
||||
},
|
||||
}, {
|
||||
queries: [
|
||||
{
|
||||
uses: "xxx",
|
||||
},
|
||||
{
|
||||
uses: "yyy",
|
||||
},
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries from input combines", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: true,
|
||||
packsInputCombines: false,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
queries: [{ uses: "zzz" }],
|
||||
},
|
||||
}, {
|
||||
queries: [
|
||||
{
|
||||
uses: "zzz",
|
||||
},
|
||||
{
|
||||
uses: "xxx",
|
||||
},
|
||||
{
|
||||
uses: "yyy",
|
||||
},
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries from input combines 2", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: true,
|
||||
packsInputCombines: true,
|
||||
queriesInput: [{ uses: "xxx" }, { uses: "yyy" }],
|
||||
}, {}, {
|
||||
queries: [
|
||||
{
|
||||
uses: "xxx",
|
||||
},
|
||||
{
|
||||
uses: "yyy",
|
||||
},
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("injected queries and packs, but empty", injectedConfigMacro, {
|
||||
injectedMlQueries: false,
|
||||
queriesInputCombines: true,
|
||||
packsInputCombines: true,
|
||||
queriesInput: [],
|
||||
packsInput: [],
|
||||
}, {
|
||||
originalUserInput: {
|
||||
packs: [],
|
||||
queries: [],
|
||||
},
|
||||
}, {});
|
||||
(0, ava_1.default)("does not use injected config", async (t) => {
|
||||
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "false";
|
||||
try {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should NOT have used a config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.falsy(configArg, "Should NOT have injected a codescanning config");
|
||||
}
|
||||
finally {
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
||||
}
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked separately to determine feature enablement, and does not
|
||||
// otherwise impact this test, so set it to 0.0.0.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.FileBaselineInformationEnabled]));
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
||||
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
|
||||
// affect this test.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
||||
});
|
||||
function stubToolRunnerConstructor() {
|
||||
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
|
||||
runnerObjectStub.exec.resolves(0);
|
||||
@@ -240,4 +591,5 @@ function stubToolRunnerConstructor() {
|
||||
runnerConstructorStub.returns(runnerObjectStub);
|
||||
return runnerConstructorStub;
|
||||
}
|
||||
exports.stubToolRunnerConstructor = stubToolRunnerConstructor;
|
||||
//# sourceMappingURL=codeql.test.js.map
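All of the flag checks in the test file above follow the same pattern: stub the ToolRunner constructor so no real CLI is spawned, call the wrapper method on a CodeQL object with a stubbed version, and then inspect the argument list captured by the stub. The following is a minimal sketch of that pattern, reusing only helpers visible in this file (stubToolRunnerConstructor, getCodeQLForTesting, createFeatures); the require paths and the helper name sarifFlagIsPassed are assumptions added here for illustration, not part of the diff.

const sinon = require("sinon");
const codeql = require("./codeql");
const { createFeatures } = require("./testing-utils");
const { stubToolRunnerConstructor } = require("./codeql.test");

// Returns true if `flag` ends up on the CLI command line when the
// (stubbed) CodeQL CLI reports `version`.
async function sarifFlagIsPassed(version, flag) {
    const runnerConstructorStub = stubToolRunnerConstructor(); // no real CLI is spawned
    const codeqlObject = await codeql.getCodeQLForTesting();
    sinon.stub(codeqlObject, "getVersion").resolves(version); // pretend this CLI version
    await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", createFeatures([]));
    // args[1] is the argument array handed to the stubbed ToolRunner constructor.
    return runnerConstructorStub.firstCall.args[1].includes(flag);
}

Per the tests above, sarifFlagIsPassed("2.7.1", "--sarif-add-query-help") should resolve to true, while sarifFlagIsPassed("2.7.0", "--sarif-add-query-help") should resolve to false.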
|
||||
File diff suppressed because one or more lines are too long
416
lib/config-utils.js
generated
@@ -19,9 +19,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePacksSpecification = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
|
||||
exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
// We need to import `performance` on Node 12
|
||||
const perf_hooks_1 = require("perf_hooks");
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
@@ -29,6 +31,7 @@ const codeql_1 = require("./codeql");
|
||||
const externalQueries = __importStar(require("./external-queries"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const trap_caching_1 = require("./trap-caching");
|
||||
const util_1 = require("./util");
|
||||
// Property names from the user-supplied config file.
|
||||
const NAME_PROPERTY = "name";
|
||||
@@ -38,6 +41,17 @@ const QUERIES_USES_PROPERTY = "uses";
|
||||
const PATHS_IGNORE_PROPERTY = "paths-ignore";
|
||||
const PATHS_PROPERTY = "paths";
|
||||
const PACKS_PROPERTY = "packs";
|
||||
/**
|
||||
* The default, empty augmentation properties. This is most useful
|
||||
* for tests.
|
||||
*/
|
||||
exports.defaultAugmentationProperties = {
|
||||
queriesInputCombines: false,
|
||||
packsInputCombines: false,
|
||||
injectedMlQueries: false,
|
||||
packsInput: undefined,
|
||||
queriesInput: undefined,
|
||||
};
|
||||
/**
|
||||
* A list of queries from https://github.com/github/codeql that
|
||||
* we don't want to run. Disabling them here is a quicker alternative to
|
||||
@@ -120,7 +134,7 @@ const builtinSuites = ["security-extended", "security-and-quality"];
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
* May inject ML queries, and the return value will declare if this was done.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureEnablement, configFile) {
|
||||
var _a;
|
||||
let injectedMlQueries = false;
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
@@ -131,13 +145,13 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
||||
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
||||
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
||||
if (
|
||||
// Disable ML-powered queries on Windows
|
||||
process.platform !== "win32" &&
|
||||
// Only run ML-powered queries on Windows if we have a CLI that supports it.
|
||||
(process.platform !== "win32" ||
|
||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
|
||||
languages.includes("javascript") &&
|
||||
(found === "security-extended" || found === "security-and-quality") &&
|
||||
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some(isMlPoweredJsQueriesPack)) &&
|
||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
||||
(await featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
|
||||
if (!packs.javascript) {
|
||||
packs.javascript = [];
|
||||
}
|
||||
@@ -149,9 +163,7 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
||||
return injectedMlQueries;
|
||||
}
|
||||
function isMlPoweredJsQueriesPack(pack) {
|
||||
return (pack === util_1.ML_POWERED_JS_QUERIES_PACK_NAME ||
|
||||
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@`) ||
|
||||
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}:`));
|
||||
return parsePacksSpecification(pack).name === util_1.ML_POWERED_JS_QUERIES_PACK_NAME;
|
||||
}
|
||||
/**
|
||||
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
||||
@@ -214,7 +226,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
||||
*
|
||||
* @returns whether or not we injected ML queries into the packs
|
||||
*/
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile) {
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
@@ -226,7 +238,7 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureEnablement, configFile);
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
@@ -292,6 +304,10 @@ function getQueriesInvalid(configFile) {
|
||||
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array");
|
||||
}
|
||||
exports.getQueriesInvalid = getQueriesInvalid;
|
||||
function getQueriesMissingUses(configFile) {
|
||||
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array, with each entry having a 'uses' property");
|
||||
}
|
||||
exports.getQueriesMissingUses = getQueriesMissingUses;
|
||||
function getQueryUsesInvalid(configFile, queryUses) {
|
||||
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`);
|
||||
}
|
||||
@@ -305,9 +321,8 @@ function getPathsInvalid(configFile) {
|
||||
}
|
||||
exports.getPathsInvalid = getPathsInvalid;
|
||||
function getPacksRequireLanguage(lang, configFile) {
|
||||
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not one of the languages to analyze`);
|
||||
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not a valid language.`);
|
||||
}
|
||||
exports.getPacksRequireLanguage = getPacksRequireLanguage;
|
||||
function getPacksInvalidSplit(configFile) {
|
||||
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, "must split packages by language");
|
||||
}
|
||||
@@ -370,11 +385,12 @@ function getUnknownLanguagesError(languages) {
|
||||
}
|
||||
exports.getUnknownLanguagesError = getUnknownLanguagesError;
|
||||
/**
|
||||
* Gets the set of languages in the current repository
|
||||
* Gets the set of languages in the current repository that are
|
||||
* scannable by CodeQL.
|
||||
*/
|
||||
async function getLanguagesInRepo(repository, apiDetails, logger) {
|
||||
async function getLanguagesInRepo(repository, logger) {
|
||||
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
|
||||
const response = await api.getApiClient(apiDetails).repos.listLanguages({
|
||||
const response = await api.getApiClient().repos.listLanguages({
|
||||
owner: repository.owner,
|
||||
repo: repository.repo,
|
||||
});
|
||||
@@ -392,6 +408,7 @@ async function getLanguagesInRepo(repository, apiDetails, logger) {
|
||||
}
|
||||
return [...languages];
|
||||
}
|
||||
exports.getLanguagesInRepo = getLanguagesInRepo;
|
||||
/**
|
||||
* Get the languages to analyse.
|
||||
*
|
||||
@@ -402,19 +419,17 @@ async function getLanguagesInRepo(repository, apiDetails, logger) {
|
||||
* If no languages could be detected from either the workflow or the repository
|
||||
* then throw an error.
|
||||
*/
|
||||
async function getLanguages(codeQL, languagesInput, repository, apiDetails, logger) {
|
||||
// Obtain from action input 'languages' if set
|
||||
let languages = (languagesInput || "")
|
||||
.split(",")
|
||||
.map((x) => x.trim())
|
||||
.filter((x) => x.length > 0);
|
||||
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
|
||||
if (languages.length === 0) {
|
||||
// Obtain languages as all languages in the repo that can be analysed
|
||||
languages = await getLanguagesInRepo(repository, apiDetails, logger);
|
||||
async function getLanguages(codeQL, languagesInput, repository, logger) {
|
||||
// Obtain languages without filtering them.
|
||||
const { rawLanguages, autodetected } = await getRawLanguages(languagesInput, repository, logger);
|
||||
let languages = rawLanguages.map(languages_1.resolveAlias);
|
||||
if (autodetected) {
|
||||
const availableLanguages = await codeQL.resolveLanguages();
|
||||
languages = languages.filter((value) => value in availableLanguages);
|
||||
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
|
||||
logger.info(`Automatically detected languages: ${languages.join(", ")}`);
|
||||
}
|
||||
else {
|
||||
logger.info(`Languages from configuration: ${languages.join(", ")}`);
|
||||
}
|
||||
// If the languages parameter was not given and no languages were
|
||||
// detected then fail here as this is a workflow configuration error.
|
||||
@@ -425,26 +440,58 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
|
||||
const parsedLanguages = [];
|
||||
const unknownLanguages = [];
|
||||
for (const language of languages) {
|
||||
// We know this is not an alias since we resolved it above.
|
||||
const parsedLanguage = (0, languages_1.parseLanguage)(language);
|
||||
if (parsedLanguage === undefined) {
|
||||
unknownLanguages.push(language);
|
||||
}
|
||||
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
|
||||
else if (!parsedLanguages.includes(parsedLanguage)) {
|
||||
parsedLanguages.push(parsedLanguage);
|
||||
}
|
||||
}
|
||||
// Any unknown languages here would have come directly from the input
|
||||
// since we filter unknown languages coming from the GitHub API.
|
||||
if (unknownLanguages.length > 0) {
|
||||
throw new Error(getUnknownLanguagesError(unknownLanguages));
|
||||
}
|
||||
return parsedLanguages;
|
||||
}
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
|
||||
exports.getLanguages = getLanguages;
|
||||
/**
|
||||
* Gets the set of languages in the current repository without checking to
|
||||
* see if these languages are actually supported by CodeQL.
|
||||
*
|
||||
* @param languagesInput The languages from the workflow input
|
||||
* @param repository the owner/name of the repository
|
||||
* @param logger a logger
|
||||
* @returns A tuple containing a list of languages in this repository that might be
|
||||
* analyzable and whether or not this list was determined automatically.
|
||||
*/
|
||||
async function getRawLanguages(languagesInput, repository, logger) {
|
||||
// Obtain from action input 'languages' if set
|
||||
let rawLanguages = (languagesInput || "")
|
||||
.split(",")
|
||||
.map((x) => x.trim().toLowerCase())
|
||||
.filter((x) => x.length > 0);
|
||||
let autodetected;
|
||||
if (rawLanguages.length) {
|
||||
autodetected = false;
|
||||
}
|
||||
else {
|
||||
autodetected = true;
|
||||
// Obtain all languages in the repo that can be analysed
|
||||
rawLanguages = (await getLanguagesInRepo(repository, logger));
|
||||
}
|
||||
return { rawLanguages, autodetected };
|
||||
}
|
||||
exports.getRawLanguages = getRawLanguages;
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger) {
|
||||
let injectedMlQueries = false;
|
||||
queriesInput = queriesInput.trim();
|
||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||
queriesInput = queriesInput.replace(/^\+/, "");
|
||||
for (const query of queriesInput.split(",")) {
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
injectedMlQueries = injectedMlQueries || didInject;
|
||||
}
|
||||
return injectedMlQueries;
|
||||
@@ -462,9 +509,8 @@ function shouldAddConfigFileQueries(queriesInput) {
|
||||
/**
|
||||
* Get the default config for when the user has not supplied one.
|
||||
*/
|
||||
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
var _a;
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
|
||||
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
|
||||
const queries = {};
|
||||
for (const language of languages) {
|
||||
queries[language] = {
|
||||
@@ -473,11 +519,17 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
};
|
||||
}
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
const augmentationProperties = calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
|
||||
const packs = augmentationProperties.packsInput
|
||||
? {
|
||||
[languages[0]]: augmentationProperties.packsInput,
|
||||
}
|
||||
: {};
|
||||
if (rawQueriesInput) {
|
||||
augmentationProperties.injectedMlQueries =
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
@@ -486,21 +538,32 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
packs,
|
||||
originalUserInput: {},
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
augmentationProperties,
|
||||
trapCaches,
|
||||
trapCacheDownloadTime,
|
||||
};
|
||||
}
|
||||
exports.getDefaultConfig = getDefaultConfig;
|
||||
async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger) {
|
||||
let trapCaches = {};
|
||||
let trapCacheDownloadTime = 0;
|
||||
if (trapCachingEnabled) {
|
||||
const start = perf_hooks_1.performance.now();
|
||||
trapCaches = await (0, trap_caching_1.downloadTrapCaches)(codeQL, languages, logger);
|
||||
trapCacheDownloadTime = perf_hooks_1.performance.now() - start;
|
||||
}
|
||||
return { trapCaches, trapCacheDownloadTime };
|
||||
}
|
||||
/**
|
||||
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
var _a;
|
||||
let parsedYAML;
|
||||
if (isLocal(configFile)) {
|
||||
@@ -521,7 +584,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
throw new Error(getNameInvalid(configFile));
|
||||
}
|
||||
}
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
|
||||
const queries = {};
|
||||
for (const language of languages) {
|
||||
queries[language] = {
|
||||
@@ -541,27 +604,27 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
if (!disableDefaultQueries) {
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
}
|
||||
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
|
||||
const augmentationProperties = calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
|
||||
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, rawPacksInput, augmentationProperties.packsInputCombines, languages, configFile, logger);
|
||||
// If queries were provided using `with` in the action configuration,
|
||||
// they should take precedence over the queries in the config file
|
||||
// unless they're prefixed with "+", in which case they supplement those
|
||||
// in the config file.
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
if (rawQueriesInput) {
|
||||
augmentationProperties.injectedMlQueries =
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
if (shouldAddConfigFileQueries(queriesInput) &&
|
||||
if (shouldAddConfigFileQueries(rawQueriesInput) &&
|
||||
QUERIES_PROPERTY in parsedYAML) {
|
||||
const queriesArr = parsedYAML[QUERIES_PROPERTY];
|
||||
if (!Array.isArray(queriesArr)) {
|
||||
throw new Error(getQueriesInvalid(configFile));
|
||||
}
|
||||
for (const query of queriesArr) {
|
||||
if (!(QUERIES_USES_PROPERTY in query) ||
|
||||
typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueriesMissingUses(configFile));
|
||||
}
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile);
|
||||
}
|
||||
}
|
||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||
@@ -586,6 +649,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
|
||||
}
|
||||
}
|
||||
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
@@ -594,16 +658,62 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
packs,
|
||||
originalUserInput: parsedYAML,
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
augmentationProperties,
|
||||
trapCaches,
|
||||
trapCacheDownloadTime,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Calculates how the codeql config file needs to be augmented before passing
|
||||
* it to the CLI. This is necessary because the codeql-action can be called
|
||||
* with extra inputs from the workflow. These inputs are not part of the config
|
||||
* and the CLI does not know about these inputs so we need to inject them into
|
||||
* the config file sent to the CLI.
|
||||
*
|
||||
* @param rawPacksInput The packs input from the action configuration.
|
||||
* @param rawQueriesInput The queries input from the action configuration.
|
||||
* @param languages The languages that the config file is for. If the packs input
|
||||
* is non-empty, then there must be exactly one language. Otherwise, an
|
||||
* error is thrown.
|
||||
*
|
||||
* @returns The properties that need to be augmented in the config file.
|
||||
*
|
||||
* @throws An error if the packs input is non-empty and the languages input does
|
||||
* not have exactly one language.
|
||||
*/
|
||||
// exported for testing.
|
||||
function calculateAugmentation(rawPacksInput, rawQueriesInput, languages) {
|
||||
const packsInputCombines = shouldCombine(rawPacksInput);
|
||||
const packsInput = parsePacksFromInput(rawPacksInput, languages, packsInputCombines);
|
||||
const queriesInputCombines = shouldCombine(rawQueriesInput);
|
||||
const queriesInput = parseQueriesFromInput(rawQueriesInput, queriesInputCombines);
|
||||
return {
|
||||
injectedMlQueries: false,
|
||||
packsInputCombines,
|
||||
packsInput: packsInput === null || packsInput === void 0 ? void 0 : packsInput[languages[0]],
|
||||
queriesInput,
|
||||
queriesInputCombines,
|
||||
};
|
||||
}
|
||||
exports.calculateAugmentation = calculateAugmentation;
|
||||
function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
|
||||
if (!rawQueriesInput) {
|
||||
return undefined;
|
||||
}
|
||||
const trimmedInput = queriesInputCombines
|
||||
? rawQueriesInput.trim().slice(1).trim()
|
||||
: rawQueriesInput === null || rawQueriesInput === void 0 ? void 0 : rawQueriesInput.trim();
|
||||
if (queriesInputCombines && trimmedInput.length === 0) {
|
||||
throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||
}
|
||||
return trimmedInput.split(",").map((query) => ({ uses: query.trim() }));
|
||||
}
|
||||
/**
|
||||
* Pack names must be in the form of `scope/name`, with only alpha-numeric characters,
|
||||
* and `-` allowed as long as it is not the first or last character.
|
||||
@@ -615,7 +725,7 @@ const PACK_IDENTIFIER_PATTERN = (function () {
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
// Exported for testing
|
||||
function parsePacksFromConfig(packsByLanguage, languages, configFile) {
|
||||
function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
|
||||
const packs = {};
|
||||
if (Array.isArray(packsByLanguage)) {
|
||||
if (languages.length === 1) {
|
||||
@@ -635,18 +745,23 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile) {
|
||||
throw new Error(getPacksInvalid(configFile));
|
||||
}
|
||||
if (!languages.includes(lang)) {
|
||||
throw new Error(getPacksRequireLanguage(lang, configFile));
|
||||
}
|
||||
packs[lang] = [];
|
||||
for (const packStr of packsArr) {
|
||||
packs[lang].push(validatePacksSpecification(packStr, configFile));
|
||||
// This particular language is not being analyzed in this run.
|
||||
if (languages_1.Language[lang]) {
|
||||
logger.info(`Ignoring packs for ${lang} since this language is not being analyzed in this run.`);
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
// This language is invalid, probably a misspelling
|
||||
throw new Error(getPacksRequireLanguage(configFile, lang));
|
||||
}
|
||||
}
|
||||
packs[lang] = packsArr.map((packStr) => validatePackSpecification(packStr, configFile));
|
||||
}
|
||||
return packs;
|
||||
}
|
||||
exports.parsePacksFromConfig = parsePacksFromConfig;
|
||||
function parsePacksFromInput(packsInput, languages) {
|
||||
if (!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim())) {
|
||||
function parsePacksFromInput(rawPacksInput, languages, packsInputCombines) {
|
||||
if (!(rawPacksInput === null || rawPacksInput === void 0 ? void 0 : rawPacksInput.trim())) {
|
||||
return undefined;
|
||||
}
|
||||
if (languages.length > 1) {
|
||||
@@ -655,16 +770,16 @@ function parsePacksFromInput(packsInput, languages) {
|
||||
else if (languages.length === 0) {
|
||||
throw new Error("No languages specified. Cannot process the packs input.");
|
||||
}
|
||||
packsInput = packsInput.trim();
|
||||
if (packsInput.startsWith("+")) {
|
||||
packsInput = packsInput.substring(1).trim();
|
||||
if (!packsInput) {
|
||||
throw new Error("A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.");
|
||||
rawPacksInput = rawPacksInput.trim();
|
||||
if (packsInputCombines) {
|
||||
rawPacksInput = rawPacksInput.trim().substring(1).trim();
|
||||
if (!rawPacksInput) {
|
||||
throw new Error(getConfigFilePropertyError(undefined, "packs", "A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||
}
|
||||
}
|
||||
return {
|
||||
[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
|
||||
packs.push(validatePacksSpecification(pack, ""));
|
||||
[languages[0]]: rawPacksInput.split(",").reduce((packs, pack) => {
|
||||
packs.push(validatePackSpecification(pack, ""));
|
||||
return packs;
|
||||
}, []),
|
||||
};
|
||||
@@ -687,7 +802,7 @@ function parsePacksFromInput(packsInput, languages) {
|
||||
* @param packStr the package specification to verify.
|
||||
* @param configFile Config file to use for error reporting
|
||||
*/
|
||||
function validatePacksSpecification(packStr, configFile) {
|
||||
function parsePacksSpecification(packStr, configFile) {
|
||||
if (typeof packStr !== "string") {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
@@ -722,31 +837,61 @@ function validatePacksSpecification(packStr, configFile) {
|
||||
}
|
||||
}
|
||||
if (packPath &&
|
||||
(path.isAbsolute(packPath) || path.normalize(packPath) !== packPath)) {
|
||||
(path.isAbsolute(packPath) ||
|
||||
// Permit using "/" instead of "\" on Windows
|
||||
// Use `x.split(y).join(z)` as a polyfill for `x.replaceAll(y, z)` since
|
||||
// if we used a regex we'd need to escape the path separator on Windows
|
||||
// which seems more awkward.
|
||||
path.normalize(packPath).split(path.sep).join("/") !==
|
||||
packPath.split(path.sep).join("/"))) {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
if (!packPath && pathStart) {
|
||||
// 0 length path
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
return (packName + (version ? `@${version}` : "") + (packPath ? `:${packPath}` : ""));
|
||||
return {
|
||||
name: packName,
|
||||
version,
|
||||
path: packPath,
|
||||
};
|
||||
}
|
||||
exports.validatePacksSpecification = validatePacksSpecification;
|
||||
exports.parsePacksSpecification = parsePacksSpecification;
|
||||
function prettyPrintPack(pack) {
|
||||
return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
|
||||
}
|
||||
exports.prettyPrintPack = prettyPrintPack;
|
||||
function validatePackSpecification(pack, configFile) {
|
||||
return prettyPrintPack(parsePacksSpecification(pack, configFile));
|
||||
}
|
||||
exports.validatePackSpecification = validatePackSpecification;
|
||||
// exported for testing
|
||||
function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
|
||||
const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
|
||||
const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile);
|
||||
function parsePacks(rawPacksFromConfig, rawPacksFromInput, packsInputCombines, languages, configFile, logger) {
|
||||
const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile, logger);
|
||||
const packsFromInput = parsePacksFromInput(rawPacksFromInput, languages, packsInputCombines);
|
||||
if (!packsFromInput) {
|
||||
return packsFomConfig;
|
||||
}
|
||||
if (!shouldCombinePacks(rawPacksInput)) {
|
||||
if (!packsInputCombines) {
|
||||
if (!packsFromInput) {
|
||||
throw new Error(getPacksInvalid(configFile));
|
||||
}
|
||||
return packsFromInput;
|
||||
}
|
||||
return combinePacks(packsFromInput, packsFomConfig);
|
||||
}
|
||||
exports.parsePacks = parsePacks;
|
||||
function shouldCombinePacks(packsInput) {
|
||||
return !!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim().startsWith("+"));
|
||||
/**
|
||||
* The convention in this action is that an input value that is prefixed with a '+' will
|
||||
* be combined with the corresponding value in the config file.
|
||||
*
|
||||
* Without a '+', an input value will override the corresponding value in the config file.
|
||||
*
|
||||
* @param inputValue The input value to process.
|
||||
* @returns true if the input value should be combined with (appended to) the corresponding value in the config file, false if it should replace it.
|
||||
*/
|
||||
function shouldCombine(inputValue) {
|
||||
return !!(inputValue === null || inputValue === void 0 ? void 0 : inputValue.trim().startsWith("+"));
|
||||
}
|
||||
function combinePacks(packs1, packs2) {
|
||||
const packs = {};
|
||||
@@ -769,16 +914,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
var _a, _b, _c;
|
||||
let config;
|
||||
// If no config file was provided create an empty one
|
||||
if (!configFile) {
|
||||
logger.debug("No configuration file was provided");
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
else {
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
@@ -791,11 +936,29 @@ async function initConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
// When using the codescanning config in the CLI, pack downloads
|
||||
// happen in the CLI during the `database init` command, so no need
|
||||
// to download them here.
|
||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureEnablement, logger);
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
const registries = parseRegistries(registriesInput);
|
||||
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
|
||||
}
|
||||
// Save the config so we can easily access it again in the future
|
||||
await saveConfig(config, logger);
|
||||
return config;
|
||||
}
|
||||
exports.initConfig = initConfig;
|
||||
function parseRegistries(registriesInput) {
|
||||
try {
|
||||
return registriesInput
|
||||
? yaml.load(registriesInput)
|
||||
: undefined;
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error("Invalid registries input. Must be a YAML string.");
|
||||
}
|
||||
}
|
||||
function isLocal(configPath) {
|
||||
// If the path starts with ./, look locally
|
||||
if (configPath.indexOf("./") === 0) {
|
||||
@@ -823,7 +986,7 @@ async function getRemoteConfig(configFile, apiDetails) {
|
||||
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
|
||||
}
|
||||
const response = await api
|
||||
.getApiClient(apiDetails, { allowExternal: true })
|
||||
.getApiClientWithExternalAuth(apiDetails)
|
||||
.repos.getContent({
|
||||
owner: pieces.groups.owner,
|
||||
repo: pieces.groups.repo,
|
||||
@@ -875,4 +1038,95 @@ async function getConfig(tempDir, logger) {
|
||||
return JSON.parse(configString);
|
||||
}
|
||||
exports.getConfig = getConfig;
|
||||
async function downloadPacks(codeQL, languages, packs, registries, apiDetails, tmpDir, logger) {
let qlconfigFile;
let registriesAuthTokens;
if (registries) {
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
throw new Error(`'registries' input is not supported on CodeQL versions less than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}.`);
}
// generate a qlconfig.yml file to hold the registry configs.
const qlconfig = createRegistriesBlock(registries);
qlconfigFile = path.join(tmpDir, "qlconfig.yml");
fs.writeFileSync(qlconfigFile, yaml.dump(qlconfig), "utf8");
registriesAuthTokens = registries
.map((registry) => `${registry.url}=${registry.token}`)
.join(",");
}
await wrapEnvironment({
GITHUB_TOKEN: apiDetails.auth,
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
}, async () => {
let numPacksDownloaded = 0;
logger.startGroup("Downloading packs");
for (const language of languages) {
const packsWithVersion = packs[language];
if (packsWithVersion === null || packsWithVersion === void 0 ? void 0 : packsWithVersion.length) {
logger.info(`Downloading custom packs for ${language}`);
const results = await codeQL.packDownload(packsWithVersion, qlconfigFile);
numPacksDownloaded += results.packs.length;
logger.info(`Downloaded: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)
.join(", ")}`);
}
}
if (numPacksDownloaded > 0) {
logger.info(`Downloaded ${numPacksDownloaded} ${numPacksDownloaded === 1 ? "pack" : "packs"}`);
}
else {
logger.info("No packs to download");
}
logger.endGroup();
});
}
exports.downloadPacks = downloadPacks;
function createRegistriesBlock(registries) {
if (!Array.isArray(registries) ||
registries.some((r) => !r.url || !r.packages)) {
throw new Error("Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
}
// be sure to remove the `token` field from the registry before writing it to disk.
const safeRegistries = registries.map((registry) => ({
// ensure the url ends with a slash to avoid a bug in the CLI 2.10.4
url: !(registry === null || registry === void 0 ? void 0 : registry.url.endsWith("/")) ? `${registry.url}/` : registry.url,
packages: registry.packages,
}));
const qlconfig = {
registries: safeRegistries,
};
return qlconfig;
}
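To make the transformation above concrete, a sketch with placeholder registry values showing what createRegistriesBlock keeps and what it drops:

// Hypothetical input; the URL and token are placeholders.
const registries = [
    { url: "https://containers.example.com/v2", packages: ["my-org/*"], token: "not-a-real-token" },
];
const qlconfig = createRegistriesBlock(registries);
// yaml.dump(qlconfig) then produces roughly:
//   registries:
//     - url: https://containers.example.com/v2/
//       packages:
//         - my-org/*
// The token never reaches qlconfig.yml; it is only used to build the
// comma-separated CODEQL_REGISTRIES_AUTH value, for example
// "https://containers.example.com/v2=not-a-real-token".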
/**
* Create a temporary environment based on the existing environment, overridden
* by the environment variables passed in as arguments.
*
* Use this new environment in the context of the given operation. After completing
* the operation, restore the original environment.
*
* This function does not support un-setting environment variables.
*
* @param env The environment variable overrides to apply.
* @param operation The async operation to run inside the modified environment.
*/
async function wrapEnvironment(env, operation) {
// Remember the original env
const oldEnv = { ...process.env };
// Set the new env
for (const [key, value] of Object.entries(env)) {
// Ignore undefined keys
if (value !== undefined) {
process.env[key] = value;
}
}
try {
// Run the operation
await operation();
}
finally {
// Restore the old env
for (const [key, value] of Object.entries(oldEnv)) {
process.env[key] = value;
}
}
}
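A minimal usage sketch of wrapEnvironment with illustrative values: the overrides are visible only inside the callback, and the original values are restored afterwards, even if the callback throws.

// Assume process.env.GITHUB_TOKEN === "original-token" beforehand.
await wrapEnvironment({
    GITHUB_TOKEN: "temporary-token",
    CODEQL_REGISTRIES_AUTH: undefined, // undefined values are ignored, not unset
}, async () => {
    console.log(process.env.GITHUB_TOKEN); // "temporary-token"
});
console.log(process.env.GITHUB_TOKEN); // back to "original-token"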
//# sourceMappingURL=config-utils.js.map
File diff suppressed because one or more lines are too long
545 lib/config-utils.test.js (generated)
@@ -26,6 +26,7 @@ const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const github = __importStar(require("@actions/github"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
@@ -33,6 +34,7 @@ const configUtils = __importStar(require("./config-utils"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
@@ -40,6 +42,8 @@ const sampleApiDetails = {
|
||||
auth: "token",
|
||||
externalRepoAuth: "token",
|
||||
url: "https://github.example.com",
|
||||
apiURL: undefined,
|
||||
registriesAuthTokens: undefined,
|
||||
};
|
||||
const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
|
||||
// Returns the filepath of the newly-created file
|
||||
@@ -58,6 +62,7 @@ function mockGetContents(content) {
|
||||
.stub(client.repos, "getContent")
|
||||
.resolves(response);
|
||||
sinon.stub(api, "getApiClient").value(() => client);
|
||||
sinon.stub(api, "getApiClientWithExternalAuth").value(() => client);
|
||||
return spyGetContents;
|
||||
}
|
||||
function mockListLanguages(languages) {
|
||||
@@ -87,9 +92,12 @@ function mockListLanguages(languages) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), logger));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("loading config saves config", async (t) => {
|
||||
@@ -106,26 +114,31 @@ function mockListLanguages(languages) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
// Sanity check the saved config file does not already exist
|
||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// Sanity check that getConfig returns undefined before we have called initConfig
|
||||
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), logger);
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
const config2 = await configUtils.getConfig(tmpDir, logger);
|
||||
t.not(config2, undefined);
|
||||
if (config2 !== undefined) {
|
||||
t.deepEqual(config1, config2);
|
||||
// removes properties assigned to undefined.
|
||||
const expectedConfig = JSON.parse(JSON.stringify(config1));
|
||||
t.deepEqual(expectedConfig, config2);
|
||||
}
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("load input outside of workspace", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, "../input", undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -138,7 +151,7 @@ function mockListLanguages(languages) {
|
||||
// no filename given, just a repo
|
||||
const configFile = "octo-org/codeql-config@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -152,7 +165,7 @@ function mockListLanguages(languages) {
|
||||
const configFile = "input";
|
||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -175,6 +188,9 @@ function mockListLanguages(languages) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
// Just create a generic config object with non-default values for all fields
|
||||
const inputFileContents = `
|
||||
@@ -212,7 +228,6 @@ function mockListLanguages(languages) {
|
||||
paths: ["c/d"],
|
||||
},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
@@ -220,11 +235,13 @@ function mockListLanguages(languages) {
|
||||
debugMode: false,
|
||||
debugArtifactName: "my-artifact",
|
||||
debugDatabaseName: "my-db",
|
||||
injectedMlQueries: false,
|
||||
augmentationProperties: configUtils.defaultAugmentationProperties,
|
||||
trapCaches: {},
|
||||
trapCacheDownloadTime: 0,
|
||||
};
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
});
|
||||
@@ -250,6 +267,9 @@ function mockListLanguages(languages) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
// The important point of this config is that it doesn't specify
|
||||
// the disable-default-queries field.
|
||||
@@ -260,7 +280,7 @@ function mockListLanguages(languages) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolve queries was called correctly
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
||||
@@ -301,20 +321,23 @@ function queriesToResolvedQueryForm(queries) {
|
||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||
return queriesToResolvedQueryForm(queries);
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries
|
||||
// and once for `./foo` from the config file.
|
||||
t.deepEqual(resolveQueriesArgs.length, 2);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}foo`));
|
||||
// Now check that the end result contains the default queries and the query from config
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/foo$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}foo`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Queries from config file can be overridden in workflow file", async (t) => {
|
||||
@@ -334,20 +357,23 @@ function queriesToResolvedQueryForm(queries) {
|
||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||
return queriesToResolvedQueryForm(queries);
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries and once for `./override`,
|
||||
// but won't be called for './foo' from the config file.
|
||||
t.deepEqual(resolveQueriesArgs.length, 2);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}override`));
|
||||
// Now check that the end result contains only the default queries and the override query
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}override`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
|
||||
@@ -366,19 +392,22 @@ function queriesToResolvedQueryForm(queries) {
|
||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||
return queriesToResolvedQueryForm(queries);
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for `./workflow-query`,
|
||||
// but won't be called for the default one since that was disabled
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
|
||||
t.true(resolveQueriesArgs[0].queries[0].endsWith(`${path.sep}workflow-query`));
|
||||
// Now check that the end result contains only the workflow query, and not the default one
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 0);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/workflow-query$/);
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}workflow-query`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Multiple queries can be specified in workflow file, no config file required", async (t) => {
|
||||
@@ -392,23 +421,26 @@ function queriesToResolvedQueryForm(queries) {
|
||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||
return queriesToResolvedQueryForm(queries);
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly:
|
||||
// It'll be called once for the default queries,
|
||||
// and then once for each of the two queries from the workflow
|
||||
t.deepEqual(resolveQueriesArgs.length, 3);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
|
||||
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}override1`));
|
||||
t.true(resolveQueriesArgs[2].queries[0].endsWith(`${path.sep}override2`));
|
||||
// Now check that the end result contains both the queries from the workflow, as well as the defaults
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 2);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override1$/);
|
||||
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/override2$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}override1`));
|
||||
t.true(config.queries["javascript"].custom[1].queries[0].endsWith(`${path.sep}override2`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
|
||||
@@ -431,27 +463,30 @@ function queriesToResolvedQueryForm(queries) {
|
||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||
return queriesToResolvedQueryForm(queries);
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries,
|
||||
// once for each of additional1 and additional2,
|
||||
// and once for './foo' from the config file
|
||||
t.deepEqual(resolveQueriesArgs.length, 4);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/additional1$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}additional1`));
|
||||
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[2].queries[0], /.*\/additional2$/);
|
||||
t.true(resolveQueriesArgs[2].queries[0].endsWith(`${path.sep}additional2`));
|
||||
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
|
||||
t.true(resolveQueriesArgs[3].queries[0].endsWith(`${path.sep}foo`));
|
||||
// Now check that the end result contains all the queries
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 3);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/additional1$/);
|
||||
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/additional2$/);
|
||||
t.regex(config.queries["javascript"].custom[2].queries[0], /.*\/foo$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}additional1`));
|
||||
t.true(config.queries["javascript"].custom[1].queries[0].endsWith(`${path.sep}additional2`));
|
||||
t.true(config.queries["javascript"].custom[2].queries[0].endsWith(`${path.sep}foo`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Invalid queries in workflow file handled correctly", async (t) => {
|
||||
@@ -470,9 +505,12 @@ function queriesToResolvedQueryForm(queries) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.fail("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -494,6 +532,9 @@ function queriesToResolvedQueryForm(queries) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
@@ -515,7 +556,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
||||
const configFile = "octo-org/codeql-config/config.yaml@main";
|
||||
const languages = "javascript";
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
@@ -525,7 +566,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -541,7 +582,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -556,9 +597,12 @@ function queriesToResolvedQueryForm(queries) {
|
||||
async resolveLanguages() {
|
||||
return {};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -570,7 +614,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const languages = "rubbish,english";
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -588,6 +632,9 @@ function queriesToResolvedQueryForm(queries) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
@@ -598,7 +645,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
const configFile = path.join(tmpDir, "codeql-config.yaml");
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
const languages = "javascript";
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||
});
|
||||
@@ -616,6 +663,9 @@ function queriesToResolvedQueryForm(queries) {
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
@@ -632,7 +682,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript,python,cpp";
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example" }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||
[languages_1.Language.python]: ["c/d@1.2.3"],
|
||||
@@ -669,13 +719,16 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const configFile = "input";
|
||||
const inputFile = path.join(tmpDir, configFile);
|
||||
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -694,7 +747,7 @@ doInvalidInputTest("paths invalid type", `paths: 17`, configUtils.getPathsInvali
|
||||
doInvalidInputTest("queries uses invalid type", `
|
||||
queries:
|
||||
- uses:
|
||||
- hello: world`, configUtils.getQueryUsesInvalid);
|
||||
- hello: world`, configUtils.getQueriesMissingUses);
|
||||
function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
|
||||
// Invalid contents of a "queries.uses" field.
|
||||
// Should fail with the expected error message
|
||||
@@ -749,14 +802,14 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
|
||||
* Test macro for ensuring the packs block is valid
|
||||
*/
|
||||
const parsePacksMacro = ava_1.default.macro({
|
||||
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
|
||||
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b", mockLogger), expected),
|
||||
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
|
||||
});
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
const parsePacksErrorMacro = ava_1.default.macro({
|
||||
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
|
||||
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b", {}), {
|
||||
message: expected,
|
||||
}),
|
||||
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
|
||||
@@ -782,6 +835,12 @@ const invalidPackNameMacro = ava_1.default.macro({
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("two packs with unused language in config", parsePacksMacro, {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||
}, [languages_1.Language.cpp, languages_1.Language.csharp], {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("packs with other valid names", parsePacksMacro, [
|
||||
// ranges are ok
|
||||
"c/d@1.0",
|
||||
@@ -814,7 +873,6 @@ const invalidPackNameMacro = ava_1.default.macro({
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
|
||||
(0, ava_1.default)("invalid language", parsePacksErrorMacro, { [languages_1.Language.java]: ["c/d"] }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" has "java", but it is not one of the languages to analyze/);
|
||||
(0, ava_1.default)("not an array", parsePacksErrorMacro, { [languages_1.Language.cpp]: "c/d" }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" must be an array of non-empty strings/);
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c-/d");
|
||||
@@ -828,16 +886,58 @@ const invalidPackNameMacro = ava_1.default.macro({
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@../a");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@b/../a");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d:z@1");
/**
* Test macro for pretty printing pack specs
*/
const packSpecPrettyPrintingMacro = ava_1.default.macro({
exec: (t, packStr, packObj) => {
const parsed = configUtils.parsePacksSpecification(packStr);
t.deepEqual(parsed, packObj, "parsed pack spec is correct");
const stringified = configUtils.prettyPrintPack(packObj);
t.deepEqual(stringified, packStr.trim(), "pretty-printed pack spec is correct");
t.deepEqual(configUtils.validatePackSpecification(packStr), packStr.trim(), "pack spec is valid");
},
title: (_providedTitle, packStr,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_packObj) => `Prettyprint pack spec: '${packStr}'`,
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b", {
name: "a/b",
version: undefined,
path: undefined,
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b@~1.2.3", {
name: "a/b",
version: "~1.2.3",
path: undefined,
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b@~1.2.3:abc/def", {
name: "a/b",
version: "~1.2.3",
path: "abc/def",
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, "a/b:abc/def", {
name: "a/b",
version: undefined,
path: "abc/def",
});
(0, ava_1.default)(packSpecPrettyPrintingMacro, " a/b:abc/def ", {
name: "a/b",
version: undefined,
path: "abc/def",
});
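For context, these cases exercise the pack specification format scope/name[@version][:path]. The helper below is only an illustration of that shape, not the action's parsePacksSpecification implementation:

// Hypothetical helper splitting a pack spec into its three parts.
function splitPackSpec(packStr) {
    // e.g. "a/b@~1.2.3:abc/def" -> { name: "a/b", version: "~1.2.3", path: "abc/def" }
    const match = packStr.trim().match(/^([^@:]+)(?:@([^:]+))?(?::(.+))?$/);
    if (!match) {
        throw new Error(`"${packStr}" is not a valid pack`);
    }
    return { name: match[1], version: match[2], path: match[3] };
}
// The real parser additionally validates the scope/name and the semver range.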
/**
|
||||
* Test macro for testing the packs block and the packs input
|
||||
*/
|
||||
function parseInputAndConfigMacro(t, packsFromConfig, packsFromInput, languages, expected) {
|
||||
t.deepEqual(configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b"), expected);
|
||||
t.deepEqual(configUtils.parsePacks(packsFromConfig, packsFromInput, !!(packsFromInput === null || packsFromInput === void 0 ? void 0 : packsFromInput.trim().startsWith("+")), // coerce to boolean
|
||||
languages, "/a/b", mockLogger), expected);
|
||||
}
|
||||
parseInputAndConfigMacro.title = (providedTitle) => `Parse Packs input and config: ${providedTitle}`;
|
||||
function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, languages, expected) {
|
||||
const mockLogger = (0, logging_1.getRunnerLogger)(true);
|
||||
function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, languages, packsFromInputOverride, expected) {
|
||||
t.throws(() => {
|
||||
configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b");
|
||||
configUtils.parsePacks(packsFromConfig, packsFromInput, packsFromInputOverride, languages, "/a/b", mockLogger);
|
||||
}, {
|
||||
message: expected,
|
||||
});
|
||||
@@ -861,12 +961,12 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
|
||||
(0, ava_1.default)("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3", "a/b", "c/d"],
|
||||
});
|
||||
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
|
||||
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
||||
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
|
||||
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
|
||||
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], false, /No languages specified/);
|
||||
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], false, /multi-language analysis/);
|
||||
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], true, /remove the '\+'/);
|
||||
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], false, /"xxx" is not a valid pack/);
|
||||
const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => {
|
||||
exec: async (t, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getVersion() {
|
||||
@@ -881,10 +981,11 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
async packDownload() {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)(isMlPoweredQueriesEnabled ? [feature_flags_1.Feature.MlPoweredQueriesEnabled] : []), (0, logging_1.getRunnerLogger)(true));
|
||||
if (expectedVersionString !== undefined) {
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
@@ -897,24 +998,322 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
}
|
||||
});
|
||||
},
|
||||
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
|
||||
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
|
||||
? `${expectedVersionString} are`
|
||||
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
|
||||
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature is ${isMlPoweredQueriesEnabled ? "enabled" : "disabled"}`,
|
||||
});
|
||||
// macro, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString
|
||||
// Test that ML-powered queries aren't run on v2.7.4 of the CLI.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
|
||||
// Test that ML-powered queries aren't run when the feature flag is off.
|
||||
// macro, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString
|
||||
// Test that ML-powered queries aren't run when the feature is off.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
|
||||
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
|
||||
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
|
||||
// `security-extended` or `security-and-quality` query suite.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
|
||||
// Test that ML-powered queries are run on non-Windows platforms running `security-extended`.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
|
||||
// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality`.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.1.0");
|
||||
// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
|
||||
// versions of the CodeQL CLI prior to 2.9.0.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
|
||||
// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality` on
|
||||
// versions of the CodeQL CLI prior to 2.9.0.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.2.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL CLI
|
||||
// 2.9.0+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-extended", "~0.2.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
|
||||
// CLI 2.9.0+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-and-quality", "~0.2.0");
|
||||
// Test that we don't inject an ML-powered query pack if the user has already specified one.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", process.platform === "win32" ? undefined : "0.0.1");
|
||||
// Test that the ~0.2.0 version of ML-powered queries is run on v2.8.4 of the CLI.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.4", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
|
||||
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL
|
||||
// CLI 2.9.3+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-extended", "~0.3.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
|
||||
// CLI 2.9.3+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-and-quality", "~0.3.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL
|
||||
// CLI 2.11.3+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.11.3", true, undefined, "security-extended", "~0.4.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
|
||||
// CLI 2.11.3+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.11.3", true, undefined, "security-and-quality", "~0.4.0");
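As a reading aid, the expectations encoded in the tests above can be summarized by the sketch below. It assumes the ML-powered queries feature is enabled, a security-extended or security-and-quality suite was requested, and the user supplied no ATM pack of their own; it is a summary of the test table, not the action's own selection logic.

const semver = require("semver");
// Expected codeql/javascript-experimental-atm-queries version per CLI version,
// as asserted by the tests above. Returns undefined when no pack is injected.
function expectedMlPackVersion(cliVersion, isWindows) {
    if (semver.lt(cliVersion, "2.7.5")) return undefined;
    if (isWindows && semver.lt(cliVersion, "2.9.0")) return undefined;
    if (semver.lt(cliVersion, "2.8.4")) return "~0.1.0";
    if (semver.lt(cliVersion, "2.9.3")) return "~0.2.0";
    if (semver.lt(cliVersion, "2.11.3")) return "~0.3.0";
    return "~0.4.0";
}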
const calculateAugmentationMacro = ava_1.default.macro({
|
||||
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
|
||||
const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
|
||||
t.deepEqual(actualAugmentationProperties, expectedAugmentationProperties);
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation: ${title}`,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "All empty", undefined, undefined, [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
queriesInput: undefined,
|
||||
packsInputCombines: false,
|
||||
packsInput: undefined,
|
||||
injectedMlQueries: false,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With queries", undefined, " a, b , c, d", [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
packsInputCombines: false,
|
||||
packsInput: undefined,
|
||||
injectedMlQueries: false,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With queries combining", undefined, " + a, b , c, d ", [languages_1.Language.javascript], {
|
||||
queriesInputCombines: true,
|
||||
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
|
||||
packsInputCombines: false,
|
||||
packsInput: undefined,
|
||||
injectedMlQueries: false,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With packs", " codeql/a , codeql/b , codeql/c , codeql/d ", undefined, [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
queriesInput: undefined,
|
||||
packsInputCombines: false,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
injectedMlQueries: false,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationMacro, "With packs combining", " + codeql/a, codeql/b, codeql/c, codeql/d", undefined, [languages_1.Language.javascript], {
|
||||
queriesInputCombines: false,
|
||||
queriesInput: undefined,
|
||||
packsInputCombines: true,
|
||||
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
|
||||
injectedMlQueries: false,
|
||||
});
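The cases above illustrate the '+' convention for the packs and queries inputs: a leading '+' means the input is combined with the configuration file rather than replacing it, and the '+' itself is stripped before parsing. Abridged from the assertions above:

configUtils.calculateAugmentation(" + codeql/a, codeql/b", undefined, [languages_1.Language.javascript]);
// => { packsInputCombines: true,  packsInput: ["codeql/a", "codeql/b"], ... }
configUtils.calculateAugmentation(" codeql/a, codeql/b ", undefined, [languages_1.Language.javascript]);
// => { packsInputCombines: false, packsInput: ["codeql/a", "codeql/b"], ... }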
const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedError) => {
|
||||
t.throws(() => configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages), { message: expectedError });
|
||||
},
|
||||
title: (_, title) => `Calculate Augmentation Error: ${title}`,
|
||||
});
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (queries)", undefined, " + ", [languages_1.Language.javascript], /The workflow property "queries" is invalid/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Plus (+) with nothing else (packs)", " + ", undefined, [languages_1.Language.javascript], /The workflow property "packs" is invalid/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with multiple languages", " + a/b, c/d ", undefined, [languages_1.Language.javascript, languages_1.Language.java], /Cannot specify a 'packs' input in a multi-language analysis/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with no languages", " + a/b, c/d ", undefined, [], /No languages specified/);
|
||||
(0, ava_1.default)(calculateAugmentationErrorMacro, "Invalid packs", " a-pack-without-a-scope ", undefined, [languages_1.Language.javascript], /"a-pack-without-a-scope" is not a valid pack/);
|
||||
(0, ava_1.default)("downloadPacks-no-registries", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const packDownloadStub = sinon.stub();
|
||||
packDownloadStub.callsFake((packs) => ({
|
||||
packs,
|
||||
}));
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
packDownload: packDownloadStub,
|
||||
});
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
// packs are supplied for go, java, and python
|
||||
// analyzed languages are java, javascript, and python
|
||||
await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {
|
||||
java: ["a", "b"],
|
||||
go: ["c", "d"],
|
||||
python: ["e", "f"],
|
||||
}, undefined, // registries
|
||||
sampleApiDetails, tmpDir, logger);
|
||||
// Expecting packs to be downloaded once for java and once for python
|
||||
t.deepEqual(packDownloadStub.callCount, 2);
|
||||
// no config file was created, so pass `undefined` as the config file path
|
||||
t.deepEqual(packDownloadStub.firstCall.args, [["a", "b"], undefined]);
|
||||
t.deepEqual(packDownloadStub.secondCall.args, [["e", "f"], undefined]);
|
||||
});
|
||||
});
(0, ava_1.default)("downloadPacks-with-registries", async (t) => {
    // same thing, but this time include a registries block and
    // associated env vars
    return await util.withTmpDir(async (tmpDir) => {
        process.env.GITHUB_TOKEN = "not-a-token";
        process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
        const logger = (0, logging_1.getRunnerLogger)(true);
        const registries = [
            {
                // no slash
                url: "http://ghcr.io",
                packages: ["codeql/*", "dsp-testing/*"],
                token: "not-a-token",
            },
            {
                // with slash
                url: "https://containers.GHEHOSTNAME1/v2/",
                packages: "semmle/*",
                token: "still-not-a-token",
            },
        ];
        // append a slash to the first url
        const expectedRegistries = registries.map((r, i) => ({
            packages: r.packages,
            url: i === 0 ? `${r.url}/` : r.url,
        }));
        const expectedConfigFile = path.join(tmpDir, "qlconfig.yml");
        const packDownloadStub = sinon.stub();
        packDownloadStub.callsFake((packs, configFile) => {
            t.deepEqual(configFile, expectedConfigFile);
            // verify the env vars were set correctly
            t.deepEqual(process.env.GITHUB_TOKEN, sampleApiDetails.auth);
            t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, "http://ghcr.io=not-a-token,https://containers.GHEHOSTNAME1/v2/=still-not-a-token");
            // verify the config file contents were set correctly
            const config = yaml.load(fs.readFileSync(configFile, "utf8"));
            t.deepEqual(config.registries, expectedRegistries);
            return {
                packs,
            };
        });
        const codeQL = (0, codeql_1.setCodeQL)({
            packDownload: packDownloadStub,
            getVersion: () => Promise.resolve("2.10.5"),
        });
        // packs are supplied for go, java, and python
        // analyzed languages are java, javascript, and python
        await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {
            java: ["a", "b"],
            go: ["c", "d"],
            python: ["e", "f"],
        }, registries, sampleApiDetails, tmpDir, logger);
        // Same packs are downloaded as in previous test
        t.deepEqual(packDownloadStub.callCount, 2);
        t.deepEqual(packDownloadStub.firstCall.args, [
            ["a", "b"],
            expectedConfigFile,
        ]);
        t.deepEqual(packDownloadStub.secondCall.args, [
            ["e", "f"],
            expectedConfigFile,
        ]);
        // Verify that the env vars were restored to their original values.
        t.deepEqual(process.env.GITHUB_TOKEN, "not-a-token");
        t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, "not-a-registries-auth");
    });
});
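// --- Illustrative sketch (not part of the original test suite) ---
// A hedged restatement of the qlconfig.yml shape asserted above: the config
// file written for downloadPacks contains a `registries` key whose entries
// keep only `packages` and `url`, with a trailing slash appended where it was
// missing. The `jsYaml` require and variable name here are illustrative only.
const jsYaml = require("js-yaml");
const sketchedQlconfig = jsYaml.dump({
    registries: [
        { packages: ["codeql/*", "dsp-testing/*"], url: "http://ghcr.io/" },
        { packages: "semmle/*", url: "https://containers.GHEHOSTNAME1/v2/" },
    ],
});
// sketchedQlconfig is the YAML text form of the `expectedRegistries` checked above.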
(0, ava_1.default)("downloadPacks-with-registries fails on 2.10.3", async (t) => {
    // same packs and registries, but on a CodeQL version that is too old to
    // support the 'registries' input
    return await util.withTmpDir(async (tmpDir) => {
        process.env.GITHUB_TOKEN = "not-a-token";
        process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
        const logger = (0, logging_1.getRunnerLogger)(true);
        const registries = [
            {
                url: "http://ghcr.io",
                packages: ["codeql/*", "dsp-testing/*"],
                token: "not-a-token",
            },
            {
                url: "https://containers.GHEHOSTNAME1/v2/",
                packages: "semmle/*",
                token: "still-not-a-token",
            },
        ];
        const codeQL = (0, codeql_1.setCodeQL)({
            getVersion: () => Promise.resolve("2.10.3"),
        });
        await t.throwsAsync(async () => {
            return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
        }, { instanceOf: Error }, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
    });
});
(0, ava_1.default)("downloadPacks-with-registries fails with invalid registries block", async (t) => {
    // same packs, but with a registries block whose first entry is missing
    // the required url property
    return await util.withTmpDir(async (tmpDir) => {
        process.env.GITHUB_TOKEN = "not-a-token";
        process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
        const logger = (0, logging_1.getRunnerLogger)(true);
        const registries = [
            {
                // missing url property
                packages: ["codeql/*", "dsp-testing/*"],
                token: "not-a-token",
            },
            {
                url: "https://containers.GHEHOSTNAME1/v2/",
                packages: "semmle/*",
                token: "still-not-a-token",
            },
        ];
        const codeQL = (0, codeql_1.setCodeQL)({
            getVersion: () => Promise.resolve("2.10.4"),
        });
        await t.throwsAsync(async () => {
            return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
        }, { instanceOf: Error }, "Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
    });
});
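// --- Illustrative sketch (not part of the original test suite) ---
// Hedged example of the minimal shape each `registries` entry needs to pass
// the validation exercised above: both `url` and `packages` are required, per
// the error message asserted in the test. The variable name and URL below are
// illustrative only.
const sketchedMinimalRegistryEntry = {
    url: "https://ghcr.io/",
    packages: ["codeql/*"],
};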
// getLanguages
const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
// eslint-disable-next-line github/array-foreach
[
    {
        name: "languages from input",
        codeqlResolvedLanguages: ["javascript", "java", "python"],
        languagesInput: "jAvAscript, \n jaVa",
        languagesInRepository: ["SwiFt", "other"],
        expectedLanguages: ["javascript", "java"],
        expectedApiCall: false,
    },
    {
        name: "languages from github api",
        codeqlResolvedLanguages: ["javascript", "java", "python"],
        languagesInput: "",
        languagesInRepository: [" jAvAscript\n \t", " jaVa", "SwiFt", "other"],
        expectedLanguages: ["javascript", "java"],
        expectedApiCall: true,
    },
    {
        name: "aliases from input",
        codeqlResolvedLanguages: ["javascript", "csharp", "cpp", "java", "python"],
        languagesInput: " typEscript\n \t, C#, c , KoTlin",
        languagesInRepository: ["SwiFt", "other"],
        expectedLanguages: ["javascript", "csharp", "cpp", "java"],
        expectedApiCall: false,
    },
    {
        name: "duplicate languages from input",
        codeqlResolvedLanguages: ["javascript", "java", "python"],
        languagesInput: "jAvAscript, \n jaVa, kotlin, typescript",
        languagesInRepository: ["SwiFt", "other"],
        expectedLanguages: ["javascript", "java"],
        expectedApiCall: false,
    },
    {
        name: "aliases from github api",
        codeqlResolvedLanguages: ["javascript", "csharp", "cpp", "java", "python"],
        languagesInput: "",
        languagesInRepository: [" typEscript\n \t", " C#", "c", "other"],
        expectedLanguages: ["javascript", "csharp", "cpp"],
        expectedApiCall: true,
    },
    {
        name: "no languages",
        codeqlResolvedLanguages: ["javascript", "java", "python"],
        languagesInput: "",
        languagesInRepository: [],
        expectedApiCall: true,
        expectedError: configUtils.getNoLanguagesError(),
    },
    {
        name: "unrecognized languages from input",
        codeqlResolvedLanguages: ["javascript", "java", "python"],
        languagesInput: "a, b, c, javascript",
        languagesInRepository: [],
        expectedApiCall: false,
        expectedError: configUtils.getUnknownLanguagesError(["a", "b"]),
    },
].forEach((args) => {
    (0, ava_1.default)(`getLanguages: ${args.name}`, async (t) => {
        const mockRequest = (0, testing_utils_1.mockLanguagesInRepo)(args.languagesInRepository);
        const languages = args.codeqlResolvedLanguages.reduce((acc, lang) => ({
            ...acc,
            [lang]: true,
        }), {});
        const codeQL = (0, codeql_1.setCodeQL)({
            resolveLanguages: () => Promise.resolve(languages),
        });
        if (args.expectedLanguages) {
            // happy path
            const actualLanguages = await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, mockLogger);
            t.deepEqual(actualLanguages.sort(), args.expectedLanguages.sort());
        }
        else {
            // there is an error
            await t.throwsAsync(async () => await configUtils.getLanguages(codeQL, args.languagesInput, mockRepositoryNwo, mockLogger), { message: args.expectedError });
        }
        t.deepEqual(mockRequest.called, args.expectedApiCall);
    });
});
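// --- Illustrative sketch (not part of the original test suite) ---
// A hedged summary of the alias handling exercised by the cases above:
// user-supplied names are trimmed, lower-cased, and mapped onto CodeQL
// language names before deduplication. The mapping below is derived from the
// expectations in the test cases, not from the production parser.
const sketchedAliasExamples = {
    typescript: "javascript",
    "c#": "csharp",
    c: "cpp",
    kotlin: "java",
};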
//# sourceMappingURL=config-utils.test.js.map