Mirror of https://github.com/github/codeql-action.git
Synced 2025-12-06 15:58:06 +08:00
Compare commits: nickfyson/...codeql-bun
2129 Commits
[Commit table (Author | SHA1 | Date): 2129 entries; only abbreviated SHA1 values, from 822fe5ef9a through 95cef22589, are present in this capture — the author, date, and message cells were not rendered.]
5 .eslintignore Normal file
@@ -0,0 +1,5 @@
**/webpack.config.js
lib/**
runner/dist/**
src/testdata/**
tests/**
59 .eslintrc.json Normal file
@@ -0,0 +1,59 @@
{
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "project": "./tsconfig.json"
  },
  "plugins": ["@typescript-eslint", "filenames", "github", "import", "no-async-foreach"],
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:@typescript-eslint/recommended-requiring-type-checking",
    "plugin:github/recommended",
    "plugin:github/typescript",
    "plugin:import/typescript"
  ],
  "rules": {
    "filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
    "i18n-text/no-en": "off",
    "import/extensions": "error",
    "import/no-amd": "error",
    "import/no-commonjs": "error",
    "import/no-dynamic-require": "error",
    // Disable the rule that checks that devDependencies aren't imported since we use a single
    // linting configuration file for both source and test code.
    "import/no-extraneous-dependencies": ["error", {"devDependencies": true}],
    "import/no-namespace": "off",
    "import/no-unresolved": "error",
    "import/no-webpack-loader-syntax": "error",
    "import/order": ["error", {
      "alphabetize": {"order": "asc"},
      "newlines-between": "always"
    }],
    "no-async-foreach/no-async-foreach": "error",
    "no-console": "off",
    "no-sequences": "error",
    "no-shadow": "off",
    "@typescript-eslint/no-shadow": ["error"],
    "one-var": ["error", "never"]
  },
  "overrides": [{
    // "temporarily downgraded during transition to eslint
    "files": "**",
    "rules": {
      "@typescript-eslint/ban-types": "off",
      "@typescript-eslint/explicit-module-boundary-types": "off",
      "@typescript-eslint/no-explicit-any": "off",
      "@typescript-eslint/no-unsafe-assignment": "off",
      "@typescript-eslint/no-unsafe-call": "off",
      "@typescript-eslint/no-unsafe-member-access": "off",
      "@typescript-eslint/no-unsafe-return": "off",
      "@typescript-eslint/no-var-requires": "off",
      "@typescript-eslint/prefer-regexp-exec": "off",
      "@typescript-eslint/require-await": "off",
      "@typescript-eslint/restrict-template-expressions": "off",
      "func-style": "off",
      "sort-imports": "off"
    }
  }]
}
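This configuration is only as useful as the command that runs it. The step below is a minimal sketch of a CI lint step that would exercise it; the step names and the exact ESLint invocation are assumptions, since the repository's actual lint script is not part of this diff.

```yaml
# Sketch of a CI step exercising the ESLint configuration above.
# The invocation details are assumed; the repository's real lint
# script is not shown in this diff.
steps:
  - uses: actions/checkout@v2
  - name: Install dependencies
    run: npm ci
  - name: Lint TypeScript sources
    run: npx eslint . --ext .ts
```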
8 .gitattributes vendored
@@ -1 +1,9 @@
lib/*.js linguist-generated=true
.github/workflows/__* linguist-generated=true

# Reduce incidence of needless merge conflicts on CHANGELOG.md
# The man page at
# https://mirrors.edge.kernel.org/pub/software/scm/git/docs/gitattributes.html
# suggests that this might interleave lines arbitrarily, but empirically
# it keeps added chunks contiguous
CHANGELOG.md merge=union
4 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,5 +1,5 @@
blank_issues_enabled: true
contact_links:
  - name: Contact GitHub Support
    url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
    about: Contact Support about code scanning
    url: https://support.github.com/request
    about: Contact Support
20 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,20 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "thursday" # Gives us a working day to merge this before our typical release
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
  - package-ecosystem: "npm"
    directory: "/runner"
    schedule:
      interval: "weekly"
      day: "thursday" # Gives us a working day to merge this before our typical release
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
38 .github/prepare-test/action.yml vendored Normal file
@@ -0,0 +1,38 @@
name: "Prepare test"
description: Performs some preparation to run tests
inputs:
  version:
    required: true
outputs:
  tools-url:
    value: ${{ steps.get-url.outputs.tools-url }}
runs:
  using: composite
  steps:
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
        mv ../action/.github/workflows .github
    - id: get-url
      name: Determine URL
      shell: bash
      run: |
        if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
          export LATEST=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
          echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
          export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
          echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == *"stable"* ]]; then
          export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
          echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == "latest" ]]; then
          echo "::set-output name=tools-url::latest"
        elif [[ ${{ inputs.version }} == "cached" ]]; then
          echo "::set-output name=tools-url::"
        else
          echo "::error Unrecognized version specified!"
        fi
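For context, a composite action like this is consumed from a workflow job that supplies the `version` input and reads the `tools-url` output (in practice the resolved URL would be passed on to the CodeQL init step). The sketch below is a hypothetical caller job; only the input and output names come from the action definition above.

```yaml
# Hypothetical caller job (not part of this diff): runs the composite
# action above and reads its tools-url output.
jobs:
  pr-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: nightly-latest
      - name: Show resolved CodeQL bundle URL
        shell: bash
        run: echo "${{ steps.prepare-test.outputs.tools-url }}"
```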
3 .github/pull_request_template.md vendored
@@ -1,4 +1,5 @@
### Merge / deployment checklist

- [ ] Confirm this change is backwards compatible with existing workflows.
- [ ] Confirm the [readme](https://github.com/github/codeql-action/blob/master/README.md) has been updated if necessary.
- [ ] Confirm the [readme](https://github.com/github/codeql-action/blob/main/README.md) has been updated if necessary.
- [ ] Confirm the [changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) has been updated if necessary.
296
.github/update-release-branch.py
vendored
296
.github/update-release-branch.py
vendored
@@ -1,25 +1,37 @@
|
||||
import argparse
|
||||
import datetime
|
||||
from github import Github
|
||||
import random
|
||||
import requests
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# The branch being merged from.
|
||||
# This is the one that contains day-to-day development work.
|
||||
MAIN_BRANCH = 'main'
|
||||
# The branch being merged into.
|
||||
# This is the release branch that users reference.
|
||||
LATEST_RELEASE_BRANCH = 'v1'
|
||||
EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog
|
||||
|
||||
## [UNRELEASED]
|
||||
|
||||
No user facing changes.
|
||||
|
||||
"""
|
||||
|
||||
# Value of the mode flag for a v1 release
|
||||
V1_MODE = 'v1-release'
|
||||
|
||||
# Value of the mode flag for a v2 release
|
||||
V2_MODE = 'v2-release'
|
||||
|
||||
SOURCE_BRANCH_FOR_MODE = { V1_MODE: 'releases/v2', V2_MODE: 'main' }
|
||||
TARGET_BRANCH_FOR_MODE = { V1_MODE: 'releases/v1', V2_MODE: 'releases/v2' }
|
||||
|
||||
# Name of the remote
|
||||
ORIGIN = 'origin'
|
||||
|
||||
# Runs git with the given args and returns the stdout.
|
||||
# Raises an error if git does not exit successfully.
|
||||
def run_git(*args):
|
||||
# Raises an error if git does not exit successfully (unless passed
|
||||
# allow_non_zero_exit_code=True).
|
||||
def run_git(*args, allow_non_zero_exit_code=False):
|
||||
cmd = ['git', *args]
|
||||
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if (p.returncode != 0):
|
||||
if not allow_non_zero_exit_code and p.returncode != 0:
|
||||
raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
|
||||
return p.stdout.decode('ascii')
|
||||
|
||||
@@ -27,15 +39,17 @@ def run_git(*args):
|
||||
def branch_exists_on_remote(branch_name):
|
||||
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
|
||||
|
||||
# Opens a PR from the given branch to the release branch
|
||||
def open_pr(repo, all_commits, short_main_sha, branch_name):
|
||||
# Opens a PR from the given branch to the target branch
|
||||
def open_pr(
|
||||
repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch,
|
||||
conductor, is_v2_release, labels, conflicted_files):
|
||||
# Sort the commits into the pull requests that introduced them,
|
||||
# and any commits that don't have a pull request
|
||||
pull_requests = []
|
||||
commits_without_pull_requests = []
|
||||
for commit in all_commits:
|
||||
pr = get_pr_for_commit(repo, commit)
|
||||
|
||||
|
||||
if pr is None:
|
||||
commits_without_pull_requests.append(commit)
|
||||
elif not any(p for p in pull_requests if p.number == pr.number):
|
||||
@@ -47,55 +61,67 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
|
||||
# Sort PRs and commits by age
|
||||
pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
|
||||
commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)
|
||||
|
||||
# Start constructing the body text
|
||||
body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH
|
||||
|
||||
conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
|
||||
body += '\n\nConductor for this PR is @' + conductor
|
||||
# Start constructing the body text
|
||||
body = []
|
||||
body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)
|
||||
|
||||
body.append('')
|
||||
body.append('Conductor for this PR is @' + conductor)
|
||||
|
||||
# List all PRs merged
|
||||
if len(pull_requests) > 0:
|
||||
body += '\n\nContains the following pull requests:'
|
||||
body.append('')
|
||||
body.append('Contains the following pull requests:')
|
||||
for pr in pull_requests:
|
||||
merger = get_merger_of_pr(repo, pr)
|
||||
body += '\n- #' + str(pr.number)
|
||||
body += ' - ' + pr.title
|
||||
body += ' (@' + merger + ')'
|
||||
|
||||
body.append('- #' + str(pr.number) + ' - ' + pr.title +' (@' + merger + ')')
|
||||
|
||||
# List all commits not part of a PR
|
||||
if len(commits_without_pull_requests) > 0:
|
||||
body += '\n\nContains the following commits not from a pull request:'
|
||||
body.append('')
|
||||
body.append('Contains the following commits not from a pull request:')
|
||||
for commit in commits_without_pull_requests:
|
||||
body += '\n- ' + commit.sha
|
||||
body += ' - ' + get_truncated_commit_message(commit)
|
||||
body += ' (@' + commit.author.login + ')'
|
||||
author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
|
||||
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)
|
||||
|
||||
title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
|
||||
body.append('')
|
||||
body.append('Please review the following:')
|
||||
if len(conflicted_files) > 0:
|
||||
body.append(' - [ ] You have added commits to this branch that resolve the merge conflicts ' +
|
||||
'in the following files:')
|
||||
body.extend([f' - [ ] `{file}`' for file in conflicted_files])
|
||||
body.append(' - [ ] Another maintainer has reviewed the additional commits you added to this ' +
|
||||
'branch to resolve the merge conflicts.')
|
||||
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
|
||||
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
|
||||
body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
|
||||
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
|
||||
if is_v2_release:
|
||||
body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
|
||||
body.append(' - [ ] The v1 release PR is merged after this PR is merged.')
|
||||
|
||||
title = 'Merge ' + source_branch + ' into ' + target_branch
|
||||
|
||||
# Create the pull request
|
||||
pr = repo.create_pull(title=title, body=body, head=branch_name, base=LATEST_RELEASE_BRANCH)
|
||||
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
|
||||
# a maintainer can take the PR out of draft, thereby triggering the PR checks.
|
||||
pr = repo.create_pull(title=title, body='\n'.join(body), head=new_branch_name, base=target_branch, draft=True)
|
||||
pr.add_to_labels(*labels)
|
||||
print('Created PR #' + str(pr.number))
|
||||
|
||||
# Assign the conductor
|
||||
pr.add_to_assignees(conductor)
|
||||
print('Assigned PR to ' + conductor)
|
||||
|
||||
# Gets the person who should be in charge of the mergeback PR
|
||||
def get_conductor(repo, pull_requests, other_commits):
|
||||
# If there are any PRs then use whoever merged the last one
|
||||
if len(pull_requests) > 0:
|
||||
return get_merger_of_pr(repo, pull_requests[-1])
|
||||
|
||||
# Otherwise take the author of the latest commit
|
||||
return other_commits[-1].author.login
|
||||
|
||||
# Gets a list of the SHAs of all commits that have happened on main
|
||||
# since the release branched off.
|
||||
# This will not include any commits that exist on the release branch
|
||||
# that aren't on main.
|
||||
def get_commit_difference(repo):
|
||||
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')
|
||||
# Gets a list of the SHAs of all commits that have happened on the source branch
|
||||
# since the last release to the target branch.
|
||||
# This will not include any commits that exist on the target branch
|
||||
# that aren't on the source branch.
|
||||
def get_commit_difference(repo, source_branch, target_branch):
|
||||
# Passing no separator to split means that the empty string splits to an empty list: compare `''.split() == []`
|
||||
# to `''.split('\n') == ['']`.
|
||||
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + target_branch + '..' + ORIGIN + '/' + source_branch).strip().split()
|
||||
|
||||
# Convert to full-fledged commit objects
|
||||
commits = [repo.get_commit(c) for c in commits]
|
||||
@@ -105,7 +131,7 @@ def get_commit_difference(repo):
|
||||
|
||||
# Is the given commit the automatic merge commit from when merging a PR
|
||||
def is_pr_merge_commit(commit):
|
||||
return commit.committer.login == 'web-flow' and len(commit.parents) > 1
|
||||
return commit.committer is not None and commit.committer.login == 'web-flow' and len(commit.parents) > 1
|
||||
|
||||
# Gets a copy of the commit message that should display nicely
|
||||
def get_truncated_commit_message(commit):
|
||||
@@ -115,16 +141,16 @@ def get_truncated_commit_message(commit):
|
||||
else:
|
||||
return message
|
||||
|
||||
# Converts a commit into the PR that introduced it to the main branch.
|
||||
# Converts a commit into the PR that introduced it to the source branch.
|
||||
# Returns the PR object, or None if no PR could be found.
|
||||
def get_pr_for_commit(repo, commit):
|
||||
prs = commit.get_pulls()
|
||||
|
||||
|
||||
if prs.totalCount > 0:
|
||||
# In the case that there are multiple PRs, return the earliest one
|
||||
prs = list(prs)
|
||||
sorted(prs, key=lambda pr: int(pr.number))
|
||||
return prs[0]
|
||||
sorted_prs = sorted(prs, key=lambda pr: int(pr.number))
|
||||
return sorted_prs[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
@@ -135,29 +161,88 @@ def get_pr_for_commit(repo, commit):
|
||||
def get_merger_of_pr(repo, pr):
|
||||
return repo.get_commit(pr.merge_commit_sha).author.login
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 3:
|
||||
raise Exception('Usage: update-release-branch.py <github token> <repository nwo>')
|
||||
github_token = sys.argv[1]
|
||||
repository_nwo = sys.argv[2]
|
||||
def get_current_version():
|
||||
with open('package.json', 'r') as f:
|
||||
return json.load(f)['version']
|
||||
|
||||
repo = Github(github_token).get_repo(repository_nwo)
|
||||
def get_today_string():
|
||||
today = datetime.datetime.today()
|
||||
return '{:%d %b %Y}'.format(today)
|
||||
|
||||
def update_changelog(version):
|
||||
if (os.path.exists('CHANGELOG.md')):
|
||||
content = ''
|
||||
with open('CHANGELOG.md', 'r') as f:
|
||||
content = f.read()
|
||||
else:
|
||||
content = EMPTY_CHANGELOG
|
||||
|
||||
newContent = content.replace('[UNRELEASED]', version + ' - ' + get_today_string(), 1)
|
||||
|
||||
with open('CHANGELOG.md', 'w') as f:
|
||||
f.write(newContent)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser('update-release-branch.py')
|
||||
|
||||
parser.add_argument(
|
||||
'--github-token',
|
||||
type=str,
|
||||
required=True,
|
||||
help='GitHub token, typically from GitHub Actions.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--repository-nwo',
|
||||
type=str,
|
||||
required=True,
|
||||
help='The nwo of the repository, for example github/codeql-action.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--mode',
|
||||
type=str,
|
||||
required=True,
|
||||
choices=[V2_MODE, V1_MODE],
|
||||
help=f"Which release to perform. '{V2_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V2_MODE]} as the source " +
|
||||
f"branch and {TARGET_BRANCH_FOR_MODE[V2_MODE]} as the target branch. " +
|
||||
f"'{V1_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V1_MODE]} as the source branch and " +
|
||||
f"{TARGET_BRANCH_FOR_MODE[V1_MODE]} as the target branch."
|
||||
)
|
||||
parser.add_argument(
|
||||
'--conductor',
|
||||
type=str,
|
||||
required=True,
|
||||
help='The GitHub handle of the person who is conducting the release process.'
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
source_branch = SOURCE_BRANCH_FOR_MODE[args.mode]
|
||||
target_branch = TARGET_BRANCH_FOR_MODE[args.mode]
|
||||
|
||||
repo = Github(args.github_token).get_repo(args.repository_nwo)
|
||||
version = get_current_version()
|
||||
|
||||
if args.mode == V1_MODE:
|
||||
# Change the version number to a v1 equivalent
|
||||
version = get_current_version()
|
||||
version = f'1{version[1:]}'
|
||||
|
||||
# Print what we intend to do
|
||||
print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
|
||||
short_main_sha = run_git('rev-parse', '--short', MAIN_BRANCH).strip()
|
||||
print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
|
||||
print('Considering difference between ' + source_branch + ' and ' + target_branch)
|
||||
source_branch_short_sha = run_git('rev-parse', '--short', ORIGIN + '/' + source_branch).strip()
|
||||
print('Current head of ' + source_branch + ' is ' + source_branch_short_sha)
|
||||
|
||||
# See if there are any commits to merge in
|
||||
commits = get_commit_difference(repo)
|
||||
commits = get_commit_difference(repo=repo, source_branch=source_branch, target_branch=target_branch)
|
||||
if len(commits) == 0:
|
||||
print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
|
||||
print('No commits to merge from ' + source_branch + ' to ' + target_branch)
|
||||
return
|
||||
|
||||
# The branch name is based off of the name of branch being merged into
|
||||
# and the SHA of the branch being merged from. Thus if the branch already
|
||||
# exists we can assume we don't need to recreate it.
|
||||
new_branch_name = 'update-' + LATEST_RELEASE_BRANCH + '-' + short_main_sha
|
||||
new_branch_name = 'update-v' + version + '-' + source_branch_short_sha
|
||||
print('Branch name is ' + new_branch_name)
|
||||
|
||||
# Check if the branch already exists. If so we can abort as this script
|
||||
@@ -165,14 +250,93 @@ def main():
|
||||
if branch_exists_on_remote(new_branch_name):
|
||||
print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
|
||||
return
|
||||
|
||||
|
||||
# Create the new branch and push it to the remote
|
||||
print('Creating branch ' + new_branch_name)
|
||||
run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
|
||||
|
||||
# The process of creating the v1 release can run into merge conflicts. We commit the unresolved
|
||||
# conflicts so a maintainer can easily resolve them (vs erroring and requiring maintainers to
|
||||
# reconstruct the release manually)
|
||||
conflicted_files = []
|
||||
|
||||
if args.mode == V1_MODE:
|
||||
# If we're performing a backport, start from the target branch
|
||||
print(f'Creating {new_branch_name} from the {ORIGIN}/{target_branch} branch')
|
||||
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{target_branch}')
|
||||
|
||||
# Revert the commit that we made as part of the last release that updated the version number and
|
||||
# changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
|
||||
# package.json files when we merge in the v2 branch.
|
||||
# This commit will not exist the first time we release the v1 branch from the v2 branch, so we
|
||||
# use `git log --grep` to conditionally revert the commit.
|
||||
print('Reverting the 1.x.x version number and changelog updates from the last release to avoid conflicts')
|
||||
v1_update_commits = run_git('log', '--grep', '^Update version and changelog for v', '--format=%H').split()
|
||||
|
||||
if len(v1_update_commits) > 0:
|
||||
print(f' Reverting {v1_update_commits[0]}')
|
||||
# Only revert the newest commit as older ones will already have been reverted in previous
|
||||
# releases.
|
||||
run_git('revert', v1_update_commits[0], '--no-edit')
|
||||
|
||||
# Also revert the "Update checked-in dependencies" commit created by Actions.
|
||||
update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
|
||||
print(f' Reverting {update_dependencies_commit}')
|
||||
run_git('revert', update_dependencies_commit, '--no-edit')
|
||||
|
||||
else:
|
||||
print(' Nothing to revert.')
|
||||
|
||||
print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
|
||||
# Commit any conflicts (see the comment for `conflicted_files`)
|
||||
run_git('merge', f'{ORIGIN}/{source_branch}', allow_non_zero_exit_code=True)
|
||||
conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
|
||||
if len(conflicted_files) > 0:
|
||||
run_git('add', '.')
|
||||
run_git('commit', '--no-edit')
|
||||
|
||||
# Migrate the package version number from a v2 version number to a v1 version number
|
||||
print(f'Setting version number to {version}')
|
||||
subprocess.run(['npm', 'version', version, '--no-git-tag-version'])
|
||||
run_git('add', 'package.json', 'package-lock.json')
|
||||
|
||||
# Migrate the changelog notes from v2 version numbers to v1 version numbers
|
||||
print('Migrating changelog notes from v2 to v1')
|
||||
subprocess.run(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])
|
||||
|
||||
# Remove changelog notes from v2 that don't apply to v1
|
||||
subprocess.run(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])
|
||||
|
||||
# Amend the commit generated by `npm version` to update the CHANGELOG
|
||||
run_git('add', 'CHANGELOG.md')
|
||||
run_git('commit', '-m', f'Update version and changelog for v{version}')
|
||||
else:
|
||||
# If we're performing a standard release, there won't be any new commits on the target branch,
|
||||
# as these will have already been merged back into the source branch. Therefore we can just
|
||||
# start from the source branch.
|
||||
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{source_branch}')
|
||||
|
||||
print('Updating changelog')
|
||||
update_changelog(version)
|
||||
|
||||
# Create a commit that updates the CHANGELOG
|
||||
run_git('add', 'CHANGELOG.md')
|
||||
run_git('commit', '-m', f'Update changelog for v{version}')
|
||||
|
||||
run_git('push', ORIGIN, new_branch_name)
|
||||
|
||||
# Open a PR to update the branch
|
||||
open_pr(repo, commits, short_main_sha, new_branch_name)
|
||||
open_pr(
|
||||
repo,
|
||||
commits,
|
||||
source_branch_short_sha,
|
||||
new_branch_name,
|
||||
source_branch=source_branch,
|
||||
target_branch=target_branch,
|
||||
conductor=args.conductor,
|
||||
is_v2_release=args.mode == V2_MODE,
|
||||
labels=['Update dependencies'] if args.mode == V1_MODE else [],
|
||||
conflicted_files=conflicted_files
|
||||
)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
95 .github/workflows/__analyze-ref-input.yml generated vendored Normal file
@@ -0,0 +1,95 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
analyze-ref-input:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: "Analyze: 'ref' and 'sha' from inputs"
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
github.sha }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
96 .github/workflows/__debug-artifacts.yml generated vendored Normal file
@@ -0,0 +1,96 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Debug artifact upload
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
debug-artifacts:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Debug artifact upload
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
debug: true
|
||||
debug-artifact-name: my-debug-artifacts
|
||||
debug-database-name: my-db
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
|
||||
- shell: bash
|
||||
run: |
|
||||
LANGUAGES="cpp csharp go java javascript python"
|
||||
for language in $LANGUAGES; do
|
||||
echo "Checking $language"
|
||||
if [[ ! -f "$language.sarif" ]] ; then
|
||||
echo "Missing a SARIF file for $language"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -f "my-db-$language.zip" ]] ; then
|
||||
echo "Missing a database bundle for $language"
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -d "$language/log" ]] ; then
|
||||
echo "Missing logs for $language"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
66 .github/workflows/__extractor-ram-threads.yml generated vendored Normal file
@@ -0,0 +1,66 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Extractor ram and threads options test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
extractor-ram-threads:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
name: Extractor ram and threads options test
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: java
|
||||
ram: 230
|
||||
threads: 1
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
if [ "${CODEQL_RAM}" != "230" ]; then
|
||||
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
|
||||
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_THREADS}" != "1" ]; then
|
||||
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
|
||||
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
94 .github/workflows/__go-custom-queries.yml generated vendored Normal file
@@ -0,0 +1,94 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Go: Custom queries'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
go-custom-queries:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Go: Custom queries'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
config-file: ./.github/codeql/custom-queries.yml
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
83 .github/workflows/__go-custom-tracing-autobuild.yml generated vendored Normal file
@@ -0,0 +1,83 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Go: Autobuild custom tracing'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
go-custom-tracing-autobuild:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: 'Go: Autobuild custom tracing'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/autobuild
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/codeql_databases"
|
||||
if [[ ! -d go ]]; then
|
||||
echo "Did not find a Go database"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
94 .github/workflows/__go-custom-tracing.yml generated vendored Normal file
@@ -0,0 +1,94 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Go: Custom tracing'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
go-custom-tracing:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Go: Custom tracing'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ^1.13.1
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: go build main.go
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
68 .github/workflows/__javascript-source-root.yml generated vendored Normal file
@@ -0,0 +1,68 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Custom source root
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
javascript-source-root:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Custom source root
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../new-source-root
|
||||
mv * ../new-source-root
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
source-root: ../new-source-root
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
skip-queries: true
|
||||
upload: false
|
||||
- name: Assert database exists
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/codeql_databases"
|
||||
if [[ ! -d javascript ]]; then
|
||||
echo "Did not find a JavaScript database"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
129 .github/workflows/__ml-powered-queries.yml generated vendored Normal file
@@ -0,0 +1,129 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - ML-powered queries
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
ml-powered-queries:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20220120
|
||||
- os: macos-latest
|
||||
version: stable-20220120
|
||||
- os: windows-latest
|
||||
version: stable-20220120
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-latest
|
||||
version: nightly-latest
|
||||
name: ML-powered queries
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
queries: security-extended
|
||||
source-root: ./../action/tests/ml-powered-queries-repo
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ml-powered-queries-${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||
path: ${{ runner.temp }}/results/javascript.sarif
|
||||
retention-days: 7
|
||||
|
||||
- name: Check results
|
||||
# Running ML-powered queries on Windows requires CodeQL CLI 2.9.0+. We don't run these checks
|
||||
# against Windows and `cached` while CodeQL CLI 2.9.0 makes its way into `cached` to avoid the
|
||||
# test starting to fail when the cached CodeQL Bundle gets updated. Once the CodeQL Bundle
|
||||
# containing CodeQL CLI 2.9.0 has been fully released, we can drop this line and start running
|
||||
# these checks on Windows and `cached`.
|
||||
if: matrix.os != 'windows-latest' || matrix.version != 'cached'
|
||||
env:
|
||||
# Running on Windows requires CodeQL CLI 2.9.0+, which has so far only made it to 'latest'.
|
||||
SHOULD_RUN_ML_POWERED_QUERIES: ${{ matrix.os != 'windows-latest' || matrix.version
|
||||
== 'latest' || matrix.version == 'nightly-latest' }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"
|
||||
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should run at least the ML-powered queries in `expected_rules`.
|
||||
expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
|
||||
|
||||
for rule in ${expected_rules}; do
|
||||
found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
|
||||
flatten | .[].id] | any(. == $rule)' javascript.sarif)
|
||||
echo "Did find rule '${rule}': ${found_rule}"
|
||||
if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
|
||||
echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
|
||||
exit 1
|
||||
elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
|
||||
echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# We should have at least one alert from an ML-powered query.
|
||||
num_alerts=$(jq '[.runs[0].results[] |
|
||||
select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
|
||||
javascript.sarif)
|
||||
echo "Found ${num_alerts} alerts from ML-powered queries.";
|
||||
if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
|
||||
echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
|
||||
exit 1
|
||||
elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
|
||||
echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
107 .github/workflows/__multi-language-autodetect.yml generated vendored Normal file
@@ -0,0 +1,107 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Multi-language repository
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
multi-language-autodetect:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Multi-language repository
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: ${{ runner.temp }}/customDbLocation
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- shell: bash
|
||||
run: |
|
||||
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
|
||||
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for CPP, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
|
||||
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for C Sharp, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
|
||||
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Go, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
|
||||
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Java, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
|
||||
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Javascript, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
|
||||
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Python, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
90 .github/workflows/__packaging-config-inputs-js.yml generated vendored Normal file
@@ -0,0 +1,90 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Packaging: Config and input'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
packaging-config-inputs-js:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Config and input'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
packs: +dsp-testing/codeql-pack1@1.0.0
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should have 4 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
|
||||
|
||||
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
|
||||
echo "Found matching rules '$RULES'"
|
||||
if [ "$RULES" != "$EXPECTED_RULES" ]; then
|
||||
echo "Did not match expected rules '$EXPECTED_RULES'."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
89 .github/workflows/__packaging-config-js.yml generated vendored Normal file
@@ -0,0 +1,89 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Packaging: Config file'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
packaging-config-js:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Config file'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging.yml
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should have 4 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
|
||||
|
||||
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
|
||||
echo "Found matching rules '$RULES'"
|
||||
if [ "$RULES" != "$EXPECTED_RULES" ]; then
|
||||
echo "Did not match expected rules '$EXPECTED_RULES'."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
90 .github/workflows/__packaging-inputs-js.yml generated vendored Normal file
@@ -0,0 +1,90 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Packaging: Action input'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
packaging-inputs-js:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Action input'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging2.yml
|
||||
languages: javascript
|
||||
packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2, dsp-testing/codeql-pack3:other-query.ql
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should have 4 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
|
||||
|
||||
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
|
||||
echo "Found matching rules '$RULES'"
|
||||
if [ "$RULES" != "$EXPECTED_RULES" ]; then
|
||||
echo "Did not match expected rules '$EXPECTED_RULES'."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
92 .github/workflows/__remote-config.yml generated vendored Normal file
@@ -0,0 +1,92 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Remote config file
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
remote-config:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: Remote config file
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
github.sha }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
74 .github/workflows/__rubocop-multi-language.yml generated vendored Normal file
@@ -0,0 +1,74 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - RuboCop multi-language
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
rubocop-multi-language:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: RuboCop multi-language
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Set up Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: 2.6
|
||||
- name: Install Code Scanning integration
|
||||
shell: bash
|
||||
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: bundle install
|
||||
- name: RuboCop run
|
||||
shell: bash
|
||||
run: |
|
||||
bash -c "
|
||||
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
|
||||
[[ $? -ne 2 ]]
|
||||
"
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
sarif_file: rubocop.sarif
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
94 .github/workflows/__split-workflow.yml generated vendored Normal file
@@ -0,0 +1,94 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Split workflow
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
split-workflow:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Split workflow
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
packs: +dsp-testing/codeql-pack1@1.0.0
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
skip-queries: true
|
||||
output: ${{ runner.temp }}/results
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Assert No Results
|
||||
shell: bash
|
||||
run: |
|
||||
if [ "$(ls -A $RUNNER_TEMP/results)" ]; then
|
||||
echo "Expected results directory to be empty after skipping query execution!"
|
||||
exit 1
|
||||
fi
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should have 4 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
|
||||
|
||||
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
|
||||
echo "Found matching rules '$RULES'"
|
||||
if [ "$RULES" != "$EXPECTED_RULES" ]; then
|
||||
echo "Did not match expected rules '$EXPECTED_RULES'."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
67 .github/workflows/__test-autobuild-working-dir.yml generated vendored Normal file
@@ -0,0 +1,67 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Autobuild working directory
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
test-autobuild-working-dir:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
name: Autobuild working directory
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Test setup
|
||||
shell: bash
|
||||
run: |
|
||||
# Make sure that Gradle build succeeds in autobuild-dir ...
|
||||
cp -a ../action/tests/java-repo autobuild-dir
|
||||
# ... and fails if attempted in the current directory
|
||||
echo > build.gradle
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: java
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/autobuild
|
||||
with:
|
||||
working-directory: autobuild-dir
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Check database
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/codeql_databases"
|
||||
if [[ ! -d java ]]; then
|
||||
echo "Did not find a Java database"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
57 .github/workflows/__test-local-codeql.yml generated vendored Normal file
@@ -0,0 +1,57 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Local CodeQL bundle
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
test-local-codeql:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Local CodeQL bundle
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Fetch a CodeQL bundle
|
||||
shell: bash
|
||||
env:
|
||||
CODEQL_URL: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
run: |
|
||||
wget "$CODEQL_URL"
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ./codeql-bundle.tar.gz
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
58 .github/workflows/__test-proxy.yml generated vendored Normal file
@@ -0,0 +1,58 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Proxy test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
test-proxy:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
name: Proxy test
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
https_proxy: http://squid-proxy:3128
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
container:
|
||||
image: ubuntu:18.04
|
||||
options: --dns 127.0.0.1
|
||||
services:
|
||||
squid-proxy:
|
||||
image: datadog/squid:latest
|
||||
ports:
|
||||
- 3128:3128
|
||||
69 .github/workflows/__test-ruby.yml generated vendored Normal file
@@ -0,0 +1,69 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Ruby analysis
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
test-ruby:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Ruby analysis
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: ruby
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Check database
|
||||
shell: bash
|
||||
run: |
|
||||
RUBY_DB="${{ fromJson(steps.analysis.outputs.db-locations).ruby }}"
|
||||
if [[ ! -d "$RUBY_DB" ]]; then
|
||||
echo "Did not create a database for Ruby."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES: 'true'
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
95 .github/workflows/__unset-environment.yml generated vendored Normal file
@@ -0,0 +1,95 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Test unsetting environment variables
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
unset-environment:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Test unsetting environment variables
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: ${{ runner.temp }}/customDbLocation
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
id: analysis
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- shell: bash
|
||||
run: |
|
||||
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
|
||||
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for CPP, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
|
||||
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for C Sharp, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
|
||||
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Go, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
|
||||
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Java, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
|
||||
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Javascript, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
|
||||
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
|
||||
echo "Did not create a database for Python, or created it in the wrong location."
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
102 .github/workflows/__upload-ref-sha-input.yml generated vendored Normal file
@@ -0,0 +1,102 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
upload-ref-sha-input:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
github.sha }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
146 .github/workflows/__with-checkout-path.yml generated vendored Normal file
@@ -0,0 +1,146 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Use a custom `checkout_path`
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
with-checkout-path:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: Use a custom `checkout_path`
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
path: x/y/z/some-path
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
# it's enough to test one compiled language and one interpreted language
|
||||
languages: csharp,javascript
|
||||
source-path: x/y/z/some-path/tests/multi-language-repo
|
||||
debug: true
|
||||
- name: Build code (non-windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os != 'Windows' }}
|
||||
run: |
|
||||
$CODEQL_RUNNER x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- name: Build code (windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
run: |
|
||||
x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
shell: bash
|
||||
run: |
|
||||
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||
EXPECTED_REF="v1.1.0"
|
||||
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||
|
||||
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||
|
||||
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
25 .github/workflows/check-expected-release-files.yml vendored Normal file
@@ -0,0 +1,25 @@
name: Check Expected Release Files

on:
  pull_request:
    paths:
      - .github/workflows/check-expected-release-files.yml
      - src/defaults.json
    # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
    # by other workflows.
    types: [opened, synchronize, reopened, ready_for_review]

jobs:
  check-expected-release-files:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout CodeQL Action
        uses: actions/checkout@v3
      - name: Check Expected Release Files
        run: |
          bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
          set -x
          for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz"; do
            curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
          done
31 .github/workflows/check-for-conflicts.yml vendored Normal file
@@ -0,0 +1,31 @@
# Checks for any conflict markers created by git. This check is primarily intended to validate that
# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
name: Check for conflicts

on:
  pull_request:
    branches: [main, v1, v2]
    # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
    # by other workflows.
    types: [opened, synchronize, reopened, ready_for_review]

jobs:
  check-for-conflicts:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Check for conflicts
        run: |
          # Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
          # this to fail the workflow.
          FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
            '^(<<<<<<<|>>>>>>>)' . || true)
          if [[ "${FILES_WITH_CONFLICTS}" ]]; then
            echo "Fail: Found merge conflict markers in the following files:"
            echo ""
            echo "${FILES_WITH_CONFLICTS}"
            exit 1
          else
            echo "Success: Found no merge conflict markers."
          fi
18 .github/workflows/cli.yml vendored
@@ -1,18 +0,0 @@
name: "CodeScanning CLI"

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      # Build the CLI
      - name: Build CLI
        run: npm run build-cli

      # Upload an empty SARIF file
      - name: Upload with CLI
        run: node cli/code-scanning-cli.js upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_API_URL --github-auth ${{ github.token }}
84 .github/workflows/codeql.yml vendored
@@ -1,28 +1,88 @@
|
||||
name: "CodeQL action"
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
pull_request:
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
|
||||
jobs:
|
||||
# Identify the CodeQL tool versions to use in the analysis job.
|
||||
check-codeql-versions:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
versions: ${{ steps.compare.outputs.versions }}
|
||||
|
||||
permissions:
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Init with default CodeQL bundle from the VM image
|
||||
id: init-default
|
||||
uses: ./init
|
||||
with:
|
||||
languages: javascript
|
||||
- name: Remove empty database
|
||||
# allows us to run init a second time
|
||||
run: |
|
||||
rm -rf "$RUNNER_TEMP/codeql_databases"
|
||||
- name: Init with latest CodeQL bundle
|
||||
id: init-latest
|
||||
uses: ./init
|
||||
with:
|
||||
tools: latest
|
||||
languages: javascript
|
||||
- name: Compare default and latest CodeQL bundle versions
|
||||
id: compare
|
||||
env:
|
||||
CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
|
||||
CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
|
||||
run: |
|
||||
CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
|
||||
CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
|
||||
echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
|
||||
echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
|
||||
|
||||
# If we're running on a pull request, run with both bundles, even if `tools: latest` would
|
||||
# be the same as `tools: null`. This allows us to make the job for each of the bundles a
|
||||
# required status check.
|
||||
#
|
||||
# If we're running on push, then we can skip running with `tools: latest` when it would be
|
||||
# the same as running with `tools: null`.
|
||||
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
|
||||
VERSIONS_JSON='[null]'
|
||||
else
|
||||
VERSIONS_JSON='[null, "latest"]'
|
||||
fi
|
||||
|
||||
# Output a JSON-encoded list with the distinct versions to test against.
|
||||
echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
|
||||
echo "::set-output name=versions::${VERSIONS_JSON}"
|
||||
|
||||
build:
|
||||
needs: [check-codeql-versions]
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest,windows-latest,macos-latest]
|
||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
permissions:
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
# Must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head of the pull request.
|
||||
fetch-depth: 2
|
||||
|
||||
# If this run was triggered by a pull request event then checkout
|
||||
# the head of the pull request instead of the merge commit.
|
||||
- run: git checkout HEAD^2
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./init
|
||||
id: init
|
||||
with:
|
||||
languages: javascript
|
||||
config-file: ./.github/codeql/codeql-config.yml
|
||||
tools: ${{ matrix.tools }}
|
||||
# confirm steps.init.outputs.codeql-path points to the codeql binary
|
||||
- name: Print CodeQL Version
|
||||
run: ${{steps.init.outputs.codeql-path}} version --format=json
|
||||
- uses: ./analyze
|
||||
|
||||
152 .github/workflows/integration-testing.yml vendored
@@ -1,152 +0,0 @@
|
||||
name: "Integration Testing"
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
multi-language-repo_test-autodetect-languages:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
- uses: ./../action/init
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- run: |
|
||||
cd "$RUNNER_TEMP/codeql_databases"
|
||||
# List all directories as there will be precisely one directory per database
|
||||
# but there may be other files in this directory such as query suites.
|
||||
if [ "$(ls -d */ | wc -l)" != 6 ] || \
|
||||
[[ ! -d cpp ]] || \
|
||||
[[ ! -d csharp ]] || \
|
||||
[[ ! -d go ]] || \
|
||||
[[ ! -d java ]] || \
|
||||
[[ ! -d javascript ]] || \
|
||||
[[ ! -d python ]]; then
|
||||
echo "Did not find expected number of databases. Database dir contains: $(ls)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
multi-language-repo_test-custom-queries-and-remote-config:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
# Currently is not possible to analyze Go in conjunction with other languages in macos
|
||||
multi-language-repo_test-go-custom-queries:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/setup-go@v2
|
||||
if: ${{ matrix.os == 'macos-latest' }}
|
||||
with:
|
||||
go-version: '^1.13.1'
|
||||
- uses: actions/checkout@v2
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
config-file: ./.github/codeql/custom-queries.yml
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
multi-language-repo_rubocop:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
- name: Set up Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: 2.6
|
||||
- name: Install Code Scanning integration
|
||||
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
|
||||
- name: Install dependencies
|
||||
run: bundle install
|
||||
- name: Rubocop run
|
||||
run: |
|
||||
bash -c "
|
||||
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
|
||||
[[ $? -ne 2 ]]
|
||||
"
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
sarif_file: rubocop.sarif
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
test-proxy:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ubuntu:18.04
|
||||
options: --dns 127.0.0.1
|
||||
services:
|
||||
squid-proxy:
|
||||
image: datadog/squid:latest
|
||||
ports:
|
||||
- 3128:3128
|
||||
env:
|
||||
https_proxy: http://squid-proxy:3128
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
137 .github/workflows/post-release-mergeback.yml vendored Normal file
@@ -0,0 +1,137 @@
|
||||
# This workflow runs after a release of the action. For v2 releases, it merges any changes from the
|
||||
# release back into the main branch. Typically, this is just a single commit that updates the
|
||||
# changelog. For v2 and v1 releases, it then (a) tags the merge commit on the release branch that
|
||||
# represents the new release with an `vx.y.z` tag and (b) updates the `vx` tag to refer to this
|
||||
# commit.
|
||||
name: Tag release and merge back
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
baseBranch:
|
||||
description: 'The base branch to merge into'
|
||||
default: main
|
||||
required: false
|
||||
|
||||
push:
|
||||
branches:
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
|
||||
jobs:
|
||||
merge-back:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'github/codeql-action'
|
||||
env:
|
||||
BASE_BRANCH: "${{ github.event.inputs.baseBranch || 'main' }}"
|
||||
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
|
||||
|
||||
steps:
|
||||
- name: Dump environment
|
||||
run: env
|
||||
|
||||
- name: Dump GitHub context
|
||||
env:
|
||||
GITHUB_CONTEXT: '${{ toJson(github) }}'
|
||||
run: echo "${GITHUB_CONTEXT}"
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
|
||||
- name: Update git config
|
||||
run: |
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
|
||||
- name: Get version and new branch
|
||||
id: getVersion
|
||||
run: |
|
||||
VERSION="v$(jq '.version' -r 'package.json')"
|
||||
echo "::set-output name=version::${VERSION}"
|
||||
short_sha="${GITHUB_SHA:0:8}"
|
||||
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
|
||||
echo "::set-output name=newBranch::${NEW_BRANCH}"
|
||||
|
||||
|
||||
- name: Dump branches
|
||||
env:
|
||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||
run: |
|
||||
echo "BASE_BRANCH ${BASE_BRANCH}"
|
||||
echo "HEAD_BRANCH ${HEAD_BRANCH}"
|
||||
echo "NEW_BRANCH ${NEW_BRANCH}"
|
||||
|
||||
- name: Create mergeback branch
|
||||
env:
|
||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||
run: |
|
||||
git checkout -b "${NEW_BRANCH}"
|
||||
|
||||
- name: Check for tag
|
||||
id: check
|
||||
env:
|
||||
VERSION: "${{ steps.getVersion.outputs.version }}"
|
||||
run: |
|
||||
set +e # don't fail on an errored command
|
||||
git ls-remote --tags origin | grep "${VERSION}"
|
||||
exists="$?"
|
||||
if [ "${exists}" -eq 0 ]; then
|
||||
echo "Tag ${VERSION} exists. Not going to re-release."
|
||||
echo "::set-output name=exists::true"
|
||||
else
|
||||
echo "Tag ${VERSION} does not exist yet."
|
||||
fi
|
||||
|
||||
# we didn't tag the release during the update-release-branch workflow because the
|
||||
# commit that actually makes it to the release branch is a merge commit,
|
||||
# and not yet known during the first workflow. We tag now because we know the correct commit.
|
||||
- name: Tag release
|
||||
if: steps.check.outputs.exists != 'true'
|
||||
env:
|
||||
VERSION: ${{ steps.getVersion.outputs.version }}
|
||||
run: |
|
||||
# Unshallow the repo in order to allow pushes
|
||||
git fetch --unshallow
|
||||
# Create the `vx.y.z` tag
|
||||
git tag --annotate "${VERSION}" --message "${VERSION}"
|
||||
# Update the `vx` tag
|
||||
major_version_tag=$(cut -d '.' -f1 <<< "${VERSION}")
|
||||
# Use `--force` to overwrite the major version tag
|
||||
git tag --annotate "${major_version_tag}" --message "${major_version_tag}" --force
|
||||
# Push the tags, using:
|
||||
# - `--atomic` to make sure we either update both tags or neither (an intermediate state,
|
||||
# e.g. where we update the v2.x.y tag on the remote but not the v2 tag, could result in
|
||||
# unwanted Dependabot updates, e.g. from v2 to v2.x.y)
|
||||
# - `--force` since we're overwriting the `vx` tag
|
||||
git push origin --atomic --force refs/tags/"${VERSION}" refs/tags/"${major_version_tag}"
|
||||
|
||||
- name: Create mergeback branch
|
||||
if: steps.check.outputs.exists != 'true' && contains(github.ref, 'releases/v2')
|
||||
env:
|
||||
VERSION: "${{ steps.getVersion.outputs.version }}"
|
||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||
run: |
|
||||
set -exu
|
||||
pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
|
||||
pr_body="Updates version and changelog."
|
||||
|
||||
# Update the version number ready for the next release
|
||||
npm version patch --no-git-tag-version
|
||||
|
||||
# Update the changelog
|
||||
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
|
||||
git add .
|
||||
git commit -m "Update changelog and version after ${VERSION}"
|
||||
|
||||
git push origin "${NEW_BRANCH}"
|
||||
|
||||
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
|
||||
# so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
|
||||
gh pr create \
|
||||
--head "${NEW_BRANCH}" \
|
||||
--base "${BASE_BRANCH}" \
|
||||
--title "${pr_title}" \
|
||||
--label "Update dependencies" \
|
||||
--body "${pr_body}" \
|
||||
--draft
|
||||
511 .github/workflows/pr-checks.yml vendored
@@ -1,71 +1,482 @@
|
||||
name: "PR checks"
|
||||
name: PR Checks (Basic Checks and Runner)
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
pull_request:
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
tslint:
|
||||
lint-js:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: tslint
|
||||
run: npm run-script lint
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run Lint
|
||||
run: npm run-script lint
|
||||
|
||||
check-js:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
node-types-version: [12.12, current]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Check generated JavaScript
|
||||
run: |
|
||||
# Sanity check that repo is clean to start with
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then this workflow needs attention...
|
||||
>&2 echo "Failed: Repo should be clean before testing!"
|
||||
exit 1
|
||||
fi
|
||||
# Generate the JavaScript files
|
||||
npm run-script build
|
||||
# Check that repo is still clean
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then the PR needs attention
|
||||
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
|
||||
git status
|
||||
exit 1
|
||||
fi
|
||||
echo "Success: JavaScript files are up to date"
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Update version of @types/node
|
||||
if: matrix.node-types-version != 'current'
|
||||
env:
|
||||
NODE_TYPES_VERSION: ${{ matrix.node-types-version }}
|
||||
run: |
|
||||
# Export `NODE_TYPES_VERSION` so it's available to jq
|
||||
export NODE_TYPES_VERSION="${NODE_TYPES_VERSION}"
|
||||
contents=$(jq '.devDependencies."@types/node" = env.NODE_TYPES_VERSION' package.json)
|
||||
echo "${contents}" > package.json
|
||||
# Usually we run `npm install` on macOS to ensure that we pick up macOS-only dependencies.
|
||||
# However we're not checking in the updated lockfile here, so it's fine to run
|
||||
# `npm install` on Linux.
|
||||
npm install
|
||||
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
# The period in `git add --all .` ensures that we stage deleted files too.
|
||||
git add --all .
|
||||
git commit -m "Use @types/node=${NODE_TYPES_VERSION}"
|
||||
fi
|
||||
|
||||
- name: Check generated JS
|
||||
run: .github/workflows/script/check-js.sh
|
||||
|
||||
check-node-modules:
|
||||
runs-on: ubuntu-latest
|
||||
name: Check modules up to date
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Check node modules up to date
|
||||
run: |
|
||||
# Sanity check that repo is clean to start with
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then this workflow needs attention...
|
||||
>&2 echo "Failed: Repo should be clean before testing!"
|
||||
exit 1
|
||||
fi
|
||||
- uses: actions/checkout@v3
|
||||
- name: Check node modules up to date
|
||||
run: .github/workflows/script/check-node-modules.sh
|
||||
|
||||
# Reinstall modules and then clean to remove absolute paths
|
||||
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
|
||||
npm ci
|
||||
npm run removeNPMAbsolutePaths
|
||||
# Check that repo is still clean
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then the PR needs attention
|
||||
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
|
||||
git status
|
||||
exit 1
|
||||
fi
|
||||
echo "Success: node_modules are up to date"
|
||||
verify-pr-checks:
|
||||
name: Verify PR checks up to date
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: 3.8
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install ruamel.yaml
|
||||
- name: Verify PR checks up to date
|
||||
run: .github/workflows/script/verify-pr-checks.sh
|
||||
|
||||
npm-test:
|
||||
name: Unit Test
|
||||
needs: [check-js, check-node-modules]
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: npm run-script test
|
||||
run: npm run-script test
|
||||
|
||||
runner-analyze-javascript-ubuntu:
|
||||
name: Runner ubuntu JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: npm run-script test
|
||||
run: npm run-script test
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
# Pass --config-file here, but not for other jobs in this workflow.
|
||||
# This means we're testing the config file parsing in the runner
|
||||
# but not slowing down all jobs unnecessarily as it doesn't add much
|
||||
# testing the parsing on different operating systems and languages.
|
||||
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-javascript-windows:
|
||||
name: Runner windows JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-javascript-macos:
|
||||
name: Runner macos JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-ubuntu:
|
||||
name: Runner ubuntu C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
run: |
|
||||
. ./codeql-runner/codeql-env.sh
|
||||
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-windows:
|
||||
name: Runner windows C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
# `windows-latest`.
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: powershell
|
||||
run: |
|
||||
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
|
||||
$Env:CODEQL_EXTRACTOR_CSHARP_ROOT = "" # Unset an environment variable to make sure the tracer resists this
|
||||
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||
|
||||
- name: Upload tracer logs
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: tracer-logs
|
||||
path: ./codeql-runner/compound-build-tracer.log
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-macos:
|
||||
name: Runner macos C# analyze
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: |
|
||||
. ./codeql-runner/codeql-env.sh
|
||||
$CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-ubuntu:
|
||||
name: Runner ubuntu autobuild C# analyze
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux autobuild
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-windows:
|
||||
timeout-minutes: 45
|
||||
name: Runner windows autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
# `windows-latest`.
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: powershell
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe autobuild
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-macos:
|
||||
name: Runner macos autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd ../action/runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos autobuild
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-upload-sarif:
|
||||
name: Runner upload sarif
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Upload with runner
|
||||
run: |
|
||||
# Deliberately don't use TEST_MODE here. This is specifically testing
|
||||
# the compatibility with the API.
|
||||
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
runner-extractor-ram-threads-options:
|
||||
name: Runner ubuntu extractor RAM and threads options
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- name: Run init
|
||||
run: |
|
||||
runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||
|
||||
- name: Assert Results
|
||||
shell: bash
|
||||
run: |
|
||||
. ./codeql-runner/codeql-env.sh
|
||||
if [ "${CODEQL_RAM}" != "230" ]; then
|
||||
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
|
||||
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_THREADS}" != "1" ]; then
|
||||
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
|
||||
exit 1
|
||||
fi
|
||||
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
|
||||
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
158 .github/workflows/python-deps.yml vendored Normal file
@@ -0,0 +1,158 @@
|
||||
name: Test Python Package Installation on Linux and Mac
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
pull_request:
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
|
||||
jobs:
|
||||
test-setup-python-scripts:
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
|
||||
env:
|
||||
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
|
||||
PYTHON_VERSION: ${{ matrix.python_version }}
|
||||
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: ./init
|
||||
id: init
|
||||
with:
|
||||
tools: latest
|
||||
languages: python
|
||||
setup-python-dependencies: false
|
||||
|
||||
- name: Test Auto Package Installation
|
||||
run: |
|
||||
set -x
|
||||
$GITHUB_WORKSPACE/python-setup/install_tools.sh
|
||||
|
||||
cd $GITHUB_WORKSPACE/python-setup/tests/${PYTHON_DEPS_TYPE}/requests-${PYTHON_VERSION}
|
||||
|
||||
case ${{ matrix.os }} in
|
||||
ubuntu-latest*) basePath="/opt";;
|
||||
macos-latest*) basePath="/Users/runner";;
|
||||
esac
|
||||
echo ${basePath}
|
||||
|
||||
$GITHUB_WORKSPACE/python-setup/auto_install_packages.py "$(dirname ${{steps.init.outputs.codeql-path}})"
|
||||
- name: Setup for extractor
|
||||
run: |
|
||||
echo $CODEQL_PYTHON
|
||||
# only run if $CODEQL_PYTHON is set
|
||||
if [ ! -z $CODEQL_PYTHON ]; then
|
||||
$GITHUB_WORKSPACE/python-setup/tests/from_python_exe.py $CODEQL_PYTHON;
|
||||
fi
|
||||
|
||||
- name: Verify packages installed
|
||||
run: |
|
||||
$GITHUB_WORKSPACE/python-setup/tests/check_requests_2_26_0.sh ${PYTHON_VERSION}
|
||||
|
||||
# This one shouldn't fail, but also won't install packages
|
||||
test-setup-python-scripts-non-standard-location:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: ./init
|
||||
id: init
|
||||
with:
|
||||
tools: latest
|
||||
languages: python
|
||||
setup-python-dependencies: false
|
||||
|
||||
- name: Test Auto Package Installation
|
||||
run: |
|
||||
set -x
|
||||
$GITHUB_WORKSPACE/python-setup/install_tools.sh
|
||||
|
||||
cd $GITHUB_WORKSPACE/python-setup/tests/requirements/non-standard-location
|
||||
|
||||
case ${{ matrix.os }} in
|
||||
ubuntu-latest*) basePath="/opt";;
|
||||
macos-latest*) basePath="/Users/runner";;
|
||||
esac
|
||||
echo ${basePath}
|
||||
|
||||
$GITHUB_WORKSPACE/python-setup/auto_install_packages.py "$(dirname ${{steps.init.outputs.codeql-path}})"
|
||||
|
||||
- name: Setup for extractor
|
||||
run: |
|
||||
echo $CODEQL_PYTHON
|
||||
# only run if $CODEQL_PYTHON is set
|
||||
if [ ! -z $CODEQL_PYTHON ]; then
|
||||
$GITHUB_WORKSPACE/python-setup/tests/from_python_exe.py $CODEQL_PYTHON;
|
||||
fi
|
||||
|
||||
- name: Verify packages installed
|
||||
run: |
|
||||
test -z $LGTM_INDEX_IMPORT_PATH
|
||||
|
||||
test-setup-python-scripts-windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
|
||||
env:
|
||||
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
|
||||
PYTHON_VERSION: ${{ matrix.python_version }}
|
||||
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: ${{ matrix.python_version }}
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: ./init
|
||||
with:
|
||||
tools: latest
|
||||
languages: python
|
||||
setup-python-dependencies: false
|
||||
|
||||
- name: Test Auto Package Installation
|
||||
run: |
|
||||
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\install_tools.ps1"
|
||||
powershell -File $cmd
|
||||
|
||||
cd $Env:GITHUB_WORKSPACE\\python-setup/tests/$Env:PYTHON_DEPS_TYPE/requests-$Env:PYTHON_VERSION
|
||||
$DefaultsPath = Join-Path (Join-Path $Env:GITHUB_WORKSPACE "src") "defaults.json"
|
||||
$CodeQLBundleName = (Get-Content -Raw -Path $DefaultsPath | ConvertFrom-Json).bundleVersion
|
||||
$CodeQLVersion = "0.0.0-" + $CodeQLBundleName.split("-")[-1]
|
||||
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\auto_install_packages.py C:\\hostedtoolcache\\windows\\CodeQL\\$CodeQLVersion\\x64\\codeql
|
||||
|
||||
- name: Setup for extractor
|
||||
run: |
|
||||
echo $Env:CODEQL_PYTHON
|
||||
|
||||
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\tests\\from_python_exe.py $Env:CODEQL_PYTHON
|
||||
|
||||
- name: Verify packages installed
|
||||
run: |
|
||||
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_2_26_0.ps1"
|
||||
powershell -File $cmd $Env:PYTHON_VERSION
|
||||
21 .github/workflows/script/check-js.sh vendored Executable file
@@ -0,0 +1,21 @@
|
||||
#!/bin/bash
|
||||
set -eu
|
||||
|
||||
# Sanity check that repo is clean to start with
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then this workflow needs attention...
|
||||
>&2 echo "Failed: Repo should be clean before testing!"
|
||||
exit 1
|
||||
fi
|
||||
# Wipe the lib directory in case there are extra unnecessary files in there
|
||||
rm -rf lib
|
||||
# Generate the JavaScript files
|
||||
npm run-script build
|
||||
# Check that repo is still clean
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then the PR needs attention
|
||||
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
|
||||
git status
|
||||
exit 1
|
||||
fi
|
||||
echo "Success: JavaScript files are up to date"
|
||||
22 .github/workflows/script/check-node-modules.sh vendored Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
set -eu
|
||||
|
||||
# Sanity check that repo is clean to start with
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then this workflow needs attention...
|
||||
>&2 echo "Failed: Repo should be clean before testing!"
|
||||
exit 1
|
||||
fi
|
||||
sudo npm install --force -g npm@latest
|
||||
# Reinstall modules and then clean to remove absolute paths
|
||||
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
|
||||
npm ci
|
||||
npm run removeNPMAbsolutePaths
|
||||
# Check that repo is still clean
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then the PR needs attention
|
||||
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci && npm run removeNPMAbsolutePaths' on a macOS machine to update. Note it is important this command is run on macOS and not any other operating system as there is one dependency (fsevents) that is needed for macOS and may not be installed if the command is run on a Windows or Linux machine."
|
||||
git status
|
||||
exit 1
|
||||
fi
|
||||
echo "Success: node_modules are up to date"
|
||||
35 .github/workflows/script/update-required-checks.sh vendored Executable file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env bash
|
||||
# Update the required checks based on the current branch.
|
||||
# Typically, this will be main.
|
||||
|
||||
if [ -z "$GITHUB_TOKEN" ]; then
|
||||
echo "Failed: No GitHub token found. This script requires admin access to `github/codeql-action`."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$#" -eq 1 ]; then
|
||||
# If we were passed an argument, use it as the commit SHA
|
||||
GITHUB_SHA="$@"
|
||||
elif [ "$#" -gt 1 ]; then
|
||||
echo "Usage: $0 [SHA]"
|
||||
echo "Update the required checks based on the SHA, or main."
|
||||
elif [ -z "$GITHUB_SHA" ]; then
|
||||
# If we don't have a SHA, use main
|
||||
GITHUB_SHA="$(git rev-parse main)"
|
||||
fi
|
||||
|
||||
echo "Getting checks for $GITHUB_SHA"
|
||||
|
||||
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
|
||||
CHECKS="$(gh api repos/github/codeql-action/commits/${GITHUB_SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") | not)] | sort')"
|
||||
|
||||
echo "$CHECKS" | jq
|
||||
|
||||
echo "{\"contexts\": ${CHECKS}}" > checks.json
|
||||
|
||||
for BRANCH in main releases/v2 releases/v1; do
|
||||
echo "Updating $BRANCH"
|
||||
gh api --silent -X "PATCH" "repos/github/codeql-action/branches/$BRANCH/protection/required_status_checks" --input checks.json
|
||||
done
|
||||
|
||||
rm checks.json
|
||||
25 .github/workflows/script/verify-pr-checks.sh vendored Executable file
@@ -0,0 +1,25 @@
|
||||
#!/bin/bash
|
||||
set -eu
|
||||
|
||||
# Sanity check that repo is clean to start with
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then this workflow needs attention...
|
||||
>&2 echo "Failed: Repo should be clean before testing!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Wipe the generated PR checks in case there are extra unnecessary files in there
|
||||
rm -rf .github/workflows/__*
|
||||
|
||||
# Generate the PR checks
|
||||
cd pr-checks && python3 sync.py
|
||||
|
||||
# Check that repo is still clean
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
# If we get a fail here then the PR needs attention
|
||||
git diff
|
||||
git status
|
||||
>&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && python3 sync.py' to update"
|
||||
exit 1
|
||||
fi
|
||||
echo "Success: PR checks are up to date"
|
||||
40 .github/workflows/update-dependencies.yml vendored Normal file
@@ -0,0 +1,40 @@
|
||||
name: Update dependencies
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened, ready_for_review, labeled]
|
||||
|
||||
jobs:
|
||||
update:
|
||||
name: Update dependencies
|
||||
timeout-minutes: 45
|
||||
runs-on: macos-latest
|
||||
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Remove PR label
|
||||
env:
|
||||
REPOSITORY: '${{ github.repository }}'
|
||||
PR_NUMBER: '${{ github.event.pull_request.number }}'
|
||||
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
|
||||
run: |
|
||||
gh api "repos/$REPOSITORY/issues/$PR_NUMBER/labels/Update%20dependencies" -X DELETE
|
||||
|
||||
- name: Push updated dependencies
|
||||
env:
|
||||
BRANCH: '${{ github.head_ref }}'
|
||||
run: |
|
||||
git fetch origin "$BRANCH" --depth=1
|
||||
git checkout "origin/$BRANCH"
|
||||
sudo npm install --force -g npm@latest
|
||||
npm install
|
||||
npm ci
|
||||
npm run removeNPMAbsolutePaths
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git add node_modules
|
||||
git commit -am "Update checked-in dependencies"
|
||||
git push origin "HEAD:$BRANCH"
|
||||
fi
|
||||
56 .github/workflows/update-release-branch.yml vendored
@@ -1,32 +1,62 @@
|
||||
name: Update release branch
|
||||
on:
|
||||
schedule:
|
||||
- cron: 0 9 * * 1
|
||||
repository_dispatch:
|
||||
# Example of how to trigger this:
|
||||
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
|
||||
# Replace <token> with a personal access token from this page: https://github.com/settings/tokens
|
||||
types: [update-release-branch]
|
||||
# You can trigger this workflow via workflow dispatch to start a release.
|
||||
# This will open a PR to update the v2 release branch.
|
||||
workflow_dispatch:
|
||||
|
||||
# When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
|
||||
push:
|
||||
branches:
|
||||
- releases/v2
|
||||
|
||||
jobs:
|
||||
update:
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'github/codeql-action'
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Dump environment
|
||||
run: env
|
||||
|
||||
- name: Dump GitHub context
|
||||
env:
|
||||
GITHUB_CONTEXT: '${{ toJson(github) }}'
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
# Need full history so we calculate diffs
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: 3.5
|
||||
python-version: 3.8
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install PyGithub==1.51 requests
|
||||
pip install PyGithub==1.55 requests
|
||||
|
||||
- name: Update release branch
|
||||
run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
|
||||
- name: Update git config
|
||||
run: |
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
|
||||
- name: Update v2 release branch
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
run: |
|
||||
python .github/update-release-branch.py \
|
||||
--github-token ${{ secrets.GITHUB_TOKEN }} \
|
||||
--repository-nwo ${{ github.repository }} \
|
||||
--mode v2-release \
|
||||
--conductor ${GITHUB_ACTOR}
|
||||
|
||||
- name: Update v1 release branch
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
python .github/update-release-branch.py \
|
||||
--github-token ${{ secrets.GITHUB_TOKEN }} \
|
||||
--repository-nwo ${{ github.repository }} \
|
||||
--mode v1-release \
|
||||
--conductor ${GITHUB_ACTOR}
|
||||
|
||||
47 .github/workflows/update-supported-enterprise-server-versions.yml vendored Normal file
@@ -0,0 +1,47 @@
|
||||
name: Update Supported Enterprise Server Versions
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * *"
|
||||
|
||||
jobs:
|
||||
update-supported-enterprise-server-versions:
|
||||
name: Update Supported Enterprise Server Versions
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository == 'github/codeql-action' }}
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: "3.7"
|
||||
- name: Checkout CodeQL Action
|
||||
uses: actions/checkout@v3
|
||||
- name: Checkout Enterprise Releases
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: github/enterprise-releases
|
||||
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
|
||||
path: ${{ github.workspace }}/enterprise-releases/
|
||||
- name: Update Supported Enterprise Server Versions
|
||||
run: |
|
||||
cd ./.github/workflows/update-supported-enterprise-server-versions/
|
||||
python3 -m pip install pipenv
|
||||
pipenv install
|
||||
pipenv run ./update.py
|
||||
rm --recursive "$ENTERPRISE_RELEASES_PATH"
|
||||
npm run build
|
||||
env:
|
||||
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
|
||||
- name: Commit Changes
|
||||
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
|
||||
with:
|
||||
commit-message: Update supported GitHub Enterprise Server versions.
|
||||
title: Update supported GitHub Enterprise Server versions.
|
||||
body: ""
|
||||
author: GitHub <noreply@github.com>
|
||||
branch: update-supported-enterprise-server-versions
|
||||
draft: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
9 .github/workflows/update-supported-enterprise-server-versions/Pipfile vendored Normal file
@@ -0,0 +1,9 @@
|
||||
[[source]]
|
||||
name = "pypi"
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
|
||||
[dev-packages]
|
||||
|
||||
[packages]
|
||||
semver = "*"
|
||||
27 .github/workflows/update-supported-enterprise-server-versions/Pipfile.lock generated vendored Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "e3ba923dcb4888e05de5448c18a732bf40197e80fabfa051a61c01b22c504879"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"semver": {
|
||||
"hashes": [
|
||||
"sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4",
|
||||
"sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.13.0"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
||||
43 .github/workflows/update-supported-enterprise-server-versions/update.py vendored Executable file
@@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env python3
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
|
||||
import semver
|
||||
|
||||
_API_COMPATIBILITY_PATH = pathlib.Path(__file__).absolute().parents[3] / "src" / "api-compatibility.json"
|
||||
_ENTERPRISE_RELEASES_PATH = pathlib.Path(os.environ["ENTERPRISE_RELEASES_PATH"])
|
||||
_RELEASE_FILE_PATH = _ENTERPRISE_RELEASES_PATH / "releases.json"
|
||||
_FIRST_SUPPORTED_RELEASE = semver.VersionInfo.parse("2.22.0") # Versions older than this did not include Code Scanning.
|
||||
|
||||
def main():
|
||||
api_compatibility_data = json.loads(_API_COMPATIBILITY_PATH.read_text())
|
||||
|
||||
releases = json.loads(_RELEASE_FILE_PATH.read_text())
|
||||
oldest_supported_release = None
|
||||
newest_supported_release = semver.VersionInfo.parse(api_compatibility_data["maximumVersion"] + ".0")
|
||||
|
||||
for release_version_string, release_data in releases.items():
|
||||
release_version = semver.VersionInfo.parse(release_version_string + ".0")
|
||||
if release_version < _FIRST_SUPPORTED_RELEASE:
|
||||
continue
|
||||
|
||||
if release_version > newest_supported_release:
|
||||
feature_freeze_date = datetime.date.fromisoformat(release_data["feature_freeze"])
|
||||
if feature_freeze_date < datetime.date.today() + datetime.timedelta(weeks=2):
|
||||
newest_supported_release = release_version
|
||||
|
||||
if oldest_supported_release is None or release_version < oldest_supported_release:
|
||||
end_of_life_date = datetime.date.fromisoformat(release_data["end"])
|
||||
if end_of_life_date > datetime.date.today():
|
||||
oldest_supported_release = release_version
|
||||
|
||||
api_compatibility_data = {
|
||||
"minimumVersion": f"{oldest_supported_release.major}.{oldest_supported_release.minor}",
|
||||
"maximumVersion": f"{newest_supported_release.major}.{newest_supported_release.minor}",
|
||||
}
|
||||
_API_COMPATIBILITY_PATH.write_text(json.dumps(api_compatibility_data, sort_keys=True) + "\n")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
6 .gitignore vendored
@@ -1,2 +1,4 @@
|
||||
/cli/
|
||||
|
||||
/runner/dist/
|
||||
/runner/node_modules/
|
||||
# Ignore for example failing-tests.json from AVA
|
||||
node_modules/.cache
|
||||
|
||||
15 .vscode/tasks.json vendored Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"type": "typescript",
|
||||
"tsconfig": "tsconfig.json",
|
||||
"option": "watch",
|
||||
"problemMatcher": [
|
||||
"$tsc-watch"
|
||||
],
|
||||
"group": "build",
|
||||
"label": "tsc: watch - tsconfig.json"
|
||||
}
|
||||
]
|
||||
}
|
||||
216 CHANGELOG.md Normal file
@@ -0,0 +1,216 @@
|
||||
# CodeQL Action Changelog
|
||||
|
||||
## [UNRELEASED]
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 2.1.11 - 17 May 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.9.2. [#1074](https://github.com/github/codeql-action/pull/1074)
|
||||
|
||||
## 2.1.10 - 10 May 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.9.1. [#1056](https://github.com/github/codeql-action/pull/1056)
|
||||
- When `wait-for-processing` is enabled, the workflow will now fail if there were any errors that occurred during processing of the analysis results.
|
||||
|
||||
## 2.1.9 - 27 Apr 2022
|
||||
|
||||
- Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
|
||||
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007)
|
||||
- Update default CodeQL bundle version to 2.9.0.
|
||||
- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)
|
||||
|
||||
## 2.1.8 - 08 Apr 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.8.5. [#1014](https://github.com/github/codeql-action/pull/1014)
|
||||
- Fix error where the init action would fail due to a GitHub API request that was taking too long to complete [#1025](https://github.com/github/codeql-action/pull/1025)
|
||||
|
||||
## 2.1.7 - 05 Apr 2022
|
||||
|
||||
- A bug where additional queries specified in the workflow file would sometimes not be respected has been fixed. [#1018](https://github.com/github/codeql-action/pull/1018)
|
||||
|
||||
## 2.1.6 - 30 Mar 2022
|
||||
|
||||
- [v2+ only] The CodeQL Action now runs on Node.js v16. [#1000](https://github.com/github/codeql-action/pull/1000)
|
||||
- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
|
||||
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)
|
||||
|
||||
## 1.1.5 - 15 Mar 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.8.3.
|
||||
- The CodeQL runner is now deprecated and no longer being released. For more information, see [CodeQL runner deprecation](https://github.blog/changelog/2021-09-21-codeql-runner-deprecation/).
|
||||
- Fix two bugs that cause action failures with GHES 3.3 or earlier. [#978](https://github.com/github/codeql-action/pull/978)
|
||||
- Fix `not a permitted key` invalid requests with GHES 3.1 or earlier
|
||||
- Fix `RUNNER_ARCH environment variable must be set` errors with GHES 3.3 or earlier
|
||||
|
||||
## 1.1.4 - 07 Mar 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.8.2. [#950](https://github.com/github/codeql-action/pull/950)
|
||||
- Fix a bug where old results can be uploaded if the languages in a repository change when using a non-ephemeral self-hosted runner. [#955](https://github.com/github/codeql-action/pull/955)
|
||||
|
||||
## 1.1.3 - 23 Feb 2022
|
||||
|
||||
- Fix a bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
|
||||
|
||||
## 1.1.2 - 17 Feb 2022
|
||||
|
||||
- Due to potential issues for GHES 3.1–3.3 customers who are using recent versions of the CodeQL Action via GHES Connect, the CodeQL Action now uses Node.js v12 rather than Node.js v16. [#937](https://github.com/github/codeql-action/pull/937)
|
||||
|
||||
## 1.1.1 - 17 Feb 2022
|
||||
|
||||
- The CodeQL CLI versions up to and including version 2.4.4 are not compatible with the CodeQL Action 1.1.1 and later. The Action will emit an error if it detects that it is being used by an incompatible version of the CLI. [#931](https://github.com/github/codeql-action/pull/931)
|
||||
- Update default CodeQL bundle version to 2.8.1. [#925](https://github.com/github/codeql-action/pull/925)
|
||||
|
||||
## 1.1.0 - 11 Feb 2022
|
||||
|
||||
- The CodeQL Action now uses Node.js v16. [#909](https://github.com/github/codeql-action/pull/909)
|
||||
- Beware that the CodeQL build tracer in this release (and in all earlier releases) is incompatible with Windows 11 and Windows Server 2022. This incompatibility affects database extraction for compiled languages: cpp, csharp, go, and java. As a result, analyzing these languages with the `windows-latest` or `windows-2022` Actions virtual environments is currently unsupported. If you use any of these languages, please use the `windows-2019` Actions virtual environment or otherwise avoid these specific Windows versions until a new release fixes this incompatibility.
|
||||
|
||||
## 1.0.32 - 07 Feb 2022
|
||||
|
||||
- Add `sarif-id` as an output for the `upload-sarif` and `analyze` actions. [#889](https://github.com/github/codeql-action/pull/889)
|
||||
- Add `ref` and `sha` inputs to the `analyze` action, which override the defaults provided by the GitHub Action context. [#889](https://github.com/github/codeql-action/pull/889)
|
||||
- Update default CodeQL bundle version to 2.8.0. [#911](https://github.com/github/codeql-action/pull/911)
|
||||
|
||||
## 1.0.31 - 31 Jan 2022
|
||||
|
||||
- Remove `experimental` message when using custom CodeQL packages. [#888](https://github.com/github/codeql-action/pull/888)
|
||||
- Add a better warning message stating that experimental features will be disabled if the workflow has been triggered by a pull request from a fork or the `security-events: write` permission is not present. [#882](https://github.com/github/codeql-action/pull/882)
|
||||
|
||||
## 1.0.30 - 24 Jan 2022
|
||||
|
||||
- Display a better error message when encountering a workflow that runs the `codeql-action/init` action multiple times. [#876](https://github.com/github/codeql-action/pull/876)
|
||||
- Update default CodeQL bundle version to 2.7.6. [#877](https://github.com/github/codeql-action/pull/877)
|
||||
|
||||
## 1.0.29 - 21 Jan 2022
|
||||
|
||||
- The feature to wait for SARIF processing to complete after upload has been disabled by default due to a bug in its interaction with pull requests from forks.
|
||||
|
||||
## 1.0.28 - 18 Jan 2022
|
||||
|
||||
- Update default CodeQL bundle version to 2.7.5. [#866](https://github.com/github/codeql-action/pull/866)
|
||||
- Fix a bug where SARIF files were failing upload due to an invalid test for unique categories. [#872](https://github.com/github/codeql-action/pull/872)
|
||||
|
||||
## 1.0.27 - 11 Jan 2022
|
||||
|
||||
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#855](https://github.com/github/codeql-action/pull/855)
|
||||
|
||||
## 1.0.26 - 10 Dec 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)
|
||||
|
||||
## 1.0.25 - 06 Dec 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.0.24 - 23 Nov 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.7.2. [#827](https://github.com/github/codeql-action/pull/827)
|
||||
|
||||
## 1.0.23 - 16 Nov 2021
|
||||
|
||||
- The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
|
||||
- Update default CodeQL bundle version to 2.7.1. [#816](https://github.com/github/codeql-action/pull/816)
|
||||
|
||||
## 1.0.22 - 04 Nov 2021
|
||||
|
||||
- The `init` step of the Action now supports `ram` and `threads` inputs to limit resource use of CodeQL extractors. These inputs also serve as defaults to the subsequent `analyze` step, which finalizes the database and executes queries. [#738](https://github.com/github/codeql-action/pull/738)
|
||||
- When used with CodeQL 2.7.1 or above, the Action now includes custom query help in the analysis results uploaded to GitHub code scanning, if available. To add help text for a custom query, create a Markdown file next to the `.ql` file containing the query, using the same base name but the file extension `.md`. [#804](https://github.com/github/codeql-action/pull/804)
|
||||
|
||||
## 1.0.21 - 28 Oct 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.7.0. [#795](https://github.com/github/codeql-action/pull/795)
|
||||
|
||||
## 1.0.20 - 25 Oct 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.0.19 - 18 Oct 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.0.18 - 08 Oct 2021
|
||||
|
||||
- Fixed a bug where some builds were no longer being traced correctly. [#766](https://github.com/github/codeql-action/pull/766)
|
||||
|
||||
## 1.0.17 - 07 Oct 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.6.3. [#761](https://github.com/github/codeql-action/pull/761)
|
||||
|
||||
## 1.0.16 - 05 Oct 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.0.15 - 22 Sep 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.6.2. [#746](https://github.com/github/codeql-action/pull/746)
|
||||
|
||||
## 1.0.14 - 09 Sep 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.6.1. [#733](https://github.com/github/codeql-action/pull/733)
|
||||
|
||||
## 1.0.13 - 06 Sep 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.6.0. [#712](https://github.com/github/codeql-action/pull/712)
|
||||
- Update baseline lines of code counter for python. All multi-line strings are counted as code. [#714](https://github.com/github/codeql-action/pull/714)
|
||||
- Remove old baseline LoC injection [#715](https://github.com/github/codeql-action/pull/715)
|
||||
|
||||
## 1.0.12 - 16 Aug 2021
|
||||
|
||||
- Update README to include a sample permissions block. [#689](https://github.com/github/codeql-action/pull/689)
|
||||
|
||||
## 1.0.11 - 09 Aug 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.5.9. [#687](https://github.com/github/codeql-action/pull/687)
|
||||
|
||||
## 1.0.10 - 03 Aug 2021
|
||||
|
||||
- Fix an issue where a summary of diagnostics information from CodeQL was not output to the logs of the `analyze` step of the Action. [#672](https://github.com/github/codeql-action/pull/672)
|
||||
|
||||
## 1.0.9 - 02 Aug 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.0.8 - 26 Jul 2021
|
||||
|
||||
- Update default CodeQL bundle version to 2.5.8. [#631](https://github.com/github/codeql-action/pull/631)
|
||||
|
||||
## 1.0.7 - 21 Jul 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.0.6 - 19 Jul 2021
|
||||
|
||||
- The `init` step of the Action now supports a `source-root` input as a path to the root source-code directory. By default, the path is relative to `$GITHUB_WORKSPACE`. [#607](https://github.com/github/codeql-action/pull/607)
|
||||
- The `init` step will now try to install a few Python tools needed by this Action when running on a self-hosted runner. [#616](https://github.com/github/codeql-action/pull/616)
|
||||
|
||||
## 1.0.5 - 12 Jul 2021
|
||||
|
||||
- The `analyze` step of the Action now supports a `skip-queries` option to merely build the CodeQL database without analyzing. This functionality is not present in the runner. Additionally, the step will no longer fail if it encounters a finalized database, and will instead continue with query execution. [#602](https://github.com/github/codeql-action/pull/602)
|
||||
- Update the warning message when the baseline lines of code count is unavailable. [#608](https://github.com/github/codeql-action/pull/608)
|
||||
|
||||
## 1.0.4 - 28 Jun 2021
|
||||
|
||||
- Fix `RUNNER_TEMP environment variable must be set` when using runner. [#594](https://github.com/github/codeql-action/pull/594)
|
||||
- Fix counting of lines of code for C# projects. [#586](https://github.com/github/codeql-action/pull/586)
|
||||
|
||||
## 1.0.3 - 23 Jun 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.0.2 - 17 Jun 2021
|
||||
|
||||
- Fix out of memory in hash computation. [#550](https://github.com/github/codeql-action/pull/550)
|
||||
- Clean up logging during analyze results. [#557](https://github.com/github/codeql-action/pull/557)
|
||||
- Add `--finalize-dataset` to `database finalize` call, freeing up some disk space after database creation. [#558](https://github.com/github/codeql-action/pull/558)
|
||||
|
||||
## 1.0.1 - 07 Jun 2021
|
||||
|
||||
- Pass the `--sarif-group-rules-by-pack` argument to CodeQL CLI invocations that generate SARIF. This means the SARIF rule object for each query will now be found underneath its corresponding query pack in `runs[].tool.extensions`. [#546](https://github.com/github/codeql-action/pull/546)
|
||||
- Output the location of CodeQL databases created in the analyze step. [#543](https://github.com/github/codeql-action/pull/543)
|
||||
|
||||
## 1.0.0 - 31 May 2021
|
||||
|
||||
- Add this changelog file. [#507](https://github.com/github/codeql-action/pull/507)
|
||||
- Improve grouping of analysis logs. Add a new log group containing a summary of metrics and diagnostics, if they were produced by CodeQL builtin queries. [#515](https://github.com/github/codeql-action/pull/515)
|
||||
- Add metrics and diagnostics summaries from custom query suites to the analysis summary log group. [#532](https://github.com/github/codeql-action/pull/532)
|
||||
3 CODEOWNERS Normal file
@@ -0,0 +1,3 @@
|
||||
**/* @github/codeql-action-reviewers
|
||||
|
||||
/python-setup/ @github/codeql-python @github/codeql-action-reviewers
|
||||
CONTRIBUTING.md
@@ -12,7 +12,7 @@ Please note that this project is released with a [Contributor Code of Conduct][c
|
||||
|
||||
## Development and Testing
|
||||
|
||||
Before you start, ensure that you have a recent version of node installed. You can see which version of node is used by the action in `init/action.yml`.
|
||||
Before you start, ensure that you have a recent version of node (14 or higher) installed, along with a recent version of npm (7 or higher). You can see which version of node is used by the action in `init/action.yml`.
|
||||
|
||||
### Common tasks
|
||||
|
||||
@@ -22,33 +22,26 @@ Before you start, ensure that you have a recent version of node installed. You c
|
||||
|
||||
This project also includes configuration to run tests from VSCode (with support for breakpoints) - open the test file you wish to run and choose "Debug AVA test file" from the Run menu in the Run panel.
|
||||
|
||||
You may want to run `tsc --watch` from the command line or inside of vscode in order to ensure build artifacts are up to date as you are working.
|
||||
|
||||
### Checking in compiled artifacts and `node_modules`
|
||||
|
||||
Because CodeQL Action users consume the code directly from this repository, and there can be no build step during a GitHub Actions run, this repository contains all compiled artifacts and node modules. There is a PR check that will fail if any of the compiled artifacts are not up to date. Compiled artifacts are stored in the `lib/` directory. For all day-to-day development purposes, this folder can be ignored.
|
||||
|
||||
Only run `npm install` if you are explicitly changing the set of dependencies in `package.json`. The `node_modules` directory should be up to date when you check out, but if, for some reason, there is an inconsistency, use `npm ci && npm run removeNPMAbsolutePaths` to ensure the directory is in a state consistent with the `package-lock.json`. Note that due to a macOS-specific dependency, this command should be run on a macOS machine. There is a PR check to ensure the consistency of the `node_modules` directory.
|
||||
|
||||
### Running the action
|
||||
|
||||
To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
|
||||
|
||||
### Running the action locally
|
||||
|
||||
It is possible to run this action locally via [act](https://github.com/nektos/act) via the following steps:
|
||||
|
||||
1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
|
||||
1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
|
||||
1. Add a `.env` file in the root of the project you are running:
|
||||
|
||||
```bash
|
||||
CODEQL_LOCAL_RUN=true
|
||||
|
||||
# Optional, for better logging
|
||||
GITHUB_JOB=<ANY_JOB_NAME>
|
||||
```
|
||||
|
||||
1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
|
||||
|
||||
Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
|
||||
|
||||
### Integration tests
|
||||
|
||||
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.
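In practice, adding an integration test usually means adding another job that checks out this repository and drives the local `init`/`analyze` actions end to end. A bare-bones sketch is below; the job name, language, and the final verification step are placeholders rather than a copy of any existing job.

```yaml
test-my-new-feature:
  runs-on: ubuntu-latest
  timeout-minutes: 45
  steps:
    - uses: actions/checkout@v3
    - name: Initialize CodeQL
      uses: ./init
      with:
        languages: javascript
    - name: Perform CodeQL Analysis
      uses: ./analyze
      with:
        upload: "false"   # keep test analyses out of code scanning
    # ...assert on the produced SARIF or database here...
```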
|
||||
|
||||
### Building the CodeQL runner
|
||||
|
||||
Navigate to the `runner` directory and run `npm install` to install dependencies needed only for compiling the CodeQL runner. Run `npm run build-runner` to output files to the `runner/dist` directory.
|
||||
|
||||
## Submitting a pull request
|
||||
|
||||
1. [Fork][fork] and clone the repository
|
||||
@@ -65,6 +58,34 @@ Here are a few things you can do that will increase the likelihood of your pull
|
||||
- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
|
||||
- Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
|
||||
|
||||
## Releasing (write access required)
|
||||
|
||||
1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
|
||||
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `releases/v2` release branch.
|
||||
|
||||
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
|
||||
1. The workflow run will open a pull request titled "Merge main into releases/v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
|
||||
1. Review the checklist items in the pull request description.
|
||||
Once you've checked off all but the last two of these, approve the PR and automerge it.
|
||||
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
|
||||
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
|
||||
|
||||
Approve the mergeback PR and automerge it.
|
||||
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
|
||||
This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
|
||||
|
||||
Review the checklist items in the pull request description.
|
||||
Once you've checked off all the items, approve the PR and automerge it.
|
||||
1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
|
||||
|
||||
## Keeping the PR checks up to date (admin access required)
|
||||
|
||||
Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [update-required-checks.sh](.github/workflows/script/update-required-checks.sh) script:
|
||||
|
||||
1. By default, this script retrieves the checks from the latest SHA on `main`, so make sure that your `main` branch is up to date.
|
||||
2. Run the script (see the workflow sketch after this list). If there's a reason to, you can pass in a different SHA as a CLI argument.
|
||||
3. After running, go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules for `main`, `v1`, and `v2` have been updated.
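For reference, the script can also be driven from a manually triggered workflow instead of a local clone. The sketch below is illustrative only: the workflow and secret names are assumptions, and the token stored in the secret must have admin access to `github/codeql-action` (the default `GITHUB_TOKEN` does not).

```yaml
# Hypothetical helper workflow (not part of this repository).
name: Update required checks
on: workflow_dispatch

jobs:
  update-required-checks:
    runs-on: ubuntu-latest  # gh and jq are preinstalled on GitHub-hosted runners
    steps:
      - uses: actions/checkout@v3
      - name: Update required checks
        env:
          GITHUB_TOKEN: ${{ secrets.REQUIRED_CHECKS_ADMIN_TOKEN }}  # assumed admin-scoped PAT
        run: .github/workflows/script/update-required-checks.sh
```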
|
||||
|
||||
## Resources
|
||||
|
||||
- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
|
||||
|
||||
88 README.md
@@ -1,6 +1,8 @@
|
||||
# CodeQL Action
|
||||
|
||||
This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
|
||||
This action runs GitHub's industry-leading semantic code analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
|
||||
|
||||
For a list of recent changes, see the CodeQL Action's [changelog](CHANGELOG.md).
|
||||
|
||||
## License
|
||||
|
||||
@@ -20,33 +22,41 @@ name: "Code Scanning - Action"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
schedule:
|
||||
- cron: '0 0 * * 0'
|
||||
# ┌───────────── minute (0 - 59)
|
||||
# │ ┌───────────── hour (0 - 23)
|
||||
# │ │ ┌───────────── day of the month (1 - 31)
|
||||
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
|
||||
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
|
||||
# │ │ │ │ │
|
||||
# │ │ │ │ │
|
||||
# │ │ │ │ │
|
||||
# * * * * *
|
||||
- cron: '30 1 * * 0'
|
||||
|
||||
jobs:
|
||||
CodeQL-Build:
|
||||
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
# required for all workflows
|
||||
security-events: write
|
||||
|
||||
# only required for workflows in private repositories
|
||||
actions: read
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
# Must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head of the pull request.
|
||||
# Only include this option if you are running this workflow on pull requests.
|
||||
fetch-depth: 2
|
||||
|
||||
# If this run was triggered by a pull request event then checkout
|
||||
# the head of the pull request instead of the merge commit.
|
||||
# Only include this step if you are running this workflow on pull requests.
|
||||
- run: git checkout HEAD^2
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
uses: github/codeql-action/init@v2
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
# with:
|
||||
# languages: go, javascript, csharp, python, cpp, java
|
||||
@@ -54,38 +64,38 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below).
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following
|
||||
# three lines and modify them (or add more) to build your code if your
|
||||
# project uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
uses: github/codeql-action/analyze@v2
|
||||
```
|
||||
|
||||
If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:
|
||||
|
||||
```yaml
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: go, javascript
|
||||
|
||||
# Here is where you build your code
|
||||
- run: |
|
||||
make bootstrap
|
||||
make release
|
||||
make bootstrap
|
||||
make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
uses: github/codeql-action/analyze@v2
|
||||
```
|
||||
|
||||
### Configuration file
|
||||
@@ -93,13 +103,39 @@ If you prefer to integrate this within an existing CI workflow, it should end up
|
||||
Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
|
||||
|
||||
```yaml
|
||||
- uses: github/codeql-action/init@v1
|
||||
- uses: github/codeql-action/init@v2
|
||||
with:
|
||||
config-file: ./.github/codeql/codeql-config.yml
|
||||
```
|
||||
|
||||
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration)."
|
||||
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
|
||||
|
||||
```yaml
|
||||
- uses: github/codeql-action/init@v2
|
||||
with:
|
||||
config-file: owner/repo/codeql-config.yml@branch
|
||||
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
|
||||
```
|
||||
|
||||
For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
|
||||
|
||||
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
|
||||
|
||||
```yaml
|
||||
- uses: github/codeql-action/init@v2
|
||||
with:
|
||||
queries: <local-or-remote-query>,<another-query>
|
||||
```
|
||||
|
||||
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
|
||||
|
||||
```yaml
|
||||
- uses: github/codeql-action/init@v2
|
||||
with:
|
||||
queries: +<local-or-remote-query>,<another-query>
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
|
||||
|
||||
|
||||
analyze/action.yml
@@ -1,6 +1,6 @@
|
||||
name: 'CodeQL: Finish'
|
||||
description: 'Finalize CodeQL database'
|
||||
author: 'GitHub'
|
||||
name: "CodeQL: Finish"
|
||||
description: "Finalize CodeQL database"
|
||||
author: "GitHub"
|
||||
inputs:
|
||||
check_name:
|
||||
description: The name of the check run to add text to.
|
||||
@@ -8,25 +8,69 @@ inputs:
|
||||
output:
|
||||
description: The path of the directory in which to save the SARIF results
|
||||
required: false
|
||||
default: '../results'
|
||||
default: "../results"
|
||||
upload:
|
||||
description: Upload the SARIF file
|
||||
description: Upload the SARIF file to Code Scanning
|
||||
required: false
|
||||
default: "true"
|
||||
ram:
|
||||
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
|
||||
cleanup-level:
|
||||
description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --mode flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
|
||||
required: false
|
||||
default: "brutal"
|
||||
ram:
|
||||
description: >-
|
||||
The amount of memory in MB that can be used by CodeQL for database finalization and query execution.
|
||||
By default, this action will use the same amount of memory as previously set in the "init" action.
|
||||
If the "init" action also does not have an explicit "ram" input, this action will use most of the
|
||||
memory available in the system (which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows,
|
||||
and 13GB for macOS).
|
||||
required: false
|
||||
add-snippets:
|
||||
description: Specify whether or not to add code snippets to the output sarif file.
|
||||
required: false
|
||||
default: "false"
|
||||
skip-queries:
|
||||
description: If this option is set, the CodeQL database will be built but no queries will be run on it. Thus, no results will be produced.
|
||||
required: false
|
||||
default: "false"
|
||||
threads:
|
||||
description: The number of threads to be used by CodeQL.
|
||||
description: >-
|
||||
The number of threads that can be used by CodeQL for database finalization and query execution.
|
||||
By default, this action will use the same number of threads as previously set in the "init" action.
|
||||
If the "init" action also does not have an explicit "threads" input, this action will use all the
|
||||
hardware threads available in the system (which for GitHub-hosted runners is 2 for Linux and Windows
|
||||
and 3 for macOS).
|
||||
required: false
|
||||
checkout_path:
|
||||
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
|
||||
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
|
||||
required: false
|
||||
default: ${{ github.workspace }}
|
||||
ref:
|
||||
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
|
||||
required: false
|
||||
sha:
|
||||
description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is not available in pull requests from forks."
|
||||
required: false
|
||||
category:
|
||||
description: String used by Code Scanning for matching the analyses
|
||||
required: false
|
||||
upload-database:
|
||||
description: Whether to upload the resulting CodeQL database
|
||||
required: false
|
||||
default: "true"
|
||||
wait-for-processing:
|
||||
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
|
||||
required: true
|
||||
default: "true"
|
||||
token:
|
||||
default: ${{ github.token }}
|
||||
matrix:
|
||||
default: ${{ toJson(matrix) }}
|
||||
outputs:
|
||||
db-locations:
|
||||
description: A map from language to absolute path for each database created by CodeQL.
|
||||
sarif-id:
|
||||
description: The ID of the uploaded SARIF file.
|
||||
runs:
|
||||
using: 'node12'
|
||||
main: '../lib/finalize-db.js'
|
||||
using: "node16"
|
||||
main: "../lib/analyze-action.js"
|
||||
|
||||
autobuild/action.yml
@@ -6,6 +6,12 @@ inputs:
|
||||
default: ${{ github.token }}
|
||||
matrix:
|
||||
default: ${{ toJson(matrix) }}
|
||||
working-directory:
|
||||
description: >-
|
||||
Run the autobuilder using this path (relative to $GITHUB_WORKSPACE) as
|
||||
working directory. If this input is not set, the autobuilder runs with
|
||||
$GITHUB_WORKSPACE as its working directory.
|
||||
required: false
|
||||
runs:
|
||||
using: 'node12'
|
||||
main: '../lib/autobuild.js'
|
||||
using: 'node16'
|
||||
main: '../lib/autobuild-action.js'
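For example, a repository whose buildable project lives in a subdirectory could point the autobuilder at it like this (the path is a placeholder):

```yaml
- name: Autobuild
  uses: github/codeql-action/autobuild@v2
  with:
    working-directory: services/api  # resolved relative to $GITHUB_WORKSPACE
```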
|
||||
|
||||
init/action.yml
@@ -1,5 +1,5 @@
|
||||
name: 'CodeQL: Init'
|
||||
description: 'Setup the CodeQL tracer'
|
||||
description: 'Set up CodeQL'
|
||||
author: 'GitHub'
|
||||
inputs:
|
||||
tools:
|
||||
@@ -16,6 +16,62 @@ inputs:
|
||||
config-file:
|
||||
description: Path of the config file to use
|
||||
required: false
|
||||
db-location:
|
||||
description: Path where CodeQL databases should be created. If not specified, a temporary directory will be used.
|
||||
required: false
|
||||
queries:
|
||||
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
|
||||
required: false
|
||||
packs:
|
||||
description: >-
|
||||
[Experimental] Comma-separated list of packs to run. Reference a pack in the format `scope/name[@version]`. If `version` is not
|
||||
specified, then the latest version of the pack is used. By default, this overrides the same setting in a
|
||||
configuration file; prefix with "+" to use both sets of packs.
|
||||
|
||||
This input is only available in single-language analyses. To use packs in multi-language
|
||||
analyses, you must specify packs in the codeql-config.yml file.
|
||||
required: false
|
||||
external-repository-token:
|
||||
description: A token for fetching external config files and queries if they reside in a private repository.
|
||||
required: false
|
||||
setup-python-dependencies:
|
||||
description: Try to auto-install your python dependencies
|
||||
required: true
|
||||
default: 'true'
|
||||
source-root:
|
||||
description: Path of the root source code directory, relative to $GITHUB_WORKSPACE.
|
||||
required: false
|
||||
ram:
|
||||
description: >-
|
||||
The amount of memory in MB that can be used by CodeQL extractors.
|
||||
By default, CodeQL extractors will use most of the memory available in the system
|
||||
(which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows, and 13GB for macOS).
|
||||
This input also sets the amount of memory that can later be used by the "analyze" action.
|
||||
required: false
|
||||
threads:
|
||||
description: >-
|
||||
The number of threads that can be used by CodeQL extractors.
|
||||
By default, CodeQL extractors will use all the hardware threads available in the system
|
||||
(which for GitHub-hosted runners is 2 for Linux and Windows and 3 for macOS).
|
||||
This input also sets the number of threads that can later be used by the "analyze" action.
|
||||
required: false
|
||||
debug:
|
||||
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
|
||||
required: false
|
||||
default: 'false'
|
||||
debug-artifact-name:
|
||||
description: >-
|
||||
The name of the artifact to store debugging information in.
|
||||
This is only used when debug mode is enabled.
|
||||
required: false
|
||||
debug-database-name:
|
||||
description: >-
|
||||
The name of the database uploaded to the debugging artifact.
|
||||
This is only used when debug mode is enabled.
|
||||
required: false
|
||||
outputs:
|
||||
codeql-path:
|
||||
description: The path of the CodeQL binary used for analysis
|
||||
runs:
|
||||
using: 'node12'
|
||||
main: '../lib/setup-tracer.js'
|
||||
using: 'node16'
|
||||
main: '../lib/init-action.js'
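As a rough sketch of how the new `init` inputs combine (the values and the database path are illustrative, not defaults or recommendations):

```yaml
- name: Initialize CodeQL
  uses: github/codeql-action/init@v2
  with:
    languages: javascript
    db-location: ${{ runner.temp }}/codeql-db  # assumption: any writable path works
    ram: 4096       # MB for the extractors; also becomes the default for the analyze step
    threads: 2      # extractor threads; also becomes the default for the analyze step
    debug: true
    debug-artifact-name: codeql-debug-artifacts  # only used while debug mode is enabled
```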
|
||||
|
||||
691 lib/actions-util.js generated Normal file
@@ -0,0 +1,691 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util_1 = require("./util");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
/**
|
||||
* The utils in this module are meant to be run inside of the action only.
|
||||
* Code paths from the runner should not enter this module.
|
||||
*/
|
||||
/**
|
||||
* Wrapper around core.getInput for inputs that always have a value.
|
||||
* Also see getOptionalInput.
|
||||
*
|
||||
* This allows us to get stronger type checking of required/optional inputs
|
||||
* and make behaviour more consistent between actions and the runner.
|
||||
*/
|
||||
function getRequiredInput(name) {
|
||||
return core.getInput(name, { required: true });
|
||||
}
|
||||
exports.getRequiredInput = getRequiredInput;
|
||||
/**
|
||||
* Wrapper around core.getInput that converts empty inputs to undefined.
|
||||
* Also see getRequiredInput.
|
||||
*
|
||||
* This allows us to get stronger type checking of required/optional inputs
|
||||
* and make behaviour more consistent between actions and the runner.
|
||||
*/
|
||||
const getOptionalInput = function (name) {
|
||||
const value = core.getInput(name);
|
||||
return value.length > 0 ? value : undefined;
|
||||
};
|
||||
exports.getOptionalInput = getOptionalInput;
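The practical difference between the two wrappers: core.getInput returns the empty string for an unset input, so getOptionalInput maps that case to undefined, while getRequiredInput makes the call fail instead. For example:

// With no "sha" input supplied:
core.getInput("sha");                     // ""
(0, exports.getOptionalInput)("sha");     // undefined
(0, exports.getRequiredInput)("sha");     // throws (input required and not supplied)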
|
||||
function getTemporaryDirectory() {
|
||||
const value = process.env["CODEQL_ACTION_TEMP"];
|
||||
return value !== undefined && value !== ""
|
||||
? value
|
||||
: (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
|
||||
}
|
||||
exports.getTemporaryDirectory = getTemporaryDirectory;
|
||||
function getToolCacheDirectory() {
|
||||
const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
|
||||
return value !== undefined && value !== ""
|
||||
? value
|
||||
: (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE");
|
||||
}
|
||||
exports.getToolCacheDirectory = getToolCacheDirectory;
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
||||
// Try to use git to get the current commit SHA. If that fails then
|
||||
// log but otherwise silently fall back to using the SHA from the environment.
|
||||
// The only time these two values will differ is during analysis of a PR when
|
||||
// the workflow has changed the current commit to the head commit instead of
|
||||
// the merge commit, which must mean that git is available.
|
||||
// Even if this does go wrong, it's not a huge problem for the alerts to
|
||||
// be reported on the merge commit.
|
||||
try {
|
||||
let commitOid = "";
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["rev-parse", ref], {
|
||||
silent: true,
|
||||
listeners: {
|
||||
stdout: (data) => {
|
||||
commitOid += data.toString();
|
||||
},
|
||||
stderr: (data) => {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}).exec();
|
||||
return commitOid.trim();
|
||||
}
|
||||
catch (e) {
|
||||
core.info(`Failed to call git to get current commit. Continuing with data from environment or input: ${e}`);
|
||||
core.info(e.stack || "NO STACK");
|
||||
return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
}
|
||||
};
|
||||
exports.getCommitOid = getCommitOid;
|
||||
/**
|
||||
* If the action was triggered by a pull request, determine the commit sha of the merge base.
|
||||
* Returns undefined if run by other triggers or the merge base cannot be determined.
|
||||
*/
|
||||
const determineMergeBaseCommitOid = async function () {
|
||||
if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
|
||||
return undefined;
|
||||
}
|
||||
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
const checkoutPath = (0, exports.getOptionalInput)("checkout_path");
|
||||
try {
|
||||
let commitOid = "";
|
||||
let baseOid = "";
|
||||
let headOid = "";
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["show", "-s", "--format=raw", mergeSha], {
|
||||
silent: true,
|
||||
listeners: {
|
||||
stdline: (data) => {
|
||||
if (data.startsWith("commit ") && commitOid === "") {
|
||||
commitOid = data.substring(7);
|
||||
}
|
||||
else if (data.startsWith("parent ")) {
|
||||
if (baseOid === "") {
|
||||
baseOid = data.substring(7);
|
||||
}
|
||||
else if (headOid === "") {
|
||||
headOid = data.substring(7);
|
||||
}
|
||||
}
|
||||
},
|
||||
stderr: (data) => {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}).exec();
|
||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||
if (commitOid === mergeSha &&
|
||||
headOid.length === 40 &&
|
||||
baseOid.length === 40) {
|
||||
return baseOid;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
catch (e) {
|
||||
core.info(`Failed to call git to determine merge base. Continuing with data from environment: ${e}`);
|
||||
core.info(e.stack || "NO STACK");
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
||||
function isObject(o) {
|
||||
return o !== null && typeof o === "object";
|
||||
}
|
||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
||||
function escapeRegExp(string) {
|
||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||
}
|
||||
function patternToRegExp(value) {
|
||||
return new RegExp(`^${value
|
||||
.toString()
|
||||
.split(GLOB_PATTERN)
|
||||
.reduce(function (arr, cur) {
|
||||
if (cur === "**") {
|
||||
arr.push(".*?");
|
||||
}
|
||||
else if (cur === "*") {
|
||||
arr.push("[^/]*?");
|
||||
}
|
||||
else if (cur) {
|
||||
arr.push(escapeRegExp(cur));
|
||||
}
|
||||
return arr;
|
||||
}, [])
|
||||
.join("")}$`);
|
||||
}
|
||||
// this function should return true if patternA is a superset of patternB
|
||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
||||
function patternIsSuperset(patternA, patternB) {
|
||||
return patternToRegExp(patternA).test(patternB);
|
||||
}
|
||||
exports.patternIsSuperset = patternIsSuperset;
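Concretely, ** can match across path separators while * cannot, so (mirroring the patternIsSuperset() tests further down):

patternIsSuperset("*", "main-*");     // true: anything "main-*" matches, "*" also matches
patternIsSuperset("main-*", "*");     // false
patternIsSuperset("*", "feature/*");  // false: "*" does not cross "/"
patternIsSuperset("a/**", "a/**/c");  // true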
|
||||
function branchesToArray(branches) {
|
||||
if (typeof branches === "string") {
|
||||
return [branches];
|
||||
}
|
||||
if (Array.isArray(branches)) {
|
||||
if (branches.length === 0) {
|
||||
return "**";
|
||||
}
|
||||
return branches;
|
||||
}
|
||||
return "**";
|
||||
}
|
||||
function toCodedErrors(errors) {
|
||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
||||
acc[key] = { message: value, code: key };
|
||||
return acc;
|
||||
}, {});
|
||||
}
|
||||
// code to send back via status report
|
||||
// message to add as a warning annotation to the run
|
||||
exports.WorkflowErrors = toCodedErrors({
|
||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||
});
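After toCodedErrors runs, each entry carries both pieces, e.g.:

exports.WorkflowErrors.MissingPushHook;
// => { message: "Please specify an on.push hook ...", code: "MissingPushHook" }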
|
||||
function getWorkflowErrors(doc) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
const errors = [];
|
||||
const jobName = process.env.GITHUB_JOB;
|
||||
if (jobName) {
|
||||
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
||||
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
||||
if (Array.isArray(steps)) {
|
||||
for (const step of steps) {
|
||||
// this was advice that we used to give in the README
|
||||
// we actually want to run the analysis on the merge commit
|
||||
// to produce results that are more in line with expectations
|
||||
// (i.e: this is what will happen if you merge this PR)
|
||||
// and avoid some race conditions
|
||||
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let missingPush = false;
|
||||
if (doc.on === undefined) {
|
||||
// this is not a valid config
|
||||
}
|
||||
else if (typeof doc.on === "string") {
|
||||
if (doc.on === "pull_request") {
|
||||
missingPush = true;
|
||||
}
|
||||
}
|
||||
else if (Array.isArray(doc.on)) {
|
||||
const hasPush = doc.on.includes("push");
|
||||
const hasPullRequest = doc.on.includes("pull_request");
|
||||
if (hasPullRequest && !hasPush) {
|
||||
missingPush = true;
|
||||
}
|
||||
}
|
||||
else if (isObject(doc.on)) {
|
||||
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
||||
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
||||
if (!hasPush && hasPullRequest) {
|
||||
missingPush = true;
|
||||
}
|
||||
if (hasPush && hasPullRequest) {
|
||||
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||
// if they didn't change any files
|
||||
// currently we cannot go back through the history and find the most recent baseline
|
||||
if (Array.isArray(paths) && paths.length > 0) {
|
||||
errors.push(exports.WorkflowErrors.PathsSpecified);
|
||||
}
|
||||
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
||||
}
|
||||
}
|
||||
// if doc.on.pull_request is null that means 'all branches'
|
||||
// if doc.on.pull_request is undefined that means 'off'
|
||||
// we only want to check for mismatched branches if pull_request is on.
|
||||
if (doc.on.pull_request !== undefined) {
|
||||
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
||||
if (push !== "**") {
|
||||
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
||||
if (pull_request !== "**") {
|
||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
||||
if (difference.length > 0) {
|
||||
// there are branches in pull_request that may not have a baseline
|
||||
// because we are not building them on push
|
||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||
}
|
||||
}
|
||||
else if (push.length > 0) {
|
||||
// push is set up to run on a subset of branches
|
||||
// and you could open a PR against a branch with no baseline
|
||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (missingPush) {
|
||||
errors.push(exports.WorkflowErrors.MissingPushHook);
|
||||
}
|
||||
return errors;
|
||||
}
|
||||
exports.getWorkflowErrors = getWorkflowErrors;
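For example (as in the tests below), a workflow whose pull_request branches are not all covered by its push branches is flagged, because pull requests against the uncovered branches would have no baseline to compare against:

getWorkflowErrors({
  on: {
    push: { branches: ["main"] },
    pull_request: { branches: ["main", "feature"] },
  },
});
// => [exports.WorkflowErrors.MismatchedBranches]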
|
||||
async function validateWorkflow() {
|
||||
let workflow;
|
||||
try {
|
||||
workflow = await getWorkflow();
|
||||
}
|
||||
catch (e) {
|
||||
return `error: getWorkflow() failed: ${String(e)}`;
|
||||
}
|
||||
let workflowErrors;
|
||||
try {
|
||||
workflowErrors = getWorkflowErrors(workflow);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
||||
}
|
||||
if (workflowErrors.length > 0) {
|
||||
let message;
|
||||
try {
|
||||
message = formatWorkflowErrors(workflowErrors);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
||||
}
|
||||
core.warning(message);
|
||||
}
|
||||
return formatWorkflowCause(workflowErrors);
|
||||
}
|
||||
exports.validateWorkflow = validateWorkflow;
|
||||
function formatWorkflowErrors(errors) {
|
||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||
const errorsList = errors.map((e) => e.message).join(" ");
|
||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
||||
}
|
||||
exports.formatWorkflowErrors = formatWorkflowErrors;
|
||||
function formatWorkflowCause(errors) {
|
||||
if (errors.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return errors.map((e) => e.code).join(",");
|
||||
}
|
||||
exports.formatWorkflowCause = formatWorkflowCause;
|
||||
async function getWorkflow() {
|
||||
const relativePath = await getWorkflowPath();
|
||||
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
|
||||
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
||||
}
|
||||
exports.getWorkflow = getWorkflow;
|
||||
/**
|
||||
* Get the path of the currently executing workflow.
|
||||
*/
|
||||
async function getWorkflowPath() {
|
||||
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
||||
const owner = repo_nwo[0];
|
||||
const repo = repo_nwo[1];
|
||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||
const apiClient = api.getActionsApiClient();
|
||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||
owner,
|
||||
repo,
|
||||
run_id,
|
||||
});
|
||||
const workflowUrl = runsResponse.data.workflow_url;
|
||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
||||
return workflowResponse.data.path;
|
||||
}
|
||||
/**
|
||||
* Get the workflow run ID.
|
||||
*/
|
||||
function getWorkflowRunID() {
|
||||
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
||||
if (Number.isNaN(workflowRunID)) {
|
||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||
}
|
||||
return workflowRunID;
|
||||
}
|
||||
exports.getWorkflowRunID = getWorkflowRunID;
|
||||
/**
|
||||
* Get the analysis key parameter for the current job.
|
||||
*
|
||||
* This will combine the workflow path and current job name.
|
||||
* Computing this the first time requires making requests to
|
||||
* the github API, but after that the result will be cached.
|
||||
*/
|
||||
async function getAnalysisKey() {
|
||||
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
|
||||
let analysisKey = process.env[analysisKeyEnvVar];
|
||||
if (analysisKey !== undefined) {
|
||||
return analysisKey;
|
||||
}
|
||||
const workflowPath = await getWorkflowPath();
|
||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||
analysisKey = `${workflowPath}:${jobName}`;
|
||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||
return analysisKey;
|
||||
}
|
||||
exports.getAnalysisKey = getAnalysisKey;
|
||||
async function getAutomationID() {
|
||||
const analysis_key = await getAnalysisKey();
|
||||
const environment = getRequiredInput("matrix");
|
||||
return computeAutomationID(analysis_key, environment);
|
||||
}
|
||||
exports.getAutomationID = getAutomationID;
|
||||
function computeAutomationID(analysis_key, environment) {
|
||||
let automationID = `${analysis_key}/`;
|
||||
// the id has to be deterministic so we sort the fields
|
||||
if (environment !== undefined && environment !== "null") {
|
||||
const environmentObject = JSON.parse(environment);
|
||||
for (const entry of Object.entries(environmentObject).sort()) {
|
||||
if (typeof entry[1] === "string") {
|
||||
automationID += `${entry[0]}:${entry[1]}/`;
|
||||
}
|
||||
else {
|
||||
// In code scanning we just handle the string values,
|
||||
// the rest get converted to the empty string
|
||||
automationID += `${entry[0]}:/`;
|
||||
}
|
||||
}
|
||||
}
|
||||
return automationID;
|
||||
}
|
||||
exports.computeAutomationID = computeAutomationID;
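Because the matrix entries are sorted, the resulting ID is stable regardless of key order; for example (matching the computeAutomationID() test further down):

computeAutomationID(
  ".github/workflows/codeql-analysis.yml:analyze",
  '{"os": "linux", "language": "javascript"}'
);
// => ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/"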
|
||||
/**
|
||||
* Get the ref currently being analyzed.
|
||||
*/
|
||||
async function getRef() {
|
||||
// Will be in the form "refs/heads/master" on a push event
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const refInput = (0, exports.getOptionalInput)("ref");
|
||||
const shaInput = (0, exports.getOptionalInput)("sha");
|
||||
const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
|
||||
(0, exports.getOptionalInput)("source-root") ||
|
||||
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
|
||||
const hasRefInput = !!refInput;
|
||||
const hasShaInput = !!shaInput;
|
||||
// If one of 'ref' or 'sha' are provided, both are required
|
||||
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
|
||||
throw new Error("Both 'ref' and 'sha' are required if one of them is provided.");
|
||||
}
|
||||
const ref = refInput || (0, util_1.getRequiredEnvParam)("GITHUB_REF");
|
||||
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
// If the ref is a user-provided input, we have to skip logic
|
||||
// and assume that it is really where they want to upload the results.
|
||||
if (refInput) {
|
||||
return refInput;
|
||||
}
|
||||
// For pull request refs we want to detect whether the workflow
|
||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||
// than the 'merge' ref. If so, we want to convert the ref that
|
||||
// we report back.
|
||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
||||
if (!pull_ref_regex.test(ref)) {
|
||||
return ref;
|
||||
}
|
||||
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
|
||||
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
|
||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||
// using GITHUB_REF. There is a subtle race condition where
|
||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||
const hasChangedRef = sha !== head &&
|
||||
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
|
||||
if (hasChangedRef) {
|
||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
||||
return newRef;
|
||||
}
|
||||
else {
|
||||
return ref;
|
||||
}
|
||||
}
|
||||
exports.getRef = getRef;
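The rewrite at the end only changes the final path segment, so a pull request merge ref maps onto the corresponding head ref:

"refs/pull/1/merge".replace(/refs\/pull\/(\d+)\/merge/, "refs/pull/$1/head");
// => "refs/pull/1/head"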
|
||||
function getActionsStatus(error, otherFailureCause) {
|
||||
if (error || otherFailureCause) {
|
||||
return error instanceof util_1.UserError ? "user-error" : "failure";
|
||||
}
|
||||
else {
|
||||
return "success";
|
||||
}
|
||||
}
|
||||
exports.getActionsStatus = getActionsStatus;
|
||||
/**
|
||||
* Compose a StatusReport.
|
||||
*
|
||||
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
|
||||
* @param status The status. Must be 'success', 'failure', or 'starting'
|
||||
* @param startedAt The time this action started executing.
|
||||
* @param cause Cause of failure (only supply if status is 'failure')
|
||||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
|
||||
const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
|
||||
const ref = await getRef();
|
||||
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
|
||||
let workflowRunID = -1;
|
||||
if (workflowRunIDStr) {
|
||||
workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
}
|
||||
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
|
||||
const jobName = process.env["GITHUB_JOB"] || "";
|
||||
const analysis_key = await getAnalysisKey();
|
||||
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
|
||||
if (workflowStartedAt === undefined) {
|
||||
workflowStartedAt = actionStartedAt.toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
|
||||
}
|
||||
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
|
||||
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
|
||||
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
|
||||
// See https://github.com/actions/runner/issues/803
|
||||
const actionRef = isRunningLocalAction()
|
||||
? undefined
|
||||
: process.env["GITHUB_ACTION_REF"];
|
||||
const statusReport = {
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
analysis_key,
|
||||
commit_oid: commitOid,
|
||||
ref,
|
||||
action_name: actionName,
|
||||
action_ref: actionRef,
|
||||
action_oid: "unknown",
|
||||
started_at: workflowStartedAt,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
status,
|
||||
runner_os: runnerOs,
|
||||
action_version: pkg.version,
|
||||
};
|
||||
// Add optional parameters
|
||||
if (cause) {
|
||||
statusReport.cause = cause;
|
||||
}
|
||||
if (exception) {
|
||||
statusReport.exception = exception;
|
||||
}
|
||||
if (status === "success" ||
|
||||
status === "failure" ||
|
||||
status === "aborted" ||
|
||||
status === "user-error") {
|
||||
statusReport.completed_at = new Date().toISOString();
|
||||
}
|
||||
const matrix = getRequiredInput("matrix");
|
||||
if (matrix) {
|
||||
statusReport.matrix_vars = matrix;
|
||||
}
|
||||
if ("RUNNER_ARCH" in process.env) {
|
||||
// RUNNER_ARCH is available only in GHES 3.4 and later
|
||||
// Values other than X86, X64, ARM, or ARM64 are discarded server side
|
||||
statusReport.runner_arch = process.env["RUNNER_ARCH"];
|
||||
}
|
||||
if (runnerOs === "Windows" || runnerOs === "macOS") {
|
||||
statusReport.runner_os_release = os.release();
|
||||
}
|
||||
if (codeQlCliVersion !== undefined) {
|
||||
statusReport.codeql_version = codeQlCliVersion;
|
||||
}
|
||||
return statusReport;
|
||||
}
|
||||
exports.createStatusReportBase = createStatusReportBase;
|
||||
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
||||
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
|
||||
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
||||
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
|
||||
/**
|
||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||
*
|
||||
* Optionally checks the response from the API endpoint and sets the action
|
||||
* as failed if the status report failed. This is only expected to be used
|
||||
* when sending a 'starting' report.
|
||||
*
|
||||
* Returns whether sending the status report was successful or not.
|
||||
*/
|
||||
async function sendStatusReport(statusReport) {
|
||||
const gitHubVersion = await api.getGitHubVersionActionsOnly();
|
||||
if ((0, util_1.isGitHubGhesVersionBelow)(gitHubVersion, "3.2.0")) {
|
||||
// GHES 3.1 and earlier versions reject unexpected properties, which means
|
||||
// that they will reject status reports with newly added properties.
|
||||
// Inhibiting status reporting for GHES < 3.2 avoids such failures.
|
||||
return true;
|
||||
}
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||
// If in test mode we don't want to upload the results
|
||||
if ((0, util_1.isInTestMode)()) {
|
||||
core.debug("In test mode. Status reports are not uploaded.");
|
||||
return true;
|
||||
}
|
||||
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
|
||||
const [owner, repo] = nwo.split("/");
|
||||
const client = api.getActionsApiClient();
|
||||
try {
|
||||
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
|
||||
owner,
|
||||
repo,
|
||||
data: statusReportJSON,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
catch (e) {
|
||||
console.log(e);
|
||||
if ((0, util_1.isHTTPError)(e)) {
|
||||
switch (e.status) {
|
||||
case 403:
|
||||
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
|
||||
core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
|
||||
"Uploading Code Scanning results requires write access. " +
|
||||
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
|
||||
"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.");
|
||||
}
|
||||
else {
|
||||
core.setFailed(e.message || GENERIC_403_MSG);
|
||||
}
|
||||
return false;
|
||||
case 404:
|
||||
core.setFailed(GENERIC_404_MSG);
|
||||
return false;
|
||||
case 422:
|
||||
// schema incompatibility when reporting status
|
||||
// this means that this action version is no longer compatible with the API
|
||||
// we still want to continue as it is likely the analysis endpoint will work
|
||||
if ((0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
||||
core.debug(INCOMPATIBLE_MSG);
|
||||
}
|
||||
else {
|
||||
core.debug(OUT_OF_DATE_MSG);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// something else has gone wrong and the request/response will be logged by octokit
|
||||
// it's possible this is a transient error and we should continue scanning
|
||||
core.error("An unexpected error occurred when sending code scanning status report.");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
exports.sendStatusReport = sendStatusReport;
|
||||
// Was the workflow run triggered by a `push` event, for example as opposed to a `pull_request` event.
|
||||
function workflowIsTriggeredByPushEvent() {
|
||||
return process.env["GITHUB_EVENT_NAME"] === "push";
|
||||
}
|
||||
// Is dependabot the actor that triggered the current workflow run.
|
||||
function isDependabotActor() {
|
||||
return process.env["GITHUB_ACTOR"] === "dependabot[bot]";
|
||||
}
|
||||
// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
|
||||
// as opposed to running a remote action (i.e. when another repo references us)
|
||||
function isRunningLocalAction() {
|
||||
const relativeScriptPath = getRelativeScriptPath();
|
||||
return (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath));
|
||||
}
|
||||
exports.isRunningLocalAction = isRunningLocalAction;
|
||||
// Get the location where the action is running from.
|
||||
// This can be used to get the actions name or tell if we're running a local action.
|
||||
function getRelativeScriptPath() {
|
||||
const runnerTemp = (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
|
||||
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
||||
return path.relative(actionsDirectory, __filename);
|
||||
}
|
||||
exports.getRelativeScriptPath = getRelativeScriptPath;
|
||||
// Reads the contents of GITHUB_EVENT_PATH as a JSON object
|
||||
function getWorkflowEvent() {
|
||||
const eventJsonFile = (0, util_1.getRequiredEnvParam)("GITHUB_EVENT_PATH");
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(eventJsonFile, "utf-8"));
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
|
||||
}
|
||||
}
|
||||
// Is the version of the repository we are currently analyzing from the default branch,
|
||||
// or alternatively from another branch or a pull request.
|
||||
async function isAnalyzingDefaultBranch() {
|
||||
var _a;
|
||||
// Get the current ref and trim the refs/heads/ prefix
|
||||
let currentRef = await getRef();
|
||||
currentRef = currentRef.startsWith("refs/heads/")
|
||||
? currentRef.slice("refs/heads/".length)
|
||||
: currentRef;
|
||||
const event = getWorkflowEvent();
|
||||
const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
|
||||
return currentRef === defaultBranch;
|
||||
}
|
||||
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
|
||||
function sanitizeArifactName(name) {
|
||||
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
||||
}
|
||||
exports.sanitizeArifactName = sanitizeArifactName;
|
||||
//# sourceMappingURL=actions-util.js.map
1
lib/actions-util.js.map
Normal file
File diff suppressed because one or more lines are too long
506
lib/actions-util.test.js
generated
Normal file
@@ -0,0 +1,506 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsutil = __importStar(require("./actions-util"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
function errorCodes(actual, expected) {
|
||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||
}
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
||||
process.env["GITHUB_REF"] = "";
|
||||
await t.throwsAsync(actionsutil.getRef);
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("HEAD").resolves(currentSha);
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||
const sha = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||
callback.withArgs("HEAD").resolves(sha);
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
// These values are ignored
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
||||
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
||||
// check the environment sorting
|
||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"os": "linux", "language": "javascript"}');
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
||||
// check that an empty environment produces the right results
|
||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", "{}");
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
||||
// check non string environment values
|
||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"number": 1, "object": {"language": "javascript"}}');
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/number:/object:/");
|
||||
// check undefined environment
|
||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request", "schedule"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"], paths: ["test/*"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["feature"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main", "feature"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["main", "feature"] },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: 1,
|
||||
pull_request: 1,
|
||||
},
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: 1,
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: [1],
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { 1: 1 },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: 1 },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [1] },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: 1 } },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [undefined] },
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: {
|
||||
branches: 1,
|
||||
},
|
||||
pull_request: {
|
||||
branches: 1,
|
||||
},
|
||||
},
|
||||
}), []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/*"] },
|
||||
pull_request: { branches: "feature/moose" },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/moose"] },
|
||||
pull_request: { branches: "feature/*" },
|
||||
},
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
|
||||
const message = actionsutil.formatWorkflowErrors([
|
||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
||||
]);
|
||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
|
||||
const message = actionsutil.formatWorkflowErrors([
|
||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
||||
actionsutil.WorkflowErrors.PathsSpecified,
|
||||
]);
|
||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
|
||||
const message = actionsutil.formatWorkflowCause([]);
|
||||
t.deepEqual(message, undefined);
|
||||
});
|
||||
(0, ava_1.default)("formatWorkflowCause()", (t) => {
|
||||
const message = actionsutil.formatWorkflowCause([
|
||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
||||
actionsutil.WorkflowErrors.PathsSpecified,
|
||||
]);
|
||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
||||
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
|
||||
});
|
||||
(0, ava_1.default)("patternIsSuperset()", (t) => {
|
||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
||||
t.true(actionsutil.patternIsSuperset("*", "*"));
|
||||
t.true(actionsutil.patternIsSuperset("*", "main-*"));
|
||||
t.false(actionsutil.patternIsSuperset("main-*", "*"));
|
||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
||||
t.true(actionsutil.patternIsSuperset("main", "main"));
|
||||
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
|
||||
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
|
||||
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
||||
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
||||
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
|
||||
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
on:
|
||||
push:
|
||||
branches: [4.1, master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [4.1, master]
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master, ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test2:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test3:
|
||||
steps: []
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test3";
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [master]
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test2:
|
||||
steps:
|
||||
- run: "git checkout HEAD^2"
|
||||
|
||||
test3:
|
||||
steps: []
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: "workflow_dispatch"
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: [workflow_dispatch]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
workflow_dispatch: {}
|
||||
`)), []));
|
||||
});
|
||||
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||
name: "CodeQL"
|
||||
on: ["push"]
|
||||
`)), []));
|
||||
});
|
||||
(0, ava_1.default)("initializeEnvironment", (t) => {
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||
t.deepEqual((0, util_1.getMode)(), util_1.Mode.actions);
|
||||
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.runner, "4.5.6");
|
||||
t.deepEqual((0, util_1.getMode)(), util_1.Mode.runner);
|
||||
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "4.5.6");
|
||||
});
|
||||
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const envFile = path.join(tmpDir, "event.json");
|
||||
fs.writeFileSync(envFile, JSON.stringify({
|
||||
repository: {
|
||||
default_branch: "main",
|
||||
},
|
||||
}));
|
||||
process.env["GITHUB_EVENT_PATH"] = envFile;
|
||||
process.env["GITHUB_REF"] = "main";
|
||||
process.env["GITHUB_SHA"] = "1234";
|
||||
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
|
||||
process.env["GITHUB_REF"] = "feature";
|
||||
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("sanitizeArifactName", (t) => {
|
||||
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
|
||||
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
|
||||
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
|
||||
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
|
||||
});
|
||||
//# sourceMappingURL=actions-util.test.js.map
1
lib/actions-util.test.js.map
Normal file
File diff suppressed because one or more lines are too long
70
lib/analysis-paths.js
generated
@@ -1,28 +1,50 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
exports.includeAndExcludeAnalysisPaths = exports.printPathFiltersWarning = exports.legalWindowsPathCharactersRegex = void 0;
const path = __importStar(require("path"));
function isInterpretedLanguage(language) {
return language === 'javascript' || language === 'python';
return (language === "javascript" || language === "python" || language === "ruby");
}
// Matches a string containing only characters that are legal to include in paths on windows.
exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
exports.legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths) {
// Ignore anything containing a *
paths = paths.filter(p => p.indexOf('*') === -1);
paths = paths.filter((p) => p.indexOf("*") === -1);
// Some characters are illegal in path names in windows
if (process.platform === 'win32') {
paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
if (process.platform === "win32") {
paths = paths.filter((p) => p.match(exports.legalWindowsPathCharactersRegex));
}
return paths.join('\n');
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript/python/ruby.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
function includeAndExcludeAnalysisPaths(config) {
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
// control which files/directories are traversed when scanning.
@@ -32,27 +54,29 @@ function includeAndExcludeAnalysisPaths(config) {
// traverse the entire file tree to determine which files are matched.
// Any paths containing "*" are not included in these.
if (config.paths.length !== 0) {
core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths);
}
if (config.pathsIgnore.length !== 0) {
core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
// If the temporary or tools directory is in the working directory ignore that too.
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
let pathsIgnore = config.pathsIgnore;
if (!tempRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
}
if (!toolsRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
}
if (pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
}
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
// extracted or ignored. It does not control which directories are traversed.
// This does understand the glob and double-glob syntax.
const filters = [];
filters.push(...config.paths.map(p => 'include:' + p));
filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
filters.push(...config.paths.map((p) => `include:${p}`));
filters.push(...config.pathsIgnore.map((p) => `exclude:${p}`));
if (filters.length !== 0) {
core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
}
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 ||
config.pathsIgnore.length !== 0 ||
filters.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
process.env["LGTM_INDEX_FILTERS"] = filters.join("\n");
}
}
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
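As a quick orientation for the change above (old core.exportVariable lines sit next to the new process.env lines), here is a minimal usage sketch of the new behaviour. It is illustrative only and not part of the diff: the config literal carries just the fields the function reads, the temp/tool-cache locations are hypothetical paths assumed to lie outside the checkout (so they are not appended to the exclude list), and the expected values mirror the nonEmptyPaths test further down.

// Illustrative sketch (not repository code); field values are hypothetical.
const analysisPaths = require("./analysis-paths");
const config = {
  languages: ["javascript"],
  paths: ["path1", "path2", "**/path3"],
  pathsIgnore: ["path4", "path5", "path6/**"],
  tempDir: "/tmp/codeql-action-temp",       // assumed to be outside process.cwd(), so it is
  toolCacheDir: "/tmp/codeql-action-tools", // not added to LGTM_INDEX_EXCLUDE
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
// Paths containing "*" are dropped from INCLUDE/EXCLUDE but kept in FILTERS:
// process.env.LGTM_INDEX_INCLUDE === "path1\npath2"
// process.env.LGTM_INDEX_EXCLUDE === "path4\npath5"
// process.env.LGTM_INDEX_FILTERS === "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**"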
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
124
lib/analysis-paths.test.js
generated
@@ -1,43 +1,107 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
testing_utils_1.setupTests(ava_1.default);
|
||||
ava_1.default("emptyPaths", async (t) => {
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
|
||||
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
|
||||
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
|
||||
const util = __importStar(require("./util"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("emptyPaths", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
t.is(process.env["LGTM_INDEX_EXCLUDE"], undefined);
|
||||
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
|
||||
});
|
||||
});
|
||||
ava_1.default("nonEmptyPaths", async (t) => {
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
paths: ['path1', 'path2', '**/path3'],
|
||||
pathsIgnore: ['path4', 'path5', 'path6/**'],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
|
||||
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
|
||||
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
|
||||
(0, ava_1.default)("nonEmptyPaths", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
paths: ["path1", "path2", "**/path3"],
|
||||
pathsIgnore: ["path4", "path5", "path6/**"],
|
||||
originalUserInput: {},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
||||
t.is(process.env["LGTM_INDEX_EXCLUDE"], "path4\npath5");
|
||||
t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("exclude temp dir", async (t) => {
|
||||
return await util.withTmpDir(async (toolCacheDir) => {
|
||||
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||
dbLocation: path.resolve(tempDir, "codeql_databases"),
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
|
||||
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=analysis-paths.test.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QAC3C,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
82
lib/analyze-action-env.test.js
generated
Normal file
@@ -0,0 +1,82 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const analyze = __importStar(require("./analyze"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
// This test needs to be in its own file so that ava would run it in its own
|
||||
// nodejs process. The code being tested is in analyze-action.ts, which runs
|
||||
// immediately on load. So the file needs to be loaded during part of the test,
|
||||
// and that can happen only once per nodejs process. If multiple such tests are
|
||||
// in the same test file, ava would run them in the same nodejs process, and all
|
||||
// but the first test would fail.
|
||||
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
sinon
|
||||
.stub(actionsUtil, "createStatusReportBase")
|
||||
.resolves({});
|
||||
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
||||
const gitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
};
|
||||
sinon.stub(configUtils, "getConfig").resolves({
|
||||
gitHubVersion,
|
||||
languages: [],
|
||||
packs: [],
|
||||
});
|
||||
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
requiredInputStub.withArgs("token").returns("fake-token");
|
||||
requiredInputStub.withArgs("upload-database").returns("false");
|
||||
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||
// When there are no action inputs for RAM and threads, the action uses
|
||||
// environment variables (passed down from the init action) to set RAM and
|
||||
// threads usage.
|
||||
process.env["CODEQL_THREADS"] = "-1";
|
||||
process.env["CODEQL_RAM"] = "4992";
|
||||
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
|
||||
const runQueriesStub = sinon.stub(analyze, "runQueries");
|
||||
const analyzeAction = require("./analyze-action");
|
||||
// When analyze-action.ts loads, it runs an async function from the top
|
||||
// level but does not wait for it to finish. To ensure that calls to
|
||||
// runFinalize and runQueries are correctly captured by spies, we explicitly
|
||||
// wait for the action promise to complete before starting verification.
|
||||
await analyzeAction.runPromise;
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=analyze-action-env.test.js.map
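The two comments in this test file describe a load-time side effect pattern that recurs in the next file as well; the sketch below distills it with hypothetical file and function names (it is not repository code):

// module-with-side-effects.js (hypothetical): work starts as soon as the module loads,
// and the resulting promise is exported so callers can await completion.
async function doWork() {
  // stand-in for the real load-time work (finalize databases, run queries, upload)
  return "done";
}
exports.runPromise = doWork();

// some.test.js (hypothetical): requiring the module triggers the work; awaiting the
// exported promise makes it safe to assert on stubs afterwards. Each such test lives
// in its own file because the module can only be loaded once per Node.js process.
// const mod = require("./module-with-side-effects");
// await mod.runPromise;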
|
||||
1
lib/analyze-action-env.test.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
82
lib/analyze-action-input.test.js
generated
Normal file
@@ -0,0 +1,82 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const analyze = __importStar(require("./analyze"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
// This test needs to be in its own file so that ava would run it in its own
|
||||
// nodejs process. The code being tested is in analyze-action.ts, which runs
|
||||
// immediately on load. So the file needs to be loaded during part of the test,
|
||||
// and that can happen only once per nodejs process. If multiple such tests are
|
||||
// in the same test file, ava would run them in the same nodejs process, and all
|
||||
// but the first test would fail.
|
||||
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
sinon
|
||||
.stub(actionsUtil, "createStatusReportBase")
|
||||
.resolves({});
|
||||
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
||||
const gitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
};
|
||||
sinon.stub(configUtils, "getConfig").resolves({
|
||||
gitHubVersion,
|
||||
languages: [],
|
||||
packs: [],
|
||||
});
|
||||
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
requiredInputStub.withArgs("token").returns("fake-token");
|
||||
requiredInputStub.withArgs("upload-database").returns("false");
|
||||
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||
process.env["CODEQL_THREADS"] = "1";
|
||||
process.env["CODEQL_RAM"] = "4992";
|
||||
// Action inputs have precedence over environment variables.
|
||||
optionalInputStub.withArgs("threads").returns("-1");
|
||||
optionalInputStub.withArgs("ram").returns("3012");
|
||||
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
|
||||
const runQueriesStub = sinon.stub(analyze, "runQueries");
|
||||
const analyzeAction = require("./analyze-action");
|
||||
// When analyze-action.ts loads, it runs an async function from the top
|
||||
// level but does not wait for it to finish. To ensure that calls to
|
||||
// runFinalize and runQueries are correctly captured by spies, we explicitly
|
||||
// wait for the action promise to complete before starting verification.
|
||||
await analyzeAction.runPromise;
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=analyze-action-input.test.js.map
|
||||
1
lib/analyze-action-input.test.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
225
lib/analyze-action.js
generated
Normal file
@@ -0,0 +1,225 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.runPromise = exports.sendStatusReport = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const artifact = __importStar(require("@actions/artifact"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const analyze_1 = require("./analyze");
|
||||
const codeql_1 = require("./codeql");
|
||||
const config_utils_1 = require("./config-utils");
|
||||
const database_upload_1 = require("./database-upload");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
async function sendStatusReport(startedAt, config, stats, error) {
|
||||
const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
|
||||
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
...(config
|
||||
? {
|
||||
ml_powered_javascript_queries: util.getMlPoweredJsQueriesStatus(config),
|
||||
}
|
||||
: {}),
|
||||
...(stats || {}),
|
||||
};
|
||||
await actionsUtil.sendStatusReport(statusReport);
|
||||
}
|
||||
exports.sendStatusReport = sendStatusReport;
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let uploadResult = undefined;
|
||||
let runStats = undefined;
|
||||
let config = undefined;
|
||||
util.initializeEnvironment(util.Mode.actions, pkg.version);
|
||||
await util.checkActionVersion(pkg.version);
|
||||
try {
|
||||
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
|
||||
return;
|
||||
}
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||
if (config === undefined) {
|
||||
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
|
||||
}
|
||||
await util.enrichEnvironment(util.Mode.actions, await (0, codeql_1.getCodeQL)(config.codeQLCmd));
|
||||
const apiDetails = {
|
||||
auth: actionsUtil.getRequiredInput("token"),
|
||||
url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
|
||||
};
|
||||
const outputDir = actionsUtil.getRequiredInput("output");
|
||||
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
|
||||
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
|
||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
|
||||
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
|
||||
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
|
||||
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
|
||||
if (config.debugMode) {
|
||||
// Upload the SARIF files as an Actions artifact for debugging
|
||||
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
|
||||
}
|
||||
}
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
if (config.debugMode) {
|
||||
// Upload the logs as an Actions artifact for debugging
|
||||
const toUpload = [];
|
||||
for (const language of config.languages) {
|
||||
toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
|
||||
}
|
||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
||||
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
|
||||
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
||||
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
|
||||
}
|
||||
}
|
||||
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
|
||||
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
|
||||
}
|
||||
const dbLocations = {};
|
||||
for (const language of config.languages) {
|
||||
dbLocations[language] = util.getCodeQLDatabasePath(config, language);
|
||||
}
|
||||
core.setOutput("db-locations", dbLocations);
|
||||
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
||||
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
}
|
||||
else {
|
||||
logger.info("Not uploading results");
|
||||
}
|
||||
// Possibly upload the database bundles for remote queries
|
||||
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
|
||||
// We don't upload results in test mode, so don't wait for processing
|
||||
if (util.isInTestMode()) {
|
||||
core.debug("In test mode. Waiting for processing is disabled.");
|
||||
}
|
||||
else if (uploadResult !== undefined &&
|
||||
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
|
||||
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
|
||||
}
|
||||
}
|
||||
catch (origError) {
|
||||
const error = origError instanceof Error ? origError : new Error(String(origError));
|
||||
core.setFailed(error.message);
|
||||
console.log(error);
|
||||
if (error instanceof analyze_1.CodeQLAnalysisError) {
|
||||
const stats = { ...error.queriesStatusReport };
|
||||
await sendStatusReport(startedAt, config, stats, error);
|
||||
}
|
||||
else {
|
||||
await sendStatusReport(startedAt, config, undefined, error);
|
||||
}
|
||||
return;
|
||||
}
|
||||
finally {
|
||||
if (config !== undefined && config.debugMode) {
|
||||
try {
|
||||
// Upload the database bundles as an Actions artifact for debugging
|
||||
const toUpload = [];
|
||||
for (const language of config.languages) {
|
||||
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
||||
}
|
||||
catch (error) {
|
||||
console.log(`Failed to upload database debug bundles: ${error}`);
|
||||
}
|
||||
}
|
||||
if (core.isDebug() && config !== undefined) {
|
||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = util.getCodeQLDatabasePath(config, language);
|
||||
const logsDirectory = path.join(databaseDirectory, "log");
|
||||
const walkLogFiles = (dir) => {
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.isFile()) {
|
||||
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
||||
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
||||
core.endGroup();
|
||||
}
|
||||
else if (entry.isDirectory()) {
|
||||
walkLogFiles(path.resolve(dir, entry.name));
|
||||
}
|
||||
}
|
||||
};
|
||||
walkLogFiles(logsDirectory);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (runStats && uploadResult) {
|
||||
await sendStatusReport(startedAt, config, {
|
||||
...runStats,
|
||||
...uploadResult.statusReport,
|
||||
});
|
||||
}
|
||||
else if (runStats) {
|
||||
await sendStatusReport(startedAt, config, { ...runStats });
|
||||
}
|
||||
else {
|
||||
await sendStatusReport(startedAt, config, undefined);
|
||||
}
|
||||
}
|
||||
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
|
||||
let suffix = "";
|
||||
const matrix = actionsUtil.getRequiredInput("matrix");
|
||||
if (matrix !== undefined && matrix !== "null") {
|
||||
for (const entry of Object.entries(JSON.parse(matrix)).sort())
|
||||
suffix += `-${entry[1]}`;
|
||||
}
|
||||
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
|
||||
}
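A note on the artifact naming above, using a hypothetical matrix value that is not taken from the diff:

// If the "matrix" input were '{"language":"javascript","os":"ubuntu-latest"}' (hypothetical),
// Object.entries(JSON.parse(matrix)).sort() orders the entries by key and only the values
// are appended, so the uploaded artifact would be named `${artifactName}-javascript-ubuntu-latest`.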
|
||||
function listFolder(dir) {
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
const files = [];
|
||||
for (const entry of entries) {
|
||||
if (entry.isFile()) {
|
||||
files.push(path.resolve(dir, entry.name));
|
||||
}
|
||||
else if (entry.isDirectory()) {
|
||||
files.push(...listFolder(path.resolve(dir, entry.name)));
|
||||
}
|
||||
}
|
||||
return files;
|
||||
}
|
||||
exports.runPromise = run();
|
||||
async function runWrapper() {
|
||||
try {
|
||||
await exports.runPromise;
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(`analyze action failed: ${error}`);
|
||||
console.log(error);
|
||||
}
|
||||
}
|
||||
void runWrapper();
|
||||
//# sourceMappingURL=analyze-action.js.map
|
||||
1
lib/analyze-action.js.map
Normal file
File diff suppressed because one or more lines are too long
302
lib/analyze.js
generated
Normal file
@@ -0,0 +1,302 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.CodeQLAnalysisError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const count_loc_1 = require("./count-loc");
|
||||
const languages_1 = require("./languages");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const tracer_config_1 = require("./tracer-config");
|
||||
const util = __importStar(require("./util"));
|
||||
class CodeQLAnalysisError extends Error {
|
||||
constructor(queriesStatusReport, message) {
|
||||
super(message);
|
||||
this.name = "CodeQLAnalysisError";
|
||||
this.queriesStatusReport = queriesStatusReport;
|
||||
}
|
||||
}
|
||||
exports.CodeQLAnalysisError = CodeQLAnalysisError;
|
||||
async function setupPythonExtractor(logger) {
|
||||
const codeqlPython = process.env["CODEQL_PYTHON"];
|
||||
if (codeqlPython === undefined || codeqlPython.length === 0) {
|
||||
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
|
||||
return;
|
||||
}
|
||||
let output = "";
|
||||
const options = {
|
||||
listeners: {
|
||||
stdout: (data) => {
|
||||
output += data.toString();
|
||||
},
|
||||
},
|
||||
};
|
||||
await new toolrunner.ToolRunner(codeqlPython, [
|
||||
"-c",
|
||||
"import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))",
|
||||
], options).exec();
|
||||
logger.info(`Setting LGTM_INDEX_IMPORT_PATH=${output}`);
|
||||
process.env["LGTM_INDEX_IMPORT_PATH"] = output;
|
||||
output = "";
|
||||
await new toolrunner.ToolRunner(codeqlPython, ["-c", "import sys; print(sys.version_info[0])"], options).exec();
|
||||
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
|
||||
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
|
||||
}
|
||||
async function createdDBForScannedLanguages(config, logger) {
|
||||
// Insert the LGTM_INDEX_X env vars at this point so they are set when
|
||||
// we extract any scanned languages.
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
if ((0, languages_1.isScannedLanguage)(language) &&
|
||||
!dbIsFinalized(config, language, logger)) {
|
||||
logger.startGroup(`Extracting ${language}`);
|
||||
if (language === languages_1.Language.python) {
|
||||
await setupPythonExtractor(logger);
|
||||
}
|
||||
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config, language), language);
|
||||
logger.endGroup();
|
||||
}
|
||||
}
|
||||
}
|
||||
function dbIsFinalized(config, language, logger) {
|
||||
const dbPath = util.getCodeQLDatabasePath(config, language);
|
||||
try {
|
||||
const dbInfo = yaml.load(fs.readFileSync(path.resolve(dbPath, "codeql-database.yml"), "utf8"));
|
||||
return !("inProgress" in dbInfo);
|
||||
}
|
||||
catch (e) {
|
||||
logger.warning(`Could not check whether database for ${language} was finalized. Assuming it is not.`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger) {
|
||||
await createdDBForScannedLanguages(config, logger);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
if (dbIsFinalized(config, language, logger)) {
|
||||
logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
|
||||
}
|
||||
else {
|
||||
logger.startGroup(`Finalizing ${language}`);
|
||||
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config, language), threadsFlag, memoryFlag);
|
||||
logger.endGroup();
|
||||
}
|
||||
}
|
||||
}
|
||||
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
|
||||
const statusReport = {};
|
||||
let locPromise = Promise.resolve({});
|
||||
const cliCanCountBaseline = await cliCanCountLoC();
|
||||
const debugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] ||
|
||||
process.env["ACTIONS_RUNNER_DEBUG"] ||
|
||||
process.env["ACTIONS_STEP_DEBUG"];
|
||||
if (!cliCanCountBaseline || debugMode) {
|
||||
// count the number of lines in the background
|
||||
locPromise = (0, count_loc_1.countLoc)(path.resolve(),
|
||||
// config.paths specifies external directories. the current
|
||||
// directory is included in the analysis by default. Replicate
|
||||
// that here.
|
||||
config.paths, config.pathsIgnore, config.languages, logger);
|
||||
}
|
||||
for (const language of config.languages) {
|
||||
const queries = config.queries[language];
|
||||
const packsWithVersion = config.packs[language] || [];
|
||||
const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0;
|
||||
const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0;
|
||||
const hasPackWithCustomQueries = packsWithVersion.length > 0;
|
||||
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
|
||||
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
|
||||
}
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
try {
|
||||
if (hasPackWithCustomQueries) {
|
||||
logger.info("Performing analysis with custom CodeQL Packs.");
|
||||
logger.startGroup(`Downloading custom packs for ${language}`);
|
||||
const results = await codeql.packDownload(packsWithVersion);
|
||||
logger.info(`Downloaded packs: ${results.packs
|
||||
.map((r) => `${r.name}@${r.version || "latest"}`)
|
||||
.join(", ")}`);
|
||||
logger.endGroup();
|
||||
}
|
||||
logger.startGroup(`Running queries for ${language}`);
|
||||
const querySuitePaths = [];
|
||||
if (queries["builtin"].length > 0) {
|
||||
const startTimeBuiltIn = new Date().getTime();
|
||||
querySuitePaths.push(await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"]), undefined));
|
||||
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeBuiltIn;
|
||||
}
|
||||
const startTimeCustom = new Date().getTime();
|
||||
let ranCustom = false;
|
||||
for (let i = 0; i < queries["custom"].length; ++i) {
|
||||
if (queries["custom"][i].queries.length > 0) {
|
||||
querySuitePaths.push(await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries), queries["custom"][i].searchPath));
|
||||
ranCustom = true;
|
||||
}
|
||||
}
|
||||
if (packsWithVersion.length > 0) {
|
||||
querySuitePaths.push(...(await runQueryPacks(language, "packs", packsWithVersion, undefined)));
|
||||
ranCustom = true;
|
||||
}
|
||||
if (ranCustom) {
|
||||
statusReport[`analyze_custom_queries_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeCustom;
|
||||
}
|
||||
logger.endGroup();
|
||||
logger.startGroup(`Interpreting results for ${language}`);
|
||||
const startTimeInterpretResults = new Date().getTime();
|
||||
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
|
||||
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile);
|
||||
if (!cliCanCountBaseline)
|
||||
await injectLinesOfCode(sarifFile, language, locPromise);
|
||||
statusReport[`interpret_results_${language}_duration_ms`] =
|
||||
new Date().getTime() - startTimeInterpretResults;
|
||||
logger.endGroup();
|
||||
logger.info(analysisSummary);
|
||||
if (!cliCanCountBaseline || debugMode)
|
||||
printLinesOfCodeSummary(logger, language, await locPromise);
|
||||
if (cliCanCountBaseline)
|
||||
logger.info(await runPrintLinesOfCode(language));
|
||||
}
|
||||
catch (e) {
|
||||
logger.info(String(e));
|
||||
if (e instanceof Error) {
|
||||
logger.info(e.stack);
|
||||
}
|
||||
statusReport.analyze_failure_language = language;
|
||||
throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${e}`);
|
||||
}
|
||||
}
|
||||
return statusReport;
|
||||
async function runInterpretResults(language, queries, sarifFile) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId);
|
||||
}
|
||||
async function cliCanCountLoC() {
|
||||
return await util.codeQlVersionAbove(await (0, codeql_1.getCodeQL)(config.codeQLCmd), codeql_1.CODEQL_VERSION_COUNTS_LINES);
|
||||
}
|
||||
async function runPrintLinesOfCode(language) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
return await codeql.databasePrintBaseline(databasePath);
|
||||
}
|
||||
async function runQueryGroup(language, type, querySuiteContents, searchPath) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuitePath = `${databasePath}-queries-${type}.qls`;
|
||||
fs.writeFileSync(querySuitePath, querySuiteContents);
|
||||
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
|
||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||
return querySuitePath;
|
||||
}
|
||||
async function runQueryPacks(language, type, packs, searchPath) {
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
// Run the queries individually instead of all at once to avoid command
|
||||
// line length restrictions, particularly on windows.
|
||||
for (const pack of packs) {
|
||||
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
await codeql.databaseRunQueries(databasePath, searchPath, pack, memoryFlag, threadsFlag);
|
||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||
}
|
||||
return packs;
|
||||
}
|
||||
}
|
||||
exports.runQueries = runQueries;
|
||||
function createQuerySuiteContents(queries) {
|
||||
return queries.map((q) => `- query: ${q}`).join("\n");
|
||||
}
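For illustration, the query suite text produced by the helper above looks like this; the .ql paths are hypothetical and not from the diff:

// Illustrative only; the query paths are made up.
// createQuerySuiteContents(["js/queries/UnsafeDeserialization.ql", "custom/MyQuery.ql"]) returns
// "- query: js/queries/UnsafeDeserialization.ql\n- query: custom/MyQuery.ql",
// which runQueryGroup writes to `${databasePath}-queries-${type}.qls` before calling
// codeql.databaseRunQueries with that file, avoiding command line length limits.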
|
||||
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Delete variables as specified by the end-tracing script
|
||||
await (0, tracer_config_1.endTracingForCluster)(config);
|
||||
}
|
||||
else {
|
||||
// Delete the tracer config env var to avoid tracing ourselves
|
||||
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
||||
}
|
||||
// After switching to Node16, this entire block can be replaced with `await fs.promises.rm(outputDir, { recursive: true, force: true });`.
|
||||
try {
|
||||
await fs.promises.rmdir(outputDir, {
|
||||
recursive: true,
|
||||
maxRetries: 5,
|
||||
retryDelay: 2000,
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
await fs.promises.mkdir(outputDir, { recursive: true });
|
||||
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
|
||||
}
|
||||
exports.runFinalize = runFinalize;
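The comment inside runFinalize above notes that the rmdir/ENOENT handling is only needed until the action moves to Node 16. A minimal sketch of the fs.promises.rm form it refers to (the helper name is hypothetical, not part of the diff):

// Sketch of the Node 16+ replacement mentioned in the comment above.
const fs = require("fs");
async function resetOutputDir(outputDir) {
  // force: true makes rm tolerate a missing directory, so no ENOENT handling is needed.
  await fs.promises.rm(outputDir, { recursive: true, force: true });
  await fs.promises.mkdir(outputDir, { recursive: true });
}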
|
||||
async function runCleanup(config, cleanupLevel, logger) {
|
||||
logger.startGroup("Cleaning up databases");
|
||||
for (const language of config.languages) {
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
await codeql.databaseCleanup(databasePath, cleanupLevel);
|
||||
}
|
||||
logger.endGroup();
|
||||
}
|
||||
exports.runCleanup = runCleanup;
|
||||
async function injectLinesOfCode(sarifFile, language, locPromise) {
|
||||
var _a;
|
||||
const lineCounts = await locPromise;
|
||||
if (language in lineCounts) {
|
||||
const sarif = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
|
||||
if (Array.isArray(sarif.runs)) {
|
||||
for (const run of sarif.runs) {
|
||||
run.properties = run.properties || {};
|
||||
run.properties.metricResults = run.properties.metricResults || [];
|
||||
for (const metric of run.properties.metricResults) {
|
||||
// Baseline is inserted when matching rule has tag lines-of-code
|
||||
if (metric.rule && metric.rule.toolComponent) {
|
||||
const matchingRule = run.tool.extensions[metric.rule.toolComponent.index].rules[metric.rule.index];
|
||||
if ((_a = matchingRule.properties.tags) === null || _a === void 0 ? void 0 : _a.includes("lines-of-code")) {
|
||||
metric.baseline = lineCounts[language];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
fs.writeFileSync(sarifFile, JSON.stringify(sarif));
|
||||
}
|
||||
}
|
||||
function printLinesOfCodeSummary(logger, language, lineCounts) {
|
||||
if (language in lineCounts) {
|
||||
logger.info(`Counted a baseline of ${lineCounts[language]} lines of code for ${language}.`);
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=analyze.js.map
|
||||
1
lib/analyze.js.map
Normal file
File diff suppressed because one or more lines are too long
213
lib/analyze.test.js
generated
Normal file
@@ -0,0 +1,213 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const analyze_1 = require("./analyze");
|
||||
const codeql_1 = require("./codeql");
|
||||
const count = __importStar(require("./count-loc"));
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
// Checks that the duration fields are populated for the correct language
|
||||
// and correct case of builtin or custom. Also checks the correct search
|
||||
// paths are set in the database analyze invocation.
|
||||
(0, ava_1.default)("status report fields and search path setting", async (t) => {
|
||||
const mockLinesOfCode = Object.values(languages_1.Language).reduce((obj, lang, i) => {
|
||||
// use a different line count for each language
|
||||
obj[lang] = i + 1;
|
||||
return obj;
|
||||
}, {});
|
||||
sinon.stub(count, "countLoc").resolves(mockLinesOfCode);
|
||||
let searchPathsUsed = [];
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const memoryFlag = "";
|
||||
const addSnippetsFlag = "";
|
||||
const threadsFlag = "";
|
||||
const packs = {
|
||||
[languages_1.Language.cpp]: ["a/b@1.0.0"],
|
||||
[languages_1.Language.java]: ["c/d@2.0.0"],
|
||||
};
|
||||
for (const language of Object.values(languages_1.Language)) {
|
||||
(0, codeql_1.setCodeQL)({
|
||||
packDownload: async () => ({ packs: [] }),
|
||||
databaseRunQueries: async (_db, searchPath) => {
|
||||
searchPathsUsed.push(searchPath);
|
||||
},
|
||||
databaseInterpretResults: async (_db, _queriesRun, sarifFile) => {
|
||||
fs.writeFileSync(sarifFile, JSON.stringify({
|
||||
runs: [
|
||||
// references a rule with the lines-of-code tag, so baseline should be injected
|
||||
{
|
||||
tool: {
|
||||
extensions: [
|
||||
{
|
||||
rules: [
|
||||
{
|
||||
properties: {
|
||||
tags: ["lines-of-code"],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
properties: {
|
||||
metricResults: [
|
||||
{
|
||||
rule: {
|
||||
index: 0,
|
||||
toolComponent: {
|
||||
index: 0,
|
||||
},
|
||||
},
|
||||
value: 123,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{},
|
||||
],
|
||||
}));
|
||||
return "";
|
||||
},
|
||||
});
|
||||
searchPathsUsed = [];
|
||||
const config = {
|
||||
languages: [language],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs,
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||
recursive: true,
|
||||
});
|
||||
config.queries[language] = {
|
||||
builtin: ["foo.ql"],
|
||||
custom: [],
|
||||
};
|
||||
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
|
||||
const hasPacks = language in packs;
|
||||
const statusReportKeys = Object.keys(builtinStatusReport).sort();
|
||||
if (hasPacks) {
|
||||
t.deepEqual(statusReportKeys.length, 3, statusReportKeys.toString());
|
||||
t.deepEqual(statusReportKeys[0], `analyze_builtin_queries_${language}_duration_ms`);
|
||||
t.deepEqual(statusReportKeys[1], `analyze_custom_queries_${language}_duration_ms`);
|
||||
t.deepEqual(statusReportKeys[2], `interpret_results_${language}_duration_ms`);
|
||||
}
|
||||
else {
|
||||
t.deepEqual(statusReportKeys[0], `analyze_builtin_queries_${language}_duration_ms`);
|
||||
t.deepEqual(statusReportKeys[1], `interpret_results_${language}_duration_ms`);
|
||||
}
|
||||
config.queries[language] = {
|
||||
builtin: [],
|
||||
custom: [
|
||||
{
|
||||
queries: ["foo.ql"],
|
||||
searchPath: "/1",
|
||||
},
|
||||
{
|
||||
queries: ["bar.ql"],
|
||||
searchPath: "/2",
|
||||
},
|
||||
],
|
||||
};
|
||||
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(Object.keys(customStatusReport).length, 2);
|
||||
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
|
||||
const expectedSearchPathsUsed = hasPacks
|
||||
? [undefined, undefined, "/1", "/2", undefined]
|
||||
: [undefined, "/1", "/2"];
|
||||
t.deepEqual(searchPathsUsed, expectedSearchPathsUsed);
|
||||
t.true(`interpret_results_${language}_duration_ms` in customStatusReport);
|
||||
}
|
||||
verifyLineCounts(tmpDir);
|
||||
verifyQuerySuites(tmpDir);
|
||||
});
|
||||
function verifyLineCounts(tmpDir) {
|
||||
// eslint-disable-next-line github/array-foreach
|
||||
Object.keys(languages_1.Language).forEach((lang, i) => {
|
||||
verifyLineCountForFile(path.join(tmpDir, `${lang}.sarif`), i + 1);
|
||||
});
|
||||
}
|
||||
function verifyLineCountForFile(filePath, lineCount) {
|
||||
const sarif = JSON.parse(fs.readFileSync(filePath, "utf8"));
|
||||
t.deepEqual(sarif.runs[0].properties.metricResults, [
|
||||
{
|
||||
rule: {
|
||||
index: 0,
|
||||
toolComponent: {
|
||||
index: 0,
|
||||
},
|
||||
},
|
||||
value: 123,
|
||||
baseline: lineCount,
|
||||
},
|
||||
]);
|
||||
// when the rule doesn't exist, it should not be added
|
||||
t.deepEqual(sarif.runs[1].properties.metricResults, []);
|
||||
}
|
||||
function verifyQuerySuites(tmpDir) {
|
||||
const qlsContent = [
|
||||
{
|
||||
query: "foo.ql",
|
||||
},
|
||||
];
|
||||
const qlsContent2 = [
|
||||
{
|
||||
query: "bar.ql",
|
||||
},
|
||||
];
|
||||
for (const lang of Object.values(languages_1.Language)) {
|
||||
t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
|
||||
t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
|
||||
t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
|
||||
}
|
||||
function readContents(name) {
|
||||
const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");
|
||||
console.log(x);
|
||||
return yaml.load(fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8"));
|
||||
}
|
||||
}
|
||||
});
|
||||
//# sourceMappingURL=analyze.test.js.map
|
||||
1
lib/analyze.test.js.map
Normal file
File diff suppressed because one or more lines are too long
99
lib/api-client.js
generated
@@ -1,46 +1,93 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const github = __importStar(require("@actions/github"));
|
||||
exports.getGitHubVersionActionsOnly = exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const githubUtils = __importStar(require("@actions/github/lib/utils"));
|
||||
const retry = __importStar(require("@octokit/plugin-retry"));
|
||||
const console_log_level_1 = __importDefault(require("console-log-level"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
exports.getApiClient = function (githubAuth, githubApiUrl, allowLocalRun = false) {
|
||||
if (util_1.isLocalRun() && !allowLocalRun) {
|
||||
throw new Error('Invalid API call in local run');
|
||||
}
|
||||
return new github.GitHub({
|
||||
auth: parseAuth(githubAuth),
|
||||
baseUrl: githubApiUrl,
|
||||
userAgent: "CodeQL Action",
|
||||
log: console_log_level_1.default({ level: "debug" })
|
||||
});
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
var DisallowedAPIVersionReason;
|
||||
(function (DisallowedAPIVersionReason) {
|
||||
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
|
||||
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
|
||||
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
|
||||
const getApiClient = function (apiDetails, { allowExternal = false } = {}) {
|
||||
const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
|
||||
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
|
||||
return new retryingOctokit(githubUtils.getOctokitOptions(auth, {
|
||||
baseUrl: getApiUrl(apiDetails.url),
|
||||
userAgent: `CodeQL-${(0, util_1.getMode)()}/${pkg.version}`,
|
||||
log: (0, console_log_level_1.default)({ level: "debug" }),
|
||||
}));
|
||||
};
|
||||
// Parses the user input as either a single token,
|
||||
// or a username and password / PAT.
|
||||
function parseAuth(auth) {
|
||||
// Check if it's a username:password pair
|
||||
const c = auth.indexOf(':');
|
||||
if (c !== -1) {
|
||||
return 'basic ' + Buffer.from(auth).toString('base64');
|
||||
exports.getApiClient = getApiClient;
|
||||
function getApiUrl(githubUrl) {
|
||||
const url = new URL(githubUrl);
|
||||
// If we detect this is trying to connect to github.com
|
||||
// then return with a fixed canonical URL.
|
||||
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
|
||||
return "https://api.github.com";
|
||||
}
|
||||
// Otherwise use the token as it is
|
||||
return auth;
|
||||
// Add the /api/v3 API prefix
|
||||
url.pathname = path.join(url.pathname, "api", "v3");
|
||||
return url.toString();
|
||||
}
|
||||
function getApiDetails() {
|
||||
return {
|
||||
auth: (0, actions_util_1.getRequiredInput)("token"),
|
||||
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
|
||||
};
|
||||
}
|
||||
// Temporary function to aid in the transition to running on and off of github actions.
|
||||
// Once all code has been coverted this function should be removed or made canonical
|
||||
// Once all code has been converted this function should be removed or made canonical
|
||||
// and called only from the action entrypoints.
|
||||
function getActionsApiClient(allowLocalRun = false) {
|
||||
return exports.getApiClient(core.getInput('token'), util_1.getRequiredEnvParam('GITHUB_API_URL'), allowLocalRun);
|
||||
function getActionsApiClient() {
|
||||
return (0, exports.getApiClient)(getApiDetails());
|
||||
}
|
||||
exports.getActionsApiClient = getActionsApiClient;
|
||||
let cachedGitHubVersion = undefined;
|
||||
/**
|
||||
* Report the GitHub server version. This is a wrapper around
|
||||
* util.getGitHubVersion() that automatically supplies GitHub API details using
|
||||
* GitHub Action inputs. If you need to get the GitHub server version from the
|
||||
* Runner, please call util.getGitHubVersion() instead.
|
||||
*
|
||||
* @returns GitHub version
|
||||
*/
|
||||
async function getGitHubVersionActionsOnly() {
|
||||
if (!util.isActions()) {
|
||||
throw new Error("getGitHubVersionActionsOnly() works only in an action");
|
||||
}
|
||||
if (cachedGitHubVersion === undefined) {
|
||||
cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
|
||||
}
|
||||
return cachedGitHubVersion;
|
||||
}
|
||||
exports.getGitHubVersionActionsOnly = getGitHubVersionActionsOnly;
|
||||
//# sourceMappingURL=api-client.js.map
|
||||
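The getApiUrl helper in the new api-client.js above normalizes a GitHub server URL: github.com and api.github.com map to the canonical https://api.github.com, while any other host gets the /api/v3 REST prefix appended. A small standalone sketch of the same normalization (illustrative only; the function name apiUrlFor is not from the diff):

// Sketch of the URL normalization performed by getApiUrl above.
import * as path from "path";

function apiUrlFor(githubUrl: string): string {
  const url = new URL(githubUrl);
  // github.com / api.github.com collapse to the canonical API endpoint.
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://api.github.com";
  }
  // GitHub Enterprise Server exposes its REST API under /api/v3.
  url.pathname = path.join(url.pathname, "api", "v3");
  return url.toString();
}

// e.g. apiUrlFor("http://hucairz") evaluates to "http://hucairz/api/v3",
// matching the expectations in the api-client tests above.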
@@ -1 +1 @@
|
||||
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEhD,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAAS,UAAkB,EAAE,YAAoB,EAAE,aAAa,GAAG,KAAK;IAClG,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB;QACE,IAAI,EAAE,SAAS,CAAC,UAAU,CAAC;QAC3B,OAAO,EAAE,YAAY;QACrB,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,kDAAkD;AAClD,oCAAoC;AACpC,SAAS,SAAS,CAAC,IAAY;IAC7B,yCAAyC;IACzC,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;QACZ,OAAO,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KACxD;IAED,mCAAmC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,gBAAgB,CAAC,EACrC,aAAa,CAAC,CAAC;AACnB,CAAC;AALD,kDAKC"}
|
||||
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAAqE;AAErE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IACpB,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,OAAO,IAAA,oBAAY,EAAC,aAAa,EAAE,CAAC,CAAC;AACvC,CAAC;AAFD,kDAEC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;;GAOG;AACI,KAAK,UAAU,2BAA2B;IAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;QACrB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;KAC1E;IACD,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AARD,kEAQC"}
|
||||
91
lib/api-client.test.js
generated
Normal file
@@ -0,0 +1,91 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const githubUtils = __importStar(require("@actions/github/lib/utils"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
let pluginStub;
|
||||
let githubStub;
|
||||
ava_1.default.beforeEach(() => {
|
||||
pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
|
||||
githubStub = sinon.stub();
|
||||
pluginStub.returns(githubStub);
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
|
||||
});
|
||||
(0, ava_1.default)("Get the client API", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
externalRepoAuth: "abc",
|
||||
url: "http://hucairz",
|
||||
}, undefined, {
|
||||
auth: "token xyz",
|
||||
baseUrl: "http://hucairz/api/v3",
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Get the client API external", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
externalRepoAuth: "abc",
|
||||
url: "http://hucairz",
|
||||
}, { allowExternal: true }, {
|
||||
auth: "token abc",
|
||||
baseUrl: "http://hucairz/api/v3",
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Get the client API external not present", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
url: "http://hucairz",
|
||||
}, { allowExternal: true }, {
|
||||
auth: "token xyz",
|
||||
baseUrl: "http://hucairz/api/v3",
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Get the client API with github url", async (t) => {
|
||||
doTest(t, {
|
||||
auth: "xyz",
|
||||
url: "https://github.com/some/invalid/url",
|
||||
}, undefined, {
|
||||
auth: "token xyz",
|
||||
baseUrl: "https://api.github.com",
|
||||
userAgent: `CodeQL-Action/${pkg.version}`,
|
||||
});
|
||||
});
|
||||
function doTest(t, clientArgs, clientOptions, expected) {
|
||||
(0, api_client_1.getApiClient)(clientArgs, clientOptions);
|
||||
const firstCallArgs = githubStub.args[0];
|
||||
// log is a function, so we don't need to test for equality of it
|
||||
delete firstCallArgs[0].log;
|
||||
t.deepEqual(firstCallArgs, [expected]);
|
||||
}
|
||||
//# sourceMappingURL=api-client.test.js.map
|
||||
1
lib/api-client.test.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,6CAA+B;AAE/B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,IAAA,yBAAY,EAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
|
||||
1
lib/api-compatibility.json
Normal file
@@ -0,0 +1 @@
|
||||
{ "maximumVersion": "3.5", "minimumVersion": "3.1" }
|
||||
82
lib/autobuild-action.js
generated
Normal file
@@ -0,0 +1,82 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const autobuild_1 = require("./autobuild");
|
||||
const config_utils = __importStar(require("./config-utils"));
|
||||
const logging_1 = require("./logging");
|
||||
const util_1 = require("./util");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
|
||||
const status = (0, actions_util_1.getActionsStatus)(cause, failingLanguage);
|
||||
const statusReportBase = await (0, actions_util_1.createStatusReportBase)("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
autobuild_languages: allLanguages.join(","),
|
||||
autobuild_failure: failingLanguage,
|
||||
};
|
||||
await (0, actions_util_1.sendStatusReport)(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
await (0, util_1.checkActionVersion)(pkg.version);
|
||||
let language = undefined;
|
||||
try {
|
||||
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
|
||||
return;
|
||||
}
|
||||
const config = await config_utils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
|
||||
if (config === undefined) {
|
||||
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
|
||||
}
|
||||
language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
|
||||
if (language !== undefined) {
|
||||
const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory");
|
||||
if (workingDirectory) {
|
||||
logger.info(`Changing autobuilder working directory to ${workingDirectory}`);
|
||||
process.chdir(workingDirectory);
|
||||
}
|
||||
await (0, autobuild_1.runAutobuild)(language, config, logger);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error instanceof Error ? error.message : String(error)}`);
|
||||
console.log(error);
|
||||
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error instanceof Error ? error : new Error(String(error)));
|
||||
return;
|
||||
}
|
||||
await sendCompletedStatusReport(startedAt, language ? [language] : []);
|
||||
}
|
||||
async function runWrapper() {
|
||||
try {
|
||||
await run();
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(`autobuild action failed. ${error}`);
|
||||
console.log(error);
|
||||
}
|
||||
}
|
||||
void runWrapper();
|
||||
//# sourceMappingURL=autobuild-action.js.map
|
||||
1
lib/autobuild-action.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAyE;AAEzE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
84
lib/autobuild.js
generated
@@ -1,65 +1,33 @@
|
||||
"use strict";
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
exports.runAutobuild = exports.determineAutobuildLanguage = void 0;
|
||||
const codeql_1 = require("./codeql");
|
||||
const config_utils = __importStar(require("./config-utils"));
|
||||
const util = __importStar(require("./util"));
|
||||
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
|
||||
var _a, _b;
|
||||
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
|
||||
const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
autobuild_languages: allLanguages.join(','),
|
||||
autobuild_failure: failingLanguage,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let language;
|
||||
try {
|
||||
util.prepareLocalRunEnvironment();
|
||||
if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
const config = await config_utils.getConfig();
|
||||
// Attempt to find a language to autobuild
|
||||
// We want pick the dominant language in the repo from the ones we're able to build
|
||||
// The languages are sorted in order specified by user or by lines of code if we got
|
||||
// them from the GitHub API, so try to build the first language on the list.
|
||||
const autobuildLanguages = config.languages.filter(codeql_1.isTracedLanguage);
|
||||
language = autobuildLanguages[0];
|
||||
if (!language) {
|
||||
core.info("None of the languages in this project require extra build steps");
|
||||
return;
|
||||
}
|
||||
core.debug(`Detected dominant traced language: ${language}`);
|
||||
if (autobuildLanguages.length > 1) {
|
||||
core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
|
||||
}
|
||||
core.startGroup(`Attempting to automatically build ${language} code`);
|
||||
const codeQL = codeql_1.getCodeQL();
|
||||
await codeQL.runAutobuild(language);
|
||||
core.endGroup();
|
||||
const languages_1 = require("./languages");
|
||||
function determineAutobuildLanguage(config, logger) {
|
||||
// Attempt to find a language to autobuild
|
||||
// We want pick the dominant language in the repo from the ones we're able to build
|
||||
// The languages are sorted in order specified by user or by lines of code if we got
|
||||
// them from the GitHub API, so try to build the first language on the list.
|
||||
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
|
||||
const language = autobuildLanguages[0];
|
||||
if (!language) {
|
||||
logger.info("None of the languages in this project require extra build steps");
|
||||
return undefined;
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
|
||||
console.log(error);
|
||||
await sendCompletedStatusReport(startedAt, [language], language, error);
|
||||
return;
|
||||
logger.debug(`Detected dominant traced language: ${language}`);
|
||||
if (autobuildLanguages.length > 1) {
|
||||
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
|
||||
.slice(1)
|
||||
.join(" and ")}, you must replace this call with custom build steps.`);
|
||||
}
|
||||
await sendCompletedStatusReport(startedAt, [language]);
|
||||
return language;
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("autobuild action failed. " + e);
|
||||
console.log(e);
|
||||
});
|
||||
exports.determineAutobuildLanguage = determineAutobuildLanguage;
|
||||
async function runAutobuild(language, config, logger) {
|
||||
logger.startGroup(`Attempting to automatically build ${language} code`);
|
||||
const codeQL = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
await codeQL.runAutobuild(language);
|
||||
logger.endGroup();
|
||||
}
|
||||
exports.runAutobuild = runAutobuild;
|
||||
//# sourceMappingURL=autobuild.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAuD;AACvD,6DAA+C;AAC/C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,EAAE,CAAC;QAE9C,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,yBAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
|
||||
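The determineAutobuildLanguage change above picks the first traced language from the configured list (the list is assumed to be ordered by user preference or by lines of code) and warns when more than one traced language is present. A simplified TypeScript sketch of that selection (illustrative types and names, not part of the diff):

// Sketch of the selection logic in determineAutobuildLanguage above.
function pickAutobuildLanguage(
  languages: string[],
  isTracedLanguage: (lang: string) => boolean,
  warn: (msg: string) => void
): string | undefined {
  // Languages arrive sorted by user preference or by lines of code,
  // so the first traced language is the one to autobuild.
  const traced = languages.filter(isTracedLanguage);
  if (traced.length === 0) {
    return undefined; // no language needs an automatic build step
  }
  if (traced.length > 1) {
    warn(
      `Only ${traced[0]} will be built automatically; ` +
        `add custom build steps for ${traced.slice(1).join(" and ")}.`
    );
  }
  return traced[0];
}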
57
lib/cli.js
generated
@@ -1,57 +0,0 @@
|
||||
"use strict";
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const commander_1 = require("commander");
|
||||
const path = __importStar(require("path"));
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const program = new commander_1.Command();
|
||||
program.version('0.0.1');
|
||||
function parseGithubApiUrl(inputUrl) {
|
||||
try {
|
||||
const url = new URL(inputUrl);
|
||||
// If we detect this is trying to be to github.com
|
||||
// then return with a fixed canonical URL.
|
||||
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
|
||||
return 'https://api.github.com';
|
||||
}
|
||||
// Add the API path if it's not already present.
|
||||
if (url.pathname.indexOf('/api/v3') === -1) {
|
||||
url.pathname = path.join(url.pathname, 'api', 'v3');
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`"${inputUrl}" is not a valid URL`);
|
||||
}
|
||||
}
|
||||
const logger = logging_1.getCLILogger();
|
||||
program
|
||||
.command('upload')
|
||||
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
|
||||
.requiredOption('--sarif-file <file>', 'SARIF file to upload; can also be a directory for uploading multiple')
|
||||
.requiredOption('--repository <repository>', 'Repository name')
|
||||
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed')
|
||||
.requiredOption('--ref <ref>', 'Name of ref that was analyzed')
|
||||
.requiredOption('--github-url <url>', 'URL of GitHub instance')
|
||||
.requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
|
||||
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
|
||||
.action(async (cmd) => {
|
||||
try {
|
||||
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, cmd.ref, undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubApiUrl(cmd.githubUrl), 'cli', logger);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error('Upload failed');
|
||||
logger.error(e);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
});
|
||||
program.parse(process.argv);
|
||||
//# sourceMappingURL=cli.js.map
|
||||
@@ -1 +0,0 @@
|
||||
{"version":3,"file":"cli.js","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";;;;;;;;;AAAA,yCAAoC;AACpC,2CAA6B;AAE7B,uCAAyC;AACzC,6CAAkD;AAClD,yDAA2C;AAE3C,MAAM,OAAO,GAAG,IAAI,mBAAO,EAAE,CAAC;AAC9B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAYzB,SAAS,iBAAiB,CAAC,QAAgB;IACzC,IAAI;QACF,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC;QAE9B,kDAAkD;QAClD,0CAA0C;QAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;YACtE,OAAO,wBAAwB,CAAC;SACjC;QAED,gDAAgD;QAChD,IAAI,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;YAC1C,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;SACrD;QAED,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;KAEvB;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,IAAI,QAAQ,sBAAsB,CAAC,CAAC;KACrD;AACH,CAAC;AAED,MAAM,MAAM,GAAG,sBAAY,EAAE,CAAC;AAE9B,OAAO;KACJ,OAAO,CAAC,QAAQ,CAAC;KACjB,WAAW,CAAC,6EAA6E,CAAC;KAC1F,cAAc,CAAC,qBAAqB,EAAE,sEAAsE,CAAC;KAC7G,cAAc,CAAC,2BAA2B,EAAE,iBAAiB,CAAC;KAC9D,cAAc,CAAC,mBAAmB,EAAE,iCAAiC,CAAC;KACtE,cAAc,CAAC,aAAa,EAAE,+BAA+B,CAAC;KAC9D,cAAc,CAAC,oBAAoB,EAAE,wBAAwB,CAAC;KAC9D,cAAc,CAAC,sBAAsB,EAAE,qFAAqF,CAAC;KAC7H,MAAM,CAAC,wBAAwB,EAAE,oDAAoD,CAAC;KACtF,MAAM,CAAC,KAAK,EAAE,GAAe,EAAE,EAAE;IAChC,IAAI;QACF,MAAM,UAAU,CAAC,MAAM,CACrB,GAAG,CAAC,SAAS,EACb,+BAAkB,CAAC,GAAG,CAAC,UAAU,CAAC,EAClC,GAAG,CAAC,MAAM,EACV,GAAG,CAAC,GAAG,EACP,SAAS,EACT,SAAS,EACT,SAAS,EACT,GAAG,CAAC,YAAY,IAAI,OAAO,CAAC,GAAG,EAAE,EACjC,SAAS,EACT,GAAG,CAAC,UAAU,EACd,iBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,EAChC,KAAK,EACL,MAAM,CAAC,CAAC;KACX;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;QAC9B,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAChB,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;KACtB;AACH,CAAC,CAAC,CAAC;AAEL,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC"}
|
||||
884
lib/codeql.js
generated
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
296
lib/codeql.test.js
generated
@@ -1,82 +1,294 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const nock_1 = __importDefault(require("nock"));
|
||||
const path = __importStar(require("path"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const codeql = __importStar(require("./codeql"));
|
||||
const defaults = __importStar(require("./defaults.json"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
testing_utils_1.setupTests(ava_1.default);
|
||||
ava_1.default('download codeql bundle cache', async (t) => {
|
||||
const util_1 = require("./util");
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
const sampleApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://github.com",
|
||||
};
|
||||
const sampleGHAEApiDetails = {
|
||||
auth: "token",
|
||||
url: "https://example.githubenterprise.com",
|
||||
};
|
||||
ava_1.default.beforeEach(() => {
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle cache", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
|
||||
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
|
||||
const versions = ['20200601', '20200610'];
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const versions = ["20200601", "20200610"];
|
||||
for (let i = 0; i < versions.length; i++) {
|
||||
const version = versions[i];
|
||||
nock_1.default('https://example.com')
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
|
||||
await codeql.setupCodeQL();
|
||||
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
|
||||
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
||||
}
|
||||
const cachedVersions = toolcache.findAllVersions('CodeQL');
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
ava_1.default('parse codeql bundle url version', t => {
|
||||
(0, ava_1.default)("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("don't download codeql bundle cache with pinned different version cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle cache with different version cached (not pinned)", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
: process.platform === "linux"
|
||||
? "linux64"
|
||||
: "osx64";
|
||||
(0, nock_1.default)("https://github.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
: process.platform === "linux"
|
||||
? "linux64"
|
||||
: "osx64";
|
||||
(0, nock_1.default)("https://github.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const bundleAssetID = 10;
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
: process.platform === "linux"
|
||||
? "linux64"
|
||||
: "osx64";
|
||||
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
|
||||
.reply(200, {
|
||||
assets: { [codeQLBundleName]: bundleAssetID },
|
||||
});
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
|
||||
.reply(200, {
|
||||
url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
|
||||
});
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("parse codeql bundle url version", (t) => {
|
||||
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
|
||||
});
|
||||
(0, ava_1.default)("convert to semver", (t) => {
|
||||
const tests = {
|
||||
'20200601': '0.0.0-20200601',
|
||||
'20200601.0': '0.0.0-20200601.0',
|
||||
'20200601.0.0': '20200601.0.0',
|
||||
'1.2.3': '1.2.3',
|
||||
'1.2.3-alpha': '1.2.3-alpha',
|
||||
'1.2.3-beta.1': '1.2.3-beta.1',
|
||||
"20200601": "0.0.0-20200601",
|
||||
"20200601.0": "0.0.0-20200601.0",
|
||||
"20200601.0.0": "20200601.0.0",
|
||||
"1.2.3": "1.2.3",
|
||||
"1.2.3-alpha": "1.2.3-alpha",
|
||||
"1.2.3-beta.1": "1.2.3-beta.1",
|
||||
};
|
||||
for (const [version, expectedVersion] of Object.entries(tests)) {
|
||||
const url = `https://github.com/.../codeql-bundle-${version}/...`;
|
||||
try {
|
||||
const parsedVersion = codeql.getCodeQLURLVersion(url);
|
||||
const parsedVersion = codeql.convertToSemVer(version, (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(parsedVersion, expectedVersion);
|
||||
}
|
||||
catch (e) {
|
||||
t.fail(e.message);
|
||||
t.fail(e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
});
(0, ava_1.default)("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
});
(0, ava_1.default)("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
(0, ava_1.default)("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
const o2 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ["foo", "bar"], []), ["42"]);
const o3 = { "*": [42], foo: { "*": [87], bar: [99] } };
const p = ["foo", "bar"];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
(0, ava_1.default)("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
});
(0, ava_1.default)("getCodeQLActionRepository", (t) => {
const logger = (0, logging_1.getRunnerLogger)(true);
(0, util_1.initializeEnvironment)(util_1.Mode.runner, "1.2.3");
const repoActions = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoActions, "github/codeql-action");
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
// isRunningLocalAction() === true
delete process.env["GITHUB_ACTION_REPOSITORY"];
process.env["RUNNER_TEMP"] = path.dirname(__dirname);
const repoLocalRunner = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoLocalRunner, "github/codeql-action");
process.env["GITHUB_ACTION_REPOSITORY"] = "xxx/yyy";
const repoEnv = codeql.getCodeQLActionRepository(logger);
t.deepEqual(repoEnv, "xxx/yyy");
});
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-query-help for 2.7.0", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
});
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
});
const stubConfig = {
languages: [languages_1.Language.cpp],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: "",
toolCacheDir: "",
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
dbLocation: "",
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
(0, ava_1.default)("databaseInitCluster() Lua feature flag enabled, but old CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.9.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.LuaTracerConfigEnabled]));
t.false(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be absent, but it is present");
t.false(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be absent, but it is present");
});
(0, ava_1.default)("databaseInitCluster() Lua feature flag disabled, with old CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.9.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]));
t.false(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be absent, but it is present");
t.false(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be absent, but it is present");
});
(0, ava_1.default)("databaseInitCluster() Lua feature flag enabled, compatible CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.10.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.LuaTracerConfigEnabled]));
t.true(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be present, but it is absent");
});
(0, ava_1.default)("databaseInitCluster() Lua feature flag disabled, compatible CLI", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.10.0");
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]));
t.true(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be present, but it is absent");
});
function stubToolRunnerConstructor() {
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
runnerObjectStub.exec.resolves(0);
const runnerConstructorStub = sinon.stub(toolrunner, "ToolRunner");
runnerConstructorStub.returns(runnerObjectStub);
return runnerConstructorStub;
}
//# sourceMappingURL=codeql.test.js.map
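// A standalone sketch, not part of the diff above, of the stubbing pattern that
// stubToolRunnerConstructor() relies on: replacing the ToolRunner constructor with a sinon
// stub lets the tests read the command-line arguments (constructor argument index 1) that
// the CodeQL wrapper would have passed to the real binary. Assumes the same "sinon" and
// "@actions/exec/lib/toolrunner" modules imported by the test file; the names below are
// hypothetical.
const sinonSketch = require("sinon");
const toolrunnerSketch = require("@actions/exec/lib/toolrunner");
function exampleArgsPassedToCodeQL() {
    const runnerStub = sinonSketch.createStubInstance(toolrunnerSketch.ToolRunner);
    runnerStub.exec.resolves(0);
    const constructorStub = sinonSketch.stub(toolrunnerSketch, "ToolRunner").returns(runnerStub);
    // Code under test would construct a ToolRunner here; the stub records the call instead.
    new toolrunnerSketch.ToolRunner("codeql", ["database", "interpret-results"]);
    const argsPassed = constructorStub.firstCall.args[1]; // ["database", "interpret-results"]
    constructorStub.restore();
    return argsPassed;
}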
File diff suppressed because one or more lines are too long
749
lib/config-utils.js
generated
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
886
lib/config-utils.test.js
generated
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
70
lib/count-loc.js
generated
Normal file
@@ -0,0 +1,70 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.countLoc = void 0;
const github_linguist_1 = require("github-linguist");
const languages_1 = require("./languages");
// Map from linguist language names to language prefixes used in the action and codeql
const linguistToMetrics = {
c: languages_1.Language.cpp,
"c++": languages_1.Language.cpp,
"c#": languages_1.Language.csharp,
go: languages_1.Language.go,
java: languages_1.Language.java,
javascript: languages_1.Language.javascript,
python: languages_1.Language.python,
ruby: languages_1.Language.ruby,
typescript: languages_1.Language.javascript,
};
const nameToLinguist = Object.entries(linguistToMetrics).reduce((obj, [key, name]) => {
if (!obj[name]) {
obj[name] = [];
}
obj[name].push(key);
return obj;
}, {});
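// Illustrative note, not part of the original file: the reduce above inverts
// linguistToMetrics, so each CodeQL language maps to every linguist name that should
// count towards it. Assuming the Language enum values are the plain strings used in the
// tests later in this diff ("cpp", "csharp", ...), the resulting object looks like:
//   {
//     cpp: ["c", "c++"],
//     csharp: ["c#"],
//     go: ["go"],
//     java: ["java"],
//     javascript: ["javascript", "typescript"],
//     python: ["python"],
//     ruby: ["ruby"],
//   }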
/**
 * Count the lines of code of the specified language using the include
 * and exclude glob paths.
 *
 * @param cwd the root directory to start the count from
 * @param include glob patterns to include in the search for relevant files
 * @param exclude glob patterns to exclude in the search for relevant files
 * @param dbLanguages list of languages to include in the results
 * @param logger object to log results
 */
async function countLoc(cwd, include, exclude, dbLanguages, logger) {
const result = await new github_linguist_1.LocDir({
cwd,
include: Array.isArray(include) && include.length > 0 ? include : ["**"],
exclude,
analysisLanguages: dbLanguages.flatMap((lang) => nameToLinguist[lang]),
}).loadInfo();
// The analysis counts LoC in all languages. We need to
// extract the languages we care about. Also, note that
// the analysis uses slightly different names for language.
const lineCounts = Object.entries(result.languages).reduce((obj, [language, { code }]) => {
const metricsLanguage = linguistToMetrics[language];
if (metricsLanguage && dbLanguages.includes(metricsLanguage)) {
obj[metricsLanguage] = code + (obj[metricsLanguage] || 0);
}
return obj;
}, {});
if (Object.keys(lineCounts).length) {
logger.debug("Lines of code count:");
for (const [language, count] of Object.entries(lineCounts)) {
logger.debug(`  ${language}: ${count}`);
}
}
else {
logger.info("Could not determine the baseline lines of code count in this repository. " +
"Because of this, it will not be possible to compare the lines " +
"of code analyzed by code scanning with the baseline. This will not affect " +
"the results produced by code scanning. If you have any questions, you can " +
"raise an issue at https://github.com/github/codeql-action/issues. Please " +
"include a link to the repository if public, or otherwise information about " +
"the code scanning workflow you are using.");
}
return lineCounts;
}
exports.countLoc = countLoc;
//# sourceMappingURL=count-loc.js.map
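// A hypothetical usage sketch, not part of the diff above, assuming the compiled module
// layout shown here ("./count-loc", "./logging") and a repository checked out at the
// current working directory. The plain language strings mirror the Language enum values
// used in the tests later in this diff.
const { countLoc: countLocSketch } = require("./count-loc");
const { getRunnerLogger: getRunnerLoggerSketch } = require("./logging");
async function printBaselineLoc() {
    const counts = await countLocSketch(process.cwd(), [], [], ["javascript", "python"], getRunnerLoggerSketch(true));
    console.log(counts); // e.g. { javascript: 1234, python: 567 }
}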
1
lib/count-loc.js.map
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"count-loc.js","sourceRoot":"","sources":["../src/count-loc.ts"],"names":[],"mappings":";;;AAAA,qDAAyC;AAEzC,2CAAuC;AAGvC,sFAAsF;AACtF,MAAM,iBAAiB,GAA6B;IAClD,CAAC,EAAE,oBAAQ,CAAC,GAAG;IACf,KAAK,EAAE,oBAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,oBAAQ,CAAC,MAAM;IACrB,EAAE,EAAE,oBAAQ,CAAC,EAAE;IACf,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;IAC/B,MAAM,EAAE,oBAAQ,CAAC,MAAM;IACvB,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,MAAM,cAAc,GAAG,MAAM,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC,MAAM,CAC7D,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,EAAE;IACnB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACd,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;KAChB;IACD,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACpB,OAAO,GAAG,CAAC;AACb,CAAC,EACD,EAAgC,CACjC,CAAC;AAEF;;;;;;;;;GASG;AACI,KAAK,UAAU,QAAQ,CAC5B,GAAW,EACX,OAAiB,EACjB,OAAiB,EACjB,WAAuB,EACvB,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAI,wBAAM,CAAC;QAC9B,GAAG;QACH,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACxE,OAAO;QACP,iBAAiB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;KACvE,CAAC,CAAC,QAAQ,EAAE,CAAC;IAEd,uDAAuD;IACvD,uDAAuD;IACvD,2DAA2D;IAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,MAAM,CACxD,CAAC,GAAG,EAAE,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,eAAe,GAAG,iBAAiB,CAAC,QAAQ,CAAC,CAAC;QACpD,IAAI,eAAe,IAAI,WAAW,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE;YAC5D,GAAG,CAAC,eAAe,CAAC,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;SAC3D;QACD,OAAO,GAAG,CAAC;IACb,CAAC,EACD,EAA8B,CAC/B,CAAC;IAEF,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;QAClC,MAAM,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC;QACrC,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAC1D,MAAM,CAAC,KAAK,CAAC,KAAK,QAAQ,KAAK,KAAK,EAAE,CAAC,CAAC;SACzC;KACF;SAAM;QACL,MAAM,CAAC,IAAI,CACT,2EAA2E;YACzE,gEAAgE;YAChE,4EAA4E;YAC5E,4EAA4E;YAC5E,2EAA2E;YAC3E,6EAA6E;YAC7E,2CAA2C,CAC9C,CAAC;KACH;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AA9CD,4BA8CC"}
78
lib/count-loc.test.js
generated
Normal file
@@ -0,0 +1,78 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const count_loc_1 = require("./count-loc");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("ensure lines of code works for cpp and js", async (t) => {
const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.cpp, languages_1.Language.javascript], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
cpp: 6,
javascript: 9,
});
});
(0, ava_1.default)("ensure lines of code works for csharp", async (t) => {
const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.csharp], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
csharp: 10,
});
});
(0, ava_1.default)("ensure lines of code can handle undefined language", async (t) => {
const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.javascript, languages_1.Language.python, "hucairz"], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
javascript: 9,
python: 5,
});
});
(0, ava_1.default)("ensure lines of code can handle empty languages", async (t) => {
const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], [], [], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {});
});
(0, ava_1.default)("ensure lines of code can handle includes", async (t) => {
// note that "**" is always included. The includes are for extra
// directories outside the normal structure.
const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), ["../../src/testdata"], [], [languages_1.Language.javascript], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
javascript: 12,
});
});
(0, ava_1.default)("ensure lines of code can handle empty includes", async (t) => {
// note that "**" is always included. The includes are for extra
// directories outside the normal structure.
const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), ["idontexist"], [], [languages_1.Language.javascript], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
// should get no results
});
});
(0, ava_1.default)("ensure lines of code can handle exclude", async (t) => {
const results = await (0, count_loc_1.countLoc)(path.join(__dirname, "../tests/multi-language-repo"), [], ["**/*.py"], [languages_1.Language.javascript, languages_1.Language.python], (0, logging_1.getRunnerLogger)(true));
t.deepEqual(results, {
javascript: 9,
});
});
//# sourceMappingURL=count-loc.test.js.map
1
lib/count-loc.test.js.map
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"count-loc.test.js","sourceRoot":"","sources":["../src/count-loc.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,2CAAuC;AACvC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAE7C,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,GAAG,EAAE,oBAAQ,CAAC,UAAU,CAAC,EACnC,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,GAAG,EAAE,CAAC;QACN,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,uCAAuC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACxD,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,MAAM,CAAC,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,MAAM,EAAE,EAAE;KACX,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oDAAoD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrE,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,EAAE,SAAqB,CAAC,EAC7D,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;QACb,MAAM,EAAE,CAAC;KACV,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iDAAiD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClE,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,EAAE,EACF,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,oBAAoB,CAAC,EACtB,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,EAAE;KACf,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,gDAAgD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjE,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,YAAY,CAAC,EACd,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;IACnB,wBAAwB;KACzB,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,OAAO,GAAG,MAAM,IAAA,oBAAQ,EAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,CAAC,SAAS,CAAC,EACX,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,CAAC,EACtC,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
74
lib/database-upload.js
generated
Normal file
@@ -0,0 +1,74 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.uploadDatabases = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const codeql_1 = require("./codeql");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
|
||||
logger.debug("Database upload disabled in workflow. Skipping upload.");
|
||||
return;
|
||||
}
|
||||
// Do nothing when not running against github.com
|
||||
if (config.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
|
||||
logger.debug("Not running against github.com. Skipping upload.");
|
||||
return;
|
||||
}
|
||||
if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
|
||||
// We only want to upload a database if we are analyzing the default branch.
|
||||
logger.debug("Not analyzing default branch. Skipping upload.");
|
||||
return;
|
||||
}
|
||||
const client = (0, api_client_1.getApiClient)(apiDetails);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
// Upload the database bundle.
|
||||
// Although we are uploading arbitrary file contents to the API, it's worth
|
||||
// noting that it's the API's job to validate that the contents is acceptable.
|
||||
// This API method is available to anyone with write access to the repo.
|
||||
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
|
||||
try {
|
||||
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
name: `${language}-database`,
|
||||
data: payload,
|
||||
headers: {
|
||||
authorization: `token ${apiDetails.auth}`,
|
||||
"Content-Type": "application/zip",
|
||||
},
|
||||
});
|
||||
logger.debug(`Successfully uploaded database for ${language}`);
|
||||
}
|
||||
catch (e) {
|
||||
console.log(e);
|
||||
// Log a warning but don't fail the workflow
|
||||
logger.warning(`Failed to upload database for ${language}: ${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.uploadDatabases = uploadDatabases;
|
||||
//# sourceMappingURL=database-upload.js.map
|
||||
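// Hypothetical invocation sketch, not part of the diff above: uploadDatabases is called
// after analysis with the repository name-with-owner, the resolved config, the GitHub API
// details, and a logger already in scope. The { owner, repo } shape matches the fields the
// function above reads from repositoryNwo.
// const { uploadDatabases } = require("./database-upload");
// await uploadDatabases({ owner: "github", repo: "codeql-action" }, config, apiDetails, logger);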
1
lib/database-upload.js.map
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAC7B,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CACnD,CAAC;QACF,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;gBAC5B,IAAI,EAAE,OAAO;gBACb,OAAO,EAAE;oBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;oBACzC,cAAc,EAAE,iBAAiB;iBAClC;aACF,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAxDD,0CAwDC"}
Some files were not shown because too many files have changed in this diff