Mirror of https://github.com/github/codeql-action.git (synced 2025-12-06 15:58:06 +08:00)

Compare commits: codeql-bun...v1.1.17 (521 commits)
[Commit list: 521 commits in this comparison. The Author / SHA1 / Date table contains only bare SHA1 values; the author, date, and commit-message cells are empty.]
.github/check-sarif/action.yml (vendored, new file, 20 lines)

@@ -0,0 +1,20 @@
name: Check SARIF
description: Checks a SARIF file to see if certain queries were run and others were not run.
inputs:
  sarif-file:
    required: true
    description: The SARIF file to check

  queries-run:
    required: true
    description: |
      Comma separated list of query ids that should be included in this SARIF file.

  queries-not-run:
    required: true
    description: |
      Comma separated list of query ids that should NOT be included in this SARIF file.

runs:
  using: node12
  main: index.js
.github/check-sarif/index.js (vendored, new file, 43 lines)

@@ -0,0 +1,43 @@
'use strict'

const core = require('@actions/core')
const fs = require('fs')

const sarif = JSON.parse(fs.readFileSync(core.getInput('sarif-file'), 'utf8'))
const rules = sarif.runs[0].tool.extensions.flatMap(ext => ext.rules || [])
const ruleIds = rules.map(rule => rule.id)

// Check that all the expected queries ran
const expectedQueriesRun = getQueryIdsInput('queries-run')
const queriesThatShouldHaveRunButDidNot = expectedQueriesRun.filter(queryId => !ruleIds.includes(queryId))

if (queriesThatShouldHaveRunButDidNot.length > 0) {
  core.setFailed(`The following queries were expected to run but did not: ${queriesThatShouldHaveRunButDidNot.join(', ')}`)
}

// Check that all the unexpected queries did not run
const expectedQueriesNotRun = getQueryIdsInput('queries-not-run')

const queriesThatShouldNotHaveRunButDid = expectedQueriesNotRun.filter(queryId => ruleIds.includes(queryId))

if (queriesThatShouldNotHaveRunButDid.length > 0) {
  core.setFailed(`The following queries were NOT expected to have run but did: ${queriesThatShouldNotHaveRunButDid.join(', ')}`)
}


core.startGroup('All queries run')
rules.forEach(rule => {
  core.info(`${rule.id}: ${(rule.properties && rule.properties.name) || rule.name}`)
})
core.endGroup()

core.startGroup('Full SARIF')
core.info(JSON.stringify(sarif, null, 2))
core.endGroup()

function getQueryIdsInput(name) {
  return core.getInput(name)
    .split(',')
    .map(q => q.trim())
    .filter(q => q.length > 0)
}
.github/query-filter-test/action.yml (vendored, new file, 52 lines)

@@ -0,0 +1,52 @@
name: Query Filter Test
description: Runs a test of query filters using the check sarif action
inputs:
  sarif-file:
    required: true
    description: The SARIF file to check

  queries-run:
    required: true
    description: |
      Comma separated list of query ids that should be included in this SARIF file.

  queries-not-run:
    required: true
    description: |
      Comma separated list of query ids that should NOT be included in this SARIF file.

  config-file:
    required: true
    description: |
      The location of the codeql configuration file to use.

  tools:
    required: true
    description: |
      The url of codeql to use.

runs:
  using: composite
  steps:
    - uses: ./../action/init
      with:
        languages: javascript
        config-file: ${{ inputs.config-file }}
        tools: ${{ inputs.tools }}
        db-location: ${{ runner.temp }}/query-filter-test
    - uses: ./../action/analyze
      with:
        output: ${{ runner.temp }}/results
        upload-database: false
        upload: false
      env:
        TEST_MODE: "true"
    - name: Check SARIF
      uses: ./../action/.github/check-sarif
      with:
        sarif-file: ${{ inputs.sarif-file }}
        queries-run: ${{ inputs.queries-run}}
        queries-not-run: ${{ inputs.queries-not-run}}
    - name: Cleanup after test
      shell: bash
      run: rm -rf "$RUNNER_TEMP/results" "$RUNNER_TEMP//query-filter-test"
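For orientation, a minimal sketch of how a PR-check workflow step might invoke this composite action. The `uses:` path, query ids, config file, and tools value are illustrative placeholders assembled from other parts of this diff, not a step that appears in the repository:

    - name: Query filter test (illustrative)
      uses: ./../action/.github/query-filter-test
      with:
        # Placeholder values for illustration only.
        sarif-file: ${{ runner.temp }}/results/javascript.sarif
        queries-run: javascript/example/two-block
        queries-not-run: javascript/example/empty-or-one-block
        config-file: .github/codeql/codeql-config-packaging3.yml
        tools: ${{ steps.prepare-test.outputs.tools-url }}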
.github/update-release-branch.py (vendored, 226 lines changed)

@@ -1,12 +1,9 @@
import argparse
import datetime
from github import Github
import random
import requests
import subprocess
import sys
import json
import datetime
import os
import subprocess

EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog

@@ -16,21 +13,25 @@ No user facing changes.

"""

# The branch being merged from.
# This is the one that contains day-to-day development work.
MAIN_BRANCH = 'main'
# The branch being merged into.
# This is the release branch that users reference.
LATEST_RELEASE_BRANCH = 'v1'
# Value of the mode flag for a v1 release
V1_MODE = 'v1-release'

# Value of the mode flag for a v2 release
V2_MODE = 'v2-release'

SOURCE_BRANCH_FOR_MODE = { V1_MODE: 'releases/v2', V2_MODE: 'main' }
TARGET_BRANCH_FOR_MODE = { V1_MODE: 'releases/v1', V2_MODE: 'releases/v2' }

# Name of the remote
ORIGIN = 'origin'

# Runs git with the given args and returns the stdout.
# Raises an error if git does not exit successfully.
def run_git(*args):
# Raises an error if git does not exit successfully (unless passed
# allow_non_zero_exit_code=True).
def run_git(*args, allow_non_zero_exit_code=False):
  cmd = ['git', *args]
  p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  if (p.returncode != 0):
  if not allow_non_zero_exit_code and p.returncode != 0:
    raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
  return p.stdout.decode('ascii')

@@ -38,8 +39,10 @@ def run_git(*args):
def branch_exists_on_remote(branch_name):
  return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''

# Opens a PR from the given branch to the release branch
def open_pr(repo, all_commits, short_main_sha, branch_name):
# Opens a PR from the given branch to the target branch
def open_pr(
    repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch,
    conductor, is_v2_release, labels, conflicted_files):
  # Sort the commits into the pull requests that introduced them,
  # and any commits that don't have a pull request
  pull_requests = []

@@ -61,9 +64,8 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):

  # Start constructing the body text
  body = []
  body.append('Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH)
  body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)

  conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
  body.append('')
  body.append('Conductor for this PR is @' + conductor)

@@ -80,43 +82,46 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
    body.append('')
    body.append('Contains the following commits not from a pull request:')
    for commit in commits_without_pull_requests:
      body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + ' (@' + commit.author.login + ')')
      author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
      body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)

  body.append('')
  body.append('Please review the following:')
  if len(conflicted_files) > 0:
    body.append(' - [ ] You have added commits to this branch that resolve the merge conflicts ' +
      'in the following files:')
    body.extend([f' - [ ] `{file}`' for file in conflicted_files])
    body.append(' - [ ] Another maintainer has reviewed the additional commits you added to this ' +
      'branch to resolve the merge conflicts.')
  body.append(' - [ ] The CHANGELOG displays the correct version and date.')
  body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
  body.append(' - [ ] There are no unexpected commits being merged into the ' + LATEST_RELEASE_BRANCH + ' branch.')
  body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
  body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
  body.append(' - [ ] The mergeback PR is merged back into ' + MAIN_BRANCH + ' after this PR is merged.')
  if is_v2_release:
    body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
    body.append(' - [ ] The v1 release PR is merged after this PR is merged.')

  title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
  title = 'Merge ' + source_branch + ' into ' + target_branch

  # Create the pull request
  # PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
  # a maintainer can take the PR out of draft, thereby triggering the PR checks.
  pr = repo.create_pull(title=title, body='\n'.join(body), head=branch_name, base=LATEST_RELEASE_BRANCH, draft=True)
  pr = repo.create_pull(title=title, body='\n'.join(body), head=new_branch_name, base=target_branch, draft=True)
  pr.add_to_labels(*labels)
  print('Created PR #' + str(pr.number))

  # Assign the conductor
  pr.add_to_assignees(conductor)
  print('Assigned PR to ' + conductor)

# Gets the person who should be in charge of the mergeback PR
def get_conductor(repo, pull_requests, other_commits):
  # If there are any PRs then use whoever merged the last one
  if len(pull_requests) > 0:
    return get_merger_of_pr(repo, pull_requests[-1])

  # Otherwise take the author of the latest commit
  return other_commits[-1].author.login

# Gets a list of the SHAs of all commits that have happened on main
# since the release branched off.
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + ORIGIN + '/' + MAIN_BRANCH).strip().split('\n')
# Gets a list of the SHAs of all commits that have happened on the source branch
# since the last release to the target branch.
# This will not include any commits that exist on the target branch
# that aren't on the source branch.
def get_commit_difference(repo, source_branch, target_branch):
  # Passing split nothing means that the empty string splits to nothing: compare `''.split() == []`
  # to `''.split('\n') == ['']`.
  commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + target_branch + '..' + ORIGIN + '/' + source_branch).strip().split()

  # Convert to full-fledged commit objects
  commits = [repo.get_commit(c) for c in commits]

@@ -136,7 +141,7 @@ def get_truncated_commit_message(commit):
  else:
    return message

# Converts a commit into the PR that introduced it to the main branch.
# Converts a commit into the PR that introduced it to the source branch.
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
  prs = commit.get_pulls()

@@ -179,29 +184,65 @@ def update_changelog(version):


def main():
  if len(sys.argv) != 3:
    raise Exception('Usage: update-release.branch.py <github token> <repository nwo>')
  github_token = sys.argv[1]
  repository_nwo = sys.argv[2]
  parser = argparse.ArgumentParser('update-release-branch.py')

  repo = Github(github_token).get_repo(repository_nwo)
  parser.add_argument(
    '--github-token',
    type=str,
    required=True,
    help='GitHub token, typically from GitHub Actions.'
  )
  parser.add_argument(
    '--repository-nwo',
    type=str,
    required=True,
    help='The nwo of the repository, for example github/codeql-action.'
  )
  parser.add_argument(
    '--mode',
    type=str,
    required=True,
    choices=[V2_MODE, V1_MODE],
    help=f"Which release to perform. '{V2_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V2_MODE]} as the source " +
      f"branch and {TARGET_BRANCH_FOR_MODE[V2_MODE]} as the target branch. " +
      f"'{V1_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V1_MODE]} as the source branch and " +
      f"{TARGET_BRANCH_FOR_MODE[V1_MODE]} as the target branch."
  )
  parser.add_argument(
    '--conductor',
    type=str,
    required=True,
    help='The GitHub handle of the person who is conducting the release process.'
  )

  args = parser.parse_args()

  source_branch = SOURCE_BRANCH_FOR_MODE[args.mode]
  target_branch = TARGET_BRANCH_FOR_MODE[args.mode]

  repo = Github(args.github_token).get_repo(args.repository_nwo)
  version = get_current_version()

  if args.mode == V1_MODE:
    # Change the version number to a v1 equivalent
    version = get_current_version()
    version = f'1{version[1:]}'

  # Print what we intend to go
  print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
  short_main_sha = run_git('rev-parse', '--short', ORIGIN + '/' + MAIN_BRANCH).strip()
  print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
  print('Considering difference between ' + source_branch + ' and ' + target_branch)
  source_branch_short_sha = run_git('rev-parse', '--short', ORIGIN + '/' + source_branch).strip()
  print('Current head of ' + source_branch + ' is ' + source_branch_short_sha)

  # See if there are any commits to merge in
  commits = get_commit_difference(repo)
  commits = get_commit_difference(repo=repo, source_branch=source_branch, target_branch=target_branch)
  if len(commits) == 0:
    print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
    print('No commits to merge from ' + source_branch + ' to ' + target_branch)
    return

  # The branch name is based off of the name of branch being merged into
  # and the SHA of the branch being merged from. Thus if the branch already
  # exists we can assume we don't need to recreate it.
  new_branch_name = 'update-v' + version + '-' + short_main_sha
  new_branch_name = 'update-v' + version + '-' + source_branch_short_sha
  print('Branch name is ' + new_branch_name)

  # Check if the branch already exists. If so we can abort as this script
@@ -212,19 +253,90 @@ def main():

  # Create the new branch and push it to the remote
  print('Creating branch ' + new_branch_name)
  run_git('checkout', '-b', new_branch_name, ORIGIN + '/' + MAIN_BRANCH)

  print('Updating changelog')
  update_changelog(version)
  # The process of creating the v1 release can run into merge conflicts. We commit the unresolved
  # conflicts so a maintainer can easily resolve them (vs erroring and requiring maintainers to
  # reconstruct the release manually)
  conflicted_files = []

  # Create a commit that updates the CHANGELOG
  run_git('add', 'CHANGELOG.md')
  run_git('commit', '-m', version)
  if args.mode == V1_MODE:
    # If we're performing a backport, start from the target branch
    print(f'Creating {new_branch_name} from the {ORIGIN}/{target_branch} branch')
    run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{target_branch}')

    # Revert the commit that we made as part of the last release that updated the version number and
    # changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
    # package.json files when we merge in the v2 branch.
    # This commit will not exist the first time we release the v1 branch from the v2 branch, so we
    # use `git log --grep` to conditionally revert the commit.
    print('Reverting the 1.x.x version number and changelog updates from the last release to avoid conflicts')
    v1_update_commits = run_git('log', '--grep', '^Update version and changelog for v', '--format=%H').split()

    if len(v1_update_commits) > 0:
      print(f'  Reverting {v1_update_commits[0]}')
      # Only revert the newest commit as older ones will already have been reverted in previous
      # releases.
      run_git('revert', v1_update_commits[0], '--no-edit')

      # Also revert the "Update checked-in dependencies" commit created by Actions.
      update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
      print(f'  Reverting {update_dependencies_commit}')
      run_git('revert', update_dependencies_commit, '--no-edit')

    else:
      print('  Nothing to revert.')

    print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
    # Commit any conflicts (see the comment for `conflicted_files`)
    run_git('merge', f'{ORIGIN}/{source_branch}', allow_non_zero_exit_code=True)
    conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
    if len(conflicted_files) > 0:
      run_git('add', '.')
      run_git('commit', '--no-edit')

    # Migrate the package version number from a v2 version number to a v1 version number
    print(f'Setting version number to {version}')
    subprocess.run(['npm', 'version', version, '--no-git-tag-version'])
    run_git('add', 'package.json', 'package-lock.json')

    # Migrate the changelog notes from v2 version numbers to v1 version numbers
    print('Migrating changelog notes from v2 to v1')
    subprocess.run(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])

    # Remove changelog notes from v2 that don't apply to v1
    subprocess.run(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])

    # Amend the commit generated by `npm version` to update the CHANGELOG
    run_git('add', 'CHANGELOG.md')
    run_git('commit', '-m', f'Update version and changelog for v{version}')
  else:
    # If we're performing a standard release, there won't be any new commits on the target branch,
    # as these will have already been merged back into the source branch. Therefore we can just
    # start from the source branch.
    run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{source_branch}')

    print('Updating changelog')
    update_changelog(version)

    # Create a commit that updates the CHANGELOG
    run_git('add', 'CHANGELOG.md')
    run_git('commit', '-m', f'Update changelog for v{version}')

  run_git('push', ORIGIN, new_branch_name)

  # Open a PR to update the branch
  open_pr(repo, commits, short_main_sha, new_branch_name)
  open_pr(
    repo,
    commits,
    source_branch_short_sha,
    new_branch_name,
    source_branch=source_branch,
    target_branch=target_branch,
    conductor=args.conductor,
    is_v2_release=args.mode == V2_MODE,
    labels=['Update dependencies'] if args.mode == V1_MODE else [],
    conflicted_files=conflicted_files
  )

if __name__ == '__main__':
  main()
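Based on the argparse flags introduced above, a release workflow step could invoke the updated script roughly as follows. This is a hypothetical sketch (the conductor handle is a placeholder), not a workflow that appears in this diff:

    - name: Update release branch (illustrative)
      run: |
        # Hypothetical invocation; flags match the argparse definitions above.
        python3 .github/update-release-branch.py \
          --github-token "${{ secrets.GITHUB_TOKEN }}" \
          --repository-nwo github/codeql-action \
          --mode v1-release \
          --conductor some-maintainer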
.github/workflows/__analyze-ref-input.yml (generated, vendored, 55 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,19 +24,53 @@ jobs:
  analyze-ref-input:
    strategy:
      matrix:
        version:
          - stable-20210308
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest, windows-2019]
        include:
          - os: ubuntu-latest
            version: stable-20210308
          - os: macos-latest
            version: stable-20210308
          - os: windows-2019
            version: stable-20210308
          - os: ubuntu-latest
            version: stable-20210319
          - os: macos-latest
            version: stable-20210319
          - os: windows-2019
            version: stable-20210319
          - os: ubuntu-latest
            version: stable-20210809
          - os: macos-latest
            version: stable-20210809
          - os: windows-2019
            version: stable-20210809
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: windows-2019
            version: cached
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-2019
            version: latest
          - os: windows-2022
            version: latest
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-2019
            version: nightly-latest
          - os: windows-2022
            version: nightly-latest
    name: "Analyze: 'ref' and 'sha' from inputs"
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
.github/workflows/__autobuild-action.yml (generated, vendored, new file, 72 lines)

@@ -0,0 +1,72 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - autobuild-action
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  autobuild-action:
    strategy:
      matrix:
        include:
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-2019
            version: latest
          - os: windows-2022
            version: latest
    name: autobuild-action
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: csharp
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/autobuild
        env:
          # Explicitly disable the CLR tracer.
          COR_ENABLE_PROFILING: ''
          COR_PROFILER: ''
          COR_PROFILER_PATH_64: ''
          CORECLR_ENABLE_PROFILING: ''
          CORECLR_PROFILER: ''
          CORECLR_PROFILER_PATH_64: ''
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
      - name: Check database
        shell: bash
        run: |
          cd "$RUNNER_TEMP/codeql_databases"
          if [[ ! -d csharp ]]; then
            echo "Did not find a C# database"
            exit 1
          fi
env:
  INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__debug-artifacts.yml (generated, vendored, 41 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,19 +24,37 @@ jobs:
  debug-artifacts:
    strategy:
      matrix:
        version:
          - stable-20210308
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
        include:
          - os: ubuntu-latest
            version: stable-20210308
          - os: macos-latest
            version: stable-20210308
          - os: ubuntu-latest
            version: stable-20210319
          - os: macos-latest
            version: stable-20210319
          - os: ubuntu-latest
            version: stable-20210809
          - os: macos-latest
            version: stable-20210809
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
    name: Debug artifact upload
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
@@ -52,7 +71,7 @@ jobs:
        run: ./build.sh
      - uses: ./../action/analyze
        id: analysis
      - uses: actions/download-artifact@v2
      - uses: actions/download-artifact@v3
        with:
          name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
      - shell: bash
.github/workflows/__extractor-ram-threads.yml (generated, vendored, 11 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,13 +24,15 @@ jobs:
  extractor-ram-threads:
    strategy:
      matrix:
        version: [latest]
        os: [ubuntu-latest]
        include:
          - os: ubuntu-latest
            version: latest
    name: Extractor ram and threads options test
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
.github/workflows/__go-custom-queries.yml (generated, vendored, 57 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,25 +24,59 @@ jobs:
  go-custom-queries:
    strategy:
      matrix:
        version:
          - stable-20210308
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest, windows-2019]
        include:
          - os: ubuntu-latest
            version: stable-20210308
          - os: macos-latest
            version: stable-20210308
          - os: windows-2019
            version: stable-20210308
          - os: ubuntu-latest
            version: stable-20210319
          - os: macos-latest
            version: stable-20210319
          - os: windows-2019
            version: stable-20210319
          - os: ubuntu-latest
            version: stable-20210809
          - os: macos-latest
            version: stable-20210809
          - os: windows-2019
            version: stable-20210809
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: windows-2019
            version: cached
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-2019
            version: latest
          - os: windows-2022
            version: latest
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-2019
            version: nightly-latest
          - os: windows-2022
            version: nightly-latest
    name: 'Go: Custom queries'
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
      - uses: actions/setup-go@v3
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
.github/workflows/__go-custom-tracing-autobuild.yml (generated, vendored, 41 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,25 +24,43 @@ jobs:
  go-custom-tracing-autobuild:
    strategy:
      matrix:
        version:
          - stable-20210308
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
        include:
          - os: ubuntu-latest
            version: stable-20210308
          - os: macos-latest
            version: stable-20210308
          - os: ubuntu-latest
            version: stable-20210319
          - os: macos-latest
            version: stable-20210319
          - os: ubuntu-latest
            version: stable-20210809
          - os: macos-latest
            version: stable-20210809
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
    name: 'Go: Autobuild custom tracing'
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
      - uses: actions/setup-go@v3
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
.github/workflows/__go-custom-tracing.yml (generated, vendored, 57 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,25 +24,59 @@ jobs:
  go-custom-tracing:
    strategy:
      matrix:
        version:
          - stable-20210308
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest, windows-2019]
        include:
          - os: ubuntu-latest
            version: stable-20210308
          - os: macos-latest
            version: stable-20210308
          - os: windows-2019
            version: stable-20210308
          - os: ubuntu-latest
            version: stable-20210319
          - os: macos-latest
            version: stable-20210319
          - os: windows-2019
            version: stable-20210319
          - os: ubuntu-latest
            version: stable-20210809
          - os: macos-latest
            version: stable-20210809
          - os: windows-2019
            version: stable-20210809
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: windows-2019
            version: cached
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-2019
            version: latest
          - os: windows-2022
            version: latest
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-2019
            version: nightly-latest
          - os: windows-2022
            version: nightly-latest
    name: 'Go: Custom tracing'
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
      - uses: actions/setup-go@v3
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
.github/workflows/__javascript-source-root.yml (generated, vendored, 15 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,13 +24,19 @@ jobs:
  javascript-source-root:
    strategy:
      matrix:
        version: [latest, cached, nightly-latest] # This feature is not compatible with old CLIs
        os: [ubuntu-latest]
        include:
          - os: ubuntu-latest
            version: latest
          - os: ubuntu-latest
            version: cached
          - os: ubuntu-latest
            version: nightly-latest
    name: Custom source root
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
.github/workflows/__ml-powered-queries.yml (generated, vendored, new file, 129 lines)

@@ -0,0 +1,129 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - ML-powered queries
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  ml-powered-queries:
    strategy:
      matrix:
        include:
          - os: ubuntu-latest
            version: stable-20220120
          - os: macos-latest
            version: stable-20220120
          - os: windows-latest
            version: stable-20220120
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: windows-latest
            version: cached
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-latest
            version: latest
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-latest
            version: nightly-latest
    name: ML-powered queries
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: javascript
          queries: security-extended
          source-root: ./../action/tests/ml-powered-queries-repo
          tools: ${{ steps.prepare-test.outputs.tools-url }}

      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
          upload-database: false
        env:
          TEST_MODE: true

      - name: Upload SARIF
        uses: actions/upload-artifact@v3
        with:
          name: ml-powered-queries-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
          retention-days: 7

      - name: Check results
        # Running ML-powered queries on Windows requires CodeQL CLI 2.9.0+. We don't run these checks
        # against Windows and `cached` while CodeQL CLI 2.9.0 makes its way into `cached` to avoid the
        # test starting to fail when the cached CodeQL Bundle gets updated. Once the CodeQL Bundle
        # containing CodeQL CLI 2.9.0 has been fully released, we can drop this line and start running
        # these checks on Windows and `cached`.
        if: matrix.os != 'windows-latest' || matrix.version != 'cached'
        env:
          # Running on Windows requires CodeQL CLI 2.9.0+, which has so far only made it to 'latest'.
          SHOULD_RUN_ML_POWERED_QUERIES: ${{ matrix.os != 'windows-latest' || matrix.version
            == 'latest' || matrix.version == 'nightly-latest' }}
        shell: bash
        run: |
          echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"

          cd "$RUNNER_TEMP/results"
          # We should run at least the ML-powered queries in `expected_rules`.
          expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"

          for rule in ${expected_rules}; do
            found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
              flatten | .[].id] | any(. == $rule)' javascript.sarif)
            echo "Did find rule '${rule}': ${found_rule}"
            if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
              echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
              exit 1
            elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
              echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
              exit 1
            fi
          done

          # We should have at least one alert from an ML-powered query.
          num_alerts=$(jq '[.runs[0].results[] |
            select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
            javascript.sarif)
          echo "Found ${num_alerts} alerts from ML-powered queries.";
          if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
            echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
            exit 1
          elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
            echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
            exit 1
          fi
env:
  INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__multi-language-autodetect.yml (generated, vendored, 39 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,19 +24,37 @@ jobs:
  multi-language-autodetect:
    strategy:
      matrix:
        version:
          - stable-20210308
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
        include:
          - os: ubuntu-latest
            version: stable-20210308
          - os: macos-latest
            version: stable-20210308
          - os: ubuntu-latest
            version: stable-20210319
          - os: macos-latest
            version: stable-20210319
          - os: ubuntu-latest
            version: stable-20210809
          - os: macos-latest
            version: stable-20210809
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
    name: Multi-language repository
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
.github/workflows/__packaging-config-inputs-js.yml (generated, vendored, 39 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,13 +24,35 @@ jobs:
  packaging-config-inputs-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
        include:
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-2019
            version: latest
          - os: windows-2022
            version: latest
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: windows-2019
            version: cached
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-2019
            version: nightly-latest
          - os: windows-2022
            version: nightly-latest
    name: 'Packaging: Config and input'
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
@@ -38,7 +61,7 @@ jobs:
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging3.yml
          packs: +dsp-testing/codeql-pack1@0.1.0
          packs: +dsp-testing/codeql-pack1@1.0.0
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
@@ -53,11 +76,11 @@ jobs:
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
          # We should have 4 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
.github/workflows/__packaging-config-js.yml (generated, vendored, 37 lines changed)

@@ -11,7 +11,8 @@ on:
  push:
    branches:
      - main
      - v1
      - releases/v1
      - releases/v2
  pull_request:
    types:
      - opened
@@ -23,13 +24,35 @@ jobs:
  packaging-config-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
        include:
          - os: ubuntu-latest
            version: latest
          - os: macos-latest
            version: latest
          - os: windows-2019
            version: latest
          - os: windows-2022
            version: latest
          - os: ubuntu-latest
            version: cached
          - os: macos-latest
            version: cached
          - os: windows-2019
            version: cached
          - os: ubuntu-latest
            version: nightly-latest
          - os: macos-latest
            version: nightly-latest
          - os: windows-2019
            version: nightly-latest
          - os: windows-2022
            version: nightly-latest
    name: 'Packaging: Config file'
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
@@ -52,11 +75,11 @@ jobs:
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
          # We should have 4 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
39
.github/workflows/__packaging-inputs-js.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,13 +24,35 @@ jobs:
|
||||
packaging-inputs-js:
|
||||
strategy:
|
||||
matrix:
|
||||
version: [nightly-20210831] # This CLI version is known to work with package used in this test
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: 'Packaging: Action input'
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
@@ -39,7 +62,7 @@ jobs:
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging2.yml
|
||||
languages: javascript
|
||||
packs: dsp-testing/codeql-pack1@0.1.0, dsp-testing/codeql-pack2
|
||||
packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2, dsp-testing/codeql-pack3:other-query.ql
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
@@ -53,11 +76,11 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should have 3 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
|
||||
# We should have 4 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
|
||||
|
||||
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
|
||||
echo "Found matching rules '$RULES'"
|
||||
if [ "$RULES" != "$EXPECTED_RULES" ]; then
|
||||
echo "Did not match expected rules '$EXPECTED_RULES'."
|
||||
|
||||
55
.github/workflows/__remote-config.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,19 +24,53 @@ jobs:
|
||||
remote-config:
|
||||
strategy:
|
||||
matrix:
|
||||
version:
|
||||
- stable-20210308
|
||||
- stable-20210319
|
||||
- stable-20210809
|
||||
- cached
|
||||
- latest
|
||||
- nightly-latest
|
||||
os: [ubuntu-latest, macos-latest, windows-2019]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: Remote config file
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
|
||||
27
.github/workflows/__rubocop-multi-language.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,19 +24,25 @@ jobs:
|
||||
rubocop-multi-language:
|
||||
strategy:
|
||||
matrix:
|
||||
version:
|
||||
- stable-20210308
|
||||
- stable-20210319
|
||||
- stable-20210809
|
||||
- cached
|
||||
- latest
|
||||
- nightly-latest
|
||||
os: [ubuntu-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: RuboCop multi-language
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
|
||||
29
.github/workflows/__split-workflow.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,13 +24,25 @@ jobs:
|
||||
split-workflow:
|
||||
strategy:
|
||||
matrix:
|
||||
version: [nightly-20210831] # This CLI version is known to work with package used in this test
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Split workflow
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
@@ -38,7 +51,7 @@ jobs:
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
packs: +dsp-testing/codeql-pack1@0.1.0
|
||||
packs: +dsp-testing/codeql-pack1@1.0.0
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Build code
|
||||
@@ -67,11 +80,11 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/results"
|
||||
# We should have 3 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
|
||||
# We should have 4 hits from these rules
|
||||
EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
|
||||
|
||||
# use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
|
||||
RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
|
||||
echo "Found matching rules '$RULES'"
|
||||
if [ "$RULES" != "$EXPECTED_RULES" ]; then
|
||||
echo "Did not match expected rules '$EXPECTED_RULES'."
|
||||
|
||||
67
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Autobuild working directory
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
test-autobuild-working-dir:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
name: Autobuild working directory
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- name: Test setup
|
||||
shell: bash
|
||||
run: |
|
||||
# Make sure that Gradle build succeeds in autobuild-dir ...
|
||||
cp -a ../action/tests/java-repo autobuild-dir
|
||||
# ... and fails if attempted in the current directory
|
||||
echo > build.gradle
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: java
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/autobuild
|
||||
with:
|
||||
working-directory: autobuild-dir
|
||||
- uses: ./../action/analyze
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- name: Check database
|
||||
shell: bash
|
||||
run: |
|
||||
cd "$RUNNER_TEMP/codeql_databases"
|
||||
if [[ ! -d java ]]; then
|
||||
echo "Did not find a Java database"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
11
.github/workflows/__test-local-codeql.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,13 +24,15 @@ jobs:
|
||||
test-local-codeql:
|
||||
strategy:
|
||||
matrix:
|
||||
version: [nightly-latest]
|
||||
os: [ubuntu-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Local CodeQL bundle
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
|
||||
11
.github/workflows/__test-proxy.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,13 +24,15 @@ jobs:
|
||||
test-proxy:
|
||||
strategy:
|
||||
matrix:
|
||||
version: [latest]
|
||||
os: [ubuntu-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
name: Proxy test
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
|
||||
21
.github/workflows/__test-ruby.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,13 +24,25 @@ jobs:
|
||||
test-ruby:
|
||||
strategy:
|
||||
matrix:
|
||||
version: [latest, cached, nightly-latest]
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
name: Ruby analysis
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
|
||||
27
.github/workflows/__unset-environment.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,19 +24,25 @@ jobs:
|
||||
unset-environment:
|
||||
strategy:
|
||||
matrix:
|
||||
version:
|
||||
- stable-20210308
|
||||
- stable-20210319
|
||||
- stable-20210809
|
||||
- cached
|
||||
- latest
|
||||
- nightly-latest
|
||||
os: [ubuntu-latest]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
name: Test unsetting environment variables
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
|
||||
55
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
@@ -23,19 +24,53 @@ jobs:
|
||||
upload-ref-sha-input:
|
||||
strategy:
|
||||
matrix:
|
||||
version:
|
||||
- stable-20210308
|
||||
- stable-20210319
|
||||
- stable-20210809
|
||||
- cached
|
||||
- latest
|
||||
- nightly-latest
|
||||
os: [ubuntu-latest, macos-latest, windows-2019]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
|
||||
146
.github/workflows/__with-checkout-path.yml
generated
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Use a custom `checkout_path`
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
with-checkout-path:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210308
|
||||
- os: macos-latest
|
||||
version: stable-20210308
|
||||
- os: windows-2019
|
||||
version: stable-20210308
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210319
|
||||
- os: macos-latest
|
||||
version: stable-20210319
|
||||
- os: windows-2019
|
||||
version: stable-20210319
|
||||
- os: ubuntu-latest
|
||||
version: stable-20210809
|
||||
- os: macos-latest
|
||||
version: stable-20210809
|
||||
- os: windows-2019
|
||||
version: stable-20210809
|
||||
- os: ubuntu-latest
|
||||
version: cached
|
||||
- os: macos-latest
|
||||
version: cached
|
||||
- os: windows-2019
|
||||
version: cached
|
||||
- os: ubuntu-latest
|
||||
version: latest
|
||||
- os: macos-latest
|
||||
version: latest
|
||||
- os: windows-2019
|
||||
version: latest
|
||||
- os: windows-2022
|
||||
version: latest
|
||||
- os: ubuntu-latest
|
||||
version: nightly-latest
|
||||
- os: macos-latest
|
||||
version: nightly-latest
|
||||
- os: windows-2019
|
||||
version: nightly-latest
|
||||
- os: windows-2022
|
||||
version: nightly-latest
|
||||
name: Use a custom `checkout_path`
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
path: x/y/z/some-path
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
# it's enough to test one compiled language and one interpreted language
|
||||
languages: csharp,javascript
|
||||
source-path: x/y/z/some-path/tests/multi-language-repo
|
||||
debug: true
|
||||
- name: Build code (non-windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os != 'Windows' }}
|
||||
run: |
|
||||
$CODEQL_RUNNER x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- name: Build code (windows)
|
||||
shell: bash
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
run: |
|
||||
x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: v1.1.0
|
||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
shell: bash
|
||||
run: |
|
||||
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||
EXPECTED_REF="v1.1.0"
|
||||
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||
|
||||
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||
|
||||
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
@@ -15,11 +15,11 @@ jobs:

steps:
- name: Checkout CodeQL Action
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Check Expected Release Files
run: |
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
set -x
for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz" "codeql-runner-linux" "codeql-runner-macos" "codeql-runner-win.exe"; do
for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz"; do
curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
done
31
.github/workflows/check-for-conflicts.yml
vendored
Normal file
@@ -0,0 +1,31 @@
# Checks for any conflict markers created by git. This check is primarily intended to validate that
# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
name: Check for conflicts

on:
pull_request:
branches: [main, releases/v1, releases/v2]
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
# by other workflows.
types: [opened, synchronize, reopened, ready_for_review]

jobs:
check-for-conflicts:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: Check for conflicts
run: |
# Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
# this to fail the workflow.
FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
'^(<<<<<<<|>>>>>>>)' . || true)
if [[ "${FILES_WITH_CONFLICTS}" ]]; then
echo "Fail: Found merge conflict markers in the following files:"
echo ""
echo "${FILES_WITH_CONFLICTS}"
exit 1
else
echo "Success: Found no merge conflict markers."
fi
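The same conflict-marker scan can be run locally before pushing a backport branch — a minimal sketch mirroring the workflow step above, assuming GNU grep in a POSIX shell:

```bash
#!/usr/bin/env bash
# Fail if any git conflict markers remain anywhere in the working tree.
if grep --extended-regexp --ignore-case --line-number --recursive \
    '^(<<<<<<<|>>>>>>>)' . ; then
  echo "Found merge conflict markers; resolve them before pushing." >&2
  exit 1
fi
echo "No merge conflict markers found."
```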
8
.github/workflows/codeql.yml
vendored
@@ -2,9 +2,9 @@ name: "CodeQL action"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, v1]
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
pull_request:
|
||||
branches: [main, v1]
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
types: [opened, synchronize, reopened, ready_for_review]
|
||||
@@ -20,7 +20,7 @@ jobs:
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Init with default CodeQL bundle from the VM image
|
||||
id: init-default
|
||||
uses: ./init
|
||||
@@ -75,7 +75,7 @@ jobs:
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./init
|
||||
id: init
|
||||
with:
|
||||
|
||||
49
.github/workflows/expected-queries-runs.yml
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
name: Expected queries runs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
|
||||
jobs:
|
||||
expected-queries:
|
||||
name: Expected Queries Tests
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: latest
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
|
||||
- name: Check Sarif
|
||||
uses: ./../action/.github/check-sarif
|
||||
with:
|
||||
sarif-file: ${{ runner.temp }}/results/javascript.sarif
|
||||
queries-run: js/incomplete-hostname-regexp,js/path-injection
|
||||
queries-not-run: foo,bar
|
||||
92
.github/workflows/post-release-mergeback.yml
vendored
@@ -1,7 +1,8 @@
|
||||
# This workflow runs after a release of the action.
|
||||
# It merges any changes from the release back into the
|
||||
# main branch. Typically, this is just a single commit
|
||||
# that updates the changelog.
|
||||
# This workflow runs after a release of the action. For v2 releases, it merges any changes from the
|
||||
# release back into the main branch. Typically, this is just a single commit that updates the
|
||||
# changelog. For v2 and v1 releases, it then (a) tags the merge commit on the release branch that
|
||||
# represents the new release with an `vx.y.z` tag and (b) updates the `vx` tag to refer to this
|
||||
# commit.
|
||||
name: Tag release and merge back
|
||||
|
||||
on:
|
||||
@@ -14,7 +15,8 @@ on:
|
||||
|
||||
push:
|
||||
branches:
|
||||
- v1
|
||||
- releases/v1
|
||||
- releases/v2
|
||||
|
||||
jobs:
|
||||
merge-back:
|
||||
@@ -25,13 +27,16 @@ jobs:
|
||||
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
|
||||
|
||||
steps:
|
||||
- name: Dump GitHub Event context
|
||||
env:
|
||||
GITHUB_EVENT_CONTEXT: "${{ toJson(github.event) }}"
|
||||
run: echo "$GITHUB_EVENT_CONTEXT"
|
||||
- name: Dump environment
|
||||
run: env
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
- name: Dump GitHub context
|
||||
env:
|
||||
GITHUB_CONTEXT: '${{ toJson(github) }}'
|
||||
run: echo "${GITHUB_CONTEXT}"
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
|
||||
- name: Update git config
|
||||
run: |
|
||||
@@ -42,25 +47,25 @@ jobs:
|
||||
id: getVersion
|
||||
run: |
|
||||
VERSION="v$(jq '.version' -r 'package.json')"
|
||||
SHORT_SHA="${GITHUB_SHA:0:8}"
|
||||
echo "::set-output name=version::$VERSION"
|
||||
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${SHORT_SHA}"
|
||||
echo "::set-output name=newBranch::$NEW_BRANCH"
|
||||
echo "::set-output name=version::${VERSION}"
|
||||
short_sha="${GITHUB_SHA:0:8}"
|
||||
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
|
||||
echo "::set-output name=newBranch::${NEW_BRANCH}"
|
||||
|
||||
|
||||
- name: Dump branches
|
||||
env:
|
||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||
run: |
|
||||
echo "BASE_BRANCH $BASE_BRANCH"
|
||||
echo "HEAD_BRANCH $HEAD_BRANCH"
|
||||
echo "NEW_BRANCH $NEW_BRANCH"
|
||||
echo "BASE_BRANCH ${BASE_BRANCH}"
|
||||
echo "HEAD_BRANCH ${HEAD_BRANCH}"
|
||||
echo "NEW_BRANCH ${NEW_BRANCH}"
|
||||
|
||||
- name: Create mergeback branch
|
||||
env:
|
||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||
run: |
|
||||
git checkout -b "$NEW_BRANCH"
|
||||
git checkout -b "${NEW_BRANCH}"
|
||||
|
||||
- name: Check for tag
|
||||
id: check
|
||||
@@ -68,13 +73,13 @@ jobs:
|
||||
VERSION: "${{ steps.getVersion.outputs.version }}"
|
||||
run: |
|
||||
set +e # don't fail on an errored command
|
||||
git ls-remote --tags origin | grep "$VERSION"
|
||||
EXISTS="$?"
|
||||
if [ "$EXISTS" -eq 0 ]; then
|
||||
echo "Tag $TAG exists. Not going to re-release."
|
||||
git ls-remote --tags origin | grep "${VERSION}"
|
||||
exists="$?"
|
||||
if [ "${exists}" -eq 0 ]; then
|
||||
echo "Tag ${VERSION} exists. Not going to re-release."
|
||||
echo "::set-output name=exists::true"
|
||||
else
|
||||
echo "Tag $TAG does not exist yet."
|
||||
echo "Tag ${VERSION} does not exist yet."
|
||||
fi
|
||||
|
||||
# we didn't tag the release during the update-release-branch workflow because the
|
||||
@@ -85,35 +90,48 @@ jobs:
|
||||
env:
|
||||
VERSION: ${{ steps.getVersion.outputs.version }}
|
||||
run: |
|
||||
git tag -a "$VERSION" -m "$VERSION"
|
||||
git fetch --unshallow # unshallow the repo in order to allow pushes
|
||||
git push origin --follow-tags "$VERSION"
|
||||
# Unshallow the repo in order to allow pushes
|
||||
git fetch --unshallow
|
||||
# Create the `vx.y.z` tag
|
||||
git tag --annotate "${VERSION}" --message "${VERSION}"
|
||||
# Update the `vx` tag
|
||||
major_version_tag=$(cut -d '.' -f1 <<< "${VERSION}")
|
||||
# Use `--force` to overwrite the major version tag
|
||||
git tag --annotate "${major_version_tag}" --message "${major_version_tag}" --force
|
||||
# Push the tags, using:
|
||||
# - `--atomic` to make sure we either update both tags or neither (an intermediate state,
|
||||
# e.g. where we update the v2.x.y tag on the remote but not the v2 tag, could result in
|
||||
# unwanted Dependabot updates, e.g. from v2 to v2.x.y)
|
||||
# - `--force` since we're overwriting the `vx` tag
|
||||
git push origin --atomic --force refs/tags/"${VERSION}" refs/tags/"${major_version_tag}"
|
||||
|
||||
- name: Create mergeback branch
|
||||
if: steps.check.outputs.exists != 'true'
|
||||
if: steps.check.outputs.exists != 'true' && contains(github.ref, 'releases/v2')
|
||||
env:
|
||||
VERSION: "${{ steps.getVersion.outputs.version }}"
|
||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||
run: |
|
||||
set -exu
|
||||
PR_TITLE="Mergeback $VERSION $HEAD_BRANCH into $BASE_BRANCH"
|
||||
PR_BODY="Updates version and changelog."
|
||||
pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
|
||||
pr_body="Updates version and changelog."
|
||||
|
||||
# Update the version number ready for the next release
|
||||
npm version patch --no-git-tag-version
|
||||
|
||||
# Update the changelog
|
||||
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
|
||||
git add .
|
||||
git commit -m "Update changelog and version after $VERSION"
|
||||
npm version patch
|
||||
git commit -m "Update changelog and version after ${VERSION}"
|
||||
|
||||
git push origin "$NEW_BRANCH"
|
||||
git push origin "${NEW_BRANCH}"
|
||||
|
||||
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
|
||||
# so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
|
||||
gh pr create \
|
||||
--head "$NEW_BRANCH" \
|
||||
--base "$BASE_BRANCH" \
|
||||
--title "$PR_TITLE" \
|
||||
--head "${NEW_BRANCH}" \
|
||||
--base "${BASE_BRANCH}" \
|
||||
--title "${pr_title}" \
|
||||
--label "Update dependencies" \
|
||||
--body "$PR_BODY" \
|
||||
--body "${pr_body}" \
|
||||
--draft
|
||||
|
||||
97
.github/workflows/pr-checks.yml
vendored
@@ -2,7 +2,7 @@ name: PR Checks (Basic Checks and Runner)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, v1]
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
pull_request:
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
@@ -13,37 +13,69 @@ jobs:
|
||||
lint-js:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run Lint
|
||||
run: npm run-script lint
|
||||
|
||||
check-js:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
node-types-version: [12.12, current]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Update version of @types/node
|
||||
if: matrix.node-types-version != 'current'
|
||||
env:
|
||||
NODE_TYPES_VERSION: ${{ matrix.node-types-version }}
|
||||
run: |
|
||||
# Export `NODE_TYPES_VERSION` so it's available to jq
|
||||
export NODE_TYPES_VERSION="${NODE_TYPES_VERSION}"
|
||||
contents=$(jq '.devDependencies."@types/node" = env.NODE_TYPES_VERSION' package.json)
|
||||
echo "${contents}" > package.json
|
||||
# Usually we run `npm install` on macOS to ensure that we pick up macOS-only dependencies.
|
||||
# However we're not checking in the updated lockfile here, so it's fine to run
|
||||
# `npm install` on Linux.
|
||||
npm install
|
||||
|
||||
if [ ! -z "$(git status --porcelain)" ]; then
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
# The period in `git add --all .` ensures that we stage deleted files too.
|
||||
git add --all .
|
||||
git commit -m "Use @types/node=${NODE_TYPES_VERSION}"
|
||||
fi
|
||||
|
||||
- name: Check generated JS
|
||||
run: .github/workflows/script/check-js.sh
|
||||
|
||||
check-node-modules:
|
||||
name: Check modules up to date
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Check node modules up to date
|
||||
run: .github/workflows/script/check-node-modules.sh
|
||||
|
||||
verify-pr-checks:
|
||||
name: Verify PR checks up to date
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: 3.8
|
||||
- name: Install dependencies
|
||||
@@ -58,21 +90,27 @@ jobs:
|
||||
needs: [check-js, check-node-modules]
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: npm run-script test
|
||||
run: npm run-script test
|
||||
- uses: actions/checkout@v3
|
||||
- name: npm test
|
||||
run: |
|
||||
# Run any commands referenced in package.json using Bash, otherwise
|
||||
# we won't be able to find them on Windows.
|
||||
npm config set script-shell bash
|
||||
npm test
|
||||
|
||||
runner-analyze-javascript-ubuntu:
|
||||
name: Runner ubuntu JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
@@ -97,10 +135,11 @@ jobs:
|
||||
runner-analyze-javascript-windows:
|
||||
name: Runner windows JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
@@ -121,10 +160,11 @@ jobs:
|
||||
runner-analyze-javascript-macos:
|
||||
name: Runner macos JS analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
@@ -145,10 +185,11 @@ jobs:
|
||||
runner-analyze-csharp-ubuntu:
|
||||
name: Runner ubuntu C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
@@ -184,10 +225,11 @@ jobs:
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
# `windows-latest`.
|
||||
timeout-minutes: 45
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
@@ -215,7 +257,7 @@ jobs:
|
||||
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||
|
||||
- name: Upload tracer logs
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: tracer-logs
|
||||
path: ./codeql-runner/compound-build-tracer.log
|
||||
@@ -228,11 +270,12 @@ jobs:
|
||||
|
||||
runner-analyze-csharp-macos:
|
||||
name: Runner macos C# analyze
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
@@ -266,11 +309,12 @@ jobs:
|
||||
|
||||
runner-analyze-csharp-autobuild-ubuntu:
|
||||
name: Runner ubuntu autobuild C# analyze
|
||||
timeout-minutes: 45
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
@@ -301,6 +345,7 @@ jobs:
|
||||
TEST_MODE: true
|
||||
|
||||
runner-analyze-csharp-autobuild-windows:
|
||||
timeout-minutes: 45
|
||||
name: Runner windows autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||
@@ -308,7 +353,7 @@ jobs:
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
@@ -343,9 +388,10 @@ jobs:
|
||||
name: Runner macos autobuild C# analyze
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Move codeql-action
|
||||
shell: bash
|
||||
@@ -368,7 +414,10 @@ jobs:
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: |
|
||||
../action/runner/dist/codeql-runner-macos autobuild
|
||||
. codeql-runner/codeql-env.sh
|
||||
CODEQL_RUNNER="$(cat codeql-runner/codeql-env.json | jq -r '.CODEQL_RUNNER')"
|
||||
echo "$CODEQL_RUNNER"
|
||||
$CODEQL_RUNNER ../action/runner/dist/codeql-runner-macos autobuild
|
||||
|
||||
- name: Run analyze
|
||||
run: |
|
||||
@@ -380,11 +429,12 @@ jobs:
|
||||
name: Runner upload sarif
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
@@ -402,9 +452,10 @@ jobs:
|
||||
name: Runner ubuntu extractor RAM and threads options
|
||||
needs: [check-js, check-node-modules]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
|
||||
20
.github/workflows/python-deps.yml
vendored
@@ -2,7 +2,7 @@ name: Test Python Package Installation on Linux and Mac
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, v1]
|
||||
branches: [main, releases/v1, releases/v2]
|
||||
pull_request:
|
||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||
# by other workflows.
|
||||
@@ -10,6 +10,7 @@ on:
|
||||
|
||||
jobs:
|
||||
test-setup-python-scripts:
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
@@ -17,6 +18,11 @@ jobs:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
exclude:
|
||||
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
|
||||
- python_version: 2
|
||||
python_deps_type: poetry
|
||||
|
||||
|
||||
env:
|
||||
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
|
||||
@@ -24,7 +30,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: ./init
|
||||
@@ -70,7 +76,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: ./init
|
||||
@@ -114,6 +120,10 @@ jobs:
|
||||
matrix:
|
||||
python_deps_type: [pipenv, poetry, requirements, setup_py]
|
||||
python_version: [2, 3]
|
||||
exclude:
|
||||
# Python2 and poetry are not supported. See https://github.com/actions/setup-python/issues/374
|
||||
- python_version: 2
|
||||
python_deps_type: poetry
|
||||
|
||||
env:
|
||||
PYTHON_DEPS_TYPE: ${{ matrix.python_deps_type }}
|
||||
@@ -121,9 +131,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: ${{ matrix.python_version }}
|
||||
|
||||
|
||||
54
.github/workflows/release-runner.yml
vendored
@@ -1,54 +0,0 @@
|
||||
name: Release runner
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
bundle-tag:
|
||||
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
release-runner:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
extension: ["linux", "macos", "win.exe"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
cd runner
|
||||
npm install
|
||||
npm run build-runner
|
||||
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: codeql-runner-${{matrix.extension}}
|
||||
path: runner/dist/codeql-runner-${{matrix.extension}}
|
||||
|
||||
- name: Resolve Upload URL for the release
|
||||
if: ${{ github.event.inputs.bundle-tag != null }}
|
||||
id: save_url
|
||||
run: |
|
||||
UPLOAD_URL=$(curl -sS \
|
||||
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
|
||||
-H "Accept: application/json" \
|
||||
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
|
||||
echo ${UPLOAD_URL}
|
||||
echo "::set-output name=upload_url::${UPLOAD_URL}"
|
||||
|
||||
- name: Upload Platform Package
|
||||
if: ${{ github.event.inputs.bundle-tag != null }}
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.save_url.outputs.upload_url }}
|
||||
asset_path: runner/dist/codeql-runner-${{matrix.extension}}
|
||||
asset_name: codeql-runner-${{matrix.extension}}
|
||||
asset_content_type: application/octet-stream
|
||||
4
.github/workflows/script/check-js.sh
vendored
@@ -14,8 +14,8 @@ npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
>&2 echo "Failed: JavaScript files are not up to date. Run 'rm -rf lib && npm run-script build' to update"
git status
exit 1
fi
echo "Success: JavaScript files are up to date"
echo "Success: JavaScript files are up to date"
37
.github/workflows/script/update-required-checks.sh
vendored
Executable file
@@ -0,0 +1,37 @@
#!/usr/bin/env bash
# Update the required checks based on the current branch.
# Typically, this will be main.

if ! gh auth status 2>/dev/null; then
gh auth status
echo "Failed: Not authorized. This script requires admin access to github/codeql-action through the gh CLI."
exit 1
fi

if [ "$#" -eq 1 ]; then
# If we were passed an argument, use that as the SHA
GITHUB_SHA="$1"
elif [ "$#" -gt 1 ]; then
echo "Usage: $0 [SHA]"
echo "Update the required checks based on the SHA, or main."
exit 1
elif [ -z "$GITHUB_SHA" ]; then
# If we don't have a SHA, use main
GITHUB_SHA="$(git rev-parse main)"
fi

echo "Getting checks for $GITHUB_SHA"

# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update") | not)] | unique | sort')"

echo "$CHECKS" | jq

echo "{\"contexts\": ${CHECKS}}" > checks.json

for BRANCH in main releases/v2 releases/v1; do
echo "Updating $BRANCH"
gh api --silent -X "PATCH" "repos/github/codeql-action/branches/$BRANCH/protection/required_status_checks" --input checks.json
done

rm checks.json
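Beyond the `Usage:` message, invocation isn't documented, so the following is a hedged sketch of how the script appears intended to be run — from a checkout of github/codeql-action, with the `gh` CLI authenticated as a repository admin:

```bash
# Recompute the required-status-check list from the tip of main:
./.github/workflows/script/update-required-checks.sh

# Or pin the check list to a specific commit (placeholder SHA):
./.github/workflows/script/update-required-checks.sh <commit-sha>
```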
73
.github/workflows/split.yml
vendored
@@ -1,73 +0,0 @@
|
||||
#
|
||||
# Split the CodeQL Bundle into platform bundles
|
||||
#
|
||||
# Instructions:
|
||||
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
|
||||
# release (codeql-bundle-20200826)
|
||||
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
|
||||
# 3. Manually launch this workflow file (via the Actions UI) specifying
|
||||
# - The CLI Release (e.g., v2.2.5)
|
||||
# - The release tag (e.g., codeql-bundle-20200826)
|
||||
# 4. If everything succeeds you should see 3 new assets.
|
||||
#
|
||||
|
||||
name: Split Bundle
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
cli-release:
|
||||
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
|
||||
required: true
|
||||
bundle-tag:
|
||||
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
|
||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: ["linux64", "osx64", "win64"]
|
||||
|
||||
steps:
|
||||
- name: Resolve Upload URL for the release
|
||||
id: save_url
|
||||
run: |
|
||||
UPLOAD_URL=$(curl -sS \
|
||||
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
|
||||
-H "Accept: application/json" \
|
||||
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
|
||||
echo ${UPLOAD_URL}
|
||||
echo "::set-output name=upload_url::${UPLOAD_URL}"
|
||||
|
||||
- name: Download CodeQL CLI and Bundle
|
||||
run: |
|
||||
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
|
||||
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
|
||||
|
||||
- name: Create Platform Package
|
||||
# Replace the codeql-binaries with the platform specific ones
|
||||
run: |
|
||||
gunzip codeql-bundle.tar.gz
|
||||
tar -f codeql-bundle.tar --delete codeql
|
||||
unzip -q codeql-${{matrix.platform}}.zip
|
||||
tar -f codeql-bundle.tar --append codeql
|
||||
gzip codeql-bundle.tar
|
||||
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
|
||||
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
|
||||
|
||||
- name: Upload Platform Package
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.save_url.outputs.upload_url }}
|
||||
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
|
||||
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
|
||||
asset_content_type: application/tar+gzip
|
||||
3
.github/workflows/update-dependencies.yml
vendored
@@ -6,11 +6,12 @@ on:
jobs:
update:
name: Update dependencies
timeout-minutes: 45
runs-on: macos-latest
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3

- name: Remove PR label
env:
46
.github/workflows/update-release-branch.yml
vendored
@@ -1,24 +1,35 @@
|
||||
name: Update release branch
|
||||
on:
|
||||
repository_dispatch:
|
||||
# Example of how to trigger this:
|
||||
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
|
||||
# Replace <token> with a personal access token from this page: https://github.com/settings/tokens
|
||||
types: [update-release-branch]
|
||||
# You can trigger this workflow via workflow dispatch to start a release.
|
||||
# This will open a PR to update the v2 release branch.
|
||||
workflow_dispatch:
|
||||
|
||||
# When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
|
||||
push:
|
||||
branches:
|
||||
- releases/v2
|
||||
|
||||
jobs:
|
||||
update:
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository == 'github/codeql-action' }}
|
||||
if: github.repository == 'github/codeql-action'
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Dump environment
|
||||
run: env
|
||||
|
||||
- name: Dump GitHub context
|
||||
env:
|
||||
GITHUB_CONTEXT: '${{ toJson(github) }}'
|
||||
run: echo "$GITHUB_CONTEXT"
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
# Need full history so we calculate diffs
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: 3.8
|
||||
|
||||
@@ -32,5 +43,20 @@ jobs:
|
||||
git config --global user.email "github-actions@github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
|
||||
- name: Update release branch
|
||||
run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
|
||||
- name: Update v2 release branch
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
run: |
|
||||
python .github/update-release-branch.py \
|
||||
--github-token ${{ secrets.GITHUB_TOKEN }} \
|
||||
--repository-nwo ${{ github.repository }} \
|
||||
--mode v2-release \
|
||||
--conductor ${GITHUB_ACTOR}
|
||||
|
||||
- name: Update v1 release branch
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
python .github/update-release-branch.py \
|
||||
--github-token ${{ secrets.GITHUB_TOKEN }} \
|
||||
--repository-nwo ${{ github.repository }} \
|
||||
--mode v1-release \
|
||||
--conductor ${GITHUB_ACTOR}
|
||||
|
||||
@@ -6,18 +6,20 @@ on:
|
||||
|
||||
jobs:
|
||||
update-supported-enterprise-server-versions:
|
||||
name: Update Supported Enterprise Server Versions
|
||||
timeout-minutes: 45
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository == 'github/codeql-action' }}
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: "3.7"
|
||||
- name: Checkout CodeQL Action
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Checkout Enterprise Releases
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: github/enterprise-releases
|
||||
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
|
||||
|
||||
72
CHANGELOG.md
@@ -1,12 +1,78 @@
# CodeQL Action and CodeQL Runner Changelog
# CodeQL Action Changelog

## [UNRELEASED]
## 1.1.17 - 28 Jul 2022

- Update default CodeQL bundle version to 2.10.1. [#1143](https://github.com/github/codeql-action/pull/1143)

## 1.1.16 - 13 Jul 2022

- You can now quickly debug a job that uses the CodeQL Action by re-running the job from the GitHub UI and selecting the "Enable debug logging" option. [#1132](https://github.com/github/codeql-action/pull/1132)
- You can now see diagnostic messages produced by the analysis in the logs of the `analyze` Action by enabling debug mode. To enable debug mode, pass `debug: true` to the `init` Action, or [enable step debug logging](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging). This feature is available for CodeQL CLI version 2.10.0 and later. [#1133](https://github.com/github/codeql-action/pull/1133)

## 1.1.15 - 28 Jun 2022

- CodeQL query packs listed in the `packs` configuration field will be skipped if their target language is not being analyzed in the current Actions job. Previously, this would throw an error. [#1116](https://github.com/github/codeql-action/pull/1116)
- The combination of python2 and poetry is no longer supported. See https://github.com/actions/setup-python/issues/374 for more details. [#1124](https://github.com/github/codeql-action/pull/1124)
- Update default CodeQL bundle version to 2.10.0. [#1123](https://github.com/github/codeql-action/pull/1123)

## 1.1.14 - 22 Jun 2022

No user facing changes.

## 1.1.13 - 21 Jun 2022

- Update default CodeQL bundle version to 2.9.4. [#1100](https://github.com/github/codeql-action/pull/1100)

## 1.1.12 - 01 Jun 2022

- Update default CodeQL bundle version to 2.9.3. [#1084](https://github.com/github/codeql-action/pull/1084)

## 1.1.11 - 17 May 2022

- Update default CodeQL bundle version to 2.9.2. [#1074](https://github.com/github/codeql-action/pull/1074)

## 1.1.10 - 10 May 2022

- Update default CodeQL bundle version to 2.9.1. [#1056](https://github.com/github/codeql-action/pull/1056)
- When `wait-for-processing` is enabled, the workflow will now fail if there were any errors that occurred during processing of the analysis results.

## 1.1.9 - 27 Apr 2022

- Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007)
- Update default CodeQL bundle version to 2.9.0.
- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)

## 1.1.8 - 08 Apr 2022

- Update default CodeQL bundle version to 2.8.5. [#1014](https://github.com/github/codeql-action/pull/1014)
- Fix error where the init action would fail due to a GitHub API request that was taking too long to complete [#1025](https://github.com/github/codeql-action/pull/1025)

## 1.1.7 - 05 Apr 2022

- A bug where additional queries specified in the workflow file would sometimes not be respected has been fixed. [#1018](https://github.com/github/codeql-action/pull/1018)

## 1.1.6 - 30 Mar 2022

- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)

## 1.1.5 - 15 Mar 2022

- Update default CodeQL bundle version to 2.8.3.
- The CodeQL runner is now deprecated and no longer being released. For more information, see [CodeQL runner deprecation](https://github.blog/changelog/2021-09-21-codeql-runner-deprecation/).
- Fix two bugs that cause action failures with GHES 3.3 or earlier. [#978](https://github.com/github/codeql-action/pull/978)
- Fix `not a permitted key` invalid requests with GHES 3.1 or earlier
- Fix `RUNNER_ARCH environment variable must be set` errors with GHES 3.3 or earlier

## 1.1.4 - 07 Mar 2022

- Update default CodeQL bundle version to 2.8.2. [#950](https://github.com/github/codeql-action/pull/950)
- Fix a bug where old results can be uploaded if the languages in a repository change when using a non-ephemeral self-hosted runner. [#955](https://github.com/github/codeql-action/pull/955)

## 1.1.3 - 23 Feb 2022

- Fix bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
- Fix a bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)

## 1.1.2 - 17 Feb 2022
@@ -1 +1,3 @@
**/* @github/codeql-action-reviewers

/python-setup/ @github/codeql-python @github/codeql-action-reviewers

@@ -61,16 +61,30 @@ Here are a few things you can do that will increase the likelihood of your pull
## Releasing (write access required)

1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `releases/v2` release branch.

A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into releases/v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
1. Review the checklist items in the pull request description.
Once you've checked off all but the last of these, approve the PR and automerge it.
1. When the "Merge main into v1" pull request is merged into the `v1` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v1" pull request, and bumps the patch version of the CodeQL Action.
Once you've checked off all but the last two of these, approve the PR and automerge it.
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.

Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.
Approve the mergeback PR and automerge it.
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.

Review the checklist items in the pull request description.
Once you've checked off all the items, approve the PR and automerge it.
1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.

## Keeping the PR checks up to date (admin access required)

Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [update-required-checks.sh](.github/workflows/script/update-required-checks.sh) script:

1. By default, this script retrieves the checks from the latest SHA on `main`, so make sure that your `main` branch is up to date.
2. Run the script. If there's a reason to, you can pass in a different SHA as a CLI argument.
3. After running, go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules for `main`, `v1`, and `v2` have been updated.

## Resources

32  README.md
@@ -39,8 +39,7 @@ on:

jobs:
CodeQL-Build:
# If you're only analyzing JavaScript or Python, CodeQL runs on ubuntu-latest, windows-latest, and macos-latest.
# If you're analyzing C/C++, C#, Go, or Java, CodeQL runs on ubuntu-latest, windows-2019, and macos-latest.
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest

permissions:
@@ -53,11 +52,11 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -65,10 +64,10 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below).
- name: Autobuild
uses: github/codeql-action/autobuild@v1
uses: github/codeql-action/autobuild@v2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

# ✏️ If the Autobuild fails above, remove it and uncomment the following
# three lines and modify them (or add more) to build your code if your
@@ -79,14 +78,14 @@ jobs:
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2
```

If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:

```yaml
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
with:
languages: go, javascript

@@ -96,7 +95,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2
```

### Configuration file
@@ -104,7 +103,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.

```yaml
- uses: github/codeql-action/init@v1
- uses: github/codeql-action/init@v2
with:
config-file: ./.github/codeql/codeql-config.yml
```
@@ -112,7 +111,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.

```yaml
- uses: github/codeql-action/init@v1
- uses: github/codeql-action/init@v2
with:
config-file: owner/repo/codeql-config.yml@branch
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
@@ -123,7 +122,7 @@ For information on how to write a configuration file, see "[Using a custom confi
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:

```yaml
- uses: github/codeql-action/init@v1
- uses: github/codeql-action/init@v2
with:
queries: <local-or-remote-query>,<another-query>
```
@@ -131,7 +130,7 @@ If you only want to customise the queries used, you can specify them in your wor
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:

```yaml
- uses: github/codeql-action/init@v1
- uses: github/codeql-action/init@v2
with:
queries: +<local-or-remote-query>,<another-query>
```
@@ -140,10 +139,3 @@ By default, this will override any queries specified in a config file. If you wi

Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).

### Note on "missing analysis" message

The very first time code scanning is run and if it is on a pull request, you will probably get a message mentioning a "missing analysis". This is expected.

After code scanning has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the merge commit of the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows code scanning to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add code scanning to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the "Missing analysis for base commit SHA-HASH" message.

For more information and other causes of this message, see [Reasons for the "missing analysis" message](https://docs.github.com/en/code-security/secure-coding/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-missing-analysis-message)

@@ -61,7 +61,7 @@ inputs:
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "false"
default: "true"
token:
default: ${{ github.token }}
matrix:
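With the default flipped to `"true"` above, a workflow that prefers the old non-blocking behaviour would have to opt out explicitly. A minimal sketch of such a step, shown in isolation and not taken from the diff:

```yaml
- name: Perform CodeQL Analysis
  uses: github/codeql-action/analyze@v2
  with:
    wait-for-processing: "false"   # return as soon as the SARIF upload finishes, without waiting for processing errors
```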
@@ -6,6 +6,12 @@ inputs:
default: ${{ github.token }}
matrix:
default: ${{ toJson(matrix) }}
working-directory:
description: >-
Run the autobuilder using this path (relative to $GITHUB_WORKSPACE) as
working directory. If this input is not set, the autobuilder runs with
$GITHUB_WORKSPACE as its working directory.
required: false
runs:
using: 'node12'
main: '../lib/autobuild-action.js'
main: '../lib/autobuild-action.js'
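A rough sketch of how the new `working-directory` input might be used; the path is a hypothetical example relative to `$GITHUB_WORKSPACE`, not part of the diff:

```yaml
- name: Autobuild
  uses: github/codeql-action/autobuild@v2
  with:
    working-directory: backend/service   # hypothetical subdirectory to run the autobuilder from
```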
@@ -56,7 +56,10 @@ inputs:
This input also sets the number of threads that can later be used by the "analyze" action.
required: false
debug:
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
description: >-
Enable debugging mode.
This will result in more output being produced which may be useful when debugging certain issues.
Debugging mode is enabled automatically when step debug logging is turned on.
required: false
default: 'false'
debug-artifact-name:

56  lib/actions-util.js  generated
@@ -19,8 +19,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
@@ -29,6 +30,8 @@ const yaml = __importStar(require("js-yaml"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util_1 = require("./util");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
/**
|
||||
* The utils in this module are meant to be run inside of the action only.
|
||||
* Code paths from the runner should not enter this module.
|
||||
@@ -63,17 +66,10 @@ function getTemporaryDirectory() {
|
||||
: (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
|
||||
}
|
||||
exports.getTemporaryDirectory = getTemporaryDirectory;
|
||||
function getToolCacheDirectory() {
|
||||
const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
|
||||
return value !== undefined && value !== ""
|
||||
? value
|
||||
: (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE");
|
||||
}
|
||||
exports.getToolCacheDirectory = getToolCacheDirectory;
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
const getCommitOid = async function (ref = "HEAD") {
|
||||
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
||||
// Try to use git to get the current commit SHA. If that fails then
|
||||
// log but otherwise silently fall back to using the SHA from the environment.
|
||||
// The only time these two values will differ is during analysis of a PR when
|
||||
@@ -93,6 +89,7 @@ const getCommitOid = async function (ref = "HEAD") {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}).exec();
|
||||
return commitOid.trim();
|
||||
}
|
||||
@@ -112,6 +109,7 @@ const determineMergeBaseCommitOid = async function () {
|
||||
return undefined;
|
||||
}
|
||||
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
const checkoutPath = (0, exports.getOptionalInput)("checkout_path");
|
||||
try {
|
||||
let commitOid = "";
|
||||
let baseOid = "";
|
||||
@@ -136,6 +134,7 @@ const determineMergeBaseCommitOid = async function () {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}).exec();
|
||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||
if (commitOid === mergeSha &&
|
||||
@@ -351,7 +350,7 @@ async function getWorkflowPath() {
|
||||
const repo = repo_nwo[1];
|
||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||
const apiClient = api.getActionsApiClient();
|
||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
|
||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||
owner,
|
||||
repo,
|
||||
run_id,
|
||||
@@ -424,6 +423,9 @@ async function getRef() {
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const refInput = (0, exports.getOptionalInput)("ref");
|
||||
const shaInput = (0, exports.getOptionalInput)("sha");
|
||||
const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
|
||||
(0, exports.getOptionalInput)("source-root") ||
|
||||
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
|
||||
const hasRefInput = !!refInput;
|
||||
const hasShaInput = !!shaInput;
|
||||
// If one of 'ref' or 'sha' are provided, both are required
|
||||
@@ -445,15 +447,14 @@ async function getRef() {
|
||||
if (!pull_ref_regex.test(ref)) {
|
||||
return ref;
|
||||
}
|
||||
const head = await (0, exports.getCommitOid)("HEAD");
|
||||
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
||||
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
|
||||
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
|
||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||
// using GITHUB_REF. There is a subtle race condition where
|
||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||
const hasChangedRef = sha !== head &&
|
||||
(await (0, exports.getCommitOid)(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
||||
head;
|
||||
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
|
||||
if (hasChangedRef) {
|
||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
||||
@@ -498,6 +499,8 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
||||
workflowStartedAt = actionStartedAt.toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
|
||||
}
|
||||
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
|
||||
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
|
||||
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
|
||||
// See https://github.com/actions/runner/issues/803
|
||||
const actionRef = isRunningLocalAction()
|
||||
@@ -516,6 +519,8 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
||||
started_at: workflowStartedAt,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
status,
|
||||
runner_os: runnerOs,
|
||||
action_version: pkg.version,
|
||||
};
|
||||
// Add optional parameters
|
||||
if (cause) {
|
||||
@@ -534,6 +539,17 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
||||
if (matrix) {
|
||||
statusReport.matrix_vars = matrix;
|
||||
}
|
||||
if ("RUNNER_ARCH" in process.env) {
|
||||
// RUNNER_ARCH is available only in GHES 3.4 and later
|
||||
// Values other than X86, X64, ARM, or ARM64 are discarded server side
|
||||
statusReport.runner_arch = process.env["RUNNER_ARCH"];
|
||||
}
|
||||
if (runnerOs === "Windows" || runnerOs === "macOS") {
|
||||
statusReport.runner_os_release = os.release();
|
||||
}
|
||||
if (codeQlCliVersion !== undefined) {
|
||||
statusReport.codeql_version = codeQlCliVersion;
|
||||
}
|
||||
return statusReport;
|
||||
}
|
||||
exports.createStatusReportBase = createStatusReportBase;
|
||||
@@ -551,11 +567,17 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
|
||||
* Returns whether sending the status report was successful of not.
|
||||
*/
|
||||
async function sendStatusReport(statusReport) {
|
||||
const gitHubVersion = await api.getGitHubVersionActionsOnly();
|
||||
if ((0, util_1.isGitHubGhesVersionBelow)(gitHubVersion, "3.2.0")) {
|
||||
// GHES 3.1 and earlier versions reject unexpected properties, which means
|
||||
// that they will reject status reports with newly added properties.
|
||||
// Inhibiting status reporting for GHES < 3.2 avoids such failures.
|
||||
return true;
|
||||
}
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env["TEST_MODE"] === "true" || false;
|
||||
if (testMode) {
|
||||
if ((0, util_1.isInTestMode)()) {
|
||||
core.debug("In test mode. Status reports are not uploaded.");
|
||||
return true;
|
||||
}
|
||||
@@ -648,7 +670,7 @@ async function isAnalyzingDefaultBranch() {
|
||||
// Get the current ref and trim and refs/heads/ prefix
|
||||
let currentRef = await getRef();
|
||||
currentRef = currentRef.startsWith("refs/heads/")
|
||||
? currentRef.substr("refs/heads/".length)
|
||||
? currentRef.slice("refs/heads/".length)
|
||||
: currentRef;
|
||||
const event = getWorkflowEvent();
|
||||
const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
|
||||
|
||||
File diff suppressed because one or more lines are too long
132  lib/actions-util.test.js  generated
@@ -39,74 +39,93 @@ function errorCodes(actual, expected) {
|
||||
await t.throwsAsync(actionsutil.getRef);
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("HEAD").resolves(currentSha);
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("HEAD").resolves(currentSha);
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||
const sha = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||
callback.withArgs("HEAD").resolves(sha);
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||
const sha = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||
callback.withArgs("HEAD").resolves(sha);
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
// These values are be ignored
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
// These values are be ignored
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
||||
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
||||
@@ -461,6 +480,7 @@ on: ["push"]
|
||||
});
|
||||
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const envFile = path.join(tmpDir, "event.json");
|
||||
fs.writeFileSync(envFile, JSON.stringify({
|
||||
repository: {
|
||||
|
||||
File diff suppressed because one or more lines are too long
7  lib/analysis-paths.js  generated
@@ -58,14 +58,11 @@ function includeAndExcludeAnalysisPaths(config) {
}
// If the temporary or tools directory is in the working directory ignore that too.
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
let pathsIgnore = config.pathsIgnore;
if (!tempRelativeToWorking.startsWith("..")) {
if (!tempRelativeToWorking.startsWith("..") &&
!path.isAbsolute(tempRelativeToWorking)) {
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
}
if (!toolsRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
}
if (pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
}

@@ -1 +1 @@
|
||||
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
|
||||
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IACE,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC;QACvC,CAAC,IAAI,CAAC,UAAU,CAAC,qBAAqB,CAAC,EACvC;QACA,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AAjCD,wEAiCC"}
|
||||
48  lib/analysis-paths.test.js  generated
@@ -37,7 +37,6 @@ const util = __importStar(require("./util"));
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
@@ -45,6 +44,7 @@ const util = __importStar(require("./util"));
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -61,7 +61,6 @@ const util = __importStar(require("./util"));
pathsIgnore: ["path4", "path5", "path6/**"],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
@@ -69,6 +68,7 @@ const util = __importStar(require("./util"));
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -77,28 +77,26 @@ const util = __importStar(require("./util"));
});
});
(0, ava_1.default)("exclude temp dir", async (t) => {
return await util.withTmpDir(async (toolCacheDir) => {
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
injectedMlQueries: false,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
//# sourceMappingURL=analysis-paths.test.js.map
@@ -1 +1 @@
|
||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;IAC/D,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;QACrB,OAAO;QACP,SAAS,EAAE,EAAE;QACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;QACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;QACrD,KAAK,EAAE,EAAE;QACT,SAAS,EAAE,KAAK;QAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,iBAAiB,EAAE,KAAK;KACzB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;IAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC"}
|
||||
10  lib/analyze-action-env.test.js  generated
@@ -38,14 +38,17 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
gitHubVersion,
languages: [],
packs: [],
});
@@ -54,6 +57,7 @@ const util = __importStar(require("./util"));
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses

@@ -1 +1 @@
|
||||
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
10  lib/analyze-action-input.test.js  generated
@@ -38,14 +38,17 @@ const util = __importStar(require("./util"));
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
gitHubVersion,
languages: [],
packs: [],
});
@@ -54,6 +57,7 @@ const util = __importStar(require("./util"));
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";

@@ -1 +1 @@
|
||||
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
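Editor's note: the hunks above (and the updated source map) switch the analyze-action-input test from placeholder environment values to util.GITHUB_DOTCOM_URL plus a dotcom-style repository name, and stub util.getGitHubVersion so the test never calls the API. A minimal sketch of that stubbing pattern, assuming an ava/sinon setup and a ./util module shaped like the one in this repository; the TypeScript below is illustrative, not the repository's test source:

import test from "ava";
import * as sinon from "sinon";

import * as util from "./util";

test("GitHub version is stubbed rather than fetched", async (t) => {
  const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
  // Replace the real lookup with a canned answer for the duration of the test.
  const stub = sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
  try {
    t.deepEqual(await util.getGitHubVersion({} as any), gitHubVersion);
  } finally {
    stub.restore();
  }
});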
27 lib/analyze-action.js generated
@@ -26,9 +26,11 @@ const artifact = __importStar(require("@actions/artifact"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
@@ -57,6 +59,7 @@ async function run() {
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
await util.checkActionVersion(pkg.version);
try {
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
return;
@@ -75,7 +78,9 @@ async function run() {
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
@@ -86,13 +91,13 @@ async function run() {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (config.debugMode) {
// Upload the logs as an Actions artifact for debugging
const toUpload = [];
let toUpload = [];
for (const language of config.languages) {
toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
toUpload = toUpload.concat(listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
toUpload = toUpload.concat(listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
@@ -117,7 +122,11 @@ async function run() {
}
// Possibly upload the database bundles for remote queries
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
if (uploadResult !== undefined &&
// We don't upload results in test mode, so don't wait for processing
if (util.isInTestMode()) {
core.debug("In test mode. Waiting for processing is disabled.");
}
else if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
@@ -136,7 +145,7 @@ async function run() {
return;
}
finally {
if (config !== undefined && config.debugMode) {
if (config === null || config === void 0 ? void 0 : config.debugMode) {
try {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
@@ -149,7 +158,7 @@ async function run() {
console.log(`Failed to upload database debug bundles: ${error}`);
}
}
if (core.isDebug() && config !== undefined) {
if (config === null || config === void 0 ? void 0 : config.debugMode) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
const databaseDirectory = util.getCodeQLDatabasePath(config, language);
@@ -195,13 +204,13 @@ async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
const files = [];
let files = [];
for (const entry of entries) {
if (entry.isFile()) {
files.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
files.push(...listFolder(path.resolve(dir, entry.name)));
files = files.concat(listFolder(path.resolve(dir, entry.name)));
}
}
return files;
File diff suppressed because one or more lines are too long
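Editor's note: a recurring change in lib/analyze-action.js above replaces `toUpload.push(...listFolder(...))` with `toUpload = toUpload.concat(listFolder(...))`. Spreading a very large array into a call passes every element as a separate argument, which can exceed the JavaScript engine's argument limit; concat avoids that. A small self-contained illustration in plain TypeScript, not taken from the repository:

// Appending a large array: spread-into-push turns every element into a call
// argument and can throw a RangeError on huge inputs; concat stays safe.
function appendAll<T>(target: T[], extra: T[]): T[] {
  // target.push(...extra) is the risky pattern being removed in the diff above.
  return target.concat(extra);
}

let files: string[] = [];
files = appendAll(files, new Array<string>(200_000).fill("log.txt"));
console.log(files.length); // 200000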
73 lib/analyze.js generated
@@ -18,11 +18,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.CodeQLAnalysisError = void 0;
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
@@ -64,11 +68,10 @@ async function setupPythonExtractor(logger) {
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
}
async function createdDBForScannedLanguages(config, logger) {
async function createdDBForScannedLanguages(codeql, config, logger, featureFlags) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages.
analysisPaths.includeAndExcludeAnalysisPaths(config);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
for (const language of config.languages) {
if ((0, languages_1.isScannedLanguage)(language) &&
!dbIsFinalized(config, language, logger)) {
@@ -76,11 +79,12 @@ async function createdDBForScannedLanguages(config, logger) {
if (language === languages_1.Language.python) {
await setupPythonExtractor(logger);
}
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config, language), language);
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config, language), language, featureFlags);
logger.endGroup();
}
}
}
exports.createdDBForScannedLanguages = createdDBForScannedLanguages;
function dbIsFinalized(config, language, logger) {
const dbPath = util.getCodeQLDatabasePath(config, language);
try {
@@ -92,9 +96,9 @@ function dbIsFinalized(config, language, logger) {
return false;
}
}
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger) {
await createdDBForScannedLanguages(config, logger);
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
for (const language of config.languages) {
if (dbIsFinalized(config, language, logger)) {
logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
@@ -111,10 +115,8 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
const statusReport = {};
let locPromise = Promise.resolve({});
const cliCanCountBaseline = await cliCanCountLoC();
const debugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] ||
process.env["ACTIONS_RUNNER_DEBUG"] ||
process.env["ACTIONS_STEP_DEBUG"];
if (!cliCanCountBaseline || debugMode) {
const countLocDebugMode = process.env["INTERNAL_CODEQL_ACTION_DEBUG_LOC"] || config.debugMode;
if (!cliCanCountBaseline || countLocDebugMode) {
// count the number of lines in the background
locPromise = (0, count_loc_1.countLoc)(path.resolve(),
// config.paths specifies external directories. the current
@@ -131,11 +133,11 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
try {
if (hasPackWithCustomQueries) {
logger.info("Performing analysis with custom CodeQL Packs.");
logger.startGroup(`Downloading custom packs for ${language}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const results = await codeql.packDownload(packsWithVersion);
logger.info(`Downloaded packs: ${results.packs
.map((r) => `${r.name}@${r.version || "latest"}`)
@@ -159,7 +161,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
}
if (packsWithVersion.length > 0) {
querySuitePaths.push(await runQueryGroup(language, "packs", createPackSuiteContents(packsWithVersion), undefined));
querySuitePaths.push(...(await runQueryPacks(language, "packs", packsWithVersion, undefined)));
ranCustom = true;
}
if (ranCustom) {
@@ -170,14 +172,14 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile);
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile, config.debugMode);
if (!cliCanCountBaseline)
await injectLinesOfCode(sarifFile, language, locPromise);
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
if (!cliCanCountBaseline || debugMode)
if (!cliCanCountBaseline || countLocDebugMode)
printLinesOfCodeSummary(logger, language, await locPromise);
if (cliCanCountBaseline)
logger.info(await runPrintLinesOfCode(language));
@@ -192,10 +194,10 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
}
return statusReport;
async function runInterpretResults(language, queries, sarifFile) {
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
const databasePath = util.getCodeQLDatabasePath(config, language);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId);
}
async function cliCanCountLoC() {
return await util.codeQlVersionAbove(await (0, codeql_1.getCodeQL)(config.codeQLCmd), codeql_1.CODEQL_VERSION_COUNTS_LINES);
@@ -217,23 +219,40 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
return querySuitePath;
}
async function runQueryPacks(language, type, packs, searchPath) {
const databasePath = util.getCodeQLDatabasePath(config, language);
// Run the queries individually instead of all at once to avoid command
// line length restrictions, particularly on windows.
for (const pack of packs) {
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
await codeql.databaseRunQueries(databasePath, searchPath, pack, memoryFlag, threadsFlag);
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
}
return packs;
}
}
exports.runQueries = runQueries;
function createQuerySuiteContents(queries) {
return queries.map((q) => `- query: ${q}`).join("\n");
}
function createPackSuiteContents(packsWithVersion) {
return packsWithVersion.map(packWithVersionToQuerySuiteEntry).join("\n");
}
function packWithVersionToQuerySuiteEntry(pack) {
let text = `- qlpack: ${pack.packName}`;
if (pack.version) {
text += `\n version: ${pack.version}`;
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, featureFlags) {
try {
await (0, del_1.default)(outputDir, { force: true });
}
return text;
}
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
catch (error) {
if ((error === null || error === void 0 ? void 0 : error.code) !== "ENOENT") {
throw error;
}
}
await fs.promises.mkdir(outputDir, { recursive: true });
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
// WARNING: This does not _really_ end tracing, as the tracer will restore its
// critical environment variables and it'll still be active for all processes
// launched from this build step.
// However, it will stop tracing for all steps past the codeql-action/analyze
// step.
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Delete variables as specified by the end-tracing script
await (0, tracer_config_1.endTracingForCluster)(config);
@@ -242,8 +261,6 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
}
fs.mkdirSync(outputDir, { recursive: true });
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
}
exports.runFinalize = runFinalize;
async function runCleanup(config, cleanupLevel, logger) {
File diff suppressed because one or more lines are too long
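Editor's note: the lib/analyze.js changes above thread a featureFlags object down to the extractor call so that the Lua tracer-configuration CLI flag can be chosen per run. A hedged sketch of that gating pattern; the interface, flag strings and version constant below mirror what is visible in the diffs, but the helper itself is illustrative and not the action's real API:

// Gate a CLI argument on a minimum CLI version plus a feature flag, as the
// diffs do for --internal-use-lua-tracing / --no-internal-use-lua-tracing.
interface FeatureFlags {
  getValue(flag: string): Promise<boolean>;
}

const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";

async function luaTracingArgs(
  cliVersionAtLeast: (v: string) => Promise<boolean>,
  featureFlags: FeatureFlags
): Promise<string[]> {
  // Older CLIs understand neither flag, so pass nothing at all.
  if (!(await cliVersionAtLeast(CODEQL_VERSION_LUA_TRACER_CONFIG))) {
    return [];
  }
  // "lua_tracer_config_enabled" is an assumed flag name for this sketch.
  return (await featureFlags.getValue("lua_tracer_config_enabled"))
    ? ["--internal-use-lua-tracing"]
    : ["--no-internal-use-lua-tracing"];
}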
108 lib/analyze.test.js generated
@@ -26,11 +26,12 @@ const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const semver_1 = require("semver");
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const analyze_1 = require("./analyze");
|
||||
const codeql_1 = require("./codeql");
|
||||
const codeql_test_1 = require("./codeql.test");
|
||||
const count = __importStar(require("./count-loc"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
@@ -53,18 +54,8 @@ const util = __importStar(require("./util"));
|
||||
const addSnippetsFlag = "";
|
||||
const threadsFlag = "";
|
||||
const packs = {
|
||||
[languages_1.Language.cpp]: [
|
||||
{
|
||||
packName: "a/b",
|
||||
version: (0, semver_1.clean)("1.0.0"),
|
||||
},
|
||||
],
|
||||
[languages_1.Language.java]: [
|
||||
{
|
||||
packName: "c/d",
|
||||
version: (0, semver_1.clean)("2.0.0"),
|
||||
},
|
||||
],
|
||||
[languages_1.Language.cpp]: ["a/b@1.0.0"],
|
||||
[languages_1.Language.java]: ["c/d@2.0.0"],
|
||||
};
|
||||
for (const language of Object.values(languages_1.Language)) {
|
||||
(0, codeql_1.setCodeQL)({
|
||||
@@ -118,7 +109,6 @@ const util = __importStar(require("./util"));
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
@@ -128,6 +118,7 @@ const util = __importStar(require("./util"));
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||
recursive: true,
|
||||
@@ -208,32 +199,10 @@ const util = __importStar(require("./util"));
|
||||
query: "bar.ql",
|
||||
},
|
||||
];
|
||||
const qlsPackContentCpp = [
|
||||
{
|
||||
qlpack: "a/b",
|
||||
version: "1.0.0",
|
||||
},
|
||||
];
|
||||
const qlsPackContentJava = [
|
||||
{
|
||||
qlpack: "c/d",
|
||||
version: "2.0.0",
|
||||
},
|
||||
];
|
||||
for (const lang of Object.values(languages_1.Language)) {
|
||||
t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
|
||||
t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
|
||||
t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
|
||||
const packSuiteName = `${lang}-queries-packs.qls`;
|
||||
if (lang === languages_1.Language.cpp) {
|
||||
t.deepEqual(readContents(packSuiteName), qlsPackContentCpp);
|
||||
}
|
||||
else if (lang === languages_1.Language.java) {
|
||||
t.deepEqual(readContents(packSuiteName), qlsPackContentJava);
|
||||
}
|
||||
else {
|
||||
t.false(fs.existsSync(path.join(tmpDir, "codeql_databases", packSuiteName)));
|
||||
}
|
||||
}
|
||||
function readContents(name) {
|
||||
const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");
|
||||
@@ -242,4 +211,71 @@ const util = __importStar(require("./util"));
|
||||
}
|
||||
}
|
||||
});
|
||||
const stubConfig = {
|
||||
languages: [languages_1.Language.cpp, languages_1.Language.go],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: "",
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
dbLocation: "",
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
for (const options of [
|
||||
{
|
||||
name: "Lua feature flag enabled, but old CLI",
|
||||
version: "2.9.0",
|
||||
featureFlags: [feature_flags_1.FeatureFlag.LuaTracerConfigEnabled],
|
||||
yesFlagSet: false,
|
||||
noFlagSet: false,
|
||||
},
|
||||
{
|
||||
name: "Lua feature flag disabled, with old CLI",
|
||||
version: "2.9.0",
|
||||
featureFlags: [],
|
||||
yesFlagSet: false,
|
||||
noFlagSet: false,
|
||||
},
|
||||
{
|
||||
name: "Lua feature flag enabled, with new CLI",
|
||||
version: "2.10.0",
|
||||
featureFlags: [feature_flags_1.FeatureFlag.LuaTracerConfigEnabled],
|
||||
yesFlagSet: true,
|
||||
noFlagSet: false,
|
||||
},
|
||||
{
|
||||
name: "Lua feature flag disabled, with new CLI",
|
||||
version: "2.10.0",
|
||||
featureFlags: [],
|
||||
yesFlagSet: false,
|
||||
noFlagSet: true,
|
||||
},
|
||||
]) {
|
||||
(0, ava_1.default)(`createdDBForScannedLanguages() ${options.name}`, async (t) => {
|
||||
const runnerConstructorStub = (0, codeql_test_1.stubToolRunnerConstructor)();
|
||||
const codeqlObject = await (0, codeql_1.getCodeQLForTesting)("codeql/for-testing");
|
||||
sinon.stub(codeqlObject, "getVersion").resolves(options.version);
|
||||
const promise = (0, analyze_1.createdDBForScannedLanguages)(codeqlObject, stubConfig, (0, logging_1.getRunnerLogger)(true), (0, feature_flags_1.createFeatureFlags)(options.featureFlags));
|
||||
// call listener on `codeql resolve extractor`
|
||||
const mockToolRunner = runnerConstructorStub.getCall(0);
|
||||
mockToolRunner.args[2].listeners.stdout('"/path/to/extractor"');
|
||||
await promise;
|
||||
if (options.yesFlagSet)
|
||||
t.true(runnerConstructorStub.secondCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be present, but it is absent");
|
||||
else
|
||||
t.false(runnerConstructorStub.secondCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be absent, but it is present");
|
||||
if (options.noFlagSet)
|
||||
t.true(runnerConstructorStub.secondCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be present, but it is absent");
|
||||
else
|
||||
t.false(runnerConstructorStub.secondCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be absent, but it is present");
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=analyze.test.js.map
|
||||
File diff suppressed because one or more lines are too long
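Editor's note: the new lib/analyze.test.js block above generates one ava test per row of an options table (CLI version and feature-flag combination), with the expected flags encoded in each row. A compact sketch of that table-driven style, assuming ava and the semver package are available; the rows and the minimum version below are illustrative:

import test from "ava";
import { gte } from "semver";

const MIN_LUA_TRACING_VERSION = "2.10.0";

for (const row of [
  { name: "old CLI", version: "2.9.0", expectSupported: false },
  { name: "new CLI", version: "2.10.0", expectSupported: true },
]) {
  // One named test per table row, mirroring the loop in the diff above.
  test(`lua tracing supported: ${row.name}`, (t) => {
    t.is(gte(row.version, MIN_LUA_TRACING_VERSION), row.expectSupported);
  });
}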
34 lib/api-client.js generated
@@ -22,12 +22,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
exports.getGitHubVersionActionsOnly = exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
const path = __importStar(require("path"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const actions_util_1 = require("./actions-util");
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
@@ -57,15 +58,36 @@ function getApiUrl(githubUrl) {
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
function getApiDetails() {
return {
auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient() {
const apiDetails = {
auth: (0, actions_util_1.getRequiredInput)("token"),
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
return (0, exports.getApiClient)(apiDetails);
return (0, exports.getApiClient)(getApiDetails());
}
exports.getActionsApiClient = getActionsApiClient;
let cachedGitHubVersion = undefined;
/**
* Report the GitHub server version. This is a wrapper around
* util.getGitHubVersion() that automatically supplies GitHub API details using
* GitHub Action inputs. If you need to get the GitHub server version from the
* Runner, please call util.getGitHubVersion() instead.
*
* @returns GitHub version
*/
async function getGitHubVersionActionsOnly() {
if (!util.isActions()) {
throw new Error("getGitHubVersionActionsOnly() works only in an action");
}
if (cachedGitHubVersion === undefined) {
cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
}
return cachedGitHubVersion;
}
exports.getGitHubVersionActionsOnly = getGitHubVersionActionsOnly;
//# sourceMappingURL=api-client.js.map
@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,iCAAsD;AAEtD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,IAAA,oBAAY,EAAC,UAAU,CAAC,CAAC;AAClC,CAAC;AAPD,kDAOC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,6CAA+B;AAC/B,iCAAqE;AAErE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,IAAA,cAAO,GAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,SAAS,aAAa;IACpB,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,OAAO,IAAA,oBAAY,EAAC,aAAa,EAAE,CAAC,CAAC;AACvC,CAAC;AAFD,kDAEC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;;GAOG;AACI,KAAK,UAAU,2BAA2B;IAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE;QACrB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;KAC1E;IACD,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AARD,kEAQC"}
@@ -1 +1 @@
{ "maximumVersion": "3.4", "minimumVersion": "3.1" }
{ "maximumVersion": "3.6", "minimumVersion": "3.2" }
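Editor's note: the new getGitHubVersionActionsOnly above memoizes the server-version lookup in a module-level variable, so only the first caller pays for the API request. A generic sketch of that pattern in TypeScript; the helper name and usage line are illustrative, not part of the repository:

// Memoize an async lookup: the first call runs fn, later calls reuse the result.
// Like the code above, this does not deduplicate concurrent first calls, which
// is acceptable for the action's sequential flow.
function memoizeAsync<T>(fn: () => Promise<T>): () => Promise<T> {
  let cached: T | undefined;
  return async () => {
    if (cached === undefined) {
      cached = await fn();
    }
    return cached;
  };
}

// Usage sketch (names assumed):
// const getVersionOnce = memoizeAsync(() => util.getGitHubVersion(getApiDetails()));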
8 lib/autobuild-action.js generated
@@ -39,8 +39,9 @@ async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguag
await (0, actions_util_1.sendStatusReport)(statusReport);
}
async function run() {
const logger = (0, logging_1.getActionsLogger)();
const startedAt = new Date();
const logger = (0, logging_1.getActionsLogger)();
await (0, util_1.checkActionVersion)(pkg.version);
let language = undefined;
try {
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
@@ -52,6 +53,11 @@ async function run() {
}
language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
if (language !== undefined) {
const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory");
if (workingDirectory) {
logger.info(`Changing autobuilder working directory to ${workingDirectory}`);
process.chdir(workingDirectory);
}
await (0, autobuild_1.runAutobuild)(language, config, logger);
}
}
@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAMwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAyE;AAEzE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
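Editor's note: the autobuild-action.js hunk above adds an optional working-directory input and changes into it before running the autobuilder. A minimal sketch of that step; the input accessor is assumed to return string | undefined and is not the repository's real helper:

import * as process from "process";

// Switch the process working directory when an optional input is provided.
function maybeChdir(workingDirectory: string | undefined, log: (msg: string) => void): void {
  if (workingDirectory) {
    log(`Changing autobuilder working directory to ${workingDirectory}`);
    process.chdir(workingDirectory);
  }
}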
134 lib/codeql.js generated
@@ -22,19 +22,21 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
||||
const query_string_1 = __importDefault(require("query-string"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const uuid_1 = require("uuid");
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api = __importStar(require("./api-client"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const error_matcher_1 = require("./error-matcher");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const toolcache = __importStar(require("./toolcache"));
|
||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
@@ -76,6 +78,7 @@ const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
* CLI. In particular, with versions above this we will use both indirect
|
||||
@@ -86,6 +89,12 @@ exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
* versions above that.
|
||||
*/
|
||||
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
|
||||
/**
|
||||
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
|
||||
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
|
||||
* some of their files being greater than MAX_PATH (260 characters).
|
||||
*/
|
||||
exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
|
||||
function getCodeQLBundleName() {
|
||||
let platform;
|
||||
if (process.platform === "win32") {
|
||||
@@ -194,7 +203,19 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||
}
|
||||
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
||||
}
|
||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
|
||||
/**
|
||||
* Set up CodeQL CLI access.
|
||||
*
|
||||
* @param codeqlURL
|
||||
* @param apiDetails
|
||||
* @param tempDir
|
||||
* @param variant
|
||||
* @param logger
|
||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||
* version requirement. Must be set to true outside tests.
|
||||
* @returns
|
||||
*/
|
||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, logger, checkVersion) {
|
||||
try {
|
||||
// We use the special value of 'latest' to prioritize the version in the
|
||||
// defaults over any pinned cached version.
|
||||
@@ -205,21 +226,21 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
|
||||
let codeqlFolder;
|
||||
let codeqlURLVersion;
|
||||
if (codeqlURL && !codeqlURL.startsWith("http")) {
|
||||
codeqlFolder = await toolcache.extractTar(codeqlURL, tempDir, logger);
|
||||
codeqlFolder = await toolcache.extractTar(codeqlURL);
|
||||
codeqlURLVersion = "local";
|
||||
}
|
||||
else {
|
||||
codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
||||
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
|
||||
// If we find the specified version, we always use that.
|
||||
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer, toolCacheDir, logger);
|
||||
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
|
||||
// If we don't find the requested version, in some cases we may allow a
|
||||
// different version to save download time if the version hasn't been
|
||||
// specified explicitly (in which case we always honor it).
|
||||
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL");
|
||||
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
|
||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
||||
codeqlFolder = tmpCodeqlFolder;
|
||||
@@ -235,7 +256,9 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
|
||||
}
|
||||
const parsedCodeQLURL = new URL(codeqlURL);
|
||||
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
||||
const headers = { accept: "application/octet-stream" };
|
||||
const headers = {
|
||||
accept: "application/octet-stream",
|
||||
};
|
||||
// We only want to provide an authorization header if we are downloading
|
||||
// from the same GitHub instance the Action is running on.
|
||||
// This avoids leaking Enterprise tokens to dotcom.
|
||||
@@ -249,10 +272,12 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
|
||||
logger.debug("Downloading CodeQL bundle without token.");
|
||||
}
|
||||
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, tempDir, headers);
|
||||
const dest = path.join(tempDir, (0, uuid_1.v4)());
|
||||
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
|
||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath, tempDir, logger);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer, toolCacheDir, logger);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
|
||||
}
|
||||
}
|
||||
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
||||
@@ -361,20 +386,30 @@ exports.getCachedCodeQL = getCachedCodeQL;
|
||||
* a non-existent placeholder codeql command, so tests that use this function
|
||||
* should also stub the toolrunner.ToolRunner constructor.
|
||||
*/
|
||||
async function getCodeQLForTesting() {
|
||||
return getCodeQLForCmd("codeql-for-testing", false);
|
||||
async function getCodeQLForTesting(cmd = "codeql-for-testing") {
|
||||
return getCodeQLForCmd(cmd, false);
|
||||
}
|
||||
exports.getCodeQLForTesting = getCodeQLForTesting;
|
||||
/**
|
||||
* Return a CodeQL object for CodeQL CLI access.
|
||||
*
|
||||
* @param cmd Path to CodeQL CLI
|
||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||
* version requirement. Must be set to true outside tests.
|
||||
* @returns A new CodeQL object
|
||||
*/
|
||||
async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
let cachedVersion = undefined;
|
||||
const codeql = {
|
||||
getPath() {
|
||||
return cmd;
|
||||
},
|
||||
async getVersion() {
|
||||
if (cachedVersion === undefined)
|
||||
cachedVersion = runTool(cmd, ["version", "--format=terse"]);
|
||||
return await cachedVersion;
|
||||
let result = util.getCachedCodeQlVersion();
|
||||
if (result === undefined) {
|
||||
result = (await runTool(cmd, ["version", "--format=terse"])).trim();
|
||||
util.cacheCodeQlVersion(result);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
async printVersion() {
|
||||
await runTool(cmd, ["version", "--format=json"]);
|
||||
@@ -429,22 +464,33 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
]);
|
||||
},
|
||||
async databaseInitCluster(databasePath, languages, sourceRoot, processName, processLevel) {
|
||||
const extraArgs = languages.map((language) => `--language=${language}`);
|
||||
if (languages.filter(languages_1.isTracedLanguage).length > 0) {
|
||||
async databaseInitCluster(config, sourceRoot, processName, processLevel, featureFlags) {
|
||||
const extraArgs = config.languages.map((language) => `--language=${language}`);
|
||||
if (config.languages.filter(languages_1.isTracedLanguage).length > 0) {
|
||||
extraArgs.push("--begin-tracing");
|
||||
if (processName !== undefined) {
|
||||
extraArgs.push(`--trace-process-name=${processName}`);
|
||||
}
|
||||
else {
|
||||
// We default to 3 if no other arguments are provided since this was the default
|
||||
// behaviour of the Runner. Note this path never happens in the CodeQL Action
|
||||
// because that always passes in a process name.
|
||||
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
|
||||
}
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACER_CONFIG)) {
|
||||
if (await featureFlags.getValue(feature_flags_1.FeatureFlag.LuaTracerConfigEnabled)) {
|
||||
extraArgs.push("--internal-use-lua-tracing");
|
||||
}
|
||||
else {
|
||||
extraArgs.push("--no-internal-use-lua-tracing");
|
||||
}
|
||||
}
|
||||
}
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"init",
|
||||
"--db-cluster",
|
||||
databasePath,
|
||||
config.dbLocation,
|
||||
`--source-root=${sourceRoot}`,
|
||||
...extraArgs,
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
@@ -464,9 +510,22 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
"-Dhttp.keepAlive=false",
|
||||
"-Dmaven.wagon.http.pool=false",
|
||||
].join(" ");
|
||||
// On macOS, System Integrity Protection (SIP) typically interferes with
|
||||
// CodeQL build tracing of protected binaries.
|
||||
// The usual workaround is to prefix `$CODEQL_RUNNER` to build commands:
|
||||
// `$CODEQL_RUNNER` (not to be confused with the deprecated CodeQL Runner tool)
|
||||
// points to a simple wrapper binary included with the CLI, and the extra layer of
|
||||
// process indirection helps the tracer bypass SIP.
|
||||
// The above SIP workaround is *not* needed here.
|
||||
// At the `autobuild` step in the Actions workflow, we assume the `init` step
|
||||
// has successfully run, and will have exported `DYLD_INSERT_LIBRARIES`
|
||||
// into the environment of subsequent steps, to activate the tracer.
|
||||
// When `DYLD_INSERT_LIBRARIES` is set in the environment for a step,
|
||||
// the Actions runtime introduces its own workaround for SIP
|
||||
// (https://github.com/actions/runner/pull/416).
|
||||
await runTool(autobuildCmd);
|
||||
},
|
||||
async extractScannedLanguage(databasePath, language) {
|
||||
async extractScannedLanguage(databasePath, language, featureFlags) {
|
||||
// Get extractor location
|
||||
let extractorPath = "";
|
||||
await new toolrunner.ToolRunner(cmd, [
|
||||
@@ -489,10 +548,20 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
// Set trace command
|
||||
const ext = process.platform === "win32" ? ".cmd" : ".sh";
|
||||
const traceCommand = path.resolve(JSON.parse(extractorPath), "tools", `autobuild${ext}`);
|
||||
const extraArgs = [];
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_LUA_TRACER_CONFIG)) {
|
||||
if (await featureFlags.getValue(feature_flags_1.FeatureFlag.LuaTracerConfigEnabled)) {
|
||||
extraArgs.push("--internal-use-lua-tracing");
|
||||
}
|
||||
else {
|
||||
extraArgs.push("--no-internal-use-lua-tracing");
|
||||
}
|
||||
}
|
||||
// Run trace command
|
||||
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, [
|
||||
"database",
|
||||
"trace-command",
|
||||
...extraArgs,
|
||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
||||
databasePath,
|
||||
"--",
|
||||
@@ -563,13 +632,13 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
codeqlArgs.push(querySuitePath);
|
||||
await runTool(cmd, codeqlArgs);
|
||||
},
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
|
||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) {
|
||||
const codeqlArgs = [
|
||||
"database",
|
||||
"interpret-results",
|
||||
threadsFlag,
|
||||
"--format=sarif-latest",
|
||||
"-v",
|
||||
verbosityFlag,
|
||||
`--output=${sarifFile}`,
|
||||
addSnippetsFlag,
|
||||
...getExtraOptionsFromEnv(["database", "interpret-results"]),
|
||||
@@ -586,7 +655,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
|
||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||
}
|
||||
codeqlArgs.push(databasePath, ...querySuitePaths);
|
||||
codeqlArgs.push(databasePath);
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
// capture stdout, which contains analysis summaries
|
||||
return await runTool(cmd, codeqlArgs);
|
||||
},
|
||||
@@ -614,8 +684,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
"pack",
|
||||
"download",
|
||||
"--format=json",
|
||||
"--resolve-query-specs",
|
||||
...getExtraOptionsFromEnv(["pack", "download"]),
|
||||
...packs.map(packWithVersionToString),
|
||||
...packs,
|
||||
];
|
||||
const output = await runTool(cmd, codeqlArgs);
|
||||
try {
|
||||
@@ -657,15 +728,20 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
||||
};
|
||||
// To ensure that status reports include the CodeQL CLI version whereever
|
||||
// possbile, we want to call getVersion(), which populates the version value
|
||||
// used by status reporting, at the earliest opportunity. But invoking
|
||||
// getVersion() directly here breaks tests that only pretend to create a
|
||||
// CodeQL object. So instead we rely on the assumption that all non-test
|
||||
// callers would set checkVersion to true, and util.codeQlVersionAbove()
|
||||
// would call getVersion(), so the CLI version would be cached as soon as the
|
||||
// CodeQL object is created.
|
||||
if (checkVersion &&
|
||||
!(await util.codeQlVersionAbove(codeql, CODEQL_MINIMUM_VERSION))) {
|
||||
throw new Error(`Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${await codeql.getVersion()}`);
|
||||
}
|
||||
return codeql;
|
||||
}
|
||||
function packWithVersionToString(pack) {
|
||||
return pack.version ? `${pack.packName}@${pack.version}` : pack.packName;
|
||||
}
|
||||
/**
|
||||
* Gets the options for `path` of `options` as an array of extra option strings.
|
||||
*/
|
||||
|
||||
File diff suppressed because one or more lines are too long
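Editor's note: among the lib/codeql.js changes above, databaseInterpretResults now takes a verbosity flag instead of always passing "-v", and lib/analyze.js supplies "-vv" when debug mode is on. A one-function sketch of that choice, illustrative only:

// Pick the interpret-results verbosity from the configured debug mode,
// as the databaseInterpretResults change above does.
function verbosityFlag(enableDebugLogging: boolean): string {
  return enableDebugLogging ? "-vv" : "-v";
}

// e.g. codeqlArgs.push(verbosityFlag(config.debugMode));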
76 lib/codeql.test.js generated
@@ -22,6 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.stubToolRunnerConstructor = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
@@ -30,6 +31,8 @@ const nock_1 = __importDefault(require("nock"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const codeql = __importStar(require("./codeql"));
|
||||
const defaults = __importStar(require("./defaults.json"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
@@ -55,7 +58,7 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
||||
}
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
@@ -68,12 +71,12 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
||||
});
|
||||
});
|
||||
@@ -83,9 +86,9 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
@@ -96,7 +99,7 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
@@ -106,7 +109,7 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://github.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
@@ -117,7 +120,7 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://example.com")
|
||||
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
@@ -127,7 +130,7 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://github.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 2);
|
||||
});
|
||||
@@ -155,7 +158,7 @@ ava_1.default.beforeEach(() => {
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
@@ -223,16 +226,64 @@ ava_1.default.beforeEach(() => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||
});
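The pair of tests above pins down a version gate: `--sarif-add-query-help` is omitted for CLI 2.7.0 and passed from 2.7.1 onwards. A minimal sketch of that kind of gate, assuming a plain semver comparison; this is an illustration, not the action's actual code path.

// Illustrative version gate matching the two databaseInterpretResults() tests above.
const semver = require("semver");

const CODEQL_VERSION_SARIF_ADD_QUERY_HELP = "2.7.1";

function interpretResultsArgs(cliVersion, baseArgs) {
    const args = [...baseArgs];
    // Only CLIs at or above 2.7.1 understand --sarif-add-query-help.
    if (semver.gte(cliVersion, CODEQL_VERSION_SARIF_ADD_QUERY_HELP)) {
        args.push("--sarif-add-query-help");
    }
    return args;
}

console.log(interpretResultsArgs("2.7.0", [])); // []
console.log(interpretResultsArgs("2.7.1", [])); // [ '--sarif-add-query-help' ]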
const stubConfig = {
|
||||
languages: [languages_1.Language.cpp],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: "",
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
dbLocation: "",
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
(0, ava_1.default)("databaseInitCluster() Lua feature flag enabled, but old CLI", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.9.0");
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.LuaTracerConfigEnabled]));
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be absent, but it is present");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInitCluster() Lua feature flag disabled, with old CLI", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.9.0");
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]));
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be absent, but it is present");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInitCluster() Lua feature flag enabled, compatible CLI", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.10.0");
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.LuaTracerConfigEnabled]));
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--internal-use-lua-tracing"), "--internal-use-lua-tracing should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInitCluster() Lua feature flag disabled, compatible CLI", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.10.0");
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]));
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--no-internal-use-lua-tracing"), "--no-internal-use-lua-tracing should be present, but it is absent");
|
||||
});
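Taken together, the four databaseInitCluster() tests above describe a two-part gate: an older CLI (2.9.0 here) gets neither Lua flag, while a compatible CLI (2.10.0 here) gets --internal-use-lua-tracing when the feature flag is enabled and --no-internal-use-lua-tracing when it is not. A hedged restatement of that decision table; the 2.10.0 cutoff is taken from the tests, not from the CLI's documented minimum.

// Illustrative decision logic matching the four tests above; the real
// implementation lives in the action's codeql source, not here.
const semver = require("semver");

function luaTracingArgs(cliVersion, luaFeatureFlagEnabled) {
    // Older CLIs get neither flag, regardless of the feature flag.
    if (semver.lt(cliVersion, "2.10.0")) {
        return [];
    }
    return luaFeatureFlagEnabled
        ? ["--internal-use-lua-tracing"]
        : ["--no-internal-use-lua-tracing"];
}

console.log(luaTracingArgs("2.9.0", true));   // []
console.log(luaTracingArgs("2.10.0", true));  // [ '--internal-use-lua-tracing' ]
console.log(luaTracingArgs("2.10.0", false)); // [ '--no-internal-use-lua-tracing' ]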
function stubToolRunnerConstructor() {
|
||||
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
|
||||
runnerObjectStub.exec.resolves(0);
|
||||
@@ -240,4 +291,5 @@ function stubToolRunnerConstructor() {
|
||||
runnerConstructorStub.returns(runnerObjectStub);
|
||||
return runnerConstructorStub;
|
||||
}
|
||||
exports.stubToolRunnerConstructor = stubToolRunnerConstructor;
|
||||
//# sourceMappingURL=codeql.test.js.map
File diff suppressed because one or more lines are too long
156 lib/config-utils.js generated
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
|
||||
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePacksSpecification = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
@@ -118,9 +118,11 @@ const builtinSuites = ["security-extended", "security-and-quality"];
|
||||
/**
|
||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
* May inject ML queries, and the return value will declare if this was done.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
||||
var _a;
|
||||
let injectedMlQueries = false;
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
if (!found) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||
@@ -128,18 +130,29 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
||||
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
|
||||
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
||||
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
||||
if (languages.includes("javascript") &&
|
||||
if (
|
||||
// Only run ML-powered queries on Windows if we have a CLI that supports it.
|
||||
(process.platform !== "win32" ||
|
||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
|
||||
languages.includes("javascript") &&
|
||||
(found === "security-extended" || found === "security-and-quality") &&
|
||||
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK.packName)) &&
|
||||
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some(isMlPoweredJsQueriesPack)) &&
|
||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
||||
if (!packs.javascript) {
|
||||
packs.javascript = [];
|
||||
}
|
||||
packs.javascript.push(util_1.ML_POWERED_JS_QUERIES_PACK);
|
||||
packs.javascript.push(await (0, util_1.getMlPoweredJsQueriesPack)(codeQL));
|
||||
injectedMlQueries = true;
|
||||
}
|
||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||
return injectedMlQueries;
|
||||
}
|
||||
function isMlPoweredJsQueriesPack(pack) {
|
||||
return (pack === util_1.ML_POWERED_JS_QUERIES_PACK_NAME ||
|
||||
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@`) ||
|
||||
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}:`));
|
||||
}
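The condition above that decides whether to inject the ML-powered JavaScript query pack is easier to read as a flat predicate. The sketch below restates it with placeholder inputs; it is an illustration, not the action's exported API.

// Condensed, illustrative restatement of the injection condition above;
// the field names below are placeholders for the values the real code computes.
function shouldInjectMlQueries(opts) {
    const {
        platform,               // e.g. process.platform
        cliSupportsMlOnWindows, // result of the Windows-specific version check
        languages,              // languages analyzed in this run
        suite,                  // resolved builtin suite name
        packsAlreadyRequested,  // true if the ML pack is already in packs.javascript
        featureFlagEnabled,     // MlPoweredQueriesEnabled
        cliVersionOk,           // CODEQL_VERSION_ML_POWERED_QUERIES check
    } = opts;
    return (
        (platform !== "win32" || cliSupportsMlOnWindows) &&
        languages.includes("javascript") &&
        (suite === "security-extended" || suite === "security-and-quality") &&
        !packsAlreadyRequested &&
        featureFlagEnabled &&
        cliVersionOk
    );
}

console.log(shouldInjectMlQueries({
    platform: "linux",
    cliSupportsMlOnWindows: false,
    languages: ["javascript"],
    suite: "security-extended",
    packsAlreadyRequested: false,
    featureFlagEnabled: true,
    cliVersionOk: true,
})); // true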
/**
|
||||
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
||||
@@ -196,6 +209,11 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
||||
* parsing the 'uses' actions in the workflow file. So it can handle
|
||||
* local paths starting with './', or references to remote repos, or
|
||||
* a finite set of hardcoded terms for builtin suites.
|
||||
*
|
||||
* This may inject ML queries into the packs to use, and the return value will
|
||||
* declare if this was done.
|
||||
*
|
||||
* @returns whether or not we injected ML queries into the packs
|
||||
*/
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
|
||||
queryUses = queryUses.trim();
|
||||
@@ -205,15 +223,15 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
||||
// Check for the local path case before we start trying to parse the repository name
|
||||
if (queryUses.startsWith("./")) {
|
||||
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), workspacePath, configFile);
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||
return;
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
return false;
|
||||
}
|
||||
// Regex validating stars in paths or paths-ignore entries.
|
||||
// The intention is to only allow ** to appear when immediately
|
||||
@@ -288,9 +306,8 @@ function getPathsInvalid(configFile) {
|
||||
}
|
||||
exports.getPathsInvalid = getPathsInvalid;
|
||||
function getPacksRequireLanguage(lang, configFile) {
|
||||
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not one of the languages to analyze`);
|
||||
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not a valid language.`);
|
||||
}
|
||||
exports.getPacksRequireLanguage = getPacksRequireLanguage;
|
||||
function getPacksInvalidSplit(configFile) {
|
||||
return getConfigFilePropertyError(configFile, PACKS_PROPERTY, "must split packages by language");
|
||||
}
|
||||
@@ -422,12 +439,15 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
|
||||
return parsedLanguages;
|
||||
}
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
|
||||
let injectedMlQueries = false;
|
||||
queriesInput = queriesInput.trim();
|
||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||
queriesInput = queriesInput.replace(/^\+/, "");
|
||||
for (const query of queriesInput.split(",")) {
|
||||
await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
injectedMlQueries = injectedMlQueries || didInject;
|
||||
}
|
||||
return injectedMlQueries;
|
||||
}
|
||||
// Returns true if either no queries were provided in the workflow.
|
||||
// or if the queries in the workflow were provided in "additive" mode,
|
||||
@@ -435,14 +455,14 @@ async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, r
|
||||
// should instead be added in addition
|
||||
function shouldAddConfigFileQueries(queriesInput) {
|
||||
if (queriesInput) {
|
||||
return queriesInput.trimStart().substr(0, 1) === "+";
|
||||
return queriesInput.trimStart().slice(0, 1) === "+";
|
||||
}
|
||||
return true;
|
||||
}
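shouldAddConfigFileQueries is what gives the `queries` input its "+" convention: no input keeps the config file's queries, plain input replaces them, and a leading "+" adds to them. Copied from the new version in the diff above and exercised directly.

// Same function as above, shown with example inputs.
function shouldAddConfigFileQueries(queriesInput) {
    if (queriesInput) {
        return queriesInput.trimStart().slice(0, 1) === "+";
    }
    return true;
}

console.log(shouldAddConfigFileQueries(undefined));            // true  - no workflow queries, keep the config file's
console.log(shouldAddConfigFileQueries("./my-queries"));       // false - workflow queries replace the config file's
console.log(shouldAddConfigFileQueries("+security-extended")); // true  - "+" means add to, not replace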
/**
|
||||
* Get the default config for when the user has not supplied one.
|
||||
*/
|
||||
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
var _a;
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
|
||||
const queries = {};
|
||||
@@ -454,8 +474,9 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
}
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
}
|
||||
return {
|
||||
languages,
|
||||
@@ -465,20 +486,20 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
packs,
|
||||
originalUserInput: {},
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
};
|
||||
}
|
||||
exports.getDefaultConfig = getDefaultConfig;
|
||||
/**
|
||||
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
var _a;
|
||||
let parsedYAML;
|
||||
if (isLocal(configFile)) {
|
||||
@@ -519,13 +540,14 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
if (!disableDefaultQueries) {
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
}
|
||||
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
|
||||
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile, logger);
|
||||
// If queries were provided using `with` in the action configuration,
|
||||
// they should take precedence over the queries in the config file
|
||||
// unless they're prefixed with "+", in which case they supplement those
|
||||
// in the config file.
|
||||
let injectedMlQueries = false;
|
||||
if (queriesInput) {
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
}
|
||||
if (shouldAddConfigFileQueries(queriesInput) &&
|
||||
QUERIES_PROPERTY in parsedYAML) {
|
||||
@@ -571,13 +593,13 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
packs,
|
||||
originalUserInput: parsedYAML,
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
injectedMlQueries,
|
||||
};
|
||||
}
|
||||
/**
|
||||
@@ -591,7 +613,7 @@ const PACK_IDENTIFIER_PATTERN = (function () {
|
||||
return new RegExp(`^${component}/${component}$`);
|
||||
})();
|
||||
// Exported for testing
|
||||
function parsePacksFromConfig(packsByLanguage, languages, configFile) {
|
||||
function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
|
||||
const packs = {};
|
||||
if (Array.isArray(packsByLanguage)) {
|
||||
if (languages.length === 1) {
|
||||
@@ -611,11 +633,19 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile) {
|
||||
throw new Error(getPacksInvalid(configFile));
|
||||
}
|
||||
if (!languages.includes(lang)) {
|
||||
throw new Error(getPacksRequireLanguage(lang, configFile));
|
||||
// This particular language is not being analyzed in this run.
|
||||
if (languages_1.Language[lang]) {
|
||||
logger.info(`Ignoring packs for ${lang} since this language is not being analyzed in this run.`);
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
// This language is invalid, probably a misspelling
|
||||
throw new Error(getPacksRequireLanguage(lang, configFile));
|
||||
}
|
||||
}
|
||||
packs[lang] = [];
|
||||
for (const packStr of packsArr) {
|
||||
packs[lang].push(toPackWithVersion(packStr, configFile));
|
||||
packs[lang].push(validatePacksSpecification(packStr, configFile));
|
||||
}
|
||||
}
|
||||
return packs;
|
||||
@@ -640,36 +670,84 @@ function parsePacksFromInput(packsInput, languages) {
|
||||
}
|
||||
return {
|
||||
[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
|
||||
packs.push(toPackWithVersion(pack, ""));
|
||||
packs.push(validatePacksSpecification(pack, ""));
|
||||
return packs;
|
||||
}, []),
|
||||
};
|
||||
}
|
||||
function toPackWithVersion(packStr, configFile) {
|
||||
/**
|
||||
* Validates that this package specification is syntactically correct.
|
||||
* It may not point to any real package, but after this function returns
|
||||
* without throwing, we are guaranteed that the package specification
|
||||
* is roughly correct.
|
||||
*
|
||||
* The CLI itself will do a more thorough validation of the package
|
||||
* specification.
|
||||
*
|
||||
* A package specification looks like this:
|
||||
*
|
||||
* `scope/name@version:path`
|
||||
*
|
||||
* Version and path are optional.
|
||||
*
|
||||
* @param packStr the package specification to verify.
|
||||
* @param configFile Config file to use for error reporting
|
||||
*/
|
||||
function validatePacksSpecification(packStr, configFile) {
|
||||
if (typeof packStr !== "string") {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
const nameWithVersion = packStr.trim().split("@");
|
||||
let version;
|
||||
if (nameWithVersion.length > 2 ||
|
||||
!PACK_IDENTIFIER_PATTERN.test(nameWithVersion[0])) {
|
||||
packStr = packStr.trim();
|
||||
const atIndex = packStr.indexOf("@");
|
||||
const colonIndex = packStr.indexOf(":", atIndex);
|
||||
const packStart = 0;
|
||||
const versionStart = atIndex + 1 || undefined;
|
||||
const pathStart = colonIndex + 1 || undefined;
|
||||
const packEnd = Math.min(atIndex > 0 ? atIndex : Infinity, colonIndex > 0 ? colonIndex : Infinity, packStr.length);
|
||||
const versionEnd = versionStart
|
||||
? Math.min(colonIndex > 0 ? colonIndex : Infinity, packStr.length)
|
||||
: undefined;
|
||||
const pathEnd = pathStart ? packStr.length : undefined;
|
||||
const packName = packStr.slice(packStart, packEnd).trim();
|
||||
const version = versionStart
|
||||
? packStr.slice(versionStart, versionEnd).trim()
|
||||
: undefined;
|
||||
const packPath = pathStart
|
||||
? packStr.slice(pathStart, pathEnd).trim()
|
||||
: undefined;
|
||||
if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
else if (nameWithVersion.length === 2) {
|
||||
version = semver.clean(nameWithVersion[1]) || undefined;
|
||||
if (!version) {
|
||||
if (version) {
|
||||
try {
|
||||
new semver.Range(version);
|
||||
}
|
||||
catch (e) {
|
||||
// The range string is invalid. OK to ignore the caught error
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
}
|
||||
return {
|
||||
packName: nameWithVersion[0].trim(),
|
||||
version,
|
||||
};
|
||||
if (packPath &&
|
||||
(path.isAbsolute(packPath) ||
|
||||
// Permit using "/" instead of "\" on Windows
|
||||
// Use `x.split(y).join(z)` as a polyfill for `x.replaceAll(y, z)` since
|
||||
// if we used a regex we'd need to escape the path separator on Windows
|
||||
// which seems more awkward.
|
||||
path.normalize(packPath).split(path.sep).join("/") !==
|
||||
packPath.split(path.sep).join("/"))) {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
if (!packPath && pathStart) {
|
||||
// 0 length path
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
return (packName + (version ? `@${version}` : "") + (packPath ? `:${packPath}` : ""));
|
||||
}
|
||||
exports.validatePacksSpecification = validatePacksSpecification;
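Based on the parsing logic above, validatePacksSpecification re-emits a well-formed `scope/name@version:path` string and throws getPacksStrInvalid for anything else. Illustrative expectations follow; the POSIX path separator is assumed and the empty string stands in for the config-file argument used in error messages.

// Usage sketch against the function defined above (illustrative only).
const configUtils = require("./config-utils");

console.log(configUtils.validatePacksSpecification("a/b", ""));               // "a/b"
console.log(configUtils.validatePacksSpecification(" a/b@1.2.3 ", ""));       // "a/b@1.2.3"
console.log(configUtils.validatePacksSpecification("a/b@1.2.3:sub/dir", "")); // "a/b@1.2.3:sub/dir"

// Malformed names, bad semver ranges, and absolute or non-normalized paths all throw:
// configUtils.validatePacksSpecification("not-a-pack", "");     // throws
// configUtils.validatePacksSpecification("a/b@not-semver", ""); // throws
// configUtils.validatePacksSpecification("a/b@1.2.3:/abs", ""); // throws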
// exported for testing
|
||||
function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
|
||||
function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile, logger) {
|
||||
const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
|
||||
const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile);
|
||||
const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile, logger);
|
||||
if (!packsFromInput) {
|
||||
return packsFomConfig;
|
||||
}
|
||||
@@ -703,16 +781,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
var _a, _b, _c;
|
||||
let config;
|
||||
// If no config file was provided create an empty one
|
||||
if (!configFile) {
|
||||
logger.debug("No configuration file was provided");
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
}
|
||||
else {
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
}
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
File diff suppressed because one or more lines are too long
251 lib/config-utils.test.js generated
@@ -26,7 +26,6 @@ const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const github = __importStar(require("@actions/github"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const semver_1 = require("semver");
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
@@ -89,8 +88,8 @@ function mockListLanguages(languages) {
|
||||
};
|
||||
},
|
||||
});
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("loading config saves config", async (t) => {
|
||||
@@ -112,7 +111,7 @@ function mockListLanguages(languages) {
|
||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// Sanity check that getConfig returns undefined before we have called initConfig
|
||||
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
@@ -126,7 +125,7 @@ function mockListLanguages(languages) {
|
||||
(0, ava_1.default)("load input outside of workspace", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -139,7 +138,7 @@ function mockListLanguages(languages) {
|
||||
// no filename given, just a repo
|
||||
const configFile = "octo-org/codeql-config@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -153,7 +152,7 @@ function mockListLanguages(languages) {
|
||||
const configFile = "input";
|
||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -213,7 +212,6 @@ function mockListLanguages(languages) {
|
||||
paths: ["c/d"],
|
||||
},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
@@ -221,10 +219,11 @@ function mockListLanguages(languages) {
|
||||
debugMode: false,
|
||||
debugArtifactName: "my-artifact",
|
||||
debugDatabaseName: "my-db",
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
});
|
||||
@@ -260,7 +259,7 @@ function mockListLanguages(languages) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolve queries was called correctly
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
||||
@@ -303,18 +302,18 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries
|
||||
// and once for `./foo` from the config file.
|
||||
t.deepEqual(resolveQueriesArgs.length, 2);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}foo`));
|
||||
// Now check that the end result contains the default queries and the query from config
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/foo$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}foo`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Queries from config file can be overridden in workflow file", async (t) => {
|
||||
@@ -336,18 +335,18 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries and once for `./override`,
|
||||
// but won't be called for './foo' from the config file.
|
||||
t.deepEqual(resolveQueriesArgs.length, 2);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}override`));
|
||||
// Now check that the end result contains only the default queries and the override query
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}override`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
|
||||
@@ -368,17 +367,17 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for `./workflow-query`,
|
||||
// but won't be called for the default one since that was disabled
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
|
||||
t.true(resolveQueriesArgs[0].queries[0].endsWith(`${path.sep}workflow-query`));
|
||||
// Now check that the end result contains only the workflow query, and not the default one
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 0);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 1);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/workflow-query$/);
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}workflow-query`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Multiple queries can be specified in workflow file, no config file required", async (t) => {
|
||||
@@ -394,21 +393,21 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly:
|
||||
// It'll be called once for the default queries,
|
||||
// and then once for each of the two queries from the workflow
|
||||
t.deepEqual(resolveQueriesArgs.length, 3);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
|
||||
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}override1`));
|
||||
t.true(resolveQueriesArgs[2].queries[0].endsWith(`${path.sep}override2`));
|
||||
// Now check that the end result contains both the queries from the workflow, as well as the defaults
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 2);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/override1$/);
|
||||
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/override2$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}override1`));
|
||||
t.true(config.queries["javascript"].custom[1].queries[0].endsWith(`${path.sep}override2`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
|
||||
@@ -433,25 +432,25 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries,
|
||||
// once for each of additional1 and additional2,
|
||||
// and once for './foo' from the config file
|
||||
t.deepEqual(resolveQueriesArgs.length, 4);
|
||||
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[1].queries[0], /.*\/additional1$/);
|
||||
t.true(resolveQueriesArgs[1].queries[0].endsWith(`${path.sep}additional1`));
|
||||
t.deepEqual(resolveQueriesArgs[2].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[2].queries[0], /.*\/additional2$/);
|
||||
t.true(resolveQueriesArgs[2].queries[0].endsWith(`${path.sep}additional2`));
|
||||
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
|
||||
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
|
||||
t.true(resolveQueriesArgs[3].queries[0].endsWith(`${path.sep}foo`));
|
||||
// Now check that the end result contains all the queries
|
||||
t.deepEqual(config.queries["javascript"].builtin.length, 1);
|
||||
t.deepEqual(config.queries["javascript"].custom.length, 3);
|
||||
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
|
||||
t.regex(config.queries["javascript"].custom[0].queries[0], /.*\/additional1$/);
|
||||
t.regex(config.queries["javascript"].custom[1].queries[0], /.*\/additional2$/);
|
||||
t.regex(config.queries["javascript"].custom[2].queries[0], /.*\/foo$/);
|
||||
t.true(config.queries["javascript"].builtin[0].endsWith("javascript-code-scanning.qls"));
|
||||
t.true(config.queries["javascript"].custom[0].queries[0].endsWith(`${path.sep}additional1`));
|
||||
t.true(config.queries["javascript"].custom[1].queries[0].endsWith(`${path.sep}additional2`));
|
||||
t.true(config.queries["javascript"].custom[2].queries[0].endsWith(`${path.sep}foo`));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Invalid queries in workflow file handled correctly", async (t) => {
|
||||
@@ -472,7 +471,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.fail("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -515,7 +514,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
||||
const configFile = "octo-org/codeql-config/config.yaml@main";
|
||||
const languages = "javascript";
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
@@ -525,7 +524,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -541,7 +540,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -558,7 +557,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -570,7 +569,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const languages = "rubbish,english";
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -598,14 +597,9 @@ function queriesToResolvedQueryForm(queries) {
|
||||
const configFile = path.join(tmpDir, "codeql-config.yaml");
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
const languages = "javascript";
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
packName: "a/b",
|
||||
version: (0, semver_1.clean)("1.2.3"),
|
||||
},
|
||||
],
|
||||
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -637,20 +631,10 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript,python,cpp";
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
packName: "a/b",
|
||||
version: (0, semver_1.clean)("1.2.3"),
|
||||
},
|
||||
],
|
||||
[languages_1.Language.python]: [
|
||||
{
|
||||
packName: "c/d",
|
||||
version: (0, semver_1.clean)("1.2.3"),
|
||||
},
|
||||
],
|
||||
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||
[languages_1.Language.python]: ["c/d@1.2.3"],
|
||||
});
|
||||
t.deepEqual(queries, {
|
||||
cpp: {
|
||||
@@ -690,7 +674,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
||||
const inputFile = path.join(tmpDir, configFile);
|
||||
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -764,14 +748,14 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
|
||||
* Test macro for ensuring the packs block is valid
|
||||
*/
|
||||
const parsePacksMacro = ava_1.default.macro({
|
||||
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
|
||||
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b", mockLogger), expected),
|
||||
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
|
||||
});
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
const parsePacksErrorMacro = ava_1.default.macro({
|
||||
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
|
||||
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b", {}), {
|
||||
message: expected,
|
||||
}),
|
||||
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
|
||||
@@ -785,87 +769,106 @@ const invalidPackNameMacro = ava_1.default.macro({
|
||||
});
|
||||
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
|
||||
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "a/b", version: undefined },
|
||||
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
|
||||
],
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "a/b", version: undefined },
|
||||
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
|
||||
],
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("two packs with language", parsePacksMacro, {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||
}, [languages_1.Language.cpp, languages_1.Language.java, languages_1.Language.csharp], {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("two packs with unused language in config", parsePacksMacro, {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||
}, [languages_1.Language.cpp, languages_1.Language.csharp], {
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("packs with other valid names", parsePacksMacro, [
|
||||
// ranges are ok
|
||||
"c/d@1.0",
|
||||
"c/d@~1.0.0",
|
||||
"c/d@~1.0.0:a/b",
|
||||
"c/d@~1.0.0+abc:a/b",
|
||||
"c/d@~1.0.0-abc:a/b",
|
||||
"c/d:a/b",
|
||||
// whitespace is removed
|
||||
" c/d @ ~1.0.0 : b.qls ",
|
||||
// and it is retained within a path
|
||||
" c/d @ ~1.0.0 : b/a path with/spaces.qls ",
|
||||
// this is valid. the path is '@'. It will probably fail when passed to the CLI
|
||||
"c/d@1.2.3:@",
|
||||
// this is valid, too. It will fail if it doesn't match a path
|
||||
// (globbing is not done)
|
||||
"c/d@1.2.3:+*)_(",
|
||||
], [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "a/b", version: undefined },
|
||||
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
|
||||
],
|
||||
[languages_1.Language.java]: [
|
||||
{ packName: "d/e", version: undefined },
|
||||
{ packName: "f/g", version: (0, semver_1.clean)("1.2.3") },
|
||||
"c/d@1.0",
|
||||
"c/d@~1.0.0",
|
||||
"c/d@~1.0.0:a/b",
|
||||
"c/d@~1.0.0+abc:a/b",
|
||||
"c/d@~1.0.0-abc:a/b",
|
||||
"c/d:a/b",
|
||||
"c/d@~1.0.0:b.qls",
|
||||
"c/d@~1.0.0:b/a path with/spaces.qls",
|
||||
"c/d@1.2.3:@",
|
||||
"c/d@1.2.3:+*)_(",
|
||||
],
|
||||
});
|
||||
(0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
|
||||
(0, ava_1.default)("invalid language", parsePacksErrorMacro, { [languages_1.Language.java]: ["c/d"] }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" has "java", but it is not one of the languages to analyze/);
|
||||
(0, ava_1.default)("not an array", parsePacksErrorMacro, { [languages_1.Language.cpp]: "c/d" }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" must be an array of non-empty strings/);
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c-/d");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "-c/d");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d_d");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@x");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@@");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@1.0.0:");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d:");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d:/a");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "@1.0.0:a");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@../a");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@b/../a");
|
||||
(0, ava_1.default)(invalidPackNameMacro, "c/d:z@1");
|
||||
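Taken together, the valid and invalid cases above exercise a pack specifier of the shape scope/name, optionally followed by @version (a semver version or range) and a :path inside the pack. The following is a rough, standalone TypeScript sketch of that shape for orientation only; roughParsePackSpec is a hypothetical helper, and the action's real parser is stricter (for example it rejects empty, absolute, or ".."-containing paths, non-semver versions, and names like "c/d:z@1").

// Illustrative only: approximates the "scope/name[@version][:path]" shape exercised
// by the tests above; it is NOT the action's validation logic.
interface ParsedPackSpec {
  name: string;       // "scope/name"
  version?: string;   // e.g. "1.2.3" or "~1.0.0"
  packPath?: string;  // optional path within the pack, e.g. "a/b" or "b.qls"
}

function roughParsePackSpec(spec: string): ParsedPackSpec | undefined {
  // Whitespace around the separators is ignored; whitespace inside the path is kept.
  const match = spec.match(/^\s*([^@:\s]+\/[^@:\s]+)\s*(?:@\s*([^:]*?)\s*)?(?::(.*))?$/);
  if (!match) {
    return undefined;
  }
  const name = match[1];
  const version = match[2] as string | undefined;
  const rest = match[3] as string | undefined;
  return {
    name,
    version: version || undefined,
    packPath: rest === undefined ? undefined : rest.trim() || undefined,
  };
}

// roughParsePackSpec(" c/d @ ~1.0.0 : b/a path with/spaces.qls ")
//   -> { name: "c/d", version: "~1.0.0", packPath: "b/a path with/spaces.qls" }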
/**
|
||||
* Test macro for testing the packs block and the packs input
|
||||
*/
|
||||
function parseInputAndConfigMacro(t, packsFromConfig, packsFromInput, languages, expected) {
|
||||
t.deepEqual(configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b"), expected);
|
||||
t.deepEqual(configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b", mockLogger), expected);
|
||||
}
|
||||
parseInputAndConfigMacro.title = (providedTitle) => `Parse Packs input and config: ${providedTitle}`;
|
||||
const mockLogger = {
|
||||
info: (message) => {
|
||||
console.log(message);
|
||||
},
|
||||
};
|
||||
function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, languages, expected) {
|
||||
t.throws(() => {
|
||||
configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b");
|
||||
configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b", mockLogger);
|
||||
}, {
|
||||
message: expected,
|
||||
});
|
||||
}
|
||||
parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and config Error: ${providedTitle}`;
|
||||
(0, ava_1.default)("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [{ packName: "c/d", version: undefined }],
|
||||
[languages_1.Language.cpp]: ["c/d"],
|
||||
});
|
||||
(0, ava_1.default)("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "a/b", version: undefined },
|
||||
{ packName: "c/d", version: "1.2.3" },
|
||||
],
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "a/b", version: undefined },
|
||||
{ packName: "c/d", version: "1.2.3" },
|
||||
],
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "a/b", version: undefined },
|
||||
{ packName: "c/d", version: undefined },
|
||||
],
|
||||
[languages_1.Language.cpp]: ["a/b", "c/d"],
|
||||
});
|
||||
(0, ava_1.default)("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "e/f", version: undefined },
|
||||
{ packName: "g/h", version: "1.2.3" },
|
||||
],
|
||||
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3"],
|
||||
});
|
||||
(0, ava_1.default)("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
{ packName: "e/f", version: undefined },
|
||||
{ packName: "g/h", version: "1.2.3" },
|
||||
{ packName: "a/b", version: undefined },
|
||||
{ packName: "c/d", version: undefined },
|
||||
],
|
||||
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3", "a/b", "c/d"],
|
||||
});
|
||||
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
|
||||
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
||||
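The cases above pin down how a packs input interacts with packs from the configuration file: an input beginning with "+" is appended to the config packs, anything else replaces them, and a blank input leaves the config untouched. A minimal standalone sketch of that rule, simplified to a single language's pack list (mergePacksInput is a hypothetical helper, not the action's parsePacks, which also validates each pack string and keys the result by language):

// Simplified illustration of the merge rule exercised above.
function mergePacksInput(configPacks: string[], packsInput: string): string[] {
  const trimmed = packsInput.trim();
  const additive = trimmed.startsWith("+");
  const inputPacks = (additive ? trimmed.slice(1) : trimmed)
    .split(",")
    .map((p) => p.trim())
    .filter((p) => p.length > 0);
  if (additive) {
    // "+e/f, g/h@1.2.3" -> input packs first, then the packs from the config file.
    return [...inputPacks, ...configPacks];
  }
  // A non-empty input replaces the config packs entirely; a blank input keeps them.
  return inputPacks.length > 0 ? inputPacks : configPacks;
}

// mergePacksInput(["a/b", "c/d"], " +e/f, g/h@1.2.3 ")  -> ["e/f", "g/h@1.2.3", "a/b", "c/d"]
// mergePacksInput(["a/b", "c/d"], " e/f, g/h@1.2.3 ")   -> ["e/f", "g/h@1.2.3"]
// mergePacksInput(["a/b", "c/d"], " ")                  -> ["a/b", "c/d"]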
@@ -888,16 +891,13 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
};
|
||||
},
|
||||
});
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []), (0, logging_1.getRunnerLogger)(true));
|
||||
if (expectedVersionString !== undefined) {
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: expectedVersionString,
|
||||
},
|
||||
`codeql/javascript-experimental-atm-queries@${expectedVersionString}`,
|
||||
],
|
||||
});
|
||||
}
|
||||
@@ -910,11 +910,34 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
? `${expectedVersionString} are`
|
||||
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
|
||||
});
|
||||
// macro, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, versionString
|
||||
// macro, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString
|
||||
// Test that ML-powered queries aren't run on v2.7.4 of the CLI.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
|
||||
// Test that ML-powered queries aren't run when the feature flag is off.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
|
||||
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
|
||||
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
|
||||
// `security-extended` or `security-and-quality` query suite.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.0.2");
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.0.2");
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
|
||||
// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
|
||||
// versions of the CodeQL CLI prior to 2.9.0.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
|
||||
// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality` on
|
||||
// versions of the CodeQL CLI prior to 2.9.0.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.2.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL CLI
|
||||
// 2.9.0+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-extended", "~0.2.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
|
||||
// CLI 2.9.0+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-and-quality", "~0.2.0");
|
||||
// Test that we don't inject an ML-powered query pack if the user has already specified one.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
|
||||
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL
|
||||
// CLI 2.9.3+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-extended", "~0.3.0");
|
||||
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
|
||||
// CLI 2.9.3+.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-and-quality", "~0.3.0");
|
||||
//# sourceMappingURL=config-utils.test.js.map
|
||||
File diff suppressed because one or more lines are too long
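For reference, the ML-powered query expectations encoded by the test cases above, collected into one place. The values are the version ranges of the codeql/javascript-experimental-atm-queries pack that the tests expect the init step to inject for a sampled CLI version and query suite; this only restates the sampled test data and does not pin down the exact cut-offs between the sampled versions.

// Summary of the sampled expectations above (illustrative data mirroring the tests):
// - feature flag off, no query suite, or CLI 2.7.4 and older  -> no ML-powered pack
// - a user-supplied codeql/javascript-experimental-atm-queries pack is never overridden
const expectedAtmPackBySample: Array<{
  cli: string;
  suite: "security-extended" | "security-and-quality";
  expected: string | undefined; // undefined on win32 where noted below
}> = [
  { cli: "2.7.5", suite: "security-extended",    expected: "~0.0.2" },
  { cli: "2.7.5", suite: "security-and-quality", expected: "~0.0.2" },
  { cli: "2.8.3", suite: "security-extended",    expected: "~0.1.0" }, // non-Windows only
  { cli: "2.8.5", suite: "security-extended",    expected: "~0.2.0" }, // non-Windows only
  { cli: "2.8.5", suite: "security-and-quality", expected: "~0.2.0" }, // non-Windows only
  { cli: "2.9.0", suite: "security-extended",    expected: "~0.2.0" },
  { cli: "2.9.0", suite: "security-and-quality", expected: "~0.2.0" },
  { cli: "2.9.3", suite: "security-extended",    expected: "~0.3.0" },
  { cli: "2.9.3", suite: "security-and-quality", expected: "~0.3.0" },
];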
2
lib/database-upload.test.js
generated
@@ -50,7 +50,6 @@ function getTestConfig(tmpDir) {
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: "foo",
|
||||
gitHubVersion: { type: util_1.GitHubVariant.DOTCOM },
|
||||
dbLocation: tmpDir,
|
||||
@@ -58,6 +57,7 @@ function getTestConfig(tmpDir) {
|
||||
debugMode: false,
|
||||
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
injectedMlQueries: false,
|
||||
};
|
||||
}
|
||||
async function mockHttpRequests(databaseUploadStatusCode) {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-20220214"
|
||||
"bundleVersion": "codeql-bundle-20220714"
|
||||
}
|
||||
|
||||
10
lib/feature-flags.js
generated
@@ -25,6 +25,7 @@ const util = __importStar(require("./util"));
|
||||
var FeatureFlag;
|
||||
(function (FeatureFlag) {
|
||||
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||
FeatureFlag["LuaTracerConfigEnabled"] = "lua_tracer_config_enabled";
|
||||
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
|
||||
class GitHubFeatureFlags {
|
||||
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
|
||||
@@ -34,12 +35,17 @@ class GitHubFeatureFlags {
|
||||
this.logger = logger;
|
||||
}
|
||||
async getValue(flag) {
|
||||
const response = (await this.getApiResponse())[flag];
|
||||
const response = await this.getApiResponse();
|
||||
if (response === undefined) {
|
||||
this.logger.debug(`No feature flags API response for ${flag}, considering it disabled.`);
|
||||
return false;
|
||||
}
|
||||
const flagValue = response[flag];
|
||||
if (flagValue === undefined) {
|
||||
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
|
||||
return false;
|
||||
}
|
||||
return response;
|
||||
return flagValue;
|
||||
}
|
||||
async getApiResponse() {
|
||||
const loadApiResponse = async () => {
|
||||
|
||||
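Because this hunk interleaves the removed and added lines, here is the resulting lookup rule of the new getValue in one standalone piece (a sketch, not the GitHubFeatureFlags class itself): a missing API response and a flag that is absent from the response are both treated as disabled, each with its own debug message.

// Standalone sketch of the behaviour added above (assumes only a debug-style logger).
type FeatureFlagsApiResponse = Record<string, boolean | undefined> | undefined;

function resolveFlag(
  response: FeatureFlagsApiResponse,
  flag: string,
  debug: (message: string) => void
): boolean {
  if (response === undefined) {
    debug(`No feature flags API response for ${flag}, considering it disabled.`);
    return false;
  }
  const flagValue = response[flag];
  if (flagValue === undefined) {
    debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
    return false;
  }
  return flagValue;
}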
@@ -1 +1 @@
|
||||
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAEX;AAFD,WAAY,WAAW;IACrB,qEAAsD,CAAA;AACxD,CAAC,EAFW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAEtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,CAAC,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QACrD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AAhED,gDAgEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
|
||||
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAGX;AAHD,WAAY,WAAW;IACrB,qEAAsD,CAAA;IACtD,mEAAoD,CAAA;AACtD,CAAC,EAHW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAGtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,IAAI,4BAA4B,CACtE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AAvED,gDAuEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
|
||||
22
lib/feature-flags.test.js
generated
@@ -40,6 +40,22 @@ for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
|
||||
});
|
||||
});
|
||||
}
|
||||
(0, ava_1.default)("API response missing", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const loggedMessages = [];
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(403, {});
|
||||
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
|
||||
t.assert((await featureFlags.getValue(flag)) === false);
|
||||
}
|
||||
for (const featureFlag of ["ml_powered_queries_enabled"]) {
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
`No feature flags API response for ${featureFlag}, considering it disabled.`) !== undefined);
|
||||
}
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -66,7 +82,10 @@ for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
|
||||
});
|
||||
});
|
||||
});
|
||||
const FEATURE_FLAGS = ["ml_powered_queries_enabled"];
|
||||
const FEATURE_FLAGS = [
|
||||
"ml_powered_queries_enabled",
|
||||
"lua_tracer_config_enabled",
|
||||
];
|
||||
for (const featureFlag of FEATURE_FLAGS) {
|
||||
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
@@ -80,6 +99,7 @@ for (const featureFlag of FEATURE_FLAGS) {
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
|
||||
const actualFeatureFlags = {
|
||||
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
|
||||
lua_tracer_config_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.LuaTracerConfigEnabled),
|
||||
};
|
||||
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
|
||||
});
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,uBAAuB,CAAC,EACtE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG,CAAC,4BAA4B,CAAC,CAAC;AAErD,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CAAC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,C
AAC,CAAC;CACJ"}
|
||||
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,qCAAqC,WAAW,4BAA4B,CACjF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,uBAAuB,CAAC,EACtE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAA
C,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,4BAA4B;IAC5B,2BAA2B;CAC5B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CAAC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;gBACD,yBAAyB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACpD,2BAAW,CAAC,sBAAsB,CACnC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
|
||||
3
lib/fingerprints.js
generated
@@ -24,6 +24,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.addFingerprints = exports.resolveUriToFile = exports.hash = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path_1 = __importDefault(require("path"));
|
||||
const long_1 = __importDefault(require("long"));
|
||||
const tab = "\t".charCodeAt(0);
|
||||
const space = " ".charCodeAt(0);
|
||||
@@ -209,7 +210,7 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
|
||||
// Just assume a relative path is relative to the src root.
|
||||
// This is not necessarily true but should be a good approximation
|
||||
// and here we likely want to err on the side of handling more cases.
|
||||
if (!uri.startsWith("/")) {
|
||||
if (!path_1.default.isAbsolute(uri)) {
|
||||
uri = srcRootPrefix + uri;
|
||||
}
|
||||
// Check the file exists
|
||||
|
||||
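The hunk above swaps a uri.startsWith("/") test for path.isAbsolute(uri) when deciding whether to prefix a location with the source root. A short illustration of why, using Node's platform-specific path implementations (standard Node behaviour, not code from this repository):

import * as path from "path";

// On Windows an absolute path does not have to start with "/", so the old check treated
// drive-letter paths as relative and wrongly prepended the source root to them.
path.win32.isAbsolute("C:\\src\\foo\\bar.js"); // true  -- startsWith("/") would say false
path.posix.isAbsolute("/src/foo/bar.js");      // true  -- both checks agree here
path.win32.isAbsolute("foo\\bar.js");          // false -- still resolved against the source root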
File diff suppressed because one or more lines are too long
14
lib/fingerprints.test.js
generated
@@ -131,20 +131,22 @@ function testResolveUriToFile(uri, index, artifactsURIs) {
|
||||
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), (0, logging_1.getRunnerLogger)(true));
|
||||
}
|
||||
(0, ava_1.default)("resolveUriToFile", (t) => {
|
||||
var _a, _b;
|
||||
// The resolveUriToFile method checks that the file exists and is in the right directory
|
||||
// so we need to give it real files to look at. We will use this file as an example.
|
||||
// For this to work we require the current working directory to be a parent, but this
|
||||
// should generally always be the case so this is fine.
|
||||
const cwd = process.cwd();
|
||||
const filepath = __filename;
|
||||
t.true(filepath.startsWith(`${cwd}/`));
|
||||
const relativeFilepath = filepath.substring(cwd.length + 1);
|
||||
const filepath = __filename.split(path.sep).join("/");
|
||||
const relativeFilepath = path
|
||||
.relative(process.cwd(), __filename)
|
||||
.split(path.sep)
|
||||
.join("/");
|
||||
// Absolute paths are unmodified
|
||||
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
|
||||
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath);
|
||||
// Relative paths are made absolute
|
||||
t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
|
||||
t.is(testResolveUriToFile(`file://${relativeFilepath}`, undefined, []), filepath);
|
||||
t.is((_a = testResolveUriToFile(relativeFilepath, undefined, [])) === null || _a === void 0 ? void 0 : _a.split(path.sep).join("/"), filepath);
|
||||
t.is((_b = testResolveUriToFile(`file://${relativeFilepath}`, undefined, [])) === null || _b === void 0 ? void 0 : _b.split(path.sep).join("/"), filepath);
|
||||
// Absolute paths outside the src root are discarded
|
||||
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined);
|
||||
t.is(testResolveUriToFile("file:///src/foo/bar.js", undefined, []), undefined);
|
||||
|
||||
File diff suppressed because one or more lines are too long
17
lib/init-action.js
generated
@@ -22,6 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api_client_1 = require("./api-client");
|
||||
const codeql_1 = require("./codeql");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const init_1 = require("./init");
|
||||
@@ -48,7 +49,7 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
|
||||
}
|
||||
if (queriesInput !== undefined) {
|
||||
queriesInput = queriesInput.startsWith("+")
|
||||
? queriesInput.substr(1)
|
||||
? queriesInput.slice(1)
|
||||
: queriesInput;
|
||||
queries.push(...queriesInput.split(","));
|
||||
}
|
||||
@@ -70,6 +71,7 @@ async function run() {
|
||||
const startedAt = new Date();
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
|
||||
await (0, util_1.checkActionVersion)(pkg.version);
|
||||
let config;
|
||||
let codeql;
|
||||
let toolsVersion;
|
||||
@@ -78,7 +80,7 @@ async function run() {
|
||||
externalRepoAuth: (0, actions_util_1.getOptionalInput)("external-repository-token"),
|
||||
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
|
||||
};
|
||||
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
|
||||
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
|
||||
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
|
||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
|
||||
@@ -87,11 +89,16 @@ async function run() {
|
||||
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
||||
return;
|
||||
}
|
||||
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), (0, actions_util_1.getToolCacheDirectory)(), gitHubVersion.type, logger);
|
||||
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, logger);
|
||||
codeql = initCodeQLResult.codeql;
|
||||
toolsVersion = initCodeQLResult.toolsVersion;
|
||||
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
|
||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
|
||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"),
|
||||
// Debug mode is enabled if:
|
||||
// - The `init` Action is passed `debug: true`.
|
||||
// - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
|
||||
// or by setting the `ACTIONS_STEP_DEBUG` secret to `true`).
|
||||
(0, actions_util_1.getOptionalInput)("debug") === "true" || core.isDebug(), (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
|
||||
if (config.languages.includes(languages_1.Language.python) &&
|
||||
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
|
||||
try {
|
||||
@@ -126,7 +133,7 @@ async function run() {
|
||||
(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram")).toString());
|
||||
core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
|
||||
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
|
||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined);
|
||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined, featureFlags);
|
||||
if (tracerConfig !== undefined) {
|
||||
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
||||
core.exportVariable(key, value);
|
||||
|
||||
File diff suppressed because one or more lines are too long
17
lib/init.js
generated
@@ -30,29 +30,29 @@ const configUtils = __importStar(require("./config-utils"));
|
||||
const tracer_config_1 = require("./tracer-config");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger) {
|
||||
async function initCodeQL(codeqlURL, apiDetails, tempDir, variant, logger) {
|
||||
logger.startGroup("Setup CodeQL tools");
|
||||
const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, true);
|
||||
const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(codeqlURL, apiDetails, tempDir, variant, logger, true);
|
||||
await codeql.printVersion();
|
||||
logger.endGroup();
|
||||
return { codeql, toolsVersion };
|
||||
}
|
||||
exports.initCodeQL = initCodeQL;
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
logger.startGroup("Load language configuration");
|
||||
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
analysisPaths.printPathFiltersWarning(config, logger);
|
||||
logger.endGroup();
|
||||
return config;
|
||||
}
|
||||
exports.initConfig = initConfig;
|
||||
async function runInit(codeql, config, sourceRoot, processName, processLevel) {
|
||||
async function runInit(codeql, config, sourceRoot, processName, processLevel, featureFlags) {
|
||||
var _a, _b;
|
||||
fs.mkdirSync(config.dbLocation, { recursive: true });
|
||||
try {
|
||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Init a database cluster
|
||||
await codeql.databaseInitCluster(config.dbLocation, config.languages, sourceRoot, processName, processLevel);
|
||||
await codeql.databaseInitCluster(config, sourceRoot, processName, processLevel, featureFlags);
|
||||
}
|
||||
else {
|
||||
for (const language of config.languages) {
|
||||
@@ -179,12 +179,15 @@ async function installPythonDeps(codeql, logger) {
|
||||
if (process.platform === "win32") {
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("py"), [
|
||||
"-3",
|
||||
"-B",
|
||||
path.join(scriptsFolder, script),
|
||||
path.dirname(codeql.getPath()),
|
||||
]).exec();
|
||||
}
|
||||
else {
|
||||
await new toolrunner.ToolRunner(path.join(scriptsFolder, script), [
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("python3"), [
|
||||
"-B",
|
||||
path.join(scriptsFolder, script),
|
||||
path.dirname(codeql.getPath()),
|
||||
]).exec();
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,kDAAkD;QAClD,yCAAyC;QACzC,IACE,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC,CAAA;YACnD,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,uCAAuC,CAAC,EAC3D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CACtB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;SACH;aAAM,IACL,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC,CAAA,EAC7D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACrC;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAlDD,0BAkDC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAA
C,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
|
||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AAnBD,gCAmBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAxCD,gCAwCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC,EAChC,YAA0B;;IAE1B,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI;QACF,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;YAChE,0BAA0B;YAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,YAAY,EACZ,YAAY,CACb,CAAC;SACH;aAAM;YACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,yBAAyB;gBACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,kDAAkD;QAClD,yCAAyC;QACzC,IACE,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,8BAA8B,CAAC,CAAA;YACnD,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,uCAAuC,CAAC,EAC3D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CACtB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;SACH;aAAM,IACL,CAAC,YAAY,KAAK;aAClB,MAAA,CAAC,CAAC,OAAO,0CAAE,QAAQ,CAAC,wCAAwC,CAAC,CAAA,EAC7D;YACA,MAAM,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACrC;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnDD,0BAmDC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,
aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
|
||||
3
lib/languages.js
generated
@@ -11,6 +11,7 @@ var Language;
|
||||
Language["javascript"] = "javascript";
|
||||
Language["python"] = "python";
|
||||
Language["ruby"] = "ruby";
|
||||
Language["swift"] = "swift";
|
||||
})(Language = exports.Language || (exports.Language = {}));
|
||||
// Additional names for languages
|
||||
const LANGUAGE_ALIASES = {
|
||||
@@ -35,7 +36,7 @@ function parseLanguage(language) {
|
||||
}
|
||||
exports.parseLanguage = parseLanguage;
|
||||
function isTracedLanguage(language) {
|
||||
return (["cpp", "java", "csharp"].includes(language) ||
|
||||
return (["cpp", "java", "csharp", "swift"].includes(language) ||
|
||||
(process.env["CODEQL_EXTRACTOR_GO_BUILD_TRACING"] === "on" &&
|
||||
language === Language.go));
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QAQX;AARD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;AACf,CAAC,EARW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAQnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CACL,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QAC5C,CAAC,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,KAAK,IAAI;YACxD,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAC5B,CAAC;AACJ,CAAC;AAND,4CAMC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
|
||||
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CACL,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QACrD,CAAC,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,KAAK,IAAI;YACxD,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAC5B,CAAC;AACJ,CAAC;AAND,4CAMC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
|
||||
17
lib/runner.js
generated
@@ -23,7 +23,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const commander_1 = require("commander");
|
||||
const del_1 = __importDefault(require("del"));
|
||||
@@ -51,13 +50,6 @@ function getTempDir(userInput) {
     }
     return tempDir;
 }
-function getToolsDir(userInput) {
-    const toolsDir = userInput || path.join(os.homedir(), "codeql-runner-tools");
-    if (!fs.existsSync(toolsDir)) {
-        fs.mkdirSync(toolsDir, { recursive: true });
-    }
-    return toolsDir;
-}
 const codeqlEnvJsonFilename = "codeql-env.json";
 function loadTracerEnvironment(config) {
     const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
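The removed getToolsDir helper above followed the same create-if-missing pattern that getTempDir still uses. For reference, a self-contained TypeScript sketch of that pattern; the ensureDir name and the example default path are illustrative, not part of the runner:

import * as fs from "fs";
import * as os from "os";
import * as path from "path";

// Create-if-missing directory helper; `ensureDir` is a hypothetical name,
// mirroring what the removed getToolsDir did for the runner tools directory.
function ensureDir(userInput: string | undefined, defaultDir: string): string {
  const dir = userInput || defaultDir;
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
  return dir;
}

// Example: the default location the removed helper used before this change.
const toolsDir = ensureDir(undefined, path.join(os.homedir(), "codeql-runner-tools"));
console.log(`tools directory: ${toolsDir}`);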
@@ -134,7 +126,6 @@ program
     const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
     try {
         const tempDir = getTempDir(cmd.tempDir);
-        const toolsDir = getToolsDir(cmd.toolsDir);
         const checkoutPath = cmd.checkoutPath || process.cwd();
         // Wipe the temp dir
         logger.info(`Cleaning temp directory ${tempDir}`);
@@ -160,13 +151,13 @@ program
             codeql = await (0, codeql_1.getCodeQL)(cmd.codeqlPath);
         }
         else {
-            codeql = (await (0, init_1.initCodeQL)(undefined, apiDetails, tempDir, toolsDir, gitHubVersion.type, logger)).codeql;
+            codeql = (await (0, init_1.initCodeQL)(undefined, apiDetails, tempDir, gitHubVersion.type, logger)).codeql;
         }
         await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
         const workspacePath = checkoutPath;
-        const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
+        const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
         const sourceRoot = checkoutPath;
-        const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
+        const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel(), (0, feature_flags_1.createFeatureFlags)([]));
         if (tracerConfig === undefined) {
             return;
         }
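In this hunk the runner starts threading a feature-flags object through initCodeQL, initConfig and runInit, always built with createFeatureFlags([]). A hedged TypeScript sketch of what such a container can look like; the interface shape and the stub implementation below are assumptions for illustration, not the action's actual feature-flags API:

// Illustrative only: a minimal feature-flag container in the spirit of the
// createFeatureFlags([]) calls above. The real implementation lives in
// src/feature-flags.ts; this shape is an assumption.
interface FeatureFlags {
  getValue(flag: string): Promise<boolean>;
}

function createStubFeatureFlags(enabledFlags: string[]): FeatureFlags {
  const enabled = new Set(enabledFlags);
  return {
    // Resolve each flag from the explicit list; nothing is fetched remotely.
    getValue: async (flag: string) => enabled.has(flag),
  };
}

// The runner passes an empty list, so every flag reports as disabled.
const featureFlags = createStubFeatureFlags([]);
featureFlags.getValue("some_experimental_feature").then((on) => console.log(on)); // false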
@@ -297,7 +288,7 @@ program
     }
     const threads = (0, util_1.getThreadsFlag)(cmd.threads || initEnv["CODEQL_THREADS"], logger);
     const memory = (0, util_1.getMemoryFlag)(cmd.ram || initEnv["CODEQL_RAM"]);
-    await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
+    await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, (0, feature_flags_1.createFeatureFlags)([]));
     await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger);
     if (!cmd.upload) {
         logger.info("Not uploading results");
File diff suppressed because one or more lines are too long
8
lib/testing-utils.js
generated
@@ -64,6 +64,13 @@ function setupTests(test) {
         const processStderrWrite = process.stderr.write.bind(process.stderr);
         t.context.stderrWrite = processStderrWrite;
         process.stderr.write = wrapOutput(t.context);
+        // Workaround an issue in tests where the case insensitivity of the `$PATH`
+        // environment variable on Windows isn't preserved, i.e. `process.env.PATH`
+        // is not the same as `process.env.Path`.
+        const pathKeys = Object.keys(process.env).filter((k) => k.toLowerCase() === "path");
+        if (pathKeys.length > 0) {
+            process.env.PATH = process.env[pathKeys[0]];
+        }
         // Many tests modify environment variables. Take a copy now so that
         // we reset them after the test to keep tests independent of each other.
         // process.env only has strings fields, so a shallow copy is fine.
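The seven added lines normalize the PATH environment variable before every test so that Windows' case-insensitive `Path` is always reachable as `process.env.PATH`. The same logic as a standalone TypeScript sketch; the helper name is illustrative, since the action inlines this in setupTests:

// Copy whichever casing of the PATH variable exists (e.g. `Path` on Windows)
// into `process.env.PATH`, so later lookups see a consistent key.
function normalizePathEnv(env: NodeJS.ProcessEnv): void {
  const pathKeys = Object.keys(env).filter((k) => k.toLowerCase() === "path");
  if (pathKeys.length > 0) {
    env.PATH = env[pathKeys[0]];
  }
}

normalizePathEnv(process.env);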
@@ -90,6 +97,7 @@ exports.setupTests = setupTests;
 function setupActionsVars(tempDir, toolsDir) {
     process.env["RUNNER_TEMP"] = tempDir;
     process.env["RUNNER_TOOL_CACHE"] = toolsDir;
+    process.env["GITHUB_WORKSPACE"] = tempDir;
 }
 exports.setupActionsVars = setupActionsVars;
 function getRecordingLogger(messages) {
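setupActionsVars now also points GITHUB_WORKSPACE at the temporary directory, alongside RUNNER_TEMP and RUNNER_TOOL_CACHE. A hedged TypeScript sketch of how a test might use it; the ava import path, test title and scratch directories are assumptions:

import test from "ava";
import * as path from "path";
import { setupActionsVars, setupTests } from "./testing-utils";

setupTests(test);

test("actions environment variables point at scratch directories", (t) => {
  // Hypothetical scratch locations; real tests typically allocate a temp dir first.
  const tempDir = path.join(process.cwd(), "scratch-temp");
  const toolsDir = path.join(process.cwd(), "scratch-tools");
  setupActionsVars(tempDir, toolsDir);
  t.is(process.env["GITHUB_WORKSPACE"], tempDir);
  t.is(process.env["RUNNER_TOOL_CACHE"], toolsDir);
});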
@@ -1 +1 @@
(generated single-line source map for lib/testing-utils.js; the "mappings" string was regenerated, and the diff is suppressed here because the lines are too long)
Some files were not shown because too many files have changed in this diff