Mirror of https://github.com/github/codeql-action.git (synced 2025-12-10 17:54:36 +08:00)

Compare commits: 517 commits
The compare view lists the 517 commits by abbreviated SHA only, beginning with 4854dd23d5 and ending with 049e10c079; the author, date, and commit-message columns are empty in this capture.
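If the metadata missing from the commit list is needed, it can be recovered from a local clone of the mirror. The sketch below is illustrative only: it assumes the two abbreviated SHAs above still resolve to commits reachable from the default branch of the clone.

```bash
# Clone the mirrored repository and list the compared range with author, date,
# and subject (newest first). The SHA endpoints are taken from the list above;
# everything else is a plain git invocation, not part of the mirror itself.
git clone https://github.com/github/codeql-action.git
cd codeql-action
git log --format='%h  %an  %ad  %s' --date=short 049e10c079..4854dd23d5
```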
(ESLint configuration)

@@ -14,11 +14,14 @@
     ],
     "rules": {
         "filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
+        "i18n-text/no-en": "off",
         "import/extensions": "error",
         "import/no-amd": "error",
         "import/no-commonjs": "error",
         "import/no-dynamic-require": "error",
-        "import/no-extraneous-dependencies": ["error"],
+        // Disable the rule that checks that devDependencies aren't imported since we use a single
+        // linting configuration file for both source and test code.
+        "import/no-extraneous-dependencies": ["error", {"devDependencies": true}],
         "import/no-namespace": "off",
         "import/no-unresolved": "error",
         "import/no-webpack-loader-syntax": "error",
@@ -48,7 +51,8 @@
         "@typescript-eslint/prefer-regexp-exec": "off",
         "@typescript-eslint/require-await": "off",
         "@typescript-eslint/restrict-template-expressions": "off",
-        "func-style": "off"
+        "func-style": "off",
+        "sort-imports": "off"
     }
 }]
}
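These rules feed the repository's lint step. A minimal way to exercise them locally is sketched below; it assumes the package.json `lint` script invoked by the lint-js PR check further down is wired to this configuration.

```bash
# Install dependencies (including the ESLint plugins referenced above), then run
# the same lint entry point that the lint-js job in pr-checks.yml uses.
npm install
npm run-script lint
```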
1 .gitattributes (vendored)

@@ -1,4 +1,5 @@
 lib/*.js linguist-generated=true
+.github/workflows/__* linguist-generated=true

 # Reduce incidence of needless merge conflicts on CHANGELOG.md
 # The man page at
14 .github/dependabot.yml (vendored, new file)

@@ -0,0 +1,14 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "thursday" # Gives us a working day to merge this before our typical release
    labels:
      - "Update dependencies"
  - package-ecosystem: "npm"
    directory: "/runner"
    schedule:
      interval: "weekly"
      day: "thursday" # Gives us a working day to merge this before our typical release
39 .github/prepare-test/action.yml (vendored, new file)

@@ -0,0 +1,39 @@
name: "Prepare test"
description: Performs some preparation to run tests
inputs:
  version:
    required: true
outputs:
  tools-url:
    value: ${{ steps.get-url.outputs.tools-url }}
runs:
  using: composite
  steps:
    - name: Move codeql-action
      shell: bash
      run: |
        mkdir ../action
        mv * .github ../action/
        mv ../action/tests/multi-language-repo/{*,.github} .
        mv ../action/.github/workflows .github
    - id: get-url
      name: Determine URL
      shell: bash
      run: |
        if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
          export LATEST=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
          echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
          export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
          echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == *"stable"* ]]; then
          export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
          echo "Hello $VERSION"
          echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
        elif [[ ${{ inputs.version }} == "latest" ]]; then
          echo "::set-output name=tools-url::latest"
        elif [[ ${{ inputs.version }} == "cached" ]]; then
          echo "::set-output name=tools-url::"
        else
          echo "::error Unrecognized version specified!"
        fi
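For experimenting with the version-to-URL branches outside of a workflow run, the same mapping can be expressed as a small shell function. This is a sketch, not part of the repository: `resolve_tools_url` is a hypothetical name, the `::set-output` plumbing is dropped, and the URL is simply printed.

```bash
# Reproduces the branching of the "Determine URL" step above for local testing.
resolve_tools_url() {
  local version="$1" suffix="${1##*-}"   # suffix mirrors `sed -e 's/^.*\-//'`
  case "$version" in
    nightly-latest)
      local latest
      latest=$(gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3)
      echo "https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$latest/codeql-bundle.tar.gz" ;;
    *nightly*)
      echo "https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$suffix-manual/codeql-bundle.tar.gz" ;;
    *stable*)
      echo "https://github.com/github/codeql-action/releases/download/codeql-bundle-$suffix/codeql-bundle.tar.gz" ;;
    latest)  echo "latest" ;;
    cached)  echo "" ;;
    *)       echo "Unrecognized version specified!" >&2; return 1 ;;
  esac
}

resolve_tools_url stable-20210809
```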
11 .github/update-release-branch.py (vendored)

@@ -8,11 +8,12 @@ import json
 import datetime
 import os

-EMPTY_CHANGELOG = """
-# CodeQL Action and CodeQL Runner Changelog
+EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog

 ## [UNRELEASED]

+No user facing changes.
+
 """

 # The branch being merged from.
@@ -92,7 +93,9 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
   title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH

   # Create the pull request
-  pr = repo.create_pull(title=title, body='\n'.join(body), head=branch_name, base=LATEST_RELEASE_BRANCH)
+  # PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
+  # a maintainer can take the PR out of draft, thereby triggering the PR checks.
+  pr = repo.create_pull(title=title, body='\n'.join(body), head=branch_name, base=LATEST_RELEASE_BRANCH, draft=True)
   print('Created PR #' + str(pr.number))

   # Assign the conductor
@@ -123,7 +126,7 @@ def get_commit_difference(repo):

 # Is the given commit the automatic merge commit from when merging a PR
 def is_pr_merge_commit(commit):
-  return commit.committer.login == 'web-flow' and len(commit.parents) > 1
+  return commit.committer is not None and commit.committer.login == 'web-flow' and len(commit.parents) > 1

 # Gets a copy of the commit message that should display nicely
 def get_truncated_commit_message(commit):
60 .github/workflows/__go-custom-queries.yml (generated, vendored, new file)

@@ -0,0 +1,60 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Custom queries'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  go-custom-queries:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    name: 'Go: Custom queries'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
        with:
          languages: go
          config-file: ./.github/codeql/custom-queries.yml
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
63 .github/workflows/__go-custom-tracing-autobuild.yml (generated, vendored, new file)

@@ -0,0 +1,63 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Autobuild custom tracing'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  go-custom-tracing-autobuild:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
    name: 'Go: Autobuild custom tracing'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
        with:
          languages: go
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/autobuild
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
      - shell: bash
        run: |
          cd "$RUNNER_TEMP/codeql_databases"
          if [[ ! -d go ]]; then
            echo "Did not find a Go database"
            exit 1
          fi
    env:
      CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
61 .github/workflows/__go-custom-tracing.yml (generated, vendored, new file)

@@ -0,0 +1,61 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Go: Custom tracing'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  go-custom-tracing:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    name: 'Go: Custom tracing'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v2
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
        with:
          languages: go
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: go build main.go
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
    env:
      CODEQL_EXTRACTOR_GO_BUILD_TRACING: 'true'
59 .github/workflows/__javascript-source-root.yml (generated, vendored, new file)

@@ -0,0 +1,59 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Custom source root
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  javascript-source-root:
    strategy:
      matrix:
        version: [latest, cached, nightly-latest] # This feature is not compatible with old CLIs
        os: [ubuntu-latest]
    name: Custom source root
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - name: Move codeql-action
        shell: bash
        run: |
          mkdir ../new-source-root
          mv * ../new-source-root
      - uses: ./../action/init
        with:
          languages: javascript
          source-root: ../new-source-root
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/analyze
        with:
          skip-queries: true
          upload: false
      - name: Assert database exists
        shell: bash
        run: |
          cd "$RUNNER_TEMP/codeql_databases"
          if [[ ! -d javascript ]]; then
            echo "Did not find a JavaScript database"
            exit 1
          fi
86 .github/workflows/__multi-language-autodetect.yml (generated, vendored, new file)

@@ -0,0 +1,86 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Multi-language repository
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  multi-language-autodetect:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest, macos-latest]
    name: Multi-language repository
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          db-location: ${{ runner.temp }}/customDbLocation
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        id: analysis
        env:
          TEST_MODE: true
      - shell: bash
        run: |
          CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
          if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for CPP, or created it in the wrong location."
            exit 1
          fi
          CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
          if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for C Sharp, or created it in the wrong location."
            exit 1
          fi
          GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
          if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Go, or created it in the wrong location."
            exit 1
          fi
          JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
          if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Java, or created it in the wrong location."
            exit 1
          fi
          JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
          if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Javascript, or created it in the wrong location."
            exit 1
          fi
          PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
          if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
            echo "Did not create a database for Python, or created it in the wrong location."
            exit 1
          fi
65 .github/workflows/__packaging-config-inputs-js.yml (generated, vendored, new file)

@@ -0,0 +1,65 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Packaging: Config and input'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  packaging-config-inputs-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: 'Packaging: Config and input'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging3.yml
          packs: +dsp-testing/codeql-pack1@0.1.0
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
64 .github/workflows/__packaging-config-js.yml (generated, vendored, new file)

@@ -0,0 +1,64 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Packaging: Config file'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  packaging-config-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: 'Packaging: Config file'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging.yml
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
65 .github/workflows/__packaging-inputs-js.yml (generated, vendored, new file)

@@ -0,0 +1,65 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: 'PR Check - Packaging: Action input'
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  packaging-inputs-js:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: 'Packaging: Action input'
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging2.yml
          languages: javascript
          packs: dsp-testing/codeql-pack1@0.1.0, dsp-testing/codeql-pack2
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
58 .github/workflows/__remote-config.yml (generated, vendored, new file)

@@ -0,0 +1,58 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Remote config file
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  remote-config:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    name: Remote config file
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
          languages: cpp,csharp,java,javascript,python
          config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
            github.sha }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
65 .github/workflows/__rubocop-multi-language.yml (generated, vendored, new file)

@@ -0,0 +1,65 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - RuboCop multi-language
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  rubocop-multi-language:
    strategy:
      matrix:
        version:
          - stable-20201028
          - stable-20210319
          - stable-20210809
          - cached
          - latest
          - nightly-latest
        os: [ubuntu-latest]
    name: RuboCop multi-language
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - name: Set up Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 2.6
      - name: Install Code Scanning integration
        shell: bash
        run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
      - name: Install dependencies
        shell: bash
        run: bundle install
      - name: RuboCop run
        shell: bash
        run: |
          bash -c "
            bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
            [[ $? -ne 2 ]]
          "
      - uses: ./../action/upload-sarif
        with:
          sarif_file: rubocop.sarif
        env:
          TEST_MODE: true
78 .github/workflows/__split-workflow.yml (generated, vendored, new file)

@@ -0,0 +1,78 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Split workflow
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  split-workflow:
    strategy:
      matrix:
        version: [nightly-20210831] # This CLI version is known to work with package used in this test
        os: [ubuntu-latest, macos-latest]
    name: Split workflow
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging3.yml
          packs: +dsp-testing/codeql-pack1@0.1.0
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        with:
          skip-queries: true
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert No Results
        shell: bash
        run: |
          if [ "$(ls -A $RUNNER_TEMP/results)" ]; then
            echo "Expected results directory to be empty after skipping query execution!"
            exit 1
          fi
      - uses: ./../action/analyze
        with:
          output: ${{ runner.temp }}/results
        env:
          TEST_MODE: true
      - name: Assert Results
        shell: bash
        run: |
          cd "$RUNNER_TEMP/results"
          # We should have 3 hits from these rules
          EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"

          # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
          RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
          echo "Found matching rules '$RULES'"
          if [ "$RULES" != "$EXPECTED_RULES" ]; then
            echo "Did not match expected rules '$EXPECTED_RULES'."
            exit 1
          fi
52 .github/workflows/__test-local-codeql.yml (generated, vendored, new file)

@@ -0,0 +1,52 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Local CodeQL bundle
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  test-local-codeql:
    strategy:
      matrix:
        version: [nightly-latest]
        os: [ubuntu-latest]
    name: Local CodeQL bundle
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - name: Fetch a CodeQL bundle
        shell: bash
        env:
          CODEQL_URL: ${{ steps.prepare-test.outputs.tools-url }}
        run: |
          wget "$CODEQL_URL"
      - uses: ./../action/init
        with:
          tools: ./codeql-bundle.tar.gz
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
54 .github/workflows/__test-proxy.yml (generated, vendored, new file)

@@ -0,0 +1,54 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Proxy test
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  test-proxy:
    strategy:
      matrix:
        version: [latest]
        os: [ubuntu-latest]
    name: Proxy test
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/analyze
        env:
          TEST_MODE: true
    env:
      https_proxy: http://squid-proxy:3128
    container:
      image: ubuntu:18.04
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: datadog/squid:latest
        ports:
          - 3128:3128
55 .github/workflows/__test-ruby.yml (generated, vendored, new file)

@@ -0,0 +1,55 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Ruby analysis
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - v1
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  workflow_dispatch: {}
jobs:
  test-ruby:
    strategy:
      matrix:
        version: [latest, cached, nightly-latest]
        os: [ubuntu-latest, macos-latest]
    name: Ruby analysis
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v2
      - name: Prepare test
        id: prepare-test
        uses: ./.github/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: ./../action/init
        with:
          languages: ruby
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - uses: ./../action/analyze
        id: analysis
        env:
          TEST_MODE: true
      - name: Check database
        shell: bash
        run: |
          RUBY_DB="${{ fromJson(steps.analysis.outputs.db-locations).ruby }}"
          if [[ ! -d "$RUBY_DB" ]]; then
            echo "Did not create a database for Ruby."
            exit 1
          fi
    env:
      CODEQL_ENABLE_EXPERIMENTAL_FEATURES: 'true'
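Several of the checks above consume the analyze step's `db-locations` output, a JSON object mapping each analyzed language to its database path, via `fromJson()` in workflow expressions. The sketch below reproduces the same check in plain shell for local experimentation; `DB_LOCATIONS` is an assumed variable carrying that JSON, and the path shown is only an example.

```bash
# Parse the db-locations JSON with jq and verify the Ruby database directory exists,
# mirroring the "Check database" step above outside of a workflow expression.
DB_LOCATIONS='{"ruby": "/home/runner/work/_temp/codeql_databases/ruby"}'
RUBY_DB=$(echo "$DB_LOCATIONS" | jq -r '.ruby')
if [[ ! -d "$RUBY_DB" ]]; then
  echo "Did not create a database for Ruby."
  exit 1
fi
```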
(.github/workflows/check-expected-release-files.yml)

@@ -5,6 +5,9 @@ on:
     paths:
       - .github/workflows/check-expected-release-files.yml
       - src/defaults.json
+    # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
+    # by other workflows.
+    types: [opened, synchronize, reopened, ready_for_review]

 jobs:
   check-expected-release-files:
19 .github/workflows/codeql.yml (vendored)

@@ -5,6 +5,9 @@ on:
     branches: [main, v1]
   pull_request:
     branches: [main, v1]
+    # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
+    # by other workflows.
+    types: [opened, synchronize, reopened, ready_for_review]

 jobs:
   # Identify the CodeQL tool versions to use in the analysis job.
@@ -14,8 +17,6 @@ jobs:
       versions: ${{ steps.compare.outputs.versions }}

     permissions:
-      actions: read
-      contents: read
       security-events: write

     steps:
@@ -45,13 +46,19 @@ jobs:
           CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
           echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
           echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
-          if [[ "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
-            # Just use `tools: null` to avoid duplication in the analysis job.
+          # If we're running on a pull request, run with both bundles, even if `tools: latest` would
+          # be the same as `tools: null`. This allows us to make the job for each of the bundles a
+          # required status check.
+          #
+          # If we're running on push, then we can skip running with `tools: latest` when it would be
+          # the same as running with `tools: null`.
+          if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
            VERSIONS_JSON='[null]'
           else
-            # Use both `tools: null` and `tools: latest` in the analysis job.
            VERSIONS_JSON='[null, "latest"]'
           fi
+
           # Output a JSON-encoded list with the distinct versions to test against.
           echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
           echo "::set-output name=versions::${VERSIONS_JSON}"
@@ -65,8 +72,6 @@ jobs:
     runs-on: ${{ matrix.os }}

     permissions:
-      actions: read
-      contents: read
       security-events: write

     steps:
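The matrix-selection logic added above can be condensed for inspection outside of Actions. In this sketch, `GITHUB_EVENT_NAME`, `CODEQL_VERSION_DEFAULT`, and `CODEQL_VERSION_LATEST` are assumed to be set the same way the workflow step sets them.

```bash
# Pull requests always test both bundles so each job can be a required status check;
# pushes skip `tools: latest` when it would duplicate `tools: null`.
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
  VERSIONS_JSON='[null]'
else
  VERSIONS_JSON='[null, "latest"]'
fi
echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
```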
.github/workflows/post-release-mergeback.yml (vendored): 42 changed lines

@@ -16,10 +16,6 @@ on:
     branches:
       - v1

-  pull_request:
-    paths:
-      - .github/workflows/post-release-mergeback.yml
-
 jobs:
   merge-back:
     runs-on: ubuntu-latest
@@ -27,14 +23,12 @@ jobs:
     env:
       BASE_BRANCH: "${{ github.event.inputs.baseBranch || 'main' }}"
       HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
-      # Would like to use the github/codeql-core team, but that is not working
-      DEFAULT_REVIEWER: "aeisenberg"

     steps:
-      - name: Dump GitHub context
+      - name: Dump GitHub Event context
         env:
-          GITHUB_CONTEXT: "${{ toJson(github) }}"
-        run: echo "$GITHUB_CONTEXT"
+          GITHUB_EVENT_CONTEXT: "${{ toJson(github.event) }}"
+        run: echo "$GITHUB_EVENT_CONTEXT"

       - uses: actions/checkout@v2
       - uses: actions/setup-node@v2
@@ -76,24 +70,27 @@ jobs:
           set +e # don't fail on an errored command
           git ls-remote --tags origin | grep "$VERSION"
           EXISTS="$?"
-          if [ "$EXISTS" -ne 0 ]; then
-            echo "::set-output name=exists::true"
+          if [ "$EXISTS" -eq 0 ]; then
             echo "Tag $TAG exists. Not going to re-release."
+            echo "::set-output name=exists::true"
+          else
+            echo "Tag $TAG does not exist yet."
           fi

       # we didn't tag the release during the update-release-branch workflow because the
       # commit that actually makes it to the release branch is a merge commit,
       # and not yet known during the first workflow. We tag now because we know the correct commit.
       - name: Tag release
-        if: steps.check.outputs.exists == 'true'
+        if: steps.check.outputs.exists != 'true'
         env:
           VERSION: ${{ steps.getVersion.outputs.version }}
         run: |
           git tag -a "$VERSION" -m "$VERSION"
+          git fetch --unshallow # unshallow the repo in order to allow pushes
           git push origin --follow-tags "$VERSION"

       - name: Create mergeback branch
-        if: steps.check.outputs.exists == 'true'
+        if: steps.check.outputs.exists != 'true'
         env:
           VERSION: "${{ steps.getVersion.outputs.version }}"
           NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
@@ -104,24 +101,19 @@ jobs:
           PR_BODY="Updates version and changelog."

           # Update the changelog
-          perl -i -pe 's/^/## \[UNRELEASED\]\n\n/ if($.==3)' CHANGELOG.md
+          perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
           git add .
           git commit -m "Update changelog and version after $VERSION"
           npm version patch

-          # when running this workflow on a PR, this is just a test.
-          # so put into draft mode.
-          if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
-            DRAFT="--draft"
-          else
-            DRAFT=""
-          fi
-
           git push origin "$NEW_BRANCH"

+          # PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
+          # so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
           gh pr create \
             --head "$NEW_BRANCH" \
             --base "$BASE_BRANCH" \
             --title "$PR_TITLE" \
+            --label "Update dependencies" \
             --body "$PR_BODY" \
-            --reviewer "$DEFAULT_REVIEWER" \
-            "$DRAFT"
+            --draft
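The inverted conditions above (`-eq` in the tag check, `!= 'true'` on the gated steps) are the substance of this fix: tagging and the mergeback branch should only happen when the tag does not exist yet. A minimal sketch of the corrected check that can be run outside of Actions; the version string and the `origin` remote are assumptions for illustration:

```bash
#!/bin/bash
# Check whether a release tag already exists on the remote before re-tagging.
VERSION="v1.0.14"   # placeholder version for illustration

set +e  # grep returns non-zero when the tag is absent; don't fail the script
git ls-remote --tags origin | grep "$VERSION"
EXISTS="$?"
set -e

if [ "$EXISTS" -eq 0 ]; then
  echo "Tag $VERSION exists. Not going to re-release."
else
  echo "Tag $VERSION does not exist yet."
  git tag -a "$VERSION" -m "$VERSION"
  git push origin --follow-tags "$VERSION"
fi
```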
.github/workflows/pr-checks.yml (vendored): 778 changed lines

@@ -1,652 +1,390 @@
-name: "PR checks"
+name: PR Checks (Basic Checks and Runner)

-env:
-  GO111MODULE: auto
-
 on:
   push:
     branches: [main, v1]
   pull_request:
+    # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
+    # by other workflows.
+    types: [opened, synchronize, reopened, ready_for_review]
+  workflow_dispatch:

 jobs:
   lint-js:
+    name: Lint
     runs-on: ubuntu-latest

     steps:
       - uses: actions/checkout@v2
       - name: Run Lint
         run: npm run-script lint

   check-js:
     runs-on: ubuntu-latest

     steps:
       - uses: actions/checkout@v2
-      - name: Check generated JavaScript
+      - name: Check generated JS
         run: .github/workflows/script/check-js.sh

   check-node-modules:
+    name: Check modules up to date
+    runs-on: macos-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Check node modules up to date
+        run: .github/workflows/script/check-node-modules.sh
+
+  verify-pr-checks:
+    name: Verify PR checks up to date
     runs-on: ubuntu-latest

     steps:
       - uses: actions/checkout@v2
-      - name: Check node modules up to date
-        run: .github/workflows/script/check-node-modules.sh
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install ruamel.yaml
+      - name: Verify PR checks up to date
+        run: .github/workflows/script/verify-pr-checks.sh

   npm-test:
+    name: Unit Test
     needs: [check-js, check-node-modules]
     strategy:
       matrix:
-        os: [ubuntu-latest,macos-latest]
+        os: [ubuntu-latest, macos-latest]
     runs-on: ${{ matrix.os }}

     steps:
       - uses: actions/checkout@v2
       - name: npm run-script test
         run: npm run-script test

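The new `verify-pr-checks` job fails whenever the generated workflow files are stale. A sketch of regenerating them locally, mirroring the job's steps (assumes Python 3 is available and that this is run from the repository root):

```bash
#!/bin/bash
# Regenerate the generated PR-check workflows, then confirm nothing is stale.
python3 -m pip install --upgrade pip
pip install ruamel.yaml            # dependency used by the sync script
(cd pr-checks && python3 sync.py)

# A dirty working tree here means the checked-in workflows need updating.
git status --porcelain -- .github/workflows
```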
multi-language-repo_test-autodetect-languages:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
db-location: "${{ runner.temp }}/customDbLocation"
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
- run: |
|
|
||||||
cd "$RUNNER_TEMP/customDbLocation"
|
|
||||||
# List all directories as there will be precisely one directory per database
|
|
||||||
# but there may be other files in this directory such as query suites.
|
|
||||||
if [ "$(ls -d */ | wc -l)" != 6 ] || \
|
|
||||||
[[ ! -d cpp ]] || \
|
|
||||||
[[ ! -d csharp ]] || \
|
|
||||||
[[ ! -d go ]] || \
|
|
||||||
[[ ! -d java ]] || \
|
|
||||||
[[ ! -d javascript ]] || \
|
|
||||||
[[ ! -d python ]]; then
|
|
||||||
echo "Did not find expected number of databases. Database dir contains: $(ls)"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Identify the CodeQL tool versions to integration test against.
|
|
||||||
check-codeql-versions:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
versions: ${{ steps.compare.outputs.versions }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- name: Init with default CodeQL bundle from the VM image
|
|
||||||
id: init-default
|
|
||||||
uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: javascript
|
|
||||||
- name: Remove empty database
|
|
||||||
# allows us to run init a second time
|
|
||||||
run: |
|
|
||||||
rm -rf "$RUNNER_TEMP/codeql_databases"
|
|
||||||
- name: Init with latest CodeQL bundle
|
|
||||||
id: init-latest
|
|
||||||
uses: ./../action/init
|
|
||||||
with:
|
|
||||||
tools: latest
|
|
||||||
languages: javascript
|
|
||||||
- name: Compare default and latest CodeQL bundle versions
|
|
||||||
id: compare
|
|
||||||
env:
|
|
||||||
CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
|
|
||||||
CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
|
|
||||||
run: |
|
|
||||||
CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
|
|
||||||
CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
|
|
||||||
echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
|
|
||||||
echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
|
|
||||||
if [[ "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
|
|
||||||
# Just use `tools: null` to avoid duplication in the integration tests.
|
|
||||||
VERSIONS_JSON='[null]'
|
|
||||||
else
|
|
||||||
# Use both `tools: null` and `tools: latest` in the integration tests.
|
|
||||||
VERSIONS_JSON='[null, "latest"]'
|
|
||||||
fi
|
|
||||||
# Output a JSON-encoded list with the distinct versions to test against.
|
|
||||||
echo "Suggested matrix config for integration tests: $VERSIONS_JSON"
|
|
||||||
echo "::set-output name=versions::${VERSIONS_JSON}"
|
|
||||||
|
|
||||||
multi-language-repo_test-custom-queries-and-remote-config:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
languages: cpp,csharp,java,javascript,python
|
|
||||||
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
# Currently is not possible to analyze Go in conjunction with other languages in macos
|
|
||||||
multi-language-repo_test-go-custom-queries:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-go@v2
|
|
||||||
if: ${{ matrix.os == 'macos-latest' }}
|
|
||||||
with:
|
|
||||||
go-version: '^1.13.1'
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
config-file: ./.github/codeql/custom-queries.yml
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: ./build.sh
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
go-custom-tracing:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
env:
|
|
||||||
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-go@v2
|
|
||||||
if: ${{ matrix.os == 'macos-latest' }}
|
|
||||||
with:
|
|
||||||
go-version: '^1.13.1'
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- name: Build code
|
|
||||||
shell: bash
|
|
||||||
run: go build main.go
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
go-custom-tracing-autobuild:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
# No need to test Go autobuild on multiple OSes since
|
|
||||||
# we're testing Go custom tracing with a manual build on all OSes.
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: go
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- uses: ./../action/autobuild
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
- run: |
|
|
||||||
cd "$RUNNER_TEMP/codeql_databases"
|
|
||||||
if [[ ! -d go ]]; then
|
|
||||||
echo "Did not find a Go database"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
multi-language-repo_rubocop:
|
|
||||||
needs: [check-js, check-node-modules]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- name: Set up Ruby
|
|
||||||
uses: ruby/setup-ruby@v1
|
|
||||||
with:
|
|
||||||
ruby-version: 2.6
|
|
||||||
- name: Install Code Scanning integration
|
|
||||||
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
|
|
||||||
- name: Install dependencies
|
|
||||||
run: bundle install
|
|
||||||
- name: Rubocop run
|
|
||||||
run: |
|
|
||||||
bash -c "
|
|
||||||
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
|
|
||||||
[[ $? -ne 2 ]]
|
|
||||||
"
|
|
||||||
- uses: ./../action/upload-sarif
|
|
||||||
with:
|
|
||||||
sarif_file: rubocop.sarif
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
test-proxy:
|
|
||||||
needs: [check-js, check-node-modules, check-codeql-versions]
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
container:
|
|
||||||
image: ubuntu:18.04
|
|
||||||
options: --dns 127.0.0.1
|
|
||||||
services:
|
|
||||||
squid-proxy:
|
|
||||||
image: datadog/squid:latest
|
|
||||||
ports:
|
|
||||||
- 3128:3128
|
|
||||||
env:
|
|
||||||
https_proxy: http://squid-proxy:3128
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- name: Move codeql-action
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ../action
|
|
||||||
mv * .github ../action/
|
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
|
||||||
mv ../action/.github/workflows .github
|
|
||||||
- uses: ./../action/init
|
|
||||||
with:
|
|
||||||
languages: javascript
|
|
||||||
tools: ${{ matrix.tools }}
|
|
||||||
- uses: ./../action/analyze
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
runner-analyze-javascript-ubuntu:
|
runner-analyze-javascript-ubuntu:
|
||||||
|
name: Runner ubuntu JS analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd runner
|
cd runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
# Pass --config-file here, but not for other jobs in this workflow.
|
# Pass --config-file here, but not for other jobs in this workflow.
|
||||||
# This means we're testing the config file parsing in the runner
|
# This means we're testing the config file parsing in the runner
|
||||||
# but not slowing down all jobs unnecessarily as it doesn't add much
|
# but not slowing down all jobs unnecessarily as it doesn't add much
|
||||||
# testing the parsing on different operating systems and languages.
|
# testing the parsing on different operating systems and languages.
|
||||||
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-javascript-windows:
|
runner-analyze-javascript-windows:
|
||||||
|
name: Runner windows JS analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd runner
|
cd runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-javascript-macos:
|
runner-analyze-javascript-macos:
|
||||||
|
name: Runner macos JS analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd runner
|
cd runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-csharp-ubuntu:
|
runner-analyze-csharp-ubuntu:
|
||||||
|
name: Runner ubuntu C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
mkdir ../action
|
mkdir ../action
|
||||||
mv * .github ../action/
|
mv * .github ../action/
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
mv ../action/.github/workflows .github
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd ../action/runner
|
cd ../action/runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Build code
|
- name: Build code
|
||||||
run: |
|
run: |
|
||||||
. ./codeql-runner/codeql-env.sh
|
. ./codeql-runner/codeql-env.sh
|
||||||
$CODEQL_RUNNER dotnet build
|
$CODEQL_RUNNER dotnet build
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-csharp-windows:
|
runner-analyze-csharp-windows:
|
||||||
|
name: Runner windows C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
mkdir ../action
|
mkdir ../action
|
||||||
mv * .github ../action/
|
mv * .github ../action/
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
mv ../action/.github/workflows .github
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd ../action/runner
|
cd ../action/runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Build code
|
- name: Build code
|
||||||
shell: powershell
|
shell: powershell
|
||||||
# Note we want to make sure that the .win32env file is read correctly, so we unset the CODEQL_EXTRACTOR_CSHARP_ROOT from the .sh file.
|
# Note we want to make sure that the .win32env file is read correctly, so we unset the CODEQL_EXTRACTOR_CSHARP_ROOT from the .sh file.
|
||||||
run: |
|
run: |
|
||||||
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
|
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
|
||||||
$Env:CODEQL_EXTRACTOR_CSHARP_ROOT = ""
|
$Env:CODEQL_EXTRACTOR_CSHARP_ROOT = ""
|
||||||
& $Env:CODEQL_RUNNER dotnet build
|
& $Env:CODEQL_RUNNER dotnet build
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-csharp-macos:
|
runner-analyze-csharp-macos:
|
||||||
|
name: Runner macos C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
mkdir ../action
|
mkdir ../action
|
||||||
mv * .github ../action/
|
mv * .github ../action/
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
mv ../action/.github/workflows .github
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd ../action/runner
|
cd ../action/runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Build code
|
- name: Build code
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
. ./codeql-runner/codeql-env.sh
|
. ./codeql-runner/codeql-env.sh
|
||||||
$CODEQL_RUNNER dotnet build
|
$CODEQL_RUNNER dotnet build
|
||||||
|
|
||||||
- name: Run analyze
|
|
||||||
run: |
|
|
||||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
|
||||||
env:
|
|
||||||
TEST_MODE: true
|
|
||||||
|
|
||||||
|
- name: Run analyze
|
||||||
|
run: |
|
||||||
|
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-ubuntu:
|
runner-analyze-csharp-autobuild-ubuntu:
|
||||||
|
name: Runner ubuntu autobuild C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
mkdir ../action
|
mkdir ../action
|
||||||
mv * .github ../action/
|
mv * .github ../action/
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
mv ../action/.github/workflows .github
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd ../action/runner
|
cd ../action/runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Build code
|
- name: Build code
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-linux autobuild
|
../action/runner/dist/codeql-runner-linux autobuild
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-windows:
|
runner-analyze-csharp-autobuild-windows:
|
||||||
|
name: Runner windows autobuild C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
mkdir ../action
|
mkdir ../action
|
||||||
mv * .github ../action/
|
mv * .github ../action/
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
mv ../action/.github/workflows .github
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd ../action/runner
|
cd ../action/runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Build code
|
- name: Build code
|
||||||
shell: powershell
|
shell: powershell
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-win.exe autobuild
|
../action/runner/dist/codeql-runner-win.exe autobuild
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-macos:
|
runner-analyze-csharp-autobuild-macos:
|
||||||
|
name: Runner macos autobuild C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
mkdir ../action
|
mkdir ../action
|
||||||
mv * .github ../action/
|
mv * .github ../action/
|
||||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||||
mv ../action/.github/workflows .github
|
mv ../action/.github/workflows .github
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd ../action/runner
|
cd ../action/runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Run init
|
- name: Run init
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
|
|
||||||
- name: Build code
|
- name: Build code
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-macos autobuild
|
../action/runner/dist/codeql-runner-macos autobuild
|
||||||
|
|
||||||
- name: Run analyze
|
- name: Run analyze
|
||||||
run: |
|
run: |
|
||||||
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
env:
|
env:
|
||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-upload-sarif:
|
runner-upload-sarif:
|
||||||
|
name: Runner upload sarif
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
cd runner
|
cd runner
|
||||||
npm install
|
npm install
|
||||||
npm run build-runner
|
npm run build-runner
|
||||||
|
|
||||||
- name: Upload with runner
|
- name: Upload with runner
|
||||||
run: |
|
run: |
|
||||||
# Deliberately don't use TEST_MODE here. This is specifically testing
|
# Deliberately don't use TEST_MODE here. This is specifically testing
|
||||||
# the compatibility with the API.
|
# the compatibility with the API.
|
||||||
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
|
||||||
.github/workflows/python-deps.yml (vendored): 3 changed lines

@@ -4,6 +4,9 @@ on:
   push:
     branches: [main, v1]
   pull_request:
+    # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
+    # by other workflows.
+    types: [opened, synchronize, reopened, ready_for_review]

 jobs:
   test-setup-python-scripts:
@@ -7,6 +7,7 @@ if [ ! -z "$(git status --porcelain)" ]; then
   >&2 echo "Failed: Repo should be clean before testing!"
   exit 1
 fi
+sudo npm install --force -g npm@latest
 # Reinstall modules and then clean to remove absolute paths
 # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
 npm ci
@@ -14,7 +15,7 @@ npm run removeNPMAbsolutePaths
 # Check that repo is still clean
 if [ ! -z "$(git status --porcelain)" ]; then
   # If we get a fail here then the PR needs attention
-  >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
+  >&2 echo "Failed: node_modules are not up to date. Run 'npm ci && npm run removeNPMAbsolutePaths' on a macOS machine to update. Note it is important this command is run on macOS and not any other operating system as there is one dependency (fsevents) that is needed for macOS and may not be installed if the command is run on a Windows or Linux machine."
   git status
   exit 1
 fi
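The updated failure message spells out the recovery path. A sketch of refreshing the checked-in modules locally, which per this change should be done on a macOS machine so that the `fsevents` dependency is installed:

```bash
# Run on macOS from the repository root.
sudo npm install --force -g npm@latest   # match the npm version the CI script installs
npm ci
npm run removeNPMAbsolutePaths
git status --porcelain                   # empty output means node_modules is up to date
```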
.github/workflows/script/verify-pr-checks.sh (vendored, new executable file): 25 lines

@@ -0,0 +1,25 @@
+#!/bin/bash
+set -eu
+
+# Sanity check that repo is clean to start with
+if [ ! -z "$(git status --porcelain)" ]; then
+  # If we get a fail here then this workflow needs attention...
+  >&2 echo "Failed: Repo should be clean before testing!"
+  exit 1
+fi
+
+# Wipe the generated PR checks in case there are extra unnecessary files in there
+rm -rf .github/workflows/__*
+
+# Generate the PR checks
+cd pr-checks && python3 sync.py
+
+# Check that repo is still clean
+if [ ! -z "$(git status --porcelain)" ]; then
+  # If we get a fail here then the PR needs attention
+  git diff
+  git status
+  >&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && python3 sync.py' to update"
+  exit 1
+fi
+echo "Success: PR checks are up to date"
.github/workflows/update-dependencies.yml (vendored, new file): 39 lines

@@ -0,0 +1,39 @@
+name: Update dependencies
+on:
+  pull_request_target:
+    types: [opened, synchronize, reopened, ready_for_review, labeled]
+
+jobs:
+  update:
+    name: Update dependencies
+    runs-on: macos-latest
+    if: contains(github.event.pull_request.labels.*.name, 'Update dependencies')
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      - name: Remove PR label
+        env:
+          REPOSITORY: '${{ github.repository }}'
+          PR_NUMBER: '${{ github.event.pull_request.number }}'
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
+        run: |
+          gh api "repos/$REPOSITORY/issues/$PR_NUMBER/labels/Update%20dependencies" -X DELETE
+
+      - name: Push updated dependencies
+        env:
+          BRANCH: '${{ github.head_ref }}'
+        run: |
+          git fetch
+          git checkout $BRANCH
+          sudo npm install --force -g npm@latest
+          npm install
+          npm ci
+          npm run removeNPMAbsolutePaths
+          if [ ! -z "$(git status --porcelain)" ]; then
+            git config --global user.email "github-actions@github.com"
+            git config --global user.name "github-actions[bot]"
+            git add node_modules
+            git commit -am "Update checked-in dependencies"
+            git push origin "$BRANCH"
+          fi
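Because the `update` job is gated on the `Update dependencies` label, a maintainer triggers it by labelling the pull request. One way to do that from the command line, assuming the GitHub CLI is installed and authenticated; the PR number is a placeholder:

```bash
# Apply the label that update-dependencies.yml listens for.
gh pr edit 123 --add-label "Update dependencies"

# The workflow deletes the label when it runs, so adding it again later
# re-triggers the dependency update.
```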
.github/workflows/update-release-branch.yml (vendored): 2 changed lines

@@ -27,7 +27,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install PyGithub==1.51 requests
+          pip install PyGithub==1.55 requests

       - name: Update git config
         run: |
@@ -7,6 +7,7 @@ on:
 jobs:
   update-supported-enterprise-server-versions:
     runs-on: ubuntu-latest
+    if: ${{ github.repository == 'github/codeql-action' }}

     steps:
       - name: Setup Python
CHANGELOG.md: 69 changed lines

@@ -1,6 +1,71 @@
 # CodeQL Action and CodeQL Runner Changelog

-## [UNRELEASED]
+## 1.0.14 - 09 Sep 2021

+- Update default CodeQL bundle version to 2.6.1. [#733](https://github.com/github/codeql-action/pull/733)
+
+## 1.0.13 - 06 Sep 2021
+
+- Update default CodeQL bundle version to 2.6.0. [#712](https://github.com/github/codeql-action/pull/712)
+- Update baseline lines of code counter for python. All multi-line strings are counted as code. [#714](https://github.com/github/codeql-action/pull/714)
+- Remove old baseline LoC injection [#715](https://github.com/github/codeql-action/pull/715)
+
+## 1.0.12 - 16 Aug 2021
+
+- Update README to include a sample permissions block. [#689](https://github.com/github/codeql-action/pull/689)
+
+## 1.0.11 - 09 Aug 2021
+
+- Update default CodeQL bundle version to 2.5.9. [#687](https://github.com/github/codeql-action/pull/687)
+
+## 1.0.10 - 03 Aug 2021
+
+- Fix an issue where a summary of diagnostics information from CodeQL was not output to the logs of the `analyze` step of the Action. [#672](https://github.com/github/codeql-action/pull/672)
+
+## 1.0.9 - 02 Aug 2021
+
+No user facing changes.
+
+## 1.0.8 - 26 Jul 2021
+
+- Update default CodeQL bundle version to 2.5.8. [#631](https://github.com/github/codeql-action/pull/631)
+
+## 1.0.7 - 21 Jul 2021
+
+No user facing changes.
+
+## 1.0.6 - 19 Jul 2021
+
+- The `init` step of the Action now supports a `source-root` input as a path to the root source-code directory. By default, the path is relative to `$GITHUB_WORKSPACE`. [#607](https://github.com/github/codeql-action/pull/607)
+- The `init` step will now try to install a few Python tools needed by this Action when running on a self-hosted runner. [#616](https://github.com/github/codeql-action/pull/616)
+
+## 1.0.5 - 12 Jul 2021
+
+- The `analyze` step of the Action now supports a `skip-queries` option to merely build the CodeQL database without analyzing. This functionality is not present in the runner. Additionally, the step will no longer fail if it encounters a finalized database, and will instead continue with query execution. [#602](https://github.com/github/codeql-action/pull/602)
+- Update the warning message when the baseline lines of code count is unavailable. [#608](https://github.com/github/codeql-action/pull/608)
+
+## 1.0.4 - 28 Jun 2021
+
+- Fix `RUNNER_TEMP environment variable must be set` when using runner. [#594](https://github.com/github/codeql-action/pull/594)
+- Fix couting of lines of code for C# projects. [#586](https://github.com/github/codeql-action/pull/586)
+
+## 1.0.3 - 23 Jun 2021
+
+No user facing changes.
+
+## 1.0.2 - 17 Jun 2021
+
+- Fix out of memory in hash computation. [#550](https://github.com/github/codeql-action/pull/550)
+- Clean up logging during analyze results. [#557](https://github.com/github/codeql-action/pull/557)
+- Add `--finalize-dataset` to `database finalize` call, freeing up some disk space after database creation. [#558](https://github.com/github/codeql-action/pull/558)
+
+## 1.0.1 - 07 Jun 2021
+
+- Pass the `--sarif-group-rules-by-pack` argument to CodeQL CLI invocations that generate SARIF. This means the SARIF rule object for each query will now be found underneath its corresponding query pack in `runs[].tool.extensions`. [#546](https://github.com/github/codeql-action/pull/546)
+- Output the location of CodeQL databases created in the analyze step. [#543](https://github.com/github/codeql-action/pull/543)
+
+## 1.0.0 - 31 May 2021
+
 - Add this changelog file. [#507](https://github.com/github/codeql-action/pull/507)
 - Improve grouping of analysis logs. Add a new log group containing a summary of metrics and diagnostics, if they were produced by CodeQL builtin queries. [#515](https://github.com/github/codeql-action/pull/515)
+- Add metrics and diagnostics summaries from custom query suites to the analysis summary log group. [#532](https://github.com/github/codeql-action/pull/532)
CODEOWNERS (new file): 1 line

@@ -0,0 +1 @@
+**/* @github/codeql-action-reviewers
@@ -12,7 +12,7 @@ Please note that this project is released with a [Contributor Code of Conduct][c

 ## Development and Testing

-Before you start, ensure that you have a recent version of node installed. You can see which version of node is used by the action in `init/action.yml`.
+Before you start, ensure that you have a recent version of node (14 or higher) installed, along with a recent version of npm (7 or higher). You can see which version of node is used by the action in `init/action.yml`.

 ### Common tasks

@@ -28,32 +28,12 @@ You may want to run `tsc --watch` from the command line or inside of vscode in o

 Because CodeQL Action users consume the code directly from this repository, and there can be no build step during an GitHub Actions run, this repository contains all compiled artifacts and node modules. There is a PR check that will fail if any of the compiled artifacts are not up to date. Compiled artifacts are stored in the `lib/` directory. For all day-to-day development purposes, this folder can be ignored.

-Only run `npm install` if you are explicitly changing the set of dependencies in `package.json`. The `node_modules` directory should be up to date when you check out, but if for some reason, there is an inconsistency use `npm ci && npm run removeNPMAbsolutePaths` to ensure the directory is in a state consistent with the `package-lock.json`. There is a PR check to ensure the consistency of the `node_modules` directory.
+Only run `npm install` if you are explicitly changing the set of dependencies in `package.json`. The `node_modules` directory should be up to date when you check out, but if for some reason, there is an inconsistency use `npm ci && npm run removeNPMAbsolutePaths` to ensure the directory is in a state consistent with the `package-lock.json`. Note that due to a macOS-specific dependency, this command should be run on a macOS machine. There is a PR check to ensure the consistency of the `node_modules` directory.

 ### Running the action

 To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.

-### Running the action locally
-
-It is possible to run this action locally via [act](https://github.com/nektos/act) via the following steps:
-
-1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
-1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
-1. Add a `.env` file in the root of the project you are running:
-
-   ```bash
-   CODEQL_LOCAL_RUN=true
-   GITHUB_SERVER_URL=https://github.com
-
-   # Optional, for better logging
-   GITHUB_JOB=<ANY_JOB_NAME>
-   ```
-
-1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
-
-Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
-
 ### Integration tests

 As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.
@@ -78,6 +58,20 @@ Here are a few things you can do that will increase the likelihood of your pull
 - Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
 - Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

+## Releasing (write access required)
+
+1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
+   This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.
+
+   A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
+1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
+1. Review the checklist items in the pull request description.
+   Once you've checked off all but the last of these, approve the PR and automerge it.
+1. When the "Merge main into v1" pull request is merged into the `v1` branch, the "Tag release and merge back" workflow will create a mergeback PR.
+   This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v1" pull request, and bumps the patch version of the CodeQL Action.
+
+   Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.
+
 ## Resources

 - [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
README.md: 28 changed lines

@@ -1,6 +1,6 @@
 # CodeQL Action

-This action runs GitHub's industry-leading static analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading semantic code analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.

 For a list of recent changes, see the CodeQL Action's [changelog](CHANGELOG.md).

@@ -22,7 +22,9 @@ name: "Code Scanning - Action"

 on:
   push:
+    branches: [main]
   pull_request:
+    branches: [main]
   schedule:
     #        ┌───────────── minute (0 - 59)
     #        │ ┌───────────── hour (0 - 23)
@@ -40,6 +42,14 @@ jobs:
     # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
     runs-on: ubuntu-latest

+    permissions:
+      # required for all workflows
+      security-events: write
+
+      # only required for workflows in private repositories
+      actions: read
+      contents: read
+
     steps:
       - name: Checkout repository
         uses: actions/checkout@v2
@@ -64,8 +74,8 @@ jobs:
       #    project uses a compiled language

       #- run: |
       #    make bootstrap
       #    make release

       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v1
@@ -81,8 +91,8 @@ If you prefer to integrate this within an existing CI workflow, it should end up

       # Here is where you build your code
       - run: |
          make bootstrap
          make release

       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v1
@@ -128,3 +138,11 @@ By default, this will override any queries specified in a config file. If you wi
 ## Troubleshooting

 Read about [troubleshooting code scanning](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning).
+
+### Note on "missing analysis" message
+
+The very first time code scanning is run and if it is on a pull request, you will probably get a message mentioning a "missing analysis". This is expected.
+
+After code scanning has analyzed the code in a pull request, it needs to compare the analysis of the topic branch (the merge commit of the branch you used to create the pull request) with the analysis of the base branch (the branch into which you want to merge the pull request). This allows code scanning to compute which alerts are newly introduced by the pull request, which alerts were already present in the base branch, and whether any existing alerts are fixed by the changes in the pull request. Initially, if you use a pull request to add code scanning to a repository, the base branch has not yet been analyzed, so it's not possible to compute these details. In this case, when you click through from the results check on the pull request you will see the "Missing analysis for base commit SHA-HASH" message.
+
+For more information and other causes of this message, see [Reasons for the "missing analysis" message](https://docs.github.com/en/code-security/secure-coding/automatically-scanning-your-code-for-vulnerabilities-and-errors/setting-up-code-scanning-for-a-repository#reasons-for-the-missing-analysis-message)
@@ -13,6 +13,10 @@ inputs:
     description: Upload the SARIF file
     required: false
     default: "true"
+  cleanup-level:
+    description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --mode flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
+    required: false
+    default: "brutal"
   ram:
     description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
     required: false
@@ -20,6 +24,10 @@ inputs:
     description: Specify whether or not to add code snippets to the output sarif file.
     required: false
     default: "false"
+  skip-queries:
+    description: If this option is set, the CodeQL database will be built but no queries will be run on it. Thus, no results will be produced.
+    required: false
+    default: "false"
   threads:
     description: The number of threads to be used by CodeQL.
     required: false
@@ -30,10 +38,17 @@ inputs:
   category:
     description: String used by Code Scanning for matching the analyses
     required: false
+  upload-database:
+    description: Whether to upload the resulting CodeQL database
+    required: false
+    default: "true"
   token:
     default: ${{ github.token }}
   matrix:
     default: ${{ toJson(matrix) }}
+outputs:
+  db-locations:
+    description: A map from language to absolute path for each database created by CodeQL.
 runs:
   using: 'node12'
   main: '../lib/analyze-action.js'
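Judging from the `main: '../lib/analyze-action.js'` entry, the hunks above belong to the metadata file of the `analyze` action. As a hedged sketch only (the input names and defaults are taken from the diff above; the specific values are illustrative assumptions, not recommendations), a workflow step could set the new inputs like this:

```yaml
# Hypothetical usage sketch of the new analyze inputs shown in the diff above.
- name: Perform CodeQL Analysis
  uses: github/codeql-action/analyze@v1
  with:
    cleanup-level: none        # skip database cleanup (the default is "brutal")
    skip-queries: "false"      # build the database and run queries (the default)
    upload-database: "false"   # do not upload the resulting CodeQL database (the default is "true")
```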
@@ -22,6 +22,15 @@ inputs:
   queries:
     description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
     required: false
+  packs:
+    description: >-
+      [Experimental] Comma-separated list of packs to run. Reference a pack in the format `scope/name[@version]`. If `version` is not
+      specified, then the latest version of the pack is used. By default, this overrides the same setting in a
+      configuration file; prefix with "+" to use both sets of packs.
+
+      This input is only available in single-language analyses. To use packs in multi-language
+      analyses, you must specify packs in the codeql-config.yml file.
+    required: false
   external-repository-token:
     description: A token for fetching external config files and queries if they reside in a private repository.
     required: false
@@ -29,6 +38,9 @@ inputs:
     description: Try to auto-install your python dependencies
     required: true
     default: 'true'
+  source-root:
+    description: Path of the root source code directory, relative to $GITHUB_WORKSPACE.
+    required: false
 outputs:
   codeql-path:
     description: The path of the CodeQL binary used for analysis
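The `codeql-path` output and the `queries`/`packs` inputs suggest that these hunks belong to the metadata file of the `init` action. A hedged sketch of how the new `packs` and `source-root` inputs might be passed from a workflow follows; the pack reference and the path below are made-up placeholders, not real values.

```yaml
# Hypothetical usage sketch of the new init inputs shown in the diff above.
- name: Initialize CodeQL
  uses: github/codeql-action/init@v1
  with:
    languages: javascript
    # "codeql/example-pack@1.0.0" is a placeholder pack reference in the scope/name[@version] format;
    # the leading "+" adds it to any packs from a configuration file.
    packs: +codeql/example-pack@1.0.0
    # Analyze only this subdirectory; "src" is a placeholder path relative to $GITHUB_WORKSPACE.
    source-root: src
```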
lib/actions-util.js (generated; 133 changed lines)
@@ -1,12 +1,25 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
result["default"] = mod;
|
__setModuleDefault(result, mod);
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const core = __importStar(require("@actions/core"));
|
const core = __importStar(require("@actions/core"));
|
||||||
@@ -16,6 +29,10 @@ const yaml = __importStar(require("js-yaml"));
|
|||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
|
/**
|
||||||
|
* The utils in this module are meant to be run inside of the action only.
|
||||||
|
* Code paths from the runner should not enter this module.
|
||||||
|
*/
|
||||||
/**
|
/**
|
||||||
* Wrapper around core.getInput for inputs that always have a value.
|
* Wrapper around core.getInput for inputs that always have a value.
|
||||||
* Also see getOptionalInput.
|
* Also see getOptionalInput.
|
||||||
@@ -39,52 +56,24 @@ function getOptionalInput(name) {
|
|||||||
return value.length > 0 ? value : undefined;
|
return value.length > 0 ? value : undefined;
|
||||||
}
|
}
|
||||||
exports.getOptionalInput = getOptionalInput;
|
exports.getOptionalInput = getOptionalInput;
|
||||||
/**
|
|
||||||
* Get an environment parameter, but throw an error if it is not set.
|
|
||||||
*/
|
|
||||||
function getRequiredEnvParam(paramName) {
|
|
||||||
const value = process.env[paramName];
|
|
||||||
if (value === undefined || value.length === 0) {
|
|
||||||
throw new Error(`${paramName} environment variable must be set`);
|
|
||||||
}
|
|
||||||
core.debug(`${paramName}=${value}`);
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
exports.getRequiredEnvParam = getRequiredEnvParam;
|
|
||||||
function getTemporaryDirectory() {
|
function getTemporaryDirectory() {
|
||||||
const value = process.env["CODEQL_ACTION_TEMP"];
|
const value = process.env["CODEQL_ACTION_TEMP"];
|
||||||
return value !== undefined && value !== ""
|
return value !== undefined && value !== ""
|
||||||
? value
|
? value
|
||||||
: getRequiredEnvParam("RUNNER_TEMP");
|
: util_1.getRequiredEnvParam("RUNNER_TEMP");
|
||||||
}
|
}
|
||||||
exports.getTemporaryDirectory = getTemporaryDirectory;
|
exports.getTemporaryDirectory = getTemporaryDirectory;
|
||||||
function getToolCacheDirectory() {
|
function getToolCacheDirectory() {
|
||||||
const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
|
const value = process.env["CODEQL_ACTION_TOOL_CACHE"];
|
||||||
return value !== undefined && value !== ""
|
return value !== undefined && value !== ""
|
||||||
? value
|
? value
|
||||||
: getRequiredEnvParam("RUNNER_TOOL_CACHE");
|
: util_1.getRequiredEnvParam("RUNNER_TOOL_CACHE");
|
||||||
}
|
}
|
||||||
exports.getToolCacheDirectory = getToolCacheDirectory;
|
exports.getToolCacheDirectory = getToolCacheDirectory;
|
||||||
/**
|
|
||||||
* Ensures all required environment variables are set in the context of a local run.
|
|
||||||
*/
|
|
||||||
function prepareLocalRunEnvironment() {
|
|
||||||
if (!util_1.isLocalRun()) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
core.debug("Action is running locally.");
|
|
||||||
if (!process.env.GITHUB_JOB) {
|
|
||||||
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
|
|
||||||
}
|
|
||||||
if (!process.env.CODEQL_ACTION_ANALYSIS_KEY) {
|
|
||||||
core.exportVariable("CODEQL_ACTION_ANALYSIS_KEY", `LOCAL-RUN:${process.env.GITHUB_JOB}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
|
|
||||||
/**
|
/**
|
||||||
* Gets the SHA of the commit that is currently checked out.
|
* Gets the SHA of the commit that is currently checked out.
|
||||||
*/
|
*/
|
||||||
exports.getCommitOid = async function (ref = "HEAD") {
|
const getCommitOid = async function (ref = "HEAD") {
|
||||||
// Try to use git to get the current commit SHA. If that fails then
|
// Try to use git to get the current commit SHA. If that fails then
|
||||||
// log but otherwise silently fall back to using the SHA from the environment.
|
// log but otherwise silently fall back to using the SHA from the environment.
|
||||||
// The only time these two values will differ is during analysis of a PR when
|
// The only time these two values will differ is during analysis of a PR when
|
||||||
@@ -109,9 +98,10 @@ exports.getCommitOid = async function (ref = "HEAD") {
|
|||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
|
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
|
||||||
return getRequiredEnvParam("GITHUB_SHA");
|
return util_1.getRequiredEnvParam("GITHUB_SHA");
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
exports.getCommitOid = getCommitOid;
|
||||||
function isObject(o) {
|
function isObject(o) {
|
||||||
return o !== null && typeof o === "object";
|
return o !== null && typeof o === "object";
|
||||||
}
|
}
|
||||||
@@ -171,12 +161,12 @@ exports.WorkflowErrors = toCodedErrors({
|
|||||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||||
});
|
});
|
||||||
function getWorkflowErrors(doc) {
|
function getWorkflowErrors(doc) {
|
||||||
var _a, _b, _c, _d, _e, _f, _g, _h;
|
var _a, _b, _c, _d, _e;
|
||||||
const errors = [];
|
const errors = [];
|
||||||
const jobName = process.env.GITHUB_JOB;
|
const jobName = process.env.GITHUB_JOB;
|
||||||
if (jobName) {
|
if (jobName) {
|
||||||
const job = (_b = (_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) === null || _b === void 0 ? void 0 : _b[jobName];
|
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
||||||
const steps = (_c = job) === null || _c === void 0 ? void 0 : _c.steps;
|
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
||||||
if (Array.isArray(steps)) {
|
if (Array.isArray(steps)) {
|
||||||
for (const step of steps) {
|
for (const step of steps) {
|
||||||
// this was advice that we used to give in the README
|
// this was advice that we used to give in the README
|
||||||
@@ -184,7 +174,7 @@ function getWorkflowErrors(doc) {
|
|||||||
// to produce results that are more inline with expectations
|
// to produce results that are more inline with expectations
|
||||||
// (i.e: this is what will happen if you merge this PR)
|
// (i.e: this is what will happen if you merge this PR)
|
||||||
// and avoid some race conditions
|
// and avoid some race conditions
|
||||||
if (((_d = step) === null || _d === void 0 ? void 0 : _d.run) === "git checkout HEAD^2") {
|
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
||||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -214,14 +204,14 @@ function getWorkflowErrors(doc) {
|
|||||||
missingPush = true;
|
missingPush = true;
|
||||||
}
|
}
|
||||||
if (hasPush && hasPullRequest) {
|
if (hasPush && hasPullRequest) {
|
||||||
const paths = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e.paths;
|
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
||||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||||
// if they didn't change any files
|
// if they didn't change any files
|
||||||
// currently we cannot go back through the history and find the most recent baseline
|
// currently we cannot go back through the history and find the most recent baseline
|
||||||
if (Array.isArray(paths) && paths.length > 0) {
|
if (Array.isArray(paths) && paths.length > 0) {
|
||||||
errors.push(exports.WorkflowErrors.PathsSpecified);
|
errors.push(exports.WorkflowErrors.PathsSpecified);
|
||||||
}
|
}
|
||||||
const pathsIgnore = (_f = doc.on.push) === null || _f === void 0 ? void 0 : _f["paths-ignore"];
|
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
||||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
||||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
||||||
}
|
}
|
||||||
@@ -230,9 +220,9 @@ function getWorkflowErrors(doc) {
|
|||||||
// if doc.on.pull_request is undefined that means 'off'
|
// if doc.on.pull_request is undefined that means 'off'
|
||||||
// we only want to check for mismatched branches if pull_request is on.
|
// we only want to check for mismatched branches if pull_request is on.
|
||||||
if (doc.on.pull_request !== undefined) {
|
if (doc.on.pull_request !== undefined) {
|
||||||
const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
|
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
||||||
if (push !== "**") {
|
if (push !== "**") {
|
||||||
const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
|
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
||||||
if (pull_request !== "**") {
|
if (pull_request !== "**") {
|
||||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
||||||
if (difference.length > 0) {
|
if (difference.length > 0) {
|
||||||
@@ -298,21 +288,18 @@ function formatWorkflowCause(errors) {
|
|||||||
exports.formatWorkflowCause = formatWorkflowCause;
|
exports.formatWorkflowCause = formatWorkflowCause;
|
||||||
async function getWorkflow() {
|
async function getWorkflow() {
|
||||||
const relativePath = await getWorkflowPath();
|
const relativePath = await getWorkflowPath();
|
||||||
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
|
const absolutePath = path.join(util_1.getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
|
||||||
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
|
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
||||||
}
|
}
|
||||||
exports.getWorkflow = getWorkflow;
|
exports.getWorkflow = getWorkflow;
|
||||||
/**
|
/**
|
||||||
* Get the path of the currently executing workflow.
|
* Get the path of the currently executing workflow.
|
||||||
*/
|
*/
|
||||||
async function getWorkflowPath() {
|
async function getWorkflowPath() {
|
||||||
if (util_1.isLocalRun()) {
|
const repo_nwo = util_1.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
||||||
return getRequiredEnvParam("WORKFLOW_PATH");
|
|
||||||
}
|
|
||||||
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
|
||||||
const owner = repo_nwo[0];
|
const owner = repo_nwo[0];
|
||||||
const repo = repo_nwo[1];
|
const repo = repo_nwo[1];
|
||||||
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
|
const run_id = Number(util_1.getRequiredEnvParam("GITHUB_RUN_ID"));
|
||||||
const apiClient = api.getActionsApiClient();
|
const apiClient = api.getActionsApiClient();
|
||||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
|
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
|
||||||
owner,
|
owner,
|
||||||
@@ -327,7 +314,7 @@ async function getWorkflowPath() {
|
|||||||
* Get the workflow run ID.
|
* Get the workflow run ID.
|
||||||
*/
|
*/
|
||||||
function getWorkflowRunID() {
|
function getWorkflowRunID() {
|
||||||
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
const workflowRunID = parseInt(util_1.getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
||||||
if (Number.isNaN(workflowRunID)) {
|
if (Number.isNaN(workflowRunID)) {
|
||||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||||
}
|
}
|
||||||
@@ -348,7 +335,7 @@ async function getAnalysisKey() {
|
|||||||
return analysisKey;
|
return analysisKey;
|
||||||
}
|
}
|
||||||
const workflowPath = await getWorkflowPath();
|
const workflowPath = await getWorkflowPath();
|
||||||
const jobName = getRequiredEnvParam("GITHUB_JOB");
|
const jobName = util_1.getRequiredEnvParam("GITHUB_JOB");
|
||||||
analysisKey = `${workflowPath}:${jobName}`;
|
analysisKey = `${workflowPath}:${jobName}`;
|
||||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||||
return analysisKey;
|
return analysisKey;
|
||||||
@@ -385,8 +372,8 @@ exports.computeAutomationID = computeAutomationID;
|
|||||||
async function getRef() {
|
async function getRef() {
|
||||||
// Will be in the form "refs/heads/master" on a push event
|
// Will be in the form "refs/heads/master" on a push event
|
||||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||||
const ref = getRequiredEnvParam("GITHUB_REF");
|
const ref = util_1.getRequiredEnvParam("GITHUB_REF");
|
||||||
const sha = getRequiredEnvParam("GITHUB_SHA");
|
const sha = util_1.getRequiredEnvParam("GITHUB_SHA");
|
||||||
// For pull request refs we want to detect whether the workflow
|
// For pull request refs we want to detect whether the workflow
|
||||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||||
// than the 'merge' ref. If so, we want to convert the ref that
|
// than the 'merge' ref. If so, we want to convert the ref that
|
||||||
@@ -475,10 +462,6 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
|||||||
return statusReport;
|
return statusReport;
|
||||||
}
|
}
|
||||||
exports.createStatusReportBase = createStatusReportBase;
|
exports.createStatusReportBase = createStatusReportBase;
|
||||||
function isHTTPError(arg) {
|
|
||||||
var _a;
|
|
||||||
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
|
|
||||||
}
|
|
||||||
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
||||||
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
|
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
|
||||||
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
||||||
@@ -493,13 +476,9 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
|
|||||||
* Returns whether sending the status report was successful of not.
|
* Returns whether sending the status report was successful of not.
|
||||||
*/
|
*/
|
||||||
async function sendStatusReport(statusReport) {
|
async function sendStatusReport(statusReport) {
|
||||||
if (util_1.isLocalRun()) {
|
|
||||||
core.debug("Not sending status report because this is a local run");
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
const statusReportJSON = JSON.stringify(statusReport);
|
const statusReportJSON = JSON.stringify(statusReport);
|
||||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||||
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
|
const nwo = util_1.getRequiredEnvParam("GITHUB_REPOSITORY");
|
||||||
const [owner, repo] = nwo.split("/");
|
const [owner, repo] = nwo.split("/");
|
||||||
const client = api.getActionsApiClient();
|
const client = api.getActionsApiClient();
|
||||||
try {
|
try {
|
||||||
@@ -512,7 +491,7 @@ async function sendStatusReport(statusReport) {
|
|||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
console.log(e);
|
console.log(e);
|
||||||
if (isHTTPError(e)) {
|
if (util_1.isHTTPError(e)) {
|
||||||
switch (e.status) {
|
switch (e.status) {
|
||||||
case 403:
|
case 403:
|
||||||
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
|
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
|
||||||
@@ -532,7 +511,7 @@ async function sendStatusReport(statusReport) {
|
|||||||
// schema incompatibility when reporting status
|
// schema incompatibility when reporting status
|
||||||
// this means that this action version is no longer compatible with the API
|
// this means that this action version is no longer compatible with the API
|
||||||
// we still want to continue as it is likely the analysis endpoint will work
|
// we still want to continue as it is likely the analysis endpoint will work
|
||||||
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
if (util_1.getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
||||||
core.debug(INCOMPATIBLE_MSG);
|
core.debug(INCOMPATIBLE_MSG);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -566,9 +545,33 @@ exports.isRunningLocalAction = isRunningLocalAction;
|
|||||||
// Get the location where the action is running from.
|
// Get the location where the action is running from.
|
||||||
// This can be used to get the actions name or tell if we're running a local action.
|
// This can be used to get the actions name or tell if we're running a local action.
|
||||||
function getRelativeScriptPath() {
|
function getRelativeScriptPath() {
|
||||||
const runnerTemp = getRequiredEnvParam("RUNNER_TEMP");
|
const runnerTemp = util_1.getRequiredEnvParam("RUNNER_TEMP");
|
||||||
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
||||||
return path.relative(actionsDirectory, __filename);
|
return path.relative(actionsDirectory, __filename);
|
||||||
}
|
}
|
||||||
exports.getRelativeScriptPath = getRelativeScriptPath;
|
exports.getRelativeScriptPath = getRelativeScriptPath;
|
||||||
|
// Reads the contents of GITHUB_EVENT_PATH as a JSON object
|
||||||
|
function getWorkflowEvent() {
|
||||||
|
const eventJsonFile = util_1.getRequiredEnvParam("GITHUB_EVENT_PATH");
|
||||||
|
try {
|
||||||
|
return JSON.parse(fs.readFileSync(eventJsonFile, "utf-8"));
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Is the version of the repository we are currently analyzing from the default branch,
|
||||||
|
// or alternatively from another branch or a pull request.
|
||||||
|
async function isAnalyzingDefaultBranch() {
|
||||||
|
var _a;
|
||||||
|
// Get the current ref and trim and refs/heads/ prefix
|
||||||
|
let currentRef = await getRef();
|
||||||
|
currentRef = currentRef.startsWith("refs/heads/")
|
||||||
|
? currentRef.substr("refs/heads/".length)
|
||||||
|
: currentRef;
|
||||||
|
const event = getWorkflowEvent();
|
||||||
|
const defaultBranch = (_a = event === null || event === void 0 ? void 0 : event.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
|
||||||
|
return currentRef === defaultBranch;
|
||||||
|
}
|
||||||
|
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
|
||||||
//# sourceMappingURL=actions-util.js.map
|
//# sourceMappingURL=actions-util.js.map
|
||||||
File diff suppressed because one or more lines are too long
lib/actions-util.test.js (generated; 116 changed lines)
@@ -1,20 +1,35 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
if (k2 === undefined) k2 = k;
|
||||||
};
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
result["default"] = mod;
|
__setModuleDefault(result, mod);
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
const yaml = __importStar(require("js-yaml"));
|
||||||
const sinon_1 = __importDefault(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const actionsutil = __importStar(require("./actions-util"));
|
const actionsutil = __importStar(require("./actions-util"));
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
|
const util_1 = require("./util");
|
||||||
function errorCodes(actual, expected) {
|
function errorCodes(actual, expected) {
|
||||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
}
|
}
|
||||||
@@ -28,7 +43,7 @@ ava_1.default("getRef() returns merge PR ref if GITHUB_SHA still checked out", a
|
|||||||
const currentSha = "a".repeat(40);
|
const currentSha = "a".repeat(40);
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
process.env["GITHUB_SHA"] = currentSha;
|
||||||
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
callback.withArgs("HEAD").resolves(currentSha);
|
callback.withArgs("HEAD").resolves(currentSha);
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsutil.getRef();
|
||||||
t.deepEqual(actualRef, expectedRef);
|
t.deepEqual(actualRef, expectedRef);
|
||||||
@@ -39,7 +54,7 @@ ava_1.default("getRef() returns merge PR ref if GITHUB_REF still checked out but
|
|||||||
process.env["GITHUB_REF"] = expectedRef;
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||||
const sha = "a".repeat(40);
|
const sha = "a".repeat(40);
|
||||||
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||||
callback.withArgs("HEAD").resolves(sha);
|
callback.withArgs("HEAD").resolves(sha);
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsutil.getRef();
|
||||||
@@ -49,21 +64,13 @@ ava_1.default("getRef() returns merge PR ref if GITHUB_REF still checked out but
|
|||||||
ava_1.default("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
ava_1.default("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||||
const callback = sinon_1.default.stub(actionsutil, "getCommitOid");
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
||||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsutil.getRef();
|
||||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||||
callback.restore();
|
callback.restore();
|
||||||
});
|
});
|
||||||
ava_1.default("getAnalysisKey() when a local run", async (t) => {
|
|
||||||
process.env.CODEQL_LOCAL_RUN = "true";
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
|
|
||||||
process.env.GITHUB_JOB = "";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
const actualAnalysisKey = await actionsutil.getAnalysisKey();
|
|
||||||
t.deepEqual(actualAnalysisKey, "LOCAL-RUN:UNKNOWN-JOB");
|
|
||||||
});
|
|
||||||
ava_1.default("computeAutomationID()", async (t) => {
|
ava_1.default("computeAutomationID()", async (t) => {
|
||||||
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
||||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
||||||
@@ -80,31 +87,6 @@ ava_1.default("computeAutomationID()", async (t) => {
|
|||||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
||||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
||||||
});
|
});
|
||||||
ava_1.default("prepareEnvironment() when a local run", (t) => {
|
|
||||||
process.env.CODEQL_LOCAL_RUN = "false";
|
|
||||||
process.env.GITHUB_JOB = "YYY";
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "TEST";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// unchanged
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "YYY");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
|
|
||||||
process.env.CODEQL_LOCAL_RUN = "true";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// unchanged
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "YYY");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// updated
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "YYY");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:YYY");
|
|
||||||
process.env.GITHUB_JOB = "";
|
|
||||||
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
|
|
||||||
actionsutil.prepareLocalRunEnvironment();
|
|
||||||
// updated
|
|
||||||
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
|
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
|
||||||
});
|
|
||||||
ava_1.default("getWorkflowErrors() when on is empty", (t) => {
|
ava_1.default("getWorkflowErrors() when on is empty", (t) => {
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
@@ -157,7 +139,7 @@ ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct
|
|||||||
t.deepEqual(...errorCodes(errors, []));
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
pull_request:
|
pull_request:
|
||||||
@@ -246,7 +228,7 @@ ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
|||||||
}), []));
|
}), []));
|
||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -329,7 +311,7 @@ ava_1.default("patternIsSuperset()", (t) => {
|
|||||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
|
ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [4.1, master]
|
branches: [4.1, master]
|
||||||
@@ -340,7 +322,7 @@ ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
|
|||||||
t.deepEqual(...errorCodes(errors, []));
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -353,7 +335,7 @@ on:
|
|||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||||
process.env.GITHUB_JOB = "test";
|
process.env.GITHUB_JOB = "test";
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -377,7 +359,7 @@ jobs:
|
|||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||||
process.env.GITHUB_JOB = "test3";
|
process.env.GITHUB_JOB = "test3";
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
@@ -400,36 +382,62 @@ jobs:
|
|||||||
t.deepEqual(...errorCodes(errors, []));
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() when on is missing", (t) => {
|
ava_1.default("getWorkflowErrors() when on is missing", (t) => {
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
`));
|
`));
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
|
ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on: "workflow_dispatch"
|
on: "workflow_dispatch"
|
||||||
`)), []));
|
`)), []));
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on: [workflow_dispatch]
|
on: [workflow_dispatch]
|
||||||
`)), []));
|
`)), []));
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on:
|
on:
|
||||||
workflow_dispatch: {}
|
workflow_dispatch: {}
|
||||||
`)), []));
|
`)), []));
|
||||||
});
|
});
|
||||||
ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [master]
|
branches: [master]
|
||||||
`)), []));
|
`)), []));
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
on: ["push"]
|
on: ["push"]
|
||||||
`)), []));
|
`)), []));
|
||||||
});
|
});
|
||||||
|
ava_1.default("initializeEnvironment", (t) => {
|
||||||
|
util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
|
||||||
|
t.deepEqual(util_1.getMode(), util_1.Mode.actions);
|
||||||
|
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
||||||
|
util_1.initializeEnvironment(util_1.Mode.runner, "4.5.6");
|
||||||
|
t.deepEqual(util_1.getMode(), util_1.Mode.runner);
|
||||||
|
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "4.5.6");
|
||||||
|
});
|
||||||
|
ava_1.default("isAnalyzingDefaultBranch()", async (t) => {
|
||||||
|
await util_1.withTmpDir(async (tmpDir) => {
|
||||||
|
const envFile = path.join(tmpDir, "event.json");
|
||||||
|
fs.writeFileSync(envFile, JSON.stringify({
|
||||||
|
repository: {
|
||||||
|
default_branch: "main",
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
process.env["GITHUB_EVENT_PATH"] = envFile;
|
||||||
|
process.env["GITHUB_REF"] = "main";
|
||||||
|
process.env["GITHUB_SHA"] = "1234";
|
||||||
|
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
|
||||||
|
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||||
|
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
|
||||||
|
process.env["GITHUB_REF"] = "feature";
|
||||||
|
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
|
||||||
|
});
|
||||||
|
});
|
||||||
//# sourceMappingURL=actions-util.test.js.map
|
//# sourceMappingURL=actions-util.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
lib/analysis-paths.js (generated; 19 changed lines)
@@ -1,15 +1,28 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
result["default"] = mod;
|
__setModuleDefault(result, mod);
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.includeAndExcludeAnalysisPaths = exports.printPathFiltersWarning = exports.legalWindowsPathCharactersRegex = void 0;
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
function isInterpretedLanguage(language) {
|
function isInterpretedLanguage(language) {
|
||||||
return language === "javascript" || language === "python";
|
return (language === "javascript" || language === "python" || language === "ruby");
|
||||||
}
|
}
|
||||||
// Matches a string containing only characters that are legal to include in paths on windows.
|
// Matches a string containing only characters that are legal to include in paths on windows.
|
||||||
exports.legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;
|
exports.legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
|
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
lib/analysis-paths.test.js (generated; 19 changed lines)
@@ -1,9 +1,21 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -29,6 +41,7 @@ ava_1.default("emptyPaths", async (t) => {
 codeQLCmd: "",
 gitHubVersion: { type: util.GitHubVariant.DOTCOM },
 dbLocation: path.resolve(tmpDir, "codeql_databases"),
+packs: {},
 };
 analysisPaths.includeAndExcludeAnalysisPaths(config);
 t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -49,6 +62,7 @@ ava_1.default("nonEmptyPaths", async (t) => {
 codeQLCmd: "",
 gitHubVersion: { type: util.GitHubVariant.DOTCOM },
 dbLocation: path.resolve(tmpDir, "codeql_databases"),
+packs: {},
 };
 analysisPaths.includeAndExcludeAnalysisPaths(config);
 t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -70,6 +84,7 @@ ava_1.default("exclude temp dir", async (t) => {
 codeQLCmd: "",
 gitHubVersion: { type: util.GitHubVariant.DOTCOM },
 dbLocation: path.resolve(tempDir, "codeql_databases"),
+packs: {},
 };
 analysisPaths.includeAndExcludeAnalysisPaths(config);
 t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
lib/analysis-paths.test.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;SACrD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;SACrD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;SACtD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action.js (generated, 72 changed lines)
@@ -1,9 +1,21 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
@@ -13,15 +25,18 @@ const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze_1 = require("./analyze");
 const config_utils_1 = require("./config-utils");
+const database_upload_1 = require("./database-upload");
 const logging_1 = require("./logging");
+const repository_1 = require("./repository");
 const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
+// eslint-disable-next-line import/no-commonjs
+const pkg = require("../package.json");
 async function sendStatusReport(startedAt, stats, error) {
-var _a, _b, _c;
+const status = (stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language) !== undefined || error !== undefined
-const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined
 ? "failure"
 : "success";
-const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
+const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
 const statusReport = {
 ...statusReportBase,
 ...(stats || {}),
@@ -30,10 +45,11 @@ async function sendStatusReport(startedAt, stats, error) {
 }
 async function run() {
 const startedAt = new Date();
-let stats = undefined;
+let uploadStats = undefined;
+let runStats = undefined;
 let config = undefined;
+util.initializeEnvironment(util.Mode.actions, pkg.version);
 try {
-actionsUtil.prepareLocalRunEnvironment();
 if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
 return;
 }
@@ -44,26 +60,42 @@ async function run() {
 }
 const apiDetails = {
 auth: actionsUtil.getRequiredInput("token"),
-url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
+url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
 };
 const outputDir = actionsUtil.getRequiredInput("output");
-const queriesStats = await analyze_1.runAnalyze(outputDir, util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), actionsUtil.getOptionalInput("category"), config, logger);
+const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger);
-if (actionsUtil.getRequiredInput("upload") === "true") {
+const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram"));
-const uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
+await analyze_1.runFinalize(outputDir, threads, memory, config, logger);
-stats = { ...queriesStats, ...uploadStats };
+if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
+runStats = await analyze_1.runQueries(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
+}
+if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
+await analyze_1.runCleanup(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
+}
+const dbLocations = {};
+for (const language of config.languages) {
+dbLocations[language] = util.getCodeQLDatabasePath(config, language);
+}
+core.setOutput("db-locations", dbLocations);
+if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
+uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
 }
 else {
 logger.info("Not uploading results");
-stats = { ...queriesStats };
 }
+const repositoryNwo = repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
+await database_upload_1.uploadDatabases(repositoryNwo, config, apiDetails, logger);
 }
 catch (error) {
 core.setFailed(error.message);
 console.log(error);
 if (error instanceof analyze_1.CodeQLAnalysisError) {
-stats = { ...error.queriesStatusReport };
+const stats = { ...error.queriesStatusReport };
+await sendStatusReport(startedAt, stats, error);
+}
+else {
+await sendStatusReport(startedAt, undefined, error);
 }
-await sendStatusReport(startedAt, stats, error);
 return;
 }
 finally {
@@ -89,7 +121,15 @@ async function run() {
 }
 }
 }
-await sendStatusReport(startedAt, stats);
+if (runStats && uploadStats) {
+await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
+}
+else if (runStats) {
+await sendStatusReport(startedAt, { ...runStats });
+}
+else {
+await sendStatusReport(startedAt, undefined);
+}
 }
 async function runWrapper() {
 try {
lib/analyze-action.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAAmD;AACnD,uCAA6C;AAC7C,yDAA2C;AAC3C,6CAA+B;AAU/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,GAAG,MAAM,wBAAS,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,YAAY,GAAG,MAAM,oBAAU,CACnC,SAAS,EACT,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,MAAM,EACN,MAAM,CACP,CAAC;QAEF,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACrD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;YACF,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;SAC7C;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;YACrC,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,CAAC;SAC7B;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;gBACvE,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAMmB;AACnB,iDAAmD;AACnD,uDAAoD;AACpD,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAE3C,6CAA+B;AAE/B,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAUvC,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;IAEb,MAAM,MAAM,GACV,CAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,WAAW,GAAmC,SAAS,CAAC;IAC5D,IAAI,QAAQ,GAAoC,SAAS,CAAC;IAC1D,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAE3D,IAAI;QACF,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,GAAG,MAAM,wBAAS,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SACnD,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,cAAc,CACjC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,MAAM,CACP,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,CAAC;QACvE,MAAM,qBAAW,CAAC,SAAS,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,KAAK,MAAM,EAAE;YAC3D,QAAQ,GAAG,MAAM,oBAAU,CACzB,SAAS,EACT,MAAM,EACN,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,OAAO,EACP,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,MAAM,EACN,MAAM,CACP,CAAC;SACH;QAED,IAAI,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,KAAK,MAAM,EAAE;YAC5D,MAAM,oBAAU,CACd,MAAM,EACN,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,IAAI,QAAQ,EACzD,MAAM,CACP,CAAC;SACH;QAED,MAAM,WAAW,GAA+B,EAAE,CAAC;QACnD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,WAAW,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SACtE;QACD,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;QAE5C,IAAI,QAAQ,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACjE,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CAC9C,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;SACH;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;SACtC;QAED,MAAM,aAAa,GAAG,+BAAkB,CACtC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAC9C,CAAC;QACF,MAAM,iCAAe,CAAC,aAAa,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;KAClE;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,MAAM,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;YAC/C,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SACjD;aAAM;YACL,MAAM,gBAAgB,CAAC,SAAS,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;SACrD;QAED,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;gBACvE,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAA
M,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,IAAI,QAAQ,IAAI,WAAW,EAAE;QAC3B,MAAM,gBAAgB,CAAC,SAAS,EAAE,EAAE,GAAG,QAAQ,EAAE,GAAG,WAAW,EAAE,CAAC,CAAC;KACpE;SAAM,IAAI,QAAQ,EAAE;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,EAAE,GAAG,QAAQ,EAAE,CAAC,CAAC;KACpD;SAAM;QACL,MAAM,gBAAgB,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;KAC9C;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
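The lib/analyze-action.js diff above adds a db-locations step output: an object mapping each analysed language to its CodeQL database directory. A hedged sketch of how a later Node-based step might read it; the DB_LOCATIONS environment variable and the step id are assumptions for illustration, not part of the action:

// Sketch only: consuming the "db-locations" output, e.g. passed to a later step
// via env as DB_LOCATIONS: ${{ steps.analyze.outputs.db-locations }}.
const raw = process.env.DB_LOCATIONS ?? "{}";
const dbLocations: Record<string, string> = JSON.parse(raw);
for (const [language, dbPath] of Object.entries(dbLocations)) {
  console.log(`CodeQL database for ${language} is at ${dbPath}`);
}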
lib/analyze.js (generated, 185 changed lines)
@@ -1,21 +1,34 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.runCleanup = exports.runFinalize = exports.runQueries = exports.CodeQLAnalysisError = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
+const yaml = __importStar(require("js-yaml"));
 const analysisPaths = __importStar(require("./analysis-paths"));
 const codeql_1 = require("./codeql");
 const count_loc_1 = require("./count-loc");
 const languages_1 = require("./languages");
 const sharedEnv = __importStar(require("./shared-environment"));
-const upload_lib_1 = require("./upload-lib");
 const util = __importStar(require("./util"));
 class CodeQLAnalysisError extends Error {
 constructor(queriesStatusReport, message) {
@@ -54,9 +67,10 @@ async function createdDBForScannedLanguages(config, logger) {
 // Insert the LGTM_INDEX_X env vars at this point so they are set when
 // we extract any scanned languages.
 analysisPaths.includeAndExcludeAnalysisPaths(config);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await codeql_1.getCodeQL(config.codeQLCmd);
 for (const language of config.languages) {
-if (languages_1.isScannedLanguage(language)) {
+if (languages_1.isScannedLanguage(language) &&
+!dbIsFinalized(config, language, logger)) {
 logger.startGroup(`Extracting ${language}`);
 if (language === languages_1.Language.python) {
 await setupPythonExtractor(logger);
@@ -66,13 +80,29 @@ async function createdDBForScannedLanguages(config, logger) {
 }
 }
 }
-async function finalizeDatabaseCreation(config, threadsFlag, logger) {
+function dbIsFinalized(config, language, logger) {
+const dbPath = util.getCodeQLDatabasePath(config, language);
+try {
+const dbInfo = yaml.load(fs.readFileSync(path.resolve(dbPath, "codeql-database.yml"), "utf8"));
+return !("inProgress" in dbInfo);
+}
+catch (e) {
+logger.warning(`Could not check whether database for ${language} was finalized. Assuming it is not.`);
+return false;
+}
+}
+async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger) {
 await createdDBForScannedLanguages(config, logger);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+const codeql = await codeql_1.getCodeQL(config.codeQLCmd);
 for (const language of config.languages) {
-logger.startGroup(`Finalizing ${language}`);
+if (dbIsFinalized(config, language, logger)) {
-await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config, language), threadsFlag);
+logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
-logger.endGroup();
+}
+else {
+logger.startGroup(`Finalizing ${language}`);
+await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config, language), threadsFlag, memoryFlag);
+logger.endGroup();
+}
 }
 }
 // Runs queries and creates sarif files in the given folder
@@ -85,97 +115,138 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 // that here.
 config.paths, config.pathsIgnore, config.languages, logger);
 for (const language of config.languages) {
-logger.startGroup(`Analyzing ${language}`);
 const queries = config.queries[language];
-if (queries === undefined ||
+const packsWithVersion = config.packs[language] || [];
-(queries.builtin.length === 0 && queries.custom.length === 0)) {
+const hasBuiltinQueries = (queries === null || queries === void 0 ? void 0 : queries.builtin.length) > 0;
+const hasCustomQueries = (queries === null || queries === void 0 ? void 0 : queries.custom.length) > 0;
+const hasPackWithCustomQueries = packsWithVersion.length > 0;
+if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
 throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
 }
 try {
-let analysisSummary = "";
+if (hasPackWithCustomQueries) {
+logger.info("*************");
+logger.info("Performing analysis with custom QL Packs. QL Packs are an experimental feature.");
+logger.info("And should not be used in production yet.");
+logger.info("*************");
+logger.startGroup(`Downloading custom packs for ${language}`);
+const codeql = await codeql_1.getCodeQL(config.codeQLCmd);
+const results = await codeql.packDownload(packsWithVersion);
+logger.info(`Downloaded packs: ${results.packs
+.map((r) => `${r.name}@${r.version || "latest"}`)
+.join(", ")}`);
+logger.endGroup();
+}
+logger.startGroup(`Running queries for ${language}`);
+const querySuitePaths = [];
 if (queries["builtin"].length > 0) {
-const startTimeBuliltIn = new Date().getTime();
+const startTimeBuiltIn = new Date().getTime();
-const { sarifFile, stdout } = await runQueryGroup(language, "builtin", queries["builtin"], sarifFolder, undefined);
+querySuitePaths.push(await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"]), undefined));
-analysisSummary = stdout;
-await injectLinesOfCode(sarifFile, language, locPromise);
 statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
-new Date().getTime() - startTimeBuliltIn;
+new Date().getTime() - startTimeBuiltIn;
 }
 const startTimeCustom = new Date().getTime();
-const temporarySarifDir = config.tempDir;
+let ranCustom = false;
-const temporarySarifFiles = [];
 for (let i = 0; i < queries["custom"].length; ++i) {
 if (queries["custom"][i].queries.length > 0) {
-const { sarifFile } = await runQueryGroup(language, `custom-${i}`, queries["custom"][i].queries, temporarySarifDir, queries["custom"][i].searchPath);
+querySuitePaths.push(await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries), queries["custom"][i].searchPath));
-temporarySarifFiles.push(sarifFile);
+ranCustom = true;
 }
 }
-if (temporarySarifFiles.length > 0) {
+if (packsWithVersion.length > 0) {
-const sarifFile = path.join(sarifFolder, `${language}-custom.sarif`);
+querySuitePaths.push(await runQueryGroup(language, "packs", createPackSuiteContents(packsWithVersion), undefined));
-fs.writeFileSync(sarifFile, upload_lib_1.combineSarifFiles(temporarySarifFiles));
+ranCustom = true;
-await injectLinesOfCode(sarifFile, language, locPromise);
+}
+if (ranCustom) {
 statusReport[`analyze_custom_queries_${language}_duration_ms`] =
 new Date().getTime() - startTimeCustom;
 }
 logger.endGroup();
-// Print the LoC baseline and the summary results from database analyze.
+logger.startGroup(`Interpreting results for ${language}`);
-logger.startGroup(`Analysis summary for ${language}`);
+const startTimeInterpretResults = new Date().getTime();
-printLinesOfCodeSummary(logger, language, await locPromise);
+const sarifFile = path.join(sarifFolder, `${language}.sarif`);
-logger.info(analysisSummary);
+const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile);
+await injectLinesOfCode(sarifFile, language, locPromise);
+statusReport[`interpret_results_${language}_duration_ms`] =
+new Date().getTime() - startTimeInterpretResults;
 logger.endGroup();
+logger.info(analysisSummary);
+printLinesOfCodeSummary(logger, language, await locPromise);
 }
 catch (e) {
 logger.info(e);
+logger.info(e.stack);
 statusReport.analyze_failure_language = language;
 throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${e}`);
 }
 }
 return statusReport;
-async function runQueryGroup(language, type, queries, destinationFolder, searchPath) {
+async function runInterpretResults(language, queries, sarifFile) {
+const databasePath = util.getCodeQLDatabasePath(config, language);
+const codeql = await codeql_1.getCodeQL(config.codeQLCmd);
+return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId);
+}
+async function runQueryGroup(language, type, querySuiteContents, searchPath) {
 const databasePath = util.getCodeQLDatabasePath(config, language);
 // Pass the queries to codeql using a file instead of using the command
 // line to avoid command line length restrictions, particularly on windows.
 const querySuitePath = `${databasePath}-queries-${type}.qls`;
-const querySuiteContents = queries
-.map((q) => `- query: ${q}`)
-.join("\n");
 fs.writeFileSync(querySuitePath, querySuiteContents);
-logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
+logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
-const sarifFile = path.join(destinationFolder, `${language}-${type}.sarif`);
+const codeql = await codeql_1.getCodeQL(config.codeQLCmd);
-const codeql = codeql_1.getCodeQL(config.codeQLCmd);
+await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
-const databaseAnalyzeStdout = await codeql.databaseAnalyze(databasePath, sarifFile, searchPath, querySuitePath, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId);
+logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
-logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
+return querySuitePath;
-return { sarifFile, stdout: databaseAnalyzeStdout };
 }
 }
 exports.runQueries = runQueries;
-async function runAnalyze(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
+function createQuerySuiteContents(queries) {
+return queries.map((q) => `- query: ${q}`).join("\n");
+}
+function createPackSuiteContents(packsWithVersion) {
+return packsWithVersion.map(packWithVersionToQuerySuiteEntry).join("\n");
+}
+function packWithVersionToQuerySuiteEntry(pack) {
+let text = `- qlpack: ${pack.packName}`;
+if (pack.version) {
+text += `\n version: ${pack.version}`;
+}
+return text;
+}
+async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
 // Delete the tracer config env var to avoid tracing ourselves
 delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
 fs.mkdirSync(outputDir, { recursive: true });
-logger.info("Finalizing database creation");
+await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
-await finalizeDatabaseCreation(config, threadsFlag, logger);
-logger.info("Analyzing database");
-const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger);
-return { ...queriesStats };
 }
-exports.runAnalyze = runAnalyze;
+exports.runFinalize = runFinalize;
+async function runCleanup(config, cleanupLevel, logger) {
+logger.startGroup("Cleaning up databases");
+for (const language of config.languages) {
+const codeql = await codeql_1.getCodeQL(config.codeQLCmd);
+const databasePath = util.getCodeQLDatabasePath(config, language);
+await codeql.databaseCleanup(databasePath, cleanupLevel);
+}
+logger.endGroup();
+}
+exports.runCleanup = runCleanup;
 async function injectLinesOfCode(sarifFile, language, locPromise) {
+var _a;
 const lineCounts = await locPromise;
-const idPrefix = count_loc_1.getIdPrefix(language);
 if (language in lineCounts) {
 const sarif = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
 if (Array.isArray(sarif.runs)) {
 for (const run of sarif.runs) {
-const ruleId = `${idPrefix}/summary/lines-of-code`;
 run.properties = run.properties || {};
 run.properties.metricResults = run.properties.metricResults || [];
-const rule = run.properties.metricResults.find(
+for (const metric of run.properties.metricResults) {
-// the rule id can be in either of two places
+// Baseline is inserted when matching rule has tag lines-of-code
-(r) => { var _a; return r.ruleId === ruleId || ((_a = r.rule) === null || _a === void 0 ? void 0 : _a.id) === ruleId; });
+if (metric.rule && metric.rule.toolComponent) {
-// only add the baseline value if the rule already exists
+const matchingRule = run.tool.extensions[metric.rule.toolComponent.index].rules[metric.rule.index];
-if (rule) {
+if ((_a = matchingRule.properties.tags) === null || _a === void 0 ? void 0 : _a.includes("lines-of-code")) {
-rule.baseline = lineCounts[language];
+metric.baseline = lineCounts[language];
+}
+}
 }
 }
 }
@@ -184,7 +255,7 @@ async function injectLinesOfCode(sarifFile, language, locPromise) {
 }
 function printLinesOfCodeSummary(logger, language, lineCounts) {
 if (language in lineCounts) {
-logger.info(`Counted ${lineCounts[language]} lines of code for ${language} as a baseline.`);
+logger.info(`Counted a baseline of ${lineCounts[language]} lines of code for ${language}.`);
 }
 }
 //# sourceMappingURL=analyze.js.map
File diff suppressed because one or more lines are too long
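The lib/analyze.js diff above moves SARIF production into a single databaseInterpretResults step and writes per-group .qls query-suite files next to the database via createQuerySuiteContents and packWithVersionToQuerySuiteEntry. A sketch of the text those helpers emit, assuming the version line is indented by two spaces so the result parses as YAML (the new lib/analyze.test.js assertions below read the files back with js-yaml); PackWithVersion is an illustrative type name:

// Sketch of the query-suite text written to `${databasePath}-queries-<type>.qls`.
interface PackWithVersion { packName: string; version?: string }

function createQuerySuiteContents(queries: string[]): string {
  return queries.map((q) => `- query: ${q}`).join("\n");
}

function packWithVersionToQuerySuiteEntry(pack: PackWithVersion): string {
  let text = `- qlpack: ${pack.packName}`;
  if (pack.version) {
    text += `\n  version: ${pack.version}`; // two-space indent assumed for valid YAML
  }
  return text;
}

// createQuerySuiteContents(["foo.ql", "bar.ql"]) yields:
//   - query: foo.ql
//   - query: bar.ql
// packWithVersionToQuerySuiteEntry({ packName: "a/b", version: "1.0.0" }) yields:
//   - qlpack: a/b
//     version: 1.0.0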
lib/analyze.test.js (generated, 157 changed lines)
@@ -1,9 +1,21 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -13,10 +25,11 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const ava_1 = __importDefault(require("ava"));
-const sinon_1 = __importDefault(require("sinon"));
+const yaml = __importStar(require("js-yaml"));
+const semver_1 = require("semver");
+const sinon = __importStar(require("sinon"));
 const analyze_1 = require("./analyze");
 const codeql_1 = require("./codeql");
-const count_loc_1 = require("./count-loc");
 const count = __importStar(require("./count-loc"));
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
@@ -32,36 +45,59 @@ ava_1.default("status report fields and search path setting", async (t) => {
 obj[lang] = i + 1;
 return obj;
 }, {});
-sinon_1.default.stub(count, "countLoc").resolves(mockLinesOfCode);
+sinon.stub(count, "countLoc").resolves(mockLinesOfCode);
 let searchPathsUsed = [];
 return await util.withTmpDir(async (tmpDir) => {
 testing_utils_1.setupActionsVars(tmpDir, tmpDir);
 const memoryFlag = "";
 const addSnippetsFlag = "";
 const threadsFlag = "";
+const packs = {
+[languages_1.Language.cpp]: [
+{
+packName: "a/b",
+version: semver_1.clean("1.0.0"),
+},
+],
+[languages_1.Language.java]: [
+{
+packName: "c/d",
+version: semver_1.clean("2.0.0"),
+},
+],
+};
 for (const language of Object.values(languages_1.Language)) {
 codeql_1.setCodeQL({
-databaseAnalyze: async (_, sarifFile, searchPath) => {
+packDownload: async () => ({ packs: [] }),
+databaseRunQueries: async (_db, searchPath) => {
+searchPathsUsed.push(searchPath);
+},
+databaseInterpretResults: async (_db, _queriesRun, sarifFile) => {
 fs.writeFileSync(sarifFile, JSON.stringify({
 runs: [
-// variant 1 uses ruleId
+// references a rule with the lines-of-code tag, so baseline should be injected
 {
-properties: {
+tool: {
-metricResults: [
+extensions: [
 {
-ruleId: `${count_loc_1.getIdPrefix(language)}/summary/lines-of-code`,
+rules: [
-value: 123,
+{
+properties: {
+tags: ["lines-of-code"],
+},
+},
+],
 },
 ],
 },
-},
-// variant 2 uses rule.id
-{
 properties: {
 metricResults: [
 {
 rule: {
-id: `${count_loc_1.getIdPrefix(language)}/summary/lines-of-code`,
+index: 0,
+toolComponent: {
+index: 0,
+},
 },
 value: 123,
 },
@@ -71,7 +107,6 @@ ava_1.default("status report fields and search path setting", async (t) => {
 {},
 ],
 }));
-searchPathsUsed.push(searchPath);
 return "";
 },
 });
@@ -89,6 +124,7 @@ ava_1.default("status report fields and search path setting", async (t) => {
 type: util.GitHubVariant.DOTCOM,
 },
 dbLocation: path.resolve(tmpDir, "codeql_databases"),
+packs,
 };
 fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
 recursive: true,
@@ -98,8 +134,18 @@ ava_1.default("status report fields and search path setting", async (t) => {
 custom: [],
 };
 const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, logging_1.getRunnerLogger(true));
-t.deepEqual(Object.keys(builtinStatusReport).length, 1);
+const hasPacks = language in packs;
-t.true(`analyze_builtin_queries_${language}_duration_ms` in builtinStatusReport);
+const statusReportKeys = Object.keys(builtinStatusReport).sort();
+if (hasPacks) {
+t.deepEqual(statusReportKeys.length, 3, statusReportKeys.toString());
+t.deepEqual(statusReportKeys[0], `analyze_builtin_queries_${language}_duration_ms`);
+t.deepEqual(statusReportKeys[1], `analyze_custom_queries_${language}_duration_ms`);
+t.deepEqual(statusReportKeys[2], `interpret_results_${language}_duration_ms`);
+}
+else {
+t.deepEqual(statusReportKeys[0], `analyze_builtin_queries_${language}_duration_ms`);
+t.deepEqual(statusReportKeys[1], `interpret_results_${language}_duration_ms`);
+}
 config.queries[language] = {
 builtin: [],
 custom: [
@@ -114,40 +160,83 @@ ava_1.default("status report fields and search path setting", async (t) => {
 ],
 };
 const customStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, logging_1.getRunnerLogger(true));
-t.deepEqual(Object.keys(customStatusReport).length, 1);
+t.deepEqual(Object.keys(customStatusReport).length, 2);
 t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
-t.deepEqual(searchPathsUsed, [undefined, "/1", "/2"]);
+const expectedSearchPathsUsed = hasPacks
+? [undefined, undefined, "/1", "/2", undefined]
+: [undefined, "/1", "/2"];
+t.deepEqual(searchPathsUsed, expectedSearchPathsUsed);
+t.true(`interpret_results_${language}_duration_ms` in customStatusReport);
 }
 verifyLineCounts(tmpDir);
+verifyQuerySuites(tmpDir);
 });
 function verifyLineCounts(tmpDir) {
 // eslint-disable-next-line github/array-foreach
 Object.keys(languages_1.Language).forEach((lang, i) => {
-verifyLineCountForFile(lang, path.join(tmpDir, `${lang}-builtin.sarif`), i + 1);
+verifyLineCountForFile(path.join(tmpDir, `${lang}.sarif`), i + 1);
-verifyLineCountForFile(lang, path.join(tmpDir, `${lang}-custom.sarif`), i + 1);
 });
 }
-function verifyLineCountForFile(lang, filePath, lineCount) {
+function verifyLineCountForFile(filePath, lineCount) {
-const idPrefix = count_loc_1.getIdPrefix(lang);
 const sarif = JSON.parse(fs.readFileSync(filePath, "utf8"));
 t.deepEqual(sarif.runs[0].properties.metricResults, [
-{
-ruleId: `${idPrefix}/summary/lines-of-code`,
-value: 123,
-baseline: lineCount,
-},
-]);
-t.deepEqual(sarif.runs[1].properties.metricResults, [
 {
 rule: {
-id: `${idPrefix}/summary/lines-of-code`,
+index: 0,
+toolComponent: {
+index: 0,
+},
 },
 value: 123,
 baseline: lineCount,
 },
 ]);
-// when the rule doesn't exists, it should not be added
+// when the rule doesn't exist, it should not be added
-t.deepEqual(sarif.runs[2].properties.metricResults, []);
+t.deepEqual(sarif.runs[1].properties.metricResults, []);
+}
+function verifyQuerySuites(tmpDir) {
+const qlsContent = [
+{
+query: "foo.ql",
+},
+];
+const qlsContent2 = [
+{
+query: "bar.ql",
+},
+];
+const qlsPackContentCpp = [
+{
+qlpack: "a/b",
+version: "1.0.0",
+},
+];
+const qlsPackContentJava = [
+{
+qlpack: "c/d",
+version: "2.0.0",
+},
+];
+for (const lang of Object.values(languages_1.Language)) {
+t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
+t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
+t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
+const packSuiteName = `${lang}-queries-packs.qls`;
+if (lang === languages_1.Language.cpp) {
+t.deepEqual(readContents(packSuiteName), qlsPackContentCpp);
+}
+else if (lang === languages_1.Language.java) {
+t.deepEqual(readContents(packSuiteName), qlsPackContentJava);
+}
+else {
+t.false(fs.existsSync(path.join(tmpDir, "codeql_databases", packSuiteName)));
+}
+}
+function readContents(name) {
+const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");
+console.log(x);
+return yaml.load(fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8"));
+}
 }
 });
 //# sourceMappingURL=analyze.test.js.map
File diff suppressed because one or more lines are too long
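The lib/api-client.js diff below replaces the plain Octokit client with one wrapped in @octokit/plugin-retry and a versioned user agent. A minimal sketch of that construction pattern; the function name and user-agent string here are illustrative, not the action's own:

import * as githubUtils from "@actions/github/lib/utils";
import * as retry from "@octokit/plugin-retry";

// Sketch only: build an Octokit client that retries failed requests.
export function makeRetryingClient(auth: string, baseUrl: string) {
  const RetryingOctokit = githubUtils.GitHub.plugin(retry.retry);
  return new RetryingOctokit(
    githubUtils.getOctokitOptions(auth, { baseUrl, userAgent: "CodeQL-Action-sketch/0.0.0" })
  );
}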
lib/api-client.js (generated, 37 changed lines)
@@ -1,36 +1,51 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getActionsApiClient = exports.getApiClient = exports.DisallowedAPIVersionReason = void 0;
 const path = __importStar(require("path"));
 const githubUtils = __importStar(require("@actions/github/lib/utils"));
+const retry = __importStar(require("@octokit/plugin-retry"));
 const console_log_level_1 = __importDefault(require("console-log-level"));
 const actions_util_1 = require("./actions-util");
 const util_1 = require("./util");
+// eslint-disable-next-line import/no-commonjs
+const pkg = require("../package.json");
 var DisallowedAPIVersionReason;
 (function (DisallowedAPIVersionReason) {
 DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
 DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
 })(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
-exports.getApiClient = function (apiDetails, { allowLocalRun = false, allowExternal = false } = {}) {
+const getApiClient = function (apiDetails, { allowExternal = false } = {}) {
-if (util_1.isLocalRun() && !allowLocalRun) {
-throw new Error("Invalid API call in local run");
-}
 const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
-return new githubUtils.GitHub(githubUtils.getOctokitOptions(auth, {
+const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
+return new retryingOctokit(githubUtils.getOctokitOptions(auth, {
 baseUrl: getApiUrl(apiDetails.url),
-userAgent: "CodeQL Action",
+userAgent: `CodeQL-${util_1.getMode()}/${pkg.version}`,
 log: console_log_level_1.default({ level: "debug" }),
 }));
 };
+exports.getApiClient = getApiClient;
 function getApiUrl(githubUrl) {
 const url = new URL(githubUrl);
 // If we detect this is trying to connect to github.com
@@ -45,12 +60,12 @@ function getApiUrl(githubUrl) {
 // Temporary function to aid in the transition to running on and off of github actions.
 // Once all code has been converted this function should be removed or made canonical
 // and called only from the action entrypoints.
-function getActionsApiClient(allowLocalRun = false) {
+function getActionsApiClient() {
 const apiDetails = {
 auth: actions_util_1.getRequiredInput("token"),
-url: actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
+url: util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
 };
-return exports.getApiClient(apiDetails, { allowLocalRun });
+return exports.getApiClient(apiDetails);
 }
 exports.getActionsApiClient = getActionsApiClient;
 //# sourceMappingURL=api-client.js.map
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,0EAAgD;AAEhD,iDAAuE;AACvE,iCAAoC;AAEpC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeY,QAAA,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAErD,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IAED,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,OAAO,IAAI,WAAW,CAAC,MAAM,CAC3B,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,kCAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,CAAC,CAAC;AACrD,CAAC;AAPD,kDAOC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAkD;AAClD,iCAAsD;AAEtD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeM,MAAM,YAAY,GAAG,UAC1B,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,UAAU,cAAO,EAAE,IAAI,GAAG,CAAC,OAAO,EAAE;QAC/C,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAdW,QAAA,YAAY,gBAcvB;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB;IACjC,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,0BAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,CAAC,CAAC;AAClC,CAAC;AAPD,kDAOC"}
lib/api-client.test.js (generated, 35 changes)
@@ -1,9 +1,21 @@
"use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- result["default"] = mod;
+ __setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -12,13 +24,20 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const ava_1 = __importDefault(require("ava"));
- const sinon_1 = __importDefault(require("sinon"));
+ const sinon = __importStar(require("sinon"));
const api_client_1 = require("./api-client");
const testing_utils_1 = require("./testing-utils");
+ const util_1 = require("./util");
+ // eslint-disable-next-line import/no-commonjs
+ const pkg = require("../package.json");
testing_utils_1.setupTests(ava_1.default);
+ let pluginStub;
let githubStub;
ava_1.default.beforeEach(() => {
- githubStub = sinon_1.default.stub(githubUtils, "GitHub");
+ pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
+ githubStub = sinon.stub();
+ pluginStub.returns(githubStub);
+ util_1.initializeEnvironment(util_1.Mode.actions, pkg.version);
});
ava_1.default("Get the client API", async (t) => {
doTest(t, {
@@ -28,7 +47,7 @@ ava_1.default("Get the client API", async (t) => {
}, undefined, {
auth: "token xyz",
baseUrl: "http://hucairz/api/v3",
- userAgent: "CodeQL Action",
+ userAgent: `CodeQL-Action/${pkg.version}`,
});
});
ava_1.default("Get the client API external", async (t) => {
@@ -39,7 +58,7 @@ ava_1.default("Get the client API external", async (t) => {
}, { allowExternal: true }, {
auth: "token abc",
baseUrl: "http://hucairz/api/v3",
- userAgent: "CodeQL Action",
+ userAgent: `CodeQL-Action/${pkg.version}`,
});
});
ava_1.default("Get the client API external not present", async (t) => {
@@ -49,7 +68,7 @@ ava_1.default("Get the client API external not present", async (t) => {
}, { allowExternal: true }, {
auth: "token xyz",
baseUrl: "http://hucairz/api/v3",
- userAgent: "CodeQL Action",
+ userAgent: `CodeQL-Action/${pkg.version}`,
});
});
ava_1.default("Get the client API with github url", async (t) => {
@@ -59,7 +78,7 @@ ava_1.default("Get the client API with github url", async (t) => {
}, undefined, {
auth: "token xyz",
baseUrl: "https://api.github.com",
- userAgent: "CodeQL Action",
+ userAgent: `CodeQL-Action/${pkg.version}`,
});
});
function doTest(t, clientArgs, clientOptions, expected) {
@@ -1 +1 @@
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,kDAA0B;AAE1B,6CAA4C;AAC5C,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,eAAK,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AACjD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,eAAe;KAC3B,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,yBAAY,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAA6C;AAC7C,6CAA+B;AAE/B,6CAA4C;AAC5C,mDAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAEvC,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,4BAAqB,CAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oBAAoB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,gBAAgB,EAAE,KAAK;QACvB,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,gBAAgB;KACtB,EACD,EAAE,aAAa,EAAE,IAAI,EAAE,EACvB;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,uBAAuB;QAChC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oCAAoC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrD,MAAM,CACJ,CAAC,EACD;QACE,IAAI,EAAE,KAAK;QACX,GAAG,EAAE,qCAAqC;KAC3C,EACD,SAAS,EACT;QACE,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,wBAAwB;QACjC,SAAS,EAAE,iBAAiB,GAAG,CAAC,OAAO,EAAE;KAC1C,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,MAAM,CACb,CAA4B,EAC5B,UAAe,EACf,aAAkB,EAClB,QAAa;IAEb,yBAAY,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAExC,MAAM,aAAa,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACzC,iEAAiE;IACjE,OAAO,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC5B,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzC,CAAC"}
@@ -1 +1 @@
- { "maximumVersion": "3.1", "minimumVersion": "2.22" }
+ { "maximumVersion": "3.2", "minimumVersion": "2.22" }
lib/autobuild-action.js (generated, 32 changes)
@@ -1,40 +1,54 @@
"use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- result["default"] = mod;
+ __setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
- const actionsUtil = __importStar(require("./actions-util"));
+ const actions_util_1 = require("./actions-util");
const autobuild_1 = require("./autobuild");
const config_utils = __importStar(require("./config-utils"));
const logging_1 = require("./logging");
+ const util_1 = require("./util");
+ // eslint-disable-next-line import/no-commonjs
+ const pkg = require("../package.json");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
- var _a, _b;
+ util_1.initializeEnvironment(util_1.Mode.actions, pkg.version);
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
- const statusReportBase = await actionsUtil.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
+ const statusReportBase = await actions_util_1.createStatusReportBase("autobuild", status, startedAt, cause === null || cause === void 0 ? void 0 : cause.message, cause === null || cause === void 0 ? void 0 : cause.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
- await actionsUtil.sendStatusReport(statusReport);
+ await actions_util_1.sendStatusReport(statusReport);
}
async function run() {
const logger = logging_1.getActionsLogger();
const startedAt = new Date();
let language = undefined;
try {
- actionsUtil.prepareLocalRunEnvironment();
+ if (!(await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("autobuild", "starting", startedAt)))) {
- if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt)))) {
return;
}
- const config = await config_utils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
+ const config = await config_utils.getConfig(actions_util_1.getTemporaryDirectory(), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAS7C,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,WAAW,EACX,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,4BAAqB,CAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,qCAAsB,CACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,+BAAgB,CAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,+BAAgB,CACtB,MAAM,qCAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,oCAAqB,EAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/autobuild.js (generated, 3 changes)
@@ -1,5 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+ exports.runAutobuild = exports.determineAutobuildLanguage = void 0;
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
function determineAutobuildLanguage(config, logger) {
@@ -24,7 +25,7 @@ function determineAutobuildLanguage(config, logger) {
exports.determineAutobuildLanguage = determineAutobuildLanguage;
async function runAutobuild(language, config, logger) {
logger.startGroup(`Attempting to automatically build ${language} code`);
- const codeQL = codeql_1.getCodeQL(config.codeQLCmd);
+ const codeQL = await codeql_1.getCodeQL(config.codeQLCmd);
await codeQL.runAutobuild(language);
logger.endGroup();
}
@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
lib/codeql.js (generated, 378 changes)
@@ -1,25 +1,34 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
result["default"] = mod;
|
__setModuleDefault(result, mod);
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.getExtraOptions = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CommandInvocationError = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const stream = __importStar(require("stream"));
|
|
||||||
const globalutil = __importStar(require("util"));
|
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
const http = __importStar(require("@actions/http-client"));
|
|
||||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
||||||
const query_string_1 = __importDefault(require("query-string"));
|
const query_string_1 = __importDefault(require("query-string"));
|
||||||
const semver = __importStar(require("semver"));
|
const semver = __importStar(require("semver"));
|
||||||
const uuid_1 = require("uuid");
|
|
||||||
const actions_util_1 = require("./actions-util");
|
const actions_util_1 = require("./actions-util");
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||||
@@ -27,6 +36,14 @@ const error_matcher_1 = require("./error-matcher");
|
|||||||
const toolcache = __importStar(require("./toolcache"));
|
const toolcache = __importStar(require("./toolcache"));
|
||||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
|
class CommandInvocationError extends Error {
|
||||||
|
constructor(cmd, args, exitCode, error) {
|
||||||
|
super(`Failure invoking ${cmd} with arguments ${args}.\n
|
||||||
|
Exit code ${exitCode} and error was:\n
|
||||||
|
${error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.CommandInvocationError = CommandInvocationError;
|
||||||
/**
|
/**
|
||||||
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
|
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
|
||||||
* Can be overridden in tests using `setCodeQL`.
|
* Can be overridden in tests using `setCodeQL`.
|
||||||
@@ -34,6 +51,25 @@ const util = __importStar(require("./util"));
|
|||||||
let cachedCodeQL = undefined;
|
let cachedCodeQL = undefined;
|
||||||
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
||||||
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||||
|
/**
|
||||||
|
* The oldest version of CodeQL that the Action will run with. This should be
|
||||||
|
* at least three minor versions behind the current version. The version flags
|
||||||
|
* below can be used to conditionally enable certain features on versions newer
|
||||||
|
* than this. Please record the reason we cannot support an older version.
|
||||||
|
*
|
||||||
|
* Reason: Changes to how the tracing environment is set up.
|
||||||
|
*/
|
||||||
|
const CODEQL_MINIMUM_VERSION = "2.3.1";
|
||||||
|
/**
|
||||||
|
* Versions of CodeQL that version-flag certain functionality in the Action.
|
||||||
|
* For convenience, please keep these in descending order. Once a version
|
||||||
|
* flag is older than the oldest supported version above, it may be removed.
|
||||||
|
*/
|
||||||
|
const CODEQL_VERSION_RAM_FINALIZE = "2.5.8";
|
||||||
|
const CODEQL_VERSION_DIAGNOSTICS = "2.5.6";
|
||||||
|
const CODEQL_VERSION_METRICS = "2.5.5";
|
||||||
|
const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
||||||
|
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||||
function getCodeQLBundleName() {
|
function getCodeQLBundleName() {
|
||||||
let platform;
|
let platform;
|
||||||
if (process.platform === "win32") {
|
if (process.platform === "win32") {
|
||||||
@@ -50,14 +86,15 @@ function getCodeQLBundleName() {
|
|||||||
}
|
}
|
||||||
return `codeql-bundle-${platform}.tar.gz`;
|
return `codeql-bundle-${platform}.tar.gz`;
|
||||||
}
|
}
|
||||||
function getCodeQLActionRepository(mode, logger) {
|
function getCodeQLActionRepository(logger) {
|
||||||
if (mode !== "actions") {
|
if (!util.isActions()) {
|
||||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
return getActionsCodeQLActionRepository(logger);
|
return getActionsCodeQLActionRepository(logger);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
exports.getCodeQLActionRepository = getCodeQLActionRepository;
|
||||||
function getActionsCodeQLActionRepository(logger) {
|
function getActionsCodeQLActionRepository(logger) {
|
||||||
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
|
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
|
||||||
return process.env["GITHUB_ACTION_REPOSITORY"];
|
return process.env["GITHUB_ACTION_REPOSITORY"];
|
||||||
@@ -74,8 +111,8 @@ function getActionsCodeQLActionRepository(logger) {
|
|||||||
const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
|
const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
|
||||||
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
|
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
|
||||||
}
|
}
|
||||||
async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
|
async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
|
||||||
const codeQLActionRepository = getCodeQLActionRepository(mode, logger);
|
const codeQLActionRepository = getCodeQLActionRepository(logger);
|
||||||
const potentialDownloadSources = [
|
const potentialDownloadSources = [
|
||||||
// This GitHub instance, and this Action.
|
// This GitHub instance, and this Action.
|
||||||
[apiDetails.url, codeQLActionRepository],
|
[apiDetails.url, codeQLActionRepository],
|
||||||
@@ -141,22 +178,7 @@ async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
|
|||||||
}
|
}
|
||||||
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
|
||||||
}
|
}
|
||||||
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
|
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, checkVersion) {
|
||||||
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
|
|
||||||
async function toolcacheDownloadTool(url, headers, tempDir, logger) {
|
|
||||||
const client = new http.HttpClient("CodeQL Action");
|
|
||||||
const dest = path.join(tempDir, uuid_1.v4());
|
|
||||||
const response = await client.get(url, headers);
|
|
||||||
if (response.message.statusCode !== 200) {
|
|
||||||
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
|
||||||
throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`);
|
|
||||||
}
|
|
||||||
const pipeline = globalutil.promisify(stream.pipeline);
|
|
||||||
fs.mkdirSync(path.dirname(dest), { recursive: true });
|
|
||||||
await pipeline(response.message, fs.createWriteStream(dest));
|
|
||||||
return dest;
|
|
||||||
}
|
|
||||||
async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, mode, variant, logger) {
|
|
||||||
try {
|
try {
|
||||||
// We use the special value of 'latest' to prioritize the version in the
|
// We use the special value of 'latest' to prioritize the version in the
|
||||||
// defaults over any pinned cached version.
|
// defaults over any pinned cached version.
|
||||||
@@ -164,50 +186,58 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, mode, v
|
|||||||
if (forceLatest) {
|
if (forceLatest) {
|
||||||
codeqlURL = undefined;
|
codeqlURL = undefined;
|
||||||
}
|
}
|
||||||
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
let codeqlFolder;
|
||||||
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
|
let codeqlURLVersion;
|
||||||
// If we find the specified version, we always use that.
|
if (codeqlURL && !codeqlURL.startsWith("http")) {
|
||||||
let codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer, mode, toolCacheDir, logger);
|
codeqlFolder = await toolcache.extractTar(codeqlURL, tempDir, logger);
|
||||||
// If we don't find the requested version, in some cases we may allow a
|
codeqlURLVersion = "local";
|
||||||
// different version to save download time if the version hasn't been
|
|
||||||
// specified explicitly (in which case we always honor it).
|
|
||||||
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
|
||||||
const codeqlVersions = toolcache.findAllVersions("CodeQL", mode, toolCacheDir, logger);
|
|
||||||
if (codeqlVersions.length === 1) {
|
|
||||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], mode, toolCacheDir, logger);
|
|
||||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
|
||||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
|
||||||
codeqlFolder = tmpCodeqlFolder;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (codeqlFolder) {
|
|
||||||
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
|
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
if (!codeqlURL) {
|
codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
||||||
codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger);
|
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
|
||||||
|
// If we find the specified version, we always use that.
|
||||||
|
codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer, toolCacheDir, logger);
|
||||||
|
// If we don't find the requested version, in some cases we may allow a
|
||||||
|
// different version to save download time if the version hasn't been
|
||||||
|
// specified explicitly (in which case we always honor it).
|
||||||
|
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
||||||
|
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
|
||||||
|
if (codeqlVersions.length === 1) {
|
||||||
|
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
|
||||||
|
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
||||||
|
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
||||||
|
codeqlFolder = tmpCodeqlFolder;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
const parsedCodeQLURL = new URL(codeqlURL);
|
if (codeqlFolder) {
|
||||||
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
|
||||||
const headers = { accept: "application/octet-stream" };
|
|
||||||
// We only want to provide an authorization header if we are downloading
|
|
||||||
// from the same GitHub instance the Action is running on.
|
|
||||||
// This avoids leaking Enterprise tokens to dotcom.
|
|
||||||
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
|
||||||
if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
|
|
||||||
parsedQueryString["token"] === undefined) {
|
|
||||||
logger.debug("Downloading CodeQL bundle with token.");
|
|
||||||
headers.authorization = `token ${apiDetails.auth}`;
|
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
logger.debug("Downloading CodeQL bundle without token.");
|
if (!codeqlURL) {
|
||||||
|
codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, variant, logger);
|
||||||
|
}
|
||||||
|
const parsedCodeQLURL = new URL(codeqlURL);
|
||||||
|
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
||||||
|
const headers = { accept: "application/octet-stream" };
|
||||||
|
// We only want to provide an authorization header if we are downloading
|
||||||
|
// from the same GitHub instance the Action is running on.
|
||||||
|
// This avoids leaking Enterprise tokens to dotcom.
|
||||||
|
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
||||||
|
if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
|
||||||
|
parsedQueryString["token"] === undefined) {
|
||||||
|
logger.debug("Downloading CodeQL bundle with token.");
|
||||||
|
headers.authorization = `token ${apiDetails.auth}`;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
logger.debug("Downloading CodeQL bundle without token.");
|
||||||
|
}
|
||||||
|
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
||||||
|
const codeqlPath = await toolcache.downloadTool(codeqlURL, tempDir, headers);
|
||||||
|
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||||
|
const codeqlExtracted = await toolcache.extractTar(codeqlPath, tempDir, logger);
|
||||||
|
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer, toolCacheDir, logger);
|
||||||
}
|
}
|
||||||
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
|
|
||||||
const codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
|
|
||||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
|
||||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath, mode, tempDir, logger);
|
|
||||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer, mode, toolCacheDir, logger);
|
|
||||||
}
|
}
|
||||||
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
||||||
if (process.platform === "win32") {
|
if (process.platform === "win32") {
|
||||||
@@ -216,7 +246,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, mode, v
|
|||||||
else if (process.platform !== "linux" && process.platform !== "darwin") {
|
else if (process.platform !== "linux" && process.platform !== "darwin") {
|
||||||
throw new Error(`Unsupported platform: ${process.platform}`);
|
throw new Error(`Unsupported platform: ${process.platform}`);
|
||||||
}
|
}
|
||||||
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
|
cachedCodeQL = await getCodeQLForCmd(codeqlCmd, checkVersion);
|
||||||
return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
|
return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
@@ -248,9 +278,9 @@ exports.convertToSemVer = convertToSemVer;
|
|||||||
/**
|
/**
|
||||||
* Use the CodeQL executable located at the given path.
|
* Use the CodeQL executable located at the given path.
|
||||||
*/
|
*/
|
||||||
function getCodeQL(cmd) {
|
async function getCodeQL(cmd) {
|
||||||
if (cachedCodeQL === undefined) {
|
if (cachedCodeQL === undefined) {
|
||||||
cachedCodeQL = getCodeQLForCmd(cmd);
|
cachedCodeQL = await getCodeQLForCmd(cmd, true);
|
||||||
}
|
}
|
||||||
return cachedCodeQL;
|
return cachedCodeQL;
|
||||||
}
|
}
|
||||||
@@ -276,14 +306,20 @@ function resolveFunction(partialCodeql, methodName, defaultImplementation) {
|
|||||||
function setCodeQL(partialCodeql) {
|
function setCodeQL(partialCodeql) {
|
||||||
cachedCodeQL = {
|
cachedCodeQL = {
|
||||||
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
|
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
|
||||||
|
getVersion: resolveFunction(partialCodeql, "getVersion", () => new Promise((resolve) => resolve("1.0.0"))),
|
||||||
printVersion: resolveFunction(partialCodeql, "printVersion"),
|
printVersion: resolveFunction(partialCodeql, "printVersion"),
|
||||||
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
|
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
|
||||||
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
|
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
|
||||||
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
|
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
|
||||||
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
|
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
|
||||||
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
|
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
|
||||||
|
resolveLanguages: resolveFunction(partialCodeql, "resolveLanguages"),
|
||||||
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
|
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
|
||||||
databaseAnalyze: resolveFunction(partialCodeql, "databaseAnalyze"),
|
packDownload: resolveFunction(partialCodeql, "packDownload"),
|
||||||
|
databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"),
|
||||||
|
databaseBundle: resolveFunction(partialCodeql, "databaseBundle"),
|
||||||
|
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
|
||||||
|
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
|
||||||
};
|
};
|
||||||
return cachedCodeQL;
|
return cachedCodeQL;
|
||||||
}
|
}
|
||||||
@@ -302,13 +338,19 @@ function getCachedCodeQL() {
|
|||||||
return cachedCodeQL;
|
return cachedCodeQL;
|
||||||
}
|
}
|
||||||
exports.getCachedCodeQL = getCachedCodeQL;
|
exports.getCachedCodeQL = getCachedCodeQL;
|
||||||
function getCodeQLForCmd(cmd) {
|
async function getCodeQLForCmd(cmd, checkVersion) {
|
||||||
return {
|
let cachedVersion = undefined;
|
||||||
|
const codeql = {
|
||||||
getPath() {
|
getPath() {
|
||||||
return cmd;
|
return cmd;
|
||||||
},
|
},
|
||||||
|
async getVersion() {
|
||||||
|
if (cachedVersion === undefined)
|
||||||
|
cachedVersion = runTool(cmd, ["version", "--format=terse"]);
|
||||||
|
return await cachedVersion;
|
||||||
|
},
|
||||||
async printVersion() {
|
async printVersion() {
|
||||||
await new toolrunner.ToolRunner(cmd, ["version", "--format=json"]).exec();
|
await runTool(cmd, ["version", "--format=json"]);
|
||||||
},
|
},
|
||||||
async getTracerEnv(databasePath) {
|
async getTracerEnv(databasePath) {
|
||||||
// Write tracer-env.js to a temp location.
|
// Write tracer-env.js to a temp location.
|
||||||
@@ -339,7 +381,7 @@ function getCodeQLForCmd(cmd) {
|
|||||||
// action/runner has been implemented in `codeql database trace-command`
|
// action/runner has been implemented in `codeql database trace-command`
|
||||||
// _and_ is present in the latest supported CLI release.)
|
// _and_ is present in the latest supported CLI release.)
|
||||||
const envFile = path.resolve(databasePath, "working", "env.tmp");
|
const envFile = path.resolve(databasePath, "working", "env.tmp");
|
||||||
await new toolrunner.ToolRunner(cmd, [
|
await runTool(cmd, [
|
||||||
"database",
|
"database",
|
||||||
"trace-command",
|
"trace-command",
|
||||||
databasePath,
|
databasePath,
|
||||||
@@ -347,18 +389,18 @@ function getCodeQLForCmd(cmd) {
|
|||||||
process.execPath,
|
process.execPath,
|
||||||
tracerEnvJs,
|
tracerEnvJs,
|
||||||
envFile,
|
envFile,
|
||||||
]).exec();
|
]);
|
||||||
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
|
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
|
||||||
},
|
},
|
||||||
async databaseInit(databasePath, language, sourceRoot) {
|
async databaseInit(databasePath, language, sourceRoot) {
|
||||||
await new toolrunner.ToolRunner(cmd, [
|
await runTool(cmd, [
|
||||||
"database",
|
"database",
|
||||||
"init",
|
"init",
|
||||||
databasePath,
|
databasePath,
|
||||||
`--language=${language}`,
|
`--language=${language}`,
|
||||||
`--source-root=${sourceRoot}`,
|
`--source-root=${sourceRoot}`,
|
||||||
...getExtraOptionsFromEnv(["database", "init"]),
|
...getExtraOptionsFromEnv(["database", "init"]),
|
||||||
]).exec();
|
]);
|
||||||
},
|
},
|
||||||
async runAutobuild(language) {
|
async runAutobuild(language) {
|
||||||
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
|
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
|
||||||
@@ -374,7 +416,7 @@ function getCodeQLForCmd(cmd) {
|
|||||||
"-Dhttp.keepAlive=false",
|
"-Dhttp.keepAlive=false",
|
||||||
"-Dmaven.wagon.http.pool=false",
|
"-Dmaven.wagon.http.pool=false",
|
||||||
].join(" ");
|
].join(" ");
|
||||||
await new toolrunner.ToolRunner(autobuildCmd).exec();
|
await runTool(autobuildCmd);
|
||||||
},
|
},
|
||||||
async extractScannedLanguage(databasePath, language) {
|
async extractScannedLanguage(databasePath, language) {
|
||||||
// Get extractor location
|
// Get extractor location
|
||||||
@@ -409,14 +451,28 @@ function getCodeQLForCmd(cmd) {
|
|||||||
traceCommand,
|
traceCommand,
|
||||||
], error_matcher_1.errorMatchers);
|
], error_matcher_1.errorMatchers);
|
||||||
},
|
},
|
||||||
async finalizeDatabase(databasePath, threadsFlag) {
|
async finalizeDatabase(databasePath, threadsFlag, memoryFlag) {
|
||||||
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
|
const args = [
|
||||||
"database",
|
"database",
|
||||||
"finalize",
|
"finalize",
|
||||||
|
"--finalize-dataset",
|
||||||
threadsFlag,
|
threadsFlag,
|
||||||
...getExtraOptionsFromEnv(["database", "finalize"]),
|
...getExtraOptionsFromEnv(["database", "finalize"]),
|
||||||
databasePath,
|
databasePath,
|
||||||
], error_matcher_1.errorMatchers);
|
];
|
||||||
|
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_RAM_FINALIZE))
|
||||||
|
args.push(memoryFlag);
|
||||||
|
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, args, error_matcher_1.errorMatchers);
|
||||||
|
},
|
||||||
|
async resolveLanguages() {
|
||||||
|
const codeqlArgs = ["resolve", "languages", "--format=json"];
|
||||||
|
const output = await runTool(cmd, codeqlArgs);
|
||||||
|
try {
|
||||||
|
return JSON.parse(output);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
throw new Error(`Unexpected output from codeql resolve languages: ${e}`);
|
||||||
|
}
|
||||||
},
|
},
|
||||||
async resolveQueries(queries, extraSearchPath) {
|
async resolveQueries(queries, extraSearchPath) {
|
||||||
const codeqlArgs = [
|
const codeqlArgs = [
|
||||||
@@ -429,52 +485,119 @@ function getCodeQLForCmd(cmd) {
|
|||||||
if (extraSearchPath !== undefined) {
|
if (extraSearchPath !== undefined) {
|
||||||
codeqlArgs.push("--additional-packs", extraSearchPath);
|
codeqlArgs.push("--additional-packs", extraSearchPath);
|
||||||
}
|
}
|
||||||
let output = "";
|
const output = await runTool(cmd, codeqlArgs);
|
||||||
await new toolrunner.ToolRunner(cmd, codeqlArgs, {
|
try {
|
||||||
listeners: {
|
return JSON.parse(output);
|
||||||
stdout: (data) => {
|
}
|
||||||
output += data.toString();
|
catch (e) {
|
||||||
},
|
throw new Error(`Unexpected output from codeql resolve queries: ${e}`);
|
||||||
},
|
}
|
||||||
}).exec();
|
|
||||||
return JSON.parse(output);
|
|
||||||
},
|
},
|
||||||
async databaseAnalyze(databasePath, sarifFile, extraSearchPath, querySuite, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId) {
|
async databaseRunQueries(databasePath, extraSearchPath, querySuitePath, memoryFlag, threadsFlag) {
|
||||||
const args = [
|
const codeqlArgs = [
|
||||||
"database",
|
"database",
|
||||||
"analyze",
|
"run-queries",
|
||||||
memoryFlag,
|
memoryFlag,
|
||||||
threadsFlag,
|
threadsFlag,
|
||||||
databasePath,
|
databasePath,
|
||||||
"--min-disk-free=1024",
|
"--min-disk-free=1024",
|
||||||
"--format=sarif-latest",
|
|
||||||
"--sarif-multicause-markdown",
|
|
||||||
`--output=${sarifFile}`,
|
|
||||||
addSnippetsFlag,
|
|
||||||
// Enable progress verbosity so we log each query as it's interpreted. This aids debugging
|
|
||||||
// when interpretation takes a while for one of the queries being analyzed.
|
|
||||||
"-v",
|
"-v",
|
||||||
...getExtraOptionsFromEnv(["database", "analyze"]),
|
...getExtraOptionsFromEnv(["database", "run-queries"]),
|
||||||
];
|
];
|
||||||
if (extraSearchPath !== undefined) {
|
if (extraSearchPath !== undefined) {
|
||||||
args.push("--additional-packs", extraSearchPath);
|
codeqlArgs.push("--additional-packs", extraSearchPath);
|
||||||
}
|
}
|
||||||
if (automationDetailsId !== undefined) {
|
codeqlArgs.push(querySuitePath);
|
||||||
args.push("--sarif-category", automationDetailsId);
|
await runTool(cmd, codeqlArgs);
|
||||||
|
},
|
||||||
|
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
|
||||||
|
const codeqlArgs = [
|
||||||
|
"database",
|
||||||
|
"interpret-results",
|
||||||
|
threadsFlag,
|
||||||
|
"--format=sarif-latest",
|
||||||
|
"-v",
|
||||||
|
`--output=${sarifFile}`,
|
||||||
|
addSnippetsFlag,
|
||||||
|
...getExtraOptionsFromEnv(["database", "interpret-results"]),
|
||||||
|
];
|
||||||
|
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_DIAGNOSTICS))
|
||||||
|
codeqlArgs.push("--print-diagnostics-summary");
|
||||||
|
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_METRICS))
|
||||||
|
codeqlArgs.push("--print-metrics-summary");
|
||||||
|
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_GROUP_RULES))
|
||||||
|
codeqlArgs.push("--sarif-group-rules-by-pack");
|
||||||
|
if (automationDetailsId !== undefined &&
|
||||||
|
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
|
||||||
|
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||||
}
|
}
|
||||||
args.push(querySuite);
|
codeqlArgs.push(databasePath, ...querySuitePaths);
|
||||||
// capture stdout, which contains analysis summaries
|
// capture stdout, which contains analysis summaries
|
||||||
let output = "";
|
return await runTool(cmd, codeqlArgs);
|
||||||
await new toolrunner.ToolRunner(cmd, args, {
|
},
|
||||||
-listeners: {
+/**
-stdout: (data) => {
+* Download specified packs into the package cache. If the specified
-output += data.toString("utf8");
+* package and version already exists (e.g., from a previous analysis run),
-},
+* then it is not downloaded again (unless the extra option `--force` is
-},
+* specified).
-}).exec();
+*
-return output;
+* If no version is specified, then the latest version is
+* downloaded. The check to determine what the latest version is is done
+* each time this package is requested.
+*/
+async packDownload(packs) {
+const codeqlArgs = [
+"pack",
+"download",
+"--format=json",
+...getExtraOptionsFromEnv(["pack", "download"]),
+...packs.map(packWithVersionToString),
+];
+const output = await runTool(cmd, codeqlArgs);
+try {
+const parsedOutput = JSON.parse(output);
+if (Array.isArray(parsedOutput.packs) &&
+// TODO PackDownloadOutput will not include the version if it is not specified
+// in the input. The version is always the latest version available.
+// It should be added to the output, but this requires a CLI change
+parsedOutput.packs.every((p) => p.name /* && p.version */)) {
+return parsedOutput;
+}
+else {
+throw new Error("Unexpected output from pack download");
+}
+}
+catch (e) {
+throw new Error(`Attempted to download specified packs but got an error:\n${output}\n${e}`);
+}
+},
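As the doc comment above describes, packDownload hands each requested pack to the CLI as a single argument built by packWithVersionToString (defined later in this diff): a pinned pack becomes name@version, an unpinned pack is passed by name alone and the latest version is fetched. A small sketch of the resulting argument shapes (pack names are illustrative):

const packs = [
    { packName: "codeql/javascript-queries", version: "1.2.3" },
    { packName: "octo-org/custom-queries" }, // no version: the latest available version is downloaded
];
// packWithVersionToString(packs[0]) === "codeql/javascript-queries@1.2.3"
// packWithVersionToString(packs[1]) === "octo-org/custom-queries"
// so the invocation is roughly:
//   codeql pack download --format=json codeql/javascript-queries@1.2.3 octo-org/custom-queries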
+async databaseCleanup(databasePath, cleanupLevel) {
+const codeqlArgs = [
+"database",
+"cleanup",
+databasePath,
+`--mode=${cleanupLevel}`,
+];
+await runTool(cmd, codeqlArgs);
+},
+async databaseBundle(databasePath, outputFilePath) {
+const args = [
+"database",
+"bundle",
+databasePath,
+`--output=${outputFilePath}`,
+];
+await new toolrunner.ToolRunner(cmd, args).exec();
 },
 };
+if (checkVersion &&
+!(await util.codeQlVersionAbove(codeql, CODEQL_MINIMUM_VERSION))) {
+throw new Error(`Expected a CodeQL CLI with version at least ${CODEQL_MINIMUM_VERSION} but got version ${await codeql.getVersion()}`);
+}
+return codeql;
+}
+function packWithVersionToString(pack) {
+return pack.version ? `${pack.packName}@${pack.version}` : pack.packName;
 }
 /**
 * Gets the options for `path` of `options` as an array of extra option strings.
@@ -514,12 +637,39 @@ function asExtraOptions(options, pathInfo) {
 * Exported for testing.
 */
 function getExtraOptions(options, paths, pathInfo) {
-var _a, _b, _c;
+const all = asExtraOptions(options === null || options === void 0 ? void 0 : options["*"], pathInfo.concat("*"));
-const all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a["*"], pathInfo.concat("*"));
 const specific = paths.length === 0
 ? asExtraOptions(options, pathInfo)
-: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[paths[0]], (_c = paths) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(paths[0]));
+: getExtraOptions(options === null || options === void 0 ? void 0 : options[paths[0]], paths === null || paths === void 0 ? void 0 : paths.slice(1), pathInfo.concat(paths[0]));
 return all.concat(specific);
 }
 exports.getExtraOptions = getExtraOptions;
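A worked example of how the wildcard and path-specific entries combine in getExtraOptions may help here; the option strings are illustrative placeholders, not necessarily valid CodeQL flags:

const options = {
    "*": ["--verbose"],
    database: {
        "*": ["-J=-Xmx2048M"],
        "run-queries": ["--illustrative-flag"],
    },
};
// Wildcard options apply at every level, most general first:
// getExtraOptions(options, ["database", "run-queries"], [])
//   returns ["--verbose", "-J=-Xmx2048M", "--illustrative-flag"]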
+/*
+* A constant defining the maximum number of characters we will keep from
+* the programs stderr for logging. This serves two purposes:
+* (1) It avoids an OOM if a program fails in a way that results it
+* printing many log lines.
+* (2) It avoids us hitting the limit of how much data we can send in our
+* status reports on GitHub.com.
+*/
+const maxErrorSize = 20000;
+async function runTool(cmd, args = []) {
+let output = "";
+let error = "";
+const exitCode = await new toolrunner.ToolRunner(cmd, args, {
+listeners: {
+stdout: (data) => {
+output += data.toString();
+},
+stderr: (data) => {
+const toRead = Math.min(maxErrorSize - error.length, data.length);
+error += data.toString("utf8", 0, toRead);
+},
+},
+ignoreReturnCode: true,
+}).exec();
+if (exitCode !== 0)
+throw new CommandInvocationError(cmd, args, exitCode, error);
+return output;
+}
 //# sourceMappingURL=codeql.js.map
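The stderr listener above only ever keeps the first maxErrorSize characters, which is what prevents a very noisy failing process from exhausting memory or overflowing the status report. A self-contained sketch of the truncation arithmetic (chunk sizes are illustrative):

const maxErrorSize = 20000;
let error = "";
for (const chunk of ["a".repeat(15000), "b".repeat(15000)]) {
    const data = Buffer.from(chunk);
    const toRead = Math.min(maxErrorSize - error.length, data.length);
    error += data.toString("utf8", 0, toRead);
}
// First chunk: toRead = 15000, error.length becomes 15000.
// Second chunk: toRead = 5000, so error.length is capped at 20000.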
File diff suppressed because one or more lines are too long

lib/codeql.test.js (generated, 55 changed lines)
@@ -1,9 +1,21 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -19,6 +31,7 @@ const defaults = __importStar(require("./defaults.json"));
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
+const util_1 = require("./util");
 testing_utils_1.setupTests(ava_1.default);
 const sampleApiDetails = {
 auth: "token",
@@ -28,6 +41,9 @@ const sampleGHAEApiDetails = {
 auth: "token",
 url: "https://example.githubenterprise.com",
 };
+ava_1.default.beforeEach(() => {
+util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
+});
 ava_1.default("download codeql bundle cache", async (t) => {
 await util.withTmpDir(async (tmpDir) => {
 testing_utils_1.setupActionsVars(tmpDir, tmpDir);
@@ -37,7 +53,7 @@ ava_1.default("download codeql bundle cache", async (t) => {
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
 }
 const cachedVersions = toolcache.findAllVersions("CodeQL");
@@ -50,12 +66,12 @@ ava_1.default("download codeql bundle cache explicitly requested with pinned dif
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
 });
 });
@@ -65,9 +81,9 @@ ava_1.default("don't download codeql bundle cache with pinned different version
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
-await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 1);
 });
@@ -78,7 +94,7 @@ ava_1.default("download codeql bundle cache with different version cached (not p
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
 const platform = process.platform === "win32"
 ? "win64"
@@ -88,7 +104,7 @@ ava_1.default("download codeql bundle cache with different version cached (not p
 nock_1.default("https://github.com")
 .get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 2);
 });
@@ -99,7 +115,7 @@ ava_1.default('download codeql bundle cache with pinned different version cached
 nock_1.default("https://example.com")
 .get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
 const platform = process.platform === "win32"
 ? "win64"
@@ -109,7 +125,7 @@ ava_1.default('download codeql bundle cache with pinned different version cached
 nock_1.default("https://github.com")
 .get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
-await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, tmpDir, util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true), false);
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 2);
 });
@@ -137,7 +153,7 @@ ava_1.default("download codeql bundle from github ae endpoint", async (t) => {
 nock_1.default("https://example.githubenterprise.com")
 .get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
 .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
-await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, "runner", util.GitHubVariant.GHAE, logging_1.getRunnerLogger(true));
+await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, tmpDir, util.GitHubVariant.GHAE, logging_1.getRunnerLogger(true), false);
 const cachedVersions = toolcache.findAllVersions("CodeQL");
 t.is(cachedVersions.length, 1);
 });
@@ -186,4 +202,19 @@ ava_1.default("getExtraOptions throws for bad content", (t) => {
 t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
 t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
 });
+ava_1.default("getCodeQLActionRepository", (t) => {
+const logger = logging_1.getRunnerLogger(true);
+util_1.initializeEnvironment(util_1.Mode.runner, "1.2.3");
+const repoActions = codeql.getCodeQLActionRepository(logger);
+t.deepEqual(repoActions, "github/codeql-action");
+util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
+// isRunningLocalAction() === true
+delete process.env["GITHUB_ACTION_REPOSITORY"];
+process.env["RUNNER_TEMP"] = path.dirname(__dirname);
+const repoLocalRunner = codeql.getCodeQLActionRepository(logger);
+t.deepEqual(repoLocalRunner, "github/codeql-action");
+process.env["GITHUB_ACTION_REPOSITORY"] = "xxx/yyy";
+const repoEnv = codeql.getCodeQLActionRepository(logger);
+t.deepEqual(repoEnv, "xxx/yyy");
+});
 //# sourceMappingURL=codeql.test.js.map
File diff suppressed because one or more lines are too long

lib/config-utils.js (generated, 233 changed lines)
@@ -1,15 +1,29 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const yaml = __importStar(require("js-yaml"));
+const semver = __importStar(require("semver"));
 const api = __importStar(require("./api-client"));
 const externalQueries = __importStar(require("./external-queries"));
 const languages_1 = require("./languages");
@@ -20,6 +34,7 @@ const QUERIES_PROPERTY = "queries";
 const QUERIES_USES_PROPERTY = "uses";
 const PATHS_IGNORE_PROPERTY = "paths-ignore";
 const PATHS_PROPERTY = "paths";
+const PACKS_PROPERTY = "packs";
 /**
 * A list of queries from https://github.com/github/codeql that
 * we don't want to run. Disabling them here is a quicker alternative to
@@ -112,10 +127,10 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, c
 /**
 * Retrieve the set of queries at localQueryPath and add them to resultMap.
 */
-async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath, configFile) {
+async function addLocalQueries(codeQL, resultMap, localQueryPath, workspacePath, configFile) {
 // Resolve the local path against the workspace so that when this is
 // passed to codeql it resolves to exactly the path we expect it to resolve to.
-let absoluteQueryPath = path.join(checkoutPath, localQueryPath);
+let absoluteQueryPath = path.join(workspacePath, localQueryPath);
 // Check the file exists
 if (!fs.existsSync(absoluteQueryPath)) {
 throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
@@ -123,10 +138,11 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
 // Call this after checking file exists, because it'll fail if file doesn't exist
 absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
 // Check the local path doesn't jump outside the repo using '..' or symlinks
-if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
+if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(workspacePath) + path.sep)) {
 throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
 }
-await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath);
+const extraSearchPath = workspacePath;
+await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], extraSearchPath);
 }
 /**
 * Retrieve the set of queries at the referenced remote repo and add them to resultMap.
@@ -164,14 +180,14 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
 * local paths starting with './', or references to remote repos, or
 * a finite set of hardcoded terms for builtin suites.
 */
-async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, apiDetails, logger, configFile) {
+async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
 queryUses = queryUses.trim();
 if (queryUses === "") {
 throw new Error(getQueryUsesInvalid(configFile));
 }
 // Check for the local path case before we start trying to parse the repository name
 if (queryUses.startsWith("./")) {
-await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile);
+await addLocalQueries(codeQL, resultMap, queryUses.slice(2), workspacePath, configFile);
 return;
 }
 // Check for one of the builtin suites
@@ -254,6 +270,24 @@ function getPathsInvalid(configFile) {
 return getConfigFilePropertyError(configFile, PATHS_PROPERTY, "must be an array of non-empty strings");
 }
 exports.getPathsInvalid = getPathsInvalid;
+function getPacksRequireLanguage(lang, configFile) {
+return getConfigFilePropertyError(configFile, PACKS_PROPERTY, `has "${lang}", but it is not one of the languages to analyze`);
+}
+exports.getPacksRequireLanguage = getPacksRequireLanguage;
+function getPacksInvalidSplit(configFile) {
+return getConfigFilePropertyError(configFile, PACKS_PROPERTY, "must split packages by language");
+}
+exports.getPacksInvalidSplit = getPacksInvalidSplit;
+function getPacksInvalid(configFile) {
+return getConfigFilePropertyError(configFile, PACKS_PROPERTY, "must be an array of non-empty strings");
+}
+exports.getPacksInvalid = getPacksInvalid;
+function getPacksStrInvalid(packStr, configFile) {
+return configFile
+? getConfigFilePropertyError(configFile, PACKS_PROPERTY, `"${packStr}" is not a valid pack`)
+: `"${packStr}" is not a valid pack`;
+}
+exports.getPacksStrInvalid = getPacksStrInvalid;
 function getLocalPathOutsideOfRepository(configFile, localPath) {
 return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" is outside of the repository`);
 }
@@ -306,9 +340,7 @@ exports.getUnknownLanguagesError = getUnknownLanguagesError;
 */
 async function getLanguagesInRepo(repository, apiDetails, logger) {
 logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
-const response = await api
+const response = await api.getApiClient(apiDetails).repos.listLanguages({
-.getApiClient(apiDetails, { allowLocalRun: true })
-.repos.listLanguages({
 owner: repository.owner,
 repo: repository.repo,
 });
@@ -336,7 +368,7 @@ async function getLanguagesInRepo(repository, apiDetails, logger) {
 * If no languages could be detected from either the workflow or the repository
 * then throw an error.
 */
-async function getLanguages(languagesInput, repository, apiDetails, logger) {
+async function getLanguages(codeQL, languagesInput, repository, apiDetails, logger) {
 // Obtain from action input 'languages' if set
 let languages = (languagesInput || "")
 .split(",")
@@ -346,6 +378,8 @@ async function getLanguages(languagesInput, repository, apiDetails, logger) {
 if (languages.length === 0) {
 // Obtain languages as all languages in the repo that can be analysed
 languages = await getLanguagesInRepo(repository, apiDetails, logger);
+const availableLanguages = await codeQL.resolveLanguages();
+languages = languages.filter((value) => value in availableLanguages);
 logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
 }
 // If the languages parameter was not given and no languages were
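The two added lines above mean that automatic language detection now keeps only languages the installed CLI can actually analyse: resolveLanguages returns an object keyed by analysable language names, and anything else reported by the repository API is dropped. A small sketch of that filter (the language lists are illustrative):

const detected = ["javascript", "python", "html"]; // from the repos API
const availableLanguages = { javascript: [], python: [], cpp: [] }; // shape assumed from codeQL.resolveLanguages()
const languages = detected.filter((value) => value in availableLanguages);
// languages === ["javascript", "python"]; "html" is not analysable and is dropped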
@@ -370,12 +404,12 @@ async function getLanguages(languagesInput, repository, apiDetails, logger) {
 }
 return parsedLanguages;
 }
-async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, apiDetails, logger) {
+async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
 queriesInput = queriesInput.trim();
 // "+" means "don't override config file" - see shouldAddConfigFileQueries
 queriesInput = queriesInput.replace(/^\+/, "");
 for (const query of queriesInput.split(",")) {
-await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, apiDetails, logger);
+await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
 }
 }
 // Returns true if either no queries were provided in the workflow.
@@ -391,8 +425,9 @@ function shouldAddConfigFileQueries(queriesInput) {
 /**
 * Get the default config for when the user has not supplied one.
 */
-async function getDefaultConfig(languagesInput, queriesInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
+async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
-const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
+var _a;
+const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
 const queries = {};
 for (const language of languages) {
 queries[language] = {
@@ -402,13 +437,15 @@ async function getDefaultConfig(languagesInput, queriesInput, dbLocation, reposi
 }
 await addDefaultQueries(codeQL, languages, queries);
 if (queriesInput) {
-await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
+await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
 }
+const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
 return {
 languages,
 queries,
 pathsIgnore: [],
 paths: [],
+packs,
 originalUserInput: {},
 tempDir,
 toolCacheDir,
@@ -421,12 +458,13 @@ exports.getDefaultConfig = getDefaultConfig;
 /**
 * Load the config from the given file.
 */
-async function loadConfig(languagesInput, queriesInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
+async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
+var _a;
 let parsedYAML;
 if (isLocal(configFile)) {
 // Treat the config file as relative to the workspace
-configFile = path.resolve(checkoutPath, configFile);
+configFile = path.resolve(workspacePath, configFile);
-parsedYAML = getLocalConfig(configFile, checkoutPath);
+parsedYAML = getLocalConfig(configFile, workspacePath);
 }
 else {
 parsedYAML = await getRemoteConfig(configFile, apiDetails);
@@ -441,7 +479,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, dbLocation,
 throw new Error(getNameInvalid(configFile));
 }
 }
-const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
+const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
 const queries = {};
 for (const language of languages) {
 queries[language] = {
@@ -466,23 +504,24 @@ async function loadConfig(languagesInput, queriesInput, configFile, dbLocation,
 // unless they're prefixed with "+", in which case they supplement those
 // in the config file.
 if (queriesInput) {
-await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
+await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
 }
 if (shouldAddConfigFileQueries(queriesInput) &&
 QUERIES_PROPERTY in parsedYAML) {
-if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
+const queriesArr = parsedYAML[QUERIES_PROPERTY];
+if (!Array.isArray(queriesArr)) {
 throw new Error(getQueriesInvalid(configFile));
 }
-for (const query of parsedYAML[QUERIES_PROPERTY]) {
+for (const query of queriesArr) {
 if (!(QUERIES_USES_PROPERTY in query) ||
 typeof query[QUERIES_USES_PROPERTY] !== "string") {
 throw new Error(getQueryUsesInvalid(configFile));
 }
-await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails, logger, configFile);
+await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
 }
 }
 if (PATHS_IGNORE_PROPERTY in parsedYAML) {
-if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
+if (!Array.isArray(parsedYAML[PATHS_IGNORE_PROPERTY])) {
 throw new Error(getPathsIgnoreInvalid(configFile));
 }
 for (const ignorePath of parsedYAML[PATHS_IGNORE_PROPERTY]) {
@@ -493,7 +532,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, dbLocation,
 }
 }
 if (PATHS_PROPERTY in parsedYAML) {
-if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
+if (!Array.isArray(parsedYAML[PATHS_PROPERTY])) {
 throw new Error(getPathsInvalid(configFile));
 }
 for (const includePath of parsedYAML[PATHS_PROPERTY]) {
@@ -503,11 +542,13 @@ async function loadConfig(languagesInput, queriesInput, configFile, dbLocation,
 paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
 }
 }
+const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
 return {
 languages,
 queries,
 pathsIgnore,
 paths,
+packs,
 originalUserInput: parsedYAML,
 tempDir,
 toolCacheDir,
@@ -516,6 +557,120 @@ async function loadConfig(languagesInput, queriesInput, configFile, dbLocation,
 dbLocation: dbLocationOrDefault(dbLocation, tempDir),
 };
 }
+/**
+* Pack names must be in the form of `scope/name`, with only alpha-numeric characters,
+* and `-` allowed as long as not the first or last char.
+**/
+const PACK_IDENTIFIER_PATTERN = (function () {
+const alphaNumeric = "[a-z0-9]";
+const alphaNumericDash = "[a-z0-9-]";
+const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`;
+return new RegExp(`^${component}/${component}$`);
+})();
+// Exported for testing
+function parsePacksFromConfig(packsByLanguage, languages, configFile) {
+const packs = {};
+if (Array.isArray(packsByLanguage)) {
+if (languages.length === 1) {
+// single language analysis, so language is implicit
+packsByLanguage = {
+[languages[0]]: packsByLanguage,
+};
+}
+else {
+// this is an error since multi-language analysis requires
+// packs split by language
+throw new Error(getPacksInvalidSplit(configFile));
+}
+}
+for (const [lang, packsArr] of Object.entries(packsByLanguage)) {
+if (!Array.isArray(packsArr)) {
+throw new Error(getPacksInvalid(configFile));
+}
+if (!languages.includes(lang)) {
+throw new Error(getPacksRequireLanguage(lang, configFile));
+}
+packs[lang] = [];
+for (const packStr of packsArr) {
+packs[lang].push(toPackWithVersion(packStr, configFile));
+}
+}
+return packs;
+}
+exports.parsePacksFromConfig = parsePacksFromConfig;
+function parsePacksFromInput(packsInput, languages) {
+if (!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim())) {
+return undefined;
+}
+if (languages.length > 1) {
+throw new Error("Cannot specify a 'packs' input in a multi-language analysis. Use a codeql-config.yml file instead and specify packs by language.");
+}
+else if (languages.length === 0) {
+throw new Error("No languages specified. Cannot process the packs input.");
+}
+packsInput = packsInput.trim();
+if (packsInput.startsWith("+")) {
+packsInput = packsInput.substring(1).trim();
+if (!packsInput) {
+throw new Error("A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs.");
+}
+}
+return {
+[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
+packs.push(toPackWithVersion(pack, ""));
+return packs;
+}, []),
+};
+}
+function toPackWithVersion(packStr, configFile) {
+if (typeof packStr !== "string") {
+throw new Error(getPacksStrInvalid(packStr, configFile));
+}
+const nameWithVersion = packStr.trim().split("@");
+let version;
+if (nameWithVersion.length > 2 ||
+!PACK_IDENTIFIER_PATTERN.test(nameWithVersion[0])) {
+throw new Error(getPacksStrInvalid(packStr, configFile));
+}
+else if (nameWithVersion.length === 2) {
+version = semver.clean(nameWithVersion[1]) || undefined;
+if (!version) {
+throw new Error(getPacksStrInvalid(packStr, configFile));
+}
+}
+return {
+packName: nameWithVersion[0].trim(),
+version,
+};
+}
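Putting the pack-name rule and toPackWithVersion together, a few concrete cases may be useful; the pack names are illustrative and the expected behaviour follows the code above, which is not exported on its own:

// Valid: scope and name are lower-case alphanumerics with interior dashes.
toPackWithVersion("codeql/javascript-queries", "");
//   => { packName: "codeql/javascript-queries", version: undefined }
toPackWithVersion("octo-org/security-queries@1.2.3", "");
//   => { packName: "octo-org/security-queries", version: "1.2.3" }
// Invalid: rejected by PACK_IDENTIFIER_PATTERN or by semver.clean, so getPacksStrInvalid is raised.
//   "no-scope-name"            (missing the scope/ prefix)
//   "-bad/name"                (a component may not start or end with "-")
//   "octo-org/queries@latest"  ("latest" is not a cleanable semver version)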
+// exported for testing
+function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
+const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
+const packsFomConfig = parsePacksFromConfig(rawPacksFromConfig, languages, configFile);
+if (!packsFromInput) {
+return packsFomConfig;
+}
+if (!shouldCombinePacks(rawPacksInput)) {
+return packsFromInput;
+}
+return combinePacks(packsFromInput, packsFomConfig);
+}
+exports.parsePacks = parsePacks;
+function shouldCombinePacks(packsInput) {
+return !!(packsInput === null || packsInput === void 0 ? void 0 : packsInput.trim().startsWith("+"));
+}
+function combinePacks(packs1, packs2) {
+const packs = {};
+for (const lang of Object.keys(packs1)) {
+packs[lang] = packs1[lang].concat(packs2[lang] || []);
+}
+for (const lang of Object.keys(packs2)) {
+if (!packs[lang]) {
+packs[lang] = packs2[lang];
+}
+}
+return packs;
+}
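parsePacks gives the workflow input precedence unless it is prefixed with "+", in which case combinePacks merges it with the packs from the config file. A short sketch of the single-language case (names and file paths are illustrative):

const languages = ["javascript"];
const rawConfigPacks = { javascript: ["octo-org/config-pack"] }; // as parsed from the config file YAML
// Input without "+": the workflow input replaces the config file packs.
parsePacks(rawConfigPacks, "octo-org/input-pack@1.0.0", languages, "my-config.yml");
//   => { javascript: [{ packName: "octo-org/input-pack", version: "1.0.0" }] }
// Input with "+": the workflow input is combined with the config file packs.
parsePacks(rawConfigPacks, "+octo-org/input-pack@1.0.0", languages, "my-config.yml");
//   => { javascript: [{ packName: "octo-org/input-pack", version: "1.0.0" },
//                     { packName: "octo-org/config-pack", version: undefined }] }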
 function dbLocationOrDefault(dbLocation, tempDir) {
 return dbLocation || path.resolve(tempDir, "codeql_databases");
 }
@@ -525,22 +680,24 @@ function dbLocationOrDefault(dbLocation, tempDir) {
 * This will parse the config from the user input if present, or generate
 * a default config. The parsed config is then stored to a known location.
 */
-async function initConfig(languagesInput, queriesInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
+async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
+var _a, _b, _c;
 let config;
 // If no config file was provided create an empty one
 if (!configFile) {
 logger.debug("No configuration file was provided");
-config = await getDefaultConfig(languagesInput, queriesInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
+config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
 }
 else {
-config = await loadConfig(languagesInput, queriesInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
+config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
 }
 // The list of queries should not be empty for any language. If it is then
 // it is a user configuration error.
 for (const language of config.languages) {
-if (config.queries[language] === undefined ||
+const hasBuiltinQueries = ((_a = config.queries[language]) === null || _a === void 0 ? void 0 : _a.builtin.length) > 0;
-(config.queries[language].builtin.length === 0 &&
+const hasCustomQueries = ((_b = config.queries[language]) === null || _b === void 0 ? void 0 : _b.custom.length) > 0;
-config.queries[language].custom.length === 0)) {
+const hasPacks = (((_c = config.packs[language]) === null || _c === void 0 ? void 0 : _c.length) || 0) > 0;
+if (!hasPacks && !hasBuiltinQueries && !hasCustomQueries) {
 throw new Error(`Did not detect any queries to run for ${language}. ` +
 "Please make sure that the default queries are enabled, or you are specifying queries to run.");
 }
@@ -557,16 +714,16 @@ function isLocal(configPath) {
 }
 return configPath.indexOf("@") === -1;
 }
-function getLocalConfig(configFile, checkoutPath) {
+function getLocalConfig(configFile, workspacePath) {
 // Error if the config file is now outside of the workspace
-if (!(configFile + path.sep).startsWith(checkoutPath + path.sep)) {
+if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
 throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
 }
 // Error if the file does not exist
 if (!fs.existsSync(configFile)) {
 throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
 }
-return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
+return yaml.load(fs.readFileSync(configFile, "utf8"));
 }
 async function getRemoteConfig(configFile, apiDetails) {
 // retrieve the various parts of the config location, and ensure they're present
@@ -577,7 +734,7 @@ async function getRemoteConfig(configFile, apiDetails) {
 throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
 }
 const response = await api
-.getApiClient(apiDetails, { allowLocalRun: true, allowExternal: true })
+.getApiClient(apiDetails, { allowExternal: true })
 .repos.getContent({
 owner: pieces.groups.owner,
 repo: pieces.groups.repo,
@@ -594,7 +751,7 @@ async function getRemoteConfig(configFile, apiDetails) {
 else {
 throw new Error(getConfigFileFormatInvalidMessage(configFile));
 }
-return yaml.safeLoad(Buffer.from(fileContents, "base64").toString("binary"));
+return yaml.load(Buffer.from(fileContents, "base64").toString("binary"));
 }
 /**
 * Get the file path where the parsed config will be stored.
File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated, 287 changed lines)
@@ -1,9 +1,21 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -14,7 +26,8 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const github = __importStar(require("@actions/github"));
 const ava_1 = __importDefault(require("ava"));
-const sinon_1 = __importDefault(require("sinon"));
+const semver_1 = require("semver");
+const sinon = __importStar(require("sinon"));
 const api = __importStar(require("./api-client"));
 const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
@@ -41,10 +54,10 @@ function mockGetContents(content) {
 const response = {
 data: content,
 };
-const spyGetContents = sinon_1.default
+const spyGetContents = sinon
 .stub(client.repos, "getContent")
 .resolves(response);
-sinon_1.default.stub(api, "getApiClient").value(() => client);
+sinon.stub(api, "getApiClient").value(() => client);
 return spyGetContents;
 }
 function mockListLanguages(languages) {
@@ -56,8 +69,8 @@ function mockListLanguages(languages) {
 for (const language of languages) {
 response.data[language] = 123;
 }
-sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
+sinon.stub(client.repos, "listLanguages").resolves(response);
-sinon_1.default.stub(api, "getApiClient").value(() => client);
+sinon.stub(api, "getApiClient").value(() => client);
 }
 ava_1.default("load empty config", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
@@ -75,8 +88,8 @@ ava_1.default("load empty config", async (t) => {
 };
 },
 });
-const config = await configUtils.initConfig(languages, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
+const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
-t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
+t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
 });
 });
 ava_1.default("loading config saves config", async (t) => {
@@ -98,7 +111,7 @@ ava_1.default("loading config saves config", async (t) => {
 t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
 // Sanity check that getConfig returns undefined before we have called initConfig
 t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
-const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
+const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
 // The saved config file should now exist
 t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
 // And that same newly-initialised config should now be returned by getConfig
@@ -109,7 +122,7 @@ ava_1.default("loading config saves config", async (t) => {
 ava_1.default("load input outside of workspace", async (t) => {
 return await util.withTmpDir(async (tmpDir) => {
 try {
-await configUtils.initConfig(undefined, undefined, "../input", undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
+await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
 throw new Error("initConfig did not throw error");
 }
 catch (err) {
@@ -122,7 +135,7 @@ ava_1.default("load non-local input with invalid repo syntax", async (t) => {
 // no filename given, just a repo
 const configFile = "octo-org/codeql-config@main";
try {
|
try {
|
||||||
await configUtils.initConfig(undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -136,7 +149,7 @@ ava_1.default("load non-existent input", async (t) => {
|
|||||||
const configFile = "input";
|
const configFile = "input";
|
||||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(languages, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -200,10 +213,11 @@ ava_1.default("load non-empty input", async (t) => {
|
|||||||
codeQLCmd: codeQL.getPath(),
|
codeQLCmd: codeQL.getPath(),
|
||||||
gitHubVersion,
|
gitHubVersion,
|
||||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||||
|
packs: {},
|
||||||
};
|
};
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||||
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
// Should exactly equal the object we constructed earlier
|
// Should exactly equal the object we constructed earlier
|
||||||
t.deepEqual(actualConfig, expectedConfig);
|
t.deepEqual(actualConfig, expectedConfig);
|
||||||
});
|
});
|
||||||
@@ -239,7 +253,7 @@ ava_1.default("Default queries are used", async (t) => {
|
|||||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||||
await configUtils.initConfig(languages, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
// Check resolve queries was called correctly
|
// Check resolve queries was called correctly
|
||||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
t.deepEqual(resolveQueriesArgs[0].queries, [
|
||||||
@@ -282,7 +296,7 @@ ava_1.default("Queries can be specified in config file", async (t) => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for the default queries
|
// It'll be called once for the default queries
|
||||||
// and once for `./foo` from the config file.
|
// and once for `./foo` from the config file.
|
||||||
@@ -315,7 +329,7 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for the default queries and once for `./override`,
|
// It'll be called once for the default queries and once for `./override`,
|
||||||
// but won't be called for './foo' from the config file.
|
// but won't be called for './foo' from the config file.
|
||||||
@@ -347,7 +361,7 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for `./workflow-query`,
|
// It'll be called once for `./workflow-query`,
|
||||||
// but won't be called for the default one since that was disabled
|
// but won't be called for the default one since that was disabled
|
||||||
@@ -373,7 +387,7 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
// Check resolveQueries was called correctly:
|
// Check resolveQueries was called correctly:
|
||||||
// It'll be called once for the default queries,
|
// It'll be called once for the default queries,
|
||||||
// and then once for each of the two queries from the workflow
|
// and then once for each of the two queries from the workflow
|
||||||
@@ -412,7 +426,7 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const config = await configUtils.initConfig(languages, testQueries, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
// Check resolveQueries was called correctly
|
// Check resolveQueries was called correctly
|
||||||
// It'll be called once for the default queries,
|
// It'll be called once for the default queries,
|
||||||
// once for each of additional1 and additional2,
|
// once for each of additional1 and additional2,
|
||||||
@@ -451,7 +465,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(languages, queries, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
t.fail("initConfig did not throw error");
|
t.fail("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -494,7 +508,7 @@ ava_1.default("API client used when reading remote config", async (t) => {
|
|||||||
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
||||||
const configFile = "octo-org/codeql-config/config.yaml@main";
|
const configFile = "octo-org/codeql-config/config.yaml@main";
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
await configUtils.initConfig(languages, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
t.assert(spyGetContents.called);
|
t.assert(spyGetContents.called);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -504,7 +518,7 @@ ava_1.default("Remote config handles the case where a directory is provided", as
|
|||||||
mockGetContents(dummyResponse);
|
mockGetContents(dummyResponse);
|
||||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -520,7 +534,7 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
|
|||||||
mockGetContents(dummyResponse);
|
mockGetContents(dummyResponse);
|
||||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -531,8 +545,13 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
|
|||||||
ava_1.default("No detected languages", async (t) => {
|
ava_1.default("No detected languages", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
mockListLanguages([]);
|
mockListLanguages([]);
|
||||||
|
const codeQL = codeql_1.setCodeQL({
|
||||||
|
async resolveLanguages() {
|
||||||
|
return {};
|
||||||
|
},
|
||||||
|
});
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -542,16 +561,111 @@ ava_1.default("No detected languages", async (t) => {
|
|||||||
});
|
});
|
||||||
ava_1.default("Unknown languages", async (t) => {
|
ava_1.default("Unknown languages", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
const languages = "ruby,english";
|
const languages = "rubbish,english";
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(languages, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(["ruby", "english"])));
|
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(["rubbish", "english"])));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
ava_1.default("Config specifies packages", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeQL = codeql_1.setCodeQL({
|
||||||
|
async resolveQueries() {
|
||||||
|
return {
|
||||||
|
byLanguage: {},
|
||||||
|
noDeclaredLanguage: {},
|
||||||
|
multipleDeclaredLanguages: {},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
});
|
||||||
|
const inputFileContents = `
|
||||||
|
name: my config
|
||||||
|
disable-default-queries: true
|
||||||
|
packs:
|
||||||
|
- a/b@1.2.3
|
||||||
|
`;
|
||||||
|
const configFile = path.join(tmpDir, "codeql-config.yaml");
|
||||||
|
fs.writeFileSync(configFile, inputFileContents);
|
||||||
|
const languages = "javascript";
|
||||||
|
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
|
t.deepEqual(packs, {
|
||||||
|
[languages_1.Language.javascript]: [
|
||||||
|
{
|
||||||
|
packName: "a/b",
|
||||||
|
version: semver_1.clean("1.2.3"),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
ava_1.default("Config specifies packages for multiple languages", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeQL = codeql_1.setCodeQL({
|
||||||
|
async resolveQueries() {
|
||||||
|
return {
|
||||||
|
byLanguage: {
|
||||||
|
cpp: { "/foo/a.ql": {} },
|
||||||
|
},
|
||||||
|
noDeclaredLanguage: {},
|
||||||
|
multipleDeclaredLanguages: {},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
});
|
||||||
|
const inputFileContents = `
|
||||||
|
name: my config
|
||||||
|
disable-default-queries: true
|
||||||
|
queries:
|
||||||
|
- uses: ./foo
|
||||||
|
packs:
|
||||||
|
javascript:
|
||||||
|
- a/b@1.2.3
|
||||||
|
python:
|
||||||
|
- c/d@1.2.3
|
||||||
|
`;
|
||||||
|
const configFile = path.join(tmpDir, "codeql-config.yaml");
|
||||||
|
fs.writeFileSync(configFile, inputFileContents);
|
||||||
|
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||||
|
const languages = "javascript,python,cpp";
|
||||||
|
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
|
t.deepEqual(packs, {
|
||||||
|
[languages_1.Language.javascript]: [
|
||||||
|
{
|
||||||
|
packName: "a/b",
|
||||||
|
version: semver_1.clean("1.2.3"),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
[languages_1.Language.python]: [
|
||||||
|
{
|
||||||
|
packName: "c/d",
|
||||||
|
version: semver_1.clean("1.2.3"),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
t.deepEqual(queries, {
|
||||||
|
cpp: {
|
||||||
|
builtin: [],
|
||||||
|
custom: [
|
||||||
|
{
|
||||||
|
queries: ["/foo/a.ql"],
|
||||||
|
searchPath: tmpDir,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
javascript: {
|
||||||
|
builtin: [],
|
||||||
|
custom: [],
|
||||||
|
},
|
||||||
|
python: {
|
||||||
|
builtin: [],
|
||||||
|
custom: [],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
|
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
|
||||||
ava_1.default(`load invalid input - ${testName}`, async (t) => {
|
ava_1.default(`load invalid input - ${testName}`, async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
@@ -569,7 +683,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
|||||||
const inputFile = path.join(tmpDir, configFile);
|
const inputFile = path.join(tmpDir, configFile);
|
||||||
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
||||||
try {
|
try {
|
||||||
await configUtils.initConfig(languages, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
|
||||||
throw new Error("initConfig did not throw error");
|
throw new Error("initConfig did not throw error");
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
@@ -639,4 +753,119 @@ ava_1.default("path sanitisation", (t) => {
|
|||||||
// Trailing stars are stripped
|
// Trailing stars are stripped
|
||||||
t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/");
|
t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/");
|
||||||
});
|
});
|
||||||
|
/**
|
||||||
|
* Test macro for ensuring the packs block is valid
|
||||||
|
*/
|
||||||
|
function parsePacksMacro(t, packsByLanguage, languages, expected) {
|
||||||
|
t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected);
|
||||||
|
}
|
||||||
|
parsePacksMacro.title = (providedTitle) => `Parse Packs: ${providedTitle}`;
|
||||||
|
/**
|
||||||
|
* Test macro for testing when the packs block is invalid
|
||||||
|
*/
|
||||||
|
function parsePacksErrorMacro(t, packsByLanguage, languages, expected) {
|
||||||
|
t.throws(() => {
|
||||||
|
configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b");
|
||||||
|
}, {
|
||||||
|
message: expected,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
parsePacksErrorMacro.title = (providedTitle) => `Parse Packs Error: ${providedTitle}`;
|
||||||
|
/**
|
||||||
|
* Test macro for testing when the packs block is invalid
|
||||||
|
*/
|
||||||
|
function invalidPackNameMacro(t, name) {
|
||||||
|
parsePacksErrorMacro(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`));
|
||||||
|
}
|
||||||
|
invalidPackNameMacro.title = (_, arg) => `Invalid pack string: ${arg}`;
|
||||||
|
ava_1.default("no packs", parsePacksMacro, {}, [], {});
|
||||||
|
ava_1.default("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "a/b", version: undefined },
|
||||||
|
{ packName: "c/d", version: semver_1.clean("1.2.3") },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "a/b", version: undefined },
|
||||||
|
{ packName: "c/d", version: semver_1.clean("1.2.3") },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("two packs with language", parsePacksMacro, {
|
||||||
|
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||||
|
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||||
|
}, [languages_1.Language.cpp, languages_1.Language.java, languages_1.Language.csharp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "a/b", version: undefined },
|
||||||
|
{ packName: "c/d", version: semver_1.clean("1.2.3") },
|
||||||
|
],
|
||||||
|
[languages_1.Language.java]: [
|
||||||
|
{ packName: "d/e", version: undefined },
|
||||||
|
{ packName: "f/g", version: semver_1.clean("1.2.3") },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
|
||||||
|
ava_1.default("invalid language", parsePacksErrorMacro, { [languages_1.Language.java]: ["c/d"] }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" has "java", but it is not one of the languages to analyze/);
|
||||||
|
ava_1.default("not an array", parsePacksErrorMacro, { [languages_1.Language.cpp]: "c/d" }, [languages_1.Language.cpp], /The configuration file "\/a\/b" is invalid: property "packs" must be an array of non-empty strings/);
|
||||||
|
ava_1.default(invalidPackNameMacro, "c"); // all packs require at least a scope and a name
|
||||||
|
ava_1.default(invalidPackNameMacro, "c-/d");
|
||||||
|
ava_1.default(invalidPackNameMacro, "-c/d");
|
||||||
|
ava_1.default(invalidPackNameMacro, "c/d_d");
|
||||||
|
ava_1.default(invalidPackNameMacro, "c/d@x");
|
||||||
|
/**
|
||||||
|
* Test macro for testing the packs block and the packs input
|
||||||
|
*/
|
||||||
|
function parseInputAndConfigMacro(t, packsFromConfig, packsFromInput, languages, expected) {
|
||||||
|
t.deepEqual(configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b"), expected);
|
||||||
|
}
|
||||||
|
parseInputAndConfigMacro.title = (providedTitle) => `Parse Packs input and config: ${providedTitle}`;
|
||||||
|
function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, languages, expected) {
|
||||||
|
t.throws(() => {
|
||||||
|
configUtils.parsePacks(packsFromConfig, packsFromInput, languages, "/a/b");
|
||||||
|
}, {
|
||||||
|
message: expected,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and config Error: ${providedTitle}`;
|
||||||
|
ava_1.default("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [{ packName: "c/d", version: undefined }],
|
||||||
|
});
|
||||||
|
ava_1.default("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "a/b", version: undefined },
|
||||||
|
{ packName: "c/d", version: "1.2.3" },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "a/b", version: undefined },
|
||||||
|
{ packName: "c/d", version: "1.2.3" },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "a/b", version: undefined },
|
||||||
|
{ packName: "c/d", version: undefined },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "e/f", version: undefined },
|
||||||
|
{ packName: "g/h", version: "1.2.3" },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
||||||
|
[languages_1.Language.cpp]: [
|
||||||
|
{ packName: "e/f", version: undefined },
|
||||||
|
{ packName: "g/h", version: "1.2.3" },
|
||||||
|
{ packName: "a/b", version: undefined },
|
||||||
|
{ packName: "c/d", version: undefined },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
ava_1.default("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
|
||||||
|
ava_1.default("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
||||||
|
ava_1.default("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
|
||||||
|
ava_1.default("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
|
||||||
|
// errors
|
||||||
|
// input w invalid pack name
|
||||||
//# sourceMappingURL=config-utils.test.js.map
|
//# sourceMappingURL=config-utils.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
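Reviewer note on the config-utils changes above: every `initConfig` call gains one extra argument (an additional `undefined` in these tests), presumably the slot through which the new packs input is threaded, and the new tests exercise a `packs` block in the CodeQL config file together with a packs input. The `parsePacks` cases ("config only", "input overrides", "input and config") imply a simple merge rule: packs supplied on the input replace the config file's packs unless the input starts with "+", in which case both lists are kept with the input first. The snippet below is a minimal illustrative sketch of that rule for a single language; `mergePacks` is a made-up helper name, not code from this repository.

// Illustrative only: merge the "packs" workflow input with the packs from the
// config file for one language, following the behaviour shown by the tests above.
function mergePacks(configPacks, packsInput) {
  const trimmed = (packsInput || "").trim();
  if (trimmed === "") {
    return configPacks; // "config only"
  }
  const append = trimmed.startsWith("+");
  const inputPacks = (append ? trimmed.slice(1) : trimmed)
    .split(",")
    .map((p) => p.trim())
    .filter((p) => p.length > 0);
  // "input overrides" unless the input began with "+" ("input and config")
  return append ? [...inputPacks, ...configPacks] : inputPacks;
}
console.log(mergePacks(["a/b", "c/d"], " +e/f, g/h@1.2.3 "));
// -> [ 'e/f', 'g/h@1.2.3', 'a/b', 'c/d' ]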
37  lib/count-loc.js  (generated)
@@ -1,8 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.countLoc = void 0;
 const github_linguist_1 = require("github-linguist");
 const languages_1 = require("./languages");
-const util_1 = require("./util");
 // Map from linguist language names to language prefixes used in the action and codeql
 const linguistToMetrics = {
 c: languages_1.Language.cpp,
@@ -12,6 +12,7 @@ const linguistToMetrics = {
 java: languages_1.Language.java,
 javascript: languages_1.Language.javascript,
 python: languages_1.Language.python,
+ruby: languages_1.Language.ruby,
 typescript: languages_1.Language.javascript,
 };
 const nameToLinguist = Object.entries(linguistToMetrics).reduce((obj, [key, name]) => {
@@ -21,25 +22,6 @@ const nameToLinguist = Object.entries(linguistToMetrics).reduce((obj, [key, name
 obj[name].push(key);
 return obj;
 }, {});
-function getIdPrefix(language) {
-switch (language) {
-case languages_1.Language.cpp:
-return "cpp";
-case languages_1.Language.csharp:
-return "cs";
-case languages_1.Language.go:
-return "go";
-case languages_1.Language.java:
-return "java";
-case languages_1.Language.javascript:
-return "js";
-case languages_1.Language.python:
-return "py";
-default:
-util_1.assertNever(language);
-}
-}
-exports.getIdPrefix = getIdPrefix;
 /**
 * Count the lines of code of the specified language using the include
 * and exclude glob paths.
@@ -74,14 +56,13 @@ async function countLoc(cwd, include, exclude, dbLanguages, logger) {
 }
 }
 else {
-logger.info("Could not determine the total number of lines of code in this repository. " +
+logger.info("Could not determine the baseline lines of code count in this repository. " +
-"Because of this, it will not be possible to compare the number of lines " +
+"Because of this, it will not be possible to compare the lines " +
-"of code analyzed by code scanning with the total number of lines of " +
+"of code analyzed by code scanning with the baseline. This will not affect " +
-"code in the repository. This will not affect the results produced by code " +
+"the results produced by code scanning. If you have any questions, you can " +
-"scanning. If you have any questions, you can raise an issue at " +
+"raise an issue at https://github.com/github/codeql-action/issues. Please " +
-"https://github.com/github/codeql-action/issues. Please include a link " +
+"include a link to the repository if public, or otherwise information about " +
-"to the repository if public, or otherwise information about the code scanning " +
+"the code scanning workflow you are using.");
-"workflow you are using.");
 }
 return lineCounts;
 }
lib/count-loc.js.map
@@ -1 +1 @@
{"version":3,"file":"count-loc.js","sourceRoot":"","sources":["../src/count-loc.ts"],"names":[],"mappings":";;AAAA,qDAAyC;AAEzC,2CAAuC;AAEvC,iCAAqC;AAKrC,sFAAsF;AACtF,MAAM,iBAAiB,GAA6B;IAClD,CAAC,EAAE,oBAAQ,CAAC,GAAG;IACf,KAAK,EAAE,oBAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,oBAAQ,CAAC,MAAM;IACrB,EAAE,EAAE,oBAAQ,CAAC,EAAE;IACf,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;IAC/B,MAAM,EAAE,oBAAQ,CAAC,MAAM;IACvB,UAAU,EAAE,oBAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,MAAM,cAAc,GAAG,MAAM,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC,MAAM,CAC7D,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,EAAE;IACnB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACd,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;KAChB;IACD,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACpB,OAAO,GAAG,CAAC;AACb,CAAC,EACD,EAAgC,CACjC,CAAC;AAEF,SAAgB,WAAW,CAAC,QAAkB;IAC5C,QAAQ,QAAQ,EAAE;QAChB,KAAK,oBAAQ,CAAC,GAAG;YACf,OAAO,KAAK,CAAC;QACf,KAAK,oBAAQ,CAAC,MAAM;YAClB,OAAO,IAAI,CAAC;QACd,KAAK,oBAAQ,CAAC,EAAE;YACd,OAAO,IAAI,CAAC;QACd,KAAK,oBAAQ,CAAC,IAAI;YAChB,OAAO,MAAM,CAAC;QAChB,KAAK,oBAAQ,CAAC,UAAU;YACtB,OAAO,IAAI,CAAC;QACd,KAAK,oBAAQ,CAAC,MAAM;YAClB,OAAO,IAAI,CAAC;QAEd;YACE,kBAAW,CAAC,QAAQ,CAAC,CAAC;KACzB;AACH,CAAC;AAlBD,kCAkBC;AAED;;;;;;;;;GASG;AACI,KAAK,UAAU,QAAQ,CAC5B,GAAW,EACX,OAAiB,EACjB,OAAiB,EACjB,WAAuB,EACvB,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAI,wBAAM,CAAC;QAC9B,GAAG;QACH,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACxE,OAAO;QACP,iBAAiB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;KACvE,CAAC,CAAC,QAAQ,EAAE,CAAC;IAEd,uDAAuD;IACvD,uDAAuD;IACvD,2DAA2D;IAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,MAAM,CACxD,CAAC,GAAG,EAAE,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,eAAe,GAAG,iBAAiB,CAAC,QAAQ,CAAC,CAAC;QACpD,IAAI,eAAe,IAAI,WAAW,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE;YAC5D,GAAG,CAAC,eAAe,CAAC,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;SAC3D;QACD,OAAO,GAAG,CAAC;IACb,CAAC,EACD,EAA8B,CAC/B,CAAC;IAEF,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;QAClC,MAAM,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC;QACrC,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAC1D,MAAM,CAAC,KAAK,CAAC,KAAK,QAAQ,KAAK,KAAK,EAAE,CAAC,CAAC;SACzC;KACF;SAAM;QACL,MAAM,CAAC,IAAI,CACT,4EAA4E;YAC1E,0EAA0E;YAC1E,sEAAsE;YACtE,4EAA4E;YAC5E,iEAAiE;YACjE,wEAAwE;YACxE,gFAAgF;YAChF,yBAAyB,CAC5B,CAAC;KACH;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AA/CD,4BA+CC"}
{"version":3,"file":"count-loc.js","sourceRoot":"","sources":["../src/count-loc.ts"],"names":[],"mappings":";;;AAAA,qDAAyC;AAEzC,2CAAuC;AAGvC,sFAAsF;AACtF,MAAM,iBAAiB,GAA6B;IAClD,CAAC,EAAE,oBAAQ,CAAC,GAAG;IACf,KAAK,EAAE,oBAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,oBAAQ,CAAC,MAAM;IACrB,EAAE,EAAE,oBAAQ,CAAC,EAAE;IACf,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;IAC/B,MAAM,EAAE,oBAAQ,CAAC,MAAM;IACvB,IAAI,EAAE,oBAAQ,CAAC,IAAI;IACnB,UAAU,EAAE,oBAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,MAAM,cAAc,GAAG,MAAM,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC,MAAM,CAC7D,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,IAAI,CAAC,EAAE,EAAE;IACnB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACd,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;KAChB;IACD,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACpB,OAAO,GAAG,CAAC;AACb,CAAC,EACD,EAAgC,CACjC,CAAC;AAEF;;;;;;;;;GASG;AACI,KAAK,UAAU,QAAQ,CAC5B,GAAW,EACX,OAAiB,EACjB,OAAiB,EACjB,WAAuB,EACvB,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAI,wBAAM,CAAC;QAC9B,GAAG;QACH,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QACxE,OAAO;QACP,iBAAiB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC;KACvE,CAAC,CAAC,QAAQ,EAAE,CAAC;IAEd,uDAAuD;IACvD,uDAAuD;IACvD,2DAA2D;IAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,MAAM,CACxD,CAAC,GAAG,EAAE,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,eAAe,GAAG,iBAAiB,CAAC,QAAQ,CAAC,CAAC;QACpD,IAAI,eAAe,IAAI,WAAW,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE;YAC5D,GAAG,CAAC,eAAe,CAAC,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;SAC3D;QACD,OAAO,GAAG,CAAC;IACb,CAAC,EACD,EAA8B,CAC/B,CAAC;IAEF,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,EAAE;QAClC,MAAM,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC;QACrC,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAC1D,MAAM,CAAC,KAAK,CAAC,KAAK,QAAQ,KAAK,KAAK,EAAE,CAAC,CAAC;SACzC;KACF;SAAM;QACL,MAAM,CAAC,IAAI,CACT,2EAA2E;YACzE,gEAAgE;YAChE,4EAA4E;YAC5E,4EAA4E;YAC5E,2EAA2E;YAC3E,6EAA6E;YAC7E,2CAA2C,CAC9C,CAAC;KACH;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AA9CD,4BA8CC"}
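For orientation on the count-loc changes: the diff adds a `ruby` entry to the linguist mapping, exports `countLoc`, removes the now-unused `getIdPrefix` helper, and rewords the log message to describe a baseline lines-of-code count. A hedged usage sketch of the exported function follows; the paths, globs and languages are invented for illustration, and only the `countLoc(cwd, include, exclude, dbLanguages, logger)` signature comes from the code above.

// Illustrative call only; the argument values are made up.
const { countLoc } = require("./lib/count-loc");
const { getRunnerLogger } = require("./lib/logging");

async function printBaseline() {
  const counts = await countLoc(
    process.cwd(),            // directory to scan
    [],                       // include globs (empty means no restriction)
    ["**/*.test.js"],         // exclude globs
    ["javascript", "ruby"],   // languages a database is being built for
    getRunnerLogger(true)
  );
  console.log(counts);        // e.g. { javascript: 123, ruby: 45 }
}

printBaseline();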
28  lib/count-loc.test.js  (generated)
@@ -1,9 +1,21 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
 return result;
 };
 var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -21,13 +33,19 @@ ava_1.default("ensure lines of code works for cpp and js", async (t) => {
 const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.cpp, languages_1.Language.javascript], logging_1.getRunnerLogger(true));
 t.deepEqual(results, {
 cpp: 6,
-javascript: 3,
+javascript: 9,
+});
+});
+ava_1.default("ensure lines of code works for csharp", async (t) => {
+const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.csharp], logging_1.getRunnerLogger(true));
+t.deepEqual(results, {
+csharp: 10,
 });
 });
 ava_1.default("ensure lines of code can handle undefined language", async (t) => {
 const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], [], [languages_1.Language.javascript, languages_1.Language.python, "hucairz"], logging_1.getRunnerLogger(true));
 t.deepEqual(results, {
-javascript: 3,
+javascript: 9,
 python: 5,
 });
 });
@@ -54,7 +72,7 @@ ava_1.default("ensure lines of code can handle empty includes", async (t) => {
 ava_1.default("ensure lines of code can handle exclude", async (t) => {
 const results = await count_loc_1.countLoc(path.join(__dirname, "../tests/multi-language-repo"), [], ["**/*.py"], [languages_1.Language.javascript, languages_1.Language.python], logging_1.getRunnerLogger(true));
 t.deepEqual(results, {
-javascript: 3,
+javascript: 9,
 });
 });
 //# sourceMappingURL=count-loc.test.js.map
lib/count-loc.test.js.map
@@ -1 +1 @@
{"version":3,"file":"count-loc.test.js","sourceRoot":"","sources":["../src/count-loc.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,2CAAuC;AACvC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,GAAG,EAAE,oBAAQ,CAAC,UAAU,CAAC,EACnC,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,GAAG,EAAE,CAAC;QACN,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oDAAoD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrE,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,EAAE,SAAqB,CAAC,EAC7D,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;QACb,MAAM,EAAE,CAAC;KACV,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iDAAiD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClE,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,EAAE,EACF,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,oBAAoB,CAAC,EACtB,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,EAAE;KACf,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gDAAgD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjE,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,YAAY,CAAC,EACd,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;IACnB,wBAAwB;KACzB,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,CAAC,SAAS,CAAC,EACX,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,CAAC,EACtC,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"count-loc.test.js","sourceRoot":"","sources":["../src/count-loc.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,2CAAuC;AACvC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,GAAG,EAAE,oBAAQ,CAAC,UAAU,CAAC,EACnC,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,GAAG,EAAE,CAAC;QACN,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uCAAuC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACxD,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,MAAM,CAAC,EACjB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,MAAM,EAAE,EAAE;KACX,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oDAAoD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrE,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,EAAE,SAAqB,CAAC,EAC7D,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;QACb,MAAM,EAAE,CAAC;KACV,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iDAAiD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClE,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,EAAE,EACF,EAAE,EACF,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC3D,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,oBAAoB,CAAC,EACtB,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,EAAE;KACf,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gDAAgD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACjE,gEAAgE;IAChE,4CAA4C;IAC5C,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,CAAC,YAAY,CAAC,EACd,EAAE,EACF,CAAC,oBAAQ,CAAC,UAAU,CAAC,EACrB,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;IACnB,wBAAwB;KACzB,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,yCAAyC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1D,MAAM,OAAO,GAAG,MAAM,oBAAQ,CAC5B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,8BAA8B,CAAC,EACpD,EAAE,EACF,CAAC,SAAS,CAAC,EACX,CAAC,oBAAQ,CAAC,UAAU,EAAE,oBAAQ,CAAC,MAAM,CAAC,EACtC,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;IAEF,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE;QACnB,UAAU,EAAE,CAAC;KACd,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
85  lib/database-upload.js  (generated, Normal file)
@@ -0,0 +1,85 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.uploadDatabases = void 0;
+const fs = __importStar(require("fs"));
+const actionsUtil = __importStar(require("./actions-util"));
+const api_client_1 = require("./api-client");
+const codeql_1 = require("./codeql");
+const util = __importStar(require("./util"));
+async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
+if (actionsUtil.getRequiredInput("upload-database") !== "true") {
+logger.debug("Database upload disabled in workflow. Skipping upload.");
+return;
+}
+// Do nothing when not running against github.com
+if (config.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
+logger.debug("Not running against github.com. Skipping upload.");
+return;
+}
+if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
+// We only want to upload a database if we are analyzing the default branch.
+logger.debug("Not analyzing default branch. Skipping upload.");
+return;
+}
+const client = api_client_1.getApiClient(apiDetails);
+try {
+await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
+owner: repositoryNwo.owner,
+repo: repositoryNwo.repo,
+});
+}
+catch (e) {
+if (util.isHTTPError(e) && e.status === 404) {
+logger.debug("Repository is not opted in to database uploads. Skipping upload.");
+}
+else {
+console.log(e);
+logger.info(`Skipping database upload due to unknown error: ${e}`);
+}
+return;
+}
+const codeql = await codeql_1.getCodeQL(config.codeQLCmd);
+for (const language of config.languages) {
+// Bundle the database up into a single zip file
+const databasePath = util.getCodeQLDatabasePath(config, language);
+const databaseBundlePath = `${databasePath}.zip`;
+await codeql.databaseBundle(databasePath, databaseBundlePath);
+// Upload the database bundle
+const payload = fs.readFileSync(databaseBundlePath);
+try {
+await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
+owner: repositoryNwo.owner,
+repo: repositoryNwo.repo,
+language,
+data: payload,
+});
+logger.debug(`Successfully uploaded database for ${language}`);
+}
+catch (e) {
+console.log(e);
+// Log a warning but don't fail the workflow
+logger.warning(`Failed to upload database for ${language}: ${e}`);
+}
+}
+}
+exports.uploadDatabases = uploadDatabases;
+//# sourceMappingURL=database-upload.js.map
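The new lib/database-upload.js above only does work when the `upload-database` input is "true", the run targets github.com, and the default branch is being analyzed; it then bundles each language's database and PUTs it to the code scanning API. A minimal sketch of how it could be invoked is below; the surrounding values (`repositoryNwo`, `apiDetails`, the token) are stand-ins based on the shapes used in the tests that follow, not the action's actual wiring.

// Illustrative wiring only, not code from this diff.
const { uploadDatabases } = require("./lib/database-upload");
const { getRunnerLogger } = require("./lib/logging");

async function afterAnalysis(config) {
  const repositoryNwo = { owner: "github", repo: "example" };
  const apiDetails = { auth: process.env.GITHUB_TOKEN, url: "https://github.com" };
  // No-op unless upload-database is "true", the target is github.com and the
  // default branch is being analyzed (see the early returns above).
  await uploadDatabases(repositoryNwo, config, apiDetails, getRunnerLogger(true));
}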
1  lib/database-upload.js.map  (Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAExB,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,yBAAY,CAAC,UAAU,CAAC,CAAC;IACxC,IAAI;QACF,MAAM,MAAM,CAAC,OAAO,CAClB,wDAAwD,EACxD;YACE,KAAK,EAAE,aAAa,CAAC,KAAK;YAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;SACzB,CACF,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3C,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;SACH;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,MAAM,CAAC,IAAI,CAAC,kDAAkD,CAAC,EAAE,CAAC,CAAC;SACpE;QACD,OAAO;KACR;IAED,MAAM,MAAM,GAAG,MAAM,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,gDAAgD;QAChD,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,kBAAkB,GAAG,GAAG,YAAY,MAAM,CAAC;QACjD,MAAM,MAAM,CAAC,cAAc,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC;QAE9D,6BAA6B;QAC7B,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,kBAAkB,CAAC,CAAC;QACpD,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,OAAO;aACd,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAtED,0CAsEC"}
248  lib/database-upload.test.js  (generated, Normal file)
@@ -0,0 +1,248 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const apiClient = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const database_upload_1 = require("./database-upload");
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
testing_utils_1.setupTests(ava_1.default);
ava_1.default.beforeEach(() => {
util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
});
const testRepoName = { owner: "github", repo: "example" };
const testApiDetails = {
auth: "1234",
url: "https://github.com",
};
function getTestConfig(tmpDir) {
return {
languages: [languages_1.Language.javascript],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "foo",
gitHubVersion: { type: util_1.GitHubVariant.DOTCOM },
dbLocation: tmpDir,
packs: {},
};
}
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
if (optInStatusCode < 300) {
optInSpy.resolves(undefined);
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
}
if (databaseUploadStatusCode !== undefined) {
const databaseUploadSpy = requestSpy.withArgs("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language");
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
ava_1.default("Abort database upload if 'upload-database' input set to false", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if running against GHES", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if running against GHAE", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if not analyzing default branch", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if opt-in request returns 404", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(404);
codeql_1.setCodeQL({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Repository is not opted in to database uploads. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if opt-in request fails with something other than 404", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(500);
codeql_1.setCodeQL({
async databaseBundle() {
return;
},
});
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "info" &&
v.message ===
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
});
});
ava_1.default("Don't crash if uploading a database fails", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 500);
codeql_1.setCodeQL({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: Error: some error message") !== undefined);
});
});
ava_1.default("Successfully uploading a database", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 201);
codeql_1.setCodeQL({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});
});
//# sourceMappingURL=database-upload.test.js.map
1 lib/database-upload.test.js.map Normal file
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
{
-"bundleVersion": "codeql-bundle-20210517"
+"bundleVersion": "codeql-bundle-20210907"
}
1 lib/error-matcher.js generated
@@ -1,5 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.errorMatchers = exports.namedMatchersForTesting = void 0;
// exported only for testing purposes
exports.namedMatchersForTesting = {
/*
@@ -1 +1 @@
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
17 lib/external-queries.js generated
@@ -1,12 +1,25 @@
"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
+exports.buildCheckoutURL = exports.checkoutExternalRepository = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAKpD;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,UAAwC,EACxC,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,YAAY,GAAG,gBAAgB,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAC9D,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,OAAO;YACP,YAAY;YACZ,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC;AAED,SAAgB,gBAAgB,CAC9B,UAAkB,EAClB,UAAwC;IAExC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC7C,IAAI,UAAU,CAAC,gBAAgB,KAAK,SAAS,EAAE;QAC7C,YAAY,CAAC,QAAQ,GAAG,gBAAgB,CAAC;QACzC,YAAY,CAAC,QAAQ,GAAG,UAAU,CAAC,gBAAgB,CAAC;KACrD;IACD,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QACxC,YAAY,CAAC,QAAQ,IAAI,GAAG,CAAC;KAC9B;IACD,YAAY,CAAC,QAAQ,IAAI,GAAG,UAAU,EAAE,CAAC;IACzC,OAAO,YAAY,CAAC,QAAQ,EAAE,CAAC;AACjC,CAAC;AAdD,4CAcC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAKpD;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,UAAwC,EACxC,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,YAAY,GAAG,gBAAgB,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAC9D,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,OAAO;YACP,YAAY;YACZ,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC;AAED,SAAgB,gBAAgB,CAC9B,UAAkB,EAClB,UAAwC;IAExC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC7C,IAAI,UAAU,CAAC,gBAAgB,KAAK,SAAS,EAAE;QAC7C,YAAY,CAAC,QAAQ,GAAG,gBAAgB,CAAC;QACzC,YAAY,CAAC,QAAQ,GAAG,UAAU,CAAC,gBAAgB,CAAC;KACrD;IACD,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QACxC,YAAY,CAAC,QAAQ,IAAI,GAAG,CAAC;KAC9B;IACD,YAAY,CAAC,QAAQ,IAAI,GAAG,UAAU,EAAE,CAAC;IACzC,OAAO,YAAY,CAAC,QAAQ,EAAE,CAAC;AACjC,CAAC;AAdD,4CAcC"}
16 lib/external-queries.test.js generated
@@ -1,9 +1,21 @@
"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EA
CR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,C
AC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}
53 lib/fingerprints.js generated
@@ -1,21 +1,35 @@
"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
+exports.addFingerprints = exports.resolveUriToFile = exports.hash = void 0;
const fs = __importStar(require("fs"));
const long_1 = __importDefault(require("long"));
const tab = "\t".charCodeAt(0);
const space = " ".charCodeAt(0);
const lf = "\n".charCodeAt(0);
const cr = "\r".charCodeAt(0);
+const EOF = 65535;
const BLOCK_SIZE = 100;
const MOD = long_1.default.fromInt(37); // L
// Compute the starting point for the hash mod
@@ -36,9 +50,9 @@ function computeFirstMod() {
* the hashes of the lines near the end of the file.
*
* @param callback function that is called with the line number (1-based) and hash for every line
-* @param input The file's contents
+* @param filepath The path to the file to hash
*/
-function hash(callback, input) {
+async function hash(callback, filepath) {
// A rolling view in to the input
const window = Array(BLOCK_SIZE).fill(0);
// If the character in the window is the start of a new line
@@ -82,12 +96,11 @@ function hash(callback, input) {
// as we go. Once we reach a point in the window again then we've processed
// BLOCK_SIZE characters and if the last character at this point in the window
// was the start of a line then we should output the hash for that line.
-for (let i = 0, len = input.length; i <= len; i++) {
+const processCharacter = function (current) {
-let current = i === len ? 65535 : input.charCodeAt(i);
// skip tabs, spaces, and line feeds that come directly after a carriage return
if (current === space || current === tab || (prevCR && current === lf)) {
prevCR = false;
-continue;
+return;
}
// replace CR with LF
if (current === cr) {
@@ -109,7 +122,14 @@ function hash(callback, input) {
lineStart = true;
}
updateHash(current);
+};
+const readStream = fs.createReadStream(filepath, "utf8");
+for await (const data of readStream) {
+for (let i = 0; i < data.length; ++i) {
+processCharacter(data.charCodeAt(i));
+}
}
+processCharacter(EOF);
// Flush the remaining lines
for (let i = 0; i < BLOCK_SIZE; i++) {
if (lineNumbers[index] !== -1) {
@@ -153,7 +173,7 @@ function locationUpdateCallback(result, location, logger) {
// the source file so we can hash it.
// If possible returns a absolute file path for the source file,
// or if not possible then returns undefined.
-function resolveUriToFile(location, artifacts, checkoutPath, logger) {
+function resolveUriToFile(location, artifacts, sourceRoot, logger) {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== "number" ||
@@ -181,7 +201,7 @@ function resolveUriToFile(location, artifacts, checkoutPath, logger) {
return undefined;
}
// Discard any absolute paths that aren't in the src root
-const srcRootPrefix = `${checkoutPath}/`;
+const srcRootPrefix = `${sourceRoot}/`;
if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
return undefined;
@@ -206,8 +226,8 @@ function resolveUriToFile(location, artifacts, checkoutPath, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
-function addFingerprints(sarifContents, checkoutPath, logger) {
+async function addFingerprints(sarifContents, sourceRoot, logger) {
-var _a, _b;
+var _a, _b, _c;
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
@@ -218,11 +238,15 @@ function addFingerprints(sarifContents, checkoutPath, logger) {
for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
-if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
+if (!((_a = primaryLocation === null || primaryLocation === void 0 ? void 0 : primaryLocation.physicalLocation) === null || _a === void 0 ? void 0 : _a.artifactLocation)) {
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
continue;
}
-const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, checkoutPath, logger);
+if (((_c = (_b = primaryLocation === null || primaryLocation === void 0 ? void 0 : primaryLocation.physicalLocation) === null || _b === void 0 ? void 0 : _b.region) === null || _c === void 0 ? void 0 : _c.startLine) === undefined) {
+// Locations without a line number are unlikely to be source files
+continue;
+}
+const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, sourceRoot, logger);
if (!filepath) {
continue;
}
@@ -240,8 +264,7 @@ function addFingerprints(sarifContents, checkoutPath, logger) {
c(lineNumber, hashValue);
}
};
-const fileContents = fs.readFileSync(filepath).toString();
+await hash(teeCallback, filepath);
-hash(teeCallback, fileContents);
}
return JSON.stringify(sarif);
}
File diff suppressed because one or more lines are too long
71 lib/fingerprints.test.js generated
@@ -1,9 +1,21 @@
"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-result["default"] = mod;
+__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
@@ -16,28 +28,33 @@ const ava_1 = __importDefault(require("ava"));
const fingerprints = __importStar(require("./fingerprints"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
+const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
-function testHash(t, input, expectedHashes) {
+async function testHash(t, input, expectedHashes) {
-let index = 0;
+await util.withTmpDir(async (tmpDir) => {
-const callback = function (lineNumber, hash) {
+const tmpFile = path.resolve(tmpDir, "testfile");
-t.is(lineNumber, index + 1);
+fs.writeFileSync(tmpFile, input);
-t.is(hash, expectedHashes[index]);
+let index = 0;
-index++;
+const callback = function (lineNumber, hash) {
-};
+t.is(lineNumber, index + 1);
-fingerprints.hash(callback, input);
+t.is(hash, expectedHashes[index]);
-t.is(index, input.split(/\r\n|\r|\n/).length);
+index++;
+};
+await fingerprints.hash(callback, tmpFile);
+t.is(index, input.split(/\r\n|\r|\n/).length);
+});
}
-ava_1.default("hash", (t) => {
+ava_1.default("hash", async (t) => {
// Try empty file
-testHash(t, "", ["c129715d7a2bc9a3:1"]);
+await testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters
-testHash(t, " a\nb\n \t\tc\n d", [
+await testHash(t, " a\nb\n \t\tc\n d", [
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1",
]);
-testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
+await testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1",
"b1ae1287ec4718d9:1",
"bff680108adb0fcc:1",
@@ -45,7 +62,7 @@ ava_1.default("hash", (t) => {
"b86d3392aea1be30:1",
"e6ceba753e1a442:1",
]);
-testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
+await testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
@@ -54,7 +71,7 @@ ava_1.default("hash", (t) => {
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
-testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
+await testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
@@ -63,7 +80,7 @@ ava_1.default("hash", (t) => {
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
-testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
+await testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
@@ -72,7 +89,7 @@ ava_1.default("hash", (t) => {
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
-testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
+await testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
@@ -82,7 +99,7 @@ ava_1.default("hash", (t) => {
"c129715d7a2bc9a3:1",
]);
// Try repeating line that will generate identical hashes
-testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
+await testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
"a7f2ff13bc495cf2:1",
"a7f2ff13bc495cf2:2",
"a7f2ff13bc495cf2:3",
@@ -95,7 +112,7 @@ ava_1.default("hash", (t) => {
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1",
]);
-testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
+await testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
"e54938cc54b302f1:1",
"bb609acbe9138d60:1",
"1131fd5871777f34:1",
@@ -150,7 +167,7 @@ ava_1.default("resolveUriToFile", (t) => {
t.is(testResolveUriToFile(dirpath, undefined, []), undefined);
t.is(testResolveUriToFile(`file://${dirpath}`, undefined, []), undefined);
});
-ava_1.default("addFingerprints", (t) => {
+ava_1.default("addFingerprints", async (t) => {
// Run an end-to-end test on a test file
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
@@ -162,10 +179,10 @@ ava_1.default("addFingerprints", (t) => {
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
-const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
+const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
-t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
+t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, logging_1.getRunnerLogger(true)), expected);
});
-ava_1.default("missingRegions", (t) => {
+ava_1.default("missingRegions", async (t) => {
// Run an end-to-end test on a test file
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
@@ -177,7 +194,7 @@ ava_1.default("missingRegions", (t) => {
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
-const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
+const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
-t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
+t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, logging_1.getRunnerLogger(true)), expected);
});
//# sourceMappingURL=fingerprints.test.js.map
File diff suppressed because one or more lines are too long
58 lib/init-action.js generated
@@ -1,31 +1,46 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
if (mod && mod.__esModule) return mod;
|
if (mod && mod.__esModule) return mod;
|
||||||
var result = {};
|
var result = {};
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
result["default"] = mod;
|
__setModuleDefault(result, mod);
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const path = __importStar(require("path"));
|
||||||
const core = __importStar(require("@actions/core"));
|
const core = __importStar(require("@actions/core"));
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
const actions_util_1 = require("./actions-util");
|
||||||
const init_1 = require("./init");
|
const init_1 = require("./init");
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const logging_1 = require("./logging");
|
const logging_1 = require("./logging");
|
||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
|
// eslint-disable-next-line import/no-commonjs
|
||||||
|
const pkg = require("../package.json");
|
||||||
async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
|
async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
|
||||||
var _a;
|
var _a;
|
||||||
const statusReportBase = await actionsUtil.createStatusReportBase("init", "success", startedAt);
|
const statusReportBase = await actions_util_1.createStatusReportBase("init", "success", startedAt);
|
||||||
const languages = config.languages.join(",");
|
const languages = config.languages.join(",");
|
||||||
const workflowLanguages = actionsUtil.getOptionalInput("languages");
|
const workflowLanguages = actions_util_1.getOptionalInput("languages");
|
||||||
const paths = (config.originalUserInput.paths || []).join(",");
|
const paths = (config.originalUserInput.paths || []).join(",");
|
||||||
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
|
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
|
||||||
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
|
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
|
||||||
? languages
|
? languages
|
||||||
: "";
|
: "";
|
||||||
const queries = [];
|
const queries = [];
|
||||||
let queriesInput = (_a = actionsUtil.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
|
let queriesInput = (_a = actions_util_1.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
|
||||||
if (queriesInput === undefined || queriesInput.startsWith("+")) {
|
if (queriesInput === undefined || queriesInput.startsWith("+")) {
|
||||||
queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
|
queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
|
||||||
}
|
}
|
||||||
@@ -43,36 +58,36 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
|
|||||||
paths_ignore: pathsIgnore,
|
paths_ignore: pathsIgnore,
|
||||||
disable_default_queries: disableDefaultQueries,
|
disable_default_queries: disableDefaultQueries,
|
||||||
queries: queries.join(","),
|
queries: queries.join(","),
|
||||||
tools_input: actionsUtil.getOptionalInput("tools") || "",
|
tools_input: actions_util_1.getOptionalInput("tools") || "",
|
||||||
         tools_resolved_version: toolsVersion,
     };
-    await actionsUtil.sendStatusReport(statusReport);
+    await actions_util_1.sendStatusReport(statusReport);
 }
 async function run() {
     const startedAt = new Date();
     const logger = logging_1.getActionsLogger();
+    util_1.initializeEnvironment(util_1.Mode.actions, pkg.version);
     let config;
     let codeql;
     let toolsVersion;
     const apiDetails = {
-        auth: actionsUtil.getRequiredInput("token"),
-        externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
-        url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
+        auth: actions_util_1.getRequiredInput("token"),
+        externalRepoAuth: actions_util_1.getOptionalInput("external-repository-token"),
+        url: util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
     };
     const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
-    util_1.checkGitHubVersionInRange(gitHubVersion, "actions", logger);
+    util_1.checkGitHubVersionInRange(gitHubVersion, logger, util_1.Mode.actions);
     try {
-        actionsUtil.prepareLocalRunEnvironment();
-        const workflowErrors = await actionsUtil.validateWorkflow();
-        if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
+        const workflowErrors = await actions_util_1.validateWorkflow();
+        if (!(await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
             return;
         }
-        const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getTemporaryDirectory(), actionsUtil.getToolCacheDirectory(), "actions", gitHubVersion.type, logger);
+        const initCodeQLResult = await init_1.initCodeQL(actions_util_1.getOptionalInput("tools"), apiDetails, actions_util_1.getTemporaryDirectory(), actions_util_1.getToolCacheDirectory(), gitHubVersion.type, logger);
         codeql = initCodeQLResult.codeql;
         toolsVersion = initCodeQLResult.toolsVersion;
-        config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), actionsUtil.getOptionalInput("db-location"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getTemporaryDirectory(), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
+        config = await init_1.initConfig(actions_util_1.getOptionalInput("languages"), actions_util_1.getOptionalInput("queries"), actions_util_1.getOptionalInput("packs"), actions_util_1.getOptionalInput("config-file"), actions_util_1.getOptionalInput("db-location"), repository_1.parseRepositoryNwo(util_1.getRequiredEnvParam("GITHUB_REPOSITORY")), actions_util_1.getTemporaryDirectory(), util_1.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, util_1.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
         if (config.languages.includes(languages_1.Language.python) &&
-            actionsUtil.getRequiredInput("setup-python-dependencies") === "true") {
+            actions_util_1.getRequiredInput("setup-python-dependencies") === "true") {
             try {
                 await init_1.installPythonDeps(codeql, logger);
             }
@@ -84,7 +99,7 @@ async function run() {
     catch (e) {
         core.setFailed(e.message);
         console.log(e);
-        await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "aborted", startedAt, e.message));
+        await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("init", "aborted", startedAt, e.message));
         return;
     }
     try {
@@ -97,7 +112,8 @@
         // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
         const codeqlRam = process.env["CODEQL_RAM"] || "6500";
         core.exportVariable("CODEQL_RAM", codeqlRam);
-        const tracerConfig = await init_1.runInit(codeql, config);
+        const sourceRoot = path.resolve(util_1.getRequiredEnvParam("GITHUB_WORKSPACE"), actions_util_1.getOptionalInput("source-root") || "");
+        const tracerConfig = await init_1.runInit(codeql, config, sourceRoot);
         if (tracerConfig !== undefined) {
             for (const [key, value] of Object.entries(tracerConfig.env)) {
                 core.exportVariable(key, value);
@@ -111,7 +127,7 @@
     catch (error) {
         core.setFailed(error.message);
         console.log(error);
-        await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
+        await actions_util_1.sendStatusReport(await actions_util_1.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
         return;
     }
     await sendSuccessStatusReport(startedAt, config, toolsVersion);
[generated source map for init-action.js updated; single-line diff omitted because it is too long to display]
lib/init.js (generated, 60 changes)
@@ -1,12 +1,25 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.installPythonDeps = exports.injectWindowsTracer = exports.runInit = exports.initConfig = exports.initCodeQL = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -16,24 +29,23 @@ const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
 const tracer_config_1 = require("./tracer-config");
 const util = __importStar(require("./util"));
-async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, mode, variant, logger) {
+async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger) {
     logger.startGroup("Setup CodeQL tools");
-    const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, mode, variant, logger);
+    const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant, logger, true);
     await codeql.printVersion();
     logger.endGroup();
     return { codeql, toolsVersion };
 }
 exports.initCodeQL = initCodeQL;
-async function initConfig(languagesInput, queriesInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
+async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
     logger.startGroup("Load language configuration");
-    const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
+    const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
     analysisPaths.printPathFiltersWarning(config, logger);
     logger.endGroup();
     return config;
 }
 exports.initConfig = initConfig;
-async function runInit(codeql, config) {
-    const sourceRoot = path.resolve();
+async function runInit(codeql, config, sourceRoot) {
     fs.mkdirSync(config.dbLocation, { recursive: true });
     // TODO: replace this code once CodeQL supports multi-language tracing
     for (const language of config.languages) {
@@ -129,26 +141,15 @@ exports.injectWindowsTracer = injectWindowsTracer;
 async function installPythonDeps(codeql, logger) {
     logger.startGroup("Setup Python dependencies");
     const scriptsFolder = path.resolve(__dirname, "../python-setup");
-    // Setup tools on the GitHub hosted runners
-    if (process.env["ImageOS"] !== undefined) {
-        try {
-            if (process.platform === "win32") {
-                await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [path.join(scriptsFolder, "install_tools.ps1")]).exec();
-            }
-            else {
-                await new toolrunner.ToolRunner(path.join(scriptsFolder, "install_tools.sh")).exec();
-            }
-        }
-        catch (e) {
-            // This script tries to install some needed tools in the runner. It should not fail, but if it does
-            // we just abort the process without failing the action
-            logger.endGroup();
-            logger.warning("Unable to download and extract the tools needed for installing the python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
-            return;
-        }
-    }
-    // Install dependencies
     try {
+        if (process.platform === "win32") {
+            await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [
+                path.join(scriptsFolder, "install_tools.ps1"),
+            ]).exec();
+        }
+        else {
+            await new toolrunner.ToolRunner(path.join(scriptsFolder, "install_tools.sh")).exec();
+        }
         const script = "auto_install_packages.py";
         if (process.platform === "win32") {
             await new toolrunner.ToolRunner(await safeWhich.safeWhich("py"), [
@@ -165,7 +166,10 @@ async function installPythonDeps(codeql, logger) {
     }
     catch (e) {
         logger.endGroup();
-        logger.warning("We were unable to install your python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
+        logger.warning(`An error occurred while trying to automatically install Python dependencies: ${e}\n` +
+            "Please make sure any necessary dependencies are installed before calling the codeql-action/analyze " +
+            "step, and add a 'setup-python-dependencies: false' argument to this step to disable our automatic " +
+            "dependency installation and avoid this warning.");
         return;
     }
     logger.endGroup();
[generated source map for init.js updated; single-line diff omitted because it is too long to display]
lib/languages.js (generated, 2 changes)
@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.isScannedLanguage = exports.isTracedLanguage = exports.parseLanguage = exports.Language = void 0;
 // All the languages supported by CodeQL
 var Language;
 (function (Language) {
@@ -9,6 +10,7 @@ var Language;
     Language["java"] = "java";
     Language["javascript"] = "javascript";
     Language["python"] = "python";
+    Language["ruby"] = "ruby";
 })(Language = exports.Language || (exports.Language = {}));
 // Additional names for languages
 const LANGUAGE_ALIASES = {
[generated source map for languages.js updated; single-line diff omitted because it is too long to display]
lib/logging.js (generated, 17 changes)
@@ -1,12 +1,25 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getRunnerLogger = exports.getActionsLogger = void 0;
 const core = __importStar(require("@actions/core"));
 function getActionsLogger() {
     return core;
[generated source map for logging.js updated; single-line diff omitted because it is too long to display]
lib/repository.js (generated, 1 change)
@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseRepositoryNwo = void 0;
 function parseRepositoryNwo(input) {
     const parts = input.split("/");
     if (parts.length !== 2) {
[generated source map for repository.js updated; single-line diff omitted because it is too long to display]
lib/runner.js (generated, 52 changes)
@@ -1,9 +1,21 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
@@ -21,8 +33,12 @@ const logging_1 = require("./logging");
 const repository_1 = require("./repository");
 const upload_lib = __importStar(require("./upload-lib"));
 const util_1 = require("./util");
+// eslint-disable-next-line import/no-commonjs
+const pkg = require("../package.json");
 const program = new commander_1.Command();
-program.version("0.0.1");
+program.version(pkg.version).hook("preAction", () => {
+    util_1.initializeEnvironment(util_1.Mode.runner, pkg.version);
+});
 function getTempDir(userInput) {
     const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
     if (!fs.existsSync(tempDir)) {
@@ -86,6 +102,12 @@ program
     .option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
     .option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
     .option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
+    .option("--packs <packs>", `[Experimental] Comma-separated list of packs to run. Reference a pack in the format scope/name[@version]. If version is not
+specified, then the latest version of the pack is used. By default, this overrides the same setting in a
+configuration file; prefix with "+" to use both sets of packs.
+
+This option is only available in single-language analyses. To use packs in multi-language
+analyses, you must specify packs in the codeql-config.yml file.`)
     .option("--config-file <file>", "Path to config file.")
     .option("--codeql-path <path>", "Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.")
     .option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
@@ -99,6 +121,7 @@ program
     try {
         const tempDir = getTempDir(cmd.tempDir);
         const toolsDir = getToolsDir(cmd.toolsDir);
+        const checkoutPath = cmd.checkoutPath || process.cwd();
         // Wipe the temp dir
         logger.info(`Cleaning temp directory ${tempDir}`);
         fs.rmdirSync(tempDir, { recursive: true });
@@ -110,16 +133,18 @@ program
             url: util_1.parseGitHubUrl(cmd.githubUrl),
         };
         const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
-        util_1.checkGitHubVersionInRange(gitHubVersion, "runner", logger);
+        util_1.checkGitHubVersionInRange(gitHubVersion, logger, util_1.Mode.runner);
         let codeql;
         if (cmd.codeqlPath !== undefined) {
-            codeql = codeql_1.getCodeQL(cmd.codeqlPath);
+            codeql = await codeql_1.getCodeQL(cmd.codeqlPath);
         }
         else {
-            codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, toolsDir, "runner", gitHubVersion.type, logger)).codeql;
+            codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, toolsDir, gitHubVersion.type, logger)).codeql;
         }
-        const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, undefined, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
-        const tracerConfig = await init_1.runInit(codeql, config);
+        const workspacePath = checkoutPath;
+        const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
+        const sourceRoot = checkoutPath;
+        const tracerConfig = await init_1.runInit(codeql, config, sourceRoot);
         if (tracerConfig === undefined) {
             return;
         }
@@ -233,12 +258,16 @@ program
             url: util_1.parseGitHubUrl(cmd.githubUrl),
         };
         const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
-        await analyze_1.runAnalyze(outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), cmd.category, config, logger);
+        const threads = util_1.getThreadsFlag(cmd.threads, logger);
+        const memory = util_1.getMemoryFlag(cmd.ram);
+        await analyze_1.runFinalize(outputDir, threads, memory, config, logger);
+        await analyze_1.runQueries(outputDir, memory, util_1.getAddSnippetsFlag(cmd.addSnippets), threads, cmd.category, config, logger);
         if (!cmd.upload) {
             logger.info("Not uploading results");
             return;
         }
-        await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, cmd.checkoutPath || process.cwd(), config.gitHubVersion, apiDetails, logger);
+        const sourceRoot = cmd.checkoutPath || process.cwd();
+        await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, sourceRoot, config.gitHubVersion, apiDetails, logger);
     }
     catch (e) {
         logger.error("Analyze failed");
@@ -268,7 +297,8 @@ program
     };
     try {
        const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
-        await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
+        const sourceRoot = cmd.checkoutPath || process.cwd();
+        await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.category, sourceRoot, gitHubVersion, apiDetails, logger);
     }
     catch (e) {
         logger.error("Upload failed");
File diff suppressed because one or more lines are too long
lib/shared-environment.js (generated, 1 change)
@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.CODEQL_WORKFLOW_STARTED_AT = exports.ODASA_TRACER_CONFIGURATION = void 0;
 exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
 // The time at which the first action (normally init) started executing.
 // If a workflow invokes a different action without first invoking the init
[generated source map for shared-environment.js updated; single-line diff omitted because it is too long to display]
lib/testing-utils.js (generated, 24 changes)
@@ -1,16 +1,26 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const sinon_1 = __importDefault(require("sinon"));
+exports.setupActionsVars = exports.setupTests = void 0;
+const sinon = __importStar(require("sinon"));
 const CodeQL = __importStar(require("./codeql"));
 function wrapOutput(context) {
     // Function signature taken from Socket.write.
@@ -66,7 +76,7 @@ function setupTests(test) {
         process.stdout.write(t.context.testOutput);
     }
     // Undo any modifications made by sinon
-    sinon_1.default.restore();
+    sinon.restore();
     // Undo any modifications to the env
     process.env = t.context.env;
 });
Some files were not shown because too many files have changed in this diff.