diff --git a/.github/bandit-baseline.json b/.github/bandit-baseline.json new file mode 100644 index 0000000000..2c6a477879 --- /dev/null +++ b/.github/bandit-baseline.json @@ -0,0 +1,31 @@ +{ + "results": [ + { + "code": "34 run_cmd,\n35 shell=True,\n36 capture_output=True,\n37 text=True,\n38 cwd=cwd,\n39 timeout=300,\n40 )\n41 output = {\n42 \"exit_code\": proc.returncode,\n43 \"stdout\": proc.stdout,\n", + "col_offset": 19, + "end_col_offset": 13, + "filename": "src/specify_cli/workflows/steps/shell/__init__.py", + "issue_confidence": "HIGH", + "issue_cwe": { + "id": 78, + "link": "https://cwe.mitre.org/data/definitions/78.html" + }, + "issue_severity": "HIGH", + "issue_text": "subprocess call with shell=True identified, security issue.", + "line_number": 35, + "line_range": [ + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.4/plugins/b602_subprocess_popen_with_shell_equals_true.html", + "test_id": "B602", + "test_name": "subprocess_popen_with_shell_equals_true" + } + ] +} diff --git a/.github/scripts/check_security_requirements.py b/.github/scripts/check_security_requirements.py new file mode 100644 index 0000000000..6834ee42bf --- /dev/null +++ b/.github/scripts/check_security_requirements.py @@ -0,0 +1,101 @@ +"""Check that committed security audit requirements are up to date.""" + +from __future__ import annotations + +import os +import subprocess +import sys +from pathlib import Path + + +REPO_ROOT = Path(__file__).resolve().parents[2] +COMMITTED_REQUIREMENTS = REPO_ROOT / ".github" / "security-audit-requirements.txt" +DEPENDENCY_INPUTS = ("pyproject.toml", ".github/security-audit-requirements.txt") + + +def _dependency_diff_refs() -> tuple[str, str]: + base_ref = os.environ.get("DEPENDENCY_DIFF_BASE", "").strip() + head_ref = os.environ.get("DEPENDENCY_DIFF_HEAD", "").strip() or "HEAD" + if base_ref and not set(base_ref) <= {"0"}: + return base_ref, head_ref + return "HEAD^", "HEAD" + + +def _dependency_inputs_changed() -> bool: + base_ref, head_ref = _dependency_diff_refs() + try: + result = subprocess.run( + [ + "git", + "diff", + "--name-only", + base_ref, + head_ref, + "--", + *DEPENDENCY_INPUTS, + ], + check=True, + cwd=REPO_ROOT, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + text=True, + ) + except subprocess.CalledProcessError as exc: + print( + "Could not determine changed dependency inputs; checking requirements.", + file=sys.stderr, + ) + if exc.stderr: + print(exc.stderr.strip(), file=sys.stderr) + return True + + changed_inputs = [line for line in result.stdout.splitlines() if line] + if not changed_inputs: + print("Dependency audit inputs unchanged; sync check skipped.") + return False + + print(f"Dependency audit inputs changed: {', '.join(changed_inputs)}") + return True + + +def main() -> int: + if not _dependency_inputs_changed(): + return 0 + + generated_requirements = Path(os.environ["GENERATED_REQUIREMENTS"]) + generated_requirements.parent.mkdir(parents=True, exist_ok=True) + + subprocess.run( + [ + "uv", + "pip", + "compile", + "pyproject.toml", + "--extra", + "test", + "--universal", + "--generate-hashes", + "--quiet", + "--no-header", + "--output-file", + str(generated_requirements), + ], + check=True, + cwd=REPO_ROOT, + ) + + committed = COMMITTED_REQUIREMENTS.read_text(encoding="utf-8") + generated = generated_requirements.read_text(encoding="utf-8") + if committed == generated: + return 0 + + print( + "Regenerate .github/security-audit-requirements.txt with the documented " + "uv pip compile 
command.", + file=sys.stderr, + ) + return 1 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/.github/security-audit-requirements.txt b/.github/security-audit-requirements.txt new file mode 100644 index 0000000000..d97b84d2cd --- /dev/null +++ b/.github/security-audit-requirements.txt @@ -0,0 +1,318 @@ +annotated-doc==0.0.4 \ + --hash=sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320 \ + --hash=sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4 + # via typer +click==8.3.3 \ + --hash=sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2 \ + --hash=sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613 + # via + # specify-cli (pyproject.toml) + # typer +colorama==0.4.6 ; sys_platform == 'win32' \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via + # click + # pytest +coverage==7.13.5 \ + --hash=sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256 \ + --hash=sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b \ + --hash=sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5 \ + --hash=sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d \ + --hash=sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a \ + --hash=sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969 \ + --hash=sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642 \ + --hash=sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87 \ + --hash=sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740 \ + --hash=sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215 \ + --hash=sha256:0cef0cdec915d11254a7f549c1170afecce708d30610c6abdded1f74e581666d \ + --hash=sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422 \ + --hash=sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8 \ + --hash=sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911 \ + --hash=sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b \ + --hash=sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587 \ + --hash=sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8 \ + --hash=sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606 \ + --hash=sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9 \ + --hash=sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf \ + --hash=sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633 \ + --hash=sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6 \ + --hash=sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43 \ + --hash=sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2 \ + --hash=sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61 \ + --hash=sha256:356e76b46783a98c2a2fe81ec79df4883a1e62895ea952968fb253c114e7f930 \ + --hash=sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc \ + --hash=sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247 \ + --hash=sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75 \ + --hash=sha256:3e1bb5f6c78feeb1be3475789b14a0f0a5b47d505bfc7267126ccbd50289999e \ + 
--hash=sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376 \ + --hash=sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01 \ + --hash=sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1 \ + --hash=sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3 \ + --hash=sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743 \ + --hash=sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9 \ + --hash=sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf \ + --hash=sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e \ + --hash=sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1 \ + --hash=sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd \ + --hash=sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b \ + --hash=sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab \ + --hash=sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d \ + --hash=sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a \ + --hash=sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0 \ + --hash=sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510 \ + --hash=sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f \ + --hash=sha256:7034b5c56a58ae5e85f23949d52c14aca2cfc6848a31764995b7de88f13a1ea0 \ + --hash=sha256:704de6328e3d612a8f6c07000a878ff38181ec3263d5a11da1db294fa6a9bdf8 \ + --hash=sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf \ + --hash=sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209 \ + --hash=sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9 \ + --hash=sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3 \ + --hash=sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3 \ + --hash=sha256:79060214983769c7ba3f0cee10b54c97609dca4d478fa1aa32b914480fd5738d \ + --hash=sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd \ + --hash=sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2 \ + --hash=sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882 \ + --hash=sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09 \ + --hash=sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea \ + --hash=sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c \ + --hash=sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562 \ + --hash=sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3 \ + --hash=sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806 \ + --hash=sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e \ + --hash=sha256:9b74db26dfea4f4e50d48a4602207cd1e78be33182bc9cbf22da94f332f99878 \ + --hash=sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e \ + --hash=sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9 \ + --hash=sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45 \ + --hash=sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29 \ + --hash=sha256:a1a6d79a14e1ec1832cabc833898636ad5f3754a678ef8bb4908515208bf84f4 \ + --hash=sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c \ + 
--hash=sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479 \ + --hash=sha256:ad146744ca4fd09b50c482650e3c1b1f4dfa1d4792e0a04a369c7f23336f0400 \ + --hash=sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c \ + --hash=sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a \ + --hash=sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf \ + --hash=sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686 \ + --hash=sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de \ + --hash=sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028 \ + --hash=sha256:c555b48be1853fe3997c11c4bd521cdd9a9612352de01fa4508f16ec341e6fe0 \ + --hash=sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179 \ + --hash=sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16 \ + --hash=sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85 \ + --hash=sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a \ + --hash=sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0 \ + --hash=sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810 \ + --hash=sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161 \ + --hash=sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607 \ + --hash=sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26 \ + --hash=sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819 \ + --hash=sha256:dc022073d063b25a402454e5712ef9e007113e3a676b96c5f29b2bda29352f40 \ + --hash=sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5 \ + --hash=sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15 \ + --hash=sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0 \ + --hash=sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90 \ + --hash=sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0 \ + --hash=sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6 \ + --hash=sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a \ + --hash=sha256:eb7fdf1ef130660e7415e0253a01a7d5a88c9c4d158bcf75cbbd922fd65a5b58 \ + --hash=sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b \ + --hash=sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17 \ + --hash=sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5 \ + --hash=sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664 \ + --hash=sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0 \ + --hash=sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f + # via pytest-cov +iniconfig==2.3.0 \ + --hash=sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730 \ + --hash=sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12 + # via pytest +json5==0.14.0 \ + --hash=sha256:56cf861bab076b1178eb8c92e1311d273a9b9acea2ccc82c276abf839ebaef3a \ + --hash=sha256:b3f492fad9f6cdbced8b7d40b28b9b1c9701c5f561bef0d33b81c2ff433fefcb + # via specify-cli (pyproject.toml) +markdown-it-py==4.0.0 \ + --hash=sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147 \ + --hash=sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3 + # via rich +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ 
+ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +packaging==26.2 \ + --hash=sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e \ + --hash=sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661 + # via + # specify-cli (pyproject.toml) + # pytest +pathspec==1.1.1 \ + --hash=sha256:17db5ecd524104a120e173814c90367a96a98d07c45b2e10c2f3919fff91bf5a \ + --hash=sha256:a00ce642f577bf7f473932318056212bc4f8bfdf53128c78bbd5af0b9b20b189 + # via specify-cli (pyproject.toml) +platformdirs==4.9.6 \ + --hash=sha256:3bfa75b0ad0db84096ae777218481852c0ebc6c727b3168c1b9e0118e458cf0a \ + --hash=sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917 + # via specify-cli (pyproject.toml) +pluggy==1.6.0 \ + --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ + --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 + # via + # pytest + # pytest-cov +pygments==2.20.0 \ + --hash=sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f \ + --hash=sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176 + # via + # pytest + # rich +pytest==9.0.3 \ + --hash=sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9 \ + --hash=sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c + # via + # specify-cli (pyproject.toml) + # pytest-cov +pytest-cov==7.1.0 \ + --hash=sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2 \ + --hash=sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678 + # via specify-cli (pyproject.toml) +pyyaml==6.0.3 \ + --hash=sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c \ + --hash=sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a \ + --hash=sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3 \ + --hash=sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956 \ + --hash=sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6 \ + --hash=sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c \ + --hash=sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65 \ + --hash=sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a \ + --hash=sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0 \ + --hash=sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b \ + --hash=sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1 \ + --hash=sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6 \ + --hash=sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7 \ + --hash=sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e \ + --hash=sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007 \ + --hash=sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310 \ + --hash=sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4 \ + --hash=sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9 \ + --hash=sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295 \ + --hash=sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea \ + --hash=sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0 \ + --hash=sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e \ + 
--hash=sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac \ + --hash=sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9 \ + --hash=sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7 \ + --hash=sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35 \ + --hash=sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb \ + --hash=sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b \ + --hash=sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69 \ + --hash=sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5 \ + --hash=sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b \ + --hash=sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c \ + --hash=sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369 \ + --hash=sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd \ + --hash=sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824 \ + --hash=sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198 \ + --hash=sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065 \ + --hash=sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c \ + --hash=sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c \ + --hash=sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764 \ + --hash=sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196 \ + --hash=sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b \ + --hash=sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00 \ + --hash=sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac \ + --hash=sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8 \ + --hash=sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e \ + --hash=sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28 \ + --hash=sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3 \ + --hash=sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5 \ + --hash=sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4 \ + --hash=sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b \ + --hash=sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf \ + --hash=sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5 \ + --hash=sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702 \ + --hash=sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8 \ + --hash=sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788 \ + --hash=sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da \ + --hash=sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d \ + --hash=sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc \ + --hash=sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c \ + --hash=sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba \ + --hash=sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f \ + --hash=sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917 \ + --hash=sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5 \ + 
--hash=sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26 \ + --hash=sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f \ + --hash=sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b \ + --hash=sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be \ + --hash=sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c \ + --hash=sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3 \ + --hash=sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6 \ + --hash=sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926 \ + --hash=sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0 + # via specify-cli (pyproject.toml) +readchar==4.2.2 \ + --hash=sha256:92daf7e42c52b0787e6c75d01ecfb9a94f4ceff3764958b570c1dddedd47b200 \ + --hash=sha256:e3b270fe16fc90c50ac79107700330a133dd4c63d22939f5b03b4f24564d5dd8 + # via specify-cli (pyproject.toml) +rich==15.0.0 \ + --hash=sha256:33bd4ef74232fb73fe9279a257718407f169c09b78a87ad3d296f548e27de0bb \ + --hash=sha256:edd07a4824c6b40189fb7ac9bc4c52536e9780fbbfbddf6f1e2502c31b068c36 + # via + # specify-cli (pyproject.toml) + # typer +shellingham==1.5.4 \ + --hash=sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686 \ + --hash=sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de + # via typer +tomli==2.4.1 ; python_full_version <= '3.11' \ + --hash=sha256:01f520d4f53ef97964a240a035ec2a869fe1a37dde002b57ebc4417a27ccd853 \ + --hash=sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe \ + --hash=sha256:136443dbd7e1dee43c68ac2694fde36b2849865fa258d39bf822c10e8068eac5 \ + --hash=sha256:1d8591993e228b0c930c4bb0db464bdad97b3289fb981255d6c9a41aedc84b2d \ + --hash=sha256:2190f2e9dd7508d2a90ded5ed369255980a1bcdd58e52f7fe24b8162bf9fedbd \ + --hash=sha256:2c1c351919aca02858f740c6d33adea0c5deea37f9ecca1cc1ef9e884a619d26 \ + --hash=sha256:36d2bd2ad5fb9eaddba5226aa02c8ec3fa4f192631e347b3ed28186d43be6b54 \ + --hash=sha256:3d48a93ee1c9b79c04bb38772ee1b64dcf18ff43085896ea460ca8dec96f35f6 \ + --hash=sha256:47149d5bd38761ac8be13a84864bf0b7b70bc051806bc3669ab1cbc56216b23c \ + --hash=sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a \ + --hash=sha256:4b605484e43cdc43f0954ddae319fb75f04cc10dd80d830540060ee7cd0243cd \ + --hash=sha256:504aa796fe0569bb43171066009ead363de03675276d2d121ac1a4572397870f \ + --hash=sha256:51529d40e3ca50046d7606fa99ce3956a617f9b36380da3b7f0dd3dd28e68cb5 \ + --hash=sha256:52c8ef851d9a240f11a88c003eacb03c31fc1c9c4ec64a99a0f922b93874fda9 \ + --hash=sha256:559db847dc486944896521f68d8190be1c9e719fced785720d2216fe7022b662 \ + --hash=sha256:5a881ab208c0baf688221f8cecc5401bd291d67e38a1ac884d6736cbcd8247e9 \ + --hash=sha256:5cb41aa38891e073ee49d55fbc7839cfdb2bc0e600add13874d048c94aadddd1 \ + --hash=sha256:5e262d41726bc187e69af7825504c933b6794dc3fbd5945e41a79bb14c31f585 \ + --hash=sha256:5ee18d9ebdb417e384b58fe414e8d6af9f4e7a0ae761519fb50f721de398dd4e \ + --hash=sha256:7008df2e7655c495dd12d2a4ad038ff878d4ca4b81fccaf82b714e07eae4402c \ + --hash=sha256:734e20b57ba95624ecf1841e72b53f6e186355e216e5412de414e3c51e5e3c41 \ + --hash=sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f \ + --hash=sha256:7f86fd587c4ed9dd76f318225e7d9b29cfc5a9d43de44e5754db8d1128487085 \ + --hash=sha256:7f94b27a62cfad8496c8d2513e1a222dd446f095fca8987fceef261225538a15 \ + --hash=sha256:88dceee75c2c63af144e456745e10101eb67361050196b0b6af5d717254dddf7 \ 
+ --hash=sha256:8a650c2dbafa08d42e51ba0b62740dae4ecb9338eefa093aa5c78ceb546fcd5c \ + --hash=sha256:8d65a2fbf9d2f8352685bc1364177ee3923d6baf5e7f43ea4959d7d8bc326a36 \ + --hash=sha256:96481a5786729fd470164b47cdb3e0e58062a496f455ee41b4403be77cb5a076 \ + --hash=sha256:a120733b01c45e9a0c34aeef92bf0cf1d56cfe81ed9d47d562f9ed591a9828ac \ + --hash=sha256:b1d22e6e9387bf4739fbe23bfa80e93f6b0373a7f1b96c6227c32bef95a4d7a8 \ + --hash=sha256:b8c198f8c1805dc42708689ed6864951fd2494f924149d3e4bce7710f8eb5232 \ + --hash=sha256:c2541745709bad0264b7d4705ad453b76ccd191e64aa6f0fc66b69a293a45ece \ + --hash=sha256:c742f741d58a28940ce01d58f0ab2ea3ced8b12402f162f4d534dfe18ba1cd6a \ + --hash=sha256:c7f2c7f2b9ca6bdeef8f0fa897f8e05085923eb091721675170254cbc5b02897 \ + --hash=sha256:d312ef37c91508b0ab2cee7da26ec0b3ed2f03ce12bd87a588d771ae15dcf82d \ + --hash=sha256:d4d8fe59808a54658fcc0160ecfb1b30f9089906c50b23bcb4c69eddc19ec2b4 \ + --hash=sha256:da25dc3563bff5965356133435b757a795a17b17d01dbc0f42fb32447ddfd917 \ + --hash=sha256:eab21f45c7f66c13f2a9e0e1535309cee140182a9cdae1e041d02e47291e8396 \ + --hash=sha256:eb0dc4e38e6a1fd579e5d50369aa2e10acfc9cace504579b2faabb478e76941a \ + --hash=sha256:ec9bfaf3ad2df51ace80688143a6a4ebc09a248f6ff781a9945e51937008fcbc \ + --hash=sha256:ede3e6487c5ef5d28634ba3f31f989030ad6af71edfb0055cbbd14189ff240ba \ + --hash=sha256:f3c6818a1a86dd6dca7ddcaaf76947d5ba31aecc28cb1b67009a5877c9a64f3f \ + --hash=sha256:f758f1b9299d059cc3f6546ae2af89670cb1c4d48ea29c3cacc4fe7de3058257 \ + --hash=sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30 \ + --hash=sha256:fd0409a3653af6c147209d267a0e4243f0ae46b011aa978b1080359fddc9b6cf \ + --hash=sha256:ff18e6a727ee0ab0388507b89d1bc6a22b138d1e2fa56d1ad494586d61d2eae9 \ + --hash=sha256:ff2983983d34813c1aeb0fa89091e76c3a22889ee83ab27c5eeb45100560c049 + # via coverage +typer==0.25.1 \ + --hash=sha256:75caa44ed46a03fb2dab8808753ffacdbfea88495e74c85a28c5eefcf5f39c89 \ + --hash=sha256:9616eb8853a09ffeabab1698952f33c6f29ffdbceb4eaeecf571880e8d7664cc + # via specify-cli (pyproject.toml) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 01e0df4a51..1af463c718 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -19,14 +19,14 @@ jobs: language: [ 'actions', 'python' ] steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - name: Initialize CodeQL - uses: github/codeql-action/init@v4 + uses: github/codeql-action/init@e46ed2cbd01164d986452f91f178727624ae40d7 # v4 with: languages: ${{ matrix.language }} - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v4 + uses: github/codeql-action/analyze@e46ed2cbd01164d986452f91f178727624ae40d7 # v4 with: category: "/language:${{ matrix.language }}" diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 6fe87ddce2..9cb48f8f38 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -30,12 +30,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 0 # Fetch all history for git info - name: Setup .NET - uses: actions/setup-dotnet@v4 + uses: actions/setup-dotnet@67a3573c9a986a3f9c594539f4ab511d57bb3ce9 # v4 with: dotnet-version: '8.x' @@ -48,10 +48,10 @@ jobs: docfx docfx.json - name: Setup Pages - uses: actions/configure-pages@v6 + uses: actions/configure-pages@45bfe0192ca1faeb007ade9deae92b16b8254a0d # v6 - 
name: Upload artifact - uses: actions/upload-pages-artifact@v5 + uses: actions/upload-pages-artifact@fc324d3547104276b827a68afc52ff2a11cc49c9 # v5 with: path: 'docs/_site' @@ -66,5 +66,4 @@ jobs: steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v5 - + uses: actions/deploy-pages@cd2ce8fcbc39b97be8ca5fce6e763baed58fa128 # v5 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 8b11ccdfff..3b2ad70bfb 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - name: Run markdownlint-cli2 uses: DavidAnson/markdownlint-cli2-action@6b51ade7a9e4a75a7ad929842dd298a3804ebe8b # v23 diff --git a/.github/workflows/release-trigger.yml b/.github/workflows/release-trigger.yml index a451accfe6..c3728e2363 100644 --- a/.github/workflows/release-trigger.yml +++ b/.github/workflows/release-trigger.yml @@ -16,7 +16,7 @@ jobs: pull-requests: write steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 0 token: ${{ secrets.RELEASE_PAT }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7b903cf979..9437bd02e7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,7 @@ jobs: contents: write steps: - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: fetch-depth: 0 token: ${{ secrets.GITHUB_TOKEN }} @@ -86,4 +86,3 @@ jobs: --notes-file release_notes.md env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml new file mode 100644 index 0000000000..8a0058c073 --- /dev/null +++ b/.github/workflows/security.yml @@ -0,0 +1,74 @@ +name: Security Audit + +permissions: + contents: read + +on: + push: + branches: ["main"] + pull_request: + schedule: + - cron: "17 4 * * 1" + workflow_dispatch: + +jobs: + dependency-audit: + name: Dependency audit (${{ matrix.os }}, Python ${{ matrix.python-version }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest] + python-version: ["3.11", "3.12", "3.13"] + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + with: + fetch-depth: 2 + + - name: Install uv + uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 + with: + python-version: ${{ matrix.python-version }} + + - name: Compile scheduled audit requirements + if: ${{ github.event_name == 'schedule' }} + run: | + uv pip compile pyproject.toml --extra test --python-version "${{ matrix.python-version }}" --generate-hashes --quiet --output-file "${{ runner.temp }}/spec-kit-audit-requirements.txt" + + - name: Run pip-audit (scheduled live resolution) + if: ${{ github.event_name == 'schedule' }} + run: uvx --from pip-audit==2.10.0 pip-audit --disable-pip --require-hashes -r "${{ runner.temp }}/spec-kit-audit-requirements.txt" --progress-spinner off + + - name: Check committed audit requirements are current + if: ${{ github.event_name != 'schedule' }} + env: + DEPENDENCY_DIFF_BASE: ${{ github.event.pull_request.base.sha || github.event.before || 
'' }} + DEPENDENCY_DIFF_HEAD: ${{ github.sha }} + GENERATED_REQUIREMENTS: ${{ runner.temp }}/security-audit-requirements.txt + run: python .github/scripts/check_security_requirements.py + + - name: Run pip-audit (committed requirements) + if: ${{ github.event_name != 'schedule' }} + run: uvx --from pip-audit==2.10.0 pip-audit --disable-pip --require-hashes -r .github/security-audit-requirements.txt --progress-spinner off + + static-analysis: + name: Static analysis + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + + - name: Install uv + uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 + + - name: Set up Python + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 + with: + python-version: "3.13" + + - name: Run Bandit + run: uvx --from bandit==1.9.4 bandit -r src -lll --baseline .github/bandit-baseline.json diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 076d05336a..919add00f0 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -14,7 +14,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v10 + - uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10 with: # Days of inactivity before an issue or PR becomes stale days-before-stale: 150 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7354dd8e28..f7130aa8d1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,13 +13,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - name: Install uv uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 - name: Set up Python - uses: actions/setup-python@v6 + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 with: python-version: "3.13" @@ -34,13 +34,13 @@ jobs: python-version: ["3.11", "3.12", "3.13"] steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - name: Install uv uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v6 + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 with: python-version: ${{ matrix.python-version }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5188d70a71..fd043e01a6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -81,6 +81,19 @@ uv run python -m pytest tests/test_agent_config_consistency.py -q Run this when you change agent metadata, context update scripts, or integration wiring. +#### Security checks + +```bash +uvx --from pip-audit==2.10.0 pip-audit --disable-pip --require-hashes -r .github/security-audit-requirements.txt --progress-spinner off +uvx --from bandit==1.9.4 bandit -r src -lll --baseline .github/bandit-baseline.json +``` + +Run these when you change dependency metadata, workflow execution code, subprocess usage, or other security-sensitive paths. Pull request, push, and manual CI audits use the committed hashed requirements file so they stay deterministic. The scheduled CI audit also resolves the runtime and `test` extra dependency set live across the supported Python and OS matrix to catch newly published advisories.
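+To reproduce the scheduled audit locally, run the same two steps by hand. This is a sketch, not a CI step; the scratch path and the pinned Python version below are illustrative, and any supported interpreter version works:
+
+```bash
+uv pip compile pyproject.toml --extra test --python-version 3.13 --generate-hashes --quiet --output-file /tmp/spec-kit-audit-requirements.txt
+uvx --from pip-audit==2.10.0 pip-audit --disable-pip --require-hashes -r /tmp/spec-kit-audit-requirements.txt --progress-spinner off
+```
+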
If dependency metadata changes, refresh the committed audit input before running pip-audit: + +```bash +uv pip compile pyproject.toml --extra test --universal --generate-hashes --quiet --no-header --output-file .github/security-audit-requirements.txt +``` + ### Manual testing #### Testing setup diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index ccd670d20e..8aa5ead1f5 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -54,6 +54,7 @@ from rich.tree import Tree from typer.core import TyperGroup +from ._download_security import read_response_limited from .integration_runtime import ( invoke_separator_for_integration as _invoke_separator_for_integration, resolve_integration_options as _resolve_integration_options_impl, @@ -396,14 +397,19 @@ def callback( console.print(Align.center("[dim]Run 'specify --help' for usage information[/dim]")) console.print() -def run_command(cmd: list[str], check_return: bool = True, capture: bool = False, shell: bool = False) -> Optional[str]: - """Run a shell command and optionally capture output.""" +def run_command(cmd: list[str], check_return: bool = True, capture: bool = False) -> Optional[str]: + """Run a command without invoking a shell and optionally capture output.""" try: if capture: - result = subprocess.run(cmd, check=check_return, capture_output=True, text=True, shell=shell) + result = subprocess.run( + cmd, + check=check_return, + capture_output=True, + text=True, + ) return result.stdout.strip() else: - subprocess.run(cmd, check=check_return, shell=shell) + subprocess.run(cmd, check=check_return) return None except subprocess.CalledProcessError as e: if check_return: @@ -1767,7 +1773,13 @@ def _fetch_latest_release_tag() -> tuple[str | None, str | None]: req.add_header("Authorization", f"Bearer {token}") try: with urllib.request.urlopen(req, timeout=5) as resp: - payload = json.loads(resp.read().decode("utf-8")) + payload = json.loads( + read_response_limited( + resp, + max_bytes=1024 * 1024, + label="GitHub latest release", + ).decode("utf-8") + ) tag = payload.get("tag_name") if not isinstance(tag, str) or not tag: raise ValueError("GitHub API response missing valid tag_name") @@ -3371,8 +3383,10 @@ def preset_add( zip_path = Path(tmpdir) / "preset.zip" try: with urllib.request.urlopen(from_url, timeout=60) as response: - zip_path.write_bytes(response.read()) - except urllib.error.URLError as e: + zip_path.write_bytes( + read_response_limited(response, label=f"preset {from_url}") + ) + except (urllib.error.URLError, ValueError) as e: console.print(f"[red]Error:[/red] Failed to download: {e}") raise typer.Exit(1) @@ -4275,12 +4289,15 @@ def extension_add( try: with urllib.request.urlopen(from_url, timeout=60) as response: - zip_data = response.read() + zip_data = read_response_limited( + response, + label=f"extension {from_url}", + ) zip_path.write_bytes(zip_data) # Install from downloaded ZIP manifest = manager.install_from_zip(zip_path, speckit_version, priority=priority) - except urllib.error.URLError as e: + except (urllib.error.URLError, ValueError) as e: console.print(f"[red]Error:[/red] Failed to download from {from_url}: {e}") raise typer.Exit(1) finally: @@ -5521,7 +5538,7 @@ def _validate_and_install_local(yaml_path: Path, source_label: str) -> None: console.print(f"[red]Error:[/red] URL redirected to non-HTTPS: {final_url}") raise typer.Exit(1) with tempfile.NamedTemporaryFile(suffix=".yml", delete=False) as tmp: - tmp.write(resp.read()) + tmp.write(read_response_limited(resp, 
label=f"workflow {source}")) tmp_path = Path(tmp.name) except typer.Exit: raise @@ -5625,7 +5642,9 @@ def _validate_and_install_local(yaml_path: Path, source_label: str) -> None: f"[red]Error:[/red] Workflow '{source}' redirected to non-HTTPS URL: {final_url}" ) raise typer.Exit(1) - workflow_file.write_bytes(response.read()) + workflow_file.write_bytes( + read_response_limited(response, label=f"workflow {source}") + ) except Exception as exc: if workflow_dir.exists(): import shutil diff --git a/src/specify_cli/_download_security.py b/src/specify_cli/_download_security.py new file mode 100644 index 0000000000..6706ec5237 --- /dev/null +++ b/src/specify_cli/_download_security.py @@ -0,0 +1,173 @@ +"""Helpers for bounded downloads and archive extraction.""" + +from __future__ import annotations + +import hashlib +import re +import stat +import zipfile +from pathlib import Path, PurePosixPath +from typing import TypeVar + + +ErrorT = TypeVar("ErrorT", bound=Exception) + +MAX_DOWNLOAD_BYTES = 50 * 1024 * 1024 +MAX_ZIP_ENTRIES = 512 +MAX_ZIP_MEMBER_BYTES = 10 * 1024 * 1024 +MAX_ZIP_TOTAL_BYTES = 50 * 1024 * 1024 +READ_CHUNK_SIZE = 1024 * 1024 +SHA256_RE = re.compile(r"^[0-9a-fA-F]{64}$") + + +def _raise(error_type: type[ErrorT], message: str) -> None: + raise error_type(message) + + +def read_response_limited( + response, + *, + max_bytes: int = MAX_DOWNLOAD_BYTES, + error_type: type[ErrorT] = ValueError, + label: str = "download", +) -> bytes: + """Read at most *max_bytes* from a response object.""" + data = response.read(max_bytes + 1) + if len(data) > max_bytes: + _raise(error_type, f"{label} exceeds maximum size of {max_bytes} bytes") + return data + + +def normalize_sha256(value: object, *, error_type: type[ErrorT] = ValueError) -> str | None: + """Normalize an optional sha256/sha256: checksum value.""" + if value is None: + return None + if not isinstance(value, str): + _raise(error_type, "sha256 checksum must be a string") + + checksum = value.strip() + if checksum.startswith("sha256:"): + checksum = checksum[len("sha256:") :] + if not SHA256_RE.fullmatch(checksum): + _raise(error_type, "sha256 checksum must be 64 hexadecimal characters") + return checksum.lower() + + +def verify_sha256( + data: bytes, + expected: object, + *, + error_type: type[ErrorT] = ValueError, + label: str = "download", +) -> None: + """Verify *data* against an optional sha256 checksum.""" + checksum = normalize_sha256(expected, error_type=error_type) + if checksum is None: + return + + actual = hashlib.sha256(data).hexdigest() + if actual != checksum: + _raise( + error_type, + f"{label} checksum mismatch: expected sha256:{checksum}, got sha256:{actual}", + ) + + +def _safe_zip_name(name: str, *, error_type: type[ErrorT]) -> str: + """Return a normalized ZIP member name or raise on traversal.""" + if "\x00" in name: + _raise(error_type, f"Unsafe path in ZIP archive: {name!r}") + + normalized = name.replace("\\", "/") + path = PurePosixPath(normalized) + has_windows_drive = re.match(r"^[A-Za-z]:", normalized) is not None + if ( + not path.parts + or path.is_absolute() + or has_windows_drive + or any(part == ".." 
for part in path.parts) + ): + _raise( + error_type, + f"Unsafe path in ZIP archive: {name} (potential path traversal)", + ) + return normalized + + +def safe_extract_zip( + zip_path: Path, + target_dir: Path, + *, + error_type: type[ErrorT] = ValueError, + max_entries: int = MAX_ZIP_ENTRIES, + max_member_bytes: int = MAX_ZIP_MEMBER_BYTES, + max_total_bytes: int = MAX_ZIP_TOTAL_BYTES, +) -> None: + """Extract a ZIP archive after path, symlink, and size validation.""" + target_root = target_dir.resolve() + + with zipfile.ZipFile(zip_path, "r") as zf: + members = zf.infolist() + if len(members) > max_entries: + _raise( + error_type, + f"ZIP archive contains too many entries ({len(members)} > {max_entries})", + ) + + normalized_members: list[tuple[zipfile.ZipInfo, str]] = [] + total_size = 0 + for member in members: + normalized_name = _safe_zip_name(member.filename, error_type=error_type) + + mode = member.external_attr >> 16 + if stat.S_ISLNK(mode): + _raise(error_type, f"Unsafe symlink in ZIP archive: {member.filename}") + + member_path = (target_dir / normalized_name).resolve() + try: + member_path.relative_to(target_root) + except ValueError: + _raise( + error_type, + f"Unsafe path in ZIP archive: {member.filename} " + "(potential path traversal)", + ) + + if not member.is_dir(): + if member.file_size > max_member_bytes: + _raise( + error_type, + f"ZIP member {member.filename} exceeds maximum size " + f"of {max_member_bytes} bytes", + ) + total_size += member.file_size + if total_size > max_total_bytes: + _raise( + error_type, + f"ZIP archive exceeds maximum uncompressed size " + f"of {max_total_bytes} bytes", + ) + + normalized_members.append((member, normalized_name)) + + for member, normalized_name in normalized_members: + member_path = target_dir / normalized_name + if member.is_dir(): + member_path.mkdir(parents=True, exist_ok=True) + continue + + member_path.parent.mkdir(parents=True, exist_ok=True) + written = 0 + with zf.open(member, "r") as source, member_path.open("wb") as dest: + while True: + chunk = source.read(READ_CHUNK_SIZE) + if not chunk: + break + written += len(chunk) + if written > max_member_bytes: + _raise( + error_type, + f"ZIP member {member.filename} exceeds maximum size " + f"of {max_member_bytes} bytes", + ) + dest.write(chunk) diff --git a/src/specify_cli/_github_http.py b/src/specify_cli/_github_http.py index ee68a8325c..f86b48c657 100644 --- a/src/specify_cli/_github_http.py +++ b/src/specify_cli/_github_http.py @@ -7,6 +7,7 @@ """ import os +import urllib.error import urllib.request from urllib.parse import urlparse from typing import Dict @@ -41,6 +42,12 @@ def build_github_request(url: str) -> urllib.request.Request: return urllib.request.Request(url, headers=headers) +def _is_https_or_localhost_http(url: str) -> bool: + parsed = urlparse(url) + is_localhost = parsed.hostname in ("localhost", "127.0.0.1", "::1") + return parsed.scheme == "https" or (parsed.scheme == "http" and is_localhost) + + class _StripAuthOnRedirect(urllib.request.HTTPRedirectHandler): """Redirect handler that drops the Authorization header when leaving GitHub. 
@@ -50,6 +57,11 @@ class _StripAuthOnRedirect(urllib.request.HTTPRedirectHandler): """ def redirect_request(self, req, fp, code, msg, headers, newurl): + if not _is_https_or_localhost_http(newurl): + raise urllib.error.URLError( + f"Refusing unsafe redirect to non-HTTPS URL: {newurl}" + ) + original_auth = req.get_header("Authorization") new_req = super().redirect_request(req, fp, code, msg, headers, newurl) if new_req is not None: @@ -63,17 +75,19 @@ def redirect_request(self, req, fp, code, msg, headers, newurl): return new_req -def open_github_url(url: str, timeout: int = 10): +def open_github_url(url: str, timeout: int = 10, *, strict_redirects: bool = False): """Open a URL with GitHub auth, stripping the header on cross-host redirects. When the request carries an Authorization header, a custom redirect handler drops that header if the redirect target is not a GitHub-owned domain, preventing token leakage to CDNs or other third-party hosts that GitHub may redirect to (e.g. S3 for release asset downloads). + When strict_redirects is true, the same redirect handler is used even + without auth so HTTPS downloads cannot silently downgrade to HTTP. """ req = build_github_request(url) - if not req.get_header("Authorization"): + if not req.get_header("Authorization") and not strict_redirects: return urllib.request.urlopen(req, timeout=timeout) opener = urllib.request.build_opener(_StripAuthOnRedirect) diff --git a/src/specify_cli/agents.py b/src/specify_cli/agents.py index 726b0fd2a6..da16e33691 100644 --- a/src/specify_cli/agents.py +++ b/src/specify_cli/agents.py @@ -461,8 +461,20 @@ def register_commands( for cmd_info in commands: cmd_name = cmd_info["name"] cmd_file = cmd_info["file"] + if not isinstance(cmd_file, str) or not cmd_file.strip(): + raise ValueError( + f"Command source file for {cmd_name!r} must be a non-empty string" + ) - source_file = source_dir / cmd_file + try: + source_root = source_dir.resolve() + source_file = (source_root / cmd_file).resolve() + source_file.relative_to(source_root) + except (OSError, ValueError): + raise ValueError( + f"Command source file {cmd_file!r} escapes directory " + f"{source_dir!r}" + ) from None if not source_file.exists(): continue diff --git a/src/specify_cli/extensions.py b/src/specify_cli/extensions.py index 81687b4186..1bace0b8a7 100644 --- a/src/specify_cli/extensions.py +++ b/src/specify_cli/extensions.py @@ -10,11 +10,10 @@ import hashlib import os import tempfile -import zipfile import shutil import copy from dataclasses import dataclass -from pathlib import Path +from pathlib import Path, PurePosixPath from typing import Optional, Dict, List, Any, Callable, Set from datetime import datetime, timezone import re @@ -25,6 +24,12 @@ from packaging import version as pkg_version from packaging.specifiers import SpecifierSet, InvalidSpecifier +from ._download_security import ( + read_response_limited, + safe_extract_zip, + verify_sha256, +) + _FALLBACK_CORE_COMMAND_NAMES = frozenset({ "analyze", "checklist", @@ -238,6 +243,24 @@ def _validate(self): ) if "name" not in cmd or "file" not in cmd: raise ValidationError("Command missing 'name' or 'file'") + if not isinstance(cmd["file"], str) or not cmd["file"].strip(): + raise ValidationError( + f"Command '{cmd['name']}' file must be a non-empty string" + ) + + normalized_file = cmd["file"].replace("\\", "/") + file_path = PurePosixPath(normalized_file) + has_windows_drive = re.match(r"^[A-Za-z]:", normalized_file) is not None + if ( + file_path.is_absolute() + or has_windows_drive + or 
any(part == ".." for part in file_path.parts) + ): + raise ValidationError( + f"Invalid command file path '{cmd['file']}': " + "must be a relative path within the extension directory" + ) + cmd["file"] = normalized_file # Validate command name format if not EXTENSION_COMMAND_NAME_PATTERN.match(cmd["name"]): @@ -1234,21 +1257,7 @@ def install_from_zip( with tempfile.TemporaryDirectory() as tmpdir: temp_path = Path(tmpdir) - # Extract ZIP safely (prevent Zip Slip attack) - with zipfile.ZipFile(zip_path, 'r') as zf: - # Validate all paths first before extracting anything - temp_path_resolved = temp_path.resolve() - for member in zf.namelist(): - member_path = (temp_path / member).resolve() - # Use is_relative_to for safe path containment check - try: - member_path.relative_to(temp_path_resolved) - except ValueError: - raise ValidationError( - f"Unsafe path in ZIP archive: {member} (potential path traversal)" - ) - # Only extract after all paths are validated - zf.extractall(temp_path) + safe_extract_zip(zip_path, temp_path, error_type=ValidationError) # Find extension directory (may be nested) extension_dir = temp_path @@ -1720,7 +1729,7 @@ def _open_url(self, url: str, timeout: int = 10): Delegates to :func:`specify_cli._github_http.open_github_url`. """ from specify_cli._github_http import open_github_url - return open_github_url(url, timeout) + return open_github_url(url, timeout, strict_redirects=True) def _load_catalog_config(self, config_path: Path) -> Optional[List[CatalogEntry]]: """Load catalog stack configuration from a YAML file. @@ -1912,7 +1921,13 @@ def _fetch_single_catalog(self, entry: CatalogEntry, force_refresh: bool = False # Fetch from network try: with self._open_url(entry.url, timeout=10) as response: - catalog_data = json.loads(response.read()) + catalog_data = json.loads( + read_response_limited( + response, + error_type=ExtensionError, + label=f"extension catalog {entry.url}", + ) + ) if "schema_version" not in catalog_data or "extensions" not in catalog_data: raise ExtensionError(f"Invalid catalog format from {entry.url}") @@ -2028,7 +2043,13 @@ def fetch_catalog(self, force_refresh: bool = False) -> Dict[str, Any]: import urllib.error with self._open_url(catalog_url, timeout=10) as response: - catalog_data = json.loads(response.read()) + catalog_data = json.loads( + read_response_limited( + response, + error_type=ExtensionError, + label=f"extension catalog {catalog_url}", + ) + ) # Validate catalog structure if "schema_version" not in catalog_data or "extensions" not in catalog_data: @@ -2178,8 +2199,18 @@ def download_extension(self, extension_id: str, target_dir: Optional[Path] = Non # Download the ZIP file try: with self._open_url(download_url, timeout=60) as response: - zip_data = response.read() + zip_data = read_response_limited( + response, + error_type=ExtensionError, + label=f"extension '{extension_id}' download", + ) + verify_sha256( + zip_data, + ext_info.get("sha256"), + error_type=ExtensionError, + label=f"extension '{extension_id}' download", + ) zip_path.write_bytes(zip_data) return zip_path diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 1b449af682..b784147f39 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -21,6 +21,8 @@ import yaml from packaging import version as pkg_version +from .._download_security import read_response_limited + # --------------------------------------------------------------------------- # Errors @@ -294,7 +296,13 @@ def 
_fetch_single_catalog( final_url = resp.geturl() if final_url != entry.url: self._validate_catalog_url(final_url) - catalog_data = json.loads(resp.read()) + catalog_data = json.loads( + read_response_limited( + resp, + error_type=IntegrationCatalogError, + label=f"integration catalog {entry.url}", + ) + ) if not isinstance(catalog_data, dict): raise IntegrationCatalogError( diff --git a/src/specify_cli/presets.py b/src/specify_cli/presets.py index 690d1c51ff..f9e350083b 100644 --- a/src/specify_cli/presets.py +++ b/src/specify_cli/presets.py @@ -12,10 +12,9 @@ import hashlib import os import tempfile -import zipfile import shutil from dataclasses import dataclass -from pathlib import Path +from pathlib import Path, PurePosixPath from typing import TYPE_CHECKING, Optional, Dict, List, Any if TYPE_CHECKING: @@ -27,6 +26,11 @@ from packaging import version as pkg_version from packaging.specifiers import SpecifierSet, InvalidSpecifier +from ._download_security import ( + read_response_limited, + safe_extract_zip, + verify_sha256, +) from .extensions import REINSTALL_COMMAND, ExtensionRegistry, normalize_priority @@ -216,12 +220,21 @@ def _validate(self): # Validate file path safety: must be relative, no parent traversal file_path = tmpl["file"] - normalized = os.path.normpath(file_path) - if os.path.isabs(normalized) or normalized.startswith(".."): + if not isinstance(file_path, str) or not file_path.strip(): + raise PresetValidationError( + "Invalid template file path: must be a non-empty string" + ) + normalized = file_path.replace("\\", "/") + normalized_path = PurePosixPath(normalized) + has_windows_drive = re.match(r"^[A-Za-z]:", normalized) is not None + if normalized_path.is_absolute() or any( + part == ".." for part in normalized_path.parts + ) or has_windows_drive: raise PresetValidationError( f"Invalid template file path '{file_path}': " "must be a relative path within the preset directory" ) + tmpl["file"] = normalized # Validate strategy field (optional, defaults to "replace") strategy = tmpl.get("strategy", "replace") @@ -1625,18 +1638,7 @@ def install_from_zip( with tempfile.TemporaryDirectory() as tmpdir: temp_path = Path(tmpdir) - with zipfile.ZipFile(zip_path, 'r') as zf: - temp_path_resolved = temp_path.resolve() - for member in zf.namelist(): - member_path = (temp_path / member).resolve() - try: - member_path.relative_to(temp_path_resolved) - except ValueError: - raise PresetValidationError( - f"Unsafe path in ZIP archive: {member} " - "(potential path traversal)" - ) - zf.extractall(temp_path) + safe_extract_zip(zip_path, temp_path, error_type=PresetValidationError) pack_dir = temp_path manifest_path = pack_dir / "preset.yml" @@ -1858,7 +1860,7 @@ def _open_url(self, url: str, timeout: int = 10): Delegates to :func:`specify_cli._github_http.open_github_url`. """ from specify_cli._github_http import open_github_url - return open_github_url(url, timeout) + return open_github_url(url, timeout, strict_redirects=True) def _load_catalog_config(self, config_path: Path) -> Optional[List[PresetCatalogEntry]]: """Load catalog stack configuration from a YAML file. 
@@ -2043,7 +2045,13 @@ def _fetch_single_catalog(self, entry: PresetCatalogEntry, force_refresh: bool = try: with self._open_url(entry.url, timeout=10) as response: - catalog_data = json.loads(response.read()) + catalog_data = json.loads( + read_response_limited( + response, + error_type=PresetError, + label=f"preset catalog {entry.url}", + ) + ) if ( "schema_version" not in catalog_data @@ -2136,7 +2144,13 @@ def fetch_catalog(self, force_refresh: bool = False) -> Dict[str, Any]: try: with self._open_url(catalog_url, timeout=10) as response: - catalog_data = json.loads(response.read()) + catalog_data = json.loads( + read_response_limited( + response, + error_type=PresetError, + label=f"preset catalog {catalog_url}", + ) + ) if ( "schema_version" not in catalog_data @@ -2306,8 +2320,18 @@ def download_pack( try: with self._open_url(download_url, timeout=60) as response: - zip_data = response.read() + zip_data = read_response_limited( + response, + error_type=PresetError, + label=f"preset '{pack_id}' download", + ) + verify_sha256( + zip_data, + pack_info.get("sha256"), + error_type=PresetError, + label=f"preset '{pack_id}' download", + ) zip_path.write_bytes(zip_data) return zip_path diff --git a/src/specify_cli/workflows/catalog.py b/src/specify_cli/workflows/catalog.py index da5c60b5c8..21c121715c 100644 --- a/src/specify_cli/workflows/catalog.py +++ b/src/specify_cli/workflows/catalog.py @@ -19,6 +19,9 @@ import yaml +from specify_cli._download_security import read_response_limited +from specify_cli._github_http import open_github_url + # --------------------------------------------------------------------------- # Errors @@ -322,7 +325,6 @@ def _fetch_single_catalog( # Fetch from URL — validate scheme before opening and after redirects from urllib.parse import urlparse - from urllib.request import urlopen def _validate_catalog_url(url: str) -> None: parsed = urlparse(url) @@ -337,9 +339,19 @@ def _validate_catalog_url(url: str) -> None: _validate_catalog_url(entry.url) try: - with urlopen(entry.url, timeout=30) as resp: # noqa: S310 + with open_github_url( + entry.url, + timeout=30, + strict_redirects=True, + ) as resp: _validate_catalog_url(resp.geturl()) - data = json.loads(resp.read().decode("utf-8")) + data = json.loads( + read_response_limited( + resp, + error_type=WorkflowCatalogError, + label="workflow catalog", + ).decode("utf-8") + ) except Exception as exc: # Fall back to cache if available if cache_file.exists(): diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py index 8b21ddfb8b..88b1b6e415 100644 --- a/tests/integrations/test_integration_catalog.py +++ b/tests/integrations/test_integration_catalog.py @@ -173,7 +173,7 @@ def __init__(self, data, url=""): self._data = json.dumps(data).encode() self._url = url - def read(self): + def read(self, _size=-1): return self._data def geturl(self): @@ -294,6 +294,50 @@ def test_invalid_catalog_format(self, tmp_path, monkeypatch): with pytest.raises(IntegrationCatalogError, match="Failed to fetch any integration catalog"): cat.search() + def test_fetch_single_catalog_uses_bounded_read(self, tmp_path, monkeypatch): + cat = IntegrationCatalog(tmp_path) + entry = IntegrationCatalogEntry( + url="https://example.com/catalog.json", + name="test", + priority=1, + install_allowed=True, + ) + + class FakeResponse: + def read(self, _size=-1): + return b"{}" + + def geturl(self): + return entry.url + + def __enter__(self): + return self + + def __exit__(self, *_args): + pass + + def 
fake_urlopen(url, timeout=10): + assert url == entry.url + assert timeout == 10 + return FakeResponse() + + def fake_read_response_limited(response, **kwargs): + assert isinstance(response, FakeResponse) + assert kwargs["error_type"] is IntegrationCatalogError + assert kwargs["label"] == "integration catalog https://example.com/catalog.json" + raise IntegrationCatalogError("catalog too large") + + import urllib.request + + monkeypatch.setattr(urllib.request, "urlopen", fake_urlopen) + monkeypatch.setattr( + "specify_cli.integrations.catalog.read_response_limited", + fake_read_response_limited, + ) + + with pytest.raises(IntegrationCatalogError, match="catalog too large"): + cat._fetch_single_catalog(entry, force_refresh=True) + def test_clear_cache(self, tmp_path): (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) @@ -492,7 +536,7 @@ class FakeResponse: def __init__(self, data, url=""): self._data = json.dumps(data).encode() self._url = url - def read(self): + def read(self, _size=-1): return self._data def geturl(self): return self._url diff --git a/tests/test_download_security.py b/tests/test_download_security.py new file mode 100644 index 0000000000..2ce8310ff7 --- /dev/null +++ b/tests/test_download_security.py @@ -0,0 +1,122 @@ +"""Tests for bounded download and ZIP extraction helpers.""" + +from __future__ import annotations + +import stat +import zipfile +import re +from pathlib import Path + +import pytest + +from specify_cli._download_security import ( + read_response_limited, + safe_extract_zip, + verify_sha256, +) + + +REPO_ROOT = Path(__file__).resolve().parent.parent +RAW_RESPONSE_READ_RE = re.compile(r"\b(?:resp|response)\.read\(\)") + + +class _Response: + def __init__(self, data: bytes): + self.data = data + + def read(self, size: int = -1) -> bytes: + return self.data if size < 0 else self.data[:size] + + +def test_read_response_limited_rejects_oversized_download(): + with pytest.raises(ValueError, match="exceeds maximum size"): + read_response_limited(_Response(b"abcde"), max_bytes=4) + + +def test_remote_downloads_do_not_use_unbounded_response_reads(): + offenders = [] + for path in (REPO_ROOT / "src" / "specify_cli").rglob("*.py"): + for line_number, line in enumerate( + path.read_text(encoding="utf-8").splitlines(), + start=1, + ): + if RAW_RESPONSE_READ_RE.search(line): + offenders.append(f"{path.relative_to(REPO_ROOT)}:{line_number}") + + assert offenders == [] + + +def test_verify_sha256_rejects_mismatch(): + with pytest.raises(ValueError, match="checksum mismatch"): + verify_sha256(b"payload", "sha256:" + "0" * 64) + + +@pytest.mark.parametrize( + "member_name", + [ + "../evil.txt", + "nested/../../evil.txt", + "nested\\..\\evil.txt", + "C:\\Windows\\evil.txt", + "C:drive-relative.txt", + ], +) +def test_safe_extract_zip_rejects_traversal(tmp_path, member_name): + zip_path = tmp_path / "bad.zip" + with zipfile.ZipFile(zip_path, "w") as zf: + zf.writestr(member_name, "nope") + + with pytest.raises(ValueError, match="Unsafe path"): + safe_extract_zip(zip_path, tmp_path / "out") + + +def test_safe_extract_zip_rejects_symlinks(tmp_path): + zip_path = tmp_path / "bad.zip" + info = zipfile.ZipInfo("link") + info.external_attr = (stat.S_IFLNK | 0o777) << 16 + + with zipfile.ZipFile(zip_path, "w") as zf: + zf.writestr(info, "target") + + with pytest.raises(ValueError, match="Unsafe symlink"): + safe_extract_zip(zip_path, tmp_path / "out") + + +def test_safe_extract_zip_rejects_oversized_member(tmp_path): + zip_path = tmp_path / "bad.zip" + with 
zipfile.ZipFile(zip_path, "w") as zf: + zf.writestr("big.txt", "abcde") + + with pytest.raises(ValueError, match="exceeds maximum size"): + safe_extract_zip(zip_path, tmp_path / "out", max_member_bytes=4) + + +def test_safe_extract_zip_rejects_too_many_entries(tmp_path): + zip_path = tmp_path / "bad.zip" + with zipfile.ZipFile(zip_path, "w") as zf: + zf.writestr("one.txt", "1") + zf.writestr("two.txt", "2") + + with pytest.raises(ValueError, match="too many entries"): + safe_extract_zip(zip_path, tmp_path / "out", max_entries=1) + + +def test_safe_extract_zip_rejects_total_uncompressed_size(tmp_path): + zip_path = tmp_path / "bad.zip" + with zipfile.ZipFile(zip_path, "w") as zf: + zf.writestr("one.txt", "123") + zf.writestr("two.txt", "456") + + with pytest.raises(ValueError, match="maximum uncompressed size"): + safe_extract_zip(zip_path, tmp_path / "out", max_total_bytes=5) + + +def test_safe_extract_zip_extracts_safe_archive(tmp_path): + zip_path = tmp_path / "ok.zip" + out_dir = tmp_path / "out" + with zipfile.ZipFile(zip_path, "w") as zf: + zf.writestr("nested/file.txt", "hello") + + safe_extract_zip(zip_path, out_dir) + + assert (out_dir / "nested" / "file.txt").read_text(encoding="utf-8") == "hello" diff --git a/tests/test_extensions.py b/tests/test_extensions.py index c5be0ab4f3..23e0bfdd21 100644 --- a/tests/test_extensions.py +++ b/tests/test_extensions.py @@ -11,6 +11,7 @@ import pytest import json +import hashlib import platform import tempfile import shutil @@ -293,6 +294,43 @@ def test_invalid_command_name(self, temp_dir, valid_manifest_data): with pytest.raises(ValidationError, match="Invalid command name"): ExtensionManifest(manifest_path) + @pytest.mark.parametrize( + "bad_file", + [ + "../outside.md", + "/tmp/outside.md", + "commands/../../outside.md", + "C:\\Windows\\outside.md", + "C:outside.md", + ], + ) + def test_invalid_command_file_path(self, temp_dir, valid_manifest_data, bad_file): + """Command files must stay inside the extension package.""" + import yaml + + valid_manifest_data["provides"]["commands"][0]["file"] = bad_file + + manifest_path = temp_dir / "extension.yml" + with open(manifest_path, "w") as f: + yaml.dump(valid_manifest_data, f) + + with pytest.raises(ValidationError, match="Invalid command file path"): + ExtensionManifest(manifest_path) + + def test_windows_command_file_path_is_normalized(self, temp_dir, valid_manifest_data): + """Windows-authored manifests keep compatibility without traversal.""" + import yaml + + valid_manifest_data["provides"]["commands"][0]["file"] = "commands\\hello.md" + + manifest_path = temp_dir / "extension.yml" + with open(manifest_path, "w") as f: + yaml.dump(valid_manifest_data, f) + + manifest = ExtensionManifest(manifest_path) + + assert manifest.commands[0]["file"] == "commands/hello.md" + def test_command_name_autocorrect_speckit_prefix(self, temp_dir, valid_manifest_data): """Test that 'speckit.command' is auto-corrected to 'speckit.{ext_id}.command'.""" import yaml @@ -1847,6 +1885,7 @@ def test_unregister_skill_removes_parent_directory(self, project_dir, temp_dir): from specify_cli.extensions import ExtensionManifest manifest = ExtensionManifest(ext_dir / "extension.yml") registered = registrar.register_commands_for_agent("codex", manifest, ext_dir, project_dir) + assert registered == ["speckit.cleanup-ext.run"] skill_subdir = skills_dir / "speckit-cleanup-ext-run" assert skill_subdir.exists(), "Skill subdirectory should exist after registration" @@ -2577,6 +2616,27 @@ def 
test_redirect_strips_auth_for_github_to_external(self): assert auth_header is None assert auth_unredirected is None + def test_redirect_rejects_https_downgrade(self): + """HTTPS downloads must not follow redirects to non-local HTTP URLs.""" + from specify_cli._github_http import _StripAuthOnRedirect + from urllib.request import Request + import io + import urllib.error + + handler = _StripAuthOnRedirect() + req = Request("https://example.com/archive.zip") + fp = io.BytesIO(b"") + + with pytest.raises(urllib.error.URLError, match="unsafe redirect"): + handler.redirect_request( + req, + fp, + 302, + "Found", + {}, + "http://evil.example.com/archive.zip", + ) + def test_fetch_single_catalog_sends_auth_header(self, temp_dir, monkeypatch): """_fetch_single_catalog passes Authorization header via opener for GitHub URLs.""" from unittest.mock import patch, MagicMock @@ -2611,10 +2671,52 @@ def fake_open(req, timeout=None): assert captured["req"].get_header("Authorization") == "Bearer ghp_testtoken" + def test_fetch_single_catalog_uses_bounded_read(self, temp_dir): + """Catalog JSON responses must use the shared bounded-read helper.""" + from unittest.mock import patch, MagicMock + + catalog = self._make_catalog(temp_dir) + mock_response = MagicMock() + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + entry = CatalogEntry( + url="https://example.com/catalog.json", + name="custom", + priority=1, + install_allowed=True, + ) + + with patch.object(catalog, "_open_url", return_value=mock_response), \ + patch( + "specify_cli.extensions.read_response_limited", + side_effect=ExtensionError("catalog too large"), + ): + with pytest.raises(ExtensionError, match="catalog too large"): + catalog._fetch_single_catalog(entry, force_refresh=True) + + def test_fetch_catalog_uses_bounded_read(self, temp_dir): + """The legacy single-catalog path must also bound catalog JSON reads.""" + from unittest.mock import patch, MagicMock + + catalog = self._make_catalog(temp_dir) + mock_response = MagicMock() + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + + with patch.object(catalog, "get_catalog_url", return_value="https://example.com/catalog.json"), \ + patch.object(catalog, "_open_url", return_value=mock_response), \ + patch( + "specify_cli.extensions.read_response_limited", + side_effect=ExtensionError("catalog too large"), + ): + with pytest.raises(ExtensionError, match="catalog too large"): + catalog.fetch_catalog(force_refresh=True) + def test_download_extension_sends_auth_header(self, temp_dir, monkeypatch): """download_extension passes Authorization header via opener for GitHub URLs.""" from unittest.mock import patch, MagicMock - import zipfile, io + import io + import zipfile monkeypatch.setenv("GITHUB_TOKEN", "ghp_testtoken") catalog = self._make_catalog(temp_dir) @@ -2653,6 +2755,52 @@ def fake_open(req, timeout=None): assert captured["req"].get_header("Authorization") == "Bearer ghp_testtoken" + def test_download_extension_verifies_sha256(self, temp_dir): + """Catalog-provided checksums are enforced when present.""" + from unittest.mock import patch, MagicMock + + catalog = self._make_catalog(temp_dir) + zip_bytes = b"fake zip data" + mock_response = MagicMock() + mock_response.read.return_value = zip_bytes + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + ext_info = { + "id": "test-ext", + "name": "Test Extension", + "version": "1.0.0", + "download_url": 
"https://example.com/test-ext.zip", + "sha256": hashlib.sha256(zip_bytes).hexdigest(), + } + + with patch.object(catalog, "get_extension_info", return_value=ext_info), \ + patch.object(catalog, "_open_url", return_value=mock_response): + result = catalog.download_extension("test-ext", target_dir=temp_dir) + + assert result.read_bytes() == zip_bytes + + def test_download_extension_rejects_sha256_mismatch(self, temp_dir): + """A mismatched catalog checksum stops the downloaded ZIP being used.""" + from unittest.mock import patch, MagicMock + + catalog = self._make_catalog(temp_dir) + mock_response = MagicMock() + mock_response.read.return_value = b"fake zip data" + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + ext_info = { + "id": "test-ext", + "name": "Test Extension", + "version": "1.0.0", + "download_url": "https://example.com/test-ext.zip", + "sha256": "0" * 64, + } + + with patch.object(catalog, "get_extension_info", return_value=ext_info), \ + patch.object(catalog, "_open_url", return_value=mock_response): + with pytest.raises(ExtensionError, match="checksum mismatch"): + catalog.download_extension("test-ext", target_dir=temp_dir) + # ===== CatalogEntry Tests ===== @@ -3510,7 +3658,6 @@ def test_download_extension_raises_for_bundled(self, temp_dir): def test_download_extension_allows_bundled_with_url(self, temp_dir): """download_extension should allow bundled extensions that have a download_url (newer version).""" from unittest.mock import patch, MagicMock - import urllib.request project_dir = temp_dir / "project" project_dir.mkdir() @@ -3533,7 +3680,7 @@ def test_download_extension_allows_bundled_with_url(self, temp_dir): mock_response.__exit__ = MagicMock(return_value=False) with patch.object(catalog, "get_extension_info", return_value=bundled_with_url), \ - patch.object(urllib.request, "urlopen", return_value=mock_response): + patch.object(catalog, "_open_url", return_value=mock_response): result = catalog.download_extension("git") assert result.name == "git-2.0.0.zip" diff --git a/tests/test_github_workflows.py b/tests/test_github_workflows.py new file mode 100644 index 0000000000..2b21d3a40f --- /dev/null +++ b/tests/test_github_workflows.py @@ -0,0 +1,32 @@ +"""Static checks for repository GitHub Actions workflows.""" + +from __future__ import annotations + +import re +from pathlib import Path + + +REPO_ROOT = Path(__file__).resolve().parent.parent +WORKFLOWS_DIR = REPO_ROOT / ".github" / "workflows" +USES_RE = re.compile(r"^\s*uses:\s*(?P\S+)", re.MULTILINE) + + +def test_github_actions_are_pinned_to_full_commit_shas(): + unpinned_refs = [] + + workflows = sorted( + list(WORKFLOWS_DIR.glob("*.yml")) + list(WORKFLOWS_DIR.glob("*.yaml")) + ) + assert workflows + + for workflow in workflows: + workflow_text = workflow.read_text(encoding="utf-8") + for match in USES_RE.finditer(workflow_text): + uses_ref = match.group("ref") + if uses_ref.startswith(("./", "../")): + continue + if re.search(r"@[0-9a-f]{40}$", uses_ref): + continue + unpinned_refs.append(f"{workflow.relative_to(REPO_ROOT)}: {uses_ref}") + + assert unpinned_refs == [] diff --git a/tests/test_presets.py b/tests/test_presets.py index 848c072dd0..e0106c1f69 100644 --- a/tests/test_presets.py +++ b/tests/test_presets.py @@ -12,6 +12,7 @@ import pytest import json +import hashlib import tempfile import shutil import warnings @@ -289,6 +290,39 @@ def test_invalid_template_name_format(self, temp_dir, valid_pack_data): with pytest.raises(PresetValidationError, match="Invalid 
template name"): PresetManifest(manifest_path) + @pytest.mark.parametrize( + "bad_file", + [ + "../outside.md", + "/tmp/outside.md", + "templates/../../outside.md", + "C:\\Windows\\outside.md", + "C:outside.md", + ], + ) + def test_invalid_template_file_path(self, temp_dir, valid_pack_data, bad_file): + """Template files must stay inside the preset package.""" + valid_pack_data["provides"]["templates"][0]["file"] = bad_file + manifest_path = temp_dir / "preset.yml" + with open(manifest_path, "w") as f: + yaml.dump(valid_pack_data, f) + + with pytest.raises(PresetValidationError, match="Invalid template file path"): + PresetManifest(manifest_path) + + def test_windows_template_file_path_is_normalized(self, temp_dir, valid_pack_data): + """Windows-authored manifests keep compatibility without traversal.""" + valid_pack_data["provides"]["templates"][0]["file"] = ( + "templates\\spec-template.md" + ) + manifest_path = temp_dir / "preset.yml" + with open(manifest_path, "w") as f: + yaml.dump(valid_pack_data, f) + + manifest = PresetManifest(manifest_path) + + assert manifest.templates[0]["file"] == "templates/spec-template.md" + def test_get_hash(self, pack_dir): """Test manifest hash calculation.""" manifest = PresetManifest(pack_dir / "preset.yml") @@ -1556,6 +1590,95 @@ def fake_open(req, timeout=None): assert captured["req"].get_header("Authorization") == "Bearer ghp_testtoken" + def test_fetch_single_catalog_uses_bounded_read(self, project_dir): + """Catalog JSON responses must use the shared bounded-read helper.""" + from unittest.mock import patch, MagicMock + + catalog = PresetCatalog(project_dir) + mock_response = MagicMock() + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + entry = PresetCatalogEntry( + url="https://example.com/catalog.json", + name="custom", + priority=1, + install_allowed=True, + ) + + with patch.object(catalog, "_open_url", return_value=mock_response), \ + patch( + "specify_cli.presets.read_response_limited", + side_effect=PresetError("catalog too large"), + ): + with pytest.raises(PresetError, match="catalog too large"): + catalog._fetch_single_catalog(entry, force_refresh=True) + + def test_fetch_catalog_uses_bounded_read(self, project_dir): + """The legacy single-catalog path must also bound catalog JSON reads.""" + from unittest.mock import patch, MagicMock + + catalog = PresetCatalog(project_dir) + mock_response = MagicMock() + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + + with patch.object(catalog, "get_catalog_url", return_value="https://example.com/catalog.json"), \ + patch.object(catalog, "_open_url", return_value=mock_response), \ + patch( + "specify_cli.presets.read_response_limited", + side_effect=PresetError("catalog too large"), + ): + with pytest.raises(PresetError, match="catalog too large"): + catalog.fetch_catalog(force_refresh=True) + + def test_download_pack_verifies_sha256(self, project_dir): + """Catalog-provided checksums are enforced when present.""" + from unittest.mock import patch, MagicMock + + catalog = PresetCatalog(project_dir) + zip_bytes = b"fake zip data" + mock_response = MagicMock() + mock_response.read.return_value = zip_bytes + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + pack_info = { + "id": "test-pack", + "name": "Test Pack", + "version": "1.0.0", + "download_url": "https://example.com/test-pack.zip", + "sha256": hashlib.sha256(zip_bytes).hexdigest(), + "_install_allowed": 
True, + } + + with patch.object(catalog, "get_pack_info", return_value=pack_info), \ + patch.object(catalog, "_open_url", return_value=mock_response): + result = catalog.download_pack("test-pack", target_dir=project_dir) + + assert result.read_bytes() == zip_bytes + + def test_download_pack_rejects_sha256_mismatch(self, project_dir): + """A mismatched catalog checksum stops the downloaded ZIP being used.""" + from unittest.mock import patch, MagicMock + + catalog = PresetCatalog(project_dir) + mock_response = MagicMock() + mock_response.read.return_value = b"fake zip data" + mock_response.__enter__ = lambda s: s + mock_response.__exit__ = MagicMock(return_value=False) + pack_info = { + "id": "test-pack", + "name": "Test Pack", + "version": "1.0.0", + "download_url": "https://example.com/test-pack.zip", + "sha256": "0" * 64, + "_install_allowed": True, + } + + with patch.object(catalog, "get_pack_info", return_value=pack_info), \ + patch.object(catalog, "_open_url", return_value=mock_response): + with pytest.raises(PresetError, match="checksum mismatch"): + catalog.download_pack("test-pack", target_dir=project_dir) + # ===== Integration Tests ===== diff --git a/tests/test_registrar_path_traversal.py b/tests/test_registrar_path_traversal.py index fc423b4056..006daa89e8 100644 --- a/tests/test_registrar_path_traversal.py +++ b/tests/test_registrar_path_traversal.py @@ -121,6 +121,31 @@ def test_copilot_rejects_traversal_in_alias(self, tmp_path, bad_alias): _assert_no_stray_files(tmp_path, Path(bad_alias).name.replace("/", "")) +class TestSourceFileTraversal: + """Command source files must stay inside the declared source directory.""" + + @pytest.mark.parametrize("bad_file", TRAVERSAL_PAYLOADS) + def test_rejects_traversal_in_command_source_file(self, tmp_path, bad_file): + project, ext_dir = _project_and_source(tmp_path) + (project / ".gemini" / "commands").mkdir(parents=True) + + registrar = CommandRegistrar() + with pytest.raises(ValueError, match="escapes directory"): + registrar.register_commands( + "gemini", + [ + { + "name": "speckit.myext.ok", + "file": bad_file, + "aliases": [], + } + ], + "myext", + ext_dir, + project, + ) + + class TestCopilotPromptTraversal: """`write_copilot_prompt` is a public static method — guard it directly.""" diff --git a/tests/test_security_workflow.py b/tests/test_security_workflow.py new file mode 100644 index 0000000000..e02e01eff7 --- /dev/null +++ b/tests/test_security_workflow.py @@ -0,0 +1,378 @@ +"""Static checks for the GitHub Actions security workflow.""" + +from __future__ import annotations + +import inspect +import importlib.util +import json +import re +import subprocess +from pathlib import Path + +import yaml + + +REPO_ROOT = Path(__file__).resolve().parent.parent +SECURITY_WORKFLOW = REPO_ROOT / ".github" / "workflows" / "security.yml" +CONTRIBUTING = REPO_ROOT / "CONTRIBUTING.md" +BANDIT_BASELINE = REPO_ROOT / ".github" / "bandit-baseline.json" +SECURITY_REQUIREMENTS = REPO_ROOT / ".github" / "security-audit-requirements.txt" +SECURITY_REQUIREMENTS_SYNC_SCRIPT = ( + REPO_ROOT / ".github" / "scripts" / "check_security_requirements.py" +) + +WORKFLOW_LIVE_AUDIT_REQUIREMENTS = '"${{ runner.temp }}/spec-kit-audit-requirements.txt"' +COMMITTED_AUDIT_REQUIREMENTS = ".github/security-audit-requirements.txt" +WORKFLOW_COMPILE_SCHEDULED_TEST_EXTRA_DEPS = ( + "uv pip compile pyproject.toml --extra test " + '--python-version "${{ matrix.python-version }}" --generate-hashes --quiet ' + f"--output-file {WORKFLOW_LIVE_AUDIT_REQUIREMENTS}" +) 
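+# Two resolution modes are pinned by the constants in this module: the
+# scheduled job compiles dependencies live (per matrix Python version, into
+# runner.temp) so new advisories surface weekly, while PR and push runs audit
+# the committed, hash-locked requirements file that contributors refresh
+# locally with the command below.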
+LOCAL_REFRESH_TEST_EXTRA_DEPS = ( + "uv pip compile pyproject.toml --extra test --universal --generate-hashes " + f"--quiet --no-header --output-file {COMMITTED_AUDIT_REQUIREMENTS}" +) +WORKFLOW_SYNC_COMPILE_TEST_EXTRA_DEPS = ( + "uv pip compile pyproject.toml --extra test --universal --generate-hashes " + "--quiet --no-header --output-file" +) +WORKFLOW_SYNC_SCRIPT = "python .github/scripts/check_security_requirements.py" +WORKFLOW_LIVE_PIP_AUDIT = ( + "uvx --from pip-audit==2.10.0 pip-audit --disable-pip --require-hashes " + f"-r {WORKFLOW_LIVE_AUDIT_REQUIREMENTS} --progress-spinner off" +) +LOCAL_PIP_AUDIT = ( + "uvx --from pip-audit==2.10.0 pip-audit --disable-pip --require-hashes " + f"-r {COMMITTED_AUDIT_REQUIREMENTS} --progress-spinner off" +) +BANDIT = ( + "uvx --from bandit==1.9.4 bandit -r src -lll " + "--baseline .github/bandit-baseline.json" +) + + +def _load_security_workflow() -> dict: + return yaml.safe_load(SECURITY_WORKFLOW.read_text(encoding="utf-8")) + + +def _workflow_triggers() -> dict: + workflow = _load_security_workflow() + return workflow.get("on") or workflow[True] + + +def _step(job_name: str, step_name: str) -> dict: + workflow = _load_security_workflow() + for step in workflow["jobs"][job_name]["steps"]: + if step.get("name") == step_name: + return step + raise AssertionError(f"Step {step_name!r} not found in job {job_name!r}.") + + +def _step_run(job_name: str, step_name: str) -> str: + return _step(job_name, step_name)["run"] + + +def _load_sync_script(): + spec = importlib.util.spec_from_file_location( + "check_security_requirements", + SECURITY_REQUIREMENTS_SYNC_SCRIPT, + ) + assert spec is not None + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +class TestSecurityWorkflow: + """Guard the security workflow against review-feedback regressions.""" + + def test_dependency_audit_uses_committed_requirements_for_prs_and_pushes(self): + scheduled_compile = _step( + "dependency-audit", + "Compile scheduled audit requirements", + ) + scheduled_audit = _step( + "dependency-audit", + "Run pip-audit (scheduled live resolution)", + ) + committed_audit = _step( + "dependency-audit", + "Run pip-audit (committed requirements)", + ) + sync_check = _step( + "dependency-audit", + "Check committed audit requirements are current", + ) + + assert scheduled_compile["if"] == "${{ github.event_name == 'schedule' }}" + assert WORKFLOW_COMPILE_SCHEDULED_TEST_EXTRA_DEPS in scheduled_compile["run"] + assert scheduled_audit["if"] == "${{ github.event_name == 'schedule' }}" + assert scheduled_audit["run"] == WORKFLOW_LIVE_PIP_AUDIT + assert sync_check["if"] == "${{ github.event_name != 'schedule' }}" + assert sync_check["env"]["DEPENDENCY_DIFF_BASE"] == ( + "${{ github.event.pull_request.base.sha || github.event.before || '' }}" + ) + assert sync_check["env"]["DEPENDENCY_DIFF_HEAD"] == "${{ github.sha }}" + assert sync_check["run"] == WORKFLOW_SYNC_SCRIPT + assert committed_audit["if"] == "${{ github.event_name != 'schedule' }}" + assert committed_audit["run"] == LOCAL_PIP_AUDIT + + dependency_job_text = "\n".join( + step.get("run", "") + for step in _load_security_workflow()["jobs"]["dependency-audit"]["steps"] + ) + dependency_protection_text = ( + dependency_job_text + + "\n" + + SECURITY_REQUIREMENTS_SYNC_SCRIPT.read_text(encoding="utf-8") + ) + assert "--generate-hashes" in dependency_protection_text + assert "--no-header" in dependency_protection_text + assert "--require-hashes" in 
dependency_protection_text + assert "--disable-pip" in dependency_protection_text + assert WORKFLOW_LIVE_AUDIT_REQUIREMENTS in dependency_job_text + assert COMMITTED_AUDIT_REQUIREMENTS in dependency_protection_text + assert "uv export" not in dependency_protection_text + assert "--frozen" not in dependency_protection_text + assert "--locked" not in dependency_protection_text + assert "uv.lock" not in dependency_protection_text + assert "/tmp/" not in dependency_protection_text + assert "uvx pip-audit ." not in dependency_protection_text + + def test_dependency_audit_checkout_fetches_previous_commit(self): + checkout = _step("dependency-audit", "Checkout") + + assert checkout["with"]["fetch-depth"] == 2 + + def test_security_workflow_triggers_are_preserved(self): + triggers = _workflow_triggers() + + assert triggers["push"]["branches"] == ["main"] + assert triggers["pull_request"] is None + assert triggers["workflow_dispatch"] is None + assert triggers["schedule"] == [{"cron": "17 4 * * 1"}] + + def test_dependency_audit_runs_supported_python_os_matrix(self): + workflow = _load_security_workflow() + matrix = workflow["jobs"]["dependency-audit"]["strategy"]["matrix"] + + assert matrix["os"] == ["ubuntu-latest", "windows-latest"] + assert matrix["python-version"] == ["3.11", "3.12", "3.13"] + assert workflow["jobs"]["dependency-audit"]["runs-on"] == "${{ matrix.os }}" + + def test_security_tools_are_pinned(self): + workflow_text = SECURITY_WORKFLOW.read_text(encoding="utf-8") + + assert WORKFLOW_LIVE_PIP_AUDIT in workflow_text + assert LOCAL_PIP_AUDIT in workflow_text + assert BANDIT in workflow_text + assert re.search(r"\buvx\s+pip-audit\b", workflow_text) is None + assert re.search(r"\buvx\s+bandit\b", workflow_text) is None + + def test_actions_are_pinned_to_full_commit_shas(self): + workflow = _load_security_workflow() + uses_refs = [ + step["uses"] + for job in workflow["jobs"].values() + for step in job["steps"] + if "uses" in step + ] + + assert uses_refs + for uses_ref in uses_refs: + assert re.search(r"@[0-9a-f]{40}$", uses_ref), uses_ref + assert re.search(r"@v\d+", uses_ref) is None + + def test_bandit_does_not_globally_skip_b602(self): + run = _step_run("static-analysis", "Run Bandit") + workflow_text = SECURITY_WORKFLOW.read_text(encoding="utf-8") + + assert run == BANDIT + assert "--skip" not in run + assert "--skip B602" not in workflow_text + assert "--baseline .github/bandit-baseline.json" in run + + def test_bandit_baseline_only_ignores_shell_step_b602(self): + baseline = json.loads(BANDIT_BASELINE.read_text(encoding="utf-8")) + results = baseline["results"] + + assert len(results) == 1 + assert results[0]["test_id"] == "B602" + assert ( + results[0]["filename"] + == "src/specify_cli/workflows/steps/shell/__init__.py" + ) + + def test_bandit_nosec_is_not_suppressed_in_source(self): + nosec_lines = [] + for path in (REPO_ROOT / "src").rglob("*.py"): + for line_number, line in enumerate( + path.read_text(encoding="utf-8").splitlines(), + start=1, + ): + if re.search(r"#\s*nosec\b", line, flags=re.IGNORECASE): + nosec_lines.append(f"{path.relative_to(REPO_ROOT)}:{line_number}") + + assert nosec_lines == [] + + def test_run_command_does_not_accept_shell_argument(self): + from specify_cli import run_command + + assert "shell" not in inspect.signature(run_command).parameters + + def test_committed_audit_requirements_are_hashed(self): + requirements = SECURITY_REQUIREMENTS.read_text(encoding="utf-8") + + assert "--hash=sha256:" in requirements + assert not 
requirements.startswith("#") + assert "pytest==" in requirements + assert "pytest-cov==" in requirements + + def test_sync_script_skips_when_dependency_inputs_are_unchanged( + self, + monkeypatch, + capsys, + ): + sync_script = _load_sync_script() + + def fake_run(command, **kwargs): + assert command == [ + "git", + "diff", + "--name-only", + "HEAD^", + "HEAD", + "--", + "pyproject.toml", + ".github/security-audit-requirements.txt", + ] + assert kwargs["check"] is True + return subprocess.CompletedProcess(command, 0, stdout="", stderr="") + + monkeypatch.setattr(sync_script.subprocess, "run", fake_run) + + assert sync_script.main() == 0 + assert "sync check skipped" in capsys.readouterr().out + + def test_sync_script_uses_github_diff_refs_when_available( + self, + monkeypatch, + ): + sync_script = _load_sync_script() + monkeypatch.setenv("DEPENDENCY_DIFF_BASE", "abc123") + monkeypatch.setenv("DEPENDENCY_DIFF_HEAD", "def456") + + def fake_run(command, **_kwargs): + assert command == [ + "git", + "diff", + "--name-only", + "abc123", + "def456", + "--", + "pyproject.toml", + ".github/security-audit-requirements.txt", + ] + return subprocess.CompletedProcess(command, 0, stdout="", stderr="") + + monkeypatch.setattr(sync_script.subprocess, "run", fake_run) + + assert sync_script._dependency_inputs_changed() is False + + def test_sync_script_compiles_and_compares_when_dependency_inputs_changed( + self, + monkeypatch, + tmp_path, + ): + sync_script = _load_sync_script() + committed_requirements = tmp_path / ".github" / "security-audit-requirements.txt" + generated_requirements = tmp_path / "generated-requirements.txt" + committed_requirements.parent.mkdir() + committed_requirements.write_text("pytest==1\n", encoding="utf-8") + compile_commands = [] + + monkeypatch.setattr(sync_script, "REPO_ROOT", tmp_path) + monkeypatch.setattr(sync_script, "COMMITTED_REQUIREMENTS", committed_requirements) + monkeypatch.setenv("GENERATED_REQUIREMENTS", str(generated_requirements)) + + def fake_run(command, **kwargs): + if command[0] == "git": + return subprocess.CompletedProcess( + command, + 0, + stdout="pyproject.toml\n", + stderr="", + ) + + compile_commands.append(command) + assert kwargs["check"] is True + generated_requirements.write_text("pytest==1\n", encoding="utf-8") + return subprocess.CompletedProcess(command, 0) + + monkeypatch.setattr(sync_script.subprocess, "run", fake_run) + + assert sync_script.main() == 0 + assert len(compile_commands) == 1 + compile_command = " ".join(compile_commands[0]) + assert WORKFLOW_SYNC_COMPILE_TEST_EXTRA_DEPS in compile_command + assert "--output-file" in compile_commands[0] + assert str(generated_requirements) in compile_commands[0] + + def test_sync_script_fails_when_generated_requirements_differ( + self, + monkeypatch, + tmp_path, + capsys, + ): + sync_script = _load_sync_script() + committed_requirements = tmp_path / ".github" / "security-audit-requirements.txt" + generated_requirements = tmp_path / "generated-requirements.txt" + committed_requirements.parent.mkdir() + committed_requirements.write_text("pytest==1\n", encoding="utf-8") + + monkeypatch.setattr(sync_script, "REPO_ROOT", tmp_path) + monkeypatch.setattr(sync_script, "COMMITTED_REQUIREMENTS", committed_requirements) + monkeypatch.setenv("GENERATED_REQUIREMENTS", str(generated_requirements)) + + def fake_run(command, **_kwargs): + if command[0] == "git": + return subprocess.CompletedProcess( + command, + 0, + stdout="pyproject.toml\n", + stderr="", + ) + + generated_requirements.write_text("pytest==2\n", 
encoding="utf-8") + return subprocess.CompletedProcess(command, 0) + + monkeypatch.setattr(sync_script.subprocess, "run", fake_run) + + assert sync_script.main() == 1 + assert ( + "Regenerate .github/security-audit-requirements.txt" + in capsys.readouterr().err + ) + + def test_contributing_documents_security_commands(self): + contributing_text = CONTRIBUTING.read_text(encoding="utf-8") + + assert LOCAL_REFRESH_TEST_EXTRA_DEPS in contributing_text + assert LOCAL_PIP_AUDIT in contributing_text + assert BANDIT in contributing_text + assert "/tmp/" not in contributing_text + assert "uv export" not in contributing_text + assert "--frozen" not in contributing_text + assert "--locked" not in contributing_text + assert ( + re.search( + r"--output-file\s+spec-kit-audit-requirements\.txt\b", + contributing_text, + ) + is None + ) + assert ( + re.search(r"-r\s+spec-kit-audit-requirements\.txt\b", contributing_text) + is None + )