Repository: cruise-automation/fwanalyzer Branch: master Commit: a0db45054642 Files: 94 Total size: 266.9 KB Directory structure: gitextract_rnu42ilk/ ├── .circleci/ │ └── config.yml ├── .github/ │ └── workflows/ │ └── golangci-lint.yml ├── .gitignore ├── Building.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Changelog.md ├── Checksec.md ├── Dockerfile ├── LICENSE ├── Makefile ├── Readme.md ├── cmd/ │ └── fwanalyzer/ │ ├── fwanalyzer.go │ └── fwanalyzer_test.go ├── devices/ │ ├── Readme.md │ ├── android/ │ │ ├── Readme.md │ │ ├── android_properties.toml │ │ ├── android_user_build_checks.toml │ │ ├── android_user_build_checks_boot.toml │ │ ├── check_ota.py │ │ ├── system.toml │ │ └── unpack.sh │ ├── check.py │ └── generic/ │ ├── Readme.md │ └── root.toml ├── docker-compose.yml ├── go.mod ├── go.sum ├── pkg/ │ ├── analyzer/ │ │ ├── analyzer.go │ │ ├── analyzer_test.go │ │ ├── dataextract/ │ │ │ ├── dataextract.go │ │ │ └── dataextract_test.go │ │ ├── dircontent/ │ │ │ ├── dircontent.go │ │ │ └── dircontent_test.go │ │ ├── filecmp/ │ │ │ ├── filecmp.go │ │ │ └── filecmp_test.go │ │ ├── filecontent/ │ │ │ ├── filecontent.go │ │ │ └── filecontent_test.go │ │ ├── filepathowner/ │ │ │ ├── filepathowner.go │ │ │ └── filepathowner_test.go │ │ ├── filestatcheck/ │ │ │ ├── filestatcheck.go │ │ │ └── filestatcheck_test.go │ │ ├── filetree/ │ │ │ ├── filetree.go │ │ │ └── filetree_test.go │ │ └── globalfilechecks/ │ │ ├── globalfilechecks.go │ │ └── globalfilechecks_test.go │ ├── capability/ │ │ ├── capability.go │ │ └── capability_test.go │ ├── cpioparser/ │ │ ├── cpioparser.go │ │ └── cpioparser_test.go │ ├── dirparser/ │ │ ├── dirparser.go │ │ └── dirparser_test.go │ ├── extparser/ │ │ ├── extparser.go │ │ └── extparser_test.go │ ├── fsparser/ │ │ └── fsparser.go │ ├── squashfsparser/ │ │ ├── squashfsparser.go │ │ └── squashfsparser_test.go │ ├── ubifsparser/ │ │ ├── ubifsparser.go │ │ └── ubifsparser_test.go │ ├── util/ │ │ └── util.go │ └── vfatparser/ │ ├── 
vfatparser.go │ └── vfatparser_test.go ├── scripts/ │ ├── catfile.sh │ ├── check_apkcert.sh │ ├── check_cert.sh │ ├── check_file_arm32.sh │ ├── check_file_arm64.sh │ ├── check_file_elf_stripped.sh │ ├── check_file_x8664.sh │ ├── check_otacert.sh │ ├── check_privatekey.sh │ ├── check_sec.sh │ ├── diff.sh │ └── prop2json.py └── test/ ├── e2cp ├── elf_main.go ├── oldtree.json ├── script_test.sh ├── squashfs.img ├── squashfs_cap.img ├── test.cap.file ├── test.cpio ├── test.py ├── test_cfg.base.toml ├── test_cfg.toml ├── test_cfg_selinux.toml ├── testdir/ │ ├── bin/ │ │ ├── elf_arm32 │ │ ├── elf_arm64 │ │ └── elf_x8664 │ ├── dir1/ │ │ └── file2 │ ├── file1.txt │ └── jsonfile.json ├── unsquashfs └── vfat.img ================================================ FILE CONTENTS ================================================ ================================================ FILE: .circleci/config.yml ================================================ version: 2 jobs: build: machine: docker_layer_caching: false steps: - checkout - run: docker-compose build fwanalyzer - run: docker-compose run --rm fwanalyzer make deps - run: docker-compose run --rm fwanalyzer make test: machine: docker_layer_caching: false steps: - checkout - run: docker-compose build fwanalyzer - run: docker-compose run --rm fwanalyzer make deps - run: docker-compose run --rm fwanalyzer make testsetup ci-tests workflows: version: 2 test-build: jobs: - test - build: requires: - test ================================================ FILE: .github/workflows/golangci-lint.yml ================================================ name: golangci-lint on: push: tags: - v* branches: - master - main pull_request: permissions: contents: read jobs: golangci: name: lint runs-on: ubuntu-latest steps: - uses: actions/setup-go@v3 with: go-version: 1.13 - uses: actions/checkout@v3 - name: golangci-lint uses: golangci/golangci-lint-action@v3 with: version: v1.29 ================================================ FILE: .gitignore 
================================================ build/** release/** ================================================ FILE: Building.md ================================================ # Building FwAnalyzer ## Requirements - golang (with mod support) + golang-lint - Python - filesystem tools such as e2tools, mtools The full list of dependencies is tracked in the [Dockerfile](Dockerfile). ## Clone Repository ```sh go get github.com/cruise-automation/fwanalyzer ``` ## Building Before building you need to download third party go packages, run `make deps` before the first build. ```sh cd go/src/github.com/cruise-automation/fwanalyzer make deps make ``` The `fwanalyzer` binary will be in `build/`. # Testing We have two types of tests: unit tests and integration tests, both tests will be triggered by running `make test`. Run `make testsetup` once to setup the test environment in `test/`. Tests rely on e2tools, mtools, squashfs-tools, and ubi_reader, as well as Python. ```sh cd go/src/github.com/cruise-automation/fwanalyzer make testsetup make test ``` ================================================ FILE: CODE_OF_CONDUCT.md ================================================ # Code of Conduct This code of conduct outlines our expectations for participants within the Cruise LLC (Cruise) community, as well as steps to reporting unacceptable behavior. We are committed to providing a welcoming and inspiring community for all and expect our code of conduct to be honored. Anyone who violates this code of conduct may be banned from the community. 
## Our Commitment In the interest of fostering an open and welcoming environment, we as contributors and maintainers commit to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 
Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at opensource@getcruise.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team will maintain confidentiality to the extent possible with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org ================================================ FILE: CONTRIBUTING.md ================================================ # Contributing By submitting a Contribution this Project (terms defined below), you agree to the following Contributor License Agreement: The following terms are used throughout this agreement: * You - the person or legal entity including its affiliates asked to accept this agreement. An affiliate is any entity that controls or is controlled by the legal entity, or is under common control with it. * Project - is an umbrella term that refers to any and all open source projects from Cruise LLC. * Contribution - any type of work that is submitted to a Project, including any modifications or additions to existing work. 
* Submitted - conveyed to a Project via a pull request, commit, issue, or any form of electronic, written, or verbal communication with Cruise LLC, contributors or maintainers. **1. Grant of Copyright License.** Subject to the terms and conditions of this agreement, You grant to the Projects’ maintainers, contributors, users and to Cruise LLC a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your contributions and such derivative works. Except for this license, You reserve all rights, title, and interest in your contributions. **2. Grant of Patent License.** Subject to the terms and conditions of this agreement, You grant to the Projects’ maintainers, contributors, users and to Cruise LLC a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer your contributions, where such license applies only to those patent claims licensable by you that are necessarily infringed by your contribution or by combination of your contribution with the project to which this contribution was submitted. If any entity institutes patent litigation - including cross-claim or counterclaim in a lawsuit - against You alleging that your contribution or any project it was submitted to constitutes or is responsible for direct or contributory patent infringement, then any patent licenses granted to that entity under this agreement shall terminate as of the date such litigation is filed. **3. 
Source of Contribution.** Your contribution is either your original creation, based upon previous work that, to the best of your knowledge, is covered under an appropriate open source license and you have the right under that license to submit that work with modifications, whether created in whole or in part by you, or you have clearly identified the source of the contribution and any license or other restriction (like related patents, trademarks, and license agreements) of which you are personally aware. ================================================ FILE: Changelog.md ================================================ # Change Log ## Unreleased ## [v1.4.4] - 2022-10-24 ### Changed - updated Building.md - updated Readme.md - Scripts now get the full filepath as second argument (before it would pass `bash` now it will pass `/bin/bash`) ### Fixed - Fix a bug where incorrect keys in checksec were silently skipped ## [v1.4.3] - 2020-08-17 ### Changed - support older versions of checksec ## [v1.4.2] - 2020-08-17 ### Added - checksec wrapper script, see [check_sec.sh](scripts/check_sec.sh) and [Checksec Wrapper Readme](Checksec.md) - link support for extfs, this requires `https://github.com/crmulliner/e2tools/tree/link_support` (or later) ### Changed - updated `test/test.img.gz` ext2 test filesystem image - updated `test/e2cp` binary ## [v1.4.1] - 2020-05-06 ### Fixed - removed `release/` folder - FileStatCheck for links - general handling for links ## [v1.4.0] - 2020-04-30 ### Added - NEW support for Linux Capabilities - NEW Capability support for ext2/3/4 and squashfs - NEW Selinux support for SquashFS ### Changed - _check.py_ cleaned up a bit, avoiding using `shell=True` in subprocess invocations. 
- updated linter version to v1.24 - switch back to `-lls` for unsquashfs - copyright: GM Cruise -> Cruise ### Fixed - FileTreeCheck LinkTarget handling ## [v1.3.2] - 2020-01-15 ### Fixed - _check.py_ fix to support pathnames with spaces - _cpiofs_ fix date parsing - _cpiofs_ added work around for missing directory entries ## [v1.3.1] - 2020-01-07 ### Fixed - report status in _check.py_ - use quiet flag for _cpiofs_ ## [v1.3.0] - 2020-01-07 ### Added - NEW _cpiofs_ for cpio as filesystem - NEW universal _check.py_ (so you just need to write a custom unpacker) - NEW _android/unpack.sh_ (for _check.py_) - better options for scripts (FileContent and DataExtract) ### Fixed - $PATH in makefile - FileContent file iterator - _squashfs_ username parsing ## [v1.2.0] - 2019-11-19 ### Changed - moved to go 1.13 - only store _current_file_treepath_ if filetree changed ## [v.1.1.0] - 2019-10-15 ### Added - NEW FileCmp check for full file diff against 'old' version - allow multiple matches for regex based DataExtract ### Fixed - squashfs username parsing ## [v.1.0.1] - 2019-09-19 ### Fixed - filename for BadFiles check output ## [v.1.0.0] - 2019-08-15 ### Added - CI - Build instructions ## [initial] - 2019-08-05 ================================================ FILE: Checksec.md ================================================ # checksec Integration [checksec](https://github.com/slimm609/checksec.sh) is a bash script for checking security properties of executables (like PIE, RELRO, Canaries, ...). Checksec is an incredibly helpful tool; therefore, we developed a wrapper script for FwAnalyzer to ease the usage of checksec. Below we go through the steps required to use checksec with FwAnalyzer. ## Installation The installation is rather simple. Clone the checksec repository and copy the `checksec` script to a directory in your PATH or add the directory containing `checksec` to your PATH. ## Configuration Configuration is done in two steps. 
First step is adding a `FileContent` check that uses the `Script` option. The second step is creating the checksec wrapper configuration. The configuration allows you to selectively skip files (e.g. vendor binaries) and fine tune the security features that you want to enforce. ### checksec wrapper configuration The checksec wrapper has two options, and uses JSON: - cfg : checksec config, where you can select acceptable values for each field in the checksec output. The key is the name of the checksec field and the value is an array where each item is an acceptable value (e.g. allow `full` and `partial` RELRO). Omitted fields are not checked. - skip : array of fully qualified filenames that should not be checked example config: ```json { "cfg": { "pie": ["yes"], "nx": ["yes"], "relro": ["full", "partial"] }, "skip": ["/usr/bin/bla","/bin/blabla"] } ``` ### FwAnalyzer configuration The FwAnalyzer configuration uses the checksec wrapper config and looks like the example below. We define a `FileContent` check and select `/usr/bin` as the target directory. The name of the wrapper script is `check_sec.sh`. We pass two options to the script. First argument `*` selects all files in `/usr/bin` and the second argument is the checksec wrapper config we created above. 
example config: ```ini [FileContent."checksec_usr_bin"] File = "/usr/bin" Script = "check_sec.sh" ScriptOptions = ["*", """ { "cfg":{ "pie": ["yes"], "nx": ["yes"], "relro": ["full", "partial"] }, "skip": ["/usr/bin/bla","/bin/blabla"] } """] ``` ### Example Output ```json "offenders": { "/usr/bin/example": [ { "canary": "no", "fortified": "0", "fortify-able": "24", "fortify_source": "no", "nx": "yes", "pie": "no", "relro": "partial", "rpath": "no", "runpath": "no", "symbols": "no" } ] } ``` ================================================ FILE: Dockerfile ================================================ FROM golang:1.13 RUN apt update && apt -y install e2tools mtools file squashfs-tools unzip python-setuptools python-lzo cpio sudo RUN wget https://github.com/crmulliner/ubi_reader/archive/master.zip -O ubireader.zip && unzip ubireader.zip && cd ubi_reader-master && python setup.py install WORKDIR $GOPATH/src/github.com/cruise-automation/fwanalyzer COPY . ./ RUN make deps ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: Makefile ================================================ .PHONY: build ifeq ($(GOOS),) GOOS := "linux" endif VERSION=1.4.1 PWD := $(shell pwd) all: build .PHONY: build build: go mod verify mkdir -p build GOOS=$(GOOS) go build -a -ldflags '-w -s' -o build/fwanalyzer ./cmd/fwanalyzer .PHONY: release release: build mkdir -p release cp build/fwanalyzer release/fwanalyzer-$(VERSION)-linux-amd64 .PHONY: testsetup testsetup: gunzip -c test/test.img.gz >test/test.img gunzip -c test/ubifs.img.gz >test/ubifs.img gunzip -c test/cap_ext2.img.gz >test/cap_ext2.img sudo setcap cap_net_admin+p test/test.cap.file getcap test/test.cap.file .PHONY: test test: PATH="$(PWD)/scripts:$(PWD)/test:$(PATH)" go test -count=3 -cover ./... 
.PHONY: integration-test integration-test: build PATH="$(PWD)/scripts:$(PWD)/test:$(PWD)/build:$(PATH)" ./test/test.py .PHONY: ci-tests ci-tests: build test integration-test echo "done" .PHONY: modules modules: go mod tidy .PHONY: deploy deploy: build .PHONY: clean clean: rm -rf build .PHONY: distclean distclean: clean rm -rf vendor .PHONY: deps deps: go mod download ================================================ FILE: Readme.md ================================================ # FwAnalyzer (Firmware Analyzer) [![CircleCI](https://circleci.com/gh/cruise-automation/fwanalyzer.svg?style=shield)](https://circleci.com/gh/cruise-automation/fwanalyzer) FwAnalyzer is a tool to analyze (ext2/3/4), FAT/VFat, SquashFS, UBIFS filesystem images, cpio archives, and directory content using a set of configurable rules. FwAnalyzer relies on [e2tools](https://github.com/crmulliner/e2tools/) for ext filesystems, [mtools](https://www.gnu.org/software/mtools/) for FAT filesystems, [squashfs-tools](https://github.com/plougher/squashfs-tools) for SquashFS filesystems, and [ubi_reader](https://github.com/crmulliner/ubi_reader) for UBIFS filesystems. [cpio](https://www.gnu.org/software/cpio/) for cpio archives. SELinux/Capability support for ext2/3/4 images requires a patched version of [e2tools](https://github.com/crmulliner/e2tools/). SELinux/Capability support for SquashFS images requires a patched version of [squashfs-tools](https://github.com/crmulliner/squashfs-tools/). ![fwanalyzer](images/fwanalyzer.png) ## Overview The main idea of **FwAnalyzer** is to provide a tool for rapid analysis of filesystem images as part of a firmware security Q&A check suite. FwAnalyzer takes a configuration file that defines various rules for files and directories and runs the configured checks against a given filesystem image. The output of FwAnalyzer is a report, which contains the list of files that violate any of the rules specified in the configuration. 
The report further contains meta information about the filesystem image and, if configured, information extracted from files within the analyzed filesystem. The report is formatted using JSON so it can be easily integrated as a step in a larger analysis. Example report: ```json { "fs_type": "extfs", "image_digest": "9d5fd9acc98421b46976f283175cc438cf549bb0607a1bca6e881d3e7f323794", "image_name": "test/test.img", "current_file_tree_path": "test/oldtree.json.new", "old_file_tree_path": "test/oldtree.json", "data": { "Version": "1.2.3", "date1 file": "Mon Oct 1 16:13:05 EDT 2018\n" }, "informational": { "/bin": [ "CheckFileTree: new file: 40755 1001:1001 1024 0 SeLinux label: -" ], }, "offenders": { "/bin/elf_arm32": [ "script(check_file_elf_stripped.sh) returned=elf_arm32 is not stripped" ], "/file1": [ "File not allowed" ], "/file2": [ "File is WorldWriteable, not allowed", "File Uid not allowed, Uid = 123" ], } } ``` ## Building and Development Follow the steps described in [Building](Building.md) to install all requirements and build FwAnalyzer. ## Using FwAnalyzer Command line options - `-cfg` : string, path to the config file - `-cfgpath` : string, path to config file and included files (can be repeated) - `-in` : string, filesystem image file or path to directory - `-out` : string, output report to file or stdout using '-' - `-extra` : string, overwrite directory to read extra data from (e.g. filetree, filecmp) - `-ee` : exit with error if offenders are present - `-invertMatch` : invert regex matches (for testing) Example: ```sh fwanalyzer -cfg system_fwa.toml -in system.img -out system_check_output.json ``` Example for using custom scripts stored in the _scripts/_ directory: ```sh PATH=$PATH:./scripts fwanalyzer -cfg system_fwa.toml -in system.img -out system_check_output.json ``` The [_devices/_](devices/) folder contains helper scripts for unpacking and dealing with specific device types and firmware package formats such as [Android](devices/android). 
It also includes general configuration files that can be included in target specific FwAnalyzer configurations. _check.py_ in the [_devices/_](devices) folder provides a universal script to effectively use FwAnalyzer, see [devices/Readme.md](devices/Readme.md) for details. This likely is how most people will invoke FwAnalyzer. The [_scripts/_](scripts/) folder contains helper scripts that can be called from FwAnalyzer for file content analysis and data extraction. Most interesting should be our checksec wrapper [_check_sec.sh_](scripts/check_sec.sh), see the [Checksec Wrapper Readme](Checksec.md). ## Config Options ### Global Config The global config is used to define some general parameters. The `FsType` (filesystem type) field selects the backend that is used to access the files in the image. The supported options for FsType are: - `dirfs`: to read files from a directory on the host running fwanalyzer, supports Capabilities (supported FsTypeOptions are: N/A) - `extfs`: to read ext2/3/4 filesystem images (supported FsTypeOptions are: `selinux` and `capabilities`) - `squashfs`: to read SquashFS filesystem images (supported FsTypeOptions are: `securityinfo`) - `ubifs`: to read UBIFS filesystem images (supported FsTypeOptions are: N/A) - `vfatfs`: to read VFat filesystem images (supported FsTypeOptions are: N/A) - `cpiofs`: to read cpio archives (supported FsTypeOptions are: `fixdirs`) The FsTypeOptions allow tuning of the FsType driver. - `securityinfo`: will enable selinux and capability support for SquashFS images - `capabilities`: will enable capability support when reading ext filesystem images - `selinux`: will enable selinux support when reading ext filesystem images - `fixdirs`: will attempt to work around a cpio issue where a file exists in a directory while there is no entry for the directory itself The `DigestImage` option will generate a SHA-256 digest of the filesystem image that was analyzed, the digest will be included in the output.
Example: ```toml [GlobalConfig] FsType = "extfs" FsTypeOptions = "selinux" DigestImage = true ``` Example Output: ```json "fs_type": "extfs", "image_digest": "9d5fd9acc98421b46976f283175cc438cf549bb0607a1bca6e881d3e7f323794", "image_name": "test/test.img", ``` ### Include The `Include` statement is used to include other FwAnalyzer configuration files into the configuration containing the statement. The include statement can appear in any part of the configuration. The `-cfgpath` parameter sets the search path for include files. Example: ```toml [Include."fw_base.toml"] ``` ### Global File Checks The `GlobalFileChecks` are more general checks that are applied to the entire filesystem. - `Suid`: bool, (optional) if enabled the analysis will fail if any file has the sticky bit set (default: false) - `SuidAllowedList`: string array, (optional) allows Suid files (by full path) for the Suid check - `WorldWrite`: bool, (optional) if enabled the analysis will fail if any file can be written to by any user (default: false) - `SELinuxLabel`: string, (optional) if enabled the analysis will fail if a file does NOT have an SeLinux label - `Uids`: int array, (optional) specifies every allowed UID in the system, every file needs to be owned by a Uid specified in this list - `Gids`: int array, (optional) specifies every allowed GID in the system, every file needs to be owned by a Gid specified in this list - `BadFiles`: string array, (optional) specifies a list of unwanted files, allows wildcards such as `?`, `*`, and `**` (no file in this list should exist) - `BadFilesInformationalOnly`: bool, (optional) the result of the BadFile check will be Informational only (default: false) - `FlagCapabilityInformationalOnly`: bool, (optional) flag files for having a Capability set as Informational (default: false) Example: ```toml [GlobalFileChecks] Suid = true SuidAllowedList = ["/bin/sudo"] SELinuxLabel = false WorldWrite = true Uids = [0,1001,1002] Gids = [0,1001,1002] BadFiles = 
["/file99", "/file1", "*.h"] ``` Example Output: ```json "offenders": { "/bin/su": [ "File is SUID, not allowed" ], "/file1": [ "File Uid not allowed, Uid = 123" ], "/world": [ "File is WorldWriteable, not allowed" ], } ``` ### Link Handling With links we refer to soft links. Links can point to files on a different filesystem, therefore, we handle them in a special way. Link handling requires a patched version of e2tools: - [e2tools](https://github.com/crmulliner/e2tools/tree/link_support) with link support `FileStatCheck` will handle links like you would expect it. However if `AllowEmpty` is `false` and the file is a link then the check fails. All other checks and dataextract will fail if the file is a link. Those checks need to be pointed to the actual file (the file the link points to). ### File Stat Check The `FileStatCheck` can be used to model the metadata for a specific file or directory. Any variation of the configuration will be reported as an offender. - `AllowEmpty`: bool, (optional) defines that the file can have zero size will cause error if file is link (default: false) - `Uid`: int, (optional) specifies the UID of the file, not specifying a UID or specifying -1 will skip the check - `Gid`: int, (optional) specifies the GID of the file, not specifying a GID or specifying -1 will skip the check - `Mode`: string, (optional) specifies the UN*X file mode/permissions in octal, not specifying a mode will skip the check - `SELinuxLabel`: string, (optional) the SELinux label of the file (will skip the check if not set) - `LinkTarget`: string, (optional) the target of a symlink, not specifying a link target will skip the check. This is currently supported for `dirfs`, `squashfs`, `cpiofs`, `ubifs`, and `extfs` filesystems. - `Capability`: string array, (optional) list of capabilities (e.g. cap_net_admin+p). 
- `Desc`: string, (optional) is a descriptive string that will be attached to the report if there is a failed check - `InformationalOnly`: bool, (optional) the result of the check will be Informational only (default: false) Example: ```toml [FileStatCheck."/etc/passwd"] AllowEmpty = false Uid = 0 Gid = 0 Mode = "0644" Desc = "this need to be this way" ``` Example Output: ```json "offenders": { "/file2": [ "File State Check failed: size: 0 AllowEmpyt=false : this needs to be this way" ], } ``` ### File Path Owner Check The `FilePathOwner` check can be used to model the file/directory ownership for an entire tree of the filesystem. The check fails if any file or directory within the given directory is not owned by the specified `Uid` and `Gid` (type: int). Example: ```toml [FilePathOwner."/bin"] Uid = 0 Gid = 0 ``` Example Output: ```json "offenders": { "/dir1/file3": [ "FilePathOwner Uid not allowed, Uid = 1002 should be = 0", "FilePathOwner Gid not allowed, Gid = 1002 should be = 0" ], } ``` ### File Content Check The `FileContent` check allows to inspect the content of files. The content of a file can be checked using four different methods. The file content check can be run in non enforcement mode by setting `InformationalOnly` to true (default is false). InformationalOnly checks will produce an informational element in place of an offender.
#### Example: Regular Expression on entire file body - `File`: string, the full path of the file - `RegEx`: string, posix/golang regular expression - `RegExLineByLine`: bool, (optional) apply regex on a line by line basis, matching line will be in result (default: false) - `Match`: bool, (optional) indicate if the regular expression should match or not match (default: false) - `Desc`: string, (optional) is a descriptive string that will be attached to failed check - `InformationalOnly`: bool, (optional) the result of the check will be Informational only (default: false) Example: ```toml [FileContent."RegExTest1"] RegEx = ".*Ver=1337.*" Match = true File = "/etc/version" ``` #### Example: SHA-256 digest calculated over the file body - `File`: string, the full path of the file - `Digest`: string, HEX encoded digest - `Desc`: string, (optional) is a descriptive string that will be attached to failed check - `InformationalOnly`: bool, (optional) the result of the check will be Informational only Example: ```toml [FileContent."DigestTest1"] Digest = "8b15095ed1af38d5e383af1c4eadc5ae73cab03964142eb54cb0477ccd6a8dd4" File = "/ver" ``` Example Output: ```json "offenders": { "/ver": [ "Digest (sha256) did not match found = 44c77e41961f354f515e4081b12619fdb15829660acaa5d7438c66fc3d326df3 should be = 8b15095ed1af38d5e383af1c4eadc5ae73cab03964142eb54cb0477ccd6a8dd4." ], } ``` #### Example: Run an external script passing the filename to the script The file is extracted into a temp directory with a temp name before the script is executed. The check produces an offender if the script produced output on stdout or stderr. 
- `File`: string, the full path of the file or directory - `Script`: string, the full path of the script - `ScriptOptions`: string array, (optional) the first element allows to define a pattern containing wildcards like `?`, `*`, and `**` that is applied to filenames if present it will only check files that match the pattern, this is mostly useful when running the script on a directory. Arguments can be passed to the script using the second and following elements. - `File`: string, the full path of the file, if the path points to a directory the script is run for every file in the directory and subdirectories - `Desc`: string, (optional) is a descriptive string that will be attached to failed check - `InformationalOnly`: bool, (optional) the result of the check will be Informational only (default: false) If the `--` is present it indicates that the next argument is from the `ScriptOptions[1..N]`. The script is run with the following arguments: ``` [--] [script argument 1] ... [script argument N] ``` Example: ```toml [FileContent."ScriptTest1"] Script = "check_file_x8664.sh" File = "/bin" ``` Example Output: ```json "offenders": { "/bin/elf_arm32": [ "script(check_file_x8664.sh) returned=elf_arm32 not a x86-64 elf file" ], } ``` #### Json Field Compare - `File`: string, the full path of the file - `Json`: string, the field name using the dot (.) notation to access a field within an object with a colon (:) separating the required value. All types will be converted to string and compared as a string. Json arrays can be index by supplying the index instead of a field name. 
- `Desc`: string, (optional) is a descriptive string that will be attached to failed check - `InformationalOnly`: bool, (optional) the result of the check will be Informational only (default: false) Example: ```toml [FileContent."System_Arch"] Json = "System.Arch:arm64" File = "/system.json" Desc = "arch test" ``` Example Input: ```json { "System": { "Version": 7, "Arch": "arm32", "Info": "customized" } } ``` Example Output: ```json "offenders": { "/system.json": [ "Json field System.Arch = arm32 did not match = arm64, System.Arch, arch test" ], } ``` ### File Compare Check The `FileCmp` (File Compare) check is a mechanism to compare a file from a previous run with the file from the current run. The main idea behind this check is to provide more insights into file changes, since it allows comparing two versions of a file rather than comparing only a digest. This works by saving the file as the `OldFilePath` (if it does not exist) and skipping the check at the first run. In consecutive runs the current file and the saved old file will be copied to a temp directory. The script will be executed passing the original filename, the path to the old file and the path to the current file as arguments. If the script prints output the check will be marked as failed. - `File`: string, the full path of the file - `Script`: string, path to the script - `ScriptOptions`: string array, (optional) arguments passed to the script - `OldFilePath`: string, filename (absolute or relative) to use to store old file - `InformationalOnly`: bool, (optional) the result of the check will be Informational only (default: false) Script runs as: ```sh script.sh [--] [argument 1] .. [argument N] ``` Example: ```toml [FileCmp."test.txt"] File = "/test.txt" Script = "diff.sh" OldFilePath = "test.txt" InformationalOnly = true ``` ### File Tree Check The `FileTree` check generates a full filesystem tree (a list of every file and directory) and compares it with a previously saved file tree. 
The check will produce an informational output listing new files, deleted files, and modified files. `CheckPath` (string array) specifies the paths that should be included in the check. If CheckPath is not set it will behave like it was set to `["/"]` and will include the entire filesystem. If CheckPath was set to `[]` it will generate the file tree but will not check any files. `OldFileTreePath` specifies the filename to read the old filetree from, if a new filetree is generated (e.g. because the old filetree does not exist yet) the newly generated filetree file is OldFileTreePath with ".new" appended to it. The `OldFileTreePath` is relative to the configuration file. This means for '-cfg testdir/test.toml' with OldTreeFilePath = "test.json" fwanalyzer will try to read 'testdir/test.json'. The `-extra` command line option can be used to overwrite the path: '-cfg testdir/test.toml -extra test1' will try to read 'test1/test.json'. Similarly, the newly generated filetree file will be stored in the same directory.
File modification check can be customized with: - `CheckPermsOwnerChange`: bool, (optional) will tag a file as modified if owner or permission (mode) are changed (default: false) - `CheckFileSize`: bool, (optional) will tag a file as modified if the size changed (default: false) - `CheckFileDigest`: bool, (optional) will tag a file as modified if the content changed (comparing its SHA-256 digest) (default: false) - `SkipFileDigest`: bool, (optional) skip calculating the file digest (useful for dealing with very big files, default is: false) Example: ```toml [FileTreeCheck] OldTreeFilePath = "testtree.json" CheckPath = [ "/etc", "/bin" ] CheckPermsOwnerChange = true CheckFileSize = true CheckFileDigest = false ``` Example Output: ```json "informational": { "/bin/bla": [ "CheckFileTree: new file: 40755 1001:1001 1024 0 SeLinux label: -" ] } ``` ### Directory Content Check The `DirCheck` (Directory content) check specifies a set of files that are allowed to be, or required to be, in a specified directory. Any other file or directory found in that directory will be reported as an offender. If an `Allowed` file isn't found, the check will pass. If a `Required` file is not found, it will be reported as an offender. The file entries can contain wildcards like `?`, `*`, and `**`. The allowed patterns are described in the [golang documentation](https://golang.org/pkg/path/filepath/#Match). Only one `DirCheck` entry can exist per directory. Example: ```toml [DirContent."/home"] Allowed = ["collin", "jon"] Required = ["chris"] ``` ### Data Extract The `DataExtract` option allows extracting data from a file and including it in the report. Data can be extracted via regular expression, by running an external script, or by reading a JSON object. The extracted data can later be used by the post processing script. The Data Extract functionality adds the data to the report as a map of key:value pairs.
The key is defined as the name of the statement or by the optional Name parameter. The value is the result of the regular expression or the output of the script. #### Example: Regular expression based data extraction The output generated by the regular expression will be stored as the value for the name of this statement, the example below is named "Version". - `File`: string, the full path of the file - `RegEx`: string, regular expression with one matching field - `Name`: string, (optional) the key name - `Desc`: string, (optional) description Example: The key "Version" will contain the output of the regular expression. ```toml [DataExtract."Version"] File = "/etc/versions" RegEx = ".*Ver=(.+)\n" Desc = "Ver 1337 test" ``` Example Output: ```json "data": { "Version": "1.2.3", } ``` #### Example: Script-based data extraction The output generated by the script will be stored as the value for the name of this statement, the example below is named scripttest. - `File`: string, the full path of the file - `Script`: string, the full path of the script - `ScriptOptions`: string array (optional), arguments to pass to the script - `Name`: string, (optional) the key name - `Desc`: string, (optional) description The script is run with the following arguments: ``` [--] [script argument 1] ... [script argument N] ``` Example: The key "script_test" will contain the output of the script. The name of this statement is "scripttest" ```toml [DataExtract.scripttest] File = "/etc/somefile" Script = "extractscripttest.sh" Name = "script_test" ``` Example Output: ```json "data": { "script_test": "some data", } ``` #### Example: JSON data extraction The value of the JSON field will be stored as the value for the name of this statement, the example below is named OS_Info. - `File`: string, the full path of the file - `Json`: string, the field name using the dot (.)
notation to access a field within an object - `Name`: string, (optional) the key name - `Desc`: string, (optional) description Example: The key "OSinfo" will contain the content of the Info field from the System object from _/etc/os_version.json_ below. ```json { "System": { "Version": 7, "Arch": "arm32", "Info": "customized" } } ``` ```toml [DataExtract.OS_Info] File = "/etc/os_version.json" Json = "System.Info" Name = "OSinfo" ``` Example Output: ```json "data": { "OSinfo": "customized", } ``` Json arrays can be indexed by supplying the index instead of a field name. #### Example: Advanced usage The `DataExtract` statement allows multiple entries with the same Name (the same key). This can be useful for configuring multiple ways to extract the same information. The first data extract statement that produces valid output will set the value for the given key. This is supported for both regular expressions and scripts and a mixture of both. The example below shows two statements that will both create the key value pair for the key "Version". If "1" does not produce valid output the next one is tried, in this case "2". Example: ```toml [DataExtract."1"] File = "/etc/versions" RegEx = ".*Ver=(.+)\n" Name = "Version" [DataExtract."2"] File = "/etc/OSVersion" RegEx = ".*OS Version: (.+)\n" Name = "Version" ``` # License Copyright 2019-present, Cruise LLC Licensed under the [Apache License Version 2.0](LICENSE) (the "License"); you may not use this project except in compliance with the License. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. # Contributions Contributions are welcome! Please see the agreement for contributions in [CONTRIBUTING.md](CONTRIBUTING.md).
Commits must be made with a Sign-off (`git commit -s`) certifying that you agree to the provisions in [CONTRIBUTING.md](CONTRIBUTING.md). ================================================ FILE: cmd/fwanalyzer/fwanalyzer.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package main import ( "flag" "fmt" "io/ioutil" "os" "path" "strings" "github.com/BurntSushi/toml" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/dataextract" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/dircontent" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/filecmp" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/filecontent" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/filepathowner" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/filestatcheck" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/filetree" "github.com/cruise-automation/fwanalyzer/pkg/analyzer/globalfilechecks" ) /* readFileWithCfgPath reads filepath relative to each entry of cfgpath in order and returns the first successful read; if none succeeds it falls back to reading filepath as given and returns that content or the read error. */ func readFileWithCfgPath(filepath string, cfgpath []string) (string, error) { for _, cp := range cfgpath { data, err := ioutil.ReadFile(path.Join(cp, filepath)) if err == nil { return string(data), nil } } data, err := ioutil.ReadFile(filepath) return string(data), err } /* readConfig reads the config file and recursively resolves every [Include."file"] statement (decoded via toml), appending each included file's content to the returned config string; any read or decode error aborts the recursion. */ func readConfig(filepath string, cfgpath []string) (string, error) { cfg := "" cfgBytes, err := readFileWithCfgPath(filepath, cfgpath) cfg = string(cfgBytes) if err != nil { return cfg, err } type includeCfg struct { Include map[string]interface{} } var include includeCfg _, err = toml.Decode(cfg, &include) if err != nil { return cfg, err } for inc := range include.Include { incCfg, err := readConfig(inc, cfgpath) if err != nil { return cfg, err } cfg = cfg + incCfg } return cfg, nil } /* arrayFlags implements flag.Value so the -cfgpath option can be given multiple times, accumulating every occurrence. */ type arrayFlags []string func (af *arrayFlags) String() string { return strings.Join(*af, " ") } func (af *arrayFlags) Set(value string) error { *af = append(*af, value) return nil } /* main parses the command line, loads the (possibly nested) TOML config, registers every analyzer plugin, runs the analysis and writes the JSON report to stdout or a file; exits non-zero on config errors, unsupported filesystem types, or (with -ee) when offenders were found. NOTE(review): "repated" in the -cfgpath help text is a typo ("repeated") left untouched here since it is a runtime string. */ func main() { var cfgpath arrayFlags var in = flag.String("in", "", "filesystem image file or path to directory") var out = flag.String("out", "-", "output to file (use - for stdout)") var extra = flag.String("extra", "", "overwrite directory to read extra data from (filetree, cmpfile, ...)") var cfg = flag.String("cfg", "", "config file") flag.Var(&cfgpath, "cfgpath", "path to config file and included files (can be repated)") var errorExit = flag.Bool("ee", false, "exit with error if offenders are present") var invertMatch = flag.Bool("invertMatch", false, "invert RegEx Match") flag.Parse() if *in == "" || *cfg == "" { fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) flag.PrintDefaults() os.Exit(1) } cfgdata, err := readConfig(*cfg, cfgpath) if err != nil { fmt.Fprintf(os.Stderr, "Could not read config file: %s, error: %s\n", *cfg, err) os.Exit(1) } /* if no alternative extra data directory is given use the directory "config filepath" */ if *extra == "" { *extra = path.Dir(*cfg) } analyzer := analyzer.NewFromConfig(*in, string(cfgdata)) supported, msg := analyzer.FsTypeSupported() if !supported { fmt.Fprintf(os.Stderr, "%s\n", msg) os.Exit(1) } analyzer.AddAnalyzerPlugin(globalfilechecks.New(string(cfgdata), analyzer)) analyzer.AddAnalyzerPlugin(filecontent.New(string(cfgdata), analyzer, *invertMatch)) analyzer.AddAnalyzerPlugin(filecmp.New(string(cfgdata), analyzer, *extra)) analyzer.AddAnalyzerPlugin(dataextract.New(string(cfgdata), analyzer)) analyzer.AddAnalyzerPlugin(dircontent.New(string(cfgdata), analyzer)) analyzer.AddAnalyzerPlugin(filestatcheck.New(string(cfgdata), analyzer)) analyzer.AddAnalyzerPlugin(filepathowner.New(string(cfgdata), analyzer)) analyzer.AddAnalyzerPlugin(filetree.New(string(cfgdata), analyzer, *extra)) analyzer.RunPlugins() report := analyzer.JsonReport() /* write the report to stdout ("-") or the given file */ if *out == "" { fmt.Fprintln(os.Stderr, "Use '-' for stdout or provide a filename.") } else if *out == "-" { fmt.Println(report) } else { err := ioutil.WriteFile(*out, []byte(report), 0644) if err != nil { fmt.Fprintf(os.Stderr, "Can't write report to: %s, error: %s\n", *out, err) } } _ = analyzer.CleanUp() /* signal offenders by providing a error exit code */ if *errorExit && analyzer.HasOffenders() { os.Exit(1) } } ================================================ FILE: cmd/fwanalyzer/fwanalyzer_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/ package main import ( "io/ioutil" "strings" "testing" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" ) func TestMain(t *testing.T) { tests := []struct { inclFile string testFile string contains []string }{ { ` [GlobalConfig] FsType="dirfs" # we can have comments `, "/tmp/fwa_test_cfg_file.1", []string{"GlobalConfig"}, }, { ` [Include."/tmp/fwa_test_cfg_file.1"] [Test] a = "a" `, "/tmp/fwa_test_cfg_file.2", []string{"Test"}, }, { ` [Include."/tmp/fwa_test_cfg_file.2"] `, "/tmp/fwa_test_cfg_file.3", []string{"Test", "GlobalConfig"}, }, } for _, test := range tests { err := ioutil.WriteFile(test.testFile, []byte(test.inclFile), 0644) if err != nil { t.Error(err) } cfg, err := readConfig(test.testFile, []string{}) if err != nil { t.Error(err) } for _, c := range test.contains { if !strings.Contains(cfg, c) { t.Errorf("include didn't work") } } // this will panic if cfg contains an illegal config analyzer.NewFromConfig("dummy", cfg) } } ================================================ FILE: devices/Readme.md ================================================ # Devices This directory contains support tools and popular checks that can be included in FwAnalyzer configs for multiple targets. - [Android](android) - [generic Linux](generic) ## Check.py check.py is a universal script to run FwAnalyzer. It will unpack (with the help of a unpacker; see below) firmware and run fwanalyzer against each of the target filesystems, it will combine all of the reports into one big report. In addition it will do some post processing of the filetree files (if present) and append the result to the report. 
Using check.py is straight forward (the example below is for an Android OTA firmware - make sure you have the required Android unpacking tools installed and added to your PATH, see: [Android](android/Readme.md)): ```sh check.py --unpacker android/unpack.sh --fw some_device_ota.zip --cfg-path android --cfg-include android --fwanalyzer-bin ../build/fwanalyzer ``` The full set of options is described below: ``` usage: check.py [-h] --fw FW --unpacker UNPACKER --cfg-path CFG_PATH [--cfg-include-path CFG_INCLUDE_PATH] [--report REPORT] [--keep-unpacked] [--fwanalyzer-bin FWANALYZER_BIN] [--fwanalyzer-options FWANALYZER_OPTIONS] optional arguments: -h, --help show this help message and exit --fw FW path to firmware file OR path to unpacked firmware --unpacker UNPACKER path to unpacking script --cfg-path CFG_PATH path to directory containing config files --cfg-include-path CFG_INCLUDE_PATH path to config include files --report REPORT report file --keep-unpacked keep unpacked data --fwanalyzer-bin FWANALYZER_BIN path to fwanalyzer binary --fwanalyzer-options FWANALYZER_OPTIONS options passed to fwanalyzer ``` The _--keep-unpacked_ option will NOT delete the temp directory that contains the unpacked files. Once you have the unpacked directory you can pass it to the _--fw_ option to avoid unpacking the firmware for each run (e.g. while you test/modify your configuration files). See the example below. ```sh check.py --unpacker android/unpack.sh --fw /tmp/tmp987689123 --cfg-path android --cfg-include android --fwanalyzer-bin ../build/fwanalyzer ``` ### unpacker The unpacker is used by check.py to _unpack_ firmware. The unpacker needs to be an executable file, that takes two parameters first the `file` to unpack and second the `path to the config files` (the path that was provided via --cfg-path). The unpacker needs to output a set of targets, the targets map a config file to a filesystem image (or directory). The targets are specified as a JSON object. 
The example below specifies two targets: - system : use _system.toml_ when analyzing _system.img_ - boot: use _boot.toml_ when analyzing the content of directory _boot/_ ```json { "system": "system.img" , "boot": "boot/" } ``` See [Android/unpack.sh](android/unpack.sh) for a real world example. ================================================ FILE: devices/android/Readme.md ================================================ # Android OTA Firmware Analysis The OTA file is a zip file with various files inside, the one file we care about is _payload.bin_. Payload.bin contains the filesystem images such as _system.img_ and _boot.img_. The `check_ota.py` script unpacks an OTA file and runs FwAnalyzer on every filesystem image extracted from the OTA file. ## FwAnalyzer Config The OTA check script requires separate FwAnalyzer configuration files for each filesystem image that is extracted from the OTA file. The `check_ota.py` script expects a directory that contains FwAnalyzer config files with the same name as the filesystem image but the toml extensions. For example the config file for _system.img_ needs to be named _[system.toml](system.toml)_. OTA images contain _system.img_, _vendor.img_, _dsp.img_, and _boot.img_. All images besides the _boot.img_ are ext4 filesystems and therefore the config file needs to have `FsType` set to `extfs`. The _boot.img_ will be unpacked to a directory (using the `mkboot` tool), therefore, the _boot.toml_ file needs to have `FsType` set to `dirfs`. ### Android Checks The files _[android_user_build_checks.toml](android_user_build_checks.toml)_ and _[android_user_build_checks_boot.toml](android_user_build_checks_boot.toml)_ are a collection of very simple checks for Android production builds (user builds). The config file can be included in custom FwAnalyzer config using the `Include` statement. 
The _[android_properties.toml](android_properties.toml)_ file is a collection of `DataExtract` statements that will extract Android properties from various parts of an Android firmware image. ## Running check\_ota.py The OTA check fails if FwAnalyzer reports an Offender in any of the filesystem images. The reports generated by FwAnalyzer are written to _IMAGENAME_out.json_ (e.g. _system_out.json_). `check_ota.py` arguments: - `--ota` string : path to ota file - `--report` string : path to report file (will be overwritten) - `--cfg-path` string : path to directory containing fwanalyzer config files - `--cfg-include-path` string : path to directory containing fwanalyzer config include files - `--fwanalyzer-bin` string : path to fwanalyzer binary - `--keep-unpacked` : keep unpacked data - `--targets` string : filesystem targets (e.g.: system boot) Example: ```sh $ ls system.toml $ check_ota.py -ota update-ota.zip -cfg-path . -cfg-include-path . --targets system ``` ## Required tools - [extract android ota payload](https://github.com/cyxx/extract_android_ota_payload.git) to extract the fs images from an ota update - [mkbootimg tools](https://github.com/xiaolu/mkbootimg_tools.git) unpack boot.img to extract kernel, initramfs, etc. 
================================================ FILE: devices/android/android_properties.toml ================================================ # -- Android Properties -- # - /system/etc/prop.default - [DataExtract."ro.debuggable__1"] File = "/system/etc/prop.default" RegEx = ".*\\nro\\.debuggable=(.+)\\n.*" [DataExtract."ro.bootimage.build.fingerprint__1"] File = "/system/etc/prop.default" RegEx = ".*\\nro\\.bootimage\\.build\\.fingerprint=(\\S+)\\n.*" [DataExtract."ro.bootimage.build.date__1"] File = "/system/etc/prop.default" RegEx = ".*\\nro\\.bootimage\\.build\\.date=(.+)\\n.*" # - /system/build.prop - [DataExtract."ro.build.type__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.type=(\\S+)\\n.*" [DataExtract."ro.build.tags__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.tags=(\\S+)\\n.*" [DataExtract."ro.build.flavor__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.flavor=(\\S+)\\n.*" [DataExtract."ro.build.id__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.id=(\\S+)\\n.*" [DataExtract."ro.build.version.security_patch__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.version\\.security_patch=(\\S+)\\n.*" [DataExtract."ro.build.version.incremental__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.version\\.incremental=(\\S+)\\n.*" [DataExtract."ro.product.name__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.product\\.name=(\\S+)\\n.*" [DataExtract."ro.product.device__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.product\\.device=(\\S+)\\n.*" [DataExtract."ro.build.version.codename__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.version\\.codename=(\\S+)\\n.*" [DataExtract."ro.build.version.release__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.version\\.release=(\\S+)\\n.*" [DataExtract."ro.build.date__1"] File = "/system/build.prop" RegEx = ".*\\nro\\.build\\.date=(.+)\\n.*" # - /boot_img/ramdisk/prop.default (from the boot image) - 
[DataExtract."ro.bootimage.build.fingerprint__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.bootimage\\.build\\.fingerprint=(\\S+)\\n.*" [DataExtract."ro.bootimage.build.date__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.bootimage\\.build\\.date=(.+)\\n.*" [DataExtract."ro.build.type__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.type=(\\S+)\\n.*" [DataExtract."ro.build.tags__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.tags=(\\S+)\\n.*" [DataExtract."ro.build.flavor__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.flavor=(\\S+)\\n.*" [DataExtract."ro.build.id__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.id=(\\S+)\\n.*" [DataExtract."ro.build.version.security_patch__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.version\\.security_patch=(\\S+)\\n.*" [DataExtract."ro.build.version.incremental__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.version\\.incremental=(\\S+)\\n.*" [DataExtract."ro.product.name__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.product\\.name=(\\S+)\\n.*" [DataExtract."ro.product.device__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.product\\.device=(\\S+)\\n.*" [DataExtract."ro.build.version.codename__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.version\\.codename=(\\S+)\\n.*" [DataExtract."ro.build.version.release__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.version\\.release=(\\S+)\\n.*" [DataExtract."ro.build.date__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.build\\.date=(.+)\\n.*" [DataExtract."ro.debuggable__2"] File = "/boot_img/ramdisk/prop.default" RegEx = ".*\\nro\\.debuggable=(.+)\\n.*" # -- Android Boot Partition Info -- [DataExtract."androidboot.selinux__1"] File = "/boot_img/img_info" RegEx = ".*androidboot.selinux=(\\S+)\\s.*" 
[DataExtract."buildvariant__1"] File = "/boot_img/img_info" RegEx = ".*buildvariant=(\\S+)\\s.*" [DataExtract."veritykeyid__1"] File = "/boot_img/img_info" RegEx = ".*veritykeyid=id:(\\w+).*" ================================================ FILE: devices/android/android_user_build_checks.toml ================================================ # -- Android user build checks -- # # Basic checks for a production build. # Checks cover: system.img [FileContent."ro.build=user"] File = "/system/build.prop" Regex = ".*\\nro\\.build\\.type=user\n.*" Desc = "ro.build.type must be user" [FileContent."ro.secure=1"] File = "/system/etc/prop.default" Regex = ".*\\nro\\.secure=1.*" Desc = "ro.secure must be 1" [FileContent."ro.debuggable=0"] File = "/system/etc/prop.default" Regex = ".*\\nro\\.debuggable=0.*" Desc = "ro.debuggable must be 0" ================================================ FILE: devices/android/android_user_build_checks_boot.toml ================================================ # -- Android user build checks -- # # Basic checks for a production build # checks cover: boot.img [FileContent."selinux enforcement"] File = "/boot_img/img_info" Regex = ".*androidboot.selinux=enforcing.*" Desc = "selinux must be set to enforcing" [FileContent."buildvariant must be user"] File = "/boot_img/img_info" Regex = ".*buildvariant=user.*" Desc = "build variant must be 'user'" [FileContent."veritykeyid should make sense"] File = "/boot_img/img_info" Regex = ".*veritykeyid=id:[[:alnum:]]+.*" Desc = "veritykeyid must be present" [FileContent."ro.secure=1 (ramdisk)"] File = "/boot_img/ramdisk/prop.default" Regex = ".*\\nro.secure=1\\n.*" Desc = "ro.secure must be 1" [FileContent."ro.debuggable=0 (ramdisk)"] File = "/boot_img/ramdisk/prop.default" Regex = ".*\\nro.debuggable=0\\n.*" Desc = "ro.debuggable must be 0" ================================================ FILE: devices/android/check_ota.py ================================================ #!/usr/bin/env python3 # Copyright 
2019-present, Cruise LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import tempfile import os import os.path import sys import argparse import subprocess import hashlib class CheckOTA: def __init__(self, fwanalyzer="fwanalyzer"): self._tmpdir = tempfile.mktemp() self._unpackdir = os.path.join(self._tmpdir, "unpacked") self._fwanalyzer = fwanalyzer def getTmpDir(self): return self._tmpdir def setUnpacked(self, unpacked): self._tmpdir = os.path.realpath(unpacked + "/..") self._unpackdir = os.path.realpath(unpacked) def runFwAnalyzeFs(self, img, cfg, cfginc, out): cfginclude = "" if cfginc: cfginclude = " -cfgpath " + cfginc cmd = self._fwanalyzer + " -in " + img + cfginclude + " -cfg " + cfg + " -out " + out subprocess.check_call(cmd, shell=True) def unpack(self, otafile, otaunpacker, mkboot): # customize based on firmware # # create tmp + unpackdir cmd = "mkdir -p " + self._unpackdir subprocess.check_call(cmd, shell=True) cmd = "unzip " + otafile subprocess.check_call(cmd, shell=True, cwd=self._unpackdir) # unpack payload cmd = otaunpacker + " payload.bin" subprocess.check_call(cmd, shell=True, cwd=self._unpackdir) # unpack boot.img cmd = mkboot + " boot.img boot_img" subprocess.check_call(cmd, shell=True, cwd=self._unpackdir) def delTmpDir(self): cmd = "rm -rf " + self._tmpdir subprocess.check_call(cmd, shell=True) # check result json def checkResult(self, result): with open(result) as read_file: data = json.load(read_file) if "offenders" in data: status = False 
def getCfg(name):
    """Return the fwanalyzer config file name for a target (e.g. "system.toml")."""
    return name + ".toml"


def getOut(name):
    """Return the report output file name for a target (e.g. "system_out.json")."""
    return name + "_out.json"


def getImg(name):
    """Return the path (relative to the tmp dir) of a target's unpacked image.

    The "boot" target is analyzed as the unpacked directory itself; every
    other target maps to "unpacked/<name>.img".
    """
    if name == "boot":
        return "unpacked/"
    return "unpacked/" + name + ".img"


def hashfile(fpath):
    """Return the hex SHA-256 digest of the file at fpath, read in chunks."""
    m = hashlib.sha256()
    with open(fpath, 'rb') as f:
        while True:
            data = f.read(65535)
            if not data:
                break
            m.update(data)
    return m.hexdigest()


def makeReport(ota, data):
    """Build the combined JSON report for all analyzed targets.

    Args:
        ota: path to the OTA file (hashed into the report).
        data: dict mapping target name -> (status, json_report_string).

    Returns:
        A pretty-printed JSON string; "status" is False if any target failed.
    """
    report = {}
    report["firmware"] = ota
    status = True
    for key in data:
        # BUG FIX: per-target results come from the 'data' parameter; the
        # original read the global 'out', which only worked by accident when
        # called from __main__.
        s, r = data[key]
        if not s:
            status = s
        report[key] = json.loads(r)
    report["firmware_digest"] = hashfile(ota)
    report["status"] = status
    return json.dumps(report, sort_keys=True, indent=2)
not ota.endswith("unpacked"): check.unpack(ota, otaunpacker, bootunpacker) else: check.setUnpacked(ota) args.keep_unpacked = True print("already unpacked") all_checks_ok = True for tgt in targets: check.runFwAnalyzeFs(os.path.join(check.getTmpDir(), getImg(tgt)), os.path.join(cfg, getCfg(tgt)), args.cfg_include_path, getOut(tgt)) ok, data = check.checkResult(getOut(tgt)) out[tgt] = ok, data if not ok: all_checks_ok = False if args.keep_unpacked: print("unpacked: {0}\n".format(check.getTmpDir())) else: check.delTmpDir() report = makeReport(args.ota, out) if args.report != None: fp = open(args.report, "w+") fp.write(report) fp.close() print("report written to: " + args.report) if not all_checks_ok: print(report) print("OTA Check Failed") sys.exit(1) else: print("OTA Check Success") sys.exit(0) ================================================ FILE: devices/android/system.toml ================================================ # -- Basic Config for Android's system.img -- [GlobalConfig] FsType = "extfs" # enable SeLinux FsTypeOptions = "selinux" DigestImage = true [GlobalFileChecks] Suid = true # run-as is a common suid binary SuidAllowedList = ["/system/bin/runs-as"] # enable SeLinux checks SeLinuxLabel = true # system is mounted read-only WorldWrite = false # UIDs and GIDs need to be adjusted for each device Uids = [0,1000,1003,1028,1036,2000] Gids = [0,1000,1003,1028,1036,2000] BadFiles = [ "/system/xbin/su" ] [FileTreeCheck] OldTreeFilePath = "system_filetree.json" CheckPermsOwnerChange = true CheckFileSize = false [FilePathOwner."/system/etc"] Uid = 0 Gid = 0 [Include."android_user_build_checks.toml"] [Include."android_properties.toml"] ================================================ FILE: devices/android/unpack.sh ================================================ #!/bin/sh # -- unpack android OTA -- if [ -z "$1" ]; then echo "syntax: $0 " exit 1 fi OTAFILE=$1 # tmpdir should contained 'unpacked' as last path element TMPDIR=$(pwd) if [ "$(basename $TMPDIR)" != 
"unpacked" ]; then echo "run script in directory named 'unpacked'" exit 1 fi # unpack unzip $OTAFILE >../unpack.log 2>&1 extract_android_ota_payload.py payload.bin >>../unpack.log 2>&1 mkboot boot.img boot_img >>../unpack.log 2>&1 # output targets, targets are consumed by check.py # key = name of fwanalyzer config file without extension # e.g. 'system' => will look for 'system.toml' # value = path to filesystem image (or directory) # analyze system.img using system.toml echo -n '{ "system": "unpacked/system.img" }' ================================================ FILE: devices/check.py ================================================ #!/usr/bin/env python3 # Copyright 2020-present, Cruise LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
import argparse import hashlib import json import os import sys import subprocess import tempfile class CheckFirmware: def __init__(self, fwanalyzer="fwanalyzer"): self._tmpdir = "" self._unpackdir = "" self._fwanalyzer = fwanalyzer self._unpacked = False def get_tmp_dir(self): return self._tmpdir def run_fwanalyzer_fs(self, img, cfg, cfginc, out, options=""): cfginclude = [] if cfginc: cfginclude = ["-cfgpath", cfginc] cmd = [self._fwanalyzer, "-in", img, *cfginclude, "-cfg", cfg, "-out", out, options] return subprocess.check_call(cmd) def unpack(self, fwfile, unpacker, cfgpath): TARGETS_FILE = "targets.json" try: if os.path.exists(os.path.join(fwfile, "unpacked")) and os.path.exists(os.path.join(fwfile, TARGETS_FILE)): self._tmpdir = fwfile self._unpackdir = os.path.join(self._tmpdir, "unpacked") print("{0}: is a directory containing an 'unpacked' path, skipping".format(fwfile)) cmd = ["cat", os.path.join(fwfile, TARGETS_FILE)] self._unpacked = True else: self._tmpdir = tempfile.mkdtemp() self._unpackdir = os.path.join(self._tmpdir, "unpacked") os.mkdir(self._unpackdir) cmd = [unpacker, fwfile, cfgpath] res = subprocess.check_output(cmd, cwd=self._unpackdir) targets = json.loads(res.decode("utf-8")) with open(os.path.join(self._tmpdir, TARGETS_FILE), "w") as f: f.write(res.decode("utf-8")) return targets except Exception as e: print("Exception: {0}".format(e)) print("can't load targets from output of '{0}' check your script".format(unpacker)) return None def del_tmp_dir(self): if not self._unpacked: return subprocess.check_call(["rm", "-rf", self._tmpdir]) def files_by_ext_stat(self, data): allext = {} for file in data["files"]: fn, ext = os.path.splitext(file["name"]) if ext in allext: count, ext = allext[ext] allext[ext] = count + 1, ext else: allext[ext] = (1, ext) return len(data["files"]), allext def analyze_filetree(self, filetreefile): with open(filetreefile) as f: data = json.load(f) num_files, stats = self.files_by_ext_stat(data) out = {} percent = 
def hashfile(fpath):
    """Return the hex SHA-256 digest of the file at fpath, read in chunks."""
    m = hashlib.sha256()
    with open(fpath, "rb") as f:
        while True:
            data = f.read(65535)
            if not data:
                break
            m.update(data)
    return m.hexdigest()


def make_report(fwfile, data):
    """Return a json report built from image reports.

    Args:
        fwfile: path of the analyzed firmware (hashed when it is a file).
        data: dict mapping target name -> (status, json_report_string).

    Returns:
        Pretty-printed JSON; "status" is False if any image check failed.
    """
    report = {}
    status = True
    for key in data:
        # BUG FIX: read from the 'data' parameter; the original read the
        # global 'out', which only worked when called from __main__.
        img_status, img_report = data[key]
        # once any image fails, the overall status stays False
        if status:
            status = img_status
        report[key] = json.loads(img_report)
    report["firmware"] = fwfile
    if os.path.isfile(fwfile):
        report["firmware_digest"] = hashfile(fwfile)
    report["status"] = status
    return json.dumps(report, sort_keys=True, indent=2)
parser.add_argument("--fwanalyzer-bin", action="store", default="fwanalyzer", help="path to fwanalyzer binary") parser.add_argument("--fwanalyzer-options", action="store", default="", help="options passed to fwanalyzer") args = parser.parse_args() fw = os.path.realpath(args.fw) cfg = os.path.realpath(args.cfg_path) check = CheckFirmware(args.fwanalyzer_bin) targets = check.unpack(fw, os.path.realpath(args.unpacker), cfg) print("using tmp directory: {0}".format(check.get_tmp_dir())) if not targets: print("no targets defined") sys.exit(1) # target file system images, a fwanalyzer config file is required for each of those for tgt in targets: cfg_file_name = "{0}.toml".format(tgt) if not os.path.isfile(os.path.join(args.cfg_path, cfg_file_name)): print("skipped, config file '{0}' for '{1}' does not exist\n".format( os.path.join(args.cfg_path, cfg_file_name), targets[tgt])) sys.exit(0) else: print("using config file '{0}' for '{1}'".format( os.path.join(args.cfg_path, cfg_file_name), targets[tgt])) out = {} all_checks_ok = True for tgt in targets: cfg_file_name = "{0}.toml".format(tgt) out_file_name = "{0}_out.json".format(tgt) check.run_fwanalyzer_fs(os.path.join(check.get_tmp_dir(), targets[tgt]), os.path.join(cfg, cfg_file_name), args.cfg_include_path, out_file_name, options=args.fwanalyzer_options) ok, data = check.check_result(out_file_name) out[tgt] = ok, data if not ok: all_checks_ok = False if args.keep_unpacked: print("unpacked: {0}\n".format(check.get_tmp_dir())) else: check.del_tmp_dir() report = make_report(args.fw, out) if args.report != None: with open(args.report, "w+") as f: f.write(report) print("report written to '{0}'".format(args.report)) else: print(report) if not all_checks_ok: print("Firmware Analysis: checks failed") sys.exit(1) else: print("Firmware Analysis: checks passed") sys.exit(0) ================================================ FILE: devices/generic/Readme.md ================================================ # Generic Linux Devices The 
[root.toml](root.toml) provides a basic FwAnalyzer configuration for a generic Linux root filesystem. ================================================ FILE: devices/generic/root.toml ================================================ # -- Basic Config for a generic Linux device -- [GlobalConfig] FsType = "extfs" DigestImage = true [GlobalFileChecks] Suid = true SuidAllowedList = [] # disable SELinux checks SeLinuxLabel = false # flag world writable files WorldWrite = true # UIDs and GIDs need to be adjusted for each device Uids = [0] Gids = [0] # files we do not want in the filesystem BadFiles = [ "/usr/sbin/sshd", "/usr/sbin/tcpdump" ] [FileTreeCheck] OldTreeFilePath = "root_filetree.json" CheckPermsOwnerChange = true # -- root should own all binaries -- [FilePathOwner."/bin"] Uid = 0 Gid = 0 [FilePathOwner."/sbin"] Uid = 0 Gid = 0 [FilePathOwner."/usr/bin"] Uid = 0 Gid = 0 [FilePathOwner."/usr/sbin"] Uid = 0 Gid = 0 # -- check that elf files are stripped -- [FileContent.bins_stripped] File = "/" Script = "check_file_elf_stripped.sh" Desc = "elf file not stripped" # -- check mount flags -- # Note: adjust the device and mount point, example uses: /dev/sda1 at /mnt [FileContent."mount_flag_noexec"] File = "/etc/fstab" RegEx = ".*\\n/dev/sda1[\\t ]+/mnt[\\t ]+ext4[\\t a-z,]+noexec.*\\n.*" Desc = "sda1 should be mounted noexec" [FileContent."mount_flag_ro"] File = "/etc/fstab" RegEx = ".*\\n/dev/sda1[\\t ]+/mnt[\\t ]+ext4[\\t a-z,]+ro.*\\n.*" Desc = "sda1 should be mounted ro" [FileContent."mount_flag_nodev"] File = "/etc/fstab" RegEx = ".*\\n/dev/sda1[\\t ]+/mnt[\\t ]+ext4[\\t a-z,]+nodev.*\\n.*" Desc = "sda1 should be mounted nodev" [FileContent."mount_flag_nosuid"] File = "/etc/fstab" RegEx = ".*\\n/dev/sda1[\\t ]+/mnt[\\t ]+vfat[ \\ta-z,]+nosuid.*\\n.*" Desc = "sda1 should be mounted nosuid" ================================================ FILE: docker-compose.yml ================================================ version: "3" services: fwanalyzer: build: . 
working_dir: /go/src/github.com/cruise-automation/fwanalyzer volumes: - .:/go/src/github.com/cruise-automation/fwanalyzer ================================================ FILE: go.mod ================================================ module github.com/cruise-automation/fwanalyzer go 1.13 require ( github.com/BurntSushi/toml v0.3.1 github.com/bmatcuk/doublestar v1.1.4 github.com/google/go-cmp v0.2.0 ) ================================================ FILE: go.sum ================================================ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/bmatcuk/doublestar v1.1.4 h1:OiC5vFUceSTlgPeJdxVJGNIXTLxCBVPO7ozqJjXbE9M= github.com/bmatcuk/doublestar v1.1.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= ================================================ FILE: pkg/analyzer/analyzer.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package analyzer import ( "bytes" "encoding/hex" "encoding/json" "errors" "io/ioutil" "os" "path" "strings" "github.com/BurntSushi/toml" "github.com/cruise-automation/fwanalyzer/pkg/cpioparser" "github.com/cruise-automation/fwanalyzer/pkg/dirparser" "github.com/cruise-automation/fwanalyzer/pkg/extparser" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" "github.com/cruise-automation/fwanalyzer/pkg/squashfsparser" "github.com/cruise-automation/fwanalyzer/pkg/ubifsparser" "github.com/cruise-automation/fwanalyzer/pkg/util" "github.com/cruise-automation/fwanalyzer/pkg/vfatparser" ) type AnalyzerPluginType interface { Name() string Start() Finalize() string CheckFile(fi *fsparser.FileInfo, path string) error } type AnalyzerType interface { GetFileInfo(filepath string) (fsparser.FileInfo, error) RemoveFile(filepath string) error FileGetSha256(filepath string) ([]byte, error) FileGet(filepath string) (string, error) AddOffender(filepath string, reason string) AddInformational(filepath string, reason string) CheckAllFilesWithPath(cb AllFilesCallback, cbdata AllFilesCallbackData, filepath string) AddData(key, value string) ImageInfo() AnalyzerReport } type AllFilesCallbackData interface{} type AllFilesCallback func(fi *fsparser.FileInfo, fullpath string, data AllFilesCallbackData) type globalConfigType struct { FSType string FSTypeOptions string DigestImage bool } type AnalyzerReport struct { FSType string `json:"fs_type"` ImageName string `json:"image_name"` ImageDigest string `json:"image_digest,omitempty"` Data map[string]interface{} `json:"data,omitempty"` Offenders map[string][]interface{} `json:"offenders,omitempty"` Informational map[string][]interface{} `json:"informational,omitempty"` } type Analyzer struct { fsparser fsparser.FsParser tmpdir string config globalConfigType analyzers []AnalyzerPluginType PluginReports map[string]interface{} AnalyzerReport } func New(fsp fsparser.FsParser, cfg globalConfigType) *Analyzer { var a Analyzer a.config = cfg 
a.fsparser = fsp a.FSType = cfg.FSType a.ImageName = fsp.ImageName() a.tmpdir, _ = util.MkTmpDir("analyzer") a.Offenders = make(map[string][]interface{}) a.Informational = make(map[string][]interface{}) a.Data = make(map[string]interface{}) a.PluginReports = make(map[string]interface{}) if cfg.DigestImage { a.ImageDigest = hex.EncodeToString(util.DigestFileSha256(a.ImageName)) } return &a } func NewFromConfig(imagepath string, cfgdata string) *Analyzer { type globalconfig struct { GlobalConfig globalConfigType } var config globalconfig _, err := toml.Decode(cfgdata, &config) if err != nil { panic("can't read config data: " + err.Error()) } var fsp fsparser.FsParser // Set the parser based on the FSType in the config if strings.EqualFold(config.GlobalConfig.FSType, "extfs") { fsp = extparser.New(imagepath, strings.Contains(config.GlobalConfig.FSTypeOptions, "selinux"), strings.Contains(config.GlobalConfig.FSTypeOptions, "capabilities")) } else if strings.EqualFold(config.GlobalConfig.FSType, "dirfs") { fsp = dirparser.New(imagepath) } else if strings.EqualFold(config.GlobalConfig.FSType, "vfatfs") { fsp = vfatparser.New(imagepath) } else if strings.EqualFold(config.GlobalConfig.FSType, "squashfs") { fsp = squashfsparser.New(imagepath, strings.Contains(config.GlobalConfig.FSTypeOptions, "securityinfo")) } else if strings.EqualFold(config.GlobalConfig.FSType, "ubifs") { fsp = ubifsparser.New(imagepath) } else if strings.EqualFold(config.GlobalConfig.FSType, "cpiofs") { fsp = cpioparser.New(imagepath, strings.Contains(config.GlobalConfig.FSTypeOptions, "fixdirs")) } else { panic("Cannot find an appropriate parser: " + config.GlobalConfig.FSType) } return New(fsp, config.GlobalConfig) } func (a *Analyzer) FsTypeSupported() (bool, string) { if !a.fsparser.Supported() { return false, a.config.FSType + ": requires additional tools, please refer to documentation." 
} return true, "" } func (a *Analyzer) ImageInfo() AnalyzerReport { // only provide the meta information, don't include offenders and other report data return AnalyzerReport{ FSType: a.FSType, ImageName: a.ImageName, ImageDigest: a.ImageDigest, } } func (a *Analyzer) AddAnalyzerPlugin(aplug AnalyzerPluginType) { a.analyzers = append(a.analyzers, aplug) } func (a *Analyzer) iterateFiles(curpath string) error { dir, err := a.fsparser.GetDirInfo(curpath) if err != nil { return err } cp := curpath for _, fi := range dir { for _, ap := range a.analyzers { err = ap.CheckFile(&fi, cp) if err != nil { return err } } if fi.IsDir() { err = a.iterateFiles(path.Join(curpath, fi.Name)) if err != nil { return err } } } return nil } func (a *Analyzer) checkRoot() error { fi, err := a.fsparser.GetFileInfo("/") if err != nil { return err } for _, ap := range a.analyzers { err = ap.CheckFile(&fi, "/") if err != nil { return err } } return nil } func (a *Analyzer) addPluginReport(report string) { var data map[string]interface{} err := json.Unmarshal([]byte(report), &data) if err != nil { return } for k := range data { a.PluginReports[k] = data[k] } } func (a *Analyzer) RunPlugins() { for _, ap := range a.analyzers { ap.Start() } err := a.checkRoot() if err != nil { panic("RunPlugins error: " + err.Error()) } err = a.iterateFiles("/") if err != nil { panic("RunPlugins error: " + err.Error()) } for _, ap := range a.analyzers { res := ap.Finalize() a.addPluginReport(res) } } func (a *Analyzer) CleanUp() error { err := os.RemoveAll(a.tmpdir) return err } func (a *Analyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) { return a.fsparser.GetFileInfo(filepath) } func (a *Analyzer) FileGet(filepath string) (string, error) { tmpfile, _ := ioutil.TempFile(a.tmpdir, "") tmpname := tmpfile.Name() tmpfile.Close() if a.fsparser.CopyFile(filepath, tmpname) { return tmpname, nil } return "", errors.New("error copying file") } func (a *Analyzer) FileGetSha256(filepath string) ([]byte, 
error) { tmpname, err := a.FileGet(filepath) if err != nil { return nil, err } defer os.Remove(tmpname) digest := util.DigestFileSha256(tmpname) return digest, nil } func (a *Analyzer) RemoveFile(filepath string) error { os.Remove(filepath) return nil } func (a *Analyzer) iterateAllDirs(curpath string, cb AllFilesCallback, cbdata AllFilesCallbackData) error { dir, err := a.fsparser.GetDirInfo(curpath) if err != nil { return err } for _, fi := range dir { cb(&fi, curpath, cbdata) if fi.IsDir() { err := a.iterateAllDirs(path.Join(curpath, fi.Name), cb, cbdata) if err != nil { return err } } } return nil } func (a *Analyzer) CheckAllFilesWithPath(cb AllFilesCallback, cbdata AllFilesCallbackData, filepath string) { if cb == nil { return } err := a.iterateAllDirs(filepath, cb, cbdata) if err != nil { panic("iterateAllDirs failed") } } func (a *Analyzer) AddOffender(filepath string, reason string) { var data map[string]interface{} // this is valid json? if err := json.Unmarshal([]byte(reason), &data); err == nil { // yes: store as json a.Offenders[filepath] = append(a.Offenders[filepath], json.RawMessage(reason)) } else { // no: store as plain text a.Offenders[filepath] = append(a.Offenders[filepath], reason) } } func (a *Analyzer) AddInformational(filepath string, reason string) { var data map[string]interface{} // this is valid json? if err := json.Unmarshal([]byte(reason), &data); err == nil { // yes: store as json a.Informational[filepath] = append(a.Informational[filepath], json.RawMessage(reason)) } else { // no: store as plain text a.Informational[filepath] = append(a.Informational[filepath], reason) } } func (a *Analyzer) HasOffenders() bool { return len(a.Offenders) > 0 } func (a *Analyzer) AddData(key string, value string) { // this is a valid json object? var data map[string]interface{} if err := json.Unmarshal([]byte(value), &data); err == nil { // yes: store as json a.Data[key] = json.RawMessage(value) return } // this is valid json array? 
var array []interface{} if err := json.Unmarshal([]byte(value), &array); err == nil { // yes: store as json a.Data[key] = json.RawMessage(value) } else { // no: store as plain text a.Data[key] = value } } func (a *Analyzer) addReportData(report []byte) ([]byte, error) { var data map[string]interface{} err := json.Unmarshal(report, &data) if err != nil { return report, err } for k := range a.PluginReports { data[k] = a.PluginReports[k] } jdata, err := json.Marshal(&data) return jdata, err } func (a *Analyzer) JsonReport() string { ar := AnalyzerReport{ FSType: a.FSType, Offenders: a.Offenders, Informational: a.Informational, Data: a.Data, ImageName: a.ImageName, ImageDigest: a.ImageDigest, } jdata, _ := json.Marshal(ar) jdata, _ = a.addReportData(jdata) // make json look pretty var prettyJson bytes.Buffer _ = json.Indent(&prettyJson, jdata, "", "\t") return prettyJson.String() } ================================================ FILE: pkg/analyzer/analyzer_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
package analyzer

import (
	"os"
	"testing"
)

// TestBasic exercises the dirfs-backed Analyzer: CleanUp removes the temp
// directory, and GetFileInfo/checkRoot behave correctly for a regular file
// and a directory inside test/testdir.
func TestBasic(t *testing.T) {
	cfg := `
[GlobalConfig]
FsType = "dirfs"
DigestImage = false
`
	// check tmp file test
	analyzer := NewFromConfig("../../test/testdir", cfg)
	_ = analyzer.CleanUp()
	// CleanUp must remove the analyzer's temporary working directory
	if _, err := os.Stat(analyzer.tmpdir); !os.IsNotExist(err) {
		t.Errorf("tmpdir was not removed")
	}
	// file test
	analyzer = NewFromConfig("../../test/testdir", cfg)
	fi, err := analyzer.GetFileInfo("/file1.txt")
	if err != nil {
		t.Errorf("GetFileInfo failed")
	}
	if !fi.IsFile() {
		t.Errorf("GetFileInfo failed, should be regular file")
	}
	if fi.IsDir() {
		t.Errorf("GetFileInfo failed, not a dir")
	}
	if fi.Name != "file1.txt" {
		t.Errorf("filename does not match")
	}
	// directory test
	fi, err = analyzer.GetFileInfo("/dir1")
	if err != nil {
		t.Errorf("GetFileInfo failed")
	}
	if fi.IsFile() {
		t.Errorf("GetFileInfo failed, not a file")
	}
	if !fi.IsDir() {
		t.Errorf("GetFileInfo failed, should be a directory")
	}
	if fi.Name != "dir1" {
		t.Errorf("filename does not match")
	}
	// checkRoot must succeed with no plugins registered
	err = analyzer.checkRoot()
	if err != nil {
		t.Errorf("checkroot failed with %s", err)
	}
	_ = analyzer.CleanUp()
}

================================================
FILE: pkg/analyzer/dataextract/dataextract.go
================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package dataextract

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os/exec"
	"path"
	"regexp"
	"strings"

	"github.com/BurntSushi/toml"

	"github.com/cruise-automation/fwanalyzer/pkg/analyzer"
	"github.com/cruise-automation/fwanalyzer/pkg/fsparser"
	"github.com/cruise-automation/fwanalyzer/pkg/util"
)

// dataType is one DataExtract config entry: extract a value from File via
// exactly one of RegEx, Script, or Json and store it under Name.
type dataType struct {
	File          string
	Script        string
	ScriptOptions []string // options for script execution
	RegEx         string
	Json          string
	Desc          string
	Name          string // the name can be set directly otherwise the key will be used
}

// dataExtractType is the DataExtract plugin state: extraction items keyed
// by the (cleaned) target file path.
type dataExtractType struct {
	config map[string][]dataType
	a      analyzer.AnalyzerType
}

// New parses the DataExtract TOML config and returns the plugin instance.
// Panics if the config cannot be decoded.
func New(config string, a analyzer.AnalyzerType) *dataExtractType {
	type dataExtractListType struct {
		DataExtract map[string]dataType
	}
	cfg := dataExtractType{a: a, config: make(map[string][]dataType)}
	var dec dataExtractListType
	_, err := toml.Decode(config, &dec)
	if err != nil {
		panic("can't read config data: " + err.Error())
	}
	// convert name based map to filename based map with an array of dataType
	for name, item := range dec.DataExtract {
		var items []dataType
		if _, ok := cfg.config[item.File]; ok {
			items = cfg.config[item.File]
		}
		if item.Name == "" {
			// if the key ends with __[0-9] remove the suffix and use it as name
			// (e.g. "Version__0" and "Version__9" both map to Name "Version",
			// allowing multiple fallback extractors for one value)
			if name[len(name)-1] >= '0' && name[len(name)-1] <= '9' && strings.HasSuffix(name[:len(name)-1], "__") {
				item.Name = name[:len(name)-3]
			} else {
				item.Name = name
			}
		}
		// NOTE(review): item is appended before File is cleaned, so the stored
		// copy keeps the raw File value while the map key uses the cleaned path
		items = append(items, item)
		item.File = path.Clean(item.File)
		cfg.config[item.File] = items
	}
	return &cfg
}

func (state *dataExtractType) Start() {}

func (state *dataExtractType) Finalize() string {
	return ""
}

func (state *dataExtractType) Name() string {
	return "DataExtract"
}

// CheckFile runs every extraction item configured for this file. Extraction
// errors are recorded via AddData (as "DataExtract ERROR: ..." strings);
// once an item's Name has been filled with a non-error value, later items
// with the same Name are skipped.
func (state *dataExtractType) CheckFile(fi *fsparser.FileInfo, filepath string) error {
	if !fi.IsFile() {
		return nil
	}
	fn := path.Join(filepath, fi.Name)
	if _, ok := state.config[fn]; !ok {
		return nil
	}
	items := state.config[fn]
	// we record if the specific Name was already added with a non error value
	nameFilled := make(map[string]bool)
	for _, item := range items {
		// Name already set?
		if _, ok := nameFilled[item.Name]; ok {
			continue
		}
		// symlinks are not followed; the actual target must be configured
		if fi.IsLink() {
			state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: file is Link (extract data from actual file): %s : %s", item.Name, item.Desc))
			continue
		}
		if item.RegEx != "" {
			reg, err := regexp.Compile(item.RegEx)
			if err != nil {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: regex compile error: %s : %s %s", item.RegEx, item.Name, item.Desc))
				continue
			}
			tmpfn, err := state.a.FileGet(fn)
			if err != nil {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: file read error, file get: %s : %s : %s", err, item.Name, item.Desc))
				continue
			}
			fdata, err := ioutil.ReadFile(tmpfn)
			if err != nil {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: file read error, file read: %s : %s : %s", err, item.Name, item.Desc))
				continue
			}
			_ = state.a.RemoveFile(tmpfn)
			res := reg.FindAllStringSubmatch(string(fdata), -1)
			if len(res) < 1 {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: regex match error, regex: %s : %s : %s", item.RegEx, item.Name, item.Desc))
			} else {
				// only one match
				if len(res) == 1 && len(res[0]) == 2 {
					state.a.AddData(item.Name, res[0][1])
					nameFilled[item.Name] = true
				} else if len(res) > 1 {
					// multiple matches
					data := []string{}
					for _, i := range res {
						if len(i) == 2 {
							data = append(data, i[1])
						}
					}
					// convert to JSON array
					jdata, _ := json.Marshal(data)
					state.a.AddData(item.Name, string(jdata))
					nameFilled[item.Name] = true
				} else {
					state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: regex match error : %s : %s", item.Name, item.Desc))
				}
			}
		}
		if item.Script != "" {
			out, err := runScriptOnFile(state.a, item.Script, item.ScriptOptions, fi, fn)
			if err != nil {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: script error: %s : %s : %s", err, item.Name, item.Desc))
			} else {
				// the raw script output (including trailing newline) is stored
				state.a.AddData(item.Name, out)
				nameFilled[item.Name] = true
			}
		}
		if item.Json != "" {
			tmpfn, err := state.a.FileGet(fn)
			if err != nil {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: file read error, file get: %s : %s : %s", err, item.Name, item.Desc))
				continue
			}
			fdata, err := ioutil.ReadFile(tmpfn)
			if err != nil {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: file read error, file read: %s : %s : %s", err, item.Name, item.Desc))
				continue
			}
			_ = state.a.RemoveFile(tmpfn)
			// item.Json is a dot separated field path, e.g. "a.b.c"
			out, err := util.XtractJsonField(fdata, strings.Split(item.Json, "."))
			if err != nil {
				state.a.AddData(item.Name, fmt.Sprintf("DataExtract ERROR: JSON decode error: %s : %s : %s", err, item.Name, item.Desc))
				continue
			}
			state.a.AddData(item.Name, out)
			nameFilled[item.Name] = true
		}
	}
	return nil
}

// runScriptOnFile extracts the target file and runs the provided script as:
// <script> <tmpfile> <filepath> <uid> <gid> <mode(octal)> <selinux label> [-- scriptOptions...]
// returning the script's stdout. The extracted temp file is removed after
// the script ran.
func runScriptOnFile(a analyzer.AnalyzerType, script string, scriptOptions []string, fi *fsparser.FileInfo, fpath string) (string, error) {
	fname, err := a.FileGet(fpath)
	if err != nil {
		return "", err
	}
	options := []string{fname, fpath, fmt.Sprintf("%d", fi.Uid), fmt.Sprintf("%d", fi.Gid), fmt.Sprintf("%o", fi.Mode), fi.SELinuxLabel}
	if len(scriptOptions) > 0 {
		options = append(options, "--")
		options = append(options, scriptOptions...)
	}
	out, err := exec.Command(script, options...).Output()
	_ = a.RemoveFile(fname)
	return string(out), err
}

================================================
FILE: pkg/analyzer/dataextract/dataextract_test.go
================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License. */ package dataextract import ( "io/ioutil" "os" "testing" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" "github.com/cruise-automation/fwanalyzer/pkg/util" ) type testAnalyzer struct { Data map[string]string testfile string } func (a *testAnalyzer) AddData(key, value string) { a.Data[key] = value } func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) { return fsparser.FileInfo{}, nil } func (a *testAnalyzer) RemoveFile(filepath string) error { return nil } func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) { return []byte(""), nil } func (a *testAnalyzer) FileGet(filepath string) (string, error) { return a.testfile, nil } func (a *testAnalyzer) AddOffender(filepath string, reason string) { } func (a *testAnalyzer) AddInformational(filepath string, reason string) {} func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) { } func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} } func makeFile(data string, fn string) fsparser.FileInfo { err := ioutil.WriteFile("/tmp/"+fn, []byte(data), 0666) if err != nil { panic(err) } return fsparser.FileInfo{Name: fn, Size: 1, Mode: 0100644} } func TestRegex1(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version"] File = "/tmp/datatestfileX.1" RegEx = ".*Ver=(.+)\n" Desc="Ver 1337 test" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" // must match fi := makeFile("sadkljhlksaj Ver=1337\naasas\n ", "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "1337" { t.Errorf("data extract failed Regex") } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") // 
must not match fi = makeFile("sadkljhlksaj ver=1337\naasas\n ", "datatestfileX.1") err = g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; ok && data == "1337" { t.Errorf("data extract failed Regex") } os.Remove("/tmp/datatestfileX.1") g.Finalize() } func TestScript1(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract.LastLine] File = "/tmp/datatestfileX.1" Script="/tmp/extractscripttest.sh" Desc="last line test" ` script := `#!/bin/sh tail -n 1 $1 ` err := ioutil.WriteFile("/tmp/extractscripttest.sh", []byte(script), 0777) if err != nil { t.Error(err) } g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile("lskjadh\naskhj23832\n\nkjhf21987\nhello world\n", "datatestfileX.1") err = g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["LastLine"]; !ok || data != "hello world\n" { t.Errorf("data extract failed script") } os.Remove("/tmp/datatestfileX.1") os.Remove("/tmp/extractscripttest.sh") g.Finalize() } func TestMulti(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."1"] File = "/tmp/datatestfileX.1" RegEx = ".*Ver=(.+)\n" Name = "Version" [DataExtract."2"] File = "/tmp/datatestfileX.1" RegEx = ".*Version=(.+)\n" Name = "Version" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile("sadkljhlksaj Version=1337\naasas\n ", "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "1337" { t.Errorf("data extract failed Regex") } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") fi = makeFile("sadkljhlksaj Ver=1337\naasas\n ", "datatestfileX.1") err = g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok && data == "1337" { t.Errorf("data extract failed Regex") } 
os.Remove("/tmp/datatestfileX.1") g.Finalize() } func TestAutoNaming(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" RegEx = ".*Ver=(.+)\n" [DataExtract."Version__0"] File = "/tmp/datatestfileX.1" RegEx = ".*Version=(.+)\n" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile("sadkljhlksaj Version=1337\naasas\n ", "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "1337" { t.Errorf("data extract failed Regex") } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") fi = makeFile("sadkljhlksaj Ver=1337\naasas\n ", "datatestfileX.1") err = g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok && data == "1337" { t.Errorf("data extract failed Regex") } os.Remove("/tmp/datatestfileX.1") g.Finalize() } func TestJson1(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" Json = "a" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile(`{"a":"lalala"}`, "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "lalala" { t.Errorf("data extract failed Json") } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") g.Finalize() } func TestJson2(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" Json = "a.b" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile(`{"a":{"b": "lalala123"}}`, "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "lalala123" { t.Errorf("data extract failed Json") } 
os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") g.Finalize() } func TestJson3Bool(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" Json = "a.c" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile(`{"a":{"c": true}}`, "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "true" { t.Errorf("data extract failed Json") } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") g.Finalize() } func TestJsonError(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" Json = "a.c" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile(`{"a":{"c": true}`, "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data == "true" { t.Errorf("data extract failed Json: %s", a.Data["Version"]) } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") g.Finalize() } func TestJson4Num(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" Json = "a.d" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile(`{"a":{"d": 123}}`, "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "123.000000" { t.Errorf("data extract failed Json, %s", a.Data["Version"]) } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") g.Finalize() } func TestJson5Deep(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" Json = "a.b.c.d.e.f" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := 
makeFile(`{"a":{"b":{"c":{"d":{"e":{"f": "deep"}}}}}}`, "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "deep" { t.Errorf("data extract failed Json, %s", a.Data["Version"]) } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") g.Finalize() } func TestJson6array(t *testing.T) { a := &testAnalyzer{} a.Data = make(map[string]string) cfg := ` [DataExtract."Version__9"] File = "/tmp/datatestfileX.1" Json = "a.0.c" ` g := New(cfg, a) g.Start() a.testfile = "/tmp/datatestfileX.1" fi := makeFile(`{"a":[{"c": true}]}`, "datatestfileX.1") err := g.CheckFile(&fi, "/tmp") if err != nil { t.Errorf("CheckFile failed") } if data, ok := a.Data["Version"]; !ok || data != "true" { t.Errorf("data extract failed Json") } os.Remove("/tmp/datatestfileX.1") delete(a.Data, "Version") g.Finalize() } func TestJsonContent(t *testing.T) { cfg := ` [GlobalConfig] FsType = "dirfs" [DataExtract."jsonfile.json"] File = "/jsonfile.json" RegEx = "(.*)\\n" ` analyzer := analyzer.NewFromConfig("../../../test/testdir", cfg) analyzer.AddAnalyzerPlugin(New(string(cfg), analyzer)) analyzer.RunPlugins() report := analyzer.JsonReport() item, err := util.XtractJsonField([]byte(report), []string{"data", "jsonfile.json", "test_str"}) if err != nil { t.Errorf("error %s", err) } if item != "yolo" { t.Errorf("data was not json encoded: %s", report) } _ = analyzer.CleanUp() } ================================================ FILE: pkg/analyzer/dircontent/dircontent.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package dircontent import ( "fmt" "path" "github.com/BurntSushi/toml" "github.com/bmatcuk/doublestar" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type dirContentType struct { Path string // path of directory to check Allowed []string // list of files that are allowed to be there Required []string // list of files that must be there found map[string]bool // whether or not there was a match for this file } type dirContentCheckType struct { dirs map[string]dirContentType a analyzer.AnalyzerType } func addTrailingSlash(path string) string { if path[len(path)-1] != '/' { return path + "/" } return path } func validateItem(item dirContentType) bool { // ensure items in the Allowed/Required lists are valid for doublestar.Match() for _, allowed := range item.Allowed { _, err := doublestar.Match(allowed, "") if err != nil { return false } } for _, required := range item.Required { _, err := doublestar.Match(required, "") if err != nil { return false } } return true } func New(config string, a analyzer.AnalyzerType) *dirContentCheckType { type dirCheckListType struct { DirContent map[string]dirContentType } cfg := dirContentCheckType{a: a, dirs: make(map[string]dirContentType)} var dec dirCheckListType _, err := toml.Decode(config, &dec) if err != nil { panic("can't read config data: " + err.Error()) } for name, item := range dec.DirContent { if !validateItem(item) { a.AddOffender(name, "invalid DirContent entry") } item.Path = addTrailingSlash(name) if _, ok := cfg.dirs[item.Path]; ok { a.AddOffender(name, "only one 
DirContent is allowed per path") } item.found = make(map[string]bool) for _, req := range item.Required { item.found[req] = false } cfg.dirs[item.Path] = item } return &cfg } func (state *dirContentCheckType) Start() {} func (state *dirContentCheckType) Finalize() string { for _, item := range state.dirs { for fn, found := range item.found { if !found { state.a.AddOffender(fn, fmt.Sprintf("DirContent: required file %s not found in directory %s", fn, item.Path)) } } } return "" } func (state *dirContentCheckType) Name() string { return "DirContent" } func (state *dirContentCheckType) CheckFile(fi *fsparser.FileInfo, dirpath string) error { dp := addTrailingSlash(dirpath) item, ok := state.dirs[dp] if !ok { return nil } found := false for _, fn := range item.Allowed { // allow globs for Allowed m, err := doublestar.Match(fn, fi.Name) if err != nil { // shouldn't happen because we check these in validateItem() return err } if m { found = true } } for _, fn := range item.Required { m, err := doublestar.Match(fn, fi.Name) if err != nil { return err } if m { item.found[fn] = true found = true } } if !found { state.a.AddOffender(path.Join(dirpath, fi.Name), fmt.Sprintf("DirContent: File %s not allowed in directory %s", fi.Name, dirpath)) } return nil } ================================================ FILE: pkg/analyzer/dircontent/dircontent_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package dircontent import ( "testing" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type OffenderCallack func(fn string) type testAnalyzer struct { ocb OffenderCallack testfile string } func (a *testAnalyzer) AddData(key, value string) {} func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) { return fsparser.FileInfo{}, nil } func (a *testAnalyzer) RemoveFile(filepath string) error { return nil } func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) { return []byte(""), nil } func (a *testAnalyzer) FileGet(filepath string) (string, error) { return a.testfile, nil } func (a *testAnalyzer) AddOffender(filepath string, reason string) { a.ocb(filepath) } func (a *testAnalyzer) AddInformational(filepath string, reason string) {} func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) { } func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} } func TestDirCheck(t *testing.T) { a := &testAnalyzer{} cfg := ` [DirContent."/temp"] Allowed = ["file1", "file2"] Required = ["file10"] ` tests := []struct { path string file string shouldTrigger bool shouldTriggerFinal bool }{ { "/temp", "file1", false, true, // file allowed }, { "/temp", "file4", true, true, // file not allowed }, { "/temp1", "file4", false, true, // wrong dir, shouldn't matter }, { "/temp", "file10", false, false, // file is required }, } g := New(cfg, a) g.Start() for _, test := range tests { triggered := false a.ocb = func(fp string) { triggered = true } fi := fsparser.FileInfo{Name: test.file} err := g.CheckFile(&fi, test.path) if err != nil { t.Errorf("CheckFile returned error for %s", fi.Name) } if triggered != test.shouldTrigger { t.Errorf("incorrect result for %s/%s, wanted %v got %v", test.path, test.file, test.shouldTrigger, triggered) } triggered = false g.Finalize() if triggered 
!= test.shouldTriggerFinal { t.Errorf("incorrect result for %s/%s on Finalize(), wanted %v got %v", test.path, test.file, test.shouldTriggerFinal, triggered) } } } ================================================ FILE: pkg/analyzer/filecmp/filecmp.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package filecmp import ( "fmt" "io" "io/ioutil" "os" "os/exec" "path" "syscall" "github.com/BurntSushi/toml" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type cmpType struct { File string // filename OldFilePath string Script string ScriptOptions []string InformationalOnly bool // put result into Informational (not Offenders) name string // name of this check (need to be unique) } type fileCmpType struct { files map[string][]cmpType a analyzer.AnalyzerType } func New(config string, a analyzer.AnalyzerType, fileDirectory string) *fileCmpType { type fileCmpListType struct { FileCmp map[string]cmpType } cfg := fileCmpType{a: a, files: make(map[string][]cmpType)} var fcc fileCmpListType _, err := toml.Decode(config, &fcc) if err != nil { panic("can't read config data: " + err.Error()) } // convert text name based map to filename based map with an array of checks for name, item := range fcc.FileCmp { // make sure required options are set if item.OldFilePath == "" || item.Script == "" { continue } var items []cmpType if _, ok := cfg.files[item.File]; ok { items = 
cfg.files[item.File] } if fileDirectory != "" { item.OldFilePath = path.Join(fileDirectory, item.OldFilePath) } item.name = name item.File = path.Clean(item.File) items = append(items, item) cfg.files[item.File] = items } return &cfg } func (state *fileCmpType) Start() {} func (state *fileCmpType) Finalize() string { return "" } func (state *fileCmpType) Name() string { return "FileCmp" } func fileExists(filePath string) error { var fileState syscall.Stat_t return syscall.Lstat(filePath, &fileState) } func copyFile(out string, in string) error { src, err := os.Open(in) if err != nil { return err } defer src.Close() dst, err := os.Create(out) if err != nil { return err } defer dst.Close() _, err = io.Copy(dst, src) return err } func makeTmpFromOld(filePath string) (string, error) { tmpfile, err := ioutil.TempFile("", "") if err != nil { return "", err } defer tmpfile.Close() src, err := os.Open(filePath) if err != nil { return "", err } defer src.Close() _, err = io.Copy(tmpfile, src) return tmpfile.Name(), err } func (state *fileCmpType) CheckFile(fi *fsparser.FileInfo, filepath string) error { fn := path.Join(filepath, fi.Name) if _, ok := state.files[fn]; !ok { return nil } for _, item := range state.files[fn] { if !fi.IsFile() || fi.IsLink() { state.a.AddOffender(fn, "FileCmp: is not a file or is a link") continue } tmpfn, err := state.a.FileGet(fn) if err != nil { state.a.AddOffender(fn, fmt.Sprintf("FileCmp: error getting file: %s", err)) continue } // we don't have a saved file so save it now and skip this check if fileExists(item.OldFilePath) != nil { err := copyFile(item.OldFilePath+".new", tmpfn) if err != nil { state.a.AddOffender(fn, fmt.Sprintf("FileCmp: error saving file: %s", err)) continue } state.a.AddInformational(fn, "FileCmp: saved file for next run") continue } oldTmp, err := makeTmpFromOld(item.OldFilePath) if err != nil { state.a.AddOffender(fn, fmt.Sprintf("FileCmp: error getting old file: %s", err)) continue } args := []string{fi.Name, 
oldTmp, tmpfn} if len(item.ScriptOptions) > 0 { args = append(args, "--") args = append(args, item.ScriptOptions...) } out, err := exec.Command(item.Script, args...).CombinedOutput() if err != nil { state.a.AddOffender(path.Join(filepath, fi.Name), fmt.Sprintf("script(%s) error=%s", item.Script, err)) } err = state.a.RemoveFile(tmpfn) if err != nil { panic("removeFile failed") } err = state.a.RemoveFile(oldTmp) if err != nil { panic("removeFile failed") } if len(out) > 0 { if item.InformationalOnly { state.a.AddInformational(path.Join(filepath, fi.Name), string(out)) } else { state.a.AddOffender(path.Join(filepath, fi.Name), string(out)) } } } return nil } ================================================ FILE: pkg/analyzer/filecmp/filecmp_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
package filecmp

import (
	"io/ioutil"
	"os"
	"testing"

	"github.com/cruise-automation/fwanalyzer/pkg/analyzer"
	"github.com/cruise-automation/fwanalyzer/pkg/fsparser"
)

// OffenderCallack is the test hook invoked for every AddOffender/AddInformational call.
// NOTE(review): "Callack" is a typo for "Callback"; renaming would touch every use in this file.
type OffenderCallack func(fn string, info bool)

// testAnalyzer is a stub analyzer.AnalyzerType: FileGet always returns the
// pre-set testfile path, and offender/informational reports are forwarded to ocb.
type testAnalyzer struct {
	ocb      OffenderCallack
	testfile string
}

func (a *testAnalyzer) AddData(key, value string) {}
func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) {
	return fsparser.FileInfo{}, nil
}
func (a *testAnalyzer) RemoveFile(filepath string) error { return nil }
func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) { return []byte(""), nil }
func (a *testAnalyzer) FileGet(filepath string) (string, error) { return a.testfile, nil }

// AddOffender reports with info=false; AddInformational with info=true, so the
// tests can distinguish which channel the check used.
func (a *testAnalyzer) AddOffender(filepath string, reason string) { a.ocb(reason, false) }
func (a *testAnalyzer) AddInformational(filepath string, reason string) { a.ocb(reason, true) }
func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) {
}
func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} }

// TestCmp verifies that comparing a file against an identical "old" copy is
// silent, while a differing copy produces an offender.
// NOTE(review): writes fixtures to fixed /tmp paths, so these tests are not
// parallel-safe.
func TestCmp(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileCmp."Test1"]
File ="/cmp_test_1"
Script = "diff.sh"
ScriptOptions = [""]
OldFilePath = "/tmp/analyzer_filecmp_1"
`
	g := New(cfg, a, "")
	g.Start()
	called := false
	infoText := ""
	a.ocb = func(name string, info bool) {
		called = true
		infoText = name
	}
	// same file should not produce output
	data := `
aaa
bbb
ccc
ddd
`
	err := ioutil.WriteFile("/tmp/analyzer_filecmp_1", []byte(data), 0755)
	if err != nil {
		t.Error(err)
	}
	a.testfile = "/tmp/analyzer_filecmp_1"
	called = false
	infoText = ""
	fi := fsparser.FileInfo{Name: "cmp_test_1", Mode: 100755}
	err = g.CheckFile(&fi, "/")
	if err != nil {
		t.Error(err)
	}
	if called {
		t.Errorf("should not produce offender: %s", infoText)
	}
	// should cause an offender
	called = false
	infoText = ""
	data = `
aaa
bbb
ccc
ddd
`
	err = ioutil.WriteFile("/tmp/analyzer_filecmp_1", []byte(data), 0755)
	if err != nil {
		t.Error(err)
	}
	data = `
aaa
ddd
ccc
`
	err = ioutil.WriteFile("/tmp/analyzer_filecmp_2", []byte(data), 0755)
	if err != nil {
		t.Error(err)
	}
	a.testfile = "/tmp/analyzer_filecmp_2"
	fi = fsparser.FileInfo{Name: "cmp_test_1", Mode: 100755}
	err = g.CheckFile(&fi, "/")
	if err != nil {
		t.Error(err)
	}
	if !called {
		t.Errorf("should produce offender: %s", infoText)
	}
}

// TestCmpInfo verifies that with InformationalOnly=true a difference is
// reported through AddInformational (info=true) instead of AddOffender.
func TestCmpInfo(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileCmp."Test1"]
File ="/cmp_test_1"
Script = "diff.sh"
ScriptOptions = [""]
InformationalOnly = true
OldFilePath = "/tmp/analyzer_filecmp_1"
`
	g := New(cfg, a, "")
	g.Start()
	called := false
	infoText := ""
	infoO := false
	a.ocb = func(name string, info bool) {
		called = true
		infoText = name
		infoO = info
	}
	// should cause an informational
	data := `
aaa
bbb
ccc
ddd
`
	err := ioutil.WriteFile("/tmp/analyzer_filecmp_1", []byte(data), 0755)
	if err != nil {
		t.Error(err)
	}
	data = `
aaa
ddd
ccc
`
	err = ioutil.WriteFile("/tmp/analyzer_filecmp_2", []byte(data), 0755)
	if err != nil {
		t.Error(err)
	}
	a.testfile = "/tmp/analyzer_filecmp_2"
	fi := fsparser.FileInfo{Name: "cmp_test_1", Mode: 100755}
	err = g.CheckFile(&fi, "/")
	if err != nil {
		t.Error(err)
	}
	if !called || !infoO {
		t.Errorf("should produce informational: %s", infoText)
	}
}

// TestCmpNoOld verifies the first-run behavior: when OldFilePath does not
// exist the check is informational and the current file is saved as
// "<OldFilePath>.new" for future comparisons.
func TestCmpNoOld(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileCmp."Test1"]
File ="/cmp_test_1"
Script = "diff.sh"
ScriptOptions = [""]
OldFilePath = "/tmp/analyzer_filecmp_99"
`
	g := New(cfg, a, "")
	g.Start()
	called := false
	infoText := ""
	infoO := false
	a.ocb = func(name string, info bool) {
		called = true
		infoText = name
		infoO = info
	}
	// should cause an informational
	data := `
aaa
bbb
ccc
ddd
`
	err := ioutil.WriteFile("/tmp/analyzer_filecmp_1", []byte(data), 0755)
	if err != nil {
		t.Error(err)
	}
	a.testfile = "/tmp/analyzer_filecmp_1"
	os.Remove("/tmp/analyzer_filecmp_99.new")
	fi := fsparser.FileInfo{Name: "cmp_test_1", Mode: 100755}
	err = g.CheckFile(&fi, "/")
	if err != nil {
		t.Error(err)
	}
	if !called || !infoO {
		t.Errorf("should produce informational: %s", infoText)
	}
	inData, err := ioutil.ReadFile("/tmp/analyzer_filecmp_99.new")
	if err != nil {
		t.Error(err)
	}
	if string(inData) != data {
		t.Errorf("files not equal after save")
	}
}
================================================ FILE: pkg/analyzer/filecontent/filecontent.go ================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filecontent

import (
	"encoding/hex"
	"fmt"
	"io/ioutil"
	"os"
	"os/exec"
	"path"
	"regexp"
	"strings"

	"github.com/BurntSushi/toml"
	"github.com/bmatcuk/doublestar"

	"github.com/cruise-automation/fwanalyzer/pkg/analyzer"
	"github.com/cruise-automation/fwanalyzer/pkg/fsparser"
	"github.com/cruise-automation/fwanalyzer/pkg/util"
)

// contentType is one FileContent check from the TOML config. Exactly one of
// RegEx, Digest, Script, or Json is expected to be set (see validateItem).
type contentType struct {
	File              string   // filename
	InformationalOnly bool     // put result into Informational (not Offenders)
	RegEx             string   // regex to match against the file content
	RegExLineByLine   bool     // match regex line by line vs whole file
	Match             bool     // define if regex should match or not
	Digest            string   // used for SHA256 matching
	Script            string   // used for script execution
	ScriptOptions     []string // options for script execution
	Json              string   // used for json field matching
	Desc              string   // description
	name              string   // name of this check (need to be unique)
	checked           bool     // if this file was checked or not
}

// fileContentType holds all FileContent checks, keyed by target file path
// (one file can carry several checks).
type fileContentType struct {
	files map[string][]contentType
	a     analyzer.AnalyzerType
}

// validateItem reports whether exactly one check type (RegEx, Digest,
// Script, or Json) is configured for the item.
func validateItem(item
contentType) bool {
	if item.RegEx != "" && (item.Digest == "" && item.Script == "" && item.Json == "") {
		return true
	}
	if item.Digest != "" && (item.RegEx == "" && item.Script == "" && item.Json == "") {
		return true
	}
	if item.Script != "" && (item.RegEx == "" && item.Digest == "" && item.Json == "") {
		return true
	}
	if item.Json != "" && (item.RegEx == "" && item.Digest == "" && item.Script == "") {
		return true
	}
	return false
}

// New parses the FileContent section of the TOML config and returns the
// analyzer plugin. MatchInvert flips the Match flag of every RegEx check
// (used for global inversion). Invalid checks (zero or multiple check types)
// are reported as offenders and skipped.
func New(config string, a analyzer.AnalyzerType, MatchInvert bool) *fileContentType {
	type fileContentListType struct {
		FileContent map[string]contentType
	}
	cfg := fileContentType{a: a, files: make(map[string][]contentType)}
	var fcc fileContentListType
	_, err := toml.Decode(config, &fcc)
	if err != nil {
		panic("can't read config data: " + err.Error())
	}
	// convert text name based map to filename based map with an array of checks
	for name, item := range fcc.FileContent {
		if !validateItem(item) {
			a.AddOffender(name, "FileContent: check must include one of Digest, RegEx, Json, or Script")
			continue
		}
		// Fix: clean the path *before* looking up existing checks, so lookup
		// and store use the same map key. Previously the lookup used the raw
		// path while the store used the cleaned path, so two checks for
		// "/etc/passwd" and "/etc//passwd" would not be merged correctly.
		item.File = path.Clean(item.File)
		var items []contentType
		if _, ok := cfg.files[item.File]; ok {
			items = cfg.files[item.File]
		}
		item.name = name
		if MatchInvert {
			item.Match = !item.Match
		}
		items = append(items, item)
		cfg.files[item.File] = items
	}
	return &cfg
}

func (state *fileContentType) Start() {}

// Finalize reports an offender for every configured file that was never seen
// during the filesystem walk.
func (state *fileContentType) Finalize() string {
	for fn, items := range state.files {
		for _, item := range items {
			if !item.checked {
				state.a.AddOffender(fn, fmt.Sprintf("FileContent: file %s not found", fn))
			}
		}
	}
	return ""
}

func (state *fileContentType) Name() string {
	return "FileContent"
}

// regexCompile tries POSIX compilation first and falls back to Go's normal
// regexp syntax if that fails.
func regexCompile(rx string) (*regexp.Regexp, error) {
	reg, err := regexp.CompilePOSIX(rx)
	if err != nil {
		reg, err = regexp.Compile(rx)
	}
	return reg, err
}

// canCheckFile rejects non-regular files and symlinks (content checks need
// the actual file); a rejection is itself reported as an offender.
func (state *fileContentType) canCheckFile(fi *fsparser.FileInfo, fn string, item contentType) bool {
	if !fi.IsFile() {
		state.a.AddOffender(fn, fmt.Sprintf("FileContent: '%s' file is NOT a file : %s", item.name, item.Desc))
		return false
	}
	if fi.IsLink() {
		state.a.AddOffender(fn, fmt.Sprintf("FileContent: '%s' file is a link (check actual file) : %s", item.name, item.Desc))
		return false
	}
	return true
}

// CheckFile runs every configured check (RegEx, Digest, Script, Json) for
// the file at filepath/fi.Name and marks the checks as seen.
func (state *fileContentType) CheckFile(fi *fsparser.FileInfo, filepath string) error {
	fn := path.Join(filepath, fi.Name)
	if _, ok := state.files[fn]; !ok {
		return nil
	}
	items := state.files[fn]
	for n, item := range items {
		items[n].checked = true
		//fmt.Printf("name: %s file: %s (%s)\n", item.name, item.File, fn)
		if item.RegEx != "" {
			if !state.canCheckFile(fi, fn, item) {
				continue
			}
			reg, err := regexCompile(item.RegEx)
			if err != nil {
				state.a.AddOffender(fn, fmt.Sprintf("FileContent: regex compile error: %s : %s : %s", item.RegEx, item.name, item.Desc))
				continue
			}
			tmpfn, err := state.a.FileGet(fn)
			// this should never happen since this function is called for every existing file
			if err != nil {
				state.a.AddOffender(fn, fmt.Sprintf("FileContent: error reading file: %s", err))
				continue
			}
			fdata, _ := ioutil.ReadFile(tmpfn)
			err = state.a.RemoveFile(tmpfn)
			if err != nil {
				panic("RemoveFile failed")
			}
			if item.RegExLineByLine {
				for _, line := range strings.Split(strings.TrimSuffix(string(fdata), "\n"), "\n") {
					if reg.MatchString(line) == item.Match {
						if item.InformationalOnly {
							state.a.AddInformational(fn, fmt.Sprintf("RegEx check failed, for: %s : %s : line: %s", item.name, item.Desc, line))
						} else {
							state.a.AddOffender(fn, fmt.Sprintf("RegEx check failed, for: %s : %s : line: %s", item.name, item.Desc, line))
						}
					}
				}
			} else {
				if reg.Match(fdata) == item.Match {
					if item.InformationalOnly {
						state.a.AddInformational(fn, fmt.Sprintf("RegEx check failed, for: %s : %s", item.name, item.Desc))
					} else {
						state.a.AddOffender(fn, fmt.Sprintf("RegEx check failed, for: %s : %s", item.name, item.Desc))
					}
				}
			}
			continue
		}
		if item.Digest != "" {
			if !state.canCheckFile(fi, fn, item) {
				continue
			}
			digestRaw, err := state.a.FileGetSha256(fn)
			if err != nil {
				return err
			}
			digest := hex.EncodeToString(digestRaw)
			// round-trip the configured digest through decode/encode to
			// normalize its hex representation before comparing
			saved, _ := hex.DecodeString(item.Digest)
			savedStr := hex.EncodeToString(saved)
			if digest != savedStr {
				if item.InformationalOnly {
					state.a.AddInformational(fn, fmt.Sprintf("Digest (sha256) did not match found = %s should be = %s. %s : %s ", digest, savedStr, item.name, item.Desc))
				} else {
					state.a.AddOffender(fn, fmt.Sprintf("Digest (sha256) did not match found = %s should be = %s. %s : %s ", digest, savedStr, item.name, item.Desc))
				}
			}
			continue
		}
		if item.Script != "" {
			cbd := callbackDataType{state, item.Script, item.ScriptOptions, item.InformationalOnly}
			if fi.IsDir() {
				// run the script over everything below the directory
				state.a.CheckAllFilesWithPath(checkFileScript, &cbd, fn)
			} else {
				if !state.canCheckFile(fi, fn, item) {
					continue
				}
				checkFileScript(fi, filepath, &cbd)
			}
		}
		if item.Json != "" {
			if !state.canCheckFile(fi, fn, item) {
				continue
			}
			tmpfn, err := state.a.FileGet(fn)
			if err != nil {
				state.a.AddOffender(fn, fmt.Sprintf("FileContent: error getting file: %s", err))
				continue
			}
			fdata, err := ioutil.ReadFile(tmpfn)
			if err != nil {
				state.a.AddOffender(fn, fmt.Sprintf("FileContent: error reading file: %s", err))
				continue
			}
			err = state.a.RemoveFile(tmpfn)
			if err != nil {
				panic("RemoveFile failed")
			}
			// Json has the form "<dot.separated.field>:<expected value>"
			field := strings.SplitAfterN(item.Json, ":", 2)
			if len(field) != 2 {
				state.a.AddOffender(fn, fmt.Sprintf("FileContent: error Json config bad = %s, %s, %s", item.Json, item.name, item.Desc))
				continue
			}
			// remove ":" so we just have the value we want to check
			field[0] = strings.Replace(field[0], ":", "", 1)
			fieldData, err := util.XtractJsonField(fdata, strings.Split(field[0], "."))
			if err != nil {
				state.a.AddOffender(fn, fmt.Sprintf("FileContent: error Json bad field = %s, %s, %s", field[0], item.name, item.Desc))
				continue
			}
			if fieldData != field[1] {
				if item.InformationalOnly {
					state.a.AddInformational(fn, fmt.Sprintf("Json field %s = %s did not match = %s, %s, %s", field[0], fieldData, field[1], item.name, item.Desc))
				} else {
					state.a.AddOffender(fn, fmt.Sprintf("Json field %s = %s did not match = %s, %s, %s", field[0], fieldData, field[1], item.name, item.Desc))
				}
			}
		}
	}
	return nil
}

// callbackDataType carries the per-check context into checkFileScript.
type callbackDataType struct {
	state             *fileContentType
	script            string
	scriptOptions     []string
	informationalOnly bool
}

/*
 * Extract file and run script passing the file name as the argument to the script.
 * Only regular files that are not empty are processed, script is for checking content.
 * The script output is used to indicate an issue, the output is saved in the offender record.
 *
 * The first element in scriptOptions (from the callback data) defines a path match string.
 * This allows to specify a pattern the filename has to match. Files with names that do not match will
 * not be analyzed by the script. This is to speed up execution time since files have to be extracted
 * to analyze them with the external script.
 *
 * The following elements in scriptOptions will be passed to the script as cmd line arguments.
 *
 * The script is run with the following parameters:
 * script.sh <extracted file> <full path> <uid> <gid> <mode> <selinux label> -- <scriptOptions[1:]>
 */
func checkFileScript(fi *fsparser.FileInfo, fullpath string, cbData analyzer.AllFilesCallbackData) {
	cbd := cbData.(*callbackDataType)
	fullname := path.Join(fullpath, fi.Name)
	// skip/ignore anything but normal files
	if !fi.IsFile() || fi.IsLink() {
		return
	}
	if len(cbd.scriptOptions) >= 1 {
		m, err := doublestar.Match(cbd.scriptOptions[0], fi.Name)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Match error: %s\n", err)
			return
		}
		// file name didn't match the specifications in scriptOptions[0]
		if !m {
			return
		}
	}
	// Fix: the FileGet error was previously ignored, which would run the
	// script on an empty/invalid temp filename. Report and bail out instead,
	// matching the error handling style used in CheckFile.
	fname, err := cbd.state.a.FileGet(fullname)
	if err != nil {
		cbd.state.a.AddOffender(fullname, fmt.Sprintf("FileContent: error getting file: %s", err))
		return
	}
	args := []string{fname, fullname,
		fmt.Sprintf("%d", fi.Uid),
		fmt.Sprintf("%d", fi.Gid),
		fmt.Sprintf("%o", fi.Mode),
		fi.SELinuxLabel,
	}
	if len(cbd.scriptOptions) >= 2 {
		args = append(args, "--")
		args = append(args, cbd.scriptOptions[1:]...)
	}
	out, err := exec.Command(cbd.script, args...).CombinedOutput()
	if err != nil {
		cbd.state.a.AddOffender(fullname, fmt.Sprintf("script(%s) error=%s", cbd.script, err))
	}
	err = cbd.state.a.RemoveFile(fname)
	if err != nil {
		panic("removeFile failed")
	}
	// any script output at all flags the file
	if len(out) > 0 {
		if cbd.informationalOnly {
			cbd.state.a.AddInformational(fullname, string(out))
		} else {
			cbd.state.a.AddOffender(fullname, string(out))
		}
	}
}
================================================ FILE: pkg/analyzer/filecontent/filecontent_test.go ================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filecontent

import (
	"io/ioutil"
	"os"
	"testing"

	"github.com/cruise-automation/fwanalyzer/pkg/analyzer"
	"github.com/cruise-automation/fwanalyzer/pkg/fsparser"
)

// OffenderCallack is the test hook invoked when AddOffender fires.
// NOTE(review): "Callack" is a typo for "Callback"; renaming would touch every use in this file.
type OffenderCallack func(fn string)

// testAnalyzer is a stub analyzer.AnalyzerType: FileGet returns the pre-set
// testfile path, FileGetSha256 returns a fixed fake digest per path, and
// AddOffender forwards to ocb (AddInformational is a no-op).
type testAnalyzer struct {
	ocb      OffenderCallack
	testfile string
}

func (a *testAnalyzer) AddData(key, value string) {}
func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) {
	return fsparser.FileInfo{}, nil
}
func (a *testAnalyzer) RemoveFile(filepath string) error { return nil }

// FileGetSha256 returns the raw ASCII bytes "AABBCCDDEEFF11223344" for
// datatestfile.1 (hex 4141...3434, matching the configs below) and a variant
// ending in "...41" for every other path.
func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) {
	if filepath == "/tmp/datatestfile.1" {
		return []byte("AABBCCDDEEFF11223344"), nil
	}
	return []byte("AABBCCDDEEFF11223341"), nil
}
func (a *testAnalyzer) FileGet(filepath string) (string, error) { return a.testfile, nil }
func (a *testAnalyzer) AddOffender(filepath string, reason string) { a.ocb(filepath) }
func (a *testAnalyzer) AddInformational(filepath string, reason string) {}
func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) {
}
func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} }

// makeFile writes data to /tmp/<fn> and returns a matching FileInfo stub.
func makeFile(data string, fn string) fsparser.FileInfo {
	err := ioutil.WriteFile("/tmp/"+fn, []byte(data), 0666)
	if err != nil {
		panic(err)
	}
	return fsparser.FileInfo{Name: fn, Size: 1, Mode: 100666}
}

// TestRegex: with Match=true an offender fires when the regex matches and
// stays silent when it does not; Finalize must not flag the seen file.
func TestRegex(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileContent."RegExTest1"]
RegEx = ".*Ver=1337.*"
Match = true
File ="/tmp/datatestfile.1"
[FileContent."RegExTest2"]
RegEx = ".*Ver=1337.*"
Match = true
File ="/tmp/datatestfile.1"
`
	g := New(cfg, a, false)
	g.Start()
	a.testfile = "/tmp/datatestfile.1"
	// match
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi := makeFile("sadkljhlksaj Ver=1337 \naasas\n ", "datatestfile.1")
	err := g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if !triggered {
		t.Errorf("file content failed Regex")
	}
	os.Remove("/tmp/datatestfile.1")
	// do not match
	triggered = false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi = makeFile("sadkljhlksaj Ver=1338\nasdads\nadaasd\n", "datatestfile.1")
	err = g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if triggered {
		t.Errorf("file content failed regex")
	}
	os.Remove("/tmp/datatestfile.1")
	// ensure file isn't flagged as not-found
	g.Finalize()
	if triggered {
		t.Errorf("file content failed, found file flagged as not-found")
	}
}

// TestDigest: matching digest is silent, non-matching digest fires.
func TestDigest(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileContent."digest test 1"]
Digest = "4141424243434444454546463131323233333434"
File = "/tmp/datatestfile.1"
[FileContent."digest test 2"]
Digest = "4141424243434444454546463131323233333435"
File ="/tmp/datatestfile.2"
`
	g := New(cfg, a, false)
	g.Start()
	a.testfile = "/tmp/datatestfile.1"
	// match
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi := makeFile("sadkljhlksaj Ver=1337 \naasas\n ", "datatestfile.1")
	err := g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if triggered {
		t.Errorf("file content failed digest")
	}
	os.Remove("/tmp/datatestfile.1")
	// do not match
	triggered = false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi = makeFile("sadkljhlksaj Ver=1338\nasdads\nadaasd\n", "datatestfile.2")
	err = g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if !triggered {
		t.Errorf("file content failed digest")
	}
	os.Remove("/tmp/datatestfile.2")
	g.Finalize()
}

// TestScript: a script that cats the file produces output, which must be
// reported as an offender.
func TestScript(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileContent."script test 1"]
Script="/tmp/testfilescript.sh"
File = "/tmp/datatestfile.1"
`
	script := `#!/bin/sh
cat $1
`
	err := ioutil.WriteFile("/tmp/testfilescript.sh", []byte(script), 0777)
	if err != nil {
		t.Error(err)
	}
	g := New(cfg, a, false)
	g.Start()
	a.testfile = "/tmp/datatestfile.1"
	// match
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi := makeFile("sadkljhlksaj Ver=1337 \naasas\n ", "datatestfile.1")
	err = g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if !triggered {
		t.Errorf("file content script test failed")
	}
	os.Remove("/tmp/datatestfile.1")
	os.Remove("/tmp/testfilescript.sh")
	g.Finalize()
}

// TestValidateItem: configs with multiple check types or none at all must be
// rejected by New via AddOffender.
func TestValidateItem(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileContent."digest test 1"]
Digest = "4141424243434444454546463131323233333434"
Script = "asdf.sh"
File = "/tmp/datatestfile.1"
`
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	g := New(cfg, a, false)
	if !triggered {
		t.Errorf("file content failed validate with multiple check types")
	}
	g.Finalize()
	triggered = false
	cfg = `
[FileContent."digest test 1"]
File = "/tmp/datatestfile.1"
`
	New(cfg, a, false)
	if !triggered {
		t.Errorf("file content failed validate without check type")
	}
}

// TestMissingFile: a configured file that never appears must only be
// reported at Finalize time, not during CheckFile of other files.
func TestMissingFile(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileContent."RegExTest1"]
RegEx = ".*Ver=1337.*"
Match = true
File ="/tmp/datatestfile.notfound"
`
	g := New(cfg, a, false)
	g.Start()
	a.testfile = "/tmp/datatestfile.1"
	// match
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi := makeFile("sadkljhlksaj Ver=1337 \naasas\n ", "datatestfile.1")
	err := g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	// pass should still be false here because CheckFile did not see the file
	if triggered {
		t.Errorf("file content failed, missing file checked")
	}
	os.Remove("/tmp/datatestfile.1")
	g.Finalize()
	// triggered should be true here because Finalize should call AddOffender
	if !triggered {
		t.Errorf("file content failed, missing file not found")
	}
}

// TestJson: matching Json field value must be silent.
func TestJson(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileContent."json test 1"]
Json="a.b:test123"
File = "/tmp/datatestfile.1"
`
	g := New(cfg, a, false)
	g.Start()
	a.testfile = "/tmp/datatestfile.1"
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi := makeFile(`{"a":{"b": "test123"}}`, "datatestfile.1")
	err := g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if triggered {
		t.Errorf("file content json failed")
	}
	os.Remove("/tmp/datatestfile.1")
	g.Finalize()
}

// TestJsonDoesNotMatch: differing Json field value must fire an offender.
func TestJsonDoesNotMatch(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FileContent."json test 1"]
Json="a.b:test12A"
File = "/tmp/datatestfile.1"
`
	g := New(cfg, a, false)
	g.Start()
	a.testfile = "/tmp/datatestfile.1"
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi := makeFile(`{"a":{"b": "test123"}}`, "datatestfile.1")
	err := g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if !triggered {
		t.Errorf("file content json failed")
	}
	os.Remove("/tmp/datatestfile.1")
	g.Finalize()
}

// TestGlobalInvert: with MatchInvert=true the Match semantics of TestRegex
// are flipped (matching content is silent, non-matching content fires).
func TestGlobalInvert(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `[FileContent."RegExTest1"]
RegEx = ".*Ver=1337.*"
Match = true
File ="/tmp/datatestfile.1"
[FileContent."RegExTest2"]
RegEx = ".*Ver=1337.*"
Match = true
File ="/tmp/datatestfile.1"
`
	g := New(cfg, a, true)
	g.Start()
	a.testfile = "/tmp/datatestfile.1"
	// match
	triggered := false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi := makeFile("sadkljhlksaj Ver=1337 \naasas\n ", "datatestfile.1")
	err := g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if triggered {
		t.Errorf("file content failed Regex")
	}
	os.Remove("/tmp/datatestfile.1")
	// dont match
	triggered = false
	a.ocb = func(fn string) {
		triggered = true
	}
	fi = makeFile("sadkljhlksaj Ver=1338\nasdads\nadaasd\n", "datatestfile.1")
	err = g.CheckFile(&fi, "/tmp")
	if err != nil {
		t.Errorf("CheckFile failed")
	}
	if !triggered {
		t.Errorf("file content failed regex")
	}
	os.Remove("/tmp/datatestfile.1")
	// ensure file isn't flagged as not-found
	g.Finalize()
	if !triggered {
		t.Errorf("file content failed, found file flagged as not-found")
	}
}
================================================ FILE: pkg/analyzer/filepathowner/filepathowner.go ================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in
compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package filepathowner import ( "fmt" "path" "github.com/BurntSushi/toml" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type filePathOwner struct { Uid int Gid int } type filePathOwenrList struct { FilePathOwner map[string]filePathOwner } type fileownerpathType struct { files filePathOwenrList a analyzer.AnalyzerType } func New(config string, a analyzer.AnalyzerType) *fileownerpathType { cfg := fileownerpathType{a: a} _, err := toml.Decode(config, &cfg.files) if err != nil { panic("can't read config data: " + err.Error()) } return &cfg } func (state *fileownerpathType) Start() {} func (state *fileownerpathType) CheckFile(fi *fsparser.FileInfo, filepath string) error { return nil } func (state *fileownerpathType) Name() string { return "FilePathOwner" } type cbDataCheckOwnerPath struct { a analyzer.AnalyzerType fop filePathOwner } func (state *fileownerpathType) Finalize() string { for fn, item := range state.files.FilePathOwner { filelist := cbDataCheckOwnerPath{a: state.a, fop: item} df, err := state.a.GetFileInfo(fn) if err != nil { state.a.AddOffender(fn, fmt.Sprintf("FilePathOwner, directory not found: %s", fn)) continue } // check the directory itself cbCheckOwnerPath(&df, fn, &filelist) // check anything within the directory state.a.CheckAllFilesWithPath(cbCheckOwnerPath, &filelist, fn) } return "" } // check that every file within a given directory is owned by the given UID and GID func cbCheckOwnerPath(fi *fsparser.FileInfo, fullpath string, data analyzer.AllFilesCallbackData) { 
var filelist *cbDataCheckOwnerPath = data.(*cbDataCheckOwnerPath) ppath := fullpath if len(fi.Name) > 0 { ppath = path.Join(ppath, fi.Name) } if fi.Uid != filelist.fop.Uid { filelist.a.AddOffender(ppath, fmt.Sprintf("FilePathOwner Uid not allowed, Uid = %d should be = %d", fi.Uid, filelist.fop.Uid)) } if fi.Gid != filelist.fop.Gid { filelist.a.AddOffender(ppath, fmt.Sprintf("FilePathOwner Gid not allowed, Gid = %d should be = %d", fi.Gid, filelist.fop.Gid)) } } ================================================ FILE: pkg/analyzer/filepathowner/filepathowner_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
package filepathowner

import (
	"testing"

	"github.com/cruise-automation/fwanalyzer/pkg/analyzer"
	"github.com/cruise-automation/fwanalyzer/pkg/fsparser"
)

// OffenderCallack is the test hook invoked when AddOffender fires.
// NOTE(review): "Callack" is a typo for "Callback"; renaming would touch every use in this file.
type OffenderCallack func(fn string)

// testAnalyzer is a stub analyzer.AnalyzerType; AddOffender forwards to ocb.
type testAnalyzer struct {
	ocb      OffenderCallack
	testfile string
}

func (a *testAnalyzer) AddData(key, value string) {}
func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) {
	return fsparser.FileInfo{}, nil
}
func (a *testAnalyzer) RemoveFile(filepath string) error { return nil }
func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) { return []byte(""), nil }
func (a *testAnalyzer) FileGet(filepath string) (string, error) { return a.testfile, nil }
func (a *testAnalyzer) AddOffender(filepath string, reason string) { a.ocb(filepath) }
func (a *testAnalyzer) AddInformational(filepath string, reason string) {}
func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) {
}
func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} }

// Test drives cbCheckOwnerPath directly: matching uid/gid must be silent,
// a mismatching gid must fire an offender.
func Test(t *testing.T) {
	a := &testAnalyzer{}
	cfg := `
[FilePathOwner."/bin"]
Uid = 0
Gid = 0
`
	g := New(cfg, a)
	g.Start()
	// uid/gid match
	triggered := false
	a.ocb = func(fp string) {
		triggered = true
	}
	fi := fsparser.FileInfo{Name: "test1", Uid: 0, Gid: 0}
	cbCheckOwnerPath(&fi, "/bin", &cbDataCheckOwnerPath{a, filePathOwner{0, 0}})
	if triggered {
		t.Errorf("checkOwnerPath failed")
	}
	// gid does not match
	triggered = false
	a.ocb = func(fp string) {
		triggered = true
	}
	fi = fsparser.FileInfo{Name: "test1", Uid: 0, Gid: 1}
	cbCheckOwnerPath(&fi, "/bin", &cbDataCheckOwnerPath{a, filePathOwner{0, 0}})
	if !triggered {
		t.Errorf("checkOwnerPath failed")
	}
	// do not call finalize() since we do not have a real source
}
================================================ FILE: pkg/analyzer/filestatcheck/filestatcheck.go ================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under
the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package filestatcheck import ( "fmt" "strconv" "strings" "github.com/BurntSushi/toml" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/capability" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type fileexistType struct { AllowEmpty bool Mode string Uid int Gid int SELinuxLabel string LinkTarget string Capabilities []string Desc string InformationalOnly bool } type fileExistListType struct { FileStatCheck map[string]fileexistType } type fileExistType struct { files fileExistListType a analyzer.AnalyzerType } func New(config string, a analyzer.AnalyzerType) *fileExistType { cfg := fileExistType{a: a} md, err := toml.Decode(config, &cfg.files) if err != nil { panic("can't read config data: " + err.Error()) } for fn, item := range cfg.files.FileStatCheck { if !md.IsDefined("FileStatCheck", fn, "Uid") { item.Uid = -1 cfg.files.FileStatCheck[fn] = item } if !md.IsDefined("FileStatCheck", fn, "Gid") { item.Gid = -1 cfg.files.FileStatCheck[fn] = item } } return &cfg } func (state *fileExistType) Start() {} func (state *fileExistType) CheckFile(fi *fsparser.FileInfo, filepath string) error { return nil } func (state *fileExistType) Name() string { return "FileStatCheck" } func (state *fileExistType) Finalize() string { for fn, item := range state.files.FileStatCheck { fi, err := state.a.GetFileInfo(fn) if err != nil { state.a.AddOffender(fn, "file does not exist") } else { checkMode := false var mode uint64 if 
item.Mode != "" { checkMode = true mode, _ = strconv.ParseUint(item.Mode, 8, 0) } if item.LinkTarget != "" { if !fi.IsLink() { state.a.AddOffender(fn, fmt.Sprintf("File State Check failed LinkTarget set but file is not a link : %s", item.Desc)) } else if item.LinkTarget != fi.LinkTarget { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("File State Check failed LinkTarget does not match '%s' found '%s' : %s", item.LinkTarget, fi.LinkTarget, item.Desc)) } else { state.a.AddOffender(fn, fmt.Sprintf("File State Check failed LinkTarget does not match '%s' found '%s' : %s", item.LinkTarget, fi.LinkTarget, item.Desc)) } } } // not allow empty with check if file size is zero if !item.AllowEmpty && fi.Size == 0 { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("File State Check failed: size: %d AllowEmpyt=false : %s", fi.Size, item.Desc)) } else { state.a.AddOffender(fn, fmt.Sprintf("File State Check failed: size: %d AllowEmpyt=false : %s", fi.Size, item.Desc)) } } // not allow empty with check that file is not a Link if !item.AllowEmpty && fi.IsLink() { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("File State Check failed: AllowEmpyt=false but file is Link (check link target instead) : %s", item.Desc)) } else { state.a.AddOffender(fn, fmt.Sprintf("File State Check failed: AllowEmpyt=false but file is Link (check link target instead) : %s", item.Desc)) } } if checkMode && fi.Mode != mode { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("File State Check failed: mode found %o should be %s : %s", fi.Mode, item.Mode, item.Desc)) } else { state.a.AddOffender(fn, fmt.Sprintf("File State Check failed: mode found %o should be %s : %s", fi.Mode, item.Mode, item.Desc)) } } if item.Gid >= 0 && fi.Gid != item.Gid { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("File State Check failed: group found %d should be %d : %s", fi.Gid, item.Gid, item.Desc)) } else { 
state.a.AddOffender(fn, fmt.Sprintf("File State Check failed: group found %d should be %d : %s", fi.Gid, item.Gid, item.Desc)) } } if item.Uid >= 0 && fi.Uid != item.Uid { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("File State Check failed: owner found %d should be %d : %s", fi.Uid, item.Uid, item.Desc)) } else { state.a.AddOffender(fn, fmt.Sprintf("File State Check failed: owner found %d should be %d : %s", fi.Uid, item.Uid, item.Desc)) } } if item.SELinuxLabel != "" && !strings.EqualFold(item.SELinuxLabel, fi.SELinuxLabel) { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("File State Check failed: selinux label found = %s should be = %s : %s", fi.SELinuxLabel, item.SELinuxLabel, item.Desc)) } else { state.a.AddOffender(fn, fmt.Sprintf("File State Check failed: selinux label found = %s should be = %s : %s", fi.SELinuxLabel, item.SELinuxLabel, item.Desc)) } } if len(item.Capabilities) > 0 { if !capability.CapsEqual(item.Capabilities, fi.Capabilities) { if item.InformationalOnly { state.a.AddInformational(fn, fmt.Sprintf("Capabilities found: %s expected: %s", fi.Capabilities, item.Capabilities)) } else { state.a.AddOffender(fn, fmt.Sprintf("Capabilities found: %s expected: %s", fi.Capabilities, item.Capabilities)) } } } } } return "" } ================================================ FILE: pkg/analyzer/filestatcheck/filestatcheck_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and
limitations under the License.
*/
package filestatcheck

import (
	"fmt"
	"testing"

	"github.com/cruise-automation/fwanalyzer/pkg/analyzer"
	"github.com/cruise-automation/fwanalyzer/pkg/fsparser"
)

// OffenderCallack is the test hook invoked when AddOffender fires.
// NOTE(review): "Callack" is a typo for "Callback"; renaming would touch every use in this file.
type OffenderCallack func(fn string)

// testAnalyzer is a stub analyzer.AnalyzerType: GetFileInfo returns the
// pre-set fi/err pair, and AddOffender forwards to ocb.
type testAnalyzer struct {
	ocb OffenderCallack
	fi  fsparser.FileInfo
	err error
}

func (a *testAnalyzer) AddData(key, value string) {}
func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) {
	return a.fi, a.err
}
func (a *testAnalyzer) RemoveFile(filepath string) error { return nil }
func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) { return []byte{}, nil }
func (a *testAnalyzer) FileGet(filepath string) (string, error) { return "", nil }
func (a *testAnalyzer) AddOffender(filepath string, reason string) { a.ocb(filepath) }
func (a *testAnalyzer) AddInformational(filepath string, reason string) {}
func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) {
}
func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} }

// TestGlobal covers the table-driven uid/mode/size/existence checks; the
// unset Gid must default to -1 (unchecked) after New.
func TestGlobal(t *testing.T) {
	a := &testAnalyzer{}
	a.err = nil
	cfg := `
[FileStatCheck."/file1111"]
AllowEmpty = false
Uid = 1
Mode = "0755"
Desc = "this need to be this way"`
	g := New(cfg, a)
	// ensure gid/uid are set to correct values
	for _, item := range g.files.FileStatCheck {
		if item.Gid != -1 {
			t.Errorf("Gid should default to -1, is %d", item.Gid)
		}
		if item.Uid != 1 {
			t.Errorf("Uid should be 1, is %d", item.Uid)
		}
	}
	g.Start()
	fi := fsparser.FileInfo{}
	if g.CheckFile(&fi, "/") != nil {
		t.Errorf("checkfile failed")
	}
	tests := []struct {
		fi            fsparser.FileInfo
		err           error
		shouldTrigger bool
	}{
		{fsparser.FileInfo{Name: "file1111", Uid: 0, Gid: 0, Mode: 0755, Size: 1}, nil, true},
		{fsparser.FileInfo{Name: "file1111", Uid: 1, Gid: 0, Mode: 0755, Size: 0}, nil, true},
		{fsparser.FileInfo{Name: "file1111", Uid: 1, Gid: 1, Mode: 0755, Size: 1}, nil, false},
		{fsparser.FileInfo{Name: "file1111", Uid: 1, Gid: 0, Mode: 0754, Size: 1}, nil, true},
		{
			fsparser.FileInfo{Name: "filedoesnotexist", Uid: 0, Gid: 0, Mode: 0755, Size: 1},
			fmt.Errorf("file does not exist"),
			true,
		},
	}
	var triggered bool
	for _, test := range tests {
		triggered = false
		a.fi = test.fi
		a.err = test.err
		a.ocb = func(fn string) {
			triggered = true
		}
		g.Finalize()
		if triggered != test.shouldTrigger {
			t.Errorf("FileStatCheck failed")
		}
	}
}

// TestLink covers LinkTarget matching: matching target is silent, differing
// target or a non-link file triggers.
func TestLink(t *testing.T) {
	a := &testAnalyzer{}
	a.err = nil
	cfg := `
[FileStatCheck."/filelink"]
AllowEmpty = true
LinkTarget = "hello"
Uid = 1
Desc = "this need to be this way"
`
	g := New(cfg, a)
	// ensure gid/uid are set to correct values
	for _, item := range g.files.FileStatCheck {
		if item.Gid != -1 {
			t.Errorf("Gid should default to -1, is %d", item.Gid)
		}
		if item.Uid != 1 {
			t.Errorf("Uid should be 1, is %d", item.Uid)
		}
	}
	g.Start()
	fi := fsparser.FileInfo{}
	if g.CheckFile(&fi, "/") != nil {
		t.Errorf("checkfile failed")
	}
	tests := []struct {
		fi            fsparser.FileInfo
		err           error
		shouldTrigger bool
	}{
		{fsparser.FileInfo{Name: "filelink", Uid: 1, Gid: 0, Mode: 0120000, LinkTarget: "hello", Size: 1}, nil, false},
		{fsparser.FileInfo{Name: "filelink", Uid: 1, Gid: 0, Mode: 0120000, LinkTarget: "hello1", Size: 1}, nil, true},
		{fsparser.FileInfo{Name: "filelink", Uid: 1, Gid: 0, Mode: 0755, Size: 1}, nil, true},
	}
	var triggered bool
	for _, test := range tests {
		triggered = false
		a.fi = test.fi
		a.err = test.err
		a.ocb = func(fn string) {
			triggered = true
		}
		g.Finalize()
		if triggered != test.shouldTrigger {
			t.Errorf("FileStatCheck failed")
		}
	}
}

// TestLinkEmpty: AllowEmpty=false combined with a link must always trigger
// (links should be checked via their target instead).
func TestLinkEmpty(t *testing.T) {
	a := &testAnalyzer{}
	a.err = nil
	cfg := `
[FileStatCheck."/filelink"]
AllowEmpty = false
LinkTarget = "hello"
Uid = 1
Desc = "this need to be this way"
`
	g := New(cfg, a)
	// ensure gid/uid are set to correct values
	for _, item := range g.files.FileStatCheck {
		if item.Gid != -1 {
			t.Errorf("Gid should default to -1, is %d", item.Gid)
		}
		if item.Uid != 1 {
			t.Errorf("Uid should be 1, is %d", item.Uid)
		}
	}
	g.Start()
	fi := fsparser.FileInfo{}
	if g.CheckFile(&fi, "/") != nil {
		t.Errorf("checkfile failed")
	}
	tests := []struct {
		fi            fsparser.FileInfo
		err           error
		shouldTrigger bool
	}{
		{fsparser.FileInfo{Name: "filelink", Uid: 1, Gid: 0, Mode: 0120000, LinkTarget: "hello", Size: 1}, nil, true},
	}
	var triggered bool
	for _, test := range tests {
		triggered = false
		a.fi = test.fi
		a.err = test.err
		a.ocb = func(fn string) {
			triggered = true
		}
		g.Finalize()
		if triggered != test.shouldTrigger {
			t.Errorf("FileStatCheck failed")
		}
	}
}
================================================ FILE: pkg/analyzer/filetree/filetree.go ================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package filetree implements the FileTreeCheck analyzer: it records every
// file seen during a scan, diffs that set against a previously saved tree
// (JSON on disk), and reports added / removed / changed files.
package filetree

import (
	"bytes"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"path"
	"strings"

	"github.com/BurntSushi/toml"

	"github.com/cruise-automation/fwanalyzer/pkg/analyzer"
	"github.com/cruise-automation/fwanalyzer/pkg/capability"
	"github.com/cruise-automation/fwanalyzer/pkg/fsparser"
	"github.com/cruise-automation/fwanalyzer/pkg/util"
)

const (
	// extension appended to OldTreeFilePath when writing an updated tree
	newFileTreeExt string = ".new"
)

// fileTreeConfig mirrors the [FileTreeCheck] TOML section.
type fileTreeConfig struct {
	OldTreeFilePath       string   // path of the previously saved tree (JSON); empty disables the check
	CheckPath             []string // path prefixes to include in the diff; defaults to ["/"] when absent
	CheckPermsOwnerChange bool     // report mode/uid/gid/label/caps/link changes
	CheckFileSize         bool     // include size differences in the change detection
	CheckFileDigest       bool     // include sha256 differences in the change detection
	SkipFileDigest        bool     // skip computing digests while scanning (digest recorded as "0")
}

// fileTreeType is the analyzer state: the tree built during this run and
// the tree loaded from OldTreeFilePath.
type fileTreeType struct {
	config  fileTreeConfig
	a       analyzer.AnalyzerType
	tree    map[string]fileInfoSaveType // full path -> info, current scan
	oldTree map[string]fileInfoSaveType // full path -> info, previous scan
}

// fileInfoSaveType is a FileInfo plus its content digest, as serialized to JSON.
type fileInfoSaveType struct {
	fsparser.FileInfo
	Digest string `json:"digest,omitempty"`
}

// imageInfoSaveType is the on-disk JSON layout of a saved file tree.
type imageInfoSaveType struct {
	ImageName   string             `json:"image_name"`
	ImageDigest string             `json:"image_digest"`
	Files       []fileInfoSaveType `json:"files"`
}

// New parses the [FileTreeCheck] config section and returns the analyzer.
// Panics if the TOML cannot be decoded. If outputDirectory is non-empty,
// OldTreeFilePath is resolved relative to it.
func New(config string, a analyzer.AnalyzerType, outputDirectory string) *fileTreeType {
	type ftcfg struct {
		FileTreeCheck fileTreeConfig
	}
	var conf ftcfg
	md, err := toml.Decode(config, &conf)
	if err != nil {
		panic("can't read config data: " + err.Error())
	}
	// if CheckPath is undefined set CheckPath to root
	// (an explicitly empty list is honored and disables all reporting)
	if !md.IsDefined("FileTreeCheck", "CheckPath") {
		conf.FileTreeCheck.CheckPath = []string{"/"}
	}
	for i := range conf.FileTreeCheck.CheckPath {
		conf.FileTreeCheck.CheckPath[i] = util.CleanPathDir(conf.FileTreeCheck.CheckPath[i])
	}
	cfg := fileTreeType{config: conf.FileTreeCheck, a: a}
	// if an output directory is set concat the path of the old filetree
	if outputDirectory != "" && cfg.config.OldTreeFilePath != "" {
		cfg.config.OldTreeFilePath = path.Join(outputDirectory, cfg.config.OldTreeFilePath)
	}
	return &cfg
}

// inPath reports whether checkPath falls under any of the configured
// path prefixes (plain prefix match, paths already cleaned in New).
func inPath(checkPath string, cfgPath []string) bool {
	for _, p := range cfgPath {
		if strings.HasPrefix(checkPath, p) {
			return true
		}
	}
	return false
}

// Start resets the current-scan tree.
func (state *fileTreeType) Start() {
	state.tree = make(map[string]fileInfoSaveType)
}

// Name returns the analyzer name used in reports.
func (state *fileTreeType) Name() string {
	return "FileTreeChecks"
}

// readOldTree loads the previously saved tree from OldTreeFilePath into
// tree.oldTree, keyed by full file path.
func (tree *fileTreeType) readOldTree() error {
	data, err := ioutil.ReadFile(tree.config.OldTreeFilePath)
	if err != nil {
		return err
	}
	var oldTree imageInfoSaveType
	err = json.Unmarshal(data, &oldTree)
	if err != nil {
		return err
	}
	tree.oldTree = make(map[string]fileInfoSaveType)
	for _, fi := range oldTree.Files {
		tree.oldTree[fi.Name] = fi
	}
	return nil
}

// saveTree writes the current scan's tree (plus image name/digest) as
// pretty-printed JSON to OldTreeFilePath + ".new".
func (tree *fileTreeType) saveTree() error {
	imageInfo := tree.a.ImageInfo()
	oldtree := imageInfoSaveType{
		ImageName:   imageInfo.ImageName,
		ImageDigest: imageInfo.ImageDigest,
	}
	for _, fi := range tree.tree {
		oldtree.Files = append(oldtree.Files, fi)
	}
	jdata, err := json.Marshal(oldtree)
	if err != nil {
		return err
	}
	// make json look pretty
	var prettyJson bytes.Buffer
	err = json.Indent(&prettyJson, jdata, "", "\t")
	if err != nil {
		return err
	}
	err = ioutil.WriteFile(tree.config.OldTreeFilePath+newFileTreeExt, prettyJson.Bytes(), 0644)
	if err != nil {
		return err
	}
	return nil
}

// CheckFile records one scanned file in the current tree. Regular files get
// a sha256 digest unless SkipFileDigest is set; everything else (and skipped
// files) is recorded with digest "0". A no-op when no old tree is configured.
func (state *fileTreeType) CheckFile(fi *fsparser.FileInfo, filepath string) error {
	if state.config.OldTreeFilePath == "" {
		return nil
	}
	fn := path.Join(filepath, fi.Name)
	digest := "0"
	if fi.IsFile() && !state.config.SkipFileDigest {
		digestRaw, err := state.a.FileGetSha256(fn)
		if err != nil {
			return err
		}
		digest = hex.EncodeToString(digestRaw)
	}
	state.tree[fn] = fileInfoSaveType{
		fsparser.FileInfo{
			Name:         fn,
			Size:         fi.Size,
			Uid:          fi.Uid,
			Gid:          fi.Gid,
			Mode:         fi.Mode,
			SELinuxLabel: fi.SELinuxLabel,
			Capabilities: fi.Capabilities,
			LinkTarget:   fi.LinkTarget,
		},
		digest,
	}
	return nil
}

// Finalize diffs the current tree against the old tree and reports added,
// removed and (optionally) changed files as informational findings. When any
// difference is found the updated tree is written to OldTreeFilePath + ".new"
// before reporting. Returns a small JSON report naming both tree files, or
// "" when the check is disabled.
func (state *fileTreeType) Finalize() string {
	if state.config.OldTreeFilePath == "" {
		return ""
	}
	var added []fileInfoSaveType
	var removed []fileInfoSaveType
	var changed []string
	// NOTE(review): a read/parse failure of the old tree is deliberately
	// ignored here; with an empty oldTree every current file is reported
	// as "new", which bootstraps the first run.
	_ = state.readOldTree()
	// find modified files
	for filepath, fi := range state.oldTree {
		// skip files if not in configured path
		if !inPath(filepath, state.config.CheckPath) {
			continue
		}
		_, ok := state.tree[filepath]
		if !ok {
			removed = append(removed, fi)
		} else {
			oFi := fi
			cFi := state.tree[filepath]
			// size and digest differences only count when enabled in the config
			if oFi.Mode != cFi.Mode ||
				oFi.Uid != cFi.Uid ||
				oFi.Gid != cFi.Gid ||
				oFi.LinkTarget != cFi.LinkTarget ||
				oFi.SELinuxLabel != cFi.SELinuxLabel ||
				!capability.CapsEqual(oFi.Capabilities, cFi.Capabilities) ||
				((oFi.Size != cFi.Size) && state.config.CheckFileSize) ||
				((oFi.Digest != cFi.Digest) && state.config.CheckFileDigest) {
				changed = append(changed, filepath)
			}
		}
	}
	// find new files
	for filepath, fi := range state.tree {
		// skip files if not in configured path
		if !inPath(filepath, state.config.CheckPath) {
			continue
		}
		_, ok := state.oldTree[filepath]
		if !ok {
			added = append(added, fi)
		}
	}
	treeUpdated := false
	if len(added) > 0 || len(removed) > 0 || (len(changed) > 0 && state.config.CheckPermsOwnerChange) {
		err := state.saveTree()
		if err != nil {
			panic("saveTree failed")
		}
		treeUpdated = true
	}
	for _, fi := range added {
		// NOTE(review): the selinux flag is hard-coded to true; it looks like it
		// was once derived from the global FsTypeOptions config — confirm
		fileInfoStr := fiToString(fi, true)
		state.a.AddInformational(fi.Name, fmt.Sprintf("CheckFileTree: new file: %s", fileInfoStr))
	}
	for _, fi := range removed {
		fileInfoStr := fiToString(fi, true) // selinux hard-coded, see note above in this function
		state.a.AddInformational(fi.Name, fmt.Sprintf("CheckFileTree: file removed: %s", fileInfoStr))
	}
	if state.config.CheckPermsOwnerChange {
		for _, filepath := range changed {
			fileInfoStrOld := fiToString(state.oldTree[filepath], true)
			fileInfoStrCur := fiToString(state.tree[filepath], true)
			state.a.AddInformational(state.tree[filepath].Name, fmt.Sprintf("CheckFileTree: file perms/owner/size/digest changed from: %s to: %s", fileInfoStrOld, fileInfoStrCur))
		}
	}
	if state.config.OldTreeFilePath != "" {
		type reportData struct {
			OldFileTreePath     string `json:"old_file_tree_path"`
			CurrentFileTreePath string `json:"current_file_tree_path,omitempty"`
		}
		newPath := ""
		if treeUpdated {
			newPath = state.config.OldTreeFilePath + newFileTreeExt
		}
		data := reportData{state.config.OldTreeFilePath, newPath}
		jdata, _ := json.Marshal(&data)
		return string(jdata)
	}
	return ""
}

// provide fileinfo as a human readable string
func fiToString(fi fileInfoSaveType, selinux bool) string {
	if selinux {
		return fmt.Sprintf("%o %d:%d %d %s SELinux label: %s", fi.Mode, fi.Uid, fi.Gid, fi.Size, fi.Digest, fi.SELinuxLabel)
	} else {
		return fmt.Sprintf("%o %d:%d %d %s", fi.Mode, fi.Uid, fi.Gid, fi.Size, fi.Digest)
	}
}


================================================
FILE: pkg/analyzer/filetree/filetree_test.go
================================================
/*
Copyright 2019-present, Cruise LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/ package filetree import ( "os" "strings" "testing" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type OffenderCallack func(fn string, reason string) type testAnalyzer struct { ocb OffenderCallack testfile string } func (a *testAnalyzer) AddData(key, value string) {} func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) { return fsparser.FileInfo{}, nil } func (a *testAnalyzer) RemoveFile(filepath string) error { return nil } func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) { return []byte(""), nil } func (a *testAnalyzer) FileGet(filepath string) (string, error) { return a.testfile, nil } func (a *testAnalyzer) AddOffender(filepath string, reason string) { } func (a *testAnalyzer) AddInformational(filepath string, reason string) { a.ocb(filepath, reason) } func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) { } func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} } func TestGlobal(t *testing.T) { a := &testAnalyzer{} cfg := ` [FileTreeCheck] OldTreeFilePath = "/tmp/blatreetest1337.json" CheckPath = ["/"] CheckPermsOwnerChange = true CheckFileSize = true CheckFileDigest = false ` g := New(cfg, a, "") g.Start() triggered := false a.ocb = func(fn string, reason string) { if strings.HasPrefix(reason, "CheckFileTree: new file:") { triggered = true } } fi := fsparser.FileInfo{Name: "test1"} err := g.CheckFile(&fi, "/") if err != nil { t.Errorf("CheckFile failed") } result := g.Finalize() if !triggered { t.Errorf("filetree check failed") } if result == "" { t.Errorf("Finalize should not return empty string") } // rename so we have input for the next test err = os.Rename("/tmp/blatreetest1337.json.new", "/tmp/blatreetest1337.json") if err != nil { t.Errorf("rename %s %s: failed", "/tmp/blatreetest1337.json.new", "/tmp/blatreetest1337.json") } 
// diff test g = New(cfg, a, "") g.Start() triggered = false a.ocb = func(fn string, reason string) { if strings.HasPrefix(reason, "CheckFileTree: file perms/owner/size/digest changed") { triggered = true } } fi = fsparser.FileInfo{Name: "test1", Uid: 1} err = g.CheckFile(&fi, "/") if err != nil { t.Errorf("CheckFile failed") } g.Finalize() if !triggered { t.Errorf("filetree check failed") } // delete test g = New(cfg, a, "") g.Start() triggered = false a.ocb = func(fn string, reason string) { if fn == "/test1" && strings.HasPrefix(reason, "CheckFileTree: file removed") { triggered = true } } g.Finalize() if !triggered { t.Errorf("filetree check failed") } os.Remove("/tmp/blatreetest1337.json") os.Remove("/tmp/blatreetest1337.json.new") os.Remove("/tmp/blatreetest1337.json.new.new") } func TestGlobalCheckPath1(t *testing.T) { a := &testAnalyzer{} cfg := ` [FileTreeCheck] OldTreeFilePath = "/tmp/blatreetest1337.json" CheckPath = [] CheckPermsOwnerChange = true CheckFileSize = true CheckFileDigest = false ` g := New(cfg, a, "") if len(g.config.CheckPath) != 0 { t.Error("CheckPath should ne empty") } } func TestGlobalCheckPath2(t *testing.T) { a := &testAnalyzer{} cfg := ` [FileTreeCheck] OldTreeFilePath = "/tmp/blatreetest1337.json" CheckPermsOwnerChange = true CheckFileSize = true CheckFileDigest = false ` g := New(cfg, a, "") if len(g.config.CheckPath) != 1 && g.config.CheckPath[0] != "/" { t.Error("CheckPath should be: /") } } ================================================ FILE: pkg/analyzer/globalfilechecks/globalfilechecks.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package globalfilechecks import ( "fmt" "path" "github.com/BurntSushi/toml" "github.com/bmatcuk/doublestar" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type filePermsConfigType struct { Suid bool SuidAllowedList map[string]bool WorldWrite bool SELinuxLabel bool Uids map[int]bool Gids map[int]bool BadFiles map[string]bool BadFilesInformationalOnly bool FlagCapabilityInformationalOnly bool } type filePermsType struct { config *filePermsConfigType a analyzer.AnalyzerType } func New(config string, a analyzer.AnalyzerType) *filePermsType { type filePermsConfig struct { Suid bool SuidWhiteList []string // keep for backward compatibility SuidAllowedList []string WorldWrite bool SELinuxLabel bool Uids []int Gids []int BadFiles []string BadFilesInformationalOnly bool FlagCapabilityInformationalOnly bool } type fpc struct { GlobalFileChecks filePermsConfig } var conf fpc _, err := toml.Decode(config, &conf) if err != nil { panic("can't read config data: " + err.Error()) } configuration := filePermsConfigType{ Suid: conf.GlobalFileChecks.Suid, WorldWrite: conf.GlobalFileChecks.WorldWrite, SELinuxLabel: conf.GlobalFileChecks.SELinuxLabel, BadFilesInformationalOnly: conf.GlobalFileChecks.BadFilesInformationalOnly, FlagCapabilityInformationalOnly: conf.GlobalFileChecks.FlagCapabilityInformationalOnly, } configuration.SuidAllowedList = make(map[string]bool) for _, alfn := range conf.GlobalFileChecks.SuidAllowedList { configuration.SuidAllowedList[path.Clean(alfn)] = true } // keep for backward compatibility for _, wlfn := range 
conf.GlobalFileChecks.SuidWhiteList { configuration.SuidAllowedList[path.Clean(wlfn)] = true } configuration.Uids = make(map[int]bool) for _, uid := range conf.GlobalFileChecks.Uids { configuration.Uids[uid] = true } configuration.Gids = make(map[int]bool) for _, gid := range conf.GlobalFileChecks.Gids { configuration.Gids[gid] = true } configuration.BadFiles = make(map[string]bool) for _, bf := range conf.GlobalFileChecks.BadFiles { configuration.BadFiles[path.Clean(bf)] = true } cfg := filePermsType{&configuration, a} return &cfg } func (state *filePermsType) Start() {} func (state *filePermsType) Finalize() string { return "" } func (state *filePermsType) Name() string { return "GlobalFileChecks" } func (state *filePermsType) CheckFile(fi *fsparser.FileInfo, fpath string) error { if state.config.Suid { if fi.IsSUid() || fi.IsSGid() { if _, ok := state.config.SuidAllowedList[path.Join(fpath, fi.Name)]; !ok { state.a.AddOffender(path.Join(fpath, fi.Name), "File is SUID, not allowed") } } } if state.config.WorldWrite { if fi.IsWorldWrite() && !fi.IsLink() && !fi.IsDir() { state.a.AddOffender(path.Join(fpath, fi.Name), "File is WorldWriteable, not allowed") } } if state.config.SELinuxLabel { if fi.SELinuxLabel == fsparser.SELinuxNoLabel { state.a.AddOffender(path.Join(fpath, fi.Name), "File does not have SELinux label") } } if len(state.config.Uids) > 0 { if _, ok := state.config.Uids[fi.Uid]; !ok { state.a.AddOffender(path.Join(fpath, fi.Name), fmt.Sprintf("File Uid not allowed, Uid = %d", fi.Uid)) } } if len(state.config.Gids) > 0 { if _, ok := state.config.Gids[fi.Gid]; !ok { state.a.AddOffender(path.Join(fpath, fi.Name), fmt.Sprintf("File Gid not allowed, Gid = %d", fi.Gid)) } } if state.config.FlagCapabilityInformationalOnly { if len(fi.Capabilities) > 0 { state.a.AddInformational(path.Join(fpath, fi.Name), fmt.Sprintf("Capabilities found: %s", fi.Capabilities)) } } for item := range state.config.BadFiles { fullpath := fi.Name // match the fullpath if it starts 
with "/" if item[0] == '/' { fullpath = path.Join(fpath, fi.Name) } m, err := doublestar.Match(item, fullpath) if err != nil { return err } if m { msg := "File not allowed" if item != fullpath { msg = fmt.Sprintf("File not allowed for pattern: %s", item) } if state.config.BadFilesInformationalOnly { state.a.AddInformational(path.Join(fpath, fi.Name), msg) } else { state.a.AddOffender(path.Join(fpath, fi.Name), msg) } } } return nil } ================================================ FILE: pkg/analyzer/globalfilechecks/globalfilechecks_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package globalfilechecks import ( "testing" "github.com/cruise-automation/fwanalyzer/pkg/analyzer" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type OffenderCallack func(fn string) type testAnalyzer struct { ocb OffenderCallack } func (a *testAnalyzer) AddData(key, value string) {} func (a *testAnalyzer) GetFileInfo(filepath string) (fsparser.FileInfo, error) { return fsparser.FileInfo{}, nil } func (a *testAnalyzer) RemoveFile(filepath string) error { return nil } func (a *testAnalyzer) FileGetSha256(filepath string) ([]byte, error) { return []byte{}, nil } func (a *testAnalyzer) FileGet(filepath string) (string, error) { return "", nil } func (a *testAnalyzer) AddOffender(filepath string, reason string) { a.ocb(filepath) } func (a *testAnalyzer) AddInformational(filepath string, reason string) { a.ocb(filepath) } func (a *testAnalyzer) CheckAllFilesWithPath(cb analyzer.AllFilesCallback, cbdata analyzer.AllFilesCallbackData, filepath string) { } func (a *testAnalyzer) ImageInfo() analyzer.AnalyzerReport { return analyzer.AnalyzerReport{} } func TestGlobal(t *testing.T) { a := &testAnalyzer{} cfg := ` [GlobalFileChecks] Suid = true SuidAllowedList = ["/shouldbesuid"] SeLinuxLabel = true WorldWrite = true Uids = [0] Gids = [0] BadFiles = ["/file99", "/file1", "**.h"] FlagCapabilityInformationalOnly = true ` g := New(cfg, a) g.Start() tests := []struct { fi fsparser.FileInfo path string shouldTrigger bool }{ {fsparser.FileInfo{Name: "suid", Mode: 0004000}, "/", true}, {fsparser.FileInfo{Name: "sgid", Mode: 0002000}, "/", true}, {fsparser.FileInfo{Name: "sgid", Mode: 0000000}, "/", false}, // allowed suid files {fsparser.FileInfo{Name: "shouldbesuid", Mode: 0004000}, "/", false}, // World write {fsparser.FileInfo{Name: "ww", Mode: 0007}, "/", true}, {fsparser.FileInfo{Name: "ww", Mode: 0004}, "/", false}, {fsparser.FileInfo{Name: "label", SELinuxLabel: "-"}, "/", true}, {fsparser.FileInfo{Name: "label", SELinuxLabel: "label"}, "/", false}, 
{fsparser.FileInfo{Name: "uidfile", SELinuxLabel: "uidfile", Uid: 1, Gid: 1}, "/", true}, // Bad files {fsparser.FileInfo{Name: "file99", SELinuxLabel: "uidfile"}, "/", true}, {fsparser.FileInfo{Name: "test.h", SELinuxLabel: "uidfile"}, "/usr/", true}, // Capability {fsparser.FileInfo{Name: "ping", Capabilities: []string{"cap_net_admin+p"}}, "/usr/bin", true}, } var triggered bool var err error for _, test := range tests { triggered = false a.ocb = func(fn string) { triggered = true } err = g.CheckFile(&test.fi, test.path) if err != nil { t.Errorf("CheckFile failed") } if triggered != test.shouldTrigger { t.Errorf("%s test failed", test.fi.Name) } } g.Finalize() } ================================================ FILE: pkg/capability/capability.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package capability import ( "bytes" "encoding/binary" "fmt" "strconv" "strings" ) /* * Consts and structs are based on the linux kernel headers for capabilities * see: https://github.com/torvalds/linux/blob/master/include/uapi/linux/capability.h */ const ( CAP_CHOWN = 0 CAP_DAC_OVERRIDE = 1 CAP_DAC_READ_SEARCH = 2 CAP_FOWNER = 3 CAP_FSETID = 4 CAP_KILL = 5 CAP_SETGID = 6 CAP_SETUID = 7 CAP_SETPCAP = 8 CAP_LINUX_IMMUTABLE = 9 CAP_NET_BIND_SERVICE = 10 CAP_NET_BROADCAST = 11 CAP_NET_ADMIN = 12 CAP_NET_RAW = 13 CAP_IPC_LOCK = 14 CAP_IPC_OWNER = 15 CAP_SYS_MODULE = 16 CAP_SYS_RAWIO = 17 CAP_SYS_CHROOT = 18 CAP_SYS_PTRACE = 19 CAP_SYS_PACCT = 20 CAP_SYS_ADMIN = 21 CAP_SYS_BOOT = 22 CAP_SYS_NICE = 23 CAP_SYS_RESOURCE = 24 CAP_SYS_TIME = 25 CAP_SYS_TTY_CONFIG = 26 CAP_MKNOD = 27 CAP_LEASE = 28 CAP_AUDIT_WRITE = 29 CAP_AUDIT_CONTROL = 30 CAP_SETFCAP = 31 CAP_MAC_OVERRIDE = 32 CAP_MAC_ADMIN = 33 CAP_SYSLOG = 34 CAP_WAKE_ALARM = 35 CAP_BLOCK_SUSPEND = 36 CAP_AUDIT_READ = 37 CAP_LAST_CAP = CAP_AUDIT_READ ) var CapabilityNames = []string{ "CAP_CHOWN", "CAP_DAC_OVERRIDE", "CAP_DAC_READ_SEARCH", "CAP_FOWNER", "CAP_FSETID", "CAP_KILL", "CAP_SETGID", "CAP_SETUID", "CAP_SETPCAP", "CAP_LINUX_IMMUTABLE", "CAP_NET_BIND_SERVICE", "CAP_NET_BROADCAST", "CAP_NET_ADMIN", "CAP_NET_RAW", "CAP_IPC_LOCK", "CAP_IPC_OWNER", "CAP_SYS_MODULE", "CAP_SYS_RAWIO", "CAP_SYS_CHROOT", "CAP_SYS_PTRACE", "CAP_SYS_PACCT", "CAP_SYS_ADMIN", "CAP_SYS_BOOT", "CAP_SYS_NICE", "CAP_SYS_RESOURCE", "CAP_SYS_TIME", "CAP_SYS_TTY_CONFIG", "CAP_MKNOD", "CAP_LEASE", "CAP_AUDIT_WRITE", "CAP_AUDIT_CONTROL", "CAP_SETFCAP", "CAP_MAC_OVERRIDE", "CAP_MAC_ADMIN", "CAP_SYSLOG", "CAP_WAKE_ALARM", "CAP_BLOCK_SUSPEND", "CAP_AUDIT_READ"} const capOffset = 2 const CapByteSizeMax = 24 const ( CAP_PERMITTED = 0 CAP_INHERITABLE = 1 ) /* * capabilities are store in the vfs_cap_data struct * struct vfs_cap_data { __le32 magic_etc; // Little endian struct { __le32 permitted; // Little endian __le32 inheritable; // Little endian } 
data[VFS_CAP_U32]; }; */ // https://github.com/torvalds/linux/blob/master/include/uapi/linux/capability.h#L373 func capValid(cap uint32) bool { // cap >= 0 && cap <= CAP_LAST_CAP return cap <= CAP_LAST_CAP } // https://github.com/torvalds/linux/blob/master/include/uapi/linux/capability.h#L379 func capIndex(cap uint32) int { return int(cap>>5) * capOffset } // https://github.com/torvalds/linux/blob/master/include/uapi/linux/capability.h#L380 func capMask(cap uint32) uint32 { return (1 << ((cap) & 31)) } func capHasCap(caps []uint32, cap uint32, capPerm int) bool { return caps[capIndex(cap)+capPerm]&capMask(cap) == capMask(cap) } // perm = 0 -> permitted // perm = 1 -> inheritable func capSet(caps []uint32, cap uint32, capPerm int) ([]uint32, error) { if !capValid(cap) { return nil, fmt.Errorf("capability is invalid") } caps[capIndex(cap)+capPerm] = caps[capIndex(cap)+capPerm] | capMask(cap) return caps, nil } func capToText(cap []uint32) []string { out := []string{} for i := range CapabilityNames { capPermitted := capHasCap(cap, uint32(i), CAP_PERMITTED) capInheritable := capHasCap(cap, uint32(i), CAP_INHERITABLE) if capPermitted || capInheritable { var capStr strings.Builder capStr.WriteString(strings.ToLower(CapabilityNames[i])) capStr.WriteString("+") if capPermitted { capStr.WriteString("p") } if capInheritable { capStr.WriteString("i") } out = append(out, capStr.String()) } } return out } func New(caps interface{}) ([]string, error) { cap := []string{} var capabilities []uint32 var err error switch capsVal := caps.(type) { case []byte: capabilities, err = capsParse(capsVal, 20) case string: capabilities, err = capsParseFromText(capsVal) default: return cap, nil } if err != nil { return cap, nil } return capToText(capabilities), nil } func capsParse(caps []byte, capsLen uint32) ([]uint32, error) { if capsLen%4 != 0 { return nil, fmt.Errorf("capability length bad") } // capabilities are stored in uint32 realCap := make([]uint32, capsLen/4) for i := 0; i < 
int(capsLen)/4; i++ { buf := bytes.NewBuffer(caps[i*4 : (i+1)*4]) var num uint32 err := binary.Read(buf, binary.LittleEndian, &num) if err != nil { return nil, err } realCap[i] = uint32(num) } // strip magic (first uint32 in the array) return realCap[1:], nil } // parse caps from string: 0x2000001,0x1000,0x0,0x0,0x0 // this is the format produced by e2tools and unsquashfs func capsParseFromText(capsText string) ([]uint32, error) { capsInts := strings.Split(capsText, ",") capsParsedInts := make([]uint32, 5) for i, val := range capsInts { intVal, err := strconv.ParseUint(val[2:], 16, 32) if err != nil { return nil, err } capsParsedInts[i] = uint32(intVal) } capsBytes := make([]byte, 20) for i := range capsParsedInts { binary.LittleEndian.PutUint32(capsBytes[(i)*4:], capsParsedInts[i]) } return capsParse(capsBytes, 20) } func CapsEqual(a, b []string) bool { if len(a) != len(b) { return false } aM := make(map[string]bool) for _, cap := range a { aM[cap] = true } bM := make(map[string]bool) for _, cap := range b { bM[cap] = true } for cap := range aM { if _, ok := bM[cap]; !ok { return false } } return true } ================================================ FILE: pkg/capability/capability_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package capability import ( "strings" "testing" ) func TestCap(t *testing.T) { if !strings.EqualFold("cap_net_admin+p", capToText([]uint32{0x1000, 0x0, 0x0, 0x0})[0]) { t.Error("bad cap") } cap := []uint32{0, 0, 0, 0} cap, _ = capSet(cap, CAP_DAC_OVERRIDE, CAP_PERMITTED) cap, _ = capSet(cap, CAP_AUDIT_READ, CAP_INHERITABLE) if !capHasCap(cap, CAP_DAC_OVERRIDE, CAP_PERMITTED) { t.Error("bad cap") } if !capHasCap(cap, CAP_AUDIT_READ, CAP_INHERITABLE) { t.Error("bad cap") } } func TestCapsParse(t *testing.T) { caps, err := capsParse([]byte{0, 0, 0, 0, 0, 0x10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, 20) if err != nil { t.Error(err) } if !capHasCap(caps, CAP_NET_ADMIN, CAP_PERMITTED) { t.Error("bad cap") } } func TestCapsStringParse(t *testing.T) { caps, err := capsParseFromText("0x2000001,0x1000,0x0,0x0,0x0") if err != nil { t.Error(err) } if !capHasCap(caps, CAP_NET_ADMIN, CAP_PERMITTED) { t.Error("bad cap") } } func TestCapMain(t *testing.T) { caps, err := New("0x2000001,0x1000,0x0,0x0,0x0") if err != nil { t.Error(err) } if !strings.EqualFold(caps[0], "cap_net_admin+p") { t.Error("bad cap") } caps2, err := New([]byte{0, 0, 0, 0, 0, 0x10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) if err != nil { t.Error(err) } if !strings.EqualFold(caps2[0], "cap_net_admin+p") { t.Error("bad cap") } } ================================================ FILE: pkg/cpioparser/cpioparser.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. */ package cpioparser import ( "errors" "fmt" "os" "os/exec" "path" "regexp" "strconv" "strings" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) const ( cpioCmd = "cpio" cpCmd = "cp" MIN_LINE_LENGTH = 25 ) type CpioParser struct { fileInfoReg *regexp.Regexp devInfoReg *regexp.Regexp fileLinkReg *regexp.Regexp imagepath string files map[string][]fsparser.FileInfo fixDirs bool } func New(imagepath string, fixDirs bool) *CpioParser { parser := &CpioParser{ //lrwxrwxrwx 1 0 0 19 Apr 24 2019 lib/libnss_dns.so.2 -> libnss_dns-2.18.so //-rwxrwxrwx 1 0 0 19 Apr 24 13:37 lib/lib.c fileInfoReg: regexp.MustCompile( `^([\w-]+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\w+\s+\d+\s+[\d:]+)\s+(.*)$`), // crw-r--r-- 1 0 0 4, 64 Apr 24 2019 dev/ttyS0 devInfoReg: regexp.MustCompile( `^([\w-]+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+),\s+(\d+)\s+(\w+\s+\d+\s+[\d:]+)\s+(.*)$`), fileLinkReg: regexp.MustCompile(`(\S+)\s->\s(\S+)`), imagepath: imagepath, fixDirs: fixDirs, } return parser } func (p *CpioParser) ImageName() string { return p.imagepath } var modeFlags = []struct { pos int chr byte val uint64 }{ {0, '-', fsparser.S_IFREG}, {0, 's', fsparser.S_IFSOCK}, {0, 'l', fsparser.S_IFLNK}, {0, 'b', fsparser.S_IFBLK}, {0, 'd', fsparser.S_IFDIR}, {0, 'c', fsparser.S_IFCHR}, {0, 'p', fsparser.S_IFIFO}, {1, 'r', fsparser.S_IRUSR}, {2, 'w', fsparser.S_IWUSR}, {3, 'x', fsparser.S_IXUSR}, {3, 's', fsparser.S_IXUSR | fsparser.S_ISUID}, {3, 'S', fsparser.S_ISUID}, {4, 'r', fsparser.S_IRGRP}, {5, 'w', fsparser.S_IWGRP}, {6, 'x', fsparser.S_IXGRP}, {6, 's', fsparser.S_IXGRP | fsparser.S_ISGID}, {6, 'S', fsparser.S_ISGID}, {7, 'r', fsparser.S_IROTH}, {8, 'w', fsparser.S_IWOTH}, {9, 'x', fsparser.S_IXOTH}, {9, 't', fsparser.S_IXOTH | fsparser.S_ISVTX}, {9, 'T', fsparser.S_ISVTX}, } const ( FILE_MODE_STR_LEN = 10 // such as "-rw-r--r--" ) func parseMode(mode string) (uint64, error) { var m uint64 if len(mode) 
!= FILE_MODE_STR_LEN { return 0, fmt.Errorf("parseMode: invalid mode string %s", mode) } for _, f := range modeFlags { if mode[f.pos] == f.chr { m |= f.val } } return m, nil } // Ensure directory and file names are consistent, with no relative parts // or trailing slash on directory names. func normalizePath(filepath string) (dir string, name string) { dir, name = path.Split(path.Clean(filepath)) dir = path.Clean(dir) return } const ( NAME_IDX_NORMAL_FILE = 7 NAME_IDX_DEVICE_FILE = 8 ) func (p *CpioParser) parseFileLine(line string) (string, fsparser.FileInfo, error) { reg := p.fileInfoReg nameIdx := NAME_IDX_NORMAL_FILE dirpath := "" if strings.HasPrefix(line, "b") || strings.HasPrefix(line, "c") { reg = p.devInfoReg nameIdx = NAME_IDX_DEVICE_FILE } res := reg.FindAllStringSubmatch(line, -1) var fi fsparser.FileInfo // only normal files have a size if nameIdx == NAME_IDX_NORMAL_FILE { size, _ := strconv.Atoi(res[0][5]) fi.Size = int64(size) } fi.Mode, _ = parseMode(res[0][1]) fi.Uid, _ = strconv.Atoi(res[0][3]) fi.Gid, _ = strconv.Atoi(res[0][4]) // cpio returns relative pathnames so add leading "/" fi.Name = "/" + res[0][nameIdx] // fill in linktarget if fi.IsLink() && strings.Contains(fi.Name, "->") { rlnk := p.fileLinkReg.FindAllStringSubmatch(fi.Name, -1) if rlnk == nil { return "", fsparser.FileInfo{}, fmt.Errorf("can't parse LinkTarget from %s", fi.Name) } fi.Name = rlnk[0][1] fi.LinkTarget = rlnk[0][2] } // handle root directory if fi.Name == "/." { dirpath = "." fi.Name = "." } else { dirpath, fi.Name = normalizePath(fi.Name) } return dirpath, fi, nil } // GetDirInfo returns information on the specified directory. func (p *CpioParser) GetDirInfo(dirpath string) ([]fsparser.FileInfo, error) { if err := p.loadFileList(); err != nil { return nil, err } return p.files[path.Clean(dirpath)], nil } // GetFileInfo returns information on the specified file. 
func (p *CpioParser) GetFileInfo(filepath string) (fsparser.FileInfo, error) { if err := p.loadFileList(); err != nil { return fsparser.FileInfo{}, err } dirpath, name := normalizePath(filepath) // the root is stored as "." if dirpath == "/" && name == "" { dirpath = "." name = "." } dir := p.files[dirpath] for _, fi := range dir { if fi.Name == name { return fi, nil } } return fsparser.FileInfo{}, fmt.Errorf("Can't find file %s", filepath) } func (p *CpioParser) loadFileList() error { if p.files != nil { return nil } out, err := exec.Command("sh", "-c", cpioCmd+" -tvn --quiet < "+p.imagepath).CombinedOutput() if err != nil { if err.Error() != errors.New("exit status 2").Error() { fmt.Fprintf(os.Stderr, "getDirList: >%s<", err) return err } } return p.loadFileListFromString(string(out)) } func (p *CpioParser) loadFileListFromString(rawFileList string) error { p.files = make(map[string][]fsparser.FileInfo) lines := strings.Split(rawFileList, "\n") for _, line := range lines { if len(line) < MIN_LINE_LENGTH { continue } if strings.HasPrefix(line, "cpio") { continue } path, fi, err := p.parseFileLine(line) if err == nil { dirfiles := p.files[path] dirfiles = append(dirfiles, fi) p.files[path] = dirfiles if p.fixDirs { p.fixDir(path, fi.Name) } } } return nil } /* * With cpio it is possible that a file exists in a directory that does not have its own entry. * e.g. "dev/tty6" exists in the cpio but there is no entry for "dev" * This function creates the missing directories in the internal structure. 
*/ func (p *CpioParser) fixDir(dir string, name string) { if dir == "/" { return } basename := path.Base(dir) dirname := path.Dir(dir) // check that all dirname parts exist if strings.Contains(dirname, "/") { p.fixDir(dirname, basename) } dirExists := false for _, f := range p.files[dirname] { if f.Name == basename { dirExists = true } } if !dirExists { dirfiles := p.files[dirname] dirfiles = append(dirfiles, fsparser.FileInfo{Name: basename, Mode: 040755, Uid: 0, Gid: 0, Size: 0}) p.files[dirname] = dirfiles } } // CopyFile copies the specified file to the specified destination. func (p *CpioParser) CopyFile(filepath string, dstdir string) bool { out, err := exec.Command("sh", "-c", cpioCmd+" -i --to-stdout "+filepath[1:]+" < "+p.imagepath+" > "+dstdir).CombinedOutput() if err != nil { if err.Error() != errors.New("exit status 2").Error() { fmt.Fprintf(os.Stderr, "cpio failed: %v: %s\n", err, out) return false } } return true } func (p *CpioParser) Supported() bool { if _, err := exec.LookPath(cpioCmd); err != nil { return false } if _, err := exec.LookPath(cpCmd); err != nil { return false } return true } ================================================ FILE: pkg/cpioparser/cpioparser_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package cpioparser import ( "os" "testing" ) type testData struct { Line string Mode uint64 Dir string Name string IsFile bool LinkTarget string } func TestParseLine(t *testing.T) { testImage := "../../test/test.cpio" p := New(testImage, true) testdata := []testData{ {`-rw-r--r-- 1 0 0 21 Apr 11 2008 etc/motd`, 0100644, "/etc/", "motd", true, ""}, {`-rw-r--r-- 1 0 0 21 Apr 11 13:37 etc/mxtd`, 0100644, "/etc/", "mxtd", true, ""}, {`crw-r--r-- 1 0 0 4, 64 Apr 24 2019 dev/ttyS0`, 020644, "/dev", "ttyS0", false, ""}, {`lrwxrwxrwx 1 0 0 19 Apr 24 2019 lib/libcrypto.so.1.0.0 -> libcrypto-1.0.0.so`, 0120777, "/lib", "libcrypto.so.1.0.0", false, "libcrypto-1.0.0.so"}, {`drwxr-xr-x 2 0 0 0 Aug 8 18:53 .`, 040755, ".", ".", false, ""}, } for _, test := range testdata { dir, res, err := p.parseFileLine(test.Line) if err != nil { t.Error(err) } if res.Mode != test.Mode { t.Errorf("bad file mode: %o", res.Mode) } if dir != test.Dir && res.Name != test.Name { t.Errorf("name error: %s %s", dir, res.Name) } if res.IsFile() != test.IsFile { t.Error("isFile bad") } if test.LinkTarget != res.LinkTarget { t.Errorf("bad link target: %s", res.LinkTarget) } } } func TestFixDir(t *testing.T) { testImage := "../../test/test.cpio" p := New(testImage, true) testdata := ` crw-r--r-- 1 0 0 3, 1 Jan 13 17:57 dev/ttyp1 crw-r--r-- 1 0 0 3, 1 Jan 13 17:57 dev/x/ttyp1` err := p.loadFileListFromString(testdata) if err != nil { t.Error(err) } ok := false for _, fn := range p.files["/"] { if fn.Name == "dev" { ok = true } } if !ok { t.Errorf("dir '/dev' not found") } ok = false for _, fn := range p.files["/dev"] { if fn.Name == "x" { ok = true } } if !ok { t.Errorf("dir '/dev/x' not found") } } func TestFull(t *testing.T) { testImage := "../../test/test.cpio" p := New(testImage, false) fi, err := p.GetFileInfo("/") if err != nil { t.Error(err) } if !fi.IsDir() { t.Errorf("/ should be dir") } dir, err := p.GetDirInfo("/") if err != nil { t.Error(err) } if len(dir) < 1 { t.Errorf("/ should not be 
empty") } fi, err = p.GetFileInfo("/etc/fstab") if err != nil { t.Error(err) } if !fi.IsFile() { t.Error("should be a file") } if fi.Name != "fstab" { t.Errorf("name bad: %s", fi.Name) } if fi.IsDir() { t.Error("should be a file") } if fi.Size != 385 { t.Error("bad size") } if fi.Uid != 1000 || fi.Gid != 1000 { t.Error("bad owner/group") } fi, err = p.GetFileInfo("/dev/tty6") if err != nil { t.Error(err) } if fi.IsFile() { t.Error("should not be a file") } if fi.Name != "tty6" { t.Errorf("name bad: %s", fi.Name) } if fi.IsDir() { t.Error("should not be a dir") } if fi.Size != 0 { t.Error("bad size") } if fi.Uid != 0 || fi.Gid != 0 { t.Error("bad owner/group") } testfilename := "testfile123" if !p.CopyFile("/etc/fstab", testfilename) { t.Error("failed to copy fstab") } stat, err := os.Stat(testfilename) if err != nil { t.Error(err) } if stat.Size() != 385 { t.Error("bad file size after copy out") } os.Remove(testfilename) } ================================================ FILE: pkg/dirparser/dirparser.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package dirparser import ( "fmt" "os" "os/exec" "path" "path/filepath" "syscall" "github.com/cruise-automation/fwanalyzer/pkg/capability" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) const ( cpCli string = "cp" ) type DirParser struct { imagepath string } func New(imagepath string) *DirParser { var global DirParser global.imagepath = imagepath return &global } func (dir *DirParser) GetDirInfo(dirpath string) ([]fsparser.FileInfo, error) { files := make([]fsparser.FileInfo, 0) filepath := path.Join(dir.imagepath, dirpath) fp, err := os.Open(filepath) if err != nil { return files, err } defer fp.Close() names, err := fp.Readdirnames(0) if err != nil { return files, err } for _, fname := range names { fi, err := dir.GetFileInfo(path.Join(dirpath, fname)) fi.Name = fname if err != nil { return files, err } files = append(files, fi) } return files, nil } func (dir *DirParser) GetFileInfo(dirpath string) (fsparser.FileInfo, error) { var fi fsparser.FileInfo fpath := path.Join(dir.imagepath, dirpath) var fileStat syscall.Stat_t err := syscall.Lstat(fpath, &fileStat) if err != nil { return fi, err } fi.Name = filepath.Base(dirpath) fi.Mode = uint64(fileStat.Mode) fi.Uid = int(fileStat.Uid) fi.Gid = int(fileStat.Gid) fi.SELinuxLabel = fsparser.SELinuxNoLabel fi.Size = fileStat.Size capsBytes := make([]byte, capability.CapByteSizeMax) capsSize, _ := syscall.Getxattr(fpath, "security.capability", capsBytes) // ignore err since we only care about the returned size if capsSize > 0 { fi.Capabilities, err = capability.New(capsBytes) if err != nil { fmt.Println(err) } } if fi.IsLink() { fi.LinkTarget, err = os.Readlink(fpath) if err != nil { return fi, err } } return fi, nil } // copy (extract) file out of the FS into dest dir func (dir *DirParser) CopyFile(filepath string, dstdir string) bool { _, err := dir.GetFileInfo(filepath) if err != nil { return false } err = exec.Command(cpCli, "-a", path.Join(dir.imagepath, filepath), dstdir).Run() if err != nil { 
fmt.Fprintf(os.Stderr, "%s -a %s %s: failed", cpCli, path.Join(dir.imagepath, filepath), dstdir) return false } return true } func (dir *DirParser) ImageName() string { return dir.imagepath } func (f *DirParser) Supported() bool { _, err := exec.LookPath(cpCli) return err == nil } ================================================ FILE: pkg/dirparser/dirparser_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package dirparser import ( "fmt" "os" "strings" "testing" ) var d *DirParser func TestMain(t *testing.T) { testImage := "../../test/" d = New(testImage) if d.ImageName() != testImage { t.Errorf("ImageName returned bad name") } } func TestGetDirInfo(t *testing.T) { dir, err := d.GetDirInfo("/") if err != nil { t.Errorf("GetDirInfo failed") } for _, i := range dir { if i.Name == "." || i.Name == ".." { t.Errorf(". or .. 
should not appear in dir listing") } } output_file := "/tmp/dirfs_test_file" if !d.CopyFile("test.img", output_file) { t.Errorf("copyfile returned false") } if _, err := os.Stat(output_file); os.IsNotExist(err) { t.Errorf("%s", err) } else { os.Remove(output_file) } } func TestGetFileInfo(t *testing.T) { tests := []struct { filePath string isFile bool isDir bool filename string }{ {"/test.img", true, false, "test.img"}, {"/", false, true, "/"}, {"/testdir", false, true, "testdir"}, } for _, test := range tests { fi, err := d.GetFileInfo(test.filePath) if err != nil { t.Errorf("GetFileInfo failed") } if fi.IsFile() != test.isFile { t.Errorf("GetFileInfo failed, isFile != %v", test.isFile) } if fi.IsDir() != test.isDir { t.Errorf("GetFileInfo failed, isDir != %v", test.isDir) } if fi.Name != test.filename { t.Errorf("filename does not match: %s", fi.Name) } } fi, err := d.GetFileInfo("/testlink") if err != nil { t.Errorf("GetFileInfo failed") } if !fi.IsLink() { t.Errorf("GetFileInfo failed, not a link") } if fi.Name != "testlink" { t.Errorf("GetFileInfo failed, incorrect link name: %s", fi.Name) } if fi.LinkTarget != "testdir" { t.Errorf("GetFileInfo failed, incorrect link target: %s", fi.LinkTarget) } } func TestCapability(t *testing.T) { fi, err := d.GetFileInfo("/test.cap.file") if err != nil { t.Error(err) } fmt.Println(fi.Capabilities) if len(fi.Capabilities) == 0 || !strings.EqualFold(fi.Capabilities[0], "cap_net_admin+p") { t.Error("capability test failed: likely need to run 'sudo setcap cap_net_admin+p test/test.cap.file'") } } ================================================ FILE: pkg/extparser/extparser.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package extparser import ( "fmt" "os" "os/exec" "path/filepath" "regexp" "strconv" "strings" "github.com/cruise-automation/fwanalyzer/pkg/capability" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type Ext2Parser struct { fileinfoReg *regexp.Regexp regexString string selinux bool capabilities bool imagepath string } const ( e2ToolsCp = "e2cp" e2ToolsLs = "e2ls" ) func New(imagepath string, selinux, capabilities bool) *Ext2Parser { parser := &Ext2Parser{ // 365 120777 0 0 7 12-Jul-2018 10:15 true regexString: `^\s*(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+-\w+-\d+)\s+(\d+:\d+)\s+([\S ]+)`, imagepath: imagepath, selinux: false, capabilities: false, } if selinux && seLinuxSupported() { parser.enableSeLinux() } if capabilities && capabilitiesSupported() { parser.enableCapabilities() } parser.fileinfoReg = regexp.MustCompile(parser.regexString) return parser } func (e *Ext2Parser) ImageName() string { return e.imagepath } func (e *Ext2Parser) enableSeLinux() { // with selinux support (-Z) // 2600 100750 0 2000 1041 1-Jan-2009 03:00 init.environ.rc u:object_r:rootfs:s0 // `^\s*(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+-\w+-\d+)\s+(\d+:\d+)\s+(\S+)\s+(\S+)`) // append selinux part e.regexString = e.regexString + `\t\s+(\S+)` e.selinux = true } func (e *Ext2Parser) enableCapabilities() { // with capabilities support (-C) // 2600 100750 0 2000 1041 1-Jan-2009 03:00 init.environ.rc 0x2000001,0x0,0x0,0x0,0x0 // `^\s*(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+-\w+-\d+)\s+(\d+:\d+)\s+(\S+)\s+(\S+)`) // tab is the separator for security options if !e.selinux { e.regexString = 
e.regexString + `\t` } // append capability part e.regexString = e.regexString + `\s+(\S+)` e.capabilities = true } func (e *Ext2Parser) parseFileLine(line string) fsparser.FileInfo { res := e.fileinfoReg.FindAllStringSubmatch(line, -1) var fi fsparser.FileInfo size, _ := strconv.Atoi(res[0][5]) fi.Size = int64(size) fi.Mode, _ = strconv.ParseUint(res[0][2], 8, 32) fi.Uid, _ = strconv.Atoi(res[0][3]) fi.Gid, _ = strconv.Atoi(res[0][4]) fi.Name = res[0][8] if fi.IsLink() && strings.Contains(fi.Name, " -> ") { parts := strings.Split(fi.Name, " -> ") fi.Name = parts[0] fi.LinkTarget = parts[1] } if e.selinux { fi.SELinuxLabel = res[0][9] } else { fi.SELinuxLabel = fsparser.SELinuxNoLabel } if e.capabilities { idx := 9 if e.selinux { idx = 10 } if res[0][idx] != "-" { fi.Capabilities, _ = capability.New(res[0][idx]) } } return fi } // ignoreDot=true: will filter out "." and ".." files from the directory listing func (e *Ext2Parser) getDirList(dirpath string, ignoreDot bool) ([]fsparser.FileInfo, error) { arg := fmt.Sprintf("%s:%s", e.imagepath, dirpath) params := "-la" if e.selinux { params += "Z" } if e.capabilities { params += "C" } out, err := exec.Command(e2ToolsLs, params, arg).CombinedOutput() if err != nil { // do NOT print file not found error if !strings.EqualFold(string(out), "File not found by ext2_lookup") { fmt.Fprintln(os.Stderr, err) } return nil, err } var dir []fsparser.FileInfo lines := strings.Split(string(out), "\n") for _, fline := range lines { if len(fline) > 1 && fline[0] != '>' { fi := e.parseFileLine(fline) // filter: . and .. if !ignoreDot || (fi.Name != "." 
&& fi.Name != "..") { dir = append(dir, fi) } } } return dir, nil } func (e *Ext2Parser) GetDirInfo(dirpath string) ([]fsparser.FileInfo, error) { dir, err := e.getDirList(dirpath, true) return dir, err } func (e *Ext2Parser) GetFileInfo(dirpath string) (fsparser.FileInfo, error) { var fi fsparser.FileInfo dir, err := e.getDirList(dirpath, false) if len(dir) == 1 { return dir[0], err } // GetFileInfo was called on a directory only return entry for "." for _, info := range dir { if info.Name == "." { info.Name = filepath.Base(dirpath) return info, nil } } return fi, fmt.Errorf("file not found: %s", dirpath) } func (e *Ext2Parser) CopyFile(filepath string, dstdir string) bool { src := fmt.Sprintf("%s:%s", e.imagepath, filepath) _, err := exec.Command(e2ToolsCp, src, dstdir).Output() if err != nil { fmt.Fprintln(os.Stderr, err) return false } return true } func (f *Ext2Parser) Supported() bool { _, err := exec.LookPath(e2ToolsLs) if err != nil { return false } _, err = exec.LookPath(e2ToolsCp) return err == nil } func seLinuxSupported() bool { out, _ := exec.Command(e2ToolsLs).CombinedOutput() // look for Z (selinux support) in "Usage: e2ls [-acDfilrtZ][-d dir] file" if strings.Contains(string(out), "Z") { return true } fmt.Fprintln(os.Stderr, "extparser: selinux not supported by your version of e2ls") return false } func capabilitiesSupported() bool { out, _ := exec.Command(e2ToolsLs).CombinedOutput() // look for C (capability support) in "Usage: e2ls [-acDfilrtZC][-d dir] file" if strings.Contains(string(out), "C") { return true } fmt.Fprintln(os.Stderr, "extparser: capabilities not supported by your version of e2ls") return false } ================================================ FILE: pkg/extparser/extparser_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package extparser import ( "os" "strings" "testing" ) var e *Ext2Parser func TestMain(t *testing.T) { testImage := "../../test/test.img" e = New(testImage, false, false) if e.ImageName() != testImage { t.Errorf("ImageName returned bad name") } } func TestGetDirList(t *testing.T) { dir, err := e.getDirList("/", true) if err != nil { t.Errorf("getDirList failed") } for _, i := range dir { if i.Name == "." || i.Name == ".." { t.Errorf(". or .. should not appear in dir listing") } } dir, err = e.getDirList("/", false) if err != nil { t.Errorf("getDirList failed") } dot := false dotdot := false for _, i := range dir { if i.Name == "." { dot = true } if i.Name == ".." { dotdot = true } } if !dot || !dotdot { t.Errorf(". and .. should appear in dir listing") } } func TestGetDirInfo(t *testing.T) { dir, err := e.GetDirInfo("/") if err != nil { t.Errorf("GetDirInfo failed") } for _, i := range dir { if i.Name == "." || i.Name == ".." { t.Errorf(". or .. 
should not appear in dir listing") } } if len(dir) == 0 { t.Errorf("root needs to be >= 1 entries due to lost+found") } if !e.CopyFile("/date1", ".") { t.Errorf("copyfile returned false") } if _, err := os.Stat("date1"); os.IsNotExist(err) { t.Errorf("%s", err) } else { os.Remove("date1") } } func TestGetFileInfo(t *testing.T) { tests := []struct { filePath string isFile bool isDir bool isLink bool linkTarget string filename string }{ {"/date1", true, false, false, "", "date1"}, {"/", false, true, false, "", "/"}, {"/dir1", false, true, false, "", "dir1"}, {"/file_link", false, false, true, "file2", "file_link"}, } for _, test := range tests { fi, err := e.GetFileInfo(test.filePath) if err != nil { t.Errorf("GetFileInfo failed: %v", err) } if fi.IsFile() != test.isFile { t.Errorf("GetFileInfo failed, isFile != %v", test.isFile) } if fi.IsLink() != test.isLink { t.Errorf("GetFileInfo failed, isLink != %v", test.isLink) } if fi.LinkTarget != test.linkTarget { t.Errorf("GetFileInfo failed, link target bad") } if fi.IsDir() != test.isDir { t.Errorf("GetFileInfo failed, isDir != %v", test.isDir) } if fi.Name != test.filename { t.Errorf("filename does not match: %s", fi.Name) } } } func TestCap(t *testing.T) { testImage := "../../test/cap_ext2.img" e = New(testImage, false, true) if !capabilitiesSupported() { t.Error("capabilities are not supported by e2ls") return } if e.ImageName() != testImage { t.Errorf("ImageName returned bad name") } fi, err := e.GetFileInfo("/test") if err != nil { t.Error(err) } if !strings.EqualFold(fi.Capabilities[0], "cap_net_admin+p") { t.Errorf("Capabilities %s don't match", fi.Capabilities) } } ================================================ FILE: pkg/fsparser/fsparser.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package fsparser type FsParser interface { // get directory listing. only returns files in the given directory and // does not recurse into subdirectories. GetDirInfo(dirpath string) ([]FileInfo, error) // get file/dir info GetFileInfo(dirpath string) (FileInfo, error) // copy (extract) file out of the FS into dest dir CopyFile(filepath string, dstDir string) bool // get imagename ImageName() string // determine if FS type is supported Supported() bool } type FileInfo struct { Size int64 `json:"size"` Mode uint64 `json:"mode"` Uid int `json:"uid"` Gid int `json:"gid"` SELinuxLabel string `json:"se_linux_label,omitempty"` Capabilities []string `json:"capabilities,omitempty"` Name string `json:"name"` LinkTarget string `json:"link_target,omitempty"` } const ( SELinuxNoLabel string = "-" ) const ( S_IFMT = 0170000 // bit mask for the file type bit fields S_IFSOCK = 0140000 // socket S_IFLNK = 0120000 // symbolic link S_IFREG = 0100000 // regular file S_IFBLK = 0060000 // block device S_IFDIR = 0040000 // directory S_IFCHR = 0020000 // character device S_IFIFO = 0010000 // FIFO S_ISUID = 0004000 // set-user-ID bit S_ISGID = 0002000 // set-group-ID bit (see below) S_ISVTX = 0001000 // sticky bit (see below) S_IRWXU = 00700 // mask for file owner permissions S_IRUSR = 00400 // owner has read permission S_IWUSR = 00200 // owner has write permission S_IXUSR = 00100 // owner has execute permission S_IRWXG = 00070 // mask for group permissions S_IRGRP = 00040 // group has read permission S_IWGRP = 00020 // group has write permission S_IXGRP = 00010 // group has execute permission S_IRWXO = 
00007 // mask for permissions for others (not in group) S_IROTH = 00004 // others have read permission S_IWOTH = 00002 // others have write permission S_IXOTH = 00001 // others have execute permission ) func (fi *FileInfo) IsSUid() bool { return (fi.Mode & S_ISUID) != 0 } func (fi *FileInfo) IsSGid() bool { return (fi.Mode & S_ISGID) != 0 } func (fi *FileInfo) IsWorldWrite() bool { return (fi.Mode & S_IWOTH) != 0 } func (fi *FileInfo) IsFile() bool { return (fi.Mode & S_IFMT) == S_IFREG } func (fi *FileInfo) IsDir() bool { return (fi.Mode & S_IFMT) == S_IFDIR } func (fi *FileInfo) IsLink() bool { return (fi.Mode & S_IFMT) == S_IFLNK } ================================================ FILE: pkg/squashfsparser/squashfsparser.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package squashfsparser import ( "fmt" "io/ioutil" "os" "os/exec" "os/user" "path" "regexp" "strconv" "strings" "github.com/cruise-automation/fwanalyzer/pkg/capability" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) const ( unsquashfsCmd = "unsquashfs" cpCmd = "cp" ) // SquashFSParser parses SquashFS filesystem images. type SquashFSParser struct { fileLineRegex *regexp.Regexp imagepath string files map[string][]fsparser.FileInfo securityInfo bool } func uidForUsername(username string) (int, error) { // First check to see if it's an int. If not, look it up by name. 
uid, err := strconv.Atoi(username) if err == nil { return uid, nil } u, err := user.Lookup(username) if err != nil { return 0, err } return strconv.Atoi(u.Uid) } func gidForGroup(group string) (int, error) { // First check to see if it's an int. If not, look it up by name. gid, err := strconv.Atoi(group) if err == nil { return gid, nil } g, err := user.LookupGroup(group) if err != nil { return 0, err } return strconv.Atoi(g.Gid) } // From table[] in https://github.com/plougher/squashfs-tools/blob/master/squashfs-tools/unsquashfs.c var modeFlags = []struct { pos int chr byte val uint64 }{ {0, '-', fsparser.S_IFREG}, {0, 's', fsparser.S_IFSOCK}, {0, 'l', fsparser.S_IFLNK}, {0, 'b', fsparser.S_IFBLK}, {0, 'd', fsparser.S_IFDIR}, {0, 'c', fsparser.S_IFCHR}, {0, 'p', fsparser.S_IFIFO}, {1, 'r', fsparser.S_IRUSR}, {2, 'w', fsparser.S_IWUSR}, {3, 'x', fsparser.S_IXUSR}, {3, 's', fsparser.S_IXUSR | fsparser.S_ISUID}, {3, 'S', fsparser.S_ISUID}, {4, 'r', fsparser.S_IRGRP}, {5, 'w', fsparser.S_IWGRP}, {6, 'x', fsparser.S_IXGRP}, {6, 's', fsparser.S_IXGRP | fsparser.S_ISGID}, {6, 'S', fsparser.S_ISGID}, {7, 'r', fsparser.S_IROTH}, {8, 'w', fsparser.S_IWOTH}, {9, 'x', fsparser.S_IXOTH}, {9, 't', fsparser.S_IXOTH | fsparser.S_ISVTX}, {9, 'T', fsparser.S_ISVTX}, } func parseMode(mode string) (uint64, error) { var m uint64 if len(mode) != 10 { return 0, fmt.Errorf("parseMode: invalid mode string %s", mode) } for _, f := range modeFlags { if mode[f.pos] == f.chr { m |= f.val } } return m, nil } func getExtractFile(dirpath string) (string, error) { extractFile, err := ioutil.TempFile("", "squashfsparser") if err != nil { return "", err } _, err = extractFile.Write([]byte(dirpath)) if err != nil { return "", err } err = extractFile.Close() if err != nil { return "", err } return extractFile.Name(), nil } func (s *SquashFSParser) enableSecurityInfo() { // drwxr-xr-x administrator/administrator 66 2019-04-08 18:49 squashfs-root - - s.fileLineRegex = 
regexp.MustCompile(`^([A-Za-z-]+)\s+([\-\.\w]+|\d+)/([\-\.\w]+|\d+)\s+(\d+)\s+(\d+-\d+-\d+)\s+(\d+:\d+)\s+([\S ]+)\t(\S+)\s+(\S)`) s.securityInfo = true } // New returns a new SquashFSParser instance for the given image file. func New(imagepath string, securityInfo bool) *SquashFSParser { parser := &SquashFSParser{ // drwxr-xr-x administrator/administrator 66 2019-04-08 18:49 squashfs-root fileLineRegex: regexp.MustCompile(`^([A-Za-z-]+)\s+([\-\.\w]+|\d+)/([\-\.\w]+|\d+)\s+(\d+)\s+(\d+-\d+-\d+)\s+(\d+:\d+)\s+(.*)$`), imagepath: imagepath, securityInfo: false, } if securityInfo && securityInfoSupported() { parser.enableSecurityInfo() } return parser } func normalizePath(filepath string) (dir string, name string) { // Ensure directory and file names are consistent, with no relative parts // or trailing slash on directory names. dir, name = path.Split(path.Clean(filepath)) dir = path.Clean(dir) return } func (s *SquashFSParser) parseFileLine(line string) (string, fsparser.FileInfo, error) { // TODO(jlarimer): add support for reading xattrs. unsquashfs can read // and write xattrs, but it doesn't display them when just listing files. var fi fsparser.FileInfo dirpath := "" res := s.fileLineRegex.FindStringSubmatch(line) if res == nil { return dirpath, fi, fmt.Errorf("Can't match line %s\n", line) } var err error fi.Mode, err = parseMode(res[1]) if err != nil { return dirpath, fi, err } // unsquashfs converts the uid/gid to a username/group on this system, so // we need to convert it back to the numeric values. 
fi.Uid, err = uidForUsername(res[2]) if err != nil { return dirpath, fi, err } fi.Gid, err = gidForGroup(res[3]) if err != nil { return dirpath, fi, err } fi.Size, err = strconv.ParseInt(res[4], 10, 64) if err != nil { return dirpath, fi, err } // links show up with a name like "./dir2/file3 -> file1" if fi.Mode&fsparser.S_IFLNK == fsparser.S_IFLNK { parts := strings.Split(res[7], " -> ") dirpath, fi.Name = normalizePath(parts[0]) fi.LinkTarget = parts[1] } else { dirpath, fi.Name = normalizePath(res[7]) } if s.securityInfo { if res[8] != "-" { fi.Capabilities, _ = capability.New(res[8]) } fi.SELinuxLabel = res[9] } return dirpath, fi, nil } func (s *SquashFSParser) loadFileList() error { if s.files != nil { return nil } s.files = make(map[string][]fsparser.FileInfo) // we want to use -lln (numeric output) but that is only available in 4.4 and later args := []string{"-d", "", "-lls", s.imagepath} if s.securityInfo { // -llS is only available in our patched version args = append([]string{"-llS"}, args...) } out, err := exec.Command(unsquashfsCmd, args...).CombinedOutput() if err != nil { fmt.Fprintf(os.Stderr, "getDirList: %s", err) return err } lines := strings.Split(string(out), "\n") for _, line := range lines { path, fi, err := s.parseFileLine(line) if err == nil { dirfiles := s.files[path] dirfiles = append(dirfiles, fi) s.files[path] = dirfiles } } return nil } // GetDirInfo returns information on the specified directory. func (s *SquashFSParser) GetDirInfo(dirpath string) ([]fsparser.FileInfo, error) { if err := s.loadFileList(); err != nil { return nil, err } return s.files[path.Clean(dirpath)], nil } // GetFileInfo returns information on the specified file. func (s *SquashFSParser) GetFileInfo(filepath string) (fsparser.FileInfo, error) { if err := s.loadFileList(); err != nil { return fsparser.FileInfo{}, err } dirpath, name := normalizePath(filepath) // the root is stored as "." if dirpath == "/" && name == "" { dirpath = "." name = "." 
} dir := s.files[dirpath] for _, fi := range dir { if fi.Name == name { return fi, nil } } return fsparser.FileInfo{}, fmt.Errorf("Can't find file %s", filepath) } // CopyFile copies the specified file to the specified destination. func (s *SquashFSParser) CopyFile(filepath string, dstdir string) bool { // The list of files/directories to extract needs to be in a file... extractFile, err := getExtractFile(filepath) if err != nil { fmt.Fprintf(os.Stderr, "Failed to create temporary file: %v\n", err) return false } defer os.Remove(extractFile) // The -d argument to unsquashfs specifies a directory to unsquash to, but // the directory can't exist. It also extracts the full path. To fit the // semantics of CopyFile, we need to extract to a new temporary directly and // then copy the file to the specified destination. tmpdir, err := ioutil.TempDir("", "squashfsparser") if err != nil { fmt.Fprintf(os.Stderr, "Failed to create temporary directory: %v\n", err) return false } defer os.RemoveAll(tmpdir) tmpdir = path.Join(tmpdir, "files") out, err := exec.Command(unsquashfsCmd, "-d", tmpdir, "-e", extractFile, s.imagepath).CombinedOutput() if err != nil { fmt.Fprintf(os.Stderr, "unsquashfs failed: %v: %s\n", err, out) return false } err = exec.Command(cpCmd, "-a", path.Join(tmpdir, filepath), dstdir).Run() if err != nil { fmt.Fprintf(os.Stderr, "%s -a %s %s: failed", cpCmd, path.Join(tmpdir, filepath), dstdir) return false } return true } // ImageName returns the name of the filesystem image. 
func (s *SquashFSParser) ImageName() string { return s.imagepath } func (f *SquashFSParser) Supported() bool { _, err := exec.LookPath(unsquashfsCmd) if err != nil { return false } _, err = exec.LookPath(cpCmd) return err == nil } func securityInfoSupported() bool { out, _ := exec.Command(unsquashfsCmd).CombinedOutput() // look for -ll[S] (securityInfo support) in output if strings.Contains(string(out), "-ll[S]") { return true } fmt.Fprintln(os.Stderr, "squashfsparser: security info (selinux + capabilities) not supported by your version of unsquashfs") return false } ================================================ FILE: pkg/squashfsparser/squashfsparser_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package squashfsparser import ( "io/ioutil" "os" "strings" "testing" "github.com/google/go-cmp/cmp" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) // This could be considered environment-specific... func TestUidForUsername(t *testing.T) { uid, err := uidForUsername("root") if err != nil { t.Errorf("uidForUsername(\"root\") returned error: %v", err) return } if uid != 0 { t.Errorf("uidForUsername(\"root\") returned %d, should be 0", uid) } _, err = uidForUsername("asdfASDFxxx999") if err == nil { t.Errorf("uidForUsername(\"asdfASDFxxx\") did not return error") return } } // This could be considered environment-specific... 
func TestGidForGroup(t *testing.T) { uid, err := gidForGroup("root") if err != nil { t.Errorf("gidForGroup(\"root\") returned error: %v", err) return } if uid != 0 { t.Errorf("gidForGroup(\"root\") returned %d, should be 0", uid) } _, err = gidForGroup("asdfASDFxxx999") if err == nil { t.Errorf("gidForGroup(\"asdfASDFxxx999\") did not return error") } } func TestParseMode(t *testing.T) { tests := []struct { mode string result uint64 err bool }{ { mode: "drwxr-xr-x", result: fsparser.S_IFDIR | fsparser.S_IRWXU | fsparser.S_IRGRP | fsparser.S_IXGRP | fsparser.S_IROTH | fsparser.S_IXOTH, err: false, }, { mode: "-rw-r--r--", result: fsparser.S_IFREG | fsparser.S_IRUSR | fsparser.S_IWUSR | fsparser.S_IRGRP | fsparser.S_IROTH, err: false, }, { mode: "lrwxrwxrwx", result: fsparser.S_IFLNK | fsparser.S_IRWXU | fsparser.S_IRWXG | fsparser.S_IRWXO, err: false, }, { mode: "drwxrwxrwt", result: fsparser.S_IFDIR | fsparser.S_IRWXU | fsparser.S_IRWXG | fsparser.S_IRWXO | fsparser.S_ISVTX, err: false, }, { // too short mode: "blahblah", result: 0, err: true, }, { // too long mode: "blahblahblah", result: 0, err: true, }, } for _, test := range tests { result, err := parseMode(test.mode) if err != nil && !test.err { t.Errorf("parseMode(\"%s\") returned error but shouldn't have: %s", test.mode, err) continue } if result != test.result { t.Errorf("parseMode(\"%s\") should be %#o, is %#o", test.mode, test.result, result) } } } func TestParseFileLine(t *testing.T) { tests := []struct { line string dirpath string fi fsparser.FileInfo err bool }{ { line: "-rw-r--r-- root/root 32 2019-04-10 14:41 /Filey McFileFace", dirpath: "/", fi: fsparser.FileInfo{ Size: 32, Mode: 0100644, Uid: 0, Gid: 0, Name: "Filey McFileFace", }, err: false, }, { line: "lrwxrwxrwx 1010/2020 5 2019-04-10 14:36 /dir2/file3 -> file1", dirpath: "/dir2", fi: fsparser.FileInfo{ Size: 5, Mode: 0120777, Uid: 1010, Gid: 2020, Name: "file3", LinkTarget: "file1", }, err: false, }, { line: "blah blah blah!", dirpath: "", fi: 
fsparser.FileInfo{}, err: true, }, } s := New("", false) for _, test := range tests { dirpath, fi, err := s.parseFileLine(test.line) if err != nil && !test.err { t.Errorf("parseFileLine(\"%s\") returned error but shouldn't have: %s", test.line, err) continue } if dirpath != test.dirpath { t.Errorf("parseFileLine(\"%s\") dirpath got \"%s\", wanted \"%s\"", test.line, dirpath, test.dirpath) } if diff := cmp.Diff(fi, test.fi); diff != "" { t.Errorf("parseFileLine(\"%s\") result mismatch (-got, +want):\n%s", test.line, diff) } } } func TestImageName(t *testing.T) { testImage := "../../test/squashfs.img" f := New(testImage, false) imageName := f.ImageName() if imageName != testImage { t.Errorf("ImageName() returned %s, wanted %s", imageName, testImage) } } func TestDirInfoRoot(t *testing.T) { testImage := "../../test/squashfs.img" f := New(testImage, false) /* $ unsquashfs -d "" -ll test/squashfs.img Parallel unsquashfs: Using 8 processors 5 inodes (4 blocks) to write drwxr-xr-x jlarimer/jlarimer 63 2019-04-11 08:06 -rw-r--r-- root/jlarimer 0 2019-04-10 14:41 /Filey McFileFace drwxr-x--- 1007/1008 3 2019-04-10 14:36 /dir1 drwxr-xr-x jlarimer/jlarimer 69 2019-04-10 14:40 /dir2 ---------- jlarimer/jlarimer 7 2019-04-10 14:36 /dir2/file1 -rwsr-xr-x jlarimer/jlarimer 5 2019-04-10 14:36 /dir2/file2 lrwxrwxrwx jlarimer/jlarimer 5 2019-04-10 14:36 /dir2/file3 -> file1 drwx------ 1005/1005 28 2019-04-10 14:40 /dir2/subdir2 -rw-r--r-- jlarimer/jlarimer 20 2019-04-10 14:40 /dir2/subdir2/file4 */ tests := map[string]map[string]fsparser.FileInfo{ "/": { "Filey McFileFace": fsparser.FileInfo{ Name: "Filey McFileFace", Mode: 0100644, Uid: 0, Gid: 1001, Size: 0, }, "dir1": fsparser.FileInfo{ Name: "dir1", Mode: 0040750, Uid: 1007, Gid: 1008, Size: 3, }, "dir2": fsparser.FileInfo{ Name: "dir2", Mode: 0040755, Uid: 1001, Gid: 1001, Size: 69, }, }, "/dir2": { "file1": fsparser.FileInfo{ Name: "file1", Mode: 0100000, Uid: 1001, Gid: 1001, Size: 7, }, "file2": fsparser.FileInfo{ Name: 
"file2", Mode: 0104755, Uid: 1001, Gid: 1001, Size: 5, }, "file3": fsparser.FileInfo{ Name: "file3", Mode: 0120777, Uid: 1001, Gid: 1001, Size: 5, LinkTarget: "file1", }, "subdir2": fsparser.FileInfo{ Name: "subdir2", Mode: 0040700, Uid: 1005, Gid: 1005, Size: 28, }, }, "/dir2/subdir2": { "file4": fsparser.FileInfo{ Name: "file4", Mode: 0100644, Uid: 1001, Gid: 1001, Size: 20, }, }, } for _, testdir := range []string{"/", "/dir2", "/dir2/subdir2"} { dirtests := tests[testdir] dir, err := f.GetDirInfo(testdir) if err != nil { t.Errorf("GetDirInfo() returned error: %v", err) return } for _, fi := range dir { //fmt.Printf("Directory: %s, Name: %s, Size: %d, Mode: %o\n", testdir, fi.Name, fi.Size, fi.Mode) tfi, ok := dirtests[fi.Name] if !ok { t.Errorf("File \"%s\" not found in test map", fi.Name) continue } if diff := cmp.Diff(fi, tfi); diff != "" { t.Errorf("GetDirInfo() result mismatch for \"%s\" (-got, +want):\n%s", fi.Name, diff) } delete(dirtests, fi.Name) } for name := range dirtests { t.Errorf("File \"%s\" exists in test map but not in test filesystem", name) } } } func TestGetFileInfo(t *testing.T) { testImage := "../../test/squashfs.img" f := New(testImage, false) fi, err := f.GetFileInfo("/") if err != nil { t.Error(err) } if !fi.IsDir() { t.Errorf("/ should be dir") } dir, err := f.GetDirInfo("/") if err != nil { t.Error(err) } if len(dir) < 1 { t.Errorf("/ should not be empty") } fi, err = f.GetFileInfo("/dir2/file3") if err != nil { t.Errorf("GetDirInfo() returned error: %v", err) return } tfi := fsparser.FileInfo{ Name: "file3", Mode: 0120777, Uid: 1001, Gid: 1001, Size: 5, LinkTarget: "file1", } if diff := cmp.Diff(fi, tfi); diff != "" { t.Errorf("GetFileInfo() result mismatch (-got, +want):\n%s", diff) } } func TestCopyFile(t *testing.T) { testImage := "../../test/squashfs.img" f := New(testImage, false) if !f.CopyFile("/dir2/subdir2/file4", ".") { t.Errorf("CopyFile() returned false") return } defer os.Remove("file4") data, err := 
ioutil.ReadFile("file4") if err != nil { t.Errorf("can't read file4: %v", err) return } expected := "feed me a stray cat\n" if string(data) != expected { t.Errorf("file4 expected \"%s\" but got \"%s\"", expected, data) } } func TestSecurityInfo(t *testing.T) { testImage := "../../test/squashfs_cap.img" f := New(testImage, true) fi, err := f.GetFileInfo("/ifconfig") if err != nil { t.Error(err) } if fi.SELinuxLabel != "-" { t.Error("no selinux label should be present") } if !strings.EqualFold(fi.Capabilities[0], "cap_net_admin+p") { t.Errorf("bad capabilities: %s", fi.Capabilities) } } ================================================ FILE: pkg/ubifsparser/ubifsparser.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package ubifsparser import ( "fmt" "os" "os/exec" "path" "regexp" "strconv" "strings" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type UbifsParser struct { fileinfoReg *regexp.Regexp fileLinkReg *regexp.Regexp imagepath string } const ( ubifsReaderCmd = "ubireader_list_files" ) func New(imagepath string) *UbifsParser { parser := &UbifsParser{ // 120777 1 0 0 0 Mar 13 08:53 tmp -> /var/tmp fileinfoReg: regexp.MustCompile( `^\s*(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\S+\s+\d+)\s+(\d+:\d+)\s+(.+)$`), fileLinkReg: regexp.MustCompile( `(\S+)\s->\s(\S+)`), imagepath: imagepath, } return parser } func (e *UbifsParser) ImageName() string { return e.imagepath } func (e *UbifsParser) parseFileLine(line string) (fsparser.FileInfo, error) { res := e.fileinfoReg.FindAllStringSubmatch(line, -1) var fi fsparser.FileInfo if res == nil { return fi, fmt.Errorf("can't parse: %s", line) } size, _ := strconv.Atoi(res[0][5]) fi.Size = int64(size) fi.Mode, _ = strconv.ParseUint(res[0][1], 8, 32) fi.Uid, _ = strconv.Atoi(res[0][3]) fi.Gid, _ = strconv.Atoi(res[0][4]) fi.Name = res[0][8] fi.SELinuxLabel = fsparser.SELinuxNoLabel // fill in linktarget if fi.IsLink() && strings.Contains(fi.Name, "->") { rlnk := e.fileLinkReg.FindAllStringSubmatch(fi.Name, -1) if rlnk == nil { return fsparser.FileInfo{}, fmt.Errorf("can't parse LinkTarget from %s", fi.Name) } fi.Name = rlnk[0][1] fi.LinkTarget = rlnk[0][2] } return fi, nil } func (e *UbifsParser) getDirList(dirpath string) ([]fsparser.FileInfo, error) { out, err := exec.Command(ubifsReaderCmd, "-P", dirpath, e.imagepath).CombinedOutput() if err != nil { fmt.Fprintln(os.Stderr, err) return nil, err } var dir []fsparser.FileInfo lines := strings.Split(string(out), "\n") for _, fline := range lines { if fline == "" { continue } fi, err := e.parseFileLine(fline) if err != nil { return nil, err } dir = append(dir, fi) } return dir, nil } func (e *UbifsParser) GetDirInfo(dirpath string) ([]fsparser.FileInfo, error) { dir, err := 
e.getDirList(dirpath) return dir, err } func (e *UbifsParser) GetFileInfo(dirpath string) (fsparser.FileInfo, error) { // return fake entry for root (/) if dirpath == "/" { return fsparser.FileInfo{Name: "/", Mode: fsparser.S_IFDIR}, nil } listpath := path.Dir(dirpath) listfile := path.Base(dirpath) var fi fsparser.FileInfo dir, err := e.getDirList(listpath) if err != nil { return fi, err } for _, info := range dir { if info.Name == listfile { return info, nil } } return fi, fmt.Errorf("file not found: %s", dirpath) } func (e *UbifsParser) CopyFile(filepath string, dstdir string) bool { err := exec.Command(ubifsReaderCmd, "--copy", filepath, "--copy-dest", dstdir, e.imagepath).Run() if err != nil { fmt.Fprintln(os.Stderr, err) return false } return true } func (f *UbifsParser) Supported() bool { _, err := exec.LookPath(ubifsReaderCmd) return err == nil } ================================================ FILE: pkg/ubifsparser/ubifsparser_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package ubifsparser import ( "os" "testing" ) func TestCleanup(t *testing.T) { testImage := "../../test/ubifs.img" e := New(testImage) if e.ImageName() != testImage { t.Errorf("ImageName returned bad name") } fi, err := e.GetFileInfo("/") if err != nil { t.Error(err) } if !fi.IsDir() { t.Errorf("/ should be dir") } dir, err := e.GetDirInfo("/") if err != nil { t.Errorf("getDirList failed") } if len(dir) != 5 { t.Errorf("should be 5 files, but %d found", len(dir)) } fi, err = e.GetFileInfo("/file1.txt") if err != nil { t.Errorf("GetFileInfo failed") } if !fi.IsFile() { t.Errorf("GetFileInfo failed, not a file") } if fi.IsDir() { t.Errorf("GetFileInfo failed, not a dir") } if fi.Name != "file1.txt" { t.Errorf("filename does not match: %s", fi.Name) } fi, err = e.GetFileInfo("/bin/elf_arm64") if err != nil { t.Errorf("GetFileInfo failed") } if !fi.IsFile() { t.Errorf("GetFileInfo failed, not a file") } if fi.IsDir() { t.Errorf("GetFileInfo failed, not a dir") } if fi.Size != 3740436 { t.Errorf("file size does not match: %s", fi.Name) } fi, err = e.GetFileInfo("/dateX") if err != nil { t.Errorf("GetFileInfo failed") } if fi.IsFile() { t.Errorf("GetFileInfo failed, not a file") } if fi.IsDir() { t.Errorf("GetFileInfo failed, not a dir") } if !fi.IsLink() { t.Errorf("GetFileInfo failed, is link") } if fi.LinkTarget != "date1.txt" { t.Errorf("link does not match: %s", fi.LinkTarget) } fi, err = e.GetFileInfo("/dir1") if err != nil { t.Errorf("GetFileInfo failed") } if fi.IsFile() { t.Errorf("GetFileInfo failed, not a file") } if !fi.IsDir() { t.Errorf("GetFileInfo failed, not a dir") } if fi.Name != "dir1" { t.Errorf("filename does not match: %s", fi.Name) } if !e.CopyFile("/bin/elf_arm32", "xxx-test-xxx") { t.Errorf("copyfile returned false") } if _, err := os.Stat("xxx-test-xxx"); os.IsNotExist(err) { t.Errorf("%s", err) } else { os.Remove("xxx-test-xxx") } } ================================================ FILE: pkg/util/util.go 
================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package util import ( "crypto/sha256" "encoding/json" "fmt" "io" "io/ioutil" "os" "path" "reflect" "strconv" ) func MkTmpDir(prefix string) (string, error) { tmpDir, err := ioutil.TempDir(os.TempDir(), prefix) if err != nil { return "", err } return tmpDir, err } func DigestFileSha256(filepath string) []byte { f, err := os.Open(filepath) if err != nil { return nil } defer f.Close() h := sha256.New() if _, err := io.Copy(h, f); err != nil { return nil } return h.Sum(nil) } func loadJson(data []byte, item string) (interface{}, error) { var jd map[string]interface{} err := json.Unmarshal(data, &jd) return jd[item], err } func XtractJsonField(data []byte, items []string) (string, error) { idx := 0 id, err := loadJson(data, items[idx]) if err != nil { return "", err } if id == nil { return "", fmt.Errorf("JSON field not found: %s", items[idx]) } idx++ for { if id == nil { return "", fmt.Errorf("JSON field not found: %s", items[idx-1]) } // keep for debugging //fmt.Printf("idx=%d, type=%s\n", idx, reflect.TypeOf(id).String()) if reflect.TypeOf(id).String() == "map[string]interface {}" { idc := id.(map[string]interface{}) id = idc[items[idx]] idx++ } else if reflect.TypeOf(id).String() == "[]interface {}" { idc := id.([]interface{}) index, _ := strconv.Atoi(items[idx]) id = idc[index] idx++ } else { switch id := id.(type) { case bool: if id { return "true", nil } else { 
return "false", nil } case float32, float64: return fmt.Sprintf("%f", id), nil case string: return id, nil default: return "", fmt.Errorf("can't handle type") } } } } func CleanPathDir(pathName string) string { cleaned := path.Clean(pathName) if cleaned[len(cleaned)-1] != '/' { cleaned += "/" } return cleaned } ================================================ FILE: pkg/vfatparser/vfatparser.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package vfatparser import ( "fmt" "os" "os/exec" "path/filepath" "regexp" "strconv" "strings" "github.com/cruise-automation/fwanalyzer/pkg/fsparser" ) type mDirReg struct { rx *regexp.Regexp hasExt bool } type VFatParser struct { mDirRegex []mDirReg imagepath string } const ( vFatLsCmd string = "mdir" vFatCpCmd string = "mcopy" ) func New(imagepath string) *VFatParser { var regs []mDirReg // BZIMAGE 5853744 2018-11-12 10:04 bzImage regs = append(regs, mDirReg{regexp.MustCompile(`^([~\w]+)\s+(\d+)\s\d+-\d+-\d+\s+\d+:\d+\s+(\w+)$`), false}) // BZIMAGE SIG 287 2018-11-12 10:04 bzImage.sig regs = append(regs, mDirReg{regexp.MustCompile(`^([~\w]+)\s+(\w+)\s+(\d+)\s\d+-\d+-\d+\s+\d+:\d+\s+(.+).*`), true}) // EFI 2018-11-12 10:04 regs = append(regs, mDirReg{regexp.MustCompile(`^([~\.\w]+)\s+.*`), false}) // startup nsh 12 2018-11-12 10:04 regs = append(regs, mDirReg{regexp.MustCompile(`^([~\w]+)\s+(\w+)\s+(\d+).*`), true}) // grubenv 1024 2018-11-12 10:04 regs = append(regs, mDirReg{regexp.MustCompile(`^([~\w]+)\s+(\d+).*`), false}) parser := &VFatParser{ mDirRegex: regs, imagepath: imagepath, } // configure mtools to skip size checks on VFAT images os.Setenv("MTOOLS_SKIP_CHECK", "1") return parser } func (f *VFatParser) ImageName() string { return f.imagepath } func (f *VFatParser) parseFileLine(line string) (fsparser.FileInfo, error) { var fi fsparser.FileInfo for _, reg := range f.mDirRegex { res := reg.rx.FindAllStringSubmatch(line, -1) if res != nil { size := 0 if len(res[0]) == 2 { fi.Mode = fsparser.S_IFDIR } else { fi.Mode = fsparser.S_IFREG if reg.hasExt { size, _ = strconv.Atoi(res[0][3]) } else { size, _ = strconv.Atoi(res[0][2]) } } fi.Mode |= fsparser.S_IRWXU | fsparser.S_IRWXG | fsparser.S_IRWXO fi.Size = int64(size) fi.Uid = 0 fi.Gid = 0 fi.SELinuxLabel = fsparser.SELinuxNoLabel fi.Name = res[0][1] if reg.hasExt { fi.Name = fmt.Sprintf("%s.%s", res[0][1], res[0][2]) } // use long name if (!reg.hasExt && len(res[0]) > 3) || (reg.hasExt && len(res[0]) > 4) 
{ fi.Name = res[0][len(res[0])-1] } return fi, nil } } return fi, fmt.Errorf("not a file/dir") } func (f *VFatParser) getDirList(dirpath string, ignoreDot bool) ([]fsparser.FileInfo, error) { var dir []fsparser.FileInfo out, err := exec.Command(vFatLsCmd, "-i", f.imagepath, dirpath).CombinedOutput() if err != nil { fmt.Fprintln(os.Stderr, err) return nil, err } lines := strings.Split(string(out), "\n") for _, fline := range lines { if len(fline) > 1 { fi, err := f.parseFileLine(fline) if err == nil { // filter: . and .. if !ignoreDot || (fi.Name != "." && fi.Name != "..") { dir = append(dir, fi) } } } } return dir, nil } func (f *VFatParser) GetDirInfo(dirpath string) ([]fsparser.FileInfo, error) { if dirpath == "" { dirpath = "/" } return f.getDirList(dirpath, true) } func (f *VFatParser) GetFileInfo(dirpath string) (fsparser.FileInfo, error) { // return fake entry for root (/) if dirpath == "/" { return fsparser.FileInfo{Name: "/", Mode: fsparser.S_IFDIR}, nil } var fifake fsparser.FileInfo dir, err := f.getDirList(dirpath, false) if err != nil { return fifake, err } // GetFileInfo was called on non directory if len(dir) == 1 { return dir[0], nil } for _, info := range dir { if info.Name == "." 
{ info.Name = filepath.Base(dirpath) return info, nil } } return fifake, fmt.Errorf("file not found: %s", dirpath) } func (f *VFatParser) CopyFile(filepath string, dstdir string) bool { src := fmt.Sprintf("::%s", filepath) _, err := exec.Command(vFatCpCmd, "-bni", f.imagepath, src, dstdir).Output() if err != nil { fmt.Fprintln(os.Stderr, err) return false } return true } func (f *VFatParser) Supported() bool { _, err := exec.LookPath(vFatCpCmd) if err != nil { return false } _, err = exec.LookPath(vFatLsCmd) return err == nil } ================================================ FILE: pkg/vfatparser/vfatparser_test.go ================================================ /* Copyright 2019-present, Cruise LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package vfatparser import ( "fmt" "os" "testing" ) var f *VFatParser func TestMain(t *testing.T) { testImage := "../../test/vfat.img" f = New(testImage) if f.ImageName() != testImage { t.Errorf("imageName returned bad name") } } func TestGetDirInfo(t *testing.T) { dir, err := f.GetDirInfo("/") if err != nil { t.Errorf("GetDirInfo was nil: %s", err) } for _, fi := range dir { if fi.Name == "dir1" { if !fi.IsDir() { t.Errorf("dir1 must be Dir") } } } dir, err = f.GetDirInfo("dir1") if err != nil { t.Errorf("GetDirInfo was nil: %s", err) } for _, fi := range dir { fmt.Printf("Name: %s Size: %d Mode: %o\n", fi.Name, fi.Size, fi.Mode) if fi.Name == "file1" { if !fi.IsFile() { t.Errorf("file1 need to be a file") } if fi.Uid != 0 || fi.Gid != 0 { t.Errorf("file1 need to be 0:0") } if fi.Size != 5 { t.Errorf("file1 size needs to be 5") } if !fi.IsWorldWrite() { t.Errorf("file1 needs to be world writable") } } } if !f.CopyFile("dir1/file1", ".") { t.Errorf("CopyFile returned false") } if _, err := os.Stat("file1"); os.IsNotExist(err) { t.Errorf("%s", err) } else { os.Remove("file1") } } func TestGetFileInfo(t *testing.T) { fi, err := f.GetFileInfo("/DIR1/FILE1") if err != nil { t.Error(err) } if !fi.IsFile() { t.Errorf("/DIR1/FILE1 should be file") } fi, err = f.GetFileInfo("/") if err != nil { t.Error(err) } if !fi.IsDir() { t.Errorf("/ should be dir") } } ================================================ FILE: scripts/catfile.sh ================================================ #!/bin/sh cat $1 ================================================ FILE: scripts/check_apkcert.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 APK=$(echo ${ORIG_FILENAME}|grep -e "\.apk") if [ -n "$APK" ]; then DIR=$(dirname ${FILEPATH}) mkdir ${DIR}/apkdata cd ${DIR}/apkdata; unzip ${FILEPATH} >/dev/null 2>&1; openssl cms -cmsout -noout -text -print -in META-INF/CERT.RSA -inform DER |grep 
subject:|sed 's/^ *//' rm -rf ${DIR}/apkdata fi ================================================ FILE: scripts/check_cert.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 ISPEM=$(file ${FILEPATH} |grep PEM) if [ -n "$ISPEM" ]; then openssl x509 -noout -text -in ${FILEPATH} | grep Issuer:|sed 's/^ *//' openssl x509 -noout -text -in ${FILEPATH} | grep Subject:|sed 's/^ *//' fi ================================================ FILE: scripts/check_file_arm32.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 INFO=$(file ${FILEPATH}|grep "ELF 32-bit LSB executable, ARM, EABI5") if [ -z "$INFO" ]; then echo -n ${ORIG_FILENAME} "not an ARM32 elf file" fi ================================================ FILE: scripts/check_file_arm64.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 INFO=$(file ${FILEPATH}|grep "ELF 64-bit LSB executable, ARM aarch64") if [ -z "$INFO" ]; then echo -n ${ORIG_FILENAME} "not an ARM aarch64 elf file" fi ================================================ FILE: scripts/check_file_elf_stripped.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 INFO=$(file ${FILEPATH}|grep "not stripped") if [ -n "$INFO" ]; then echo -n ${ORIG_FILENAME} "is not stripped" fi ================================================ FILE: scripts/check_file_x8664.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 INFO=$(file ${FILEPATH}|grep "ELF 64-bit LSB executable, x86-64") if [ -z "$INFO" ]; then echo -n '{"reason": "not a x86-64 elf file"}' fi 
================================================ FILE: scripts/check_otacert.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 ZIP=$(echo ${ORIG_FILENAME}|grep -e "\.zip") if [ -n "$ZIP" ]; then DIR=$(dirname ${FILEPATH}) mkdir ${DIR}/otacertdata pushd cd ${DIR}/otacertdata unzip ${FILEPATH} >/dev/null 2>&1 popd find ${DIR}/otacertdata -name "*" -exec scripts/check_cert.sh {} {} \; rm -rf ${DIR}/otacertdata fi ================================================ FILE: scripts/check_privatekey.sh ================================================ #!/bin/sh FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 INFO=$(file ${FILEPATH} | grep "private key") PERMS=$(echo ${ORIG_MODE} | grep -E ".*r[-w][-x]$") if [ -n "$INFO" ]; then echo -n ${ORIG_FILENAME} "is a private key" if [ -n "$PERMS" ]; then echo -n " that is world readable" fi fi ================================================ FILE: scripts/check_sec.sh ================================================ #!/bin/bash FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_LABEL=$6 CONFIG=$8 RESULT=$(checksec --output=json --file="$1") export RESULT export FILEPATH export CONFIG export ORIG_FILENAME # Config format is JSON # array for values allows multiple acceptable values # {"cfg": # { # "pie": ["yes"], # "relro": ["full", "partial"] # }, # "skip": ["/usr/bin/bla"] # } # # usable cfg fields, omitted fields are not checked: # { # "canary": "no", # "fortify_source": "no", # "nx": "yes", # "pie": "no", # "relro": "partial", # "rpath": "no", # "runpath": "no", # "symbols": "no" # } python -c 'import json import sys import os cfg = os.getenv("CONFIG") res = os.getenv("RESULT") fp = os.getenv("FILEPATH") orig_name = os.getenv("ORIG_FILENAME") expected = {} try: expected = json.loads(cfg.rstrip()) except Exception: print("bad config: {}".format(cfg.rstrip())) sys.exit(1) try: 
result = json.loads(res.rstrip()) if "skip" in expected: if orig_name in expected["skip"]: sys.exit(0) if not fp in result: fp = "file" bad_keys = [] for k in expected["cfg"]: if k in result[fp]: passed = False for expected_value in expected["cfg"][k]: if expected_value == result[fp][k]: passed = True break if not passed: print(json.dumps(result[fp]).rstrip()) sys.exit(0) else: bad_keys.append(k) if bad_keys: print("results were missing expected keys: {}".format(", ".join(bad_keys))) sys.exit(0) except Exception as e: if not "Not an ELF file:" in res: print(e) sys.exit(0) ' ================================================ FILE: scripts/diff.sh ================================================ #!/bin/sh origname=$1 oldfile=$2 curfile=$3 diff -u $oldfile $curfile exit 0 ================================================ FILE: scripts/prop2json.py ================================================ #!/usr/bin/python3 # # read Android property file and convert it to JSON # import json import sys props = {} with open(sys.argv[1], 'r') as fp: while True: line = fp.readline() if not line: break if line.startswith('#'): continue line = line.rstrip("\n") parts = line.split("=", 1) if len(parts) == 2: props[parts[0]] = parts[1] print(json.dumps(props)) ================================================ FILE: test/elf_main.go ================================================ package main import "fmt" func main() { fmt.Println("hello world") } ================================================ FILE: test/oldtree.json ================================================ { "files": [ { "name": "/world", "gid": 0, "mode": 33206, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/bin/elf_arm64", "gid": 1001, "mode": 33277, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "5e18d8042a38789761dc950cd3aa73b3562d69a4dce2a5ef8530301e71494168", "size": 2110768 }, { "name": "/dir1", 
"gid": 0, "mode": 16877, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "0", "size": 1024 }, { "name": "/dir1/dir11", "gid": 0, "mode": 16877, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "0", "size": 1024 }, { "name": "/dir3/file31", "gid": 1001, "mode": 33188, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/bin/elf_arm32", "gid": 1001, "mode": 33277, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "682dfed81319befa3fcec071d4d86e7093f19929ef75dea7b16abb196a2aa667", "size": 1957752 }, { "name": "/date1", "gid": 0, "mode": 33188, "se_linux_label": "-", "uid": 1, "link_target": "", "digest": "8b15095ed1af38d5e383af1c4eadc5ae73cab03964142eb54cb0477ccd6a8dd5", "size": 29 }, { "name": "/dir3", "gid": 1001, "mode": 16877, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "0", "size": 1024 }, { "name": "/dir1/file11", "gid": 0, "mode": 33188, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/dir2", "gid": 1001, "mode": 16877, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "0", "size": 1024 }, { "name": "/dir2/file21", "gid": 0, "mode": 36333, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/file1", "gid": 0, "mode": 33178, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/file2", "gid": 0, "mode": 32851, "se_linux_label": "-", "uid": 123, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/bin", "gid": 1001, "mode": 16877, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "0", "size": 1024 
}, { "name": "/bin/elf_x8664", "gid": 1001, "mode": 33277, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "9de3537418e232700ea8d187762beb6f1862a999372a47e5d04101ca754f2000", "size": 2011612 }, { "name": "/bin/elf_x8664_stripped", "gid": 1001, "mode": 33204, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "fb969a744228ee4471377930d6f31d3bb2c41a6f6b04c0253ebdf1f57180a421", "size": 1211048 }, { "name": "/lost+found", "gid": 0, "mode": 16832, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "0", "size": 12288 }, { "name": "/dir3/file33", "gid": 1001, "mode": 33188, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/ver", "gid": 0, "mode": 33188, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "44c77e41961f354f515e4081b12619fdb15829660acaa5d7438c66fc3d326df3", "size": 15 }, { "name": "/dir1/dir11/file12", "gid": 0, "mode": 33188, "se_linux_label": "-", "uid": 0, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/dir2/file22", "gid": 1002, "mode": 33188, "se_linux_label": "-", "uid": 1002, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 }, { "name": "/dir3/file32", "gid": 1001, "mode": 33188, "se_linux_label": "-", "uid": 1001, "link_target": "", "digest": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "size": 0 } ], "image_name": "test/test.img", "image_digest": "9d5fd9acc98421b46976f283175cc438cf549bb0607a1bca6e881d3e7f323794" } ================================================ FILE: test/script_test.sh ================================================ #!/bin/bash FILEPATH=$1 ORIG_FILENAME=$2 ORIG_UID=$3 ORIG_GID=$4 ORIG_MODE=$5 ORIG_SELINUXLABEL=$6 # this is an artificial test if [ "$7" = "--" ]; then echo -n $9 $8 fi 
================================================ FILE: test/test.cap.file ================================================ ================================================ FILE: test/test.py ================================================ #!/usr/bin/env python # Copyright 2019-present, Cruise LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import sys import os error = False def SetError(log): global error print log error = True def test(cfgfile, e2toolspath=""): os.system(e2toolspath+" fwanalyzer -in test/test.img -cfg " + cfgfile + " >test/test_out.json 2>&1") with open("test/test_out.json") as read_file: try: data = json.load(read_file) except: data = {} if data.get("image_name") != "test/test.img": SetError("image_name") if "data" not in data: SetError("data") if data.get("data", {}).get("Version") != "1.2.3": SetError("Data Version") if data.get("data", {}).get("extract_test") != "test extract": SetError("extract test") if "offenders" not in data: SetError("offenders") else: if not "/dir2/file21" in data["offenders"]: SetError("dir2/file21") if not "/dir2/file22" in data["offenders"]: SetError("dir2/file22") if not "File is WorldWriteable, not allowed" in data["offenders"]["/world"]: SetError("WorldWriteable") if not "File is SUID, not allowed" in data["offenders"]["/dir2/file21"]: SetError("SUID") if not "DirContent: File file22 not allowed in directory /dir2" in data["offenders"]["/dir2/file22"]: SetError("DirContent") if not "nofile" in data["offenders"]: 
SetError("DirContent") if not "test script" in data["offenders"]["/file2"]: SetError("file2") if not "Digest (sha256) did not match found = 44c77e41961f354f515e4081b12619fdb15829660acaa5d7438c66fc3d326df3 should be = 8b15095ed1af38d5e383af1c4eadc5ae73cab03964142eb54cb0477ccd6a8dd4. ver needs to be specific : " in data["offenders"]["/ver"]: SetError("ver digest") if "File State Check failed: group found 1002 should be 0 : this needs to be this way" in data["offenders"]["/dir2/file22"]: SetError("FileStatCheck shouldn't default to uid/guid 0") if not "File not allowed for pattern: *1" in data["offenders"]["/file1"]: SetError("file1 not allowed") if not "File State Check failed: size: 0 AllowEmpyt=false : this needs to be this way" in data["offenders"]["/file1"]: SetError("file1 exists but size 0") if not "/bin/elf_x8664 is not stripped" in data["offenders"]["/bin/elf_x8664"]: SetError("script failed") if not "informational" in data: SetError("informational") else: if not "/file1" in data["informational"]: SetError("/file1") else: if not "changed" in data["informational"]["/file1"][0]: SetError("file1 not changed") if not "/date1" in data["informational"]: SetError("/date1") else: if not "changed" in data["informational"]["/date1"][0]: SetError("date1 not changed") if __name__ == "__main__": test("test/test_cfg.toml") if error: os.system("cat test/test_out.json") sys.exit(error) # disable if your e2ls version does not support selinux (-Z) option test("test/test_cfg_selinux.toml") if error: os.system("cat test/test_out.json") sys.exit(error) ================================================ FILE: test/test_cfg.base.toml ================================================ # all checks for the integration test [GlobalFileChecks] Suid = true SuidAllowedList = [] SeLinuxLabel = false WorldWrite = true Uids = [0,1001,1002] Gids = [0,1001,1002] BadFiles = ["/file99", "*1", "/bin/elf_x8664"] [FileTreeCheck] OldTreeFilePath = "oldtree.json" CheckPath = ["/"] CheckPermsOwnerChange 
= true CheckFileSize = true CheckFileDigest = true [FilePathOwner."/dir2"] Uid = 0 Gid = 0 [FilePathOwner."/dir3"] Uid = 1001 Gid = 1001 [FileStatCheck."/file2"] AllowEmpty = true Uid = 123 Gid = 0 Mode = "100123" Desc = "this needs to be this way" [FileStatCheck."/dir2/file22"] AllowEmpty = true Desc = "this needs to be this way" [FileStatCheck."/ver"] AllowEmpty = false Uid = -1 Gid = -1 Mode = "" Desc = "this needs to be this way" [FileStatCheck."/file1"] AllowEmpty = false Uid = -1 Gid = -1 Mode = "" Desc = "this needs to be this way" [FileContent."ensure all elf files are x86 64bit"] File = "/bin" Script="check_file_x8664.sh" [FileContent."ensure bins are stripped"] File = "/" ScriptOptions = ["*"] Script="check_file_elf_stripped.sh" [FileContent."script_test"] File = "/file2" ScriptOptions = ["*", "script", "test"] Script="script_test.sh" [DataExtract."extract_test"] File = "/file2" ScriptOptions = ["extract", "test"] Script="script_test.sh" [FileContent."date1 needs to be specific"] File = "/date1" Digest="8b15095ed1af38d5e383af1c4eadc5ae73cab03964142eb54cb0477ccd6a8dd5" [FileContent."ver needs to be specific"] File = "/ver" Digest="8b15095ed1af38d5e383af1c4eadc5ae73cab03964142eb54cb0477ccd6a8dd4" [FileContent."version check"] File = "/ver" Regex= ".*version=1.2.3.*" Match = false [DirContent."/dir1"] Allowed=["dir11", "file11"] Required=["nofile"] [DirContent."/dir2"] Allowed=["file?1"] Required=["file21"] [DataExtract."Version"] File = "/ver" RegEx = "^version=(\\S+)\\n.*" [DataExtract."date1_file"] File = "/date1" Script = "catfile.sh" ================================================ FILE: test/test_cfg.toml ================================================ # test with old e2tools without selinux support [GlobalConfig] FsType = "extfs" FsTypeOptions = "" DigestImage = true # load actual config [Include."test/test_cfg.base.toml"] ================================================ FILE: test/test_cfg_selinux.toml 
================================================ # test with NEW e2tools with selinux support [GlobalConfig] FsType = "extfs" FsTypeOptions = "selinux" DigestImage = true # load actual config [Include."test/test_cfg.base.toml"] ================================================ FILE: test/testdir/dir1/file2 ================================================ ================================================ FILE: test/testdir/file1.txt ================================================ ================================================ FILE: test/testdir/jsonfile.json ================================================ {"test_var": 1, "test_str": "yolo"}