Compare commits
151 commits
SHA1: 4c4b4471d8, 4fbce7d649, 393206b5e2, c475557cd1, 2a3994208b, a93f8142fc, 914459cb99, e907a26493, d57426d89b, a1424bb197, fa3982d07a, 1125f87df4, 863e870b23, 9c7f6954e6, ed928f9a3e, 6c91e759a1, 5dacdd9eaf, 788a04e6bd, e3fcadaa4d, ce0d08ff18, 93e6672b0c, d209a94a7d, 7437606c81, 3a520a98b8, 904b079876, b78d2aacd5, a3759454fd, a63e42986a, 9ba2af21ac, 88e4c92633, 678c558e7f, 19f008e701, 0fdd07b0e2, 7566eefcd7, 12f5da337c, 3e3a8c29b4, 663e8fcfc7, 127c0b4f8d, f91ea4bc19, 48163a56ce, d4c5d7a4d4, 93372c3219, c0d050e34c, d4d06574b8, 74d41194cc, 345ceb01e4, 028a0b0d20, 4556eda901, c346404d62, a33fe9b9cf, d398ae6956, 8e4665a639, 9f017c0f9e, 0bb5f33c4a, 8ad0eabf82, 55378bda85, 62077eefb5, 6889bbd62e, 9dffde4dfa, d74b387f23, 50d38337bc, da7dd18f34, a19f1cad54, 288ad45650, d2c6a24f35, e87e047cca, 482b8f31b7, 79e0f72e60, 81a184e55a, d3990a89a7, 9849aa1661, ca77a526ed, 451ca92a90, 97ffd39ab5, 8dfd0738aa, 514628d7f4, ba57dade51, afb5c70f02, 1e3b61e772, 4815a17844, d57733fb15, 0845543d87, aa823947f0, 50b6d67009, 677e39b9b0, d175768c7f, ae8a4033cb, 5c81928db6, 883cac9291, 70d95eaefd, e12728be37, 8737b37214, b4bc135a7a, 91cab2773c, 83d276f996, e90a193cf4, ceecfb0a8d, 632d2ecdfd, c7e209cd81, 61278cfcd4, d1dadca916, b154fd0201, 30c8dc460f, f0cf6ddbb9, 6763513a6e, 373c4d105e, d3eb0506a8, 1a25a728af, 3583f246fa, ca9505e106, 0034e04c34, cc8f77434e, d57fc16a56, 8d8217a712, 5fdc14db2c, 87ecc7d370, 2948b71e0b, 55d45eac42, a51f718566, 8a0851a726, e5a0bec29b, 3324760d0a, 4e06dfb226, 711f939913, 5059051610, 70f411c597, 89f70739ea, 06354da1b3, 4aed44485c, 0c6106e090, b95c07ea68, b43cf3068c, e2642de2bd, 4ea1811147, 30c3c87962, 1e91e2b9f7, b4f443f7c2, ac933d788a, 6ff3d72bb1, 39eaf8bc69, 1106e578ac, f41681c197, 69219a4db8, f6e134506d, 4538b98bd7, 2459e21034, 1ac81dde54, af4dbad99f, b7b78c7ec6, b310781990, f5f8414b8d
114 changed files with 4795 additions and 945 deletions
.github/
.gitignore
.golangci.yml
Gopkg.lock
Gopkg.toml
README.md
cmd/
    build
    cfg-envelope.go
    cfg-signet.go
    cfg-tools.go
    cmd-checksum.go
    cmd-close.go
    cmd-configure.go
    cmd-generate.go
    cmd-import-export.go
    cmd-manage.go
    cmd-open.go
    cmd-sign.go
    cmd-verify.go
    cmd-version.go
    format.go
    format_sig.go
    main.go
    password.go
    password_test.go
    utils.go
core-wire.go
core-wire_test.go
core.go
core_test.go
defaults.go
envelope.go
testdata/
    .truststore/
        3911c84c-78f7-4354-a7f5-0e115aa2903c.recipient
        3911c84c-78f7-4354-a7f5-0e115aa2903c.signet
        safing-codesign-1.envelope
    test.txt
    test.txt.letter
    test.txt.sig
    test3.txt
    test3.txt.sig
    test4.txt
    testdir/
filesig/
    format_armor.go
    format_armor_test.go
    helpers.go
    json.go
    json_test.go
    main.go
    main_test.go
    text.go
    text_test.go
    text_yaml.go
go.mod
go.sum
hashtools/
import_export.go
letter-file.go
letter-wire.go
letter.go
letter_test.go
lhash/
pack
password.go
password_test.go
requirements.go
requirements_test.go
session-wire.go
session.go
signet.go
suite.go
suites.go
suites_test.go
suites_v1.go
suites_v2.go
supply
testtools.go
tools/
    all
    blake3
    ecdh
    gostdlib
.github/dependabot.yml (new file, vendored, 11 lines)
@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "gomod"
    directory: "/"
    schedule:
      interval: "daily"
.github/label-actions.yml (new file, vendored, 40 lines)
@@ -0,0 +1,40 @@
# Configuration for Label Actions - https://github.com/dessant/label-actions

community support:
  comment: |
    Hey @{issue-author}, thank you for raising this issue with us.

    After a first review we noticed that this does not seem to be a technical issue, but rather a configuration issue or general question about how Portmaster works.

    Thus, we invite the community to help with configuration and/or answering this questions.

    If you are in a hurry or haven't received an answer, a good place to ask is in [our Discord community](https://discord.gg/safing).

    If your problem or question has been resolved or answered, please come back and give an update here for other users encountering the same and then close this issue.

    If you are a paying subscriber and want this issue to be checked out by Safing, please send us a message [on Discord](https://discord.gg/safing) or [via Email](mailto:support@safing.io) with your username and the link to this issue, so we can prioritize accordingly.

needs debug info:
  comment: |
    Hey @{issue-author}, thank you for raising this issue with us.

    After a first review we noticed that we will require the Debug Info for further investigation. However, you haven't supplied any Debug Info in your report.

    Please [collect Debug Info](https://wiki.safing.io/en/FAQ/DebugInfo) from Portmaster _while_ the reported issue is present.

in/compatibility:
  comment: |
    Hey @{issue-author}, thank you for reporting on a compatibility.

    We keep a list of compatible software and user provided guides for improving compatibility [in the wiki - please have a look there](https://wiki.safing.io/en/Portmaster/App/Compatibility).
    If you can't find your software in the list, then a good starting point is our guide on [How do I make software compatible with Portmaster](https://wiki.safing.io/en/FAQ/MakeSoftwareCompatibleWithPortmaster).

    If you have managed to establish compatibility with an application, please share your findings here. This will greatly help other users encountering the same issues.

fixed:
  comment: |
    This issue has been fixed by the recently referenced commit or PR.

    However, the fix is not released yet.

    It is expected to go into the [Beta Release Channel](https://wiki.safing.io/en/FAQ/SwitchReleaseChannel) for testing within the next two weeks and will be available for everyone within the next four weeks. While this is the typical timeline we work with, things are subject to change.
.github/workflows/go.yml (new file, vendored, 55 lines)
@@ -0,0 +1,55 @@
name: Go

on:
  push:
    branches:
      - master
      - develop
  pull_request:
    branches:
      - master
      - develop

jobs:
  lint:
    name: Linter
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3

      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: '^1.19'

      - name: Get dependencies
        run: go mod download

      - name: Run golangci-lint
        uses: golangci/golangci-lint-action@v3
        with:
          version: v1.52.2
          only-new-issues: true
          args: -c ./.golangci.yml --timeout 15m

      - name: Run go vet
        run: go vet ./...

  test:
    name: Test
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3

      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: '^1.19'

      - name: Get dependencies
        run: go mod download

      - name: Run tests
        run: ./test --test-only
.github/workflows/issues-first-greet.yml (new file, vendored, 26 lines)
@@ -0,0 +1,26 @@
# This workflow responds to first time posters with a greeting message.
# Docs: https://github.com/actions/first-interaction
name: Greet New Users

# This workflow is triggered when a new issue is created.
on:
  issues:
    types: opened

permissions:
  contents: read
  issues: write

jobs:
  greet:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/first-interaction@v1
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          # Respond to first time issue raisers.
          issue-message: |
            Greetings and welcome to our community! As this is the first issue you opened here, we wanted to share some useful infos with you:

            - 🗣️ Our community on [Discord](https://discord.gg/safing) is super helpful and active. We also have an AI-enabled support bot that knows Portmaster well and can give you immediate help.
            - 📖 The [Wiki](https://wiki.safing.io/) answers all common questions and has many important details. If you can't find an answer there, let us know, so we can add anything that's missing.
.github/workflows/issues-label-actions.yml (new file, vendored, 22 lines)
@@ -0,0 +1,22 @@
# This workflow responds with a message when certain labels are added to an issue or PR.
# Docs: https://github.com/dessant/label-actions
name: Label Actions

# This workflow is triggered when a label is added to an issue.
on:
  issues:
    types: labeled

permissions:
  contents: read
  issues: write

jobs:
  action:
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/label-actions@v3
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          config-path: ".github/label-actions.yml"
          process-only: "issues"
.github/workflows/issues-stale.yml (new file, vendored, 42 lines)
@@ -0,0 +1,42 @@
# This workflow warns and then closes stale issues and PRs.
# Docs: https://github.com/actions/stale
name: Close Stale Issues

on:
  schedule:
    - cron: "17 5 * * 1-5" # run at 5:17 (UTC) on Monday to Friday
  workflow_dispatch:

permissions:
  contents: read
  issues: write

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          # Increase max operations.
          # When using GITHUB_TOKEN, the rate limit is 1,000 requests per hour per repository.
          operations-per-run: 500
          # Handle stale issues
          stale-issue-label: 'stale'
          # Exemptions
          exempt-all-issue-assignees: true
          exempt-issue-labels: 'support,dependencies,pinned,security'
          # Mark as stale
          days-before-issue-stale: 63 # 2 months / 9 weeks
          stale-issue-message: |
            This issue has been automatically marked as inactive because it has not had activity in the past two months.

            If no further activity occurs, this issue will be automatically closed in one week in order to increase our focus on active topics.
          # Close
          days-before-issue-close: 7 # 1 week
          close-issue-message: |
            This issue has been automatically closed because it has not had recent activity. Thank you for your contributions.

            If the issue has not been resolved, you can [find more information in our Wiki](https://wiki.safing.io/) or [continue the conversation on our Discord](https://discord.gg/safing).
          # TODO: Handle stale PRs
          days-before-pr-stale: 36500 # 100 years - effectively disabled.
.gitignore (vendored, 4 changes)
@@ -1,5 +1,7 @@
cpu.out
vendor
cmd/cmd*
cmd/jess*
dist

# Custom dev deps
go.mod.*
.golangci.yml (changed)
@@ -1,16 +1,72 @@
# Docs:
# https://golangci-lint.run/usage/linters/

linters:
  enable-all: true
  disable:
    - lll
    - gochecknoinits
    - gochecknoglobals
    - containedctx
    - contextcheck
    - cyclop
    - depguard
    - exhaustivestruct
    - exhaustruct
    - forbidigo
    - funlen
    - gochecknoglobals
    - gochecknoinits
    - gocognit
    - gocyclo
    - goerr113
    - gomnd
    - ifshort
    - interfacebloat
    - interfacer
    - ireturn
    - lll
    - musttag
    - nestif
    - nilnil
    - nlreturn
    - noctx
    - nolintlint
    - nonamedreturns
    - nosnakecase
    - revive
    - tagliatelle
    - testpackage
    - varnamelen
    - whitespace
    - wrapcheck
    - wsl

linters-settings:
  revive:
    # See https://github.com/mgechev/revive#available-rules for details.
    enable-all-rules: true
  gci:
    # put imports beginning with prefix after 3rd-party packages;
    # only support one prefix
    # if not set, use goimports.local-prefixes
    local-prefixes: github.com/safing
  godox:
    # report any comments starting with keywords, this is useful for TODO or FIXME comments that
    # might be left in the code accidentally and should be resolved before merging
    keywords:
      - FIXME
  gosec:
    # To specify a set of rules to explicitly exclude.
    # Available rules: https://github.com/securego/gosec#available-rules
    excludes:
      - G204 # Variables in commands.
      - G304 # Variables in file paths.
      - G505 # We need crypto/sha1 for non-security stuff. Using `nolint:` triggers another linter.

issues:
  exclude-use-default: false
  exclude-rules:
    - text: "a blank import .*"
      linters:
        - golint
    - text: "ST1000: at least one file in a package should have a package comment.*"
      linters:
        - stylecheck
Gopkg.lock (deleted, generated, 172 lines)
@@ -1,172 +0,0 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.


[[projects]]
  digest = "1:6e5a3c39b076935a83346f9e51e11ae3e791524f6fc92c18975d3c5399872fd7"
  name = "github.com/AlecAivazis/survey"
  packages = ["."]
  pruneopts = "UT"
  revision = "e4af3b345125b0903edb492a33a99a23e9eb3487"
  version = "v1.8.7"

[[projects]]
  branch = "master"
  digest = "1:d38cc62bf81b2247597596ee088042c400c40307e5a6bbeb9df4190c83e19a00"
  name = "github.com/aead/ecdh"
  packages = ["."]
  pruneopts = "UT"
  revision = "85c03e91d99ae3280de8341f9434bf4c733ddafb"

[[projects]]
  digest = "1:870d441fe217b8e689d7949fef6e43efbc787e50f200cb1e70dbca9204a1d6be"
  name = "github.com/inconshreveable/mousetrap"
  packages = ["."]
  pruneopts = "UT"
  revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75"
  version = "v1.0"

[[projects]]
  branch = "master"
  digest = "1:400e113a367b511b9b09ca642ee11d9885485a93838526d697033af334a2fdde"
  name = "github.com/kballard/go-shellquote"
  packages = ["."]
  pruneopts = "UT"
  revision = "95032a82bc518f77982ea72343cc1ade730072f0"

[[projects]]
  digest = "1:4a29eeb25603debe8f2098a9902c4d3851034cf70d33be428826e86e8c30a1b0"
  name = "github.com/mattn/go-colorable"
  packages = ["."]
  pruneopts = "UT"
  revision = "98ec13f34aabf44cc914c65a1cfb7b9bc815aef1"
  version = "v0.1.4"

[[projects]]
  digest = "1:29895093e370d9da5fd1c9ab12a5855ccb568f28766e0c3ff159c4b0f09aa127"
  name = "github.com/mattn/go-isatty"
  packages = ["."]
  pruneopts = "UT"
  revision = "31745d66dd679ac0ac4f8d3ecff168fce6170c6a"
  version = "v0.0.11"

[[projects]]
  branch = "master"
  digest = "1:2b32af4d2a529083275afc192d1067d8126b578c7a9613b26600e4df9c735155"
  name = "github.com/mgutz/ansi"
  packages = ["."]
  pruneopts = "UT"
  revision = "9520e82c474b0a04dd04f8a40959027271bab992"

[[projects]]
  digest = "1:3b7bb4dccaa604125b8392b33d69a4a477efb928644d534e3d4063d4b6892693"
  name = "github.com/safing/portbase"
  packages = [
    "container",
    "formats/dsd",
    "formats/varint",
    "info",
  ]
  pruneopts = "UT"
  revision = "a120990396939bbe3480f752d190c476b1d56c3a"
  version = "v0.4.1"

[[projects]]
  digest = "1:274f67cb6fed9588ea2521ecdac05a6d62a8c51c074c1fccc6a49a40ba80e925"
  name = "github.com/satori/go.uuid"
  packages = ["."]
  pruneopts = "UT"
  revision = "f58768cc1a7a7e77a3bd49e98cdd21419399b6a3"
  version = "v1.2.0"

[[projects]]
  digest = "1:e096613fb7cf34743d49af87d197663cfccd61876e2219853005a57baedfa562"
  name = "github.com/spf13/cobra"
  packages = ["."]
  pruneopts = "UT"
  revision = "f2b07da1e2c38d5f12845a4f607e2e1018cbb1f5"
  version = "v0.0.5"

[[projects]]
  digest = "1:524b71991fc7d9246cc7dc2d9e0886ccb97648091c63e30eef619e6862c955dd"
  name = "github.com/spf13/pflag"
  packages = ["."]
  pruneopts = "UT"
  revision = "2e9d26c8c37aae03e3f9d4e90b7116f5accb7cab"
  version = "v1.0.5"

[[projects]]
  branch = "master"
  digest = "1:93d6687fc19da8a35c7352d72117a6acd2072dfb7e9bfd65646227bf2a913b2a"
  name = "github.com/tevino/abool"
  packages = ["."]
  pruneopts = "UT"
  revision = "9b9efcf221b50905aab9bbabd3daed56dc10f339"

[[projects]]
  branch = "master"
  digest = "1:4df631634d7dc4496e9c51436cc2d3ccd0d0d4734640f63108950fe68b348b7e"
  name = "golang.org/x/crypto"
  packages = [
    "blake2b",
    "blake2s",
    "chacha20",
    "chacha20poly1305",
    "curve25519",
    "hkdf",
    "internal/subtle",
    "pbkdf2",
    "poly1305",
    "salsa20",
    "salsa20/salsa",
    "scrypt",
    "sha3",
  ]
  pruneopts = "UT"
  revision = "e1110fd1c708ef015366ea01799a23c459593c47"

[[projects]]
  branch = "master"
  digest = "1:2a7bb603cdfc77afb007e411395d21947ac9e8f9cc220ca0f6883027402bbdc3"
  name = "golang.org/x/sys"
  packages = [
    "cpu",
    "unix",
  ]
  pruneopts = "UT"
  revision = "4c7a9d0fe056d9d1de37e1409ca8a5c17accb46a"

[[projects]]
  digest = "1:a8136ab6079cc7a9de8d8665e6757b506bf2bb4c88014a4dc1e35eaaa449dfc0"
  name = "gopkg.in/AlecAivazis/survey.v1"
  packages = [
    "core",
    "terminal",
  ]
  pruneopts = "UT"
  revision = "e4af3b345125b0903edb492a33a99a23e9eb3487"
  version = "v1.8.7"

[solve-meta]
  analyzer-name = "dep"
  analyzer-version = 1
  input-imports = [
    "github.com/AlecAivazis/survey",
    "github.com/aead/ecdh",
    "github.com/safing/portbase/container",
    "github.com/safing/portbase/formats/dsd",
    "github.com/safing/portbase/info",
    "github.com/satori/go.uuid",
    "github.com/spf13/cobra",
    "github.com/tevino/abool",
    "golang.org/x/crypto/blake2b",
    "golang.org/x/crypto/blake2s",
    "golang.org/x/crypto/chacha20poly1305",
    "golang.org/x/crypto/hkdf",
    "golang.org/x/crypto/pbkdf2",
    "golang.org/x/crypto/poly1305",
    "golang.org/x/crypto/salsa20",
    "golang.org/x/crypto/scrypt",
    "golang.org/x/crypto/sha3",
  ]
  solver-name = "gps-cdcl"
  solver-version = 1
Gopkg.toml (deleted, 29 lines)
@@ -1,29 +0,0 @@
# Gopkg.toml example
#
# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
#   name = "github.com/user/project"
#   version = "1.0.0"
#
# [[constraint]]
#   name = "github.com/user/project2"
#   branch = "dev"
#   source = "github.com/myfork/project2"
#
# [[override]]
#   name = "github.com/x/y"
#   version = "2.4.0"
#
# [prune]
#   non-go = false
#   go-tests = true
#   unused-packages = true

[prune]
  go-tests = true
  unused-packages = true
README.md (18 lines changed)
@@ -1,3 +1,5 @@
> **Check out our main project at [safing/portmaster](https://github.com/safing/portmaster)**

# Jess

Jess is a cryptographic library and cli tool that focuses on usability and freedom.

@@ -111,6 +113,22 @@ Some of these properties may also be used multiple times. For example, you could

Should any of these properties _not_ be required, the user has to intentionally remove requirements.

### Recommended Suites

In order to reduce the possibility of making unsuggested combinations of tools, the primary interface to choose tools is to use a suite:

The command `jess list` shows the available suites:

```
Name/ID  Provides  Security Level  Tools                                                                     Notes
key_v1   CIRS      128 b/s         HKDF(BLAKE2b-256), CHACHA20-POLY1305                                      recommended
pw_v1    CIRS      128 b/s         SCRYPT-20, HKDF(BLAKE2b-256), CHACHA20-POLY1305                           recommended
rcpt_v1  CIR       128 b/s         ECDH-X25519, HKDF(BLAKE2b-256), CHACHA20-POLY1305                         recommended
sign_v1  S         128 b/s         Ed25519(BLAKE2b-256)                                                      recommended
v1       CIRS      128 b/s         ECDH-X25519, Ed25519(BLAKE2b-256), HKDF(BLAKE2b-256), CHACHA20-POLY1305   recommended
w1       CIR       128 b/s         ECDH-X25519, HKDF(BLAKE2b-256), CHACHA20-POLY1305                         recommended
```

### Specification

There is some more detail in [SPEC.md](./SPEC.md).
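As a rough illustration of how these properties and suites surface in the library API, here is a minimal, hypothetical Go sketch. It only uses identifiers that appear elsewhere in this diff (`jess.Envelope` with its `SuiteID` field, the `jess.SuiteSign` constant, `jess.NewRequirements` and its `Remove` method); constructing the envelope via a struct literal and the surrounding program are assumptions, not taken from the repository.

```go
package main

import (
	"fmt"

	"github.com/safing/jess"
)

func main() {
	// Hypothetical sketch: an envelope that only signs, mirroring the
	// "Sign a file (wrapped)" preset added in cmd/cfg-envelope.go below.
	envelope := &jess.Envelope{
		SuiteID: jess.SuiteSign, // suite constant referenced in this diff
	}

	// Requirements start with all properties; dropping one must be done
	// explicitly, as the README states.
	reqs := jess.NewRequirements().
		Remove(jess.RecipientAuthentication)

	fmt.Println(envelope.SuiteID, reqs)
}
```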
cmd/build (66 lines changed)
@@ -1,55 +1,29 @@
#!/bin/bash
set -eo pipefail

baseDir="$( cd "$(dirname "$0")" && pwd )"
cd "$baseDir"

# get build data
if [[ "$BUILD_COMMIT" == "" ]]; then
  BUILD_COMMIT=$(git describe --all --long --abbrev=99 --dirty 2>/dev/null)
fi
if [[ "$BUILD_USER" == "" ]]; then
  BUILD_USER=$(id -un)
fi
if [[ "$BUILD_HOST" == "" ]]; then
  BUILD_HOST=$(hostname)
fi
if [[ "$BUILD_DATE" == "" ]]; then
  BUILD_DATE=$(date +%d.%m.%Y)
fi
if [[ "$BUILD_SOURCE" == "" ]]; then
  BUILD_SOURCE=$(git remote -v | grep origin | cut -f2 | cut -d" " -f1 | head -n 1)
fi
if [[ "$BUILD_SOURCE" == "" ]]; then
  BUILD_SOURCE=$(git remote -v | cut -f2 | cut -d" " -f1 | head -n 1)
fi
BUILD_BUILDOPTIONS=$(echo $* | sed "s/ /§/g")

# check
if [[ "$BUILD_COMMIT" == "" ]]; then
  echo "could not automatically determine BUILD_COMMIT, please supply manually as environment variable."
  exit 1
fi
if [[ "$BUILD_USER" == "" ]]; then
  echo "could not automatically determine BUILD_USER, please supply manually as environment variable."
  exit 1
fi
if [[ "$BUILD_HOST" == "" ]]; then
  echo "could not automatically determine BUILD_HOST, please supply manually as environment variable."
  exit 1
fi
if [[ "$BUILD_DATE" == "" ]]; then
  echo "could not automatically determine BUILD_DATE, please supply manually as environment variable."
  exit 1
fi
if [[ "$BUILD_SOURCE" == "" ]]; then
  echo "could not automatically determine BUILD_SOURCE, please supply manually as environment variable."
  exit 1
fi

echo "Please notice, that this build script includes metadata into the build."
echo "This information is useful for debugging and license compliance."
echo "Run the compiled binary with the version command to see the information included."

# build
BUILD_PATH="github.com/safing/jess/vendor/github.com/safing/portbase/info"
go build -ldflags "-X ${BUILD_PATH}.commit=${BUILD_COMMIT} -X ${BUILD_PATH}.buildOptions=${BUILD_BUILDOPTIONS} -X ${BUILD_PATH}.buildUser=${BUILD_USER} -X ${BUILD_PATH}.buildHost=${BUILD_HOST} -X ${BUILD_PATH}.buildDate=${BUILD_DATE} -X ${BUILD_PATH}.buildSource=${BUILD_SOURCE}" $@
# Get version.
VERSION="$(git tag --points-at)" || true
test -z "$VERSION" && DEV_VERSION="$(git describe --tags --first-parent --abbrev=0)" || true
test -n "$DEV_VERSION" && VERSION="${DEV_VERSION}_dev_build"
test -z "$VERSION" && VERSION="dev_build"
BUILD_SOURCE=$( ( git remote -v | cut -f2 | cut -d" " -f1 | head -n 1 ) || echo "unknown" )
BUILD_TIME=$(date -u "+%Y-%m-%dT%H:%M:%SZ" || echo "unknown")

LDFLAGS="-X main.Version=${VERSION} -X main.BuildSource=${BUILD_SOURCE} -X main.BuildTime=${BUILD_TIME}"

# build output name
BIN_NAME="jess"
if [[ "$GOOS" == "windows" ]]; then
  BIN_NAME="${BIN_NAME}.exe"
fi

# Build.
export CGO_ENABLED=0
go build -o "${BIN_NAME}" -ldflags "$LDFLAGS" "$@"
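The rewritten script now injects `Version`, `BuildSource`, and `BuildTime` via `-ldflags "-X main.…"` instead of routing them through the portbase info package. For that to work, the `main` package has to declare matching string variables; a minimal sketch of what such declarations could look like (the variable names come from the `LDFLAGS` line above, the defaults and the print helper are assumptions):

```go
package main

import "fmt"

// Placeholders overwritten at build time by:
//   go build -ldflags "-X main.Version=... -X main.BuildSource=... -X main.BuildTime=..."
var (
	Version     = "dev_build"
	BuildSource = "unknown"
	BuildTime   = "unknown"
)

func main() {
	fmt.Printf("jess %s (built from %s at %s)\n", Version, BuildSource, BuildTime)
}
```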
cmd/cfg-envelope.go (changed)
@@ -5,7 +5,8 @@ import (
    "fmt"
    "strings"

    "github.com/AlecAivazis/survey"
    "github.com/AlecAivazis/survey/v2"

    "github.com/safing/jess"
)

@@ -29,7 +30,8 @@ func newEnvelope(name string) (*jess.Envelope, error) {
    "Encrypt with key",
    "Encrypt for someone and sign",
    "Encrypt for someone but don't sign",
    "Sign a file",
    "Sign a file (wrapped)",
    "Sign a file (separate sig)",
    },
    }
    err := survey.AskOne(prompt, &preset, nil)

@@ -53,9 +55,12 @@ func newEnvelope(name string) (*jess.Envelope, error) {
    case "Encrypt for someone but don't sign":
        envelope.SuiteID = jess.SuiteRcptOnly
        err = selectSignets(envelope, "recipient")
    case "Sign a file":
    case "Sign a file (wrapped)":
        envelope.SuiteID = jess.SuiteSign
        err = selectSignets(envelope, "sender")
    case "Sign a file (separate sig)":
        envelope.SuiteID = jess.SuiteSignFile
        err = selectSignets(envelope, "sender")
    }
    if err != nil {
        return nil, err

@@ -92,6 +97,7 @@ func editEnvelope(envelope *jess.Envelope) error {
    {"Recipients", formatSignetNames(envelope.Recipients)},
    {"Senders", formatSignetNames(envelope.Senders)},
    {""},
    {"Export", "export to text format"},
    {"Abort", "discard changes and return"},
    {"Delete", "delete and return"},
    }),

@@ -104,8 +110,28 @@ func editEnvelope(envelope *jess.Envelope) error {

    switch {
    case strings.HasPrefix(submenu, "Done"):
        // save
        // Check if the envelope is valid.
        if envelope.SecurityLevel == 0 {
            fmt.Println("Envelope is invalid, please fix before saving.")
            continue
        }
        // Remove and keys and save.
        envelope.CleanSignets()
        return trustStore.StoreEnvelope(envelope)
    case strings.HasPrefix(submenu, "Export"):
        // Check if the envelope is valid.
        if envelope.SecurityLevel == 0 {
            fmt.Println("Envelope is invalid, please fix before exporting.")
            continue
        }
        // Remove keys and export.
        envelope.CleanSignets()
        text, err := envelope.Export(false)
        if err != nil {
            return fmt.Errorf("failed to export: %w", err)
        }
        fmt.Println("Exported envelope:")
        fmt.Println(text)
    case strings.HasPrefix(submenu, "Abort"):
        return nil
    case strings.HasPrefix(submenu, "Delete"):
cmd/cfg-signet.go (changed)
@@ -5,20 +5,21 @@ import (
    "strings"
    "time"

    "github.com/AlecAivazis/survey"
    "github.com/AlecAivazis/survey/v2"

    "github.com/safing/jess"
    "github.com/safing/jess/tools"
)

//nolint:gocognit
func newSignet(name, scheme string) (*jess.Signet, error) {
func newSignet(name, scheme string, saveToTrustStore bool) (*jess.Signet, error) {
    // get name
    name = strings.TrimSpace(name)
    if name == "" {
        enterName := &survey.Input{
            Message: "Enter name of signet:",
        }
        err := survey.AskOne(enterName, &name, survey.MinLength(1))
        err := survey.AskOne(enterName, &name, survey.WithValidator(survey.MinLength(1)))
        if err != nil {
            return nil, err
        }

@@ -109,28 +110,30 @@ func newSignet(name, scheme string) (*jess.Signet, error) {
    Created: time.Now(),
    }

    // write signet
    err = trustStore.StoreSignet(signet)
    if err != nil {
        return nil, err
    }
    if saveToTrustStore {
        // write signet
        err = trustStore.StoreSignet(signet)
        if err != nil {
            return nil, err
        }

        // export as recipient
        switch scheme {
        case jess.SignetSchemePassword, jess.SignetSchemeKey:
            // is secret, no recipient
        default:
            rcpt, err := signet.AsRecipient()
            if err != nil {
                return nil, err
            }
            err = rcpt.StoreKey()
            if err != nil {
                return nil, err
            }
            err = trustStore.StoreSignet(rcpt)
            if err != nil {
                return nil, err
        // export as recipient
        switch scheme {
        case jess.SignetSchemePassword, jess.SignetSchemeKey:
            // is secret, no recipient
        default:
            rcpt, err := signet.AsRecipient()
            if err != nil {
                return nil, err
            }
            err = rcpt.StoreKey()
            if err != nil {
                return nil, err
            }
            err = trustStore.StoreSignet(rcpt)
            if err != nil {
                return nil, err
            }
        }
    }
|
@ -4,11 +4,10 @@ import (
|
|||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
|
||||
"github.com/safing/jess/hashtools"
|
||||
|
||||
"github.com/safing/jess/tools"
|
||||
|
||||
"github.com/AlecAivazis/survey"
|
||||
)
|
||||
|
||||
func pickTools(toolNames []string, promptMsg string) ([]string, error) { //nolint:unused,deadcode // TODO
|
||||
|
|
cmd/cmd-checksum.go (new file, 111 lines)
@@ -0,0 +1,111 @@
package main

import (
    "errors"
    "fmt"
    "os"
    "path/filepath"

    "github.com/spf13/cobra"

    "github.com/safing/jess/filesig"
)

func init() {
    rootCmd.AddCommand(checksum)
    checksum.AddCommand(checksumAdd)
    checksum.AddCommand(checksumVerify)
}

var (
    checksum = &cobra.Command{
        Use:   "checksum",
        Short: "add or verify embedded checksums",
    }

    checksumAddUsage = "usage: checksum add <file>"
    checksumAdd = &cobra.Command{
        Use:   "add <file>",
        Short: "add an embedded checksum to a file",
        Long:  "add an embedded checksum to a file (support file types: txt, json, yaml)",
        RunE:  handleChecksumAdd,
    }

    checksumVerifyUsage = "usage: checksum verify <file>"
    checksumVerify = &cobra.Command{
        Use:   "verify <file>",
        Short: "verify the embedded checksum of a file",
        Long:  "verify the embedded checksum of a file (support file types: txt, json, yaml)",
        RunE:  handleChecksumVerify,
    }
)

func handleChecksumAdd(cmd *cobra.Command, args []string) error {
    // Check args.
    if len(args) != 1 {
        return errors.New(checksumAddUsage)
    }
    filename := args[0]

    data, err := os.ReadFile(filename)
    if err != nil {
        return fmt.Errorf("failed to read file: %w", err)
    }

    switch filepath.Ext(filename) {
    case ".json":
        data, err = filesig.AddJSONChecksum(data)
    case ".yml", ".yaml":
        data, err = filesig.AddYAMLChecksum(data, filesig.TextPlacementAfterComment)
    case ".txt":
        data, err = filesig.AddTextFileChecksum(data, "#", filesig.TextPlacementAfterComment)
    default:
        return errors.New("unsupported file format")
    }
    if err != nil {
        return err
    }

    // Write back to disk.
    fileInfo, err := os.Stat(filename)
    if err != nil {
        return fmt.Errorf("failed to stat file: %w", err)
    }
    err = os.WriteFile(filename, data, fileInfo.Mode().Perm())
    if err != nil {
        return fmt.Errorf("failed to write back file with checksum: %w", err)
    }

    fmt.Println("checksum added")
    return nil
}

func handleChecksumVerify(cmd *cobra.Command, args []string) error {
    // Check args.
    if len(args) != 1 {
        return errors.New(checksumVerifyUsage)
    }
    filename := args[0]

    data, err := os.ReadFile(filename)
    if err != nil {
        return fmt.Errorf("failed to read file: %w", err)
    }

    switch filepath.Ext(filename) {
    case ".json":
        err = filesig.VerifyJSONChecksum(data)
    case ".yml", ".yaml":
        err = filesig.VerifyYAMLChecksum(data)
    case ".txt":
        err = filesig.VerifyTextFileChecksum(data, "#")
    default:
        return errors.New("unsupported file format")
    }
    if err != nil {
        return err
    }

    fmt.Println("checksum verified")
    return nil
}
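The checksum command is a thin wrapper around the filesig helpers this PR also adds. A small, hypothetical usage sketch of those helpers outside the CLI; only `AddTextFileChecksum`, `TextPlacementAfterComment`, and `VerifyTextFileChecksum` are taken from this diff, the file name and surrounding program are made up:

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/safing/jess/filesig"
)

func main() {
	data, err := os.ReadFile("notes.txt") // example file, not from the repo
	if err != nil {
		log.Fatal(err)
	}

	// Embed a checksum after the leading comment block, as the CLI does for .txt files.
	withSum, err := filesig.AddTextFileChecksum(data, "#", filesig.TextPlacementAfterComment)
	if err != nil {
		log.Fatal(err)
	}
	if err := os.WriteFile("notes.txt", withSum, 0o600); err != nil {
		log.Fatal(err)
	}

	// Later: check that the embedded checksum still matches the content.
	if err := filesig.VerifyTextFileChecksum(withSum, "#"); err != nil {
		log.Fatal(err)
	}
	fmt.Println("checksum verified")
}
```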
cmd/cmd-close.go (changed)
@@ -3,7 +3,8 @@ package main
import (
    "errors"
    "fmt"
    "io/ioutil"
    "io"
    "io/fs"
    "os"
    "strings"

@@ -12,7 +13,7 @@ import (

func init() {
    rootCmd.AddCommand(closeCmd)
    closeCmd.Flags().StringVarP(&closeFlagOutput, "output", "o", "", "specify output file (`-` for stdout")
    closeCmd.Flags().StringVarP(&closeFlagOutput, "output", "o", "", "specify output file (`-` for stdout)")
}

var (

@@ -49,10 +50,10 @@ var (
    filename := args[0]
    outputFilename := closeFlagOutput
    if outputFilename == "" {
        if strings.HasSuffix(filename, ".letter") {
        if strings.HasSuffix(filename, letterFileExtension) {
            return errors.New("cannot automatically derive output filename, please specify with --output")
        }
        outputFilename = filename + ".letter"
        outputFilename = filename + letterFileExtension
    }
    // check input file
    if filename != "-" {

@@ -81,17 +82,17 @@ var (
    if !confirmed {
        return nil
    }
    } else if !os.IsNotExist(err) {
        return fmt.Errorf("failed to access output file: %s", err)
    } else if !errors.Is(err, fs.ErrNotExist) {
        return fmt.Errorf("failed to access output file: %w", err)
    }
    }

    // load file
    var data []byte
    if filename == "-" {
        data, err = ioutil.ReadAll(os.Stdin)
        data, err = io.ReadAll(os.Stdin)
    } else {
        data, err = ioutil.ReadFile(filename)
        data, err = os.ReadFile(filename)
    }
    if err != nil {
        return err

@@ -114,7 +115,11 @@ var (
    if outputFilename == "-" {
        file = os.Stdout
    } else {
        file, err = os.OpenFile(outputFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
        file, err = os.OpenFile(
            outputFilename,
            os.O_WRONLY|os.O_CREATE|os.O_TRUNC,
            0o0600,
        )
        if err != nil {
            return err
        }

@@ -123,7 +128,7 @@ var (
    // write
    err = c.WriteAllTo(file)
    if err != nil {
        file.Close()
        _ = file.Close()
        return err
    }
    return file.Close()
cmd/cmd-configure.go (changed)
@@ -3,52 +3,49 @@ package main
import (
    "errors"

    "github.com/safing/jess/truststores"
    "github.com/spf13/cobra"

    "github.com/safing/jess"

    "github.com/spf13/cobra"
    "github.com/safing/jess/truststores"
)

func init() {
    rootCmd.AddCommand(configureCmd)
}

var (
    configureCmd = &cobra.Command{
        Use:                   "configure <envelope name>",
        Short:                 "configure (and create) envelope",
        DisableFlagsInUseLine: true,
        Args:                  cobra.MaximumNArgs(1),
        PreRunE:               requireTrustStore,
        RunE: func(cmd *cobra.Command, args []string) (err error) {
            // check envelope name existence
            if len(args) == 0 {
                return errors.New("please specify an envelope name")
            }
            envelopeName := args[0]
var configureCmd = &cobra.Command{
    Use:                   "configure <envelope name>",
    Short:                 "configure (and create) envelope",
    DisableFlagsInUseLine: true,
    Args:                  cobra.MaximumNArgs(1),
    PreRunE:               requireTrustStore,
    RunE: func(cmd *cobra.Command, args []string) (err error) {
        // check envelope name existence
        if len(args) == 0 {
            return errors.New("please specify an envelope name")
        }
        envelopeName := args[0]

            // check envelope name
            if !truststores.NamePlaysNiceWithFS(envelopeName) {
                return errors.New("please only use alphanumeric characters and `- ._+@` for best compatibility with various systems")
            }
        // check envelope name
        if !truststores.NamePlaysNiceWithFS(envelopeName) {
            return errors.New("please only use alphanumeric characters and `- ._+@` for best compatibility with various systems")
        }

            // get envelope from trust store
            envelope, err := trustStore.GetEnvelope(envelopeName)
            if err != nil && err != jess.ErrEnvelopeNotFound {
        // get envelope from trust store
        envelope, err := trustStore.GetEnvelope(envelopeName)
        if err != nil && !errors.Is(err, jess.ErrEnvelopeNotFound) {
            return err
        }

            // create
            if envelope == nil {
                envelope, err = newEnvelope(envelopeName)
                if err != nil {
                    return err
                }
            }

        // create
        if envelope == nil {
            envelope, err = newEnvelope(envelopeName)
            if err != nil {
                return err
            }
        }

            // edit (and save)
            return editEnvelope(envelope)
        },
    }
)
        // edit (and save)
        return editEnvelope(envelope)
    },
}
cmd/cmd-generate.go (changed)
@@ -1,6 +1,8 @@
package main

import (
    "fmt"

    "github.com/spf13/cobra"
)

@@ -8,11 +10,13 @@ func init() {
    rootCmd.AddCommand(generateCmd)
    generateCmd.Flags().StringVarP(&generateFlagName, "name", "l", "", "specify signet name/label")
    generateCmd.Flags().StringVarP(&generateFlagScheme, "scheme", "t", "", "specify signet scheme/tool")
    generateCmd.Flags().BoolVarP(&generateFlagTextOnly, "textonly", "", false, "do not save to trust store and only output directly as text")
}

var (
    generateFlagName string
    generateFlagScheme string
    generateFlagName     string
    generateFlagScheme   string
    generateFlagTextOnly bool

    generateCmd = &cobra.Command{
        Use: "generate",

@@ -21,8 +25,43 @@ var (
        Args:    cobra.NoArgs,
        PreRunE: requireTrustStore,
        RunE: func(cmd *cobra.Command, args []string) error {
            _, err := newSignet(generateFlagName, generateFlagScheme)
            return err
            // Generate new signet
            signet, err := newSignet(generateFlagName, generateFlagScheme, !generateFlagTextOnly)
            if err != nil {
                return err
            }

            // Output as text if not saved to trust store.
            if generateFlagTextOnly {
                // Make text backup.
                backup, err := signet.Backup(false)
                if err != nil {
                    return err
                }

                // Convert to recipient and serialize key.
                rcpt, err := signet.AsRecipient()
                if err != nil {
                    return err
                }
                err = rcpt.StoreKey()
                if err != nil {
                    return err
                }

                // Make text export.
                export, err := rcpt.Export(false)
                if err != nil {
                    return err
                }

                // Write output.
                fmt.Printf("Generated %s key with ID %s and name %q\n", signet.Scheme, signet.ID, signet.Info.Name)
                fmt.Printf("Backup (private key): %s\n", backup)
                fmt.Printf("Export (public key): %s\n", export)
            }

            return nil
        },
    }
)
cmd/cmd-import-export.go (new file, 170 lines)
@@ -0,0 +1,170 @@
package main

import (
    "errors"
    "fmt"
    "strings"

    "github.com/spf13/cobra"

    "github.com/safing/jess"
)

func init() {
    rootCmd.AddCommand(exportCmd)
    rootCmd.AddCommand(backupCmd)
    rootCmd.AddCommand(importCmd)
}

var (
    exportCmdHelp = "usage: export <id>"
    exportCmd = &cobra.Command{
        Use:   "export <id>",
        Short: "export a signet or envelope",
        Long:  "export a signet (as a recipient - the public key only) or an envelope (configuration)",
        RunE:  handleExport,
    }

    backupCmdHelp = "usage: backup <id"
    backupCmd = &cobra.Command{
        Use:   "backup <id>",
        Short: "backup a signet",
        Long:  "backup a signet (the private key - do not share!)",
        RunE:  handleBackup,
    }

    importCmdHelp = "usage: import <text>"
    importCmd = &cobra.Command{
        Use:   "import <text>",
        Short: "import a signet or an enveleope",
        Long:  "import a signet (any kind) or an enveleope",
        RunE:  handleImport,
    }
)

func handleExport(cmd *cobra.Command, args []string) error {
    // Check args.
    if len(args) != 1 {
        return errors.New(exportCmdHelp)
    }
    id := args[0]

    // Get Recipient.
    recipient, err := trustStore.GetSignet(id, true)
    if err == nil {
        text, err := recipient.Export(false)
        if err != nil {
            return fmt.Errorf("failed to export recipient %s: %w", id, err)
        }
        fmt.Println(text)
        return nil
    }

    // Check if there is a signet instead.
    signet, err := trustStore.GetSignet(id, false)
    if err == nil {
        recipient, err := signet.AsRecipient()
        if err != nil {
            return fmt.Errorf("failed convert signet %s to recipient for export: %w", id, err)
        }
        text, err := recipient.Export(false)
        if err != nil {
            return fmt.Errorf("failed to export recipient %s: %w", id, err)
        }
        fmt.Println(text)
        return nil
    }

    // Check for an envelope.
    env, err := trustStore.GetEnvelope(id)
    if err == nil {
        text, err := env.Export(false)
        if err != nil {
            return fmt.Errorf("failed to export envelope %s: %w", id, err)
        }
        fmt.Println(text)
        return nil
    }

    return errors.New("no recipient or envelope found with the given ID")
}

func handleBackup(cmd *cobra.Command, args []string) error {
    // Check args.
    if len(args) != 1 {
        return errors.New(backupCmdHelp)
    }
    id := args[0]

    // Check if there is a signet instead.
    signet, err := trustStore.GetSignet(id, false)
    if err != nil {
        text, err := signet.Backup(false)
        if err != nil {
            return fmt.Errorf("failed to backup signet %s: %w", id, err)
        }
        fmt.Println(text)
        return nil
    }

    return errors.New("no signet found with the given ID")
}

func handleImport(cmd *cobra.Command, args []string) error {
    // Check args.
    if len(args) != 1 {
        return errors.New(importCmdHelp)
    }
    text := args[0]

    // First, check if it's an envelope.
    if strings.HasPrefix(text, jess.ExportEnvelopePrefix) {
        env, err := jess.EnvelopeFromTextFormat(text)
        if err != nil {
            return fmt.Errorf("failed to parse envelope: %w", err)
        }
        err = trustStore.StoreEnvelope(env)
        if err != nil {
            return fmt.Errorf("failed to import envelope into trust store: %w", err)
        }
        fmt.Printf("imported envelope %q intro trust store\n", env.Name)
        return nil
    }

    // Then handle all signet types together.
    var (
        signetType string
        parseFunc  func(textFormat string) (*jess.Signet, error)
    )
    switch {
    case strings.HasPrefix(text, jess.ExportSenderPrefix):
        signetType = jess.ExportSenderKeyword
        parseFunc = jess.SenderFromTextFormat
    case strings.HasPrefix(text, jess.ExportRecipientPrefix):
        signetType = jess.ExportRecipientKeyword
        parseFunc = jess.RecipientFromTextFormat
    case strings.HasPrefix(text, jess.ExportKeyPrefix):
        signetType = jess.ExportKeyKeyword
        parseFunc = jess.KeyFromTextFormat
    default:
        return fmt.Errorf(
            "invalid format or unknown type, expected one of %s, %s, %s, %s",
            jess.ExportKeyKeyword,
            jess.ExportSenderKeyword,
            jess.ExportRecipientKeyword,
            jess.ExportEnvelopeKeyword,
        )
    }
    // Parse and import
    signet, err := parseFunc(text)
    if err != nil {
        return fmt.Errorf("failed to parse %s: %w", signetType, err)
    }
    err = trustStore.StoreSignet(signet)
    if err != nil {
        return fmt.Errorf("failed to import %s into trust store: %w", signetType, err)
    }
    fmt.Printf("imported %s %s intro trust store\n", signetType, signet.ID)

    return nil
}
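Import and export are text-format round trips. A hypothetical sketch of importing an exported recipient with the same calls the command uses (`jess.RecipientFromTextFormat` and `StoreSignet`); the trust store type is an assumption based on how `trustStore` is used here, and the example text is a placeholder rather than a real export:

```go
package main

import (
	"fmt"

	"github.com/safing/jess"
)

// importRecipient parses a text-format recipient, as handleImport does for
// jess.ExportRecipientPrefix, and stores it. The ts parameter is assumed to
// be the trust store interface the cmd code refers to as `trustStore`.
func importRecipient(text string, ts jess.TrustStore) error {
	recipient, err := jess.RecipientFromTextFormat(text)
	if err != nil {
		return fmt.Errorf("failed to parse recipient: %w", err)
	}
	return ts.StoreSignet(recipient)
}
```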
cmd/cmd-manage.go (changed)
@@ -5,9 +5,10 @@ import (
    "fmt"
    "strings"

    "github.com/AlecAivazis/survey"
    "github.com/safing/jess"
    "github.com/AlecAivazis/survey/v2"
    "github.com/spf13/cobra"

    "github.com/safing/jess"
)

const (

@@ -94,7 +95,7 @@ func manageSignets() error {
    case "Delete":
        err = trustStore.DeleteSignet(selectedSignet.ID, selectedSignet.Public)
        if err != nil {
            return nil
            return err
        }
    case "Back to list":
        continue
cmd/cmd-open.go (changed)
@@ -4,15 +4,14 @@ import (
    "errors"
    "fmt"
    "io"
    "io/ioutil"
    "io/fs"
    "os"
    "strings"

    "github.com/safing/portbase/container"
    "github.com/spf13/cobra"

    "github.com/safing/jess"

    "github.com/spf13/cobra"
    "github.com/safing/structures/container"
)

func init() {

@@ -72,17 +71,17 @@ var (
    if !confirmed {
        return nil
    }
    } else if !os.IsNotExist(err) {
        return fmt.Errorf("failed to access output file: %s", err)
    } else if !errors.Is(err, fs.ErrNotExist) {
        return fmt.Errorf("failed to access output file: %w", err)
    }
    }

    // load file
    var data []byte
    if filename == "-" {
        data, err = ioutil.ReadAll(os.Stdin)
        data, err = io.ReadAll(os.Stdin)
    } else {
        data, err = ioutil.ReadFile(filename)
        data, err = os.ReadFile(filename)
    }
    if err != nil {
        return err

@@ -94,6 +93,11 @@ var (
    return err
    }

    // Create default requirements if not set.
    if requirements == nil {
        requirements = jess.NewRequirements()
    }

    // decrypt (and verify)
    plainText, err := letter.Open(requirements, trustStore)
    if err != nil {

@@ -105,7 +109,11 @@ var (
    if outputFilename == "-" {
        file = os.Stdout
    } else {
        file, err = os.OpenFile(outputFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
        file, err = os.OpenFile(
            outputFilename,
            os.O_WRONLY|os.O_CREATE|os.O_TRUNC,
            0o0600,
        )
        if err != nil {
            return err
        }

@@ -114,11 +122,11 @@ var (
    // write
    n, err := file.Write(plainText)
    if err != nil {
        file.Close()
        _ = file.Close()
        return err
    }
    if n < len(plainText) {
        file.Close()
        _ = file.Close()
        return io.ErrShortWrite
    }
    return file.Close()
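Opening a letter programmatically follows the same calls the command uses. A hedged sketch built only from functions visible in this diff (`jess.LetterFromFileFormat`, `container.New`, `jess.NewRequirements`, `Letter.Open`); how the trust store is obtained is left open and assumed to satisfy `jess.TrustStore`:

```go
package main

import (
	"github.com/safing/jess"
	"github.com/safing/structures/container"
)

// openLetter parses a serialized letter and decrypts (and verifies) it,
// mirroring cmd-open.go. The ts argument stands in for the trust store the
// command keeps in the package-level `trustStore` variable.
func openLetter(raw []byte, ts jess.TrustStore) ([]byte, error) {
	letter, err := jess.LetterFromFileFormat(container.New(raw))
	if err != nil {
		return nil, err
	}
	// Default requirements, as cmd-open.go now creates when none are set.
	reqs := jess.NewRequirements()
	return letter.Open(reqs, ts)
}
```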
cmd/cmd-sign.go (new file, 62 lines)
@@ -0,0 +1,62 @@
package main

import (
    "errors"
    "fmt"
    "strings"

    "github.com/spf13/cobra"

    "github.com/safing/jess/filesig"
)

func init() {
    rootCmd.AddCommand(signCmd)
    signCmd.Flags().StringVarP(&closeFlagOutput, "output", "o", "", "specify output file (`-` for stdout")
    signCmd.Flags().StringToStringVarP(&metaDataFlag, "metadata", "m", nil, "specify file metadata to sign")
}

var (
    metaDataFlag map[string]string
    signCmdHelp  = "usage: jess sign <file> with <envelope name>"

    signCmd = &cobra.Command{
        Use:                   "sign <file> with <envelope name>",
        Short:                 "sign file",
        Long:                  "sign file with the given envelope. Use `-` to use stdin",
        DisableFlagsInUseLine: true,
        PreRunE:               requireTrustStore,
        RunE: func(cmd *cobra.Command, args []string) error {
            registerPasswordCallbacks()

            // check args
            if len(args) != 3 || args[1] != "with" {
                return errors.New(signCmdHelp)
            }

            // get envelope
            envelope, err := trustStore.GetEnvelope(args[2])
            if err != nil {
                return err
            }

            // check filenames
            filename := args[0]
            outputFilename := closeFlagOutput
            if outputFilename == "" {
                if strings.HasSuffix(filename, filesig.Extension) {
                    return errors.New("cannot automatically derive output filename, please specify with --output")
                }
                outputFilename = filename + filesig.Extension
            }

            fd, err := filesig.SignFile(filename, outputFilename, metaDataFlag, envelope, trustStore)
            if err != nil {
                return err
            }

            fmt.Print(formatSignatures(filename, outputFilename, []*filesig.FileData{fd}))
            return nil
        },
    }
)
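A corresponding library-level sketch of detached signing, using only `filesig.SignFile` and `filesig.Extension` as invoked above; the file paths and metadata are made up, and the envelope and trust store are assumed to come from elsewhere:

```go
package main

import (
	"github.com/safing/jess"
	"github.com/safing/jess/filesig"
)

// signRelease signs a hypothetical dist/jess.tgz into dist/jess.tgz.sig,
// attaching example metadata, exactly as the sign command does above.
func signRelease(envelope *jess.Envelope, ts jess.TrustStore) error {
	metadata := map[string]string{"channel": "beta"} // illustrative only
	_, err := filesig.SignFile(
		"dist/jess.tgz",
		"dist/jess.tgz"+filesig.Extension,
		metadata,
		envelope,
		ts,
	)
	return err
}
```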
@ -3,86 +3,260 @@ package main
|
|||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"io"
|
||||
"io/fs"
|
||||
"os"
|
||||
|
||||
"github.com/safing/portbase/container"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/jess/filesig"
|
||||
"github.com/safing/structures/container"
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(verifyCmd)
|
||||
verifyCmd.Flags().StringToStringVarP(&metaDataFlag, "metadata", "m", nil, "specify file metadata to verify (.sig only)")
|
||||
}
|
||||
|
||||
var (
|
||||
verifyCmdHelp = "usage: jess verify <file>"
|
||||
var verifyCmd = &cobra.Command{
|
||||
Use: "verify <files and directories>",
|
||||
Short: "verify signed files and files in directories",
|
||||
DisableFlagsInUseLine: true,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
PreRunE: requireTrustStore,
|
||||
RunE: func(cmd *cobra.Command, args []string) (err error) {
|
||||
var verificationFails, verificationWarnings int
|
||||
|
||||
verifyCmd = &cobra.Command{
|
||||
Use: "verify <file>",
|
||||
Short: "verify file",
|
||||
DisableFlagsInUseLine: true,
|
||||
RunE: func(cmd *cobra.Command, args []string) (err error) {
|
||||
// check args
|
||||
if len(args) != 1 {
|
||||
return errors.New(verifyCmdHelp)
|
||||
// Check if we are only verifying a single file.
|
||||
if len(args) == 1 {
|
||||
matches, err := filepath.Glob(args[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// check filenames
|
||||
filename := args[0]
|
||||
// check input file
|
||||
if filename != "-" {
|
||||
fileInfo, err := os.Stat(filename)
|
||||
switch len(matches) {
|
||||
case 0:
|
||||
return errors.New("file not found")
|
||||
case 1:
|
||||
// Check if the single match is a file.
|
||||
fileInfo, err := os.Stat(matches[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if fileInfo.Size() > warnFileSize {
|
||||
confirmed, err := confirm("Input file is really big (%s) and jess needs to load it fully to memory, continue?", true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !confirmed {
|
||||
return nil
|
||||
}
|
||||
// Verify file if it is not a directory.
|
||||
if !fileInfo.IsDir() {
|
||||
return verify(matches[0], false)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// load file
|
||||
var data []byte
|
||||
if filename == "-" {
|
||||
data, err = ioutil.ReadAll(os.Stdin)
|
||||
} else {
|
||||
data, err = ioutil.ReadFile(filename)
|
||||
}
|
||||
// Resolve globs.
|
||||
files := make([]string, 0, len(args))
|
||||
for _, arg := range args {
|
||||
matches, err := filepath.Glob(arg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
files = append(files, matches...)
|
||||
}
|
||||
|
||||
// parse file
|
||||
letter, err := jess.LetterFromFileFormat(container.New(data))
|
||||
// Go through all files.
|
||||
for _, file := range files {
|
||||
fileInfo, err := os.Stat(file)
|
||||
if err != nil {
|
||||
return err
|
||||
verificationWarnings++
|
||||
fmt.Printf("[WARN] %s failed to read: %s\n", file, err)
|
||||
continue
|
||||
}
|
||||
|
||||
// adjust requirements
|
||||
if requirements == nil {
|
||||
requirements = jess.NewRequirements().
|
||||
Remove(jess.Confidentiality).
|
||||
Remove(jess.Integrity).
|
||||
Remove(jess.RecipientAuthentication)
|
||||
// Walk directories.
|
||||
if fileInfo.IsDir() {
|
||||
err := filepath.Walk(file, func(path string, info fs.FileInfo, err error) error {
|
||||
// Log walking errors.
|
||||
if err != nil {
|
||||
verificationWarnings++
|
||||
fmt.Printf("[WARN] %s failed to read: %s\n", path, err)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Only verify if .sig or .letter.
|
||||
if strings.HasSuffix(path, filesig.Extension) ||
|
||||
strings.HasSuffix(path, letterFileExtension) {
|
||||
if err := verify(path, true); err != nil {
|
||||
verificationFails++
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
verificationWarnings++
|
||||
fmt.Printf("[WARN] %s failed to walk directory: %s\n", file, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// verify
|
||||
err = letter.Verify(requirements, trustStore)
|
||||
if err != nil {
|
||||
return err
|
||||
if err := verify(file, true); err != nil {
|
||||
verificationFails++
|
||||
}
|
||||
}
|
||||
|
||||
// success
|
||||
fmt.Println("ok")
|
||||
return nil
|
||||
},
|
||||
// End with error status if any verification failed.
|
||||
if verificationFails > 0 {
|
||||
return fmt.Errorf("%d verification failures", verificationFails)
|
||||
}
|
||||
if verificationWarnings > 0 {
|
||||
return fmt.Errorf("%d warnings", verificationWarnings)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var verifiedSigs = make(map[string]struct{})

func verify(filename string, bulkMode bool) error {
	// Check if file was already verified.
	if _, alreadyVerified := verifiedSigs[filename]; alreadyVerified {
		return nil
	}

	var (
		signame  string
		signedBy []string
		err      error
	)

	// Get correct files and verify.
	switch {
	case filename == stdInOutFilename:
		signedBy, err = verifyLetter(filename, bulkMode)
	case strings.HasSuffix(filename, letterFileExtension):
		signedBy, err = verifyLetter(filename, bulkMode)
	case strings.HasSuffix(filename, filesig.Extension):
		filename = strings.TrimSuffix(filename, filesig.Extension)
		fallthrough
	default:
		signame = filename + filesig.Extension
		signedBy, err = verifySig(filename, signame, bulkMode)
	}

	// Remember the files already verified.
	verifiedSigs[filename] = struct{}{}
	if signame != "" {
		verifiedSigs[signame] = struct{}{}
	}

	// Output result in bulk mode.
	if bulkMode {
		if err == nil {
			fmt.Printf("[ OK ] %s signed by %s\n", filename, strings.Join(signedBy, ", "))
		} else {
			fmt.Printf("[FAIL] %s failed to verify: %s\n", filename, err)
		}
	}

	return err
}

func verifyLetter(filename string, silent bool) (signedBy []string, err error) {
	if len(metaDataFlag) > 0 {
		return nil, errors.New("metadata flag only valid for verifying .sig files")
	}

	if filename != "-" {
		fileInfo, err := os.Stat(filename)
		if err != nil {
			return nil, err
		}
		if fileInfo.Size() > warnFileSize {
			confirmed, err := confirm("Input file is really big (%s) and jess needs to load it fully to memory, continue?", true)
			if err != nil {
				return nil, err
			}
			if !confirmed {
				return nil, nil
			}
		}
	}

	// load file
	var data []byte
	if filename == "-" {
		data, err = io.ReadAll(os.Stdin)
	} else {
		data, err = os.ReadFile(filename)
	}
	if err != nil {
		return nil, err
	}

	// parse file
	letter, err := jess.LetterFromFileFormat(container.New(data))
	if err != nil {
		return nil, err
	}

	// Create default requirements if not set.
	if requirements == nil {
		requirements = jess.NewRequirements().
			Remove(jess.Confidentiality).
			Remove(jess.RecipientAuthentication)
	}

	// verify
	err = letter.Verify(requirements, trustStore)
	if err != nil {
		return nil, err
	}

	// get signers
	signedBy = make([]string, 0, len(letter.Signatures))
	for _, seal := range letter.Signatures {
		if signet, err := trustStore.GetSignet(seal.ID, true); err == nil {
			signedBy = append(signedBy, fmt.Sprintf("%s (%s)", signet.Info.Name, seal.ID))
		} else {
			signedBy = append(signedBy, seal.ID)
		}
	}

	// success
	if !silent {
		if err == nil {
			fmt.Println("Verification: OK")
			fmt.Printf("Signed By: %s\n", strings.Join(signedBy, ", "))
		} else {
			fmt.Printf("Verification FAILED: %s\n\n", err)
		}
	}

	return signedBy, nil
}

func verifySig(filename, signame string, silent bool) (signedBy []string, err error) {
	fds, err := filesig.VerifyFile(filename, signame, metaDataFlag, trustStore)
	if err != nil {
		return nil, err
	}

	if !silent {
		fmt.Print(formatSignatures(filename, signame, fds))
		return nil, nil
	}

	signedBy = make([]string, 0, len(fds))
	for _, fd := range fds {
		if sig := fd.Signature(); sig != nil {
			for _, seal := range sig.Signatures {
				if signet, err := trustStore.GetSignet(seal.ID, true); err == nil {
					signedBy = append(signedBy, fmt.Sprintf("%s (%s)", signet.Info.Name, seal.ID))
				} else {
					signedBy = append(signedBy, seal.ID)
				}
			}
		}
	}
	return signedBy, nil
}

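For reference, a minimal sketch of running the same verification programmatically with the filesig API the command wires through above. The truststore directory and file paths are assumptions for illustration, and passing nil metadata is assumed to mean that no metadata requirements are enforced.

package main

import (
	"fmt"
	"log"

	"github.com/safing/jess/filesig"
	"github.com/safing/jess/truststores"
)

func main() {
	// Load recipients from a directory trust store (path is an assumption).
	ts, err := truststores.NewDirTrustStore("cmd/testdata/.truststore")
	if err != nil {
		log.Fatal(err)
	}

	// Verify the file against its detached .sig; nil metadata requirements assumed.
	fds, err := filesig.VerifyFile("cmd/testdata/test.txt", "cmd/testdata/test.txt.sig", nil, ts)
	if err != nil {
		log.Fatalf("verification failed: %s", err)
	}

	// Print the signing signet IDs, similar to what verifySig collects.
	for _, fd := range fds {
		if sig := fd.Signature(); sig != nil {
			for _, seal := range sig.Signatures {
				fmt.Println("signed by", seal.ID)
			}
		}
	}
}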
@ -2,20 +2,85 @@ package main
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/safing/portbase/info"
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(versionCmd)
|
||||
}
|
||||
|
||||
var versionCmd = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "print version information",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Println(info.FullVersion())
|
||||
},
|
||||
var (
|
||||
// Version is the version of this command.
|
||||
Version = "dev build"
|
||||
// BuildSource holds the primary source repo used to build.
|
||||
BuildSource = "unknown"
|
||||
// BuildTime holds the time when the binary was built.
|
||||
BuildTime = "unknown"
|
||||
)
|
||||
|
||||
func init() {
|
||||
// Convert version string space placeholders.
|
||||
Version = strings.ReplaceAll(Version, "_", " ")
|
||||
BuildSource = strings.ReplaceAll(BuildSource, "_", " ")
|
||||
BuildTime = strings.ReplaceAll(BuildTime, "_", " ")
|
||||
|
||||
// Get build info.
|
||||
buildInfo, _ := debug.ReadBuildInfo()
|
||||
buildSettings := make(map[string]string)
|
||||
for _, setting := range buildInfo.Settings {
|
||||
buildSettings[setting.Key] = setting.Value
|
||||
}
|
||||
|
||||
// Add "dev build" to version if repo is dirty.
|
||||
if buildSettings["vcs.modified"] == "true" &&
|
||||
!strings.HasSuffix(Version, "dev build") {
|
||||
Version += " dev build"
|
||||
}
|
||||
|
||||
rootCmd.AddCommand(versionCmd)
|
||||
}
|
||||
|
||||
var versionCmd = &cobra.Command{
|
||||
Use: "version",
|
||||
Run: version,
|
||||
}
|
||||
|
||||
func version(cmd *cobra.Command, args []string) {
|
||||
builder := new(strings.Builder)
|
||||
|
||||
// Get build info.
|
||||
buildInfo, _ := debug.ReadBuildInfo()
|
||||
buildSettings := make(map[string]string)
|
||||
for _, setting := range buildInfo.Settings {
|
||||
buildSettings[setting.Key] = setting.Value
|
||||
}
|
||||
|
||||
// Print version info.
|
||||
builder.WriteString(fmt.Sprintf("Jess %s\n", Version))
|
||||
|
||||
// Build info.
|
||||
cgoInfo := "-cgo"
|
||||
if buildSettings["CGO_ENABLED"] == "1" {
|
||||
cgoInfo = "+cgo"
|
||||
}
|
||||
builder.WriteString(fmt.Sprintf("\nbuilt with %s (%s %s) for %s/%s\n", runtime.Version(), runtime.Compiler, cgoInfo, runtime.GOOS, runtime.GOARCH))
|
||||
builder.WriteString(fmt.Sprintf(" at %s\n", BuildTime))
|
||||
|
||||
// Commit info.
|
||||
dirtyInfo := "clean"
|
||||
if buildSettings["vcs.modified"] == "true" {
|
||||
dirtyInfo = "dirty"
|
||||
}
|
||||
builder.WriteString(fmt.Sprintf("\ncommit %s (%s)\n", buildSettings["vcs.revision"], dirtyInfo))
|
||||
builder.WriteString(fmt.Sprintf(" at %s\n", buildSettings["vcs.time"]))
|
||||
builder.WriteString(fmt.Sprintf(" from %s\n", BuildSource))
|
||||
|
||||
// License info.
|
||||
builder.WriteString("\nLicensed under the GPLv3 license.")
|
||||
|
||||
_, _ = fmt.Println(builder.String())
|
||||
}
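The underscore-to-space replacement in init() above is presumably there because Version, BuildSource, and BuildTime are injected at build time with -ldflags "-X main.Version=...", where values cannot easily contain spaces. A small, self-contained sketch of that convention; the example values are made up.

package main

import (
	"fmt"
	"strings"
)

// In a real build these would be set via something like:
//   go build -ldflags "-X main.version=0.3_beta -X main.buildTime=2024-05-01_12:00"
var (
	version   = "0.3_beta"
	buildTime = "2024-05-01_12:00"
)

func main() {
	// Underscores stand in for spaces in the injected values.
	fmt.Println(strings.ReplaceAll(version, "_", " "))
	fmt.Println(strings.ReplaceAll(buildTime, "_", " "))
}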
@ -3,6 +3,7 @@ package main
|
|||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
|
@ -23,7 +24,7 @@ func formatColumns(table [][]string) []string {
|
|||
}
|
||||
fmt.Fprint(tabWriter, strings.Join(table[i], "\t"))
|
||||
}
|
||||
tabWriter.Flush()
|
||||
_ = tabWriter.Flush()
|
||||
|
||||
// parse to []string
|
||||
var lines []string
|
||||
|
@ -111,7 +112,7 @@ func formatSignetSecurityLevel(signet *jess.Signet) string {
|
|||
|
||||
securityLevel, err := tool.StaticLogic.SecurityLevel(signet)
|
||||
if err != nil {
|
||||
if err == tools.ErrProtected {
|
||||
if errors.Is(err, tools.ErrProtected) {
|
||||
return "[protected]"
|
||||
}
|
||||
return failPlaceholder
|
||||
|
|
cmd/format_sig.go (new file, 79 lines)
@ -0,0 +1,79 @@
package main

import (
	"encoding/hex"
	"fmt"
	"sort"
	"strings"

	"github.com/safing/jess/filesig"
)

func formatSignatures(filename, signame string, fds []*filesig.FileData) string {
	b := &strings.Builder{}

	switch len(fds) {
	case 0:
	case 1:
		formatSignature(b, fds[0])
	case 2:
		for _, fd := range fds {
			fmt.Fprintf(b, "%d Signatures:\n\n\n", len(fds))
			formatSignature(b, fd)
			b.WriteString("\n\n")
		}
	}

	if filename != "" || signame != "" {
		b.WriteString("\n")
		fmt.Fprintf(b, "File: %s\n", filename)
		fmt.Fprintf(b, "Sig: %s\n", signame)
	}

	return b.String()
}

func formatSignature(b *strings.Builder, fd *filesig.FileData) {
	if fd.VerificationError() == nil {
		b.WriteString("Verification: OK\n")
	} else {
		fmt.Fprintf(b, "Verification FAILED: %s\n", fd.VerificationError())
	}

	if letter := fd.Signature(); letter != nil {
		b.WriteString("\n")
		for _, sig := range letter.Signatures {
			signet, err := trustStore.GetSignet(sig.ID, true)
			if err == nil {
				fmt.Fprintf(b, "Signed By: %s (%s)\n", signet.Info.Name, sig.ID)
			} else {
				fmt.Fprintf(b, "Signed By: %s\n", sig.ID)
			}
		}
	}

	if fileHash := fd.FileHash(); fileHash != nil {
		b.WriteString("\n")
		fmt.Fprintf(b, "Hash Alg: %s\n", fileHash.Algorithm())
		fmt.Fprintf(b, "Hash Sum: %s\n", hex.EncodeToString(fileHash.Sum()))
	}

	if len(fd.MetaData) > 0 {
		b.WriteString("\nMetadata:\n")

		sortedMetaData := make([][]string, 0, len(fd.MetaData))
		for k, v := range fd.MetaData {
			sortedMetaData = append(sortedMetaData, []string{k, v})
		}
		sort.Sort(sortByMetaDataKey(sortedMetaData))
		for _, entry := range sortedMetaData {
			fmt.Fprintf(b, " %s: %s\n", entry[0], entry[1])
		}
	}
}

type sortByMetaDataKey [][]string

func (a sortByMetaDataKey) Len() int           { return len(a) }
func (a sortByMetaDataKey) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a sortByMetaDataKey) Less(i, j int) bool { return a[i][0] < a[j][0] }

cmd/main.go (55 changed lines)
|
@ -2,21 +2,19 @@ package main
|
|||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/safing/jess/truststores"
|
||||
|
||||
"github.com/safing/jess"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/safing/portbase/info"
|
||||
// import all tools
|
||||
"github.com/safing/jess"
|
||||
_ "github.com/safing/jess/tools/all"
|
||||
"github.com/safing/jess/truststores"
|
||||
)
|
||||
|
||||
const (
|
||||
stdInOutFilename = "-"
|
||||
letterFileExtension = ".letter"
|
||||
|
||||
warnFileSize = 12000000 // 120MB
|
||||
)
|
||||
|
||||
|
@ -31,25 +29,21 @@ var (
|
|||
}
|
||||
|
||||
trustStoreDir string
|
||||
trustStoreKeyring string
|
||||
noSpec string
|
||||
minimumSecurityLevel = 0
|
||||
defaultSymmetricKeySize = 0
|
||||
|
||||
trustStore truststores.ExtendedTrustStore
|
||||
requirements = jess.NewRequirements()
|
||||
requirements *jess.Requirements
|
||||
)
|
||||
|
||||
func main() {
|
||||
info.Set("jess", "0.2", "GPLv3", true)
|
||||
|
||||
err := info.CheckVersion()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
rootCmd.PersistentFlags().StringVarP(&trustStoreDir, "tsdir", "d", "",
|
||||
"specify a truststore directory (default loaded from JESS_TSDIR env variable)",
|
||||
"specify a truststore directory (default loaded from JESS_TS_DIR env variable)",
|
||||
)
|
||||
rootCmd.PersistentFlags().StringVarP(&trustStoreKeyring, "tskeyring", "r", "",
|
||||
"specify a truststore keyring namespace (default loaded from JESS_TS_KEYRING env variable) - lower priority than tsdir",
|
||||
)
|
||||
rootCmd.PersistentFlags().StringVarP(&noSpec, "no", "n", "",
|
||||
"remove requirements using the abbreviations C, I, R, S",
|
||||
|
@ -58,26 +52,43 @@ func main() {
|
|||
rootCmd.PersistentFlags().IntVarP(&minimumSecurityLevel, "seclevel", "s", 0, "specify a minimum security level")
|
||||
rootCmd.PersistentFlags().IntVarP(&defaultSymmetricKeySize, "symkeysize", "k", 0, "specify a default symmetric key size (only applies in certain conditions, use when prompted)")
|
||||
|
||||
err = rootCmd.Execute()
|
||||
if err != nil {
|
||||
if rootCmd.Execute() != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
func initGlobalFlags(cmd *cobra.Command, args []string) (err error) {
|
||||
// trust store
|
||||
// trust store directory
|
||||
if trustStoreDir == "" {
|
||||
trustStoreDir, _ = os.LookupEnv("JESS_TSDIR")
|
||||
trustStoreDir, _ = os.LookupEnv("JESS_TS_DIR")
|
||||
if trustStoreDir == "" {
|
||||
trustStoreDir, _ = os.LookupEnv("JESS_TSDIR")
|
||||
}
|
||||
}
|
||||
if trustStoreDir != "" {
|
||||
var err error
|
||||
trustStore, err = truststores.NewDirTrustStore(trustStoreDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// trust store keyring
|
||||
if trustStore == nil {
|
||||
if trustStoreKeyring == "" {
|
||||
trustStoreKeyring, _ = os.LookupEnv("JESS_TS_KEYRING")
|
||||
if trustStoreKeyring == "" {
|
||||
trustStoreKeyring, _ = os.LookupEnv("JESS_TSKEYRING")
|
||||
}
|
||||
}
|
||||
if trustStoreKeyring != "" {
|
||||
trustStore, err = truststores.NewKeyringTrustStore(trustStoreKeyring)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// requirements
|
||||
if noSpec != "" {
|
||||
requirements, err = jess.ParseRequirementsFromNoSpec(noSpec)
|
||||
|
|
|
@ -2,7 +2,7 @@ package main
|
|||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/sha1" //nolint:gosec // required for HIBP API
|
||||
"crypto/sha1"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
|
@ -10,9 +10,9 @@ import (
|
|||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
|
||||
"github.com/AlecAivazis/survey"
|
||||
"github.com/safing/jess"
|
||||
)
|
||||
|
||||
func registerPasswordCallbacks() {
|
||||
|
@ -57,7 +57,7 @@ func createPassword(reference string, minSecurityLevel int) (string, error) {
|
|||
prompt := &survey.Password{
|
||||
Message: makePrompt("Please enter password", reference),
|
||||
}
|
||||
err := survey.AskOne(prompt, &pw1, func(val interface{}) error {
|
||||
err := survey.AskOne(prompt, &pw1, survey.WithValidator(func(val interface{}) error {
|
||||
pwVal, ok := val.(string)
|
||||
if !ok {
|
||||
return errors.New("input error")
|
||||
|
@ -68,7 +68,7 @@ func createPassword(reference string, minSecurityLevel int) (string, error) {
|
|||
return fmt.Errorf("please enter a stronger password, you only reached %d bits of security, while the envelope has a minimum of %d", pwSecLevel, minSecurityLevel)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
@ -115,9 +115,11 @@ func checkForWeakPassword(pw string) error {
|
|||
// request hash list
|
||||
resp, err := http.Get(fmt.Sprintf("https://api.pwnedpasswords.com/range/%s", prefix))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to contact HIBP service: %s", err)
|
||||
return fmt.Errorf("failed to contact HIBP service: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
defer func() {
|
||||
_ = resp.Body.Close()
|
||||
}()
|
||||
|
||||
// check if password is in hash list
|
||||
bodyReader := bufio.NewReader(resp.Body)
|
||||
|
@ -139,7 +141,7 @@ func checkForWeakPassword(pw string) error {
|
|||
}
|
||||
// fmt.Printf("checked %d leaked passwords\n", cnt)
|
||||
if err := scanner.Err(); err != nil {
|
||||
return fmt.Errorf("failed to read HIBP response: %s", err)
|
||||
return fmt.Errorf("failed to read HIBP response: %w", err)
|
||||
}
|
||||
|
||||
return nil
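For context, the check above uses the HIBP range (k-anonymity) API: only the first five hex characters of the password's SHA-1 hash are sent, and the response is scanned locally for the remaining suffix. A compact sketch of that lookup, assuming the documented "SUFFIX:COUNT" per-line response format:

package main

import (
	"bufio"
	"crypto/sha1" //nolint:gosec // required for HIBP API
	"encoding/hex"
	"fmt"
	"net/http"
	"strings"
)

// pwnedCount reports how often a password appears in known breaches,
// without ever sending the full hash to the service.
func pwnedCount(password string) (int, error) {
	sum := sha1.Sum([]byte(password))
	hexSum := strings.ToUpper(hex.EncodeToString(sum[:]))
	prefix, suffix := hexSum[:5], hexSum[5:]

	resp, err := http.Get("https://api.pwnedpasswords.com/range/" + prefix)
	if err != nil {
		return 0, fmt.Errorf("failed to contact HIBP service: %w", err)
	}
	defer func() { _ = resp.Body.Close() }()

	// Each response line is "SUFFIX:COUNT"; match our suffix locally.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, suffix+":") {
			var count int
			_, _ = fmt.Sscanf(strings.TrimPrefix(line, suffix+":"), "%d", &count)
			return count, nil
		}
	}
	return 0, scanner.Err()
}

func main() {
	count, err := pwnedCount("asdfasdfasdf")
	fmt.Println(count, err)
}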
@ -6,6 +6,8 @@ import (
|
|||
|
||||
//nolint:unused,deadcode // tested manually
|
||||
func testCfWP(t *testing.T, password string, expectedError string) {
|
||||
t.Helper()
|
||||
|
||||
var errMsg string
|
||||
err := checkForWeakPassword(password)
|
||||
if err != nil {
|
||||
|
@ -17,7 +19,9 @@ func testCfWP(t *testing.T, password string, expectedError string) {
|
|||
}
|
||||
|
||||
func TestCheckForWeakPassword(t *testing.T) {
|
||||
// TODO: only run these manually, es they actually require the live HIBP API.
|
||||
t.Parallel()
|
||||
|
||||
// TODO: only run these manually, as they actually require the live HIBP API.
|
||||
// testCfWP(t, "asdfasdfasdf", "")
|
||||
// testCfWP(t, "mfJLiQH9O9V9zXYrkNeYvGLvE14HcPyW7/sWWGfBX2nBU7c", "")
|
||||
}
|
||||
|
|
cmd/testdata/.truststore/3911c84c-78f7-4354-a7f5-0e115aa2903c.recipient (vendored, new file, 14 lines)
|
@ -0,0 +1,14 @@
|
|||
J{
|
||||
"Version": 1,
|
||||
"ID": "3911c84c-78f7-4354-a7f5-0e115aa2903c",
|
||||
"Scheme": "Ed25519",
|
||||
"Key": "ATYVZjmhR1Zwe0KAPV99pzbzI+6zWgKvELNhFwolRdnv",
|
||||
"Public": true,
|
||||
"Info": {
|
||||
"Name": "Safing Code Signing Cert 1",
|
||||
"Owner": "",
|
||||
"Created": "2022-07-11T10:23:31.705715613+02:00",
|
||||
"Expires": "0001-01-01T00:00:00Z",
|
||||
"Details": null
|
||||
}
|
||||
}
|
cmd/testdata/.truststore/3911c84c-78f7-4354-a7f5-0e115aa2903c.signet (vendored, new file, 13 lines)
|
@ -0,0 +1,13 @@
|
|||
J{
|
||||
"Version": 1,
|
||||
"ID": "3911c84c-78f7-4354-a7f5-0e115aa2903c",
|
||||
"Scheme": "Ed25519",
|
||||
"Key": "Aee5n/V1wJM8aNpaNEPBEPeN6S0Tl41OJP0rHwtsGcZcNhVmOaFHVnB7QoA9X32nNvMj7rNaAq8Qs2EXCiVF2e8=",
|
||||
"Info": {
|
||||
"Name": "Safing Code Signing Cert 1",
|
||||
"Owner": "",
|
||||
"Created": "2022-07-11T10:23:31.705715613+02:00",
|
||||
"Expires": "0001-01-01T00:00:00Z",
|
||||
"Details": null
|
||||
}
|
||||
}
|
cmd/testdata/.truststore/safing-codesign-1.envelope (vendored, new file, 23 lines)
|
@ -0,0 +1,23 @@
|
|||
J{
|
||||
"Version": 1,
|
||||
"Name": "safing-codesign-1",
|
||||
"SuiteID": "signfile_v1",
|
||||
"Secrets": null,
|
||||
"Senders": [
|
||||
{
|
||||
"Version": 1,
|
||||
"ID": "3911c84c-78f7-4354-a7f5-0e115aa2903c",
|
||||
"Scheme": "Ed25519",
|
||||
"Key": null,
|
||||
"Info": {
|
||||
"Name": "Safing Code Signing Cert 1",
|
||||
"Owner": "",
|
||||
"Created": "2022-07-11T10:23:31.705715613+02:00",
|
||||
"Expires": "0001-01-01T00:00:00Z",
|
||||
"Details": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"Recipients": null,
|
||||
"SecurityLevel": 128
|
||||
}
|
cmd/testdata/test.txt (vendored, new file, 1 line)
|
@ -0,0 +1 @@
|
|||
hello world!
|
cmd/testdata/test.txt.letter (vendored, new file, 13 lines)
|
@ -0,0 +1,13 @@
|
|||
‘J{
|
||||
"Version": 1,
|
||||
"SuiteID": "signfile_v1",
|
||||
"Nonce": "pKOQBQ==",
|
||||
"Signatures": [
|
||||
{
|
||||
"Scheme": "Ed25519",
|
||||
"ID": "3911c84c-78f7-4354-a7f5-0e115aa2903c",
|
||||
"Value": "ftsIINZ9oApKiXYQTcLIdAZDSflp6nRN/y8Gm0rdQC+3/wal6Q+7N3N8HEAxpoxWseSQNaRVCT9hSnRQStHYBA=="
|
||||
}
|
||||
]
|
||||
}
|
||||
hello world!
|
cmd/testdata/test.txt.sig (vendored, new file, 9 lines)
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGtzaWduZmlsZV92MWVOb25jZUQb+MqAZERhdGFY
|
||||
d02Dq0xhYmVsZWRIYXNoxCIJIOz3Afcn2eLXfEqkmsb7vMmXJ4rKAQvd7rlhwQz1
|
||||
TUNaqFNpZ25lZEF01v9iy+uLqE1ldGFEYXRhgqd2ZXJzaW9upTAuMC4xqmlkZW50
|
||||
aWZpZXKyd2luZG93cy9jb2RlL3RoaW5nalNpZ25hdHVyZXOBo2ZTY2hlbWVnRWQy
|
||||
NTUxOWJJRHgkMzkxMWM4NGMtNzhmNy00MzU0LWE3ZjUtMGUxMTVhYTI5MDNjZVZh
|
||||
bHVlWECJZFbIifczUGAJkmATXCHy/MiQZkiktM99X7U/cPgw3IKpKAxQsJ5LobgZ
|
||||
4P2ecv0IlN4gQb+x+lycxl93E9sJ
|
||||
-----END JESS SIGNATURE-----
|
cmd/testdata/test3.txt (vendored, new file, 1 line)
|
@ -0,0 +1 @@
|
|||
hello world!!
|
cmd/testdata/test3.txt.sig (vendored, new file, 9 lines)
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGtzaWduZmlsZV92MWVOb25jZUQJ9s/nZERhdGFY
|
||||
d02Dq0xhYmVsZWRIYXNoxCIJILtKnL1AHj7YubrWdLu1D+voud8Ky04vh756eTae
|
||||
rWQwqFNpZ25lZEF01v9izC6hqE1ldGFEYXRhgqd2ZXJzaW9upTAuMC4xqmlkZW50
|
||||
aWZpZXKyd2luZG93cy9jb2RlL3RoaW5nalNpZ25hdHVyZXOBo2ZTY2hlbWVnRWQy
|
||||
NTUxOWJJRHgkMzkxMWM4NGMtNzhmNy00MzU0LWE3ZjUtMGUxMTVhYTI5MDNjZVZh
|
||||
bHVlWEBLsd2QbM7VmEsnW60hHn/V6EP2mGFauWZgbEOlKTiqumVFbWU4K7Fi91KL
|
||||
Zgvwj+CNdZJ7Xv2qR7etviRDCmwC
|
||||
-----END JESS SIGNATURE-----
|
cmd/testdata/test4.txt (vendored, new file, 1 line)
|
@ -0,0 +1 @@
|
|||
hello world!
|
cmd/testdata/testdir/test2.txt (vendored, new file, 1 line)
|
@ -0,0 +1 @@
|
|||
hello world!
|
cmd/testdata/testdir/test2.txt.sig (vendored, new file, 9 lines)
|
@ -0,0 +1,9 @@
|
|||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGtzaWduZmlsZV92MWVOb25jZUThzxO6ZERhdGFY
|
||||
d02Dq0xhYmVsZWRIYXNoxCIJIOz3Afcn2eLXfEqkmsb7vMmXJ4rKAQvd7rlhwQz1
|
||||
TUNaqFNpZ25lZEF01v9izC3SqE1ldGFEYXRhgqd2ZXJzaW9upTAuMC4xqmlkZW50
|
||||
aWZpZXKyd2luZG93cy9jb2RlL3RoaW5nalNpZ25hdHVyZXOBo2ZTY2hlbWVnRWQy
|
||||
NTUxOWJJRHgkMzkxMWM4NGMtNzhmNy00MzU0LWE3ZjUtMGUxMTVhYTI5MDNjZVZh
|
||||
bHVlWEAGLkIoej0+ilJrIyb+BzX8+Yw2LY0zkoL9vwI02/2KqKVT7/pH+LTDX1Hl
|
||||
h1epYkF8ICdwa1iVNDx6P7iNmWkL
|
||||
-----END JESS SIGNATURE-----
|
|
@ -1,7 +1,7 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/AlecAivazis/survey"
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
)
|
||||
|
||||
func confirm(promptMsg string, suggest bool) (bool, error) {
|
||||
|
|
core-wire.go (34 changed lines)
|
@ -26,7 +26,7 @@ func (w *WireSession) sendHandshakeAndInitKDF(letter *Letter) error {
|
|||
case wireStateInit: // client
|
||||
keyMaterial, err = w.session.setupClosingKeyMaterial(letter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to setup initial sending handshake key material: %s", err)
|
||||
return fmt.Errorf("failed to setup initial sending handshake key material: %w", err)
|
||||
}
|
||||
fallthrough
|
||||
|
||||
|
@ -34,12 +34,12 @@ func (w *WireSession) sendHandshakeAndInitKDF(letter *Letter) error {
|
|||
if w.msgNo == 0 || (!w.server && w.reKeyNeeded()) {
|
||||
err = w.generateLocalKeyExchangeSignets(letter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate local key exchange signets for initiating handshake: %s", err)
|
||||
return fmt.Errorf("failed to generate local key exchange signets for initiating handshake: %w", err)
|
||||
}
|
||||
|
||||
err = w.generateLocalKeyEncapsulationSignets(letter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate local key encapsulation signets for initiating handshake: %s", err)
|
||||
return fmt.Errorf("failed to generate local key encapsulation signets for initiating handshake: %w", err)
|
||||
}
|
||||
|
||||
w.handshakeState = wireStateAwaitKey
|
||||
|
@ -49,7 +49,7 @@ func (w *WireSession) sendHandshakeAndInitKDF(letter *Letter) error {
|
|||
|
||||
err = w.generateLocalKeyExchangeSignets(letter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate local key exchange signets for completing handshake: %s", err)
|
||||
return fmt.Errorf("failed to generate local key exchange signets for completing handshake: %w", err)
|
||||
}
|
||||
|
||||
// debugging:
|
||||
|
@ -67,17 +67,17 @@ func (w *WireSession) sendHandshakeAndInitKDF(letter *Letter) error {
|
|||
|
||||
keyMaterial, err = w.makeSharedKeys(keyMaterial)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create shared keys for completing handshake: %s", err)
|
||||
return fmt.Errorf("failed to create shared keys for completing handshake: %w", err)
|
||||
}
|
||||
|
||||
err = w.generateLocalKeyEncapsulationSignets(letter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate local key encapsulation signets for completing handshake: %s", err)
|
||||
return fmt.Errorf("failed to generate local key encapsulation signets for completing handshake: %w", err)
|
||||
}
|
||||
|
||||
keyMaterial, err = w.makeAndEncapsulateNewKeys(letter, keyMaterial)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to encapsulate keys for completing handshake: %s", err)
|
||||
return fmt.Errorf("failed to encapsulate keys for completing handshake: %w", err)
|
||||
}
|
||||
|
||||
w.newKeyMaterial = copyKeyMaterial(keyMaterial)
|
||||
|
@ -102,13 +102,13 @@ func (w *WireSession) sendHandshakeAndInitKDF(letter *Letter) error {
|
|||
// init KDF
|
||||
err = w.session.kdf.InitKeyDerivation(letter.Nonce, keyMaterial...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to init %s kdf: %s", w.session.kdf.Info().Name, err)
|
||||
return fmt.Errorf("failed to init %s kdf: %w", w.session.kdf.Info().Name, err)
|
||||
}
|
||||
|
||||
// derive new carryover key
|
||||
err = w.session.kdf.DeriveKeyWriteTo(w.sendKeyCarryover)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to iterate session key with %s: %s", w.session.kdf.Info().Name, err)
|
||||
return fmt.Errorf("failed to iterate session key with %s: %w", w.session.kdf.Info().Name, err)
|
||||
}
|
||||
if w.msgNo == 0 {
|
||||
// copy initial sendkey to recvkey
|
||||
|
@ -137,7 +137,7 @@ func (w *WireSession) recvHandshakeAndInitKDF(letter *Letter) error {
|
|||
case wireStateInit: // server
|
||||
keyMaterial, err = w.session.setupOpeningKeyMaterial(letter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to setup initial receiving handshake key material: %s", err)
|
||||
return fmt.Errorf("failed to setup initial receiving handshake key material: %w", err)
|
||||
}
|
||||
fallthrough
|
||||
|
||||
|
@ -246,13 +246,13 @@ func (w *WireSession) recvHandshakeAndInitKDF(letter *Letter) error {
|
|||
// init KDF
|
||||
err = w.session.kdf.InitKeyDerivation(letter.Nonce, keyMaterial...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to init %s kdf: %s", w.session.kdf.Info().Name, err)
|
||||
return fmt.Errorf("failed to init %s kdf: %w", w.session.kdf.Info().Name, err)
|
||||
}
|
||||
|
||||
// derive new carryover key
|
||||
err = w.session.kdf.DeriveKeyWriteTo(w.recvKeyCarryover)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to iterate session key with %s: %s", w.session.kdf.Info().Name, err)
|
||||
return fmt.Errorf("failed to iterate session key with %s: %w", w.session.kdf.Info().Name, err)
|
||||
}
|
||||
if w.msgNo == 0 {
|
||||
// copy initial recvkey to sendkey
|
||||
|
@ -456,11 +456,11 @@ func (w *WireSession) burnEphemeralKeys() error {
|
|||
}
|
||||
|
||||
func copyKeyMaterial(keyMaterial [][]byte) [][]byte {
|
||||
new := make([][]byte, len(keyMaterial))
|
||||
copied := make([][]byte, len(keyMaterial))
|
||||
for index, part := range keyMaterial {
|
||||
newPart := make([]byte, len(part))
|
||||
copy(newPart, part)
|
||||
new[index] = newPart
|
||||
copiedPart := make([]byte, len(part))
|
||||
copy(copiedPart, part)
|
||||
copied[index] = copiedPart
|
||||
}
|
||||
return new
|
||||
return copied
|
||||
}
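The rename from new to copied here (and from new to newSignet in core_test.go and envelope.go) avoids shadowing Go's predeclared identifier new, which linters flag and which makes the builtin unusable for the rest of the scope. A tiny illustration, using made-up names:

package main

import "fmt"

func main() {
	new := make([][]byte, 2) // shadows the builtin new() for the rest of this scope
	fmt.Println(len(new))
	// p := new(int) // would no longer compile here: new is not a function
}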
@ -5,10 +5,12 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/safing/portbase/container"
|
||||
"github.com/safing/structures/container"
|
||||
)
|
||||
|
||||
func TestWire(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
wireReKeyAfterMsgs = 100
|
||||
|
||||
// current suites recommendation
|
||||
|
@ -21,6 +23,8 @@ func TestWire(t *testing.T) {
|
|||
}
|
||||
|
||||
func testWireCorrespondence(t *testing.T, suite *Suite, testData string) {
|
||||
t.Helper()
|
||||
|
||||
wtr := &wireTestRange{t: t}
|
||||
wtr.init(suite, testData)
|
||||
fmt.Printf("\n\nsimulating %v\n", suite.ID)
|
||||
|
|
core.go (91 changed lines)
|
@ -5,7 +5,7 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/safing/portbase/container"
|
||||
"github.com/safing/structures/container"
|
||||
)
|
||||
|
||||
// Close encrypts (and possibly signs) the given data and returns a Letter. Storyline: Close takes an envelope, inserts the message and closes it, resulting in a letter.
|
||||
|
@ -19,14 +19,23 @@ func (s *Session) Close(data []byte) (*Letter, error) { //nolint:gocognit
|
|||
letter.SuiteID = s.envelope.SuiteID
|
||||
}
|
||||
|
||||
/////////////////
|
||||
// Check for additional data in slice, which we should not touch.
|
||||
// TODO: Pre-allocate needed overhead for AEAD and others.
|
||||
if len(data) != cap(data) {
|
||||
// Make a copy of the data in order to not modify unrelated data.
|
||||
copiedData := make([]byte, len(data))
|
||||
copy(copiedData, data)
|
||||
data = copiedData
|
||||
}
|
||||
|
||||
// ==============
|
||||
// key management
|
||||
/////////////////
|
||||
// ==============
|
||||
|
||||
// create nonce
|
||||
nonce, err := RandomBytes(s.NonceSize())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get nonce: %s", err)
|
||||
return nil, fmt.Errorf("failed to get nonce: %w", err)
|
||||
}
|
||||
letter.Nonce = nonce
|
||||
|
||||
|
@ -48,13 +57,13 @@ func (s *Session) Close(data []byte) (*Letter, error) { //nolint:gocognit
|
|||
// init KDF
|
||||
err = s.kdf.InitKeyDerivation(letter.Nonce, keyMaterial...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to init %s kdf: %s", s.kdf.Info().Name, err)
|
||||
return nil, fmt.Errorf("failed to init %s kdf: %w", s.kdf.Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
/////////////
|
||||
// ==========
|
||||
// encryption
|
||||
/////////////
|
||||
// ==========
|
||||
|
||||
// setup tools
|
||||
err = s.setup()
|
||||
|
@ -67,7 +76,7 @@ func (s *Session) Close(data []byte) (*Letter, error) { //nolint:gocognit
|
|||
for _, tool := range s.ciphers {
|
||||
data, err = tool.Encrypt(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to encrypt with %s: %s", tool.Info().Name, err)
|
||||
return nil, fmt.Errorf("failed to encrypt with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -80,7 +89,7 @@ func (s *Session) Close(data []byte) (*Letter, error) { //nolint:gocognit
|
|||
for _, tool := range s.integratedCiphers {
|
||||
data, err = tool.AuthenticatedEncrypt(data, associatedData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to auth-encrypt with %s: %s", tool.Info().Name, err)
|
||||
return nil, fmt.Errorf("failed to auth-encrypt with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -100,7 +109,7 @@ func (s *Session) Close(data []byte) (*Letter, error) { //nolint:gocognit
|
|||
for _, tool := range s.macs {
|
||||
mac, err := tool.MAC(data, associatedData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to calculate MAC with %s: %s", tool.Info().Name, err)
|
||||
return nil, fmt.Errorf("failed to calculate MAC with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
allMacs.Append(mac)
|
||||
}
|
||||
|
@ -133,10 +142,9 @@ func (s *Session) Close(data []byte) (*Letter, error) { //nolint:gocognit
|
|||
for _, tool := range s.signers {
|
||||
//nolint:scopelint // function is executed immediately within loop
|
||||
err = s.envelope.LoopSenders(tool.Info().Name, func(signet *Signet) error {
|
||||
|
||||
sig, err := tool.Sign(data, associatedSigningData, signet)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to sign with %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to sign with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
|
||||
letter.Signatures = append(letter.Signatures, &Seal{
|
||||
|
@ -172,9 +180,9 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
return nil, fmt.Errorf("unsupported letter version: %d", letter.Version)
|
||||
}
|
||||
|
||||
/////////
|
||||
// ======
|
||||
// verify
|
||||
/////////
|
||||
// ======
|
||||
|
||||
// TODO: signature verification is run before tool setup. Currently, this is ok, but might change in the future. This might break additional signing algorithms that actually need setup.
|
||||
|
||||
|
@ -209,10 +217,9 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
for _, tool := range s.signers {
|
||||
//nolint:scopelint // function is executed immediately within loop
|
||||
err = s.envelope.LoopSenders(tool.Info().Name, func(signet *Signet) error {
|
||||
|
||||
err := tool.Verify(data, associatedSigningData, letter.Signatures[sigIndex].Value, signet)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to verify signature (%s) with ID %s: %s", tool.Info().Name, letter.Signatures[sigIndex].ID, err)
|
||||
return fmt.Errorf("failed to verify signature (%s) with ID %s: %w", tool.Info().Name, letter.Signatures[sigIndex].ID, err)
|
||||
}
|
||||
|
||||
sigIndex++
|
||||
|
@ -233,9 +240,9 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
return data, nil
|
||||
}
|
||||
|
||||
/////////////////
|
||||
// ==============
|
||||
// key management
|
||||
/////////////////
|
||||
// ==============
|
||||
|
||||
// key establishment
|
||||
if s.wire != nil {
|
||||
|
@ -252,13 +259,13 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
// init KDF
|
||||
err = s.kdf.InitKeyDerivation(letter.Nonce, keyMaterial...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to init %s kdf: %s", s.kdf.Info().Name, err)
|
||||
return nil, fmt.Errorf("failed to init %s kdf: %w", s.kdf.Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
/////////////
|
||||
// ==========
|
||||
// decryption
|
||||
/////////////
|
||||
// ==========
|
||||
|
||||
// setup tools
|
||||
err = s.setup()
|
||||
|
@ -284,7 +291,7 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
for _, tool := range s.macs {
|
||||
mac, err := tool.MAC(data, associatedData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to calculate MAC with %s: %s", tool.Info().Name, err)
|
||||
return nil, fmt.Errorf("failed to calculate MAC with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
allMacs.Append(mac)
|
||||
}
|
||||
|
@ -297,7 +304,7 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
for i := len(s.integratedCiphers) - 1; i >= 0; i-- {
|
||||
data, err = s.integratedCiphers[i].AuthenticatedDecrypt(data, associatedData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("%w: [%s] %s", ErrIntegrityViolation, s.integratedCiphers[i].Info().Name, err)
|
||||
return nil, fmt.Errorf("%w: [%s] %w", ErrIntegrityViolation, s.integratedCiphers[i].Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -305,7 +312,7 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
for i := len(s.ciphers) - 1; i >= 0; i-- {
|
||||
data, err = s.ciphers[i].Decrypt(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("%w: decryption failed: [%s] %s", ErrIntegrityViolation, s.ciphers[i].Info().Name, err)
|
||||
return nil, fmt.Errorf("%w: decryption failed: [%s] %w", ErrIntegrityViolation, s.ciphers[i].Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -314,7 +321,6 @@ func (s *Session) Open(letter *Letter) ([]byte, error) { //nolint:gocognit,gocyc
|
|||
|
||||
// Verify verifies signatures of the given letter.
|
||||
func (s *Session) Verify(letter *Letter) error {
|
||||
|
||||
// debugging:
|
||||
/*
|
||||
fmt.Printf("opening: %+v\n", letter)
|
||||
|
@ -328,9 +334,9 @@ func (s *Session) Verify(letter *Letter) error {
|
|||
return fmt.Errorf("unsupported letter version: %d", letter.Version)
|
||||
}
|
||||
|
||||
/////////
|
||||
// ======
|
||||
// verify
|
||||
/////////
|
||||
// ======
|
||||
|
||||
// TODO: signature verification is run before tool setup. Currently, this is ok, but might change in the future. This might break additional signing algorithms that actually need setup.
|
||||
|
||||
|
@ -365,10 +371,9 @@ func (s *Session) Verify(letter *Letter) error {
|
|||
for _, tool := range s.signers {
|
||||
//nolint:scopelint // function is executed immediately within loop
|
||||
err = s.envelope.LoopSenders(tool.Info().Name, func(signet *Signet) error {
|
||||
|
||||
err := tool.Verify(data, associatedSigningData, letter.Signatures[sigIndex].Value, signet)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to verify signature (%s) with ID %s: %s", tool.Info().Name, letter.Signatures[sigIndex].ID, err)
|
||||
return fmt.Errorf("failed to verify signature (%s) with ID %s: %w", tool.Info().Name, letter.Signatures[sigIndex].ID, err)
|
||||
}
|
||||
|
||||
sigIndex++
|
||||
|
@ -408,7 +413,7 @@ func (s *Session) setupClosingKeyMaterial(letter *Letter) ([][]byte, error) {
|
|||
}
|
||||
pwKey, err := s.passDerivator.DeriveKeyFromPassword(signet.Key, letter.Nonce)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get derive key from password with %s: %s", s.passDerivator.Info().Name, err)
|
||||
return fmt.Errorf("failed to get derive key from password with %s: %w", s.passDerivator.Info().Name, err)
|
||||
}
|
||||
letter.Keys = append(letter.Keys, &Seal{
|
||||
Scheme: SignetSchemePassword,
|
||||
|
@ -431,23 +436,23 @@ func (s *Session) setupClosingKeyMaterial(letter *Letter) ([][]byte, error) {
|
|||
senderSignet := NewSignetBase(tool.Definition())
|
||||
err := senderSignet.GenerateKey()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate new sender signet for %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to generate new sender signet for %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
|
||||
// create exchange and add to letter
|
||||
exchKey, err := tool.MakeSharedKey(senderSignet, recipient)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to make managed key with %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to make managed key with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
|
||||
// add to letter
|
||||
senderRcpt, err := senderSignet.AsRecipient() // convert to public signet
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get public sender signet for %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to get public sender signet for %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
err = senderRcpt.StoreKey()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to serialize sender public key for %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to serialize sender public key for %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
letter.Keys = append(letter.Keys, &Seal{
|
||||
ID: recipient.ID,
|
||||
|
@ -487,13 +492,13 @@ func (s *Session) setupClosingKeyMaterial(letter *Letter) ([][]byte, error) {
|
|||
// generate new key
|
||||
newKey, err := RandomBytes(tool.Helper().DefaultSymmetricKeySize())
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate new key for %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to generate new key for %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
|
||||
// encapsulate key
|
||||
wrappedKey, err := tool.EncapsulateKey(newKey, recipient)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to encapsulate key with %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to encapsulate key with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
|
||||
// add to letter
|
||||
|
@ -548,7 +553,7 @@ func (s *Session) setupOpeningKeyMaterial(letter *Letter) ([][]byte, error) {
|
|||
}
|
||||
pwKey, err := s.passDerivator.DeriveKeyFromPassword(signet.Key, letter.Nonce)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get derive key from password with %s: %s", s.passDerivator.Info().Name, err)
|
||||
return fmt.Errorf("failed to get derive key from password with %s: %w", s.passDerivator.Info().Name, err)
|
||||
}
|
||||
|
||||
keyMaterial = append(keyMaterial, pwKey)
|
||||
|
@ -574,7 +579,7 @@ func (s *Session) setupOpeningKeyMaterial(letter *Letter) ([][]byte, error) {
|
|||
// load key
|
||||
err := peerSignet.LoadKey()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to load ephermal signet for key exchange: %s", err)
|
||||
return fmt.Errorf("failed to load ephermal signet for key exchange: %w", err)
|
||||
}
|
||||
// save to state
|
||||
if s.wire != nil {
|
||||
|
@ -587,7 +592,7 @@ func (s *Session) setupOpeningKeyMaterial(letter *Letter) ([][]byte, error) {
|
|||
// make shared key
|
||||
exchKey, err := tool.MakeSharedKey(signet, peerSignet)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to make shared key with %s: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to make shared key with %s: %w", tool.Info().Name, err)
|
||||
}
|
||||
|
||||
// add key
|
||||
|
@ -633,7 +638,7 @@ func (s *Session) setup() error {
|
|||
for _, tool := range s.toolsWithState {
|
||||
err := tool.Setup()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to run tool %s setup: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to run tool %s setup: %w", tool.Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -646,7 +651,7 @@ func (s *Session) reset() error {
|
|||
for _, tool := range s.toolsWithState {
|
||||
err := tool.Reset()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to run tool %s reset: %s", tool.Info().Name, err)
|
||||
return fmt.Errorf("failed to run tool %s reset: %w", tool.Info().Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -657,7 +662,7 @@ func (s *Session) feedManagedHashers(managedHashers map[string]*managedHasher, d
|
|||
for _, mngdHasher := range managedHashers {
|
||||
n, err := mngdHasher.hash.Write(data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write data to managed hasher %s: %s", mngdHasher.tool.Name, err)
|
||||
return fmt.Errorf("failed to write data to managed hasher %s: %w", mngdHasher.tool.Name, err)
|
||||
}
|
||||
if n != len(data) {
|
||||
return fmt.Errorf("failed to fully write data to managed hasher %s", mngdHasher.tool.Name)
|
||||
|
@ -665,7 +670,7 @@ func (s *Session) feedManagedHashers(managedHashers map[string]*managedHasher, d
|
|||
|
||||
n, err = mngdHasher.hash.Write(associatedData)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write associated data to managed hasher %s: %s", mngdHasher.tool.Name, err)
|
||||
return fmt.Errorf("failed to write associated data to managed hasher %s: %w", mngdHasher.tool.Name, err)
|
||||
}
|
||||
if n != len(associatedData) {
|
||||
return fmt.Errorf("failed to fully write associated data to managed hasher %s", mngdHasher.tool.Name)
|
||||
|
|
core_test.go (55 changed lines)
|
@ -19,7 +19,7 @@ const (
|
|||
Qui voluptates quod omnis rerum. Soluta dolore quia eius quo similique accusamus. Quisquam fugiat sed voluptatibus eos earum sed. Numquam quia at commodi aut esse ducimus enim.
|
||||
Enim nihil architecto architecto. Reprehenderit at assumenda labore. Et ut sed ut inventore tenetur autem. Iusto et neque ab dolores eum. Praesentium amet sint ut voluptate impedit sit.
|
||||
A accusantium ullam voluptatibus. Adipisci architecto minus dolore tenetur eos. Id illum quo neque laborum numquam laborum animi libero.
|
||||
Debitis voluptatem non aut ex. Et et quis qui aut aut fugit accusantium. Est dolor quia accusantium culpa.
|
||||
Debitis voluptatem non aut ex. Et et quis qui aut fugit accusantium. Est dolor quia accusantium culpa.
|
||||
Facere iste dolor a qui. Earum aut facilis maxime repudiandae magnam. Laborum illum distinctio quo libero corrupti maxime. Eum nam officiis culpa nobis.
|
||||
Et repellat qui ut quaerat error explicabo. Distinctio repudiandae sit dolores nam at. Suscipit aliquam alias ullam id.`
|
||||
|
||||
|
@ -46,6 +46,8 @@ var (
|
|||
)
|
||||
|
||||
func tErrorf(t *testing.T, msg string, args ...interface{}) {
|
||||
t.Helper()
|
||||
|
||||
t.Errorf(msg, args...)
|
||||
if runTestsInDebugStyleActive {
|
||||
debugStyleErrorCnt++
|
||||
|
@ -111,23 +113,26 @@ func init() {
|
|||
defaultSecurityLevel = 128
|
||||
|
||||
// init special test config
|
||||
if RunComprehensiveTests == "true" { //nolint:goconst
|
||||
if RunComprehensiveTests == "true" {
|
||||
runComprehensiveTestsActive = true
|
||||
}
|
||||
if RunTestsInDebugStyle == "true" { //nolint:goconst
|
||||
if RunTestsInDebugStyle == "true" {
|
||||
runTestsInDebugStyleActive = true
|
||||
}
|
||||
}
|
||||
|
||||
func TestCoreBasic(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
for _, suite := range Suites() {
|
||||
testStorage(t, suite)
|
||||
}
|
||||
}
|
||||
|
||||
//nolint:gocognit
|
||||
// TestCoreAllCombinations tests all tools in all combinations and every tool
|
||||
// should be tested when placed before and after every other tool.
|
||||
func TestCoreAllCombinations(t *testing.T) {
|
||||
// This shall test all tools in all combinations and every tool should be tested when placed before and after every other tool.
|
||||
t.Parallel()
|
||||
|
||||
// skip in short tests and when not running comprehensive
|
||||
if testing.Short() || !runComprehensiveTestsActive {
|
||||
|
@ -220,8 +225,8 @@ func TestCoreAllCombinations(t *testing.T) {
|
|||
t.Logf("of these, %d were successfully detected as invalid", combinationsDetectedInvalid)
|
||||
}
|
||||
|
||||
func testStorage(t *testing.T, suite *Suite) (detectedInvalid bool) {
|
||||
// t.Logf("testing storage with %s", suite.ID)
|
||||
func testStorage(t *testing.T, suite *Suite) (detectedInvalid bool) { //nolint:thelper
|
||||
t.Logf("testing storage with %s", suite.ID)
|
||||
|
||||
e, err := setupEnvelopeAndTrustStore(t, suite)
|
||||
if err != nil {
|
||||
|
@ -291,6 +296,8 @@ func testStorage(t *testing.T, suite *Suite) (detectedInvalid bool) {
|
|||
|
||||
//nolint:gocognit,gocyclo
|
||||
func setupEnvelopeAndTrustStore(t *testing.T, suite *Suite) (*Envelope, error) {
|
||||
t.Helper()
|
||||
|
||||
// check if suite is registered
|
||||
if suite.ID == "" {
|
||||
// register as test suite
|
||||
|
@ -299,10 +306,9 @@ func setupEnvelopeAndTrustStore(t *testing.T, suite *Suite) (*Envelope, error) {
|
|||
}
|
||||
|
||||
// create envelope baseline
|
||||
e := &Envelope{
|
||||
SuiteID: suite.ID,
|
||||
suite: suite,
|
||||
}
|
||||
e := NewUnconfiguredEnvelope()
|
||||
e.SuiteID = suite.ID
|
||||
e.suite = suite
|
||||
|
||||
// check vars
|
||||
keyDerPresent := false
|
||||
|
@ -370,6 +376,7 @@ func setupEnvelopeAndTrustStore(t *testing.T, suite *Suite) (*Envelope, error) {
|
|||
case tools.PurposeKeyEncapsulation:
|
||||
e.suite.Provides.Add(RecipientAuthentication)
|
||||
case tools.PurposeSigning:
|
||||
e.suite.Provides.Add(Integrity)
|
||||
e.suite.Provides.Add(SenderAuthentication)
|
||||
case tools.PurposeIntegratedCipher:
|
||||
e.suite.Provides.Add(Confidentiality)
|
||||
|
@ -395,9 +402,7 @@ func setupEnvelopeAndTrustStore(t *testing.T, suite *Suite) (*Envelope, error) {
|
|||
}
|
||||
|
||||
// check if we are missing key derivation - this is only ok if we are merely signing
|
||||
if !keyDerPresent &&
|
||||
(len(e.suite.Provides.all) != 1 ||
|
||||
!e.suite.Provides.Has(SenderAuthentication)) {
|
||||
if !keyDerPresent && len(e.Senders) != len(e.suite.Tools) {
|
||||
return nil, testInvalidToolset(e, "omitting a key derivation tool is only allowed when merely signing")
|
||||
}
|
||||
|
||||
|
@ -457,6 +462,7 @@ func testInvalidToolset(e *Envelope, whyInvalid string) error {
|
|||
}
|
||||
|
||||
func getOrMakeSignet(t *testing.T, tool tools.ToolLogic, recipient bool, signetID string) (*Signet, error) {
|
||||
t.Helper()
|
||||
|
||||
// check if signet already exists
|
||||
signet, err := testTrustStore.GetSignet(signetID, recipient)
|
||||
|
@ -470,24 +476,24 @@ func getOrMakeSignet(t *testing.T, tool tools.ToolLogic, recipient bool, signetI
|
|||
}
|
||||
|
||||
// create new signet
|
||||
new := NewSignetBase(tool.Definition())
|
||||
new.ID = signetID
|
||||
newSignet := NewSignetBase(tool.Definition())
|
||||
newSignet.ID = signetID
|
||||
// generate signet and log time taken
|
||||
start := time.Now()
|
||||
err = tool.GenerateKey(new)
|
||||
err = tool.GenerateKey(newSignet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
t.Logf("generated %s signet %s in %s", new.Scheme, new.ID, time.Since(start))
|
||||
t.Logf("generated %s signet %s in %s", newSignet.Scheme, newSignet.ID, time.Since(start))
|
||||
|
||||
// store signet
|
||||
err = testTrustStore.StoreSignet(new)
|
||||
err = testTrustStore.StoreSignet(newSignet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// store recipient
|
||||
newRcpt, err := new.AsRecipient()
|
||||
newRcpt, err := newSignet.AsRecipient()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -500,13 +506,14 @@ func getOrMakeSignet(t *testing.T, tool tools.ToolLogic, recipient bool, signetI
|
|||
if recipient {
|
||||
return newRcpt, nil
|
||||
}
|
||||
return new, nil
|
||||
return newSignet, nil
|
||||
}
|
||||
|
||||
// generateCombinations returns all possible combinations of the given []string slice.
|
||||
// Forked from https://github.com/mxschmitt/golang-combinations/blob/a887187146560effd2677e987b069262f356297f/combinations.go
|
||||
// Copyright (c) 2018 Max Schmitt
|
||||
// MIT License
|
||||
//
|
||||
// Forked from https://github.com/mxschmitt/golang-combinations/blob/a887187146560effd2677e987b069262f356297f/combinations.go
|
||||
// Copyright (c) 2018 Max Schmitt,
|
||||
// MIT License.
|
||||
func generateCombinations(set []string) (subsets [][]string) {
|
||||
length := uint(len(set))
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
package jess
|
||||
|
||||
var (
|
||||
// must be var in order decrease for testing for better speed
|
||||
// Must be var in order decrease for testing for better speed.
|
||||
|
||||
defaultSecurityLevel = 128
|
||||
minimumSecurityLevel = 0
|
||||
|
@ -10,7 +10,7 @@ var (
|
|||
minimumSymmetricKeySize = 0
|
||||
)
|
||||
|
||||
// Currently recommended toolsets
|
||||
// Currently recommended toolsets.
|
||||
var (
|
||||
RecommendedNetwork = []string{"ECDH-X25519", "HKDF(SHA2-256)", "CHACHA20-POLY1305"}
|
||||
RecommendedStoragePassword = []string{"PBKDF2-SHA2-256", "HKDF(SHA2-256)", "CHACHA20-POLY1305"}
|
||||
|
|
envelope.go (103 changed lines)
|
@ -3,6 +3,10 @@ package jess
|
|||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/mr-tron/base58"
|
||||
|
||||
"github.com/safing/structures/dsd"
|
||||
)
|
||||
|
||||
// Envelope holds configuration for jess to put data into a letter.
|
||||
|
@ -181,7 +185,7 @@ func (e *Envelope) prepSignets(signets []*Signet, recipients bool, storage Trust
|
|||
if signet.Scheme == SignetSchemePassword {
|
||||
err := fillPassword(signet, !recipients, storage, e.suite.SecurityLevel)
|
||||
if err != nil {
|
||||
return fmt.Errorf(`failed to get password for "%s": %s`, signet.ID, err)
|
||||
return fmt.Errorf(`failed to get password for "%s": %w`, signet.ID, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
@ -202,19 +206,19 @@ func (e *Envelope) prepSignets(signets []*Signet, recipients bool, storage Trust
|
|||
}
|
||||
|
||||
// get signet from trust store
|
||||
new, err := storage.GetSignet(signet.ID, recipients)
|
||||
newSignet, err := storage.GetSignet(signet.ID, recipients)
|
||||
if err != nil {
|
||||
return fmt.Errorf(`failed to get signet with ID "%s" from truststore: %s`, signet.ID, err)
|
||||
return fmt.Errorf(`failed to get signet with ID "%s" from truststore: %w`, signet.ID, err)
|
||||
}
|
||||
|
||||
// check for scheme mismatch
|
||||
if signet.Scheme != "" && signet.Scheme != new.Scheme {
|
||||
return fmt.Errorf(`failed to apply signet with ID "%s" from truststore: was expected to be of type %s, but is %s`, signet.ID, signet.Scheme, new.Scheme)
|
||||
if signet.Scheme != "" && signet.Scheme != newSignet.Scheme {
|
||||
return fmt.Errorf(`failed to apply signet with ID "%s" from truststore: was expected to be of type %s, but is %s`, signet.ID, signet.Scheme, newSignet.Scheme)
|
||||
}
|
||||
|
||||
// apply signet back into envelope
|
||||
signet = new
|
||||
signets[i] = new
|
||||
signet = newSignet
|
||||
signets[i] = newSignet
|
||||
}
|
||||
|
||||
// unwrap protection
|
||||
|
@ -252,12 +256,12 @@ func fillPassword(signet *Signet, createPassword bool, storage TrustStore, minSe
|
|||
// check trust store for name
|
||||
if len(signet.ID) > 0 && storage != nil {
|
||||
// get signet from trust store
|
||||
new, err := storage.GetSignet(signet.ID, false)
|
||||
if err == nil && new.Info != nil {
|
||||
newSignet, err := storage.GetSignet(signet.ID, false)
|
||||
if err == nil && newSignet.Info != nil {
|
||||
if signet.Info == nil {
|
||||
signet.Info = new.Info
|
||||
signet.Info = newSignet.Info
|
||||
} else {
|
||||
signet.Info.Name = new.Info.Name
|
||||
signet.Info.Name = newSignet.Info.Name
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -268,3 +272,80 @@ func fillPassword(signet *Signet, createPassword bool, storage TrustStore, minSe
|
|||
}
|
||||
return getPasswordCallback(signet)
|
||||
}
|
||||
|
||||
// CleanSignets cleans all the signets from all the non-necessary data as well
|
||||
// as key material.
|
||||
// This is for preparing for serializing and saving the signet.
|
||||
func (e *Envelope) CleanSignets() {
|
||||
for i, signet := range e.Secrets {
|
||||
e.Secrets[i] = &Signet{
|
||||
Version: signet.Version,
|
||||
ID: signet.ID,
|
||||
Scheme: signet.Scheme,
|
||||
}
|
||||
}
|
||||
for i, signet := range e.Senders {
|
||||
e.Senders[i] = &Signet{
|
||||
Version: signet.Version,
|
||||
ID: signet.ID,
|
||||
Scheme: signet.Scheme,
|
||||
}
|
||||
}
|
||||
for i, signet := range e.Recipients {
|
||||
e.Recipients[i] = &Signet{
|
||||
Version: signet.Version,
|
||||
ID: signet.ID,
|
||||
Scheme: signet.Scheme,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ToBytes serializes the envelope to a byte slice.
|
||||
func (e *Envelope) ToBytes() ([]byte, error) {
|
||||
// Minimize data and remove any key material.
|
||||
e.CleanSignets()
|
||||
|
||||
// Serialize envelope.
|
||||
data, err := dsd.Dump(e, dsd.CBOR)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to serialize the envelope: %w", err)
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// EnvelopeFromBytes parses and loads a serialized envelope.
|
||||
func EnvelopeFromBytes(data []byte) (*Envelope, error) {
|
||||
e := &Envelope{}
|
||||
|
||||
// Parse envelope from data.
|
||||
if _, err := dsd.Load(data, e); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse data format: %w", err)
|
||||
}
|
||||
|
||||
return e, nil
|
||||
}
|
||||
|
||||
// ToBase58 serializes the envelope and encodes it with base58.
|
||||
func (e *Envelope) ToBase58() (string, error) {
|
||||
// Serialize Signet.
|
||||
data, err := e.ToBytes()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Encode and return.
|
||||
return base58.Encode(data), nil
|
||||
}
|
||||
|
||||
// EnvelopeFromBase58 parses and loads a base58 encoded serialized envelope.
|
||||
func EnvelopeFromBase58(base58Encoded string) (*Envelope, error) {
|
||||
// Decode string.
|
||||
data, err := base58.Decode(base58Encoded)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode base58: %w", err)
|
||||
}
|
||||
|
||||
// Parse and return.
|
||||
return EnvelopeFromBytes(data)
|
||||
}
|
filesig/format_armor.go (new file, 123 lines)
|
@ -0,0 +1,123 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"regexp"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/structures/dsd"
|
||||
)
|
||||
|
||||
const (
|
||||
sigFileArmorStart = "-----BEGIN JESS SIGNATURE-----"
|
||||
sigFileArmorEnd = "-----END JESS SIGNATURE-----"
|
||||
sigFileLineLength = 64
|
||||
)
|
||||
|
||||
var (
|
||||
sigFileArmorFindMatcher = regexp.MustCompile(`(?ms)` + sigFileArmorStart + `(.+?)` + sigFileArmorEnd)
|
||||
sigFileArmorRemoveMatcher = regexp.MustCompile(`(?ms)` + sigFileArmorStart + `.+?` + sigFileArmorEnd + `\r?\n?`)
|
||||
whitespaceMatcher = regexp.MustCompile(`(?ms)\s`)
|
||||
)
|
||||
|
||||
// ParseSigFile parses a signature file and extracts any jess signatures from it.
|
||||
// If signatures are returned along with an error, the error should be treated
|
||||
// as a warning, but the result should also not be treated as a full success,
|
||||
// as there might be missing signatures.
|
||||
func ParseSigFile(fileData []byte) (signatures []*jess.Letter, err error) {
|
||||
var warning error
|
||||
captured := make([][]byte, 0, 1)
|
||||
|
||||
// Find any signature blocks.
|
||||
matches := sigFileArmorFindMatcher.FindAllSubmatch(fileData, -1)
|
||||
for _, subMatches := range matches {
|
||||
if len(subMatches) >= 2 {
|
||||
// First entry is the whole match, second the submatch.
|
||||
captured = append(
|
||||
captured,
|
||||
bytes.TrimPrefix(
|
||||
bytes.TrimSuffix(
|
||||
whitespaceMatcher.ReplaceAll(subMatches[1], nil),
|
||||
[]byte(sigFileArmorEnd),
|
||||
),
|
||||
[]byte(sigFileArmorStart),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Parse any found signatures.
|
||||
signatures = make([]*jess.Letter, 0, len(captured))
|
||||
for _, sigBase64Data := range captured {
|
||||
// Decode from base64
|
||||
sigData := make([]byte, base64.RawStdEncoding.DecodedLen(len(sigBase64Data)))
|
||||
_, err = base64.RawStdEncoding.Decode(sigData, sigBase64Data)
|
||||
if err != nil {
|
||||
warning = err
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse signature.
|
||||
var letter *jess.Letter
|
||||
letter, err = jess.LetterFromDSD(sigData)
|
||||
if err != nil {
|
||||
warning = err
|
||||
} else {
|
||||
signatures = append(signatures, letter)
|
||||
}
|
||||
}
|
||||
|
||||
return signatures, warning
|
||||
}
|
||||
|
||||
// MakeSigFileSection creates a new section for a signature file.
|
||||
func MakeSigFileSection(signature *jess.Letter) ([]byte, error) {
|
||||
// Serialize.
|
||||
data, err := signature.ToDSD(dsd.CBOR)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to serialize signature: %w", err)
|
||||
}
|
||||
|
||||
// Encode to base64
|
||||
encodedData := make([]byte, base64.RawStdEncoding.EncodedLen(len(data)))
|
||||
base64.RawStdEncoding.Encode(encodedData, data)
|
||||
|
||||
// Split into lines and add armor.
|
||||
splittedData := make([][]byte, 0, (len(encodedData)/sigFileLineLength)+3)
|
||||
splittedData = append(splittedData, []byte(sigFileArmorStart))
|
||||
for len(encodedData) > 0 {
|
||||
if len(encodedData) > sigFileLineLength {
|
||||
splittedData = append(splittedData, encodedData[:sigFileLineLength])
|
||||
encodedData = encodedData[sigFileLineLength:]
|
||||
} else {
|
||||
splittedData = append(splittedData, encodedData)
|
||||
encodedData = nil
|
||||
}
|
||||
}
|
||||
splittedData = append(splittedData, []byte(sigFileArmorEnd))
|
||||
linedData := bytes.Join(splittedData, []byte("\n"))
|
||||
|
||||
return linedData, nil
|
||||
}
|
||||
|
||||
// AddToSigFile adds the given signature to the signature file.
|
||||
func AddToSigFile(signature *jess.Letter, sigFileData []byte, removeExistingJessSignatures bool) (newFileData []byte, err error) {
|
||||
// Create new section for new sig.
|
||||
newSigSection, err := MakeSigFileSection(signature)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Remove any existing jess signature sections.
|
||||
if removeExistingJessSignatures {
|
||||
sigFileData = sigFileArmorRemoveMatcher.ReplaceAll(sigFileData, nil)
|
||||
}
|
||||
|
||||
// Append new signature section to end of file with a newline.
|
||||
sigFileData = append(sigFileData, []byte("\n")...)
|
||||
sigFileData = append(sigFileData, newSigSection...)
|
||||
|
||||
return sigFileData, nil
|
||||
}
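
// Usage sketch (illustrative only): wrap a signature letter into an armored
// section and parse it back out. The letter is assumed to come from a prior
// signing step such as SignFileData.
func exampleArmorRoundTrip(letter *jess.Letter) error {
	section, err := MakeSigFileSection(letter)
	if err != nil {
		return err
	}
	sigs, err := ParseSigFile(section)
	if err != nil {
		return err
	}
	if len(sigs) != 1 {
		return fmt.Errorf("expected one signature, got %d", len(sigs))
	}
	return nil
}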
|
197
filesig/format_armor_test.go
Normal file
|
@ -0,0 +1,197 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/jess/lhash"
|
||||
)
|
||||
|
||||
var (
|
||||
testFileSigOneKey = "7KoUBdrRfF6drrPvKianoGfEXTQFCS5wDbfQyc87VQnYApPckRS8SfrrmAXZhV1JgKfnh44ib9nydQVEDRJiZArV22RqMfPrJmQdoAsE7zuzPRSrku8yF7zfnEv46X5GsmgfdSDrFMdG7XJd3fdaxStYCXTYDS5R"
|
||||
|
||||
testFileSigOneData = []byte("The quick brown fox jumps over the lazy dog")
|
||||
|
||||
testFileSigOneMetaData = map[string]string{
|
||||
"id": "resource/path",
|
||||
"version": "0.0.1",
|
||||
}
|
||||
|
||||
testFileSigOneSignature = []byte(`
|
||||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRA40a/BkRGF0YVhqTYOr
|
||||
TGFiZWxlZEhhc2jEIhkgAXGM7DXNPXlt0AAg4L/stHOtI0V9Bjt17/KcD/ouWKmo
|
||||
U2lnbmVkQXTW/2LH/ueoTWV0YURhdGGComlkrXJlc291cmNlL3BhdGindmVyc2lv
|
||||
bqUwLjAuMWpTaWduYXR1cmVzgaNmU2NoZW1lZ0VkMjU1MTliSURwZmlsZXNpZy10
|
||||
ZXN0LWtleWVWYWx1ZVhA4b1kfIJF7do6OcJnemQ5mtj/ZyMFJWWTmD1W5KvkpZac
|
||||
2AP5f+dDJhzWBHsoSXTCl6uA3DA3+RbABMYAZn6eDg
|
||||
-----END JESS SIGNATURE-----
|
||||
`)
|
||||
)
|
||||
|
||||
func TestFileSigFormat(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Load test key.
|
||||
signet, err := jess.SignetFromBase58(testFileSigOneKey)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Store signet.
|
||||
if err := testTrustStore.StoreSignet(signet); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// Store public key for verification.
|
||||
recipient, err := signet.AsRecipient()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if err := testTrustStore.StoreSignet(recipient); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Create envelope.
|
||||
envelope := jess.NewUnconfiguredEnvelope()
|
||||
envelope.SuiteID = jess.SuiteSignV1
|
||||
envelope.Senders = []*jess.Signet{signet}
|
||||
|
||||
// Hash and sign file.
|
||||
hash := lhash.Digest(lhash.BLAKE2b_256, testFileSigOneData)
|
||||
letter, _, err := SignFileData(hash, testFileSigOneMetaData, envelope, testTrustStore)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Serialize signature.
|
||||
sigFile, err := MakeSigFileSection(letter)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// fmt.Println("Signature:")
|
||||
// fmt.Println(string(sigFile))
|
||||
|
||||
// Parse signature again.
|
||||
sigs, err := ParseSigFile(sigFile)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(sigs) != 1 {
|
||||
t.Fatalf("one sig expected, got %d", len(sigs))
|
||||
}
|
||||
|
||||
// Verify Signature.
|
||||
fileData, err := VerifyFileData(sigs[0], testFileSigOneMetaData, testTrustStore)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Verify File.
|
||||
if !fileData.FileHash().MatchesData(testFileSigOneData) {
|
||||
t.Fatal("file hash does not match")
|
||||
}
|
||||
|
||||
// Verify the saved version of the signature.
|
||||
|
||||
// Parse the saved signature.
|
||||
sigs, err = ParseSigFile(testFileSigOneSignature)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(sigs) != 1 {
|
||||
t.Fatalf("only one sig expected, got %d", len(sigs))
|
||||
}
|
||||
|
||||
// Verify Signature.
|
||||
fileData, err = VerifyFileData(sigs[0], testFileSigOneMetaData, testTrustStore)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Verify File.
|
||||
if !fileData.FileHash().MatchesData(testFileSigOneData) {
|
||||
t.Fatal("file hash does not match")
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
testFileSigFormat1 = []byte(`TGFiZWxlZEhhc2jEIhkgAXGM7DXNPXlt0AAg4L
|
||||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRA40a/BkRGF0YVhqTYOr
|
||||
TGFiZWxlZEhhc2jEIhkgAXGM7DXNPXlt0AAg4L/stHOtI0V9Bjt17/KcD/ouWKmo
|
||||
U2lnbmVkQXTW/2LH/ueoTWV0YURhdGGComlkrXJlc291cmNlL3BhdGindmVyc2lv
|
||||
bqUwLjAuMWpTaWduYXR1cmVzgaNmU2NoZW1lZ0VkMjU1MTliSURwZmlsZXNpZy10
|
||||
ZXN0LWtleWVWYWx1ZVhA4b1kfIJF7do6OcJnemQ5mtj/ZyMFJWWTmD1W5KvkpZac
|
||||
2AP5f+dDJhzWBHsoSXTCl6uA3DA3+RbABMYAZn6eDg
|
||||
-----END JESS SIGNATURE-----
|
||||
|
||||
-----END JESS SIGNATURE-----
|
||||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRA40a/BkRGF0YVhqTYOr
|
||||
TGFiZWxlZEhhc2jEIhkgAXGM7DXNPXlt0AAg4L/stHOtI0V9Bjt17/KcD/ouWKmo
|
||||
U2lnbmVkQXTW/2LH/ueoTWV0YURhdGGComlk
|
||||
rXJlc291cmNlL3BhdGindmVyc2lvbqUwLjAuMWpTaWduYXR1cmVzgaNmU2NoZW1lZ0VkMjU1MTliSURwZmlsZXNpZy10
|
||||
ZXN0LWtleWVWYWx1ZVhA4b1kfIJF7do6OcJnemQ5mtj/ZyMFJWWTmD1W5KvkpZac
|
||||
2AP5f+dDJhzWBHsoSXTCl6uA3DA3+RbABMYAZn6eDg
|
||||
-----END JESS SIGNATURE-----
|
||||
end`)
|
||||
|
||||
testFileSigFormat2 = []byte(`test data 1
|
||||
-----BEGIN JESS SIGNATURE-----
|
||||
invalid sig
|
||||
-----END JESS SIGNATURE-----
|
||||
test data 2`)
|
||||
|
||||
testFileSigFormat3 = []byte(`test data 1
|
||||
-----BEGIN JESS SIGNATURE-----
|
||||
invalid sig
|
||||
-----END JESS SIGNATURE-----
|
||||
test data 2
|
||||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRA40a/BkRGF0YVhqTYOr
|
||||
TGFiZWxlZEhhc2jEIhkgAXGM7DXNPXlt0AAg4L/stHOtI0V9Bjt17/KcD/ouWKmo
|
||||
U2lnbmVkQXTW/2LH/ueoTWV0YURhdGGComlkrXJlc291cmNlL3BhdGindmVyc2lv
|
||||
bqUwLjAuMWpTaWduYXR1cmVzgaNmU2NoZW1lZ0VkMjU1MTliSURwZmlsZXNpZy10
|
||||
ZXN0LWtleWVWYWx1ZVhA4b1kfIJF7do6OcJnemQ5mtj/ZyMFJWWTmD1W5KvkpZac
|
||||
2AP5f+dDJhzWBHsoSXTCl6uA3DA3+RbABMYAZn6eDg
|
||||
-----END JESS SIGNATURE-----`)
|
||||
|
||||
testFileSigFormat4 = []byte(`test data 1
|
||||
test data 2
|
||||
-----BEGIN JESS SIGNATURE-----
|
||||
Q6VnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRA40a/BkRGF0YVhqTYOr
|
||||
TGFiZWxlZEhhc2jEIhkgAXGM7DXNPXlt0AAg4L/stHOtI0V9Bjt17/KcD/ouWKmo
|
||||
U2lnbmVkQXTW/2LH/ueoTWV0YURhdGGComlkrXJlc291cmNlL3BhdGindmVyc2lv
|
||||
bqUwLjAuMWpTaWduYXR1cmVzgaNmU2NoZW1lZ0VkMjU1MTliSURwZmlsZXNpZy10
|
||||
ZXN0LWtleWVWYWx1ZVhA4b1kfIJF7do6OcJnemQ5mtj/ZyMFJWWTmD1W5KvkpZac
|
||||
2AP5f+dDJhzWBHsoSXTCl6uA3DA3+RbABMYAZn6eDg
|
||||
-----END JESS SIGNATURE-----`)
|
||||
)
|
||||
|
||||
func TestFileSigFormatParsing(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
sigs, err := ParseSigFile(testFileSigFormat1)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(sigs) != 2 {
|
||||
t.Fatalf("expected two signatures, got %d", 1)
|
||||
}
|
||||
|
||||
newFile, err := AddToSigFile(sigs[0], testFileSigFormat2, false)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !bytes.Equal(newFile, testFileSigFormat3) {
|
||||
t.Fatalf("unexpected output:\n%s", string(newFile))
|
||||
}
|
||||
newFile, err = AddToSigFile(sigs[0], testFileSigFormat2, true)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !bytes.Equal(newFile, testFileSigFormat4) {
|
||||
t.Fatalf("unexpected output:\n%s", string(newFile))
|
||||
}
|
||||
}
|
147
filesig/helpers.go
Normal file
|
@ -0,0 +1,147 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/jess/hashtools"
|
||||
"github.com/safing/jess/lhash"
|
||||
)
|
||||
|
||||
// SignFile signs a file and replaces the signature file with a new one.
|
||||
// If the dataFilePath is "-", the file data is read from stdin.
|
||||
// Existing jess signatures in the signature file are removed.
|
||||
func SignFile(dataFilePath, signatureFilePath string, metaData map[string]string, envelope *jess.Envelope, trustStore jess.TrustStore) (fileData *FileData, err error) {
|
||||
// Load encryption suite.
|
||||
if err := envelope.LoadSuite(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Extract the used hashing algorithm from the suite.
|
||||
var hashTool *hashtools.HashTool
|
||||
for _, toolID := range envelope.Suite().Tools {
|
||||
if strings.Contains(toolID, "(") {
|
||||
hashToolID := strings.Trim(strings.Split(toolID, "(")[1], "()")
|
||||
hashTool, _ = hashtools.Get(hashToolID)
|
||||
break
|
||||
}
|
||||
}
|
||||
if hashTool == nil {
|
||||
return nil, errors.New("suite not suitable for file signing")
|
||||
}
|
||||
|
||||
// Hash the data file.
|
||||
var fileHash *lhash.LabeledHash
|
||||
if dataFilePath == "-" {
|
||||
fileHash, err = hashTool.LabeledHasher().DigestFromReader(os.Stdin)
|
||||
} else {
|
||||
fileHash, err = hashTool.LabeledHasher().DigestFile(dataFilePath)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to hash file: %w", err)
|
||||
}
|
||||
|
||||
// Sign the file data.
|
||||
signature, fileData, err := SignFileData(fileHash, metaData, envelope, trustStore)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to sign file: %w", err)
|
||||
}
|
||||
|
||||
sigFileData, err := os.ReadFile(signatureFilePath)
|
||||
var newSigFileData []byte
|
||||
switch {
|
||||
case err == nil:
|
||||
// Add signature to existing file.
|
||||
newSigFileData, err = AddToSigFile(signature, sigFileData, true)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to add signature to file: %w", err)
|
||||
}
|
||||
case errors.Is(err, fs.ErrNotExist):
|
||||
// Make signature section for saving to disk.
|
||||
newSigFileData, err = MakeSigFileSection(signature)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to format signature for file: %w", err)
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("failed to open existing signature file: %w", err)
|
||||
}
|
||||
|
||||
// Write the signature to file.
|
||||
if err := os.WriteFile(signatureFilePath, newSigFileData, 0o0644); err != nil { //nolint:gosec
|
||||
return nil, fmt.Errorf("failed to write signature to file: %w", err)
|
||||
}
|
||||
|
||||
return fileData, nil
|
||||
}
|
||||
|
||||
// VerifyFile verifies the given files and returns the verified file data.
|
||||
// If the dataFilePath is "-", the file data is read from stdin.
|
||||
// If an error is returned, there was an error in at least some part of the process.
|
||||
// Any returned file data struct must be checked for a verification error.
|
||||
func VerifyFile(dataFilePath, signatureFilePath string, metaData map[string]string, trustStore jess.TrustStore) (verifiedFileData []*FileData, err error) {
|
||||
var lastErr error
|
||||
|
||||
// Read signature from file.
|
||||
sigFileData, err := os.ReadFile(signatureFilePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read signature file: %w", err)
|
||||
}
|
||||
|
||||
// Extract all signatures.
|
||||
sigs, err := ParseSigFile(sigFileData)
|
||||
switch {
|
||||
case len(sigs) == 0 && err != nil:
|
||||
return nil, fmt.Errorf("failed to parse signature file: %w", err)
|
||||
case len(sigs) == 0:
|
||||
return nil, errors.New("no signatures found in signature file")
|
||||
case err != nil:
|
||||
lastErr = fmt.Errorf("failed to parse signature file: %w", err)
|
||||
}
|
||||
|
||||
// Verify all signatures.
|
||||
goodFileData := make([]*FileData, 0, len(sigs))
|
||||
var badFileData []*FileData
|
||||
for _, sigLetter := range sigs {
|
||||
// Verify signature.
|
||||
fileData, err := VerifyFileData(sigLetter, metaData, trustStore)
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
if fileData != nil {
|
||||
fileData.verificationError = err
|
||||
badFileData = append(badFileData, fileData)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Hash the file.
|
||||
var fileHash *lhash.LabeledHash
|
||||
if dataFilePath == "-" {
|
||||
fileHash, err = fileData.FileHash().Algorithm().DigestFromReader(os.Stdin)
|
||||
} else {
|
||||
fileHash, err = fileData.FileHash().Algorithm().DigestFile(dataFilePath)
|
||||
}
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
fileData.verificationError = err
|
||||
badFileData = append(badFileData, fileData)
|
||||
continue
|
||||
}
|
||||
|
||||
// Check if the hash matches.
|
||||
if !fileData.FileHash().Equal(fileHash) {
|
||||
lastErr = errors.New("signature invalid: file was modified")
|
||||
fileData.verificationError = lastErr
|
||||
badFileData = append(badFileData, fileData)
|
||||
continue
|
||||
}
|
||||
|
||||
// Add verified file data to list for return.
|
||||
goodFileData = append(goodFileData, fileData)
|
||||
}
|
||||
|
||||
return append(goodFileData, badFileData...), lastErr
|
||||
}
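
// Usage sketch (illustrative only): sign a file on disk and verify it again.
// The file path, metadata and the prepared envelope/trust store are assumptions
// for illustration; Extension is the ".sig" default defined in this package.
func exampleSignAndVerifyFile(envelope *jess.Envelope, trustStore jess.TrustStore) error {
	meta := map[string]string{"id": "resource/path"}
	if _, err := SignFile("data.txt", "data.txt"+Extension, meta, envelope, trustStore); err != nil {
		return err
	}
	// VerifyFile returns all file data structs; entries with a verification
	// error are appended after the good ones.
	fileData, err := VerifyFile("data.txt", "data.txt"+Extension, meta, trustStore)
	if err != nil {
		return err
	}
	_ = fileData
	return nil
}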
|
279
filesig/json.go
Normal file
|
@ -0,0 +1,279 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/tidwall/gjson"
|
||||
"github.com/tidwall/pretty"
|
||||
"github.com/tidwall/sjson"
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/jess/lhash"
|
||||
"github.com/safing/structures/dsd"
|
||||
)
|
||||
|
||||
// JSON file metadata keys.
|
||||
const (
|
||||
JSONKeyPrefix = "_jess-"
|
||||
JSONChecksumKey = JSONKeyPrefix + "checksum"
|
||||
JSONSignatureKey = JSONKeyPrefix + "signature"
|
||||
)
|
||||
|
||||
// AddJSONChecksum adds a checksum to a json file.
|
||||
func AddJSONChecksum(data []byte) ([]byte, error) {
|
||||
// Extract content and metadata from json.
|
||||
content, checksums, signatures, err := jsonSplit(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Calculate checksum.
|
||||
h := lhash.BLAKE2b_256.Digest(content)
|
||||
checksums = append(checksums, h.Base58())
|
||||
|
||||
// Sort and deduplicate checksums and sigs.
|
||||
slices.Sort(checksums)
|
||||
checksums = slices.Compact(checksums)
|
||||
slices.Sort(signatures)
|
||||
signatures = slices.Compact(signatures)
|
||||
|
||||
// Add metadata and return.
|
||||
return jsonAddMeta(content, checksums, signatures)
|
||||
}
|
||||
|
||||
// VerifyJSONChecksum checks a checksum in a json file.
|
||||
func VerifyJSONChecksum(data []byte) error {
|
||||
// Extract content and metadata from json.
|
||||
content, checksums, _, err := jsonSplit(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Verify all checksums.
|
||||
var checksumsVerified int
|
||||
for _, checksum := range checksums {
|
||||
// Parse checksum.
|
||||
h, err := lhash.FromBase58(checksum)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%w: failed to parse labeled hash: %w", ErrChecksumFailed, err)
|
||||
}
|
||||
// Verify checksum.
|
||||
if !h.Matches(content) {
|
||||
return ErrChecksumFailed
|
||||
}
|
||||
checksumsVerified++
|
||||
}
|
||||
|
||||
// Fail when no checksums were verified.
|
||||
if checksumsVerified == 0 {
|
||||
return ErrChecksumMissing
|
||||
}
|
||||
|
||||
return nil
|
||||
}
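
// Usage sketch (illustrative only): add a checksum to a JSON document and
// verify it again. Any valid JSON input works; the checksum is stored under
// the "_jess-checksum" key.
func exampleJSONChecksumRoundTrip(doc []byte) error {
	withChecksum, err := AddJSONChecksum(doc)
	if err != nil {
		return err
	}
	return VerifyJSONChecksum(withChecksum)
}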
|
||||
|
||||
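// AddJSONSignature signs the formatted json content with the given envelope
// and adds the signature to the "_jess-signature" key.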
func AddJSONSignature(data []byte, envelope *jess.Envelope, trustStore jess.TrustStore) (signedData []byte, err error) {
|
||||
// Create session.
|
||||
session, err := envelope.Correspondence(trustStore)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid signing envelope: %w", err)
|
||||
}
|
||||
|
||||
// Check if the envelope is suitable for signing.
|
||||
if err := envelope.Suite().Provides.CheckComplianceTo(fileSigRequirements); err != nil {
|
||||
return nil, fmt.Errorf("envelope not suitable for signing: %w", err)
|
||||
}
|
||||
|
||||
// Extract content and metadata from json.
|
||||
content, checksums, signatures, err := jsonSplit(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid json structure: %w", err)
|
||||
}
|
||||
|
||||
// Sign data.
|
||||
letter, err := session.Close(content)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("sign: %w", err)
|
||||
}
|
||||
|
||||
// Serialize signature and add it.
|
||||
letter.Data = nil
|
||||
sig, err := letter.ToDSD(dsd.CBOR)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("serialize sig: %w", err)
|
||||
}
|
||||
signatures = append(signatures, base64.RawURLEncoding.EncodeToString(sig))
|
||||
|
||||
// Sort and deduplicate checksums and sigs.
|
||||
slices.Sort(checksums)
|
||||
checksums = slices.Compact(checksums)
|
||||
slices.Sort(signatures)
|
||||
signatures = slices.Compact(signatures)
|
||||
|
||||
// Add metadata and return.
|
||||
return jsonAddMeta(data, checksums, signatures)
|
||||
}
|
||||
|
||||
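// VerifyJSONSignature verifies all jess signatures in the json data against
// the given trust store. It fails if no signature could be verified.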
func VerifyJSONSignature(data []byte, trustStore jess.TrustStore) (err error) {
|
||||
// Extract content and metadata from json.
|
||||
content, _, signatures, err := jsonSplit(data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid json structure: %w", err)
|
||||
}
|
||||
|
||||
var signaturesVerified int
|
||||
for i, sig := range signatures {
|
||||
// Deserialize signature.
|
||||
sigData, err := base64.RawURLEncoding.DecodeString(sig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("signature %d malformed: %w", i+1, err)
|
||||
}
|
||||
letter := &jess.Letter{}
|
||||
_, err = dsd.Load(sigData, letter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("signature %d malformed: %w", i+1, err)
|
||||
}
|
||||
|
||||
// Verify signature.
|
||||
letter.Data = content
|
||||
err = letter.Verify(fileSigRequirements, trustStore)
|
||||
if err != nil {
|
||||
return fmt.Errorf("signature %d invalid: %w", i+1, err)
|
||||
}
|
||||
|
||||
signaturesVerified++
|
||||
}
|
||||
|
||||
// Fail when no signatures were verified.
|
||||
if signaturesVerified == 0 {
|
||||
return ErrSignatureMissing
|
||||
}
|
||||
|
||||
return nil
|
||||
}
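
// Usage sketch (illustrative only): sign a JSON document and verify the
// result. The envelope and trust store are assumed to be set up for signing
// (e.g. SuiteSignV1 with an Ed25519 sender, as in the package tests).
func exampleJSONSignatureRoundTrip(doc []byte, envelope *jess.Envelope, trustStore jess.TrustStore) error {
	signed, err := AddJSONSignature(doc, envelope, trustStore)
	if err != nil {
		return err
	}
	return VerifyJSONSignature(signed, trustStore)
}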
|
||||
|
||||
func jsonSplit(data []byte) (
|
||||
content []byte,
|
||||
checksums []string,
|
||||
signatures []string,
|
||||
err error,
|
||||
) {
|
||||
// Check json.
|
||||
if !gjson.ValidBytes(data) {
|
||||
return nil, nil, nil, errors.New("invalid json")
|
||||
}
|
||||
content = data
|
||||
|
||||
// Get checksums.
|
||||
result := gjson.GetBytes(content, JSONChecksumKey)
|
||||
if result.Exists() {
|
||||
if result.IsArray() {
|
||||
array := result.Array()
|
||||
checksums = make([]string, 0, len(array))
|
||||
for _, result := range array {
|
||||
if result.Type == gjson.String {
|
||||
checksums = append(checksums, result.String())
|
||||
}
|
||||
}
|
||||
} else if result.Type == gjson.String {
|
||||
checksums = []string{result.String()}
|
||||
}
|
||||
|
||||
// Delete key.
|
||||
content, err = sjson.DeleteBytes(content, JSONChecksumKey)
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Get signatures.
|
||||
result = gjson.GetBytes(content, JSONSignatureKey)
|
||||
if result.Exists() {
|
||||
if result.IsArray() {
|
||||
array := result.Array()
|
||||
signatures = make([]string, 0, len(array))
|
||||
for _, result := range array {
|
||||
if result.Type == gjson.String {
|
||||
signatures = append(signatures, result.String())
|
||||
}
|
||||
}
|
||||
} else if result.Type == gjson.String {
|
||||
signatures = []string{result.String()}
|
||||
}
|
||||
|
||||
// Delete key.
|
||||
content, err = sjson.DeleteBytes(content, JSONSignatureKey)
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Format for reproducible checksums and signatures.
|
||||
content = pretty.PrettyOptions(content, &pretty.Options{
|
||||
Width: 200, // Must not change!
|
||||
Prefix: "", // Must not change!
|
||||
Indent: " ", // Must not change!
|
||||
SortKeys: true, // Must not change!
|
||||
})
|
||||
|
||||
return content, checksums, signatures, nil
|
||||
}
|
||||
|
||||
func jsonAddMeta(data []byte, checksums, signatures []string) ([]byte, error) {
|
||||
var (
|
||||
err error
|
||||
opts = &sjson.Options{
|
||||
ReplaceInPlace: true,
|
||||
}
|
||||
)
|
||||
|
||||
// Add checksums.
|
||||
switch len(checksums) {
|
||||
case 0:
|
||||
// Skip
|
||||
case 1:
|
||||
// Add single checksum.
|
||||
data, err = sjson.SetBytesOptions(
|
||||
data, JSONChecksumKey, checksums[0], opts,
|
||||
)
|
||||
default:
|
||||
// Add multiple checksums.
|
||||
data, err = sjson.SetBytesOptions(
|
||||
data, JSONChecksumKey, checksums, opts,
|
||||
)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Add signatures.
|
||||
switch len(signatures) {
|
||||
case 0:
|
||||
// Skip
|
||||
case 1:
|
||||
// Add single signature.
|
||||
data, err = sjson.SetBytesOptions(
|
||||
data, JSONSignatureKey, signatures[0], opts,
|
||||
)
|
||||
default:
|
||||
// Add multiple signatures.
|
||||
data, err = sjson.SetBytesOptions(
|
||||
data, JSONSignatureKey, signatures, opts,
|
||||
)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Final pretty print.
|
||||
data = pretty.PrettyOptions(data, &pretty.Options{
|
||||
Width: 200, // Must not change!
|
||||
Prefix: "", // Must not change!
|
||||
Indent: " ", // Must not change!
|
||||
})
|
||||
|
||||
return data, nil
|
||||
}
|
226
filesig/json_test.go
Normal file
|
@ -0,0 +1,226 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/jess/tools"
|
||||
)
|
||||
|
||||
func TestJSONChecksums(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Base test json file.
|
||||
json := `{"a": "b", "c": 1}`
|
||||
|
||||
// Test with a single checksum.
|
||||
|
||||
jsonWithChecksum := `{
|
||||
"_jess-checksum": "ZwtAd75qvioh6uf1NAq64KRgTbqeehFVYmhLmrwu1s7xJo",
|
||||
"a": "b",
|
||||
"c": 1
|
||||
}
|
||||
`
|
||||
|
||||
testJSONWithChecksum, err := AddJSONChecksum([]byte(json))
|
||||
require.NoError(t, err, "should be able to add checksum")
|
||||
assert.Equal(t, jsonWithChecksum, string(testJSONWithChecksum), "should match")
|
||||
require.NoError(t,
|
||||
VerifyJSONChecksum(testJSONWithChecksum),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
jsonWithChecksum = `{
|
||||
"c": 1, "a":"b",
|
||||
"_jess-checksum": "ZwtAd75qvioh6uf1NAq64KRgTbqeehFVYmhLmrwu1s7xJo"
|
||||
}`
|
||||
require.NoError(t,
|
||||
VerifyJSONChecksum([]byte(jsonWithChecksum)),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
jsonWithMultiChecksum := `{
|
||||
"_jess-checksum": [
|
||||
"PTV7S3Ca81aRk2kdNw7q2RfjLfEdPPT5Px5d211nhZedZC",
|
||||
"PTV7S3Ca81aRk2kdNw7q2RfjLfEdPPT5Px5d211nhZedZC",
|
||||
"CyDGH55DZUwa556DiYztMXaKZVBDjzWeFETiGmABMbvC3V"
|
||||
],
|
||||
"a": "b",
|
||||
"c": 1
|
||||
}
|
||||
`
|
||||
require.NoError(t,
|
||||
VerifyJSONChecksum([]byte(jsonWithMultiChecksum)),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
jsonWithMultiChecksumOutput := `{
|
||||
"_jess-checksum": ["CyDGH55DZUwa556DiYztMXaKZVBDjzWeFETiGmABMbvC3V", "PTV7S3Ca81aRk2kdNw7q2RfjLfEdPPT5Px5d211nhZedZC", "ZwtAd75qvioh6uf1NAq64KRgTbqeehFVYmhLmrwu1s7xJo"],
|
||||
"a": "b",
|
||||
"c": 1
|
||||
}
|
||||
`
|
||||
|
||||
testJSONWithMultiChecksum, err := AddJSONChecksum([]byte(jsonWithMultiChecksum))
|
||||
require.NoError(t, err, "should be able to add checksum")
|
||||
assert.Equal(t, jsonWithMultiChecksumOutput, string(testJSONWithMultiChecksum), "should match")
|
||||
require.NoError(t,
|
||||
VerifyJSONChecksum(testJSONWithMultiChecksum),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
// // Test with multiple checksums.
|
||||
|
||||
// textWithMultiChecksum := `# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
|
||||
// #!/bin/bash
|
||||
// # Initial
|
||||
// # Comment
|
||||
// # Block
|
||||
// # jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
|
||||
|
||||
// do_something()
|
||||
|
||||
// # jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
|
||||
// `
|
||||
// assert.NoError(t,
|
||||
// VerifyTextFileChecksum([]byte(textWithMultiChecksum), "#"),
|
||||
// "checksum should be correct",
|
||||
// )
|
||||
|
||||
// textWithMultiChecksumOutput := `#!/bin/bash
|
||||
// # Initial
|
||||
// # Comment
|
||||
// # Block
|
||||
// # jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
|
||||
// # jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
|
||||
// # jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
|
||||
// # jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
|
||||
|
||||
// do_something()
|
||||
// `
|
||||
// testTextWithMultiChecksumOutput, err := AddTextFileChecksum([]byte(textWithMultiChecksum), "#", AfterComment)
|
||||
// assert.NoError(t, err, "should be able to add checksum")
|
||||
// assert.Equal(t, textWithMultiChecksumOutput, string(testTextWithMultiChecksumOutput), "should match")
|
||||
|
||||
// // Test failing checksums.
|
||||
|
||||
// textWithFailingChecksums := `#!/bin/bash
|
||||
// # Initial
|
||||
// # Comment
|
||||
// # Block
|
||||
// # jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
|
||||
// # jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
|
||||
// # jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
|
||||
// # jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjaaaaaaaaaaaaaaaaaaaaa
|
||||
|
||||
// do_something()
|
||||
// `
|
||||
//
|
||||
// assert.Error(t, VerifyTextFileChecksum([]byte(textWithFailingChecksums), "#"), "should fail")
|
||||
}
|
||||
|
||||
func TestJSONSignatures(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Get tool for key generation.
|
||||
tool, err := tools.Get("Ed25519")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Generate key pair.
|
||||
s, err := getOrMakeSignet(t, tool.StaticLogic, false, "test-key-jsonsig-1")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
// sBackup, err := s.Backup(true)
|
||||
// if err != nil {
|
||||
// t.Fatal(err)
|
||||
// }
|
||||
// t.Logf("signet: %s", sBackup)
|
||||
|
||||
// Make envelope.
|
||||
envelope := jess.NewUnconfiguredEnvelope()
|
||||
envelope.SuiteID = jess.SuiteSignV1
|
||||
envelope.Senders = []*jess.Signet{s}
|
||||
|
||||
// Test 1: Simple json.
|
||||
|
||||
json := `{"a": "b", "c": 1}`
|
||||
testJSONWithSignature, err := AddJSONSignature([]byte(json), envelope, testTrustStore)
|
||||
require.NoError(t, err, "should be able to add signature")
|
||||
require.NoError(t,
|
||||
VerifyJSONSignature(testJSONWithSignature, testTrustStore),
|
||||
"signature should be valid",
|
||||
)
|
||||
|
||||
// Test 2: Prepared json with signature.
|
||||
|
||||
// Load signing key into trust store.
|
||||
signingKey2, err := jess.SenderFromTextFormat(
|
||||
"sender:2ZxXzzL3mc3mLPizTUe49zi8Z3NMbDrmmqJ4V9mL4AxefZ1o8pM8wPMuK2uW12Mvd3EJL9wsKTn14BDuqH2AtucvHTAkjDdZZ5YA9Azmji5tLRXmypvSxEj2mxXU3MFXBVdpzPdwRcE4WauLo9ZfQWebznvnatVLwuxmeo17tU2pL7",
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
rcptKey2, err := signingKey2.AsRecipient()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if err := testTrustStore.StoreSignet(rcptKey2); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Verify data.
|
||||
jsonWithSignature := `{
|
||||
"c":1,"a":"b",
|
||||
"_jess-signature": "Q6RnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRK6e7JhqU2lnbmF0dXJlc4GjZlNjaGVtZWdFZDI1NTE5YklEeBl0ZXN0LXN0YXRpYy1rZXktanNvbnNpZy0xZVZhbHVlWEBPEbeM4_CTl3OhNT2z74h38jIZG5R7BBLDFd6npJ3E-4JqM6TaSMa-2pPEBf3fDNuikR3ak45SekC6Z10uWiEB"
|
||||
}`
|
||||
require.NoError(t,
|
||||
VerifyJSONSignature([]byte(jsonWithSignature), testTrustStore),
|
||||
"signature should be valid",
|
||||
)
|
||||
|
||||
// Test 3: Add signature to prepared json.
|
||||
|
||||
testJSONWithSignature, err = AddJSONSignature([]byte(jsonWithSignature), envelope, testTrustStore)
|
||||
require.NoError(t, err, "should be able to add signature")
|
||||
require.NoError(t,
|
||||
VerifyJSONSignature(testJSONWithSignature, testTrustStore),
|
||||
"signatures should be valid",
|
||||
)
|
||||
|
||||
// Test 4: Prepared json with multiple signatures.
|
||||
|
||||
// Load signing key into trust store.
|
||||
signingKey3, err := jess.SenderFromTextFormat(
|
||||
"sender:2ZxXzzL3mc3mLPizTUe49zi8Z3NMbDrmmqJ4V9mL4AxefZ1o8pM8wPMuRAXdZNaPX3B96bhGCpww6TbXJ6WXLHoLwLV196cgdm1BurfTMdjUPa4PUj1KgHuM82b1p8ezQeryzj1CsjeM8KRQdh9YP87gwKpXNmLW5GmUyWG5KxzZ7W",
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
rcptKey3, err := signingKey3.AsRecipient()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if err := testTrustStore.StoreSignet(rcptKey3); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
jsonWithMultiSig := `{
|
||||
"_jess-signature": [
|
||||
"Q6RnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRK6e7JhqU2lnbmF0dXJlc4GjZlNjaGVtZWdFZDI1NTE5YklEeBl0ZXN0LXN0YXRpYy1rZXktanNvbnNpZy0xZVZhbHVlWEBPEbeM4_CTl3OhNT2z74h38jIZG5R7BBLDFd6npJ3E-4JqM6TaSMa-2pPEBf3fDNuikR3ak45SekC6Z10uWiEB",
|
||||
"Q6RnVmVyc2lvbgFnU3VpdGVJRGdzaWduX3YxZU5vbmNlRC32oylqU2lnbmF0dXJlc4GjZlNjaGVtZWdFZDI1NTE5YklEeBl0ZXN0LXN0YXRpYy1rZXktanNvbnNpZy0yZVZhbHVlWEDYVHeKaJvzZPOkgC6Tie6x70bNm2jtmJmAwDFDcBL1ddK7pVSefyAPg47xMO7jeucP5bw754P6CdrR5gyANJkM"
|
||||
],
|
||||
"a": "b",
|
||||
"c": 1
|
||||
}
|
||||
`
|
||||
assert.NoError(t,
|
||||
VerifyJSONSignature([]byte(jsonWithMultiSig), testTrustStore),
|
||||
"signatures should be valid",
|
||||
)
|
||||
}
|
123
filesig/main.go
Normal file
|
@ -0,0 +1,123 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/jess/lhash"
|
||||
"github.com/safing/structures/dsd"
|
||||
)
|
||||
|
||||
// Extension holds the default file extension to be used for signature files.
|
||||
const Extension = ".sig"
|
||||
|
||||
var fileSigRequirements = jess.NewRequirements().
|
||||
Remove(jess.RecipientAuthentication).
|
||||
Remove(jess.Confidentiality)
|
||||
|
||||
// FileData describes a file that is signed.
|
||||
type FileData struct {
|
||||
LabeledHash []byte
|
||||
fileHash *lhash.LabeledHash
|
||||
|
||||
SignedAt time.Time
|
||||
MetaData map[string]string
|
||||
|
||||
signature *jess.Letter
|
||||
verificationError error
|
||||
}
|
||||
|
||||
// FileHash returns the labeled hash of the file that was signed.
|
||||
func (fd *FileData) FileHash() *lhash.LabeledHash {
|
||||
return fd.fileHash
|
||||
}
|
||||
|
||||
// Signature returns the signature, if present.
|
||||
func (fd *FileData) Signature() *jess.Letter {
|
||||
return fd.signature
|
||||
}
|
||||
|
||||
// VerificationError returns the error encountered during verification.
|
||||
func (fd *FileData) VerificationError() error {
|
||||
return fd.verificationError
|
||||
}
|
||||
|
||||
// SignFileData signs the given file checksum and metadata.
|
||||
func SignFileData(fileHash *lhash.LabeledHash, metaData map[string]string, envelope *jess.Envelope, trustStore jess.TrustStore) (letter *jess.Letter, fd *FileData, err error) {
|
||||
// Create session.
|
||||
session, err := envelope.Correspondence(trustStore)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Check if the envelope is suitable for signing.
|
||||
if err := envelope.Suite().Provides.CheckComplianceTo(fileSigRequirements); err != nil {
|
||||
return nil, nil, fmt.Errorf("envelope not suitable for signing: %w", err)
|
||||
}
|
||||
|
||||
// Create struct and transform data into serializable format to be signed.
|
||||
fd = &FileData{
|
||||
SignedAt: time.Now().Truncate(time.Second),
|
||||
fileHash: fileHash,
|
||||
MetaData: metaData,
|
||||
}
|
||||
fd.LabeledHash = fd.fileHash.Bytes()
|
||||
|
||||
// Serialize file signature.
|
||||
fileData, err := dsd.Dump(fd, dsd.MsgPack)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to serialize file signature data: %w", err)
|
||||
}
|
||||
|
||||
// Sign data.
|
||||
letter, err = session.Close(fileData)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to sign: %w", err)
|
||||
}
|
||||
|
||||
return letter, fd, nil
|
||||
}
|
||||
|
||||
// VerifyFileData verifies the given signed file data and returns the file data.
|
||||
// If an error is returned, there was an error in at least some part of the process.
|
||||
// Any returned file data struct must be checked for a verification error.
|
||||
func VerifyFileData(letter *jess.Letter, requiredMetaData map[string]string, trustStore jess.TrustStore) (fd *FileData, err error) {
|
||||
// Parse data.
|
||||
fd = &FileData{
|
||||
signature: letter,
|
||||
}
|
||||
_, err = dsd.Load(letter.Data, fd)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse file signature data: %w", err)
|
||||
}
|
||||
|
||||
// Verify signature and get data.
|
||||
_, err = letter.Open(fileSigRequirements, trustStore)
|
||||
if err != nil {
|
||||
fd.verificationError = fmt.Errorf("failed to verify file signature: %w", err)
|
||||
return fd, fd.verificationError
|
||||
}
|
||||
|
||||
// Check if the required metadata matches.
|
||||
for reqKey, reqValue := range requiredMetaData {
|
||||
sigMetaValue, ok := fd.MetaData[reqKey]
|
||||
if !ok {
|
||||
fd.verificationError = fmt.Errorf("missing required metadata key %q", reqKey)
|
||||
return fd, fd.verificationError
|
||||
}
|
||||
if sigMetaValue != reqValue {
|
||||
fd.verificationError = fmt.Errorf("required metadata %q=%q does not match the file's value %q", reqKey, reqValue, sigMetaValue)
|
||||
return fd, fd.verificationError
|
||||
}
|
||||
}
|
||||
|
||||
// Parse labeled hash.
|
||||
fd.fileHash, err = lhash.Load(fd.LabeledHash)
|
||||
if err != nil {
|
||||
fd.verificationError = fmt.Errorf("failed to parse file checksum: %w", err)
|
||||
return fd, fd.verificationError
|
||||
}
|
||||
|
||||
return fd, nil
|
||||
}
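
// Usage sketch (illustrative only): hash in-memory data, sign the hash, then
// verify it. The metadata, envelope and trust store are assumptions mirroring
// the package tests.
func exampleSignAndVerifyData(data []byte, envelope *jess.Envelope, trustStore jess.TrustStore) error {
	hash := lhash.BLAKE2b_256.Digest(data)
	meta := map[string]string{"id": "example"}
	letter, _, err := SignFileData(hash, meta, envelope, trustStore)
	if err != nil {
		return err
	}
	fd, err := VerifyFileData(letter, meta, trustStore)
	if err != nil {
		return err
	}
	if !fd.FileHash().MatchesData(data) {
		return fmt.Errorf("file hash does not match")
	}
	return nil
}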
|
130
filesig/main_test.go
Normal file
|
@ -0,0 +1,130 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/safing/jess"
|
||||
"github.com/safing/jess/lhash"
|
||||
"github.com/safing/jess/tools"
|
||||
)
|
||||
|
||||
var (
|
||||
testTrustStore = jess.NewMemTrustStore()
|
||||
testData1 = "The quick brown fox jumps over the lazy dog. "
|
||||
|
||||
testFileSigMetaData1 = map[string]string{
|
||||
"key1": "value1",
|
||||
"key2": "value2",
|
||||
}
|
||||
testFileSigMetaData1x = map[string]string{
|
||||
"key1": "value1x",
|
||||
}
|
||||
testFileSigMetaData2 = map[string]string{
|
||||
"key3": "value3",
|
||||
"key4": "value4",
|
||||
}
|
||||
testFileSigMetaData3 = map[string]string{}
|
||||
)
|
||||
|
||||
func TestFileSigs(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testFileSigningWithOptions(t, testFileSigMetaData1, testFileSigMetaData1, true)
|
||||
testFileSigningWithOptions(t, testFileSigMetaData1, testFileSigMetaData1x, false)
|
||||
testFileSigningWithOptions(t, testFileSigMetaData2, testFileSigMetaData2, true)
|
||||
testFileSigningWithOptions(t, testFileSigMetaData1, testFileSigMetaData2, false)
|
||||
testFileSigningWithOptions(t, testFileSigMetaData2, testFileSigMetaData1, false)
|
||||
testFileSigningWithOptions(t, testFileSigMetaData1, testFileSigMetaData3, true)
|
||||
testFileSigningWithOptions(t, testFileSigMetaData3, testFileSigMetaData1, false)
|
||||
}
|
||||
|
||||
func testFileSigningWithOptions(t *testing.T, signingMetaData, verifyingMetaData map[string]string, shouldSucceed bool) {
|
||||
t.Helper()
|
||||
|
||||
// Get tool for key generation.
|
||||
tool, err := tools.Get("Ed25519")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Generate key pair.
|
||||
s, err := getOrMakeSignet(t, tool.StaticLogic, false, "test-key-filesig-1")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Hash "file".
|
||||
fileHash := lhash.BLAKE2b_256.Digest([]byte(testData1))
|
||||
|
||||
// Make envelope.
|
||||
envelope := jess.NewUnconfiguredEnvelope()
|
||||
envelope.SuiteID = jess.SuiteSignV1
|
||||
envelope.Senders = []*jess.Signet{s}
|
||||
|
||||
// Sign data.
|
||||
letter, fileData, err := SignFileData(fileHash, signingMetaData, envelope, testTrustStore)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Check if the checksum made it.
|
||||
if len(fileData.LabeledHash) == 0 {
|
||||
t.Fatal("missing labeled hash")
|
||||
}
|
||||
|
||||
// Verify signature.
|
||||
_, err = VerifyFileData(letter, verifyingMetaData, testTrustStore)
|
||||
if (err == nil) != shouldSucceed {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func getOrMakeSignet(t *testing.T, tool tools.ToolLogic, recipient bool, signetID string) (*jess.Signet, error) {
|
||||
t.Helper()
|
||||
|
||||
// check if signet already exists
|
||||
signet, err := testTrustStore.GetSignet(signetID, recipient)
|
||||
if err == nil {
|
||||
return signet, nil
|
||||
}
|
||||
|
||||
// handle special cases
|
||||
if tool == nil {
|
||||
return nil, errors.New("bad parameters")
|
||||
}
|
||||
|
||||
// create new signet
|
||||
newSignet := jess.NewSignetBase(tool.Definition())
|
||||
newSignet.ID = signetID
|
||||
// generate signet and log time taken
|
||||
start := time.Now()
|
||||
err = tool.GenerateKey(newSignet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
t.Logf("generated %s signet %s in %s", newSignet.Scheme, newSignet.ID, time.Since(start))
|
||||
|
||||
// store signet
|
||||
err = testTrustStore.StoreSignet(newSignet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// store recipient
|
||||
newRcpt, err := newSignet.AsRecipient()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = testTrustStore.StoreSignet(newRcpt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// return
|
||||
if recipient {
|
||||
return newRcpt, nil
|
||||
}
|
||||
return newSignet, nil
|
||||
}
|
232
filesig/text.go
Normal file
|
@ -0,0 +1,232 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/safing/jess/lhash"
|
||||
)
|
||||
|
||||
// Text file metadata keys.
|
||||
const (
|
||||
TextKeyPrefix = "jess-"
|
||||
TextChecksumKey = TextKeyPrefix + "checksum"
|
||||
TextSignatureKey = TextKeyPrefix + "signature"
|
||||
)
|
||||
|
||||
// Text Operation Errors.
|
||||
var (
|
||||
ErrChecksumMissing = errors.New("no checksum found")
|
||||
ErrChecksumFailed = errors.New("checksum does not match")
|
||||
ErrSignatureMissing = errors.New("signature not found")
|
||||
ErrSignatureFailed = errors.New("signature does not match")
|
||||
)
|
||||
|
||||
// TextPlacement signifies where jess metadata is put in text files.
|
||||
type TextPlacement string
|
||||
|
||||
const (
|
||||
// TextPlacementTop places the metadata at the top of the file.
|
||||
TextPlacementTop TextPlacement = "top"
|
||||
// TextPlacementBottom places the metadata at the end of the file.
|
||||
TextPlacementBottom TextPlacement = "bottom"
|
||||
// TextPlacementAfterComment places the metadata at the end of the top comment
|
||||
// block, or at the top, if the first line is not a comment.
|
||||
TextPlacementAfterComment TextPlacement = "after-comment"
|
||||
|
||||
defaultMetaPlacement = TextPlacementAfterComment
|
||||
)
|
||||
|
||||
// AddTextFileChecksum adds a checksum to a text file.
|
||||
func AddTextFileChecksum(data []byte, commentSign string, placement TextPlacement) ([]byte, error) {
|
||||
// Split text file into content and jess metadata lines.
|
||||
content, metaLines, err := textSplit(data, commentSign)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Calculate checksum.
|
||||
h := lhash.BLAKE2b_256.Digest(content)
|
||||
metaLines = append(metaLines, TextChecksumKey+": "+h.Base58())
|
||||
|
||||
// Sort and deduplicate meta lines.
|
||||
slices.Sort(metaLines)
|
||||
metaLines = slices.Compact(metaLines)
|
||||
|
||||
// Add meta lines and return.
|
||||
return textAddMeta(content, metaLines, commentSign, placement)
|
||||
}
|
||||
|
||||
// VerifyTextFileChecksum checks a checksum in a text file.
|
||||
func VerifyTextFileChecksum(data []byte, commentSign string) error {
|
||||
// Split text file into content and jess metadata lines.
|
||||
content, metaLines, err := textSplit(data, commentSign)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Verify all checksums.
|
||||
var checksumsVerified int
|
||||
for _, line := range metaLines {
|
||||
if strings.HasPrefix(line, TextChecksumKey) {
|
||||
// Clean key, delimiters and space.
|
||||
line = strings.TrimPrefix(line, TextChecksumKey)
|
||||
line = strings.TrimSpace(line) // Spaces and newlines.
|
||||
line = strings.Trim(line, ":= ") // Delimiters and spaces.
|
||||
// Parse checksum.
|
||||
h, err := lhash.FromBase58(line)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%w: failed to parse labeled hash: %w", ErrChecksumFailed, err)
|
||||
}
|
||||
// Verify checksum.
|
||||
if !h.Matches(content) {
|
||||
return ErrChecksumFailed
|
||||
}
|
||||
checksumsVerified++
|
||||
}
|
||||
}
|
||||
|
||||
// Fail when no checksums were verified.
|
||||
if checksumsVerified == 0 {
|
||||
return ErrChecksumMissing
|
||||
}
|
||||
|
||||
return nil
|
||||
}
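
// Usage sketch (illustrative only): add a checksum to a shell script with
// "#" as the comment sign and verify it again.
func exampleTextChecksumRoundTrip(script []byte) error {
	withChecksum, err := AddTextFileChecksum(script, "#", TextPlacementAfterComment)
	if err != nil {
		return err
	}
	return VerifyTextFileChecksum(withChecksum, "#")
}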
|
||||
|
||||
func textSplit(data []byte, commentSign string) (content []byte, metaLines []string, err error) {
|
||||
metaLinePrefix := commentSign + " " + TextKeyPrefix
|
||||
contentBuf := bytes.NewBuffer(make([]byte, 0, len(data)))
|
||||
metaLines = make([]string, 0, 1)
|
||||
|
||||
// Find jess metadata lines.
|
||||
s := bufio.NewScanner(bytes.NewReader(data))
|
||||
s.Split(scanRawLines)
|
||||
for s.Scan() {
|
||||
if strings.HasPrefix(s.Text(), metaLinePrefix) {
|
||||
metaLines = append(metaLines, strings.TrimSpace(strings.TrimPrefix(s.Text(), commentSign)))
|
||||
} else {
|
||||
_, _ = contentBuf.Write(s.Bytes())
|
||||
}
|
||||
}
|
||||
if s.Err() != nil {
|
||||
return nil, nil, s.Err()
|
||||
}
|
||||
|
||||
return bytes.TrimSpace(contentBuf.Bytes()), metaLines, nil
|
||||
}
|
||||
|
||||
func detectLineEndFormat(data []byte) (lineEnd string) {
|
||||
i := bytes.IndexByte(data, '\n')
|
||||
switch i {
|
||||
case -1:
|
||||
// Default to just newline.
|
||||
return "\n"
|
||||
case 0:
|
||||
// File starts with a newline.
|
||||
return "\n"
|
||||
default:
|
||||
// First newline is at second byte or later.
|
||||
if bytes.Equal(data[i-1:i+1], []byte("\r\n")) {
|
||||
return "\r\n"
|
||||
}
|
||||
return "\n"
|
||||
}
|
||||
}
|
||||
|
||||
func textAddMeta(data []byte, metaLines []string, commentSign string, position TextPlacement) ([]byte, error) {
|
||||
// Prepare new buffer.
|
||||
requiredSize := len(data)
|
||||
for _, line := range metaLines {
|
||||
requiredSize += len(line) + len(commentSign) + 3 // space + CRLF
|
||||
}
|
||||
contentBuf := bytes.NewBuffer(make([]byte, 0, requiredSize))
|
||||
|
||||
// Find line ending.
|
||||
lineEnd := detectLineEndFormat(data)
|
||||
|
||||
// Find jess metadata lines.
|
||||
if position == "" {
|
||||
position = defaultMetaPlacement
|
||||
}
|
||||
|
||||
switch position {
|
||||
case TextPlacementTop:
|
||||
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
|
||||
contentBuf.Write(data)
|
||||
// Add final newline.
|
||||
contentBuf.WriteString(lineEnd)
|
||||
|
||||
case TextPlacementBottom:
|
||||
contentBuf.Write(data)
|
||||
// Add two newlines when appending, as the content is whitespace-stripped first.
|
||||
contentBuf.WriteString(lineEnd)
|
||||
contentBuf.WriteString(lineEnd)
|
||||
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
|
||||
|
||||
case TextPlacementAfterComment:
|
||||
metaWritten := false
|
||||
s := bufio.NewScanner(bytes.NewReader(data))
|
||||
s.Split(scanRawLines)
|
||||
for s.Scan() {
|
||||
switch {
|
||||
case metaWritten:
|
||||
_, _ = contentBuf.Write(s.Bytes())
|
||||
case strings.HasPrefix(s.Text(), commentSign):
|
||||
_, _ = contentBuf.Write(s.Bytes())
|
||||
default:
|
||||
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
|
||||
metaWritten = true
|
||||
_, _ = contentBuf.Write(s.Bytes())
|
||||
}
|
||||
}
|
||||
if s.Err() != nil {
|
||||
return nil, s.Err()
|
||||
}
|
||||
// If we have scanned through the file and meta was not written, write it now.
|
||||
if !metaWritten {
|
||||
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
|
||||
}
|
||||
// Add final newline.
|
||||
contentBuf.WriteString(lineEnd)
|
||||
}
|
||||
|
||||
return contentBuf.Bytes(), nil
|
||||
}
|
||||
|
||||
func textWriteMetaLines(metaLines []string, commentSign string, lineEnd string, writer io.StringWriter) {
|
||||
for _, line := range metaLines {
|
||||
_, _ = writer.WriteString(commentSign)
|
||||
_, _ = writer.WriteString(" ")
|
||||
_, _ = writer.WriteString(line)
|
||||
_, _ = writer.WriteString(lineEnd)
|
||||
}
|
||||
}
|
||||
|
||||
// scanRawLines is a split function for a Scanner that returns each line of
|
||||
// text, including any trailing end-of-line marker. The returned line may
|
||||
// be empty. The end-of-line marker is one optional carriage return followed
|
||||
// by one mandatory newline. In regular expression notation, it is `\r?\n`.
|
||||
// The last non-empty line of input will be returned even if it has no
|
||||
// newline.
|
||||
func scanRawLines(data []byte, atEOF bool) (advance int, token []byte, err error) {
|
||||
if atEOF && len(data) == 0 {
|
||||
return 0, nil, nil
|
||||
}
|
||||
if i := bytes.IndexByte(data, '\n'); i >= 0 {
|
||||
// We have a full newline-terminated line.
|
||||
return i + 1, data[0 : i+1], nil
|
||||
}
|
||||
// If we're at EOF, we have a final, non-terminated line. Return it.
|
||||
if atEOF {
|
||||
return len(data), data, nil
|
||||
}
|
||||
// Request more data.
|
||||
return 0, nil, nil
|
||||
}
|
180
filesig/text_test.go
Normal file
|
@ -0,0 +1,180 @@
|
|||
package filesig
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestTextChecksums(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Base test text file.
|
||||
text := `#!/bin/bash
|
||||
# Initial
|
||||
# Comment
|
||||
# Block
|
||||
|
||||
do_something()`
|
||||
|
||||
// Test with checksum after comment.
|
||||
|
||||
textWithChecksumAfterComment := `#!/bin/bash
|
||||
# Initial
|
||||
# Comment
|
||||
# Block
|
||||
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
|
||||
|
||||
do_something()
|
||||
`
|
||||
|
||||
testTextWithChecksumAfterComment, err := AddTextFileChecksum([]byte(text), "#", TextPlacementAfterComment)
|
||||
require.NoError(t, err, "should be able to add checksum")
|
||||
assert.Equal(t, textWithChecksumAfterComment, string(testTextWithChecksumAfterComment), "should match")
|
||||
require.NoError(t,
|
||||
VerifyTextFileChecksum(testTextWithChecksumAfterComment, "#"),
|
||||
"checksum should be correct",
|
||||
)
|
||||
require.NoError(t,
|
||||
VerifyTextFileChecksum(append(
|
||||
[]byte("\n\n \r\n"),
|
||||
testTextWithChecksumAfterComment...,
|
||||
), "#"),
|
||||
"checksum should be correct",
|
||||
)
|
||||
require.NoError(t,
|
||||
VerifyTextFileChecksum(append(
|
||||
testTextWithChecksumAfterComment,
|
||||
[]byte("\r\n \n \n")...,
|
||||
), "#"),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
// Test with checksum at top.
|
||||
|
||||
textWithChecksumAtTop := `# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
|
||||
#!/bin/bash
|
||||
# Initial
|
||||
# Comment
|
||||
# Block
|
||||
|
||||
do_something()
|
||||
`
|
||||
|
||||
testTextWithChecksumAtTop, err := AddTextFileChecksum([]byte(text), "#", TextPlacementTop)
|
||||
require.NoError(t, err, "should be able to add checksum")
|
||||
assert.Equal(t, textWithChecksumAtTop, string(testTextWithChecksumAtTop), "should match")
|
||||
require.NoError(t,
|
||||
VerifyTextFileChecksum(testTextWithChecksumAtTop, "#"),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
// Test with checksum at bottom.
|
||||
|
||||
textWithChecksumAtBottom := `#!/bin/bash
|
||||
# Initial
|
||||
# Comment
|
||||
# Block
|
||||
|
||||
do_something()
|
||||
|
||||
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
|
||||
`
|
||||
|
||||
testTextWithChecksumAtBottom, err := AddTextFileChecksum([]byte(text), "#", TextPlacementBottom)
|
||||
require.NoError(t, err, "should be able to add checksum")
|
||||
assert.Equal(t, textWithChecksumAtBottom, string(testTextWithChecksumAtBottom), "should match")
|
||||
require.NoError(t,
|
||||
VerifyTextFileChecksum(testTextWithChecksumAtBottom, "#"),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
// Test with multiple checksums.
|
||||
|
||||
textWithMultiChecksum := `# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
|
||||
#!/bin/bash
|
||||
# Initial
|
||||
# Comment
|
||||
# Block
|
||||
# jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
|
||||
|
||||
do_something()
|
||||
|
||||
# jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
|
||||
`
|
||||
assert.NoError(t,
|
||||
VerifyTextFileChecksum([]byte(textWithMultiChecksum), "#"),
|
||||
"checksum should be correct",
|
||||
)
|
||||
|
||||
textWithMultiChecksumOutput := `#!/bin/bash
|
||||
# Initial
|
||||
# Comment
|
||||
# Block
|
||||
# jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
|
||||
# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
|
||||
# jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
|
||||
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
|
||||
|
||||
do_something()
|
||||
`
|
||||
testTextWithMultiChecksumOutput, err := AddTextFileChecksum([]byte(textWithMultiChecksum), "#", TextPlacementAfterComment)
|
||||
require.NoError(t, err, "should be able to add checksum")
|
||||
assert.Equal(t, textWithMultiChecksumOutput, string(testTextWithMultiChecksumOutput), "should match")
|
||||
|
||||
// Test failing checksums.
|
||||
|
||||
textWithFailingChecksums := `#!/bin/bash
|
||||
# Initial
|
||||
# Comment
|
||||
# Block
|
||||
# jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
|
||||
# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
|
||||
# jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
|
||||
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjaaaaaaaaaaaaaaaaaaaaa
|
||||
|
||||
do_something()
|
||||
`
|
||||
require.Error(t, VerifyTextFileChecksum([]byte(textWithFailingChecksums), "#"), "should fail")
|
||||
}
|
||||
|
||||
func TestLineEndDetection(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
assert.Equal(t,
|
||||
"\n",
|
||||
detectLineEndFormat(nil),
|
||||
"empty data should default to simple lf ending",
|
||||
)
|
||||
assert.Equal(t,
|
||||
"\n",
|
||||
detectLineEndFormat([]byte("\n")),
|
||||
"shoud detect lf ending with empty first line",
|
||||
)
|
||||
assert.Equal(t,
|
||||
"\r\n",
|
||||
detectLineEndFormat([]byte("\r\n")),
|
||||
"shoud detect crlf ending with empty first line",
|
||||
)
|
||||
assert.Equal(t,
|
||||
"\n",
|
||||
detectLineEndFormat([]byte("abc\n")),
|
||||
"shoud detect lf ending with data on single line",
|
||||
)
|
||||
assert.Equal(t,
|
||||
"\r\n",
|
||||
detectLineEndFormat([]byte("abc\r\n")),
|
||||
"shoud detect crlf ending with data on single line",
|
||||
)
|
||||
assert.Equal(t,
|
||||
"\n",
|
||||
detectLineEndFormat([]byte("abc\nabc\r\n")),
|
||||
"shoud detect lf ending with data on first line",
|
||||
)
|
||||
assert.Equal(t,
|
||||
"\r\n",
|
||||
detectLineEndFormat([]byte("abc\r\nabc\n")),
|
||||
"shoud detect crlf ending with data on first line",
|
||||
)
|
||||
}
|
11
filesig/text_yaml.go
Normal file
|
@ -0,0 +1,11 @@
|
|||
package filesig
|
||||
|
||||
// AddYAMLChecksum adds a checksum to a yaml file.
|
||||
func AddYAMLChecksum(data []byte, placement TextPlacement) ([]byte, error) {
|
||||
return AddTextFileChecksum(data, "#", placement)
|
||||
}
|
||||
|
||||
// VerifyYAMLChecksum checks a checksum in a yaml file.
|
||||
func VerifyYAMLChecksum(data []byte) error {
|
||||
return VerifyTextFileChecksum(data, "#")
|
||||
}
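
// Usage sketch (illustrative only): the yaml helpers just fix the comment
// sign to "#", so usage mirrors the generic text helpers.
func exampleYAMLChecksumRoundTrip(doc []byte) error {
	withChecksum, err := AddYAMLChecksum(doc, TextPlacementTop)
	if err != nil {
		return err
	}
	return VerifyYAMLChecksum(withChecksum)
}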
|
50
go.mod
Normal file
|
@ -0,0 +1,50 @@
|
|||
module github.com/safing/jess
|
||||
|
||||
go 1.21.1
|
||||
|
||||
toolchain go1.22.3
|
||||
|
||||
require (
|
||||
github.com/AlecAivazis/survey/v2 v2.3.7
|
||||
github.com/aead/ecdh v0.2.0
|
||||
github.com/mr-tron/base58 v1.2.0
|
||||
github.com/safing/structures v1.1.0
|
||||
github.com/satori/go.uuid v1.2.0
|
||||
github.com/spf13/cobra v1.8.1
|
||||
github.com/stretchr/testify v1.8.4
|
||||
github.com/tevino/abool v1.2.0
|
||||
github.com/tidwall/gjson v1.17.1
|
||||
github.com/tidwall/pretty v1.2.1
|
||||
github.com/tidwall/sjson v1.2.5
|
||||
github.com/zalando/go-keyring v0.2.5
|
||||
github.com/zeebo/blake3 v0.2.3
|
||||
golang.org/x/crypto v0.24.0
|
||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/alessio/shellescape v1.4.2 // indirect
|
||||
github.com/danieljoos/wincred v1.2.1 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/fxamacker/cbor/v2 v2.7.0 // indirect
|
||||
github.com/ghodss/yaml v1.0.0 // indirect
|
||||
github.com/godbus/dbus/v5 v5.1.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
github.com/x448/float16 v0.8.4 // indirect
|
||||
golang.org/x/sys v0.21.0 // indirect
|
||||
golang.org/x/term v0.21.0 // indirect
|
||||
golang.org/x/text v0.16.0 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
138
go.sum
Normal file
|
@ -0,0 +1,138 @@
|
|||
github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
|
||||
github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
|
||||
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s=
|
||||
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w=
|
||||
github.com/aead/ecdh v0.2.0 h1:pYop54xVaq/CEREFEcukHRZfTdjiWvYIsZDXXrBapQQ=
|
||||
github.com/aead/ecdh v0.2.0/go.mod h1:a9HHtXuSo8J1Js1MwLQx2mBhkXMT6YwUmVVEY4tTB8U=
|
||||
github.com/alessio/shellescape v1.4.2 h1:MHPfaU+ddJ0/bYWpgIeUnQUqKrlJ1S7BfEYPM4uEoM0=
|
||||
github.com/alessio/shellescape v1.4.2/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI=
|
||||
github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||
github.com/danieljoos/wincred v1.2.1 h1:dl9cBrupW8+r5250DYkYxocLeZ1Y4vB1kxgtjxw8GQs=
|
||||
github.com/danieljoos/wincred v1.2.1/go.mod h1:uGaFL9fDn3OLTvzCGulzE+SzjEe5NGlh5FdCcyfPwps=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
|
||||
github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
|
||||
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog=
|
||||
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||
github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
|
||||
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
|
||||
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||
github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
|
||||
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
|
||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||
github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o=
|
||||
github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/safing/structures v1.1.0 h1:QzHBQBjaZSLzw2f6PM4ibSmPcfBHAOB5CKJ+k4FYkhQ=
|
||||
github.com/safing/structures v1.1.0/go.mod h1:QUrB74FcU41ahQ5oy3YNFCoSq+twE/n3+vNZc2K35II=
|
||||
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
|
||||
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/tevino/abool v1.2.0 h1:heAkClL8H6w+mK5md9dzsuohKeXHUpY7Vw0ZCKW+huA=
|
||||
github.com/tevino/abool v1.2.0/go.mod h1:qc66Pna1RiIsPa7O4Egxxs9OqkuxDX55zznh9K07Tzg=
|
||||
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U=
|
||||
github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
|
||||
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
||||
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
|
||||
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/zalando/go-keyring v0.2.5 h1:Bc2HHpjALryKD62ppdEzaFG6VxL6Bc+5v0LYpN8Lba8=
|
||||
github.com/zalando/go-keyring v0.2.5/go.mod h1:HL4k+OXQfJUWaMnqyuSOc0drfGPX2b51Du6K+MRgZMk=
|
||||
github.com/zeebo/assert v1.1.0 h1:hU1L1vLTHsnO8x8c9KAR5GmM5QscxHg5RNU5z5qbUWY=
|
||||
github.com/zeebo/assert v1.1.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/blake3 v0.2.3 h1:TFoLXsjeXqRNFxSbk35Dk4YtszE/MQQGK10BH4ptoTg=
|
||||
github.com/zeebo/blake3 v0.2.3/go.mod h1:mjJjZpnsyIVtVgTOSpJ9vmRE4wgDeyt2HU3qXvvKCaQ=
|
||||
github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo=
|
||||
github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4=
|
||||
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI=
|
||||
golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM=
|
||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 h1:yixxcjnhBmY0nkL253HFVIm0JsFHwrHdT3Yh6szTnfY=
|
||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8/go.mod h1:jj3sYF3dwk5D+ghuXyeI3r5MFf+NT2An6/9dOA95KSI=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
|
||||
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.21.0 h1:WVXCp+/EBEHOj53Rvu+7KiT/iElMrO8ACK16SMZ3jaA=
|
||||
golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
|
||||
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
@ -3,9 +3,11 @@ package hashtools
|
|||
import (
|
||||
"crypto"
|
||||
|
||||
// register BLAKE2 in Go's internal registry
|
||||
// Register BLAKE2 in Go's internal registry.
|
||||
_ "golang.org/x/crypto/blake2b"
|
||||
_ "golang.org/x/crypto/blake2s"
|
||||
|
||||
"github.com/safing/jess/lhash"
|
||||
)
|
||||
|
||||
func init() {
|
||||
|
@ -16,31 +18,39 @@ func init() {
|
|||
|
||||
Register(blake2bBase.With(&HashTool{
|
||||
Name: "BLAKE2s-256",
|
||||
Hash: crypto.BLAKE2s_256,
|
||||
NewHash: crypto.BLAKE2s_256.New,
|
||||
CryptoHashID: crypto.BLAKE2s_256,
|
||||
DigestSize: crypto.BLAKE2s_256.Size(),
|
||||
BlockSize: crypto.BLAKE2s_256.New().BlockSize(),
|
||||
SecurityLevel: 128,
|
||||
Comment: "RFC 7693, successor of SHA3 finalist, optimized for 8-32 bit software",
|
||||
labeledAlg: lhash.BLAKE2s_256,
|
||||
}))
|
||||
Register(blake2bBase.With(&HashTool{
|
||||
Name: "BLAKE2b-256",
|
||||
Hash: crypto.BLAKE2b_256,
|
||||
NewHash: crypto.BLAKE2b_256.New,
|
||||
CryptoHashID: crypto.BLAKE2b_256,
|
||||
DigestSize: crypto.BLAKE2b_256.Size(),
|
||||
BlockSize: crypto.BLAKE2b_256.New().BlockSize(),
|
||||
SecurityLevel: 128,
|
||||
labeledAlg: lhash.BLAKE2b_256,
|
||||
}))
|
||||
Register(blake2bBase.With(&HashTool{
|
||||
Name: "BLAKE2b-384",
|
||||
Hash: crypto.BLAKE2b_384,
|
||||
NewHash: crypto.BLAKE2b_384.New,
|
||||
CryptoHashID: crypto.BLAKE2b_384,
|
||||
DigestSize: crypto.BLAKE2b_384.Size(),
|
||||
BlockSize: crypto.BLAKE2b_384.New().BlockSize(),
|
||||
SecurityLevel: 192,
|
||||
labeledAlg: lhash.BLAKE2b_384,
|
||||
}))
|
||||
Register(blake2bBase.With(&HashTool{
|
||||
Name: "BLAKE2b-512",
|
||||
Hash: crypto.BLAKE2b_512,
|
||||
NewHash: crypto.BLAKE2b_512.New,
|
||||
CryptoHashID: crypto.BLAKE2b_512,
|
||||
DigestSize: crypto.BLAKE2b_512.Size(),
|
||||
BlockSize: crypto.BLAKE2b_512.New().BlockSize(),
|
||||
SecurityLevel: 256,
|
||||
labeledAlg: lhash.BLAKE2b_512,
|
||||
}))
|
||||
}
|
||||
|
|
26
hashtools/blake3.go
Normal file
|
@ -0,0 +1,26 @@
|
|||
package hashtools
|
||||
|
||||
import (
|
||||
"hash"
|
||||
|
||||
"github.com/zeebo/blake3"
|
||||
|
||||
"github.com/safing/jess/lhash"
|
||||
)
|
||||
|
||||
func init() {
|
||||
Register(&HashTool{
|
||||
Name: "BLAKE3",
|
||||
NewHash: newBlake3,
|
||||
DigestSize: newBlake3().Size(),
|
||||
BlockSize: newBlake3().BlockSize(),
|
||||
SecurityLevel: 128,
|
||||
Comment: "cryptographic hash function based on Bao and BLAKE2",
|
||||
Author: "Jean-Philippe Aumasson et al., 2020",
|
||||
labeledAlg: lhash.BLAKE3,
|
||||
})
|
||||
}
|
||||
|
||||
func newBlake3() hash.Hash {
|
||||
return blake3.New()
|
||||
}
|
|
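Because BLAKE3 has no crypto.Hash ID in Go's standard registry, this file registers it through the generic NewHash function field instead. A short sketch of using the registered tool via the package's New helper (added further down in this diff); the printed digest is the empty-input BLAKE3 test vector from hashtools/main_test.go:

package main

import (
	"encoding/hex"
	"fmt"
	"log"

	"github.com/safing/jess/hashtools"
)

func main() {
	// Look up the registered BLAKE3 hash tool and get a hash.Hash from it.
	h, err := hashtools.New("BLAKE3")
	if err != nil {
		log.Fatal(err)
	}

	_, _ = h.Write([]byte("")) // hash.Hash writes never return an error
	fmt.Println(hex.EncodeToString(h.Sum(nil)))
	// af1349b9f5f9a1a6a0404dea36dcc9499bcb25c9adc112b7cc9a93cae41f3262
	// (the empty-input test vector listed in hashtools/main_test.go)
}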
@ -3,12 +3,16 @@ package hashtools
|
|||
import (
|
||||
"crypto"
|
||||
"hash"
|
||||
|
||||
"github.com/safing/jess/lhash"
|
||||
)
|
||||
|
||||
// HashTool holds generic information about a hash tool.
|
||||
type HashTool struct {
|
||||
Name string
|
||||
Hash crypto.Hash
|
||||
|
||||
NewHash func() hash.Hash
|
||||
CryptoHashID crypto.Hash
|
||||
|
||||
DigestSize int // in bytes
|
||||
BlockSize int // in bytes
|
||||
|
@ -16,11 +20,13 @@ type HashTool struct {
|
|||
|
||||
Comment string
|
||||
Author string
|
||||
|
||||
labeledAlg lhash.Algorithm
|
||||
}
|
||||
|
||||
// New returns a new hash.Hash instance of the hash tool.
|
||||
func (ht *HashTool) New() hash.Hash {
|
||||
return ht.Hash.New()
|
||||
return ht.NewHash()
|
||||
}
|
||||
|
||||
// With uses the original HashTool as a template for a new HashTool and returns the new HashTool.
|
||||
|
@ -28,8 +34,11 @@ func (ht *HashTool) With(changes *HashTool) *HashTool {
|
|||
if changes.Name == "" {
|
||||
changes.Name = ht.Name
|
||||
}
|
||||
if changes.Hash == 0 {
|
||||
changes.Hash = ht.Hash
|
||||
if changes.NewHash == nil {
|
||||
changes.NewHash = ht.NewHash
|
||||
}
|
||||
if changes.CryptoHashID == 0 {
|
||||
changes.CryptoHashID = ht.CryptoHashID
|
||||
}
|
||||
if changes.DigestSize == 0 {
|
||||
changes.DigestSize = ht.DigestSize
|
||||
|
@ -46,6 +55,14 @@ func (ht *HashTool) With(changes *HashTool) *HashTool {
|
|||
if changes.Author == "" {
|
||||
changes.Author = ht.Author
|
||||
}
|
||||
if changes.labeledAlg == 0 {
|
||||
changes.labeledAlg = ht.labeledAlg
|
||||
}
|
||||
|
||||
return changes
|
||||
}
|
||||
|
||||
// LabeledHasher returns the corresponding labeled hashing algorithm.
|
||||
func (ht *HashTool) LabeledHasher() lhash.Algorithm {
|
||||
return ht.labeledAlg
|
||||
}
|
||||
|
|
|
@ -2,13 +2,14 @@ package hashtools
|
|||
|
||||
import (
|
||||
"crypto"
|
||||
|
||||
// register SHA2 in Go's internal registry
|
||||
// Register SHA2 in Go's internal registry.
|
||||
_ "crypto/sha256"
|
||||
_ "crypto/sha512"
|
||||
|
||||
// register SHA3 in Go's internal registry
|
||||
// Register SHA3 in Go's internal registry.
|
||||
_ "golang.org/x/crypto/sha3"
|
||||
|
||||
"github.com/safing/jess/lhash"
|
||||
)
|
||||
|
||||
func init() {
|
||||
|
@ -19,46 +20,58 @@ func init() {
|
|||
}
|
||||
Register(sha2Base.With(&HashTool{
|
||||
Name: "SHA2-224",
|
||||
Hash: crypto.SHA224,
|
||||
NewHash: crypto.SHA224.New,
|
||||
CryptoHashID: crypto.SHA224,
|
||||
DigestSize: crypto.SHA224.Size(),
|
||||
BlockSize: crypto.SHA224.New().BlockSize(),
|
||||
SecurityLevel: 112,
|
||||
Author: "NSA, 2004",
|
||||
labeledAlg: lhash.SHA2_224,
|
||||
}))
|
||||
Register(sha2Base.With(&HashTool{
|
||||
Name: "SHA2-256",
|
||||
Hash: crypto.SHA256,
|
||||
NewHash: crypto.SHA256.New,
|
||||
CryptoHashID: crypto.SHA256,
|
||||
DigestSize: crypto.SHA256.Size(),
|
||||
BlockSize: crypto.SHA256.New().BlockSize(),
|
||||
SecurityLevel: 128,
|
||||
labeledAlg: lhash.SHA2_256,
|
||||
}))
|
||||
Register(sha2Base.With(&HashTool{
|
||||
Name: "SHA2-384",
|
||||
Hash: crypto.SHA384,
|
||||
NewHash: crypto.SHA384.New,
|
||||
CryptoHashID: crypto.SHA384,
|
||||
DigestSize: crypto.SHA384.Size(),
|
||||
BlockSize: crypto.SHA384.New().BlockSize(),
|
||||
SecurityLevel: 192,
|
||||
labeledAlg: lhash.SHA2_384,
|
||||
}))
|
||||
Register(sha2Base.With(&HashTool{
|
||||
Name: "SHA2-512",
|
||||
Hash: crypto.SHA512,
|
||||
NewHash: crypto.SHA512.New,
|
||||
CryptoHashID: crypto.SHA512,
|
||||
DigestSize: crypto.SHA512.Size(),
|
||||
BlockSize: crypto.SHA512.New().BlockSize(),
|
||||
SecurityLevel: 256,
|
||||
labeledAlg: lhash.SHA2_512,
|
||||
}))
|
||||
Register(sha2Base.With(&HashTool{
|
||||
Name: "SHA2-512-224",
|
||||
Hash: crypto.SHA512_224,
|
||||
NewHash: crypto.SHA512_224.New,
|
||||
CryptoHashID: crypto.SHA512_224,
|
||||
DigestSize: crypto.SHA512_224.Size(),
|
||||
BlockSize: crypto.SHA512_224.New().BlockSize(),
|
||||
SecurityLevel: 112,
|
||||
labeledAlg: lhash.SHA2_512_224,
|
||||
}))
|
||||
Register(sha2Base.With(&HashTool{
|
||||
Name: "SHA2-512-256",
|
||||
Hash: crypto.SHA512_256,
|
||||
NewHash: crypto.SHA512_256.New,
|
||||
CryptoHashID: crypto.SHA512_256,
|
||||
DigestSize: crypto.SHA512_256.Size(),
|
||||
BlockSize: crypto.SHA512_256.New().BlockSize(),
|
||||
SecurityLevel: 128,
|
||||
labeledAlg: lhash.SHA2_512_256,
|
||||
}))
|
||||
|
||||
// SHA3
|
||||
|
@ -68,30 +81,38 @@ func init() {
|
|||
}
|
||||
Register(sha3Base.With(&HashTool{
|
||||
Name: "SHA3-224",
|
||||
Hash: crypto.SHA3_224,
|
||||
NewHash: crypto.SHA3_224.New,
|
||||
CryptoHashID: crypto.SHA3_224,
|
||||
DigestSize: crypto.SHA3_224.Size(),
|
||||
BlockSize: crypto.SHA3_224.New().BlockSize(),
|
||||
SecurityLevel: 112,
|
||||
labeledAlg: lhash.SHA3_224,
|
||||
}))
|
||||
Register(sha3Base.With(&HashTool{
|
||||
Name: "SHA3-256",
|
||||
Hash: crypto.SHA3_256,
|
||||
NewHash: crypto.SHA3_256.New,
|
||||
CryptoHashID: crypto.SHA3_256,
|
||||
DigestSize: crypto.SHA3_256.Size(),
|
||||
BlockSize: crypto.SHA3_256.New().BlockSize(),
|
||||
SecurityLevel: 128,
|
||||
labeledAlg: lhash.SHA3_256,
|
||||
}))
|
||||
Register(sha3Base.With(&HashTool{
|
||||
Name: "SHA3-384",
|
||||
Hash: crypto.SHA3_384,
|
||||
NewHash: crypto.SHA3_384.New,
|
||||
CryptoHashID: crypto.SHA3_384,
|
||||
DigestSize: crypto.SHA3_384.Size(),
|
||||
BlockSize: crypto.SHA3_384.New().BlockSize(),
|
||||
SecurityLevel: 192,
|
||||
labeledAlg: lhash.SHA3_384,
|
||||
}))
|
||||
Register(sha3Base.With(&HashTool{
|
||||
Name: "SHA3-512",
|
||||
Hash: crypto.SHA3_512,
|
||||
NewHash: crypto.SHA3_512.New,
|
||||
CryptoHashID: crypto.SHA3_512,
|
||||
DigestSize: crypto.SHA3_512.Size(),
|
||||
BlockSize: crypto.SHA3_512.New().BlockSize(),
|
||||
SecurityLevel: 256,
|
||||
labeledAlg: lhash.SHA3_512,
|
||||
}))
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ func Get(name string) (*HashTool, error) {
|
|||
return hashTool, nil
|
||||
}
|
||||
|
||||
// New returns a new hash.Hash with the given Name
|
||||
// New returns a new hash.Hash with the given name.
|
||||
func New(name string) (hash.Hash, error) {
|
||||
hashTool, err := Get(name)
|
||||
if err != nil {
|
||||
|
|
|
@ -1,15 +1,18 @@
|
|||
package hashtools
|
||||
|
||||
import "testing"
|
||||
import (
|
||||
"encoding/hex"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestAll(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testData := []byte("The quick brown fox jumps over the lazy dog. ")
|
||||
testData := []byte("The quick brown fox jumps over the lazy dog.")
|
||||
|
||||
all := AsList()
|
||||
for _, hashTool := range all {
|
||||
|
||||
// take detour in getting hash.Hash for testing
|
||||
// Test hash usage.
|
||||
hash, err := New(hashTool.Name)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get HashTool %s", hashTool.Name)
|
||||
|
@ -29,5 +32,97 @@ func TestAll(t *testing.T) {
|
|||
t.Errorf("hashTool %s is broken or reports invalid digest size. Expected %d, got %d.", hashTool.Name, hashTool.DigestSize, len(sum))
|
||||
}
|
||||
|
||||
// Check hash outputs.
|
||||
expectedOutputs, ok := testOutputs[hashTool.Name]
|
||||
if !ok {
|
||||
t.Errorf("no test outputs available for %s", hashTool.Name)
|
||||
continue
|
||||
}
|
||||
|
||||
// Test empty string.
|
||||
hash.Reset()
|
||||
_, _ = hash.Write(testInputEmpty)
|
||||
hexSum := hex.EncodeToString(hash.Sum(nil))
|
||||
if hexSum != expectedOutputs[0] {
|
||||
t.Errorf("hash tool %s: test empty: digest mismatch, expected %+v, got %+v",
|
||||
hashTool.Name, expectedOutputs[0], hexSum)
|
||||
}
|
||||
|
||||
// Test fox string.
|
||||
hash.Reset()
|
||||
_, _ = hash.Write(testInputFox)
|
||||
hexSum = hex.EncodeToString(hash.Sum(nil))
|
||||
if hexSum != expectedOutputs[1] {
|
||||
t.Errorf("hash tool %s: test empty: digest mismatch, expected %+v, got %+v",
|
||||
hashTool.Name, expectedOutputs[1], hexSum)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
testInputEmpty = []byte("")
|
||||
testInputFox = []byte("The quick brown fox jumps over the lazy dog.")
|
||||
)
|
||||
|
||||
var testOutputs = map[string][2]string{
|
||||
"SHA2-224": {
|
||||
"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f",
|
||||
"619cba8e8e05826e9b8c519c0a5c68f4fb653e8a3d8aa04bb2c8cd4c",
|
||||
},
|
||||
"SHA2-256": {
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
|
||||
"ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c",
|
||||
},
|
||||
"SHA2-384": {
|
||||
"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b",
|
||||
"ed892481d8272ca6df370bf706e4d7bc1b5739fa2177aae6c50e946678718fc67a7af2819a021c2fc34e91bdb63409d7",
|
||||
},
|
||||
"SHA2-512": {
|
||||
"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
|
||||
"91ea1245f20d46ae9a037a989f54f1f790f0a47607eeb8a14d12890cea77a1bbc6c7ed9cf205e67b7f2b8fd4c7dfd3a7a8617e45f3c463d481c7e586c39ac1ed",
|
||||
},
|
||||
"SHA2-512-224": {
|
||||
"6ed0dd02806fa89e25de060c19d3ac86cabb87d6a0ddd05c333b84f4",
|
||||
"6d6a9279495ec4061769752e7ff9c68b6b0b3c5a281b7917ce0572de",
|
||||
},
|
||||
"SHA2-512-256": {
|
||||
"c672b8d1ef56ed28ab87c3622c5114069bdd3ad7b8f9737498d0c01ecef0967a",
|
||||
"1546741840f8a492b959d9b8b2344b9b0eb51b004bba35c0aebaac86d45264c3",
|
||||
},
|
||||
"SHA3-224": {
|
||||
"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7",
|
||||
"2d0708903833afabdd232a20201176e8b58c5be8a6fe74265ac54db0",
|
||||
},
|
||||
"SHA3-256": {
|
||||
"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a",
|
||||
"a80f839cd4f83f6c3dafc87feae470045e4eb0d366397d5c6ce34ba1739f734d",
|
||||
},
|
||||
"SHA3-384": {
|
||||
"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004",
|
||||
"1a34d81695b622df178bc74df7124fe12fac0f64ba5250b78b99c1273d4b080168e10652894ecad5f1f4d5b965437fb9",
|
||||
},
|
||||
"SHA3-512": {
|
||||
"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26",
|
||||
"18f4f4bd419603f95538837003d9d254c26c23765565162247483f65c50303597bc9ce4d289f21d1c2f1f458828e33dc442100331b35e7eb031b5d38ba6460f8",
|
||||
},
|
||||
"BLAKE2s-256": {
|
||||
"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9",
|
||||
"95bca6e1b761dca1323505cc629949a0e03edf11633cc7935bd8b56f393afcf2",
|
||||
},
|
||||
"BLAKE2b-256": {
|
||||
"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8",
|
||||
"69d7d3b0afba81826d27024c17f7f183659ed0812cf27b382eaef9fdc29b5712",
|
||||
},
|
||||
"BLAKE2b-384": {
|
||||
"b32811423377f52d7862286ee1a72ee540524380fda1724a6f25d7978c6fd3244a6caf0498812673c5e05ef583825100",
|
||||
"16d65de1a3caf1c26247234c39af636284c7e19ca448c0de788272081410778852c94d9cef6b939968d4f872c7f78337",
|
||||
},
|
||||
"BLAKE2b-512": {
|
||||
"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce",
|
||||
"87af9dc4afe5651b7aa89124b905fd214bf17c79af58610db86a0fb1e0194622a4e9d8e395b352223a8183b0d421c0994b98286cbf8c68a495902e0fe6e2bda2",
|
||||
},
|
||||
"BLAKE3": {
|
||||
"af1349b9f5f9a1a6a0404dea36dcc9499bcb25c9adc112b7cc9a93cae41f3262",
|
||||
"4c9bd68d7f0baa2e167cef98295eb1ec99a3ec8f0656b33dbae943b387f31d5d",
|
||||
},
|
||||
}
|
||||
|
|
195
import_export.go
Normal file
|
@ -0,0 +1,195 @@
|
|||
package jess
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Keywords and Prefixes for the export text format.
|
||||
const (
|
||||
ExportSenderKeyword = "sender"
|
||||
ExportSenderPrefix = "sender:"
|
||||
|
||||
ExportRecipientKeyword = "recipient"
|
||||
ExportRecipientPrefix = "recipient:"
|
||||
|
||||
ExportKeyKeyword = "secret"
|
||||
ExportKeyPrefix = "secret:"
|
||||
|
||||
ExportEnvelopeKeyword = "envelope"
|
||||
ExportEnvelopePrefix = "envelope:"
|
||||
)
|
||||
|
||||
// Export exports the public part of a signet in text format.
|
||||
func (signet *Signet) Export(short bool) (textFormat string, err error) {
|
||||
// Make public if needed.
|
||||
if !signet.Public {
|
||||
signet, err = signet.AsRecipient()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
// Transform to text format.
|
||||
return signet.toTextFormat(short)
|
||||
}
|
||||
|
||||
// Backup exports the private part of a signet in text format.
|
||||
func (signet *Signet) Backup(short bool) (textFormat string, err error) {
|
||||
// Abort if public.
|
||||
if signet.Public {
|
||||
return "", errors.New("cannot backup (only export) a recipient")
|
||||
}
|
||||
|
||||
// Transform to text format.
|
||||
return signet.toTextFormat(short)
|
||||
}
|
||||
|
||||
func (signet *Signet) toTextFormat(short bool) (textFormat string, err error) {
|
||||
// Serialize to base58.
|
||||
base58data, err := signet.ToBase58()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Define keywords.
|
||||
var keyword, typeComment string
|
||||
switch {
|
||||
case signet.Scheme == SignetSchemePassword:
|
||||
return "", errors.New("cannot backup or export passwords")
|
||||
case signet.Scheme == SignetSchemeKey:
|
||||
// Check if the signet is marked as "public".
|
||||
if signet.Public {
|
||||
return "", errors.New("cannot export keys")
|
||||
}
|
||||
keyword = ExportKeyKeyword
|
||||
typeComment = "symmetric-key"
|
||||
case signet.Public:
|
||||
keyword = ExportRecipientKeyword
|
||||
typeComment = fmt.Sprintf(
|
||||
"public-%s-key", toTextFormatString(signet.Scheme),
|
||||
)
|
||||
default:
|
||||
keyword = ExportSenderKeyword
|
||||
typeComment = fmt.Sprintf(
|
||||
"private-%s-key", toTextFormatString(signet.Scheme),
|
||||
)
|
||||
}
|
||||
|
||||
// Transform to text format.
|
||||
if short {
|
||||
return fmt.Sprintf(
|
||||
"%s:%s",
|
||||
keyword,
|
||||
base58data,
|
||||
), nil
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
"%s:%s:%s:%s",
|
||||
keyword,
|
||||
typeComment,
|
||||
toTextFormatString(signet.Info.Name),
|
||||
base58data,
|
||||
), nil
|
||||
}
|
||||
|
||||
// Export exports the envelope in text format.
|
||||
func (e *Envelope) Export(short bool) (textFormat string, err error) {
|
||||
// Remove any key data.
|
||||
e.CleanSignets()
|
||||
|
||||
// Serialize to base58.
|
||||
base58data, err := e.ToBase58()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Transform to text format.
|
||||
if short {
|
||||
return fmt.Sprintf(
|
||||
"%s:%s",
|
||||
ExportEnvelopeKeyword,
|
||||
base58data,
|
||||
), nil
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
"%s:%s:%s:%s",
|
||||
ExportEnvelopeKeyword,
|
||||
e.SuiteID,
|
||||
e.Name,
|
||||
base58data,
|
||||
), nil
|
||||
}
|
||||
|
||||
// KeyFromTextFormat loads a secret key from the text format.
|
||||
func KeyFromTextFormat(textFormat string) (*Signet, error) {
|
||||
// Check the identifier.
|
||||
if !strings.HasPrefix(textFormat, ExportKeyPrefix) {
|
||||
return nil, errors.New("not a secret")
|
||||
}
|
||||
|
||||
// Parse the data section.
|
||||
splitted := strings.Split(textFormat, ":")
|
||||
if len(splitted) < 2 {
|
||||
return nil, errors.New("invalid format")
|
||||
}
|
||||
return SignetFromBase58(splitted[len(splitted)-1])
|
||||
}
|
||||
|
||||
// SenderFromTextFormat loads a sender (private key) from the text format.
|
||||
func SenderFromTextFormat(textFormat string) (*Signet, error) {
|
||||
// Check the identifier.
|
||||
if !strings.HasPrefix(textFormat, ExportSenderPrefix) {
|
||||
return nil, errors.New("not a sender")
|
||||
}
|
||||
|
||||
// Parse the data section.
|
||||
splitted := strings.Split(textFormat, ":")
|
||||
if len(splitted) < 2 {
|
||||
return nil, errors.New("invalid format")
|
||||
}
|
||||
return SignetFromBase58(splitted[len(splitted)-1])
|
||||
}
|
||||
|
||||
// RecipientFromTextFormat loads a recipient (public key) from the text format.
|
||||
func RecipientFromTextFormat(textFormat string) (*Signet, error) {
|
||||
// Check the identifier.
|
||||
if !strings.HasPrefix(textFormat, ExportRecipientPrefix) {
|
||||
return nil, errors.New("not a recipient")
|
||||
}
|
||||
|
||||
// Parse the data section.
|
||||
splitted := strings.Split(textFormat, ":")
|
||||
if len(splitted) < 2 {
|
||||
return nil, errors.New("invalid format")
|
||||
}
|
||||
return SignetFromBase58(splitted[len(splitted)-1])
|
||||
}
|
||||
|
||||
// EnvelopeFromTextFormat loads an envelope from the text format.
|
||||
func EnvelopeFromTextFormat(textFormat string) (*Envelope, error) {
|
||||
// Check the identifier.
|
||||
if !strings.HasPrefix(textFormat, ExportEnvelopePrefix) {
|
||||
return nil, errors.New("not an envelope")
|
||||
}
|
||||
|
||||
// Parse the data section.
|
||||
splitted := strings.Split(textFormat, ":")
|
||||
if len(splitted) < 2 {
|
||||
return nil, errors.New("invalid format")
|
||||
}
|
||||
return EnvelopeFromBase58(splitted[len(splitted)-1])
|
||||
}
|
||||
|
||||
var replaceForTextFormatMatcher = regexp.MustCompile(`[^A-Za-z0-9]+`)
|
||||
|
||||
// toTextFormatString makes a string compatible with the text format.
|
||||
func toTextFormatString(s string) string {
|
||||
return strings.ToLower(
|
||||
strings.Trim(
|
||||
replaceForTextFormatMatcher.ReplaceAllString(s, "-"), "-",
|
||||
),
|
||||
)
|
||||
}
|
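The export text format is colon-separated: a keyword, optional type and name comments, and the Base58-encoded payload as the last element, which is why the From*TextFormat loaders split on ":" and decode only the final part. A minimal round-trip sketch for an envelope, assuming env is an existing *Envelope obtained elsewhere; the helper name is illustrative and not part of the package API:

// roundTripEnvelope exports an envelope to the text format and loads it back.
func roundTripEnvelope(env *Envelope) (*Envelope, error) {
	// The short form produces "envelope:<base58>"; the long form additionally
	// embeds the suite ID and envelope name as informational fields.
	textFormat, err := env.Export(true)
	if err != nil {
		return nil, err
	}
	return EnvelopeFromTextFormat(textFormat)
}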
|
@ -3,9 +3,8 @@ package jess
|
|||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/safing/portbase/formats/dsd"
|
||||
|
||||
"github.com/safing/portbase/container"
|
||||
"github.com/safing/structures/container"
|
||||
"github.com/safing/structures/dsd"
|
||||
)
|
||||
|
||||
/*
|
||||
|
@ -16,10 +15,8 @@ import (
|
|||
- Data: byte block
|
||||
*/
|
||||
|
||||
var (
|
||||
// ErrIncompatibleFileFormatVersion is returned when an incompatible wire format is encountered.
|
||||
ErrIncompatibleFileFormatVersion = errors.New("incompatible file format version")
|
||||
)
|
||||
// ErrIncompatibleFileFormatVersion is returned when an incompatible wire format is encountered.
|
||||
var ErrIncompatibleFileFormatVersion = errors.New("incompatible file format version")
|
||||
|
||||
// ToFileFormat serializes the letter for storing it as a file.
|
||||
func (letter *Letter) ToFileFormat() (*container.Container, error) {
|
||||
|
|
|
@ -3,7 +3,7 @@ package jess
|
|||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/safing/portbase/container"
|
||||
"github.com/safing/structures/container"
|
||||
)
|
||||
|
||||
/*
|
||||
|
@ -24,10 +24,8 @@ import (
|
|||
- MAC: byte block
|
||||
*/
|
||||
|
||||
var (
|
||||
// ErrIncompatibleWireFormatVersion is returned when an incompatible wire format is encountered.
|
||||
ErrIncompatibleWireFormatVersion = errors.New("incompatible wire format version")
|
||||
)
|
||||
// ErrIncompatibleWireFormatVersion is returned when an incompatible wire format is encountered.
|
||||
var ErrIncompatibleWireFormatVersion = errors.New("incompatible wire format version")
|
||||
|
||||
// ToWire serializes the letter for sending it over a network connection.
|
||||
func (letter *Letter) ToWire() (*container.Container, error) {
|
||||
|
@ -87,7 +85,8 @@ func (letter *Letter) ToWire() (*container.Container, error) {
|
|||
}
|
||||
|
||||
// LetterFromWireData is a relay to LetterFromWire to quickly fix import issues of godep.
|
||||
// DEPRECATED
|
||||
//
|
||||
// Deprecated: Please use LetterFromWire with a fresh container directly.
|
||||
func LetterFromWireData(data []byte) (*Letter, error) {
|
||||
return LetterFromWire(container.New(data))
|
||||
}
|
||||
|
|
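With the import move from portbase to safing/structures, the wire round trip stays the same: serialize the letter to a container, compile it to bytes for transport, and parse it back from a fresh container. A hedged sketch, assuming letter is an existing *jess.Letter (for example, the result of an encrypt operation):

package wiredemo

import (
	"github.com/safing/jess"
	"github.com/safing/structures/container"
)

// letterWireRoundTrip serializes a letter for the network and parses it back.
func letterWireRoundTrip(letter *jess.Letter) (*jess.Letter, error) {
	c, err := letter.ToWire()
	if err != nil {
		return nil, err
	}
	wireData := c.CompileData() // flatten the container into one byte slice

	// Prefer LetterFromWire with a fresh container over the deprecated
	// LetterFromWireData relay.
	return jess.LetterFromWire(container.New(wireData))
}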
|
@ -10,9 +10,8 @@ import (
|
|||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/safing/portbase/container"
|
||||
|
||||
"github.com/safing/portbase/formats/dsd"
|
||||
"github.com/safing/structures/container"
|
||||
"github.com/safing/structures/dsd"
|
||||
)
|
||||
|
||||
// Letter is the data format for encrypted data at rest or in transit.
|
||||
|
@ -150,7 +149,7 @@ func (letter *Letter) ToJSON() ([]byte, error) {
|
|||
return json.Marshal(letter)
|
||||
}
|
||||
|
||||
// LetterFromJSON loads a json-serialized letter
|
||||
// LetterFromJSON loads a json-serialized letter.
|
||||
func LetterFromJSON(data []byte) (*Letter, error) {
|
||||
letter := &Letter{}
|
||||
|
||||
|
@ -186,7 +185,7 @@ func LetterFromDSD(data []byte) (*Letter, error) {
|
|||
|
||||
const (
|
||||
// Field IDs for signing
|
||||
// These IDs MUST NOT CHANGE
|
||||
// These IDs MUST NOT CHANGE.
|
||||
|
||||
fieldIDLetterVersion uint64 = 1 // signed, MAC'd (may not exist when wired)
|
||||
fieldIDLetterSuiteID uint64 = 2 // signed, MAC'd (may not exist when wired)
|
||||
|
|
|
@ -9,6 +9,8 @@ import (
|
|||
)
|
||||
|
||||
func TestSerialization(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
subject := &Letter{
|
||||
Version: 1,
|
||||
SuiteID: SuiteComplete,
|
||||
|
@ -36,6 +38,8 @@ func TestSerialization(t *testing.T) {
|
|||
}
|
||||
|
||||
func testSerialize(t *testing.T, letter *Letter, wireFormat bool) { //nolint:unparam
|
||||
t.Helper()
|
||||
|
||||
// File Format
|
||||
|
||||
fileData, err := letter.ToFileFormat()
|
||||
|
@ -85,10 +89,9 @@ func (letter *Letter) CheckEqual(other *Letter) error {
|
|||
letterValue := reflect.ValueOf(*letter)
|
||||
otherValue := reflect.ValueOf(*other)
|
||||
|
||||
var ok bool
|
||||
numElements := letterValue.NumField()
|
||||
for i := 0; i < numElements; i++ {
|
||||
ok := false
|
||||
|
||||
name := letterValue.Type().Field(i).Name
|
||||
switch name {
|
||||
case "Data": // TODO: this required special handling in the past, leave it here for now.
|
||||
|
|
|
@ -1,19 +1,25 @@
|
|||
// Package lhash provides integrated labeled hashes.
|
||||
//
|
||||
//nolint:gci
|
||||
package lhash
|
||||
|
||||
import (
|
||||
"crypto"
|
||||
"hash"
|
||||
"io"
|
||||
|
||||
// register SHA2 in Go's internal registry
|
||||
// Register SHA2 in Go's internal registry.
|
||||
_ "crypto/sha256"
|
||||
_ "crypto/sha512"
|
||||
|
||||
// register SHA3 in Go's internal registry
|
||||
// Register SHA3 in Go's internal registry.
|
||||
_ "golang.org/x/crypto/sha3"
|
||||
|
||||
// register BLAKE2 in Go's internal registry
|
||||
// Register BLAKE2 in Go's internal registry.
|
||||
_ "golang.org/x/crypto/blake2b"
|
||||
_ "golang.org/x/crypto/blake2s"
|
||||
|
||||
"github.com/zeebo/blake3"
|
||||
)
|
||||
|
||||
// Algorithm is an identifier for a hash function.
|
||||
|
@ -37,6 +43,8 @@ const (
|
|||
BLAKE2b_256 Algorithm = 25
|
||||
BLAKE2b_384 Algorithm = 26
|
||||
BLAKE2b_512 Algorithm = 27
|
||||
|
||||
BLAKE3 Algorithm = 32
|
||||
)
|
||||
|
||||
func (a Algorithm) new() hash.Hash {
|
||||
|
@ -66,7 +74,7 @@ func (a Algorithm) new() hash.Hash {
|
|||
case SHA3_512:
|
||||
return crypto.SHA3_512.New()
|
||||
|
||||
// BLAKE2
|
||||
// BLAKE2
|
||||
case BLAKE2s_256:
|
||||
return crypto.BLAKE2s_256.New()
|
||||
case BLAKE2b_256:
|
||||
|
@ -76,7 +84,77 @@ func (a Algorithm) new() hash.Hash {
|
|||
case BLAKE2b_512:
|
||||
return crypto.BLAKE2b_512.New()
|
||||
|
||||
// BLAKE3
|
||||
case BLAKE3:
|
||||
return blake3.New()
|
||||
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (a Algorithm) String() string {
|
||||
switch a {
|
||||
|
||||
// SHA2
|
||||
case SHA2_224:
|
||||
return "SHA2_224"
|
||||
case SHA2_256:
|
||||
return "SHA2_256"
|
||||
case SHA2_384:
|
||||
return "SHA2_384"
|
||||
case SHA2_512:
|
||||
return "SHA2_512"
|
||||
case SHA2_512_224:
|
||||
return "SHA2_512_224"
|
||||
case SHA2_512_256:
|
||||
return "SHA2_512_256"
|
||||
|
||||
// SHA3
|
||||
case SHA3_224:
|
||||
return "SHA3_224"
|
||||
case SHA3_256:
|
||||
return "SHA3_256"
|
||||
case SHA3_384:
|
||||
return "SHA3_384"
|
||||
case SHA3_512:
|
||||
return "SHA3_512"
|
||||
|
||||
// BLAKE2
|
||||
case BLAKE2s_256:
|
||||
return "BLAKE2s_256"
|
||||
case BLAKE2b_256:
|
||||
return "BLAKE2b_256"
|
||||
case BLAKE2b_384:
|
||||
return "BLAKE2b_384"
|
||||
case BLAKE2b_512:
|
||||
return "BLAKE2b_512"
|
||||
|
||||
// BLAKE3
|
||||
case BLAKE3:
|
||||
return "BLAKE3"
|
||||
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
|
||||
// RawHasher returns a new raw hasher of the algorithm.
|
||||
func (a Algorithm) RawHasher() hash.Hash {
|
||||
return a.new()
|
||||
}
|
||||
|
||||
// Digest creates a new labeled hash and digests the given data.
|
||||
func (a Algorithm) Digest(data []byte) *LabeledHash {
|
||||
return Digest(a, data)
|
||||
}
|
||||
|
||||
// DigestFile creates a new labeled hash and digests the given file.
|
||||
func (a Algorithm) DigestFile(pathToFile string) (*LabeledHash, error) {
|
||||
return DigestFile(a, pathToFile)
|
||||
}
|
||||
|
||||
// DigestFromReader creates a new labeled hash and digests from the given reader.
|
||||
func (a Algorithm) DigestFromReader(reader io.Reader) (*LabeledHash, error) {
|
||||
return DigestFromReader(a, reader)
|
||||
}
|
||||
|
|
|
@ -1,12 +1,18 @@
|
|||
package lhash
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/subtle"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
"github.com/safing/portbase/container"
|
||||
"github.com/mr-tron/base58"
|
||||
|
||||
"github.com/safing/structures/container"
|
||||
)
|
||||
|
||||
// LabeledHash represents a typed hash value.
|
||||
|
@ -18,8 +24,8 @@ type LabeledHash struct {
|
|||
// Digest creates a new labeled hash and digests the given data.
|
||||
func Digest(alg Algorithm, data []byte) *LabeledHash {
|
||||
hasher := alg.new()
|
||||
_, _ = hasher.Write(data) // never returns an error
|
||||
defer hasher.Reset() // internal state may leak data if kept in memory
|
||||
_, _ = hasher.Write(data) // Never returns an error.
|
||||
defer hasher.Reset() // Internal state may leak data if kept in memory.
|
||||
|
||||
return &LabeledHash{
|
||||
alg: alg,
|
||||
|
@ -27,18 +33,46 @@ func Digest(alg Algorithm, data []byte) *LabeledHash {
|
|||
}
|
||||
}
|
||||
|
||||
// DigestFile creates a new labeled hash and digests the given file.
|
||||
func DigestFile(alg Algorithm, pathToFile string) (*LabeledHash, error) {
|
||||
// Open file that should be hashed.
|
||||
file, err := os.Open(pathToFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
|
||||
return DigestFromReader(alg, file)
|
||||
}
|
||||
|
||||
// DigestFromReader creates a new labeled hash and digests from the given reader.
|
||||
func DigestFromReader(alg Algorithm, reader io.Reader) (*LabeledHash, error) {
|
||||
hasher := alg.new()
|
||||
defer hasher.Reset() // Internal state may leak data if kept in memory.
|
||||
|
||||
// Pipe all data directly to the hashing algorithm.
|
||||
_, err := bufio.NewReader(reader).WriteTo(hasher)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read: %w", err)
|
||||
}
|
||||
|
||||
return &LabeledHash{
|
||||
alg: alg,
|
||||
digest: hasher.Sum(nil),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Load loads a labeled hash from the given []byte slice.
|
||||
func Load(labeledHash []byte) (*LabeledHash, error) {
|
||||
c := container.New(labeledHash)
|
||||
|
||||
algID, err := c.GetNextN64()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse algorithm ID: %s", err)
|
||||
return nil, fmt.Errorf("failed to parse algorithm ID: %w", err)
|
||||
}
|
||||
|
||||
digest, err := c.GetNextBlock()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse digest: %s", err)
|
||||
return nil, fmt.Errorf("failed to parse digest: %w", err)
|
||||
}
|
||||
|
||||
if c.Length() > 0 {
|
||||
|
@ -60,16 +94,48 @@ func Load(labeledHash []byte) (*LabeledHash, error) {
|
|||
}, nil
|
||||
}
|
||||
|
||||
// LoadFromString loads a labeled hash from the given string.
|
||||
func LoadFromString(labeledHash string) (*LabeledHash, error) {
|
||||
raw, err := base64.RawURLEncoding.DecodeString(labeledHash)
|
||||
// FromHex loads a labeled hash from the given hexadecimal string.
|
||||
func FromHex(hexEncoded string) (*LabeledHash, error) {
|
||||
raw, err := hex.DecodeString(hexEncoded)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode: %s", err)
|
||||
return nil, fmt.Errorf("failed to decode hex: %w", err)
|
||||
}
|
||||
|
||||
return Load(raw)
|
||||
}
|
||||
|
||||
// FromBase64 loads a labeled hash from the given Base64 string using raw url
|
||||
// encoding.
|
||||
func FromBase64(base64Encoded string) (*LabeledHash, error) {
|
||||
raw, err := base64.RawURLEncoding.DecodeString(base64Encoded)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode base64: %w", err)
|
||||
}
|
||||
|
||||
return Load(raw)
|
||||
}
|
||||
|
||||
// FromBase58 loads a labeled hash from the given Base58 string using the BTC
|
||||
// alphabet.
|
||||
func FromBase58(base58Encoded string) (*LabeledHash, error) {
|
||||
raw, err := base58.Decode(base58Encoded)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode base58: %w", err)
|
||||
}
|
||||
|
||||
return Load(raw)
|
||||
}
|
||||
|
||||
// Algorithm returns the algorithm of the labeled hash.
|
||||
func (lh *LabeledHash) Algorithm() Algorithm {
|
||||
return lh.alg
|
||||
}
|
||||
|
||||
// Sum returns the raw calculated hash digest.
|
||||
func (lh *LabeledHash) Sum() []byte {
|
||||
return lh.digest
|
||||
}
|
||||
|
||||
// Bytes returns the []byte representation of the labeled hash.
|
||||
func (lh *LabeledHash) Bytes() []byte {
|
||||
c := container.New()
|
||||
|
@ -78,16 +144,69 @@ func (lh *LabeledHash) Bytes() []byte {
|
|||
return c.CompileData()
|
||||
}
|
||||
|
||||
// String returns the string representation of the labeled hash (base64 raw url encoding).
|
||||
func (lh *LabeledHash) String() string {
|
||||
// Hex returns the hexadecimal string representation of the labeled hash.
|
||||
func (lh *LabeledHash) Hex() string {
|
||||
return hex.EncodeToString(lh.Bytes())
|
||||
}
|
||||
|
||||
// Base64 returns the Base64 string representation of the labeled hash using
|
||||
// raw url encoding.
|
||||
func (lh *LabeledHash) Base64() string {
|
||||
return base64.RawURLEncoding.EncodeToString(lh.Bytes())
|
||||
}
|
||||
|
||||
// Base58 returns the Base58 string representation of the labeled hash using
|
||||
// the BTC alphabet.
|
||||
func (lh *LabeledHash) Base58() string {
|
||||
return base58.Encode(lh.Bytes())
|
||||
}
|
||||
|
||||
// Equal returns true if the given labeled hash is equal.
|
||||
// Equality is checked by comparing both the algorithm and the digest value.
|
||||
func (lh *LabeledHash) Equal(other *LabeledHash) bool {
|
||||
return lh.alg == other.alg &&
|
||||
subtle.ConstantTimeCompare(lh.digest, other.digest) == 1
|
||||
}
|
||||
|
||||
// EqualRaw returns true if the given raw hash digest is equal.
|
||||
// Equality is checked by comparing the digest value only.
|
||||
// The caller must make sure the same algorithm is used.
|
||||
func (lh *LabeledHash) EqualRaw(otherDigest []byte) bool {
|
||||
return subtle.ConstantTimeCompare(lh.digest, otherDigest) == 1
|
||||
}
|
||||
|
||||
// Matches returns true if the digest of the given data matches the hash.
|
||||
func (lh *LabeledHash) Matches(data []byte) bool {
|
||||
hasher := lh.alg.new()
|
||||
_, _ = hasher.Write(data) // never returns an error
|
||||
defer hasher.Reset() // internal state may leak data if kept in memory
|
||||
|
||||
return subtle.ConstantTimeCompare(lh.digest, hasher.Sum(nil)) == 1
|
||||
return lh.Equal(Digest(lh.alg, data))
|
||||
}
|
||||
|
||||
// MatchesData returns true if the digest of the given data matches the hash.
|
||||
// Deprecated: Use Matches instead.
|
||||
func (lh *LabeledHash) MatchesData(data []byte) bool {
|
||||
return lh.Equal(Digest(lh.alg, data))
|
||||
}
|
||||
|
||||
// MatchesString returns true if the digest of the given string matches the hash.
|
||||
func (lh *LabeledHash) MatchesString(s string) bool {
|
||||
return lh.Matches([]byte(s))
|
||||
}
|
||||
|
||||
// MatchesFile returns true if the digest of the given file matches the hash.
|
||||
func (lh *LabeledHash) MatchesFile(pathToFile string) (bool, error) {
|
||||
fileHash, err := DigestFile(lh.alg, pathToFile)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return lh.Equal(fileHash), nil
|
||||
}
|
||||
|
||||
// MatchesReader returns true if the digest of the given reader matches the hash.
|
||||
func (lh *LabeledHash) MatchesReader(reader io.Reader) (bool, error) {
|
||||
readerHash, err := DigestFromReader(lh.alg, reader)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return lh.Equal(readerHash), nil
|
||||
}
|
||||
|
|
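The new representation helpers make it easy to round-trip a labeled hash through hex, Base64, or Base58 while keeping the algorithm ID attached to the digest. A short sketch using only the APIs added in this diff:

package main

import (
	"fmt"
	"log"

	"github.com/safing/jess/lhash"
)

func main() {
	data := []byte("hello world")

	// Digest carries the algorithm ID alongside the digest value.
	labeled := lhash.BLAKE3.Digest(data)

	// Serialize to Base58 and load it back.
	encoded := labeled.Base58()
	restored, err := lhash.FromBase58(encoded)
	if err != nil {
		log.Fatal(err)
	}

	// The restored hash still knows its algorithm and matches the data.
	fmt.Println(restored.Algorithm())   // BLAKE3
	fmt.Println(restored.Matches(data)) // true
}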
|
@ -7,11 +7,15 @@ import (
|
|||
)
|
||||
|
||||
var (
|
||||
testEmpty = []byte("")
|
||||
testFox = []byte("The quick brown fox jumps over the lazy dog.")
|
||||
testEmpty = []byte("")
|
||||
testFox = "The quick brown fox jumps over the lazy dog."
|
||||
testFoxData = []byte(testFox)
|
||||
noMatch = "no match"
|
||||
noMatchData = []byte(noMatch)
|
||||
)
|
||||
|
||||
func testAlgorithm(t *testing.T, alg Algorithm, emptyHex, foxHex string) {
|
||||
t.Helper()
|
||||
|
||||
// setup
|
||||
emptyBytes, err := hex.DecodeString(emptyHex)
|
||||
|
@ -28,59 +32,150 @@ func testAlgorithm(t *testing.T, alg Algorithm, emptyHex, foxHex string) {
|
|||
// test empty
|
||||
lh := Digest(alg, testEmpty)
|
||||
if !bytes.Equal(lh.Bytes()[2:], emptyBytes) {
|
||||
t.Errorf("alg %d: test empty: digest mismatch, expected %+v, got %+v", alg, emptyBytes, lh.Bytes()[2:])
|
||||
t.Errorf("alg %s: test empty: digest mismatch, expected %+v, got %+v",
|
||||
alg, hex.EncodeToString(emptyBytes), hex.EncodeToString(lh.Bytes()[2:]))
|
||||
}
|
||||
|
||||
// test fox
|
||||
lh = Digest(alg, testFox)
|
||||
lh = Digest(alg, testFoxData)
|
||||
if !bytes.Equal(lh.Bytes()[2:], foxBytes) {
|
||||
t.Errorf("alg %d: test fox: digest mismatch, expected %+v, got %+v", alg, foxBytes, lh.Bytes()[2:])
|
||||
}
|
||||
|
||||
// test matching
|
||||
if !lh.Matches(testFox) {
|
||||
t.Errorf("alg %d: failed to match reference", alg)
|
||||
}
|
||||
if lh.Matches([]byte("nope")) {
|
||||
t.Errorf("alg %d: failed to non-match garbage", alg)
|
||||
}
|
||||
|
||||
// serialize
|
||||
lhs := Digest(alg, testFox).String()
|
||||
// load
|
||||
loaded, err := LoadFromString(lhs)
|
||||
if err != nil {
|
||||
t.Errorf("alg %d: failed to load from string: %s", alg, err)
|
||||
return
|
||||
t.Errorf("alg %s: test fox: digest mismatch, expected %+v, got %+v",
|
||||
alg, hex.EncodeToString(foxBytes), hex.EncodeToString(lh.Bytes()[2:]))
|
||||
}
|
||||
|
||||
// test matching with serialized/loaded labeled hash
|
||||
if !loaded.Matches(testFox) {
|
||||
t.Errorf("alg %d: failed to match reference", alg)
|
||||
if !lh.Matches(testFoxData) {
|
||||
t.Errorf("alg %s: failed to match reference", alg)
|
||||
}
|
||||
if loaded.Matches([]byte("nope")) {
|
||||
t.Errorf("alg %d: failed to non-match garbage", alg)
|
||||
if !lh.MatchesString(testFox) {
|
||||
t.Errorf("alg %s: failed to match reference", alg)
|
||||
}
|
||||
if lh.Matches(noMatchData) {
|
||||
t.Errorf("alg %s: failed to non-match garbage", alg)
|
||||
}
|
||||
if lh.MatchesString(noMatch) {
|
||||
t.Errorf("alg %s: failed to non-match garbage", alg)
|
||||
}
|
||||
|
||||
// Test representations
|
||||
|
||||
// Hex
|
||||
lhs := Digest(alg, testFoxData)
|
||||
loaded, err := FromHex(lhs.Hex())
|
||||
if err != nil {
|
||||
t.Errorf("alg %s: failed to load from hex string: %s", alg, err)
|
||||
return
|
||||
}
|
||||
testFormat(t, alg, lhs, loaded)
|
||||
|
||||
// Base64
|
||||
lhs = Digest(alg, testFoxData)
|
||||
loaded, err = FromBase64(lhs.Base64())
|
||||
if err != nil {
|
||||
t.Errorf("alg %s: failed to load from base64 string: %s", alg, err)
|
||||
return
|
||||
}
|
||||
testFormat(t, alg, lhs, loaded)
|
||||
|
||||
// Base58
|
||||
lhs = Digest(alg, testFoxData)
|
||||
loaded, err = FromBase58(lhs.Base58())
|
||||
if err != nil {
|
||||
t.Errorf("alg %s: failed to load from base58 string: %s", alg, err)
|
||||
return
|
||||
}
|
||||
testFormat(t, alg, lhs, loaded)
|
||||
}
|
||||
|
||||
func testFormat(t *testing.T, alg Algorithm, lhs, loaded *LabeledHash) {
|
||||
t.Helper()
|
||||
|
||||
noMatchLH := Digest(alg, noMatchData)
|
||||
|
||||
// Test equality.
|
||||
if !lhs.Equal(loaded) {
|
||||
t.Errorf("alg %s: equality test failed", alg)
|
||||
}
|
||||
if lhs.Equal(noMatchLH) {
|
||||
t.Errorf("alg %s: non-equality test failed", alg)
|
||||
}
|
||||
|
||||
// Test matching.
|
||||
if !loaded.Matches(testFoxData) {
|
||||
t.Errorf("alg %s: failed to match reference", alg)
|
||||
}
|
||||
if !loaded.MatchesString(testFox) {
|
||||
t.Errorf("alg %s: failed to match reference", alg)
|
||||
}
|
||||
if loaded.Matches(noMatchData) {
|
||||
t.Errorf("alg %s: failed to non-match garbage", alg)
|
||||
}
|
||||
if loaded.MatchesString(noMatch) {
|
||||
t.Errorf("alg %s: failed to non-match garbage", alg)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHash(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testAlgorithm(t, SHA2_224,
|
||||
"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f",
|
||||
"619cba8e8e05826e9b8c519c0a5c68f4fb653e8a3d8aa04bb2c8cd4c",
|
||||
)
|
||||
testAlgorithm(t, SHA2_256,
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
|
||||
"ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c",
|
||||
)
|
||||
|
||||
testAlgorithm(t, SHA2_384,
|
||||
"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b",
|
||||
"ed892481d8272ca6df370bf706e4d7bc1b5739fa2177aae6c50e946678718fc67a7af2819a021c2fc34e91bdb63409d7",
|
||||
)
|
||||
testAlgorithm(t, SHA2_512,
|
||||
"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
|
||||
"91ea1245f20d46ae9a037a989f54f1f790f0a47607eeb8a14d12890cea77a1bbc6c7ed9cf205e67b7f2b8fd4c7dfd3a7a8617e45f3c463d481c7e586c39ac1ed",
|
||||
)
|
||||
|
||||
testAlgorithm(t, SHA2_512_224,
|
||||
"6ed0dd02806fa89e25de060c19d3ac86cabb87d6a0ddd05c333b84f4",
|
||||
"6d6a9279495ec4061769752e7ff9c68b6b0b3c5a281b7917ce0572de",
|
||||
)
|
||||
testAlgorithm(t, SHA2_512_256,
|
||||
"c672b8d1ef56ed28ab87c3622c5114069bdd3ad7b8f9737498d0c01ecef0967a",
|
||||
"1546741840f8a492b959d9b8b2344b9b0eb51b004bba35c0aebaac86d45264c3",
|
||||
)
|
||||
testAlgorithm(t, SHA3_224,
|
||||
"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7",
|
||||
"2d0708903833afabdd232a20201176e8b58c5be8a6fe74265ac54db0",
|
||||
)
|
||||
testAlgorithm(t, SHA3_256,
|
||||
"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a",
|
||||
"a80f839cd4f83f6c3dafc87feae470045e4eb0d366397d5c6ce34ba1739f734d",
|
||||
)
|
||||
testAlgorithm(t, SHA3_384,
|
||||
"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004",
|
||||
"1a34d81695b622df178bc74df7124fe12fac0f64ba5250b78b99c1273d4b080168e10652894ecad5f1f4d5b965437fb9",
|
||||
)
|
||||
testAlgorithm(t, SHA3_512,
|
||||
"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26",
|
||||
"18f4f4bd419603f95538837003d9d254c26c23765565162247483f65c50303597bc9ce4d289f21d1c2f1f458828e33dc442100331b35e7eb031b5d38ba6460f8",
|
||||
)
|
||||
|
||||
testAlgorithm(t, BLAKE2s_256,
|
||||
"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9",
|
||||
"95bca6e1b761dca1323505cc629949a0e03edf11633cc7935bd8b56f393afcf2",
|
||||
)
|
||||
testAlgorithm(t, BLAKE2b_256,
|
||||
"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8",
|
||||
"69d7d3b0afba81826d27024c17f7f183659ed0812cf27b382eaef9fdc29b5712",
|
||||
)
|
||||
testAlgorithm(t, BLAKE2b_384,
|
||||
"b32811423377f52d7862286ee1a72ee540524380fda1724a6f25d7978c6fd3244a6caf0498812673c5e05ef583825100",
|
||||
"16d65de1a3caf1c26247234c39af636284c7e19ca448c0de788272081410778852c94d9cef6b939968d4f872c7f78337",
|
||||
)
|
||||
testAlgorithm(t, BLAKE2b_512,
|
||||
"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce",
|
||||
"87af9dc4afe5651b7aa89124b905fd214bf17c79af58610db86a0fb1e0194622a4e9d8e395b352223a8183b0d421c0994b98286cbf8c68a495902e0fe6e2bda2",
|
||||
)
|
||||
testAlgorithm(t, BLAKE3,
|
||||
"af1349b9f5f9a1a6a0404dea36dcc9499bcb25c9adc112b7cc9a93cae41f3262",
|
||||
"4c9bd68d7f0baa2e167cef98295eb1ec99a3ec8f0656b33dbae943b387f31d5d",
|
||||
)
|
||||
}
|
||||
|
|
46
pack
|
@ -8,14 +8,15 @@ COL_BOLD="\033[01;01m"
COL_RED="\033[31m"

destDirPart1="dist"
destDirPart2="jess"

function check {
function prep {
  # output
  output="cmd"
  output="cmd/jess"
  # get version
  version=$(grep "info.Set" cmd/main.go | cut -d'"' -f4)
  # build versioned file name with platform
  filename="jess_${GOOS}_${GOARCH}_v${version//./-}"
  # build versioned file name
  filename="jess_v${version//./-}"
  # platform
  platform="${GOOS}_${GOARCH}"
  if [[ $GOOS == "windows" ]]; then
@ -23,59 +24,56 @@ function check {
    output="${output}.exe"
  fi
  # build destination path
  destPath=${destDirPart1}/$filename
  destPath=${destDirPart1}/${platform}/${destDirPart2}/$filename
}

function check {
  prep

  # check if file exists
  if [[ -f $destPath ]]; then
    echo "$platform $version already built"
    echo "[jess] $platform $version already built"
  else
    echo -e "${COL_BOLD}$platform $version${COL_OFF}"
    echo -e "[jess] ${COL_BOLD}$platform $version${COL_OFF}"
  fi
}

function build {
  # output
  output="cmd/cmd"
  # get version
  version=$(grep "info.Set" cmd/main.go | cut -d'"' -f4)
  # build versioned file name with platform
  filename="jess_${GOOS}_${GOARCH}_v${version//./-}"
  # platform
  platform="${GOOS}_${GOARCH}"
  if [[ $GOOS == "windows" ]]; then
    filename="${filename}.exe"
    output="${output}.exe"
  fi
  # build destination path
  destPath=${destDirPart1}/$filename
  prep

  # check if file exists
  if [[ -f $destPath ]]; then
    echo "$platform already built in version $version, skipping..."
    echo "[jess] $platform already built in version $version, skipping..."
    return
  fi

  # build
  ./cmd/build
  if [[ $? -ne 0 ]]; then
    echo -e "\n${COL_BOLD}$platform: ${COL_RED}BUILD FAILED.${COL_OFF}"
    echo -e "\n${COL_BOLD}[jess] $platform: ${COL_RED}BUILD FAILED.${COL_OFF}"
    exit 1
  fi
  mkdir -p $(dirname $destPath)
  cp $output $destPath
  echo -e "\n${COL_BOLD}$platform: successfully built.${COL_OFF}"
  echo -e "\n${COL_BOLD}[jess] $platform: successfully built.${COL_OFF}"
}

function check_all {
  GOOS=linux GOARCH=amd64 check
  GOOS=windows GOARCH=amd64 check
  GOOS=darwin GOARCH=amd64 check
  GOOS=linux GOARCH=arm64 check
  GOOS=windows GOARCH=arm64 check
  GOOS=darwin GOARCH=arm64 check
}

function build_all {
  GOOS=linux GOARCH=amd64 build
  GOOS=windows GOARCH=amd64 build
  GOOS=darwin GOARCH=amd64 build
  GOOS=linux GOARCH=arm64 build
  GOOS=windows GOARCH=arm64 build
  GOOS=darwin GOARCH=arm64 build
}

function build_os {
@ -6,7 +6,7 @@ import (
)

var (
	// ASCII printable characters (character codes 32-127)
	// ASCII printable characters (character codes 32-127).
	passwordCharSets = []string{
		"abcdefghijklmnopqrstuvwxyz",
		"ABCDEFGHIJKLMNOPQRSTUVWXYZ",
@ -16,7 +16,7 @@ var (
	}

	// extended ASCII codes (character code 128-255)
	// assume pool size of 32 (a quarter), as not all of them are common / easily accessible on every keyboard
	// assume pool size of 32 (a quarter), as not all of them are common / easily accessible on every keyboard.
	passwordExtraPoolSize = 32

	createPasswordCallback func(signet *Signet, minSecurityLevel int) error
@ -38,7 +38,6 @@ func SetPasswordCallbacks(

// CalculatePasswordSecurityLevel calculates the security level of the given password and iterations of the pbkdf algorithm.
func CalculatePasswordSecurityLevel(password string, iterations int) int {

	// TODO: this calculation is pretty conservative and errs on the safe side
	// maybe soften this up a litte, but couldn't find any scientific foundation for that
@ -21,6 +21,8 @@ func getTestPassword(signet *Signet) error {
}

func TestCalculatePasswordSecurityLevel(t *testing.T) {
	t.Parallel()

	// basic weak
	testPWSL(t, "asdf", -1)
	testPWSL(t, "asdfasdf", -1)
@ -82,6 +84,8 @@ func TestCalculatePasswordSecurityLevel(t *testing.T) {
}

func testPWSL(t *testing.T, password string, expectedSecurityLevel int) {
	t.Helper()

	securityLevel := CalculatePasswordSecurityLevel(password, 1<<20)

	if securityLevel < expectedSecurityLevel {
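The character-pool constants and the testPWSL expectations above rest on a standard entropy estimate: guessing work grows with the character pool size raised to the password length, multiplied by the PBKDF iteration count. The sketch below only illustrates that estimate under the stated pool assumptions (26 lowercase, 26 uppercase, 10 digits, 32 for anything else); it is not the exact algorithm implemented by CalculatePasswordSecurityLevel.

package main

import (
	"fmt"
	"math"
)

// estimateBits is a rough, illustrative estimate: pool^len(password) guesses,
// each costing `iterations` KDF rounds, expressed in bits of work.
func estimateBits(password string, iterations int) float64 {
	var lower, upper, digit, other bool
	for _, r := range password {
		switch {
		case r >= 'a' && r <= 'z':
			lower = true
		case r >= 'A' && r <= 'Z':
			upper = true
		case r >= '0' && r <= '9':
			digit = true
		default:
			other = true
		}
	}
	pool := 0
	if lower {
		pool += 26
	}
	if upper {
		pool += 26
	}
	if digit {
		pool += 10
	}
	if other {
		pool += 32 // mirrors the "assume pool size of 32" comment above
	}
	return float64(len(password))*math.Log2(float64(pool)) + math.Log2(float64(iterations))
}

func main() {
	fmt.Printf("%.0f\n", estimateBits("asdfasdf", 1<<20)) // roughly 58 bits of work
}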
@ -5,7 +5,7 @@ import (
	"strings"
)

// Security requirements of a letter
// Security requirements of a letter.
const (
	Confidentiality uint8 = iota
	Integrity
@ -3,6 +3,8 @@ package jess
import "testing"

func checkNoSpec(t *testing.T, a *Requirements, expectedNoSpec string) {
	t.Helper()

	noSpec := a.SerializeToNoSpec()
	if noSpec != expectedNoSpec {
		t.Errorf(`unexpected no spec "%s", expected "%s"`, noSpec, expectedNoSpec)
@ -10,6 +12,7 @@ func checkNoSpec(t *testing.T, a *Requirements, expectedNoSpec string) {
}

func TestRequirements(t *testing.T) {
	t.Parallel()

	a := NewRequirements()
	checkNoSpec(t, a, "")
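The requirements test above relies on the fact that a full set of requirements serializes to an empty "no spec" string. A minimal sketch using only the helpers that appear in these hunks (NewRequirements, Remove, SerializeToNoSpec); the exact spec string produced after removing an attribute is not asserted here:

package main

import (
	"fmt"

	"github.com/safing/jess"
)

func main() {
	full := jess.NewRequirements()
	fmt.Printf("full requirements, no-spec: %q\n", full.SerializeToNoSpec()) // ""

	reduced := jess.NewRequirements().Remove(jess.SenderAuthentication)
	fmt.Printf("without sender auth, no-spec: %q\n", reduced.SerializeToNoSpec())
}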
@ -17,7 +17,10 @@ const (
)

var (
	wireReKeyAfterMsgs uint64 = 100000 // re-exchange keys every 100000 messages
	// Re-exchange keys every x messages.
	// At 10_000_000 msgs with 1500 bytes per msg, this would result in
	// re-exchanging keys every 15 GB.
	wireReKeyAfterMsgs uint64 = 10_000_000

	requiredWireSessionRequirements = NewRequirements().Remove(SenderAuthentication)
)
@ -40,14 +43,14 @@ type WireSession struct { //nolint:maligned // TODO
	newKeyMaterial [][]byte
}

// kxPair is key exchange pair
// kxPair is key exchange pair.
type kxPair struct {
	tool   tools.ToolLogic
	signet *Signet
	peer   *Signet
}

// kePair is key encapsulation "pair"
// kePair is key encapsulation "pair".
type kePair struct {
	tool   tools.ToolLogic
	signet *Signet
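A quick check of the arithmetic in the new re-keying comment above, with the 1500 bytes per message figure taken as the stated assumption:

package main

import "fmt"

func main() {
	const wireReKeyAfterMsgs = 10_000_000 // messages between key exchanges
	const avgMsgSize = 1500               // assumed bytes per message
	fmt.Printf("%d GB\n", wireReKeyAfterMsgs*avgMsgSize/1_000_000_000) // 15 GB
}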
55  session.go
@ -48,7 +48,7 @@ type managedHasher struct {
|
|||
hash hash.Hash
|
||||
}
|
||||
|
||||
// Sum returns the hash sum of the managed hasher
|
||||
// Sum returns the hash sum of the managed hasher.
|
||||
func (sh *managedHasher) Sum() ([]byte, error) {
|
||||
if sh == nil || sh.hash == nil {
|
||||
return nil, errors.New("managed hasher is broken")
|
||||
|
@ -56,7 +56,7 @@ func (sh *managedHasher) Sum() ([]byte, error) {
|
|||
return sh.hash.Sum(nil), nil
|
||||
}
|
||||
|
||||
func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
||||
func newSession(e *Envelope) (*Session, error) { //nolint:maintidx
|
||||
if e.suite == nil {
|
||||
return nil, errors.New("suite not loaded")
|
||||
}
|
||||
|
@ -86,18 +86,18 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
|
||||
// prepare variables
|
||||
var (
|
||||
keySourceAvailable bool = false
|
||||
keySourceAvailable bool
|
||||
totalSignetsSeen int
|
||||
requireSecurityLevel bool = false
|
||||
requireDefaultKeySize bool = false
|
||||
requireSecurityLevel bool
|
||||
requireDefaultKeySize bool
|
||||
)
|
||||
|
||||
// tool init loop: start
|
||||
for i, toolID := range s.envelope.suite.Tools {
|
||||
|
||||
///////////////////////////////////////
|
||||
// ====================================
|
||||
// tool init loop: check for duplicates
|
||||
///////////////////////////////////////
|
||||
// ====================================
|
||||
|
||||
for j, dupeToolID := range s.envelope.suite.Tools {
|
||||
if i != j && toolID == dupeToolID {
|
||||
|
@ -105,9 +105,9 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
}
|
||||
}
|
||||
|
||||
//////////////////////////////////////
|
||||
// ===================================
|
||||
// tool init loop: parse, prep and get
|
||||
//////////////////////////////////////
|
||||
// ===================================
|
||||
|
||||
var (
|
||||
hashTool *hashtools.HashTool
|
||||
|
@ -135,9 +135,9 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
s.toolsWithState = append(s.toolsWithState, logic)
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////
|
||||
// ===========================================================
|
||||
// tool init loop: assign tools to queues and add requirements
|
||||
////////////////////////////////////////////////////////////
|
||||
// ===========================================================
|
||||
|
||||
switch tool.Info.Purpose {
|
||||
case tools.PurposeKeyDerivation:
|
||||
|
@ -164,6 +164,7 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
|
||||
case tools.PurposeSigning:
|
||||
s.signers = append(s.signers, logic)
|
||||
s.toolRequirements.Add(Integrity)
|
||||
s.toolRequirements.Add(SenderAuthentication)
|
||||
|
||||
case tools.PurposeIntegratedCipher:
|
||||
|
@ -180,9 +181,9 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
s.toolRequirements.Add(Integrity)
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////
|
||||
// ============================================
|
||||
// tool init loop: process options, get hashers
|
||||
///////////////////////////////////////////////
|
||||
// ============================================
|
||||
|
||||
for _, option := range tool.Info.Options {
|
||||
switch option {
|
||||
|
@ -242,9 +243,9 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
}
|
||||
}
|
||||
|
||||
//////////////////////////////////
|
||||
// ===============================
|
||||
// tool init loop: initialize tool
|
||||
//////////////////////////////////
|
||||
// ===============================
|
||||
|
||||
// init tool
|
||||
logic.Init(
|
||||
|
@ -257,18 +258,18 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
hashSumFn,
|
||||
)
|
||||
|
||||
/////////////////////////////////////////////////
|
||||
// ==============================================
|
||||
// tool init loop: calc and check security levels
|
||||
/////////////////////////////////////////////////
|
||||
// ==============================================
|
||||
|
||||
err = s.calcAndCheckSecurityLevel(logic, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////
|
||||
// ==========================================
|
||||
// tool init loop: calculate default key size
|
||||
/////////////////////////////////////////////
|
||||
// ==========================================
|
||||
|
||||
// find biggest key size for default
|
||||
if tool.Info.KeySize > s.DefaultSymmetricKeySize {
|
||||
|
@ -277,9 +278,9 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
|
||||
} // tool init loop: end
|
||||
|
||||
//////////////////////////////////////////////////////////
|
||||
// =======================================================
|
||||
// calc and check signet security levels, default key size
|
||||
//////////////////////////////////////////////////////////
|
||||
// =======================================================
|
||||
|
||||
for _, tool := range s.all {
|
||||
|
||||
|
@ -342,9 +343,9 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
return nil, err
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////////////////
|
||||
// ======================================================
|
||||
// check security level and default key size requirements
|
||||
/////////////////////////////////////////////////////////
|
||||
// ======================================================
|
||||
|
||||
// apply manual security level
|
||||
if minimumSecurityLevel > 0 && minimumSecurityLevel > s.SecurityLevel {
|
||||
|
@ -364,11 +365,11 @@ func newSession(e *Envelope) (*Session, error) { //nolint:gocognit,gocyclo
|
|||
return nil, fmt.Errorf("this toolset requires the default key size to be set manually")
|
||||
}
|
||||
|
||||
///////////////
|
||||
// ============
|
||||
// final checks
|
||||
///////////////
|
||||
// ============
|
||||
|
||||
// check requirements requirements
|
||||
// check requirements
|
||||
if s.toolRequirements.Empty() {
|
||||
return nil, errors.New("envelope excludes all security requirements, no meaningful operation possible")
|
||||
}
|
||||
|
@ -517,7 +518,7 @@ func (s *Session) checkSecurityLevel(levelToCheck int, subject func() string) er
|
|||
switch {
|
||||
case minimumSecurityLevel > 0:
|
||||
// check against minimumSecurityLevel
|
||||
// minimumSecurityLevel overrides other checks
|
||||
// (overrides other checks)
|
||||
if levelToCheck < minimumSecurityLevel {
|
||||
return fmt.Errorf(
|
||||
`%s with a security level of %d is weaker than the desired security level of %d`,
|
||||
|
|
75  signet.go
@ -7,12 +7,14 @@ import (
	"io"
	"time"

	"github.com/safing/jess/tools"

	"github.com/mr-tron/base58"
	uuid "github.com/satori/go.uuid"

	"github.com/safing/jess/tools"
	"github.com/safing/structures/dsd"
)

// Special signet types
// Special signet types.
const (
	SignetSchemePassword = "pw"
	SignetSchemeKey      = "key"
@ -134,6 +136,14 @@ func (signet *Signet) SetLoadedKeys(pubKey crypto.PublicKey, privKey crypto.Priv

// AsRecipient returns a public version of the Signet.
func (signet *Signet) AsRecipient() (*Signet, error) {
	// Check special signet schemes.
	switch signet.Scheme {
	case SignetSchemeKey:
		return nil, errors.New("keys cannot be a recipient")
	case SignetSchemePassword:
		return nil, errors.New("passwords cannot be a recipient")
	}

	// load so we can split keys
	err := signet.LoadKey()
	if err != nil {
@ -176,7 +186,7 @@ func (signet *Signet) LoadKey() error {
	return signet.tool.StaticLogic.LoadKey(signet)
}

// Tool returns the tool of the signet
// Tool returns the tool of the signet.
func (signet *Signet) Tool() (*tools.Tool, error) {
	// load tool
	err := signet.loadTool()
@ -249,3 +259,60 @@ func (signet *Signet) AssignUUID() error {
	signet.ID = u.String()
	return nil
}

// ToBytes serializes the Signet to a byte slice.
func (signet *Signet) ToBytes() ([]byte, error) {
	// Make sure the key is stored in the serializable format.
	if err := signet.StoreKey(); err != nil {
		return nil, fmt.Errorf("failed to serialize the key: %w", err)
	}

	// Serialize Signet.
	data, err := dsd.Dump(signet, dsd.CBOR)
	if err != nil {
		return nil, fmt.Errorf("failed to serialize the signet: %w", err)
	}

	return data, nil
}

// SignetFromBytes parses and loads a serialized signet.
func SignetFromBytes(data []byte) (*Signet, error) {
	signet := &Signet{}

	// Parse Signet from data.
	if _, err := dsd.Load(data, signet); err != nil {
		return nil, fmt.Errorf("failed to parse data format: %w", err)
	}

	// Load the key.
	if err := signet.LoadKey(); err != nil {
		return nil, fmt.Errorf("failed to parse key: %w", err)
	}

	return signet, nil
}

// ToBase58 serializes the signet and encodes it with base58.
func (signet *Signet) ToBase58() (string, error) {
	// Serialize Signet.
	data, err := signet.ToBytes()
	if err != nil {
		return "", err
	}

	// Encode and return.
	return base58.Encode(data), nil
}

// SignetFromBase58 parses and loads a base58 encoded serialized signet.
func SignetFromBase58(base58Encoded string) (*Signet, error) {
	// Decode string.
	data, err := base58.Decode(base58Encoded)
	if err != nil {
		return nil, fmt.Errorf("failed to decode base58: %w", err)
	}

	// Parse and return.
	return SignetFromBytes(data)
}
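The new ToBase58/SignetFromBase58 helpers above suggest a simple round trip. A minimal sketch, assuming the caller already holds a loaded *jess.Signet (for example one read from a truststore); only the two helpers and the ID field shown in the hunk are used:

package example

import (
	"fmt"

	"github.com/safing/jess"
)

// roundTrip serializes a signet to base58 and parses it back in.
func roundTrip(signet *jess.Signet) error {
	encoded, err := signet.ToBase58()
	if err != nil {
		return err
	}
	parsed, err := jess.SignetFromBase58(encoded)
	if err != nil {
		return err
	}
	// The round trip should preserve the signet's ID.
	fmt.Println(parsed.ID == signet.ID)
	return nil
}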
2  suite.go

@ -1,6 +1,6 @@
package jess

// Suite status options
// Suite status options.
const (
	SuiteStatusDeprecated uint8 = 0
	SuiteStatusPermitted  uint8 = 1
86  suites.go
@ -1,75 +1,27 @@
|
|||
package jess
|
||||
|
||||
// Currently Recommended Suites.
|
||||
var (
|
||||
// SuiteKey is a cipher suite for encryption with a key.
|
||||
SuiteKey = SuiteKeyV1
|
||||
// SuitePassword is a cipher suite for encryption with a password.
|
||||
SuitePassword = SuitePasswordV1
|
||||
// SuiteRcptOnly is a cipher suite for encrypting for someone, but without verifying the sender/source.
|
||||
SuiteRcptOnly = SuiteRcptOnlyV1
|
||||
// SuiteSign is a cipher suite for signing (no encryption).
|
||||
SuiteSign = SuiteSignV1
|
||||
// SuiteSignFile is a cipher suite for signing files (no encryption).
|
||||
SuiteSignFile = SuiteSignFileV1
|
||||
// SuiteComplete is a cipher suite for both encrypting for someone and signing.
|
||||
SuiteComplete = SuiteCompleteV1
|
||||
// SuiteWire is a cipher suite for network communication, including authentication of the server, but not the client.
|
||||
SuiteWire = SuiteWireV1
|
||||
)
|
||||
|
||||
// Suite Lists.
|
||||
var (
|
||||
// lists
|
||||
suitesMap = make(map[string]*Suite)
|
||||
suitesList []*Suite
|
||||
|
||||
// suite definitions
|
||||
|
||||
// SuiteKeyV1 is a cipher suite for encryption with a key.
|
||||
SuiteKeyV1 = registerSuite(&Suite{
|
||||
ID: "key_v1",
|
||||
Tools: []string{"HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
|
||||
Provides: NewRequirements(),
|
||||
SecurityLevel: 128,
|
||||
Status: SuiteStatusRecommended,
|
||||
})
|
||||
// SuitePasswordV1 is a cipher suite for encryption with a password.
|
||||
SuitePasswordV1 = registerSuite(&Suite{
|
||||
ID: "pw_v1",
|
||||
Tools: []string{"SCRYPT-20", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
|
||||
Provides: NewRequirements(),
|
||||
SecurityLevel: 128,
|
||||
Status: SuiteStatusRecommended,
|
||||
})
|
||||
// SuiteRcptOnlyV1 is a cipher suite for encrypting for someone, but without verifying the sender/source.
|
||||
SuiteRcptOnlyV1 = registerSuite(&Suite{
|
||||
ID: "rcpt_v1",
|
||||
Tools: []string{"ECDH-X25519", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
|
||||
Provides: NewRequirements().Remove(SenderAuthentication),
|
||||
SecurityLevel: 128,
|
||||
Status: SuiteStatusRecommended,
|
||||
})
|
||||
// SuiteSignV1 is a cipher suite for signing (no encryption).
|
||||
SuiteSignV1 = registerSuite(&Suite{
|
||||
ID: "sign_v1",
|
||||
Tools: []string{"Ed25519(BLAKE2b-256)"},
|
||||
Provides: newEmptyRequirements().Add(SenderAuthentication),
|
||||
SecurityLevel: 128,
|
||||
Status: SuiteStatusRecommended,
|
||||
})
|
||||
// SuiteCompleteV1 is a cipher suite for both encrypting for someone and signing.
|
||||
SuiteCompleteV1 = registerSuite(&Suite{
|
||||
ID: "v1",
|
||||
Tools: []string{"ECDH-X25519", "Ed25519(BLAKE2b-256)", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
|
||||
Provides: NewRequirements(),
|
||||
SecurityLevel: 128,
|
||||
Status: SuiteStatusRecommended,
|
||||
})
|
||||
// SuiteWireV1 is a cipher suite for network communication, including authentication of the server, but not the client.
|
||||
SuiteWireV1 = registerSuite(&Suite{
|
||||
ID: "w1",
|
||||
Tools: []string{"ECDH-X25519", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
|
||||
Provides: NewRequirements().Remove(SenderAuthentication),
|
||||
SecurityLevel: 128,
|
||||
Status: SuiteStatusRecommended,
|
||||
})
|
||||
|
||||
// currently recommended suites
|
||||
|
||||
// SuiteKey is a a cipher suite for encryption with a key.
|
||||
SuiteKey = SuiteKeyV1
|
||||
// SuitePassword is a a cipher suite for encryption with a password.
|
||||
SuitePassword = SuitePasswordV1
|
||||
// SuiteRcptOnly is a a cipher suite for encrypting for someone, but without verifying the sender/source.
|
||||
SuiteRcptOnly = SuiteRcptOnlyV1
|
||||
// SuiteSign is a a cipher suite for signing (no encryption).
|
||||
SuiteSign = SuiteSignV1
|
||||
// SuiteComplete is a a cipher suite for both encrypting for someone and signing.
|
||||
SuiteComplete = SuiteCompleteV1
|
||||
// SuiteWire is a a cipher suite for network communication, including authentication of the server, but not the client.
|
||||
SuiteWire = SuiteWireV1
|
||||
)
|
||||
|
||||
func registerSuite(suite *Suite) (suiteID string) {
|
||||
|
|
|
@ -11,6 +11,8 @@ import (
|
|||
)
|
||||
|
||||
func getSuite(t *testing.T, suiteID string) (suite *Suite) {
|
||||
t.Helper()
|
||||
|
||||
suite, ok := GetSuite(suiteID)
|
||||
if !ok {
|
||||
t.Fatalf("suite %s does not exist", suiteID)
|
||||
|
@ -20,6 +22,8 @@ func getSuite(t *testing.T, suiteID string) (suite *Suite) {
|
|||
}
|
||||
|
||||
func TestSuites(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
for _, suite := range Suites() {
|
||||
|
||||
err := suiteBullshitCheck(suite)
|
||||
|
@ -91,10 +95,9 @@ func TestSuites(t *testing.T) {
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
||||
func suiteBullshitCheck(suite *Suite) error { //nolint:maintidx
|
||||
// pre checks
|
||||
if suite.Provides == nil {
|
||||
return errors.New("provides no requirement attributes")
|
||||
|
@ -121,9 +124,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
// tool check loop: start
|
||||
for i, toolID := range suite.Tools {
|
||||
|
||||
////////////////////////////////////////
|
||||
// =====================================
|
||||
// tool check loop: check for duplicates
|
||||
////////////////////////////////////////
|
||||
// =====================================
|
||||
|
||||
for j, dupeToolID := range suite.Tools {
|
||||
if i != j && toolID == dupeToolID {
|
||||
|
@ -131,9 +134,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
}
|
||||
}
|
||||
|
||||
///////////////////////////////////////
|
||||
// ====================================
|
||||
// tool check loop: parse, prep and get
|
||||
///////////////////////////////////////
|
||||
// ====================================
|
||||
|
||||
var (
|
||||
hashTool *hashtools.HashTool
|
||||
|
@ -161,9 +164,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
s.toolsWithState = append(s.toolsWithState, logic)
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////
|
||||
// ============================================================
|
||||
// tool check loop: assign tools to queues and add requirements
|
||||
///////////////////////////////////////////////////////////////
|
||||
// ============================================================
|
||||
|
||||
switch tool.Info.Purpose {
|
||||
case tools.PurposeKeyDerivation:
|
||||
|
@ -190,6 +193,7 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
|
||||
case tools.PurposeSigning:
|
||||
s.signers = append(s.signers, logic)
|
||||
s.toolRequirements.Add(Integrity)
|
||||
s.toolRequirements.Add(SenderAuthentication)
|
||||
|
||||
case tools.PurposeIntegratedCipher:
|
||||
|
@ -206,9 +210,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
s.toolRequirements.Add(Integrity)
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////
|
||||
// =============================================
|
||||
// tool check loop: process options, get hashers
|
||||
////////////////////////////////////////////////
|
||||
// =============================================
|
||||
|
||||
for _, option := range tool.Info.Options {
|
||||
switch option {
|
||||
|
@ -260,9 +264,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
}
|
||||
}
|
||||
|
||||
///////////////////////////////////
|
||||
// ================================
|
||||
// tool check loop: initialize tool
|
||||
///////////////////////////////////
|
||||
// ================================
|
||||
|
||||
// init tool
|
||||
logic.Init(
|
||||
|
@ -275,9 +279,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
hashSumFn,
|
||||
)
|
||||
|
||||
//////////////////////////////////////////////////
|
||||
// ===============================================
|
||||
// tool check loop: calc and check security levels
|
||||
//////////////////////////////////////////////////
|
||||
// ===============================================
|
||||
|
||||
err = s.calcAndCheckSecurityLevel(logic, nil)
|
||||
if err != nil {
|
||||
|
@ -286,11 +290,11 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
|
||||
} // tool check loop: end
|
||||
|
||||
///////////////
|
||||
// ============
|
||||
// final checks
|
||||
///////////////
|
||||
// ============
|
||||
|
||||
// check requirements requirements
|
||||
// check requirements
|
||||
if s.toolRequirements.Empty() {
|
||||
return errors.New("suite does not provide any security attributes")
|
||||
}
|
||||
|
@ -320,9 +324,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
return errors.New("key derivation tool specified, but not needed")
|
||||
}
|
||||
|
||||
/////////////////////////////////////////
|
||||
// ======================================
|
||||
// check if values match suite definition
|
||||
/////////////////////////////////////////
|
||||
// ======================================
|
||||
|
||||
// check if security level matches
|
||||
if s.SecurityLevel != suite.SecurityLevel {
|
||||
|
@ -338,9 +342,9 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
)
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////
|
||||
// ========================================================
|
||||
// check if computeSuiteAttributes returns the same results
|
||||
///////////////////////////////////////////////////////////
|
||||
// ========================================================
|
||||
|
||||
computedSuite := computeSuiteAttributes(suite.Tools, assumeKey)
|
||||
if computedSuite == nil {
|
||||
|
@ -361,23 +365,23 @@ func suiteBullshitCheck(suite *Suite) error { //nolint:gocognit,gocyclo
|
|||
}
|
||||
|
||||
func computeSuiteAttributes(toolIDs []string, assumeKey bool) *Suite {
|
||||
new := &Suite{
|
||||
newSuite := &Suite{
|
||||
Provides: newEmptyRequirements(),
|
||||
SecurityLevel: 0,
|
||||
}
|
||||
|
||||
// if we have a key
|
||||
if assumeKey {
|
||||
new.Provides.Add(SenderAuthentication)
|
||||
new.Provides.Add(RecipientAuthentication)
|
||||
newSuite.Provides.Add(SenderAuthentication)
|
||||
newSuite.Provides.Add(RecipientAuthentication)
|
||||
}
|
||||
|
||||
// check all security levels and collect attributes
|
||||
for _, toolID := range toolIDs {
|
||||
|
||||
///////////////////////////////////////
|
||||
// ====================================
|
||||
// tool check loop: parse, prep and get
|
||||
///////////////////////////////////////
|
||||
// ====================================
|
||||
|
||||
var hashTool *hashtools.HashTool
|
||||
|
||||
|
@ -398,38 +402,39 @@ func computeSuiteAttributes(toolIDs []string, assumeKey bool) *Suite {
|
|||
// create logic instance and add to logic and state lists
|
||||
logic := tool.Factory()
|
||||
|
||||
//////////////////////////////////////
|
||||
// ===================================
|
||||
// tool check loop: collect attributes
|
||||
//////////////////////////////////////
|
||||
// ===================================
|
||||
|
||||
switch tool.Info.Purpose {
|
||||
case tools.PurposePassDerivation:
|
||||
new.Provides.Add(SenderAuthentication)
|
||||
new.Provides.Add(RecipientAuthentication)
|
||||
newSuite.Provides.Add(SenderAuthentication)
|
||||
newSuite.Provides.Add(RecipientAuthentication)
|
||||
|
||||
case tools.PurposeKeyExchange:
|
||||
new.Provides.Add(RecipientAuthentication)
|
||||
newSuite.Provides.Add(RecipientAuthentication)
|
||||
|
||||
case tools.PurposeKeyEncapsulation:
|
||||
new.Provides.Add(RecipientAuthentication)
|
||||
newSuite.Provides.Add(RecipientAuthentication)
|
||||
|
||||
case tools.PurposeSigning:
|
||||
new.Provides.Add(SenderAuthentication)
|
||||
newSuite.Provides.Add(Integrity)
|
||||
newSuite.Provides.Add(SenderAuthentication)
|
||||
|
||||
case tools.PurposeIntegratedCipher:
|
||||
new.Provides.Add(Confidentiality)
|
||||
new.Provides.Add(Integrity)
|
||||
newSuite.Provides.Add(Confidentiality)
|
||||
newSuite.Provides.Add(Integrity)
|
||||
|
||||
case tools.PurposeCipher:
|
||||
new.Provides.Add(Confidentiality)
|
||||
newSuite.Provides.Add(Confidentiality)
|
||||
|
||||
case tools.PurposeMAC:
|
||||
new.Provides.Add(Integrity)
|
||||
newSuite.Provides.Add(Integrity)
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////
|
||||
// =============================================
|
||||
// tool check loop: process options, get hashers
|
||||
////////////////////////////////////////////////
|
||||
// =============================================
|
||||
|
||||
for _, option := range tool.Info.Options {
|
||||
switch option {
|
||||
|
@ -442,9 +447,9 @@ func computeSuiteAttributes(toolIDs []string, assumeKey bool) *Suite {
|
|||
}
|
||||
}
|
||||
|
||||
///////////////////////////////////
|
||||
// ================================
|
||||
// tool check loop: initialize tool
|
||||
///////////////////////////////////
|
||||
// ================================
|
||||
|
||||
// init tool
|
||||
logic.Init(
|
||||
|
@ -456,19 +461,19 @@ func computeSuiteAttributes(toolIDs []string, assumeKey bool) *Suite {
|
|||
nil,
|
||||
)
|
||||
|
||||
//////////////////////////////////////////
|
||||
// =======================================
|
||||
// tool check loop: compute security level
|
||||
//////////////////////////////////////////
|
||||
// =======================================
|
||||
|
||||
toolSecurityLevel, err := logic.SecurityLevel(nil)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
if new.SecurityLevel == 0 || toolSecurityLevel < new.SecurityLevel {
|
||||
new.SecurityLevel = toolSecurityLevel
|
||||
if newSuite.SecurityLevel == 0 || toolSecurityLevel < newSuite.SecurityLevel {
|
||||
newSuite.SecurityLevel = toolSecurityLevel
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return new
|
||||
return newSuite
|
||||
}
|
||||
|
|
61  suites_v1.go (new file)
@ -0,0 +1,61 @@
package jess //nolint:dupl

var (
	// SuiteKeyV1 is a cipher suite for encryption with a key.
	SuiteKeyV1 = registerSuite(&Suite{
		ID:            "key_v1",
		Tools:         []string{"HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
		Provides:      NewRequirements(),
		SecurityLevel: 128,
		Status:        SuiteStatusRecommended,
	})
	// SuitePasswordV1 is a cipher suite for encryption with a password.
	SuitePasswordV1 = registerSuite(&Suite{
		ID:            "pw_v1",
		Tools:         []string{"SCRYPT-20", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
		Provides:      NewRequirements(),
		SecurityLevel: 128,
		Status:        SuiteStatusRecommended,
	})
	// SuiteRcptOnlyV1 is a cipher suite for encrypting for someone, but without verifying the sender/source.
	SuiteRcptOnlyV1 = registerSuite(&Suite{
		ID:            "rcpt_v1",
		Tools:         []string{"ECDH-X25519", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
		Provides:      NewRequirements().Remove(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusRecommended,
	})
	// SuiteSignV1 is a cipher suite for signing (no encryption).
	SuiteSignV1 = registerSuite(&Suite{
		ID:            "sign_v1",
		Tools:         []string{"Ed25519(BLAKE2b-256)"},
		Provides:      newEmptyRequirements().Add(Integrity).Add(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusRecommended,
	})
	// SuiteSignFileV1 is a cipher suite for signing files (no encryption).
	// SHA2_256 is chosen for better compatibility with other tool sets and workflows.
	SuiteSignFileV1 = registerSuite(&Suite{
		ID:            "signfile_v1",
		Tools:         []string{"Ed25519(SHA2-256)"},
		Provides:      newEmptyRequirements().Add(Integrity).Add(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusRecommended,
	})
	// SuiteCompleteV1 is a cipher suite for both encrypting for someone and signing.
	SuiteCompleteV1 = registerSuite(&Suite{
		ID:            "v1",
		Tools:         []string{"ECDH-X25519", "Ed25519(BLAKE2b-256)", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
		Provides:      NewRequirements(),
		SecurityLevel: 128,
		Status:        SuiteStatusRecommended,
	})
	// SuiteWireV1 is a cipher suite for network communication, including authentication of the server, but not the client.
	SuiteWireV1 = registerSuite(&Suite{
		ID:            "w1",
		Tools:         []string{"ECDH-X25519", "HKDF(BLAKE2b-256)", "CHACHA20-POLY1305"},
		Provides:      NewRequirements().Remove(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusRecommended,
	})
)
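Registered suites can be looked up by the IDs defined above. The GetSuite helper and the Suite fields used here both appear elsewhere in this changeset (suites_test.go and the Suite definitions), so the sketch below only assumes the module import path:

package main

import (
	"fmt"
	"log"

	"github.com/safing/jess"
)

func main() {
	// Look up the new file-signing suite by its ID.
	suite, ok := jess.GetSuite("signfile_v1")
	if !ok {
		log.Fatal("suite not registered")
	}
	fmt.Println(suite.ID, suite.Tools, suite.SecurityLevel)
}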
61  suites_v2.go (new file)
@ -0,0 +1,61 @@
package jess //nolint:dupl

var (
	// SuiteKeyV2 is a cipher suite for encryption with a key.
	SuiteKeyV2 = registerSuite(&Suite{
		ID:            "key_v2",
		Tools:         []string{"BLAKE3-KDF", "CHACHA20-POLY1305"},
		Provides:      NewRequirements(),
		SecurityLevel: 128,
		Status:        SuiteStatusPermitted,
	})
	// SuitePasswordV2 is a cipher suite for encryption with a password.
	SuitePasswordV2 = registerSuite(&Suite{
		ID:            "pw_v2",
		Tools:         []string{"SCRYPT-20", "BLAKE3-KDF", "CHACHA20-POLY1305"},
		Provides:      NewRequirements(),
		SecurityLevel: 128,
		Status:        SuiteStatusPermitted,
	})
	// SuiteRcptOnlyV2 is a cipher suite for encrypting for someone, but without verifying the sender/source.
	SuiteRcptOnlyV2 = registerSuite(&Suite{
		ID:            "rcpt_v2",
		Tools:         []string{"ECDH-X25519", "BLAKE3-KDF", "CHACHA20-POLY1305"},
		Provides:      NewRequirements().Remove(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusPermitted,
	})
	// SuiteSignV2 is a cipher suite for signing (no encryption).
	SuiteSignV2 = registerSuite(&Suite{
		ID:            "sign_v2",
		Tools:         []string{"Ed25519(BLAKE3)"},
		Provides:      newEmptyRequirements().Add(Integrity).Add(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusPermitted,
	})
	// SuiteSignFileV2 is a cipher suite for signing files (no encryption).
	// SHA2_256 is chosen for better compatibility with other tool sets and workflows.
	SuiteSignFileV2 = registerSuite(&Suite{
		ID:            "signfile_v2",
		Tools:         []string{"Ed25519(BLAKE3)"},
		Provides:      newEmptyRequirements().Add(Integrity).Add(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusPermitted,
	})
	// SuiteCompleteV2 is a cipher suite for both encrypting for someone and signing.
	SuiteCompleteV2 = registerSuite(&Suite{
		ID:            "v2",
		Tools:         []string{"ECDH-X25519", "Ed25519(BLAKE3)", "BLAKE3-KDF", "CHACHA20-POLY1305"},
		Provides:      NewRequirements(),
		SecurityLevel: 128,
		Status:        SuiteStatusPermitted,
	})
	// SuiteWireV2 is a cipher suite for network communication, including authentication of the server, but not the client.
	SuiteWireV2 = registerSuite(&Suite{
		ID:            "w2",
		Tools:         []string{"ECDH-X25519", "BLAKE3-KDF", "CHACHA20-POLY1305"},
		Provides:      NewRequirements().Remove(SenderAuthentication),
		SecurityLevel: 128,
		Status:        SuiteStatusPermitted,
	})
)
@ -5,7 +5,6 @@ import (
	"sync"

	"github.com/safing/jess"

	"github.com/safing/jess/tools"
)

@ -7,6 +7,7 @@ import (
)

func TestSupply(t *testing.T) {
	t.Parallel()

	total := 10
	supply := NewSignetSupply(total)
87  test
@ -4,22 +4,22 @@ warnings=0
|
|||
errors=0
|
||||
scripted=0
|
||||
goUp="\\e[1A"
|
||||
all=0
|
||||
fullTestFlags="-short"
|
||||
install=0
|
||||
testonly=0
|
||||
|
||||
function help {
|
||||
echo "usage: $0 [command] [options]"
|
||||
echo ""
|
||||
echo "commands:"
|
||||
echo " <none> run baseline tests"
|
||||
echo " all run all tests"
|
||||
echo " install install deps for running baseline tests"
|
||||
echo " install all install deps for running all tests"
|
||||
echo " full run full tests (ie. not short)"
|
||||
echo " install install deps for running tests"
|
||||
echo ""
|
||||
echo "options:"
|
||||
echo " --scripted dont jump console lines (still use colors)"
|
||||
echo " [package] run tests only on this package"
|
||||
echo " --scripted don't jump console lines (still use colors)"
|
||||
echo " --test-only run tests only, no linters"
|
||||
echo " [package] run only on this package"
|
||||
}
|
||||
|
||||
function run {
|
||||
|
@ -62,22 +62,6 @@ function run {
|
|||
rm -f $tmpfile
|
||||
}
|
||||
|
||||
function checkformat {
|
||||
if [[ $scripted -eq 0 ]]; then
|
||||
echo "[......] gofmt $1"
|
||||
fi
|
||||
|
||||
output=$(gofmt -l $GOPATH/src/$1/*.go)
|
||||
if [[ $output == "" ]]; then
|
||||
echo -e "${goUp}[\e[01;32m OK \e[00m] gofmt $*"
|
||||
else
|
||||
echo -e "${goUp}[\e[01;31m FAIL \e[00m] gofmt $*"
|
||||
echo "The following files do not conform to gofmt:"
|
||||
gofmt -l $GOPATH/src/$1/*.go # keeps format
|
||||
errors=$((errors+1))
|
||||
fi
|
||||
}
|
||||
|
||||
# get and switch to script dir
|
||||
baseDir="$( cd "$(dirname "$0")" && pwd )"
|
||||
cd "$baseDir"
|
||||
|
@ -94,12 +78,15 @@ while true; do
|
|||
goUp=""
|
||||
shift 1
|
||||
;;
|
||||
"--test-only")
|
||||
testonly=1
|
||||
shift 1
|
||||
;;
|
||||
"install")
|
||||
install=1
|
||||
shift 1
|
||||
;;
|
||||
"all")
|
||||
all=1
|
||||
"full")
|
||||
fullTestFlags=""
|
||||
shift 1
|
||||
;;
|
||||
|
@ -117,12 +104,9 @@ fi
|
|||
# install
|
||||
if [[ $install -eq 1 ]]; then
|
||||
echo "installing dependencies..."
|
||||
echo "$ go get -u golang.org/x/lint/golint"
|
||||
go get -u golang.org/x/lint/golint
|
||||
if [[ $all -eq 1 ]]; then
|
||||
echo "$ go get -u github.com/golangci/golangci-lint/cmd/golangci-lint"
|
||||
go get -u github.com/golangci/golangci-lint/cmd/golangci-lint
|
||||
fi
|
||||
# TODO: update golangci-lint version regularly
|
||||
echo "$ curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.44.0"
|
||||
curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.44.0
|
||||
exit 0
|
||||
fi
|
||||
|
||||
|
@ -131,24 +115,18 @@ if [[ $(which go) == "" ]]; then
|
|||
echo "go command not found"
|
||||
exit 1
|
||||
fi
|
||||
if [[ $(which gofmt) == "" ]]; then
|
||||
echo "gofmt command not found"
|
||||
exit 1
|
||||
fi
|
||||
if [[ $(which golint) == "" ]]; then
|
||||
echo "golint command not found"
|
||||
echo "install with: go get -u golang.org/x/lint/golint"
|
||||
echo "or run: ./test install"
|
||||
exit 1
|
||||
fi
|
||||
if [[ $all -eq 1 ]]; then
|
||||
if [[ $testonly -eq 0 ]]; then
|
||||
if [[ $(which gofmt) == "" ]]; then
|
||||
echo "gofmt command not found"
|
||||
exit 1
|
||||
fi
|
||||
if [[ $(which golangci-lint) == "" ]]; then
|
||||
echo "golangci-lint command not found"
|
||||
echo "install locally with: go get -u github.com/golangci/golangci-lint/cmd/golangci-lint"
|
||||
echo "or run: ./test install all"
|
||||
echo ""
|
||||
echo "hint: install for CI with: curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin vX.Y.Z"
|
||||
echo "install with: curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin vX.Y.Z"
|
||||
echo "don't forget to specify the version you want"
|
||||
echo "or run: ./test install"
|
||||
echo ""
|
||||
echo "alternatively, install the current dev version with: go get -u github.com/golangci/golangci-lint/cmd/golangci-lint"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
@ -156,15 +134,10 @@ fi
|
|||
# target selection
|
||||
if [[ "$1" == "" ]]; then
|
||||
# get all packages
|
||||
packages=$(go list ./...)
|
||||
packages=$(go list -e ./...)
|
||||
else
|
||||
# single package testing
|
||||
packages=$(go list)/$1
|
||||
if [[ ! -d "$GOPATH/src/$packages" ]]; then
|
||||
echo "go package $packages does not exist"
|
||||
help
|
||||
exit 1
|
||||
fi
|
||||
packages=$(go list -e)/$1
|
||||
echo "note: only running tests for package $packages"
|
||||
fi
|
||||
|
||||
|
@ -174,13 +147,13 @@ echo "running tests for ${platformInfo//$'\n'/ }:"
|
|||
|
||||
# run vet/test on packages
|
||||
for package in $packages; do
|
||||
packagename=${package#github.com/safing/jess} #TODO: could be queried with `go list .`
|
||||
packagename=${packagename#/}
|
||||
echo ""
|
||||
echo $package
|
||||
checkformat $package
|
||||
run golint -set_exit_status -min_confidence 1.0 $package
|
||||
run go vet $package
|
||||
if [[ $all -eq 1 ]]; then
|
||||
run golangci-lint run $GOPATH/src/$package
|
||||
if [[ $testonly -eq 0 ]]; then
|
||||
run go vet $package
|
||||
run golangci-lint run $packagename
|
||||
fi
|
||||
run go test -cover $fullTestFlags $package
|
||||
done
|
||||
|
|
3  tools.go

@ -2,8 +2,7 @@ package jess

import (
	"github.com/safing/jess/tools"

	// import all tools
	// Import all tools.
	_ "github.com/safing/jess/tools/all"
)

@ -1,7 +1,9 @@
// Package all imports all tool subpackages
package all

import (
	// Import all tool subpackages
	// Import all tool subpackages.
	_ "github.com/safing/jess/tools/blake3"
	_ "github.com/safing/jess/tools/ecdh"
	_ "github.com/safing/jess/tools/gostdlib"
)
68  tools/blake3/kdf.go (new file)
@ -0,0 +1,68 @@
package blake3

import (
	"errors"
	"fmt"
	"io"

	"github.com/zeebo/blake3"

	"github.com/safing/jess/tools"
)

func init() {
	tools.Register(&tools.Tool{
		Info: &tools.ToolInfo{
			Name:          "BLAKE3-KDF",
			Purpose:       tools.PurposeKeyDerivation,
			SecurityLevel: 128,
			Comment:       "cryptographic hash function based on Bao and BLAKE2",
			Author:        "Jean-Philippe Aumasson et al., 2020",
		},
		Factory: func() tools.ToolLogic { return &KDF{} },
	})
}

// KDF implements the cryptographic interface for BLAKE3 key derivation.
type KDF struct {
	tools.ToolLogicBase
	reader io.Reader
}

// InitKeyDerivation implements the ToolLogic interface.
func (keyder *KDF) InitKeyDerivation(nonce []byte, material ...[]byte) error {
	// Check params.
	if len(material) < 1 || len(material[0]) == 0 || len(nonce) == 0 {
		return errors.New("must supply at least one key and a nonce as key material")
	}

	// Setup KDF.
	// Use nonce as kdf context.
	h := blake3.NewDeriveKey(string(nonce))
	// Then add all the key material.
	for _, m := range material {
		_, _ = h.Write(m)
	}
	// Get key reader.
	keyder.reader = h.Digest()

	return nil
}

// DeriveKey implements the ToolLogic interface.
func (keyder *KDF) DeriveKey(size int) ([]byte, error) {
	key := make([]byte, size)
	return key, keyder.DeriveKeyWriteTo(key)
}

// DeriveKeyWriteTo implements the ToolLogic interface.
func (keyder *KDF) DeriveKeyWriteTo(newKey []byte) error {
	n, err := io.ReadFull(keyder.reader, newKey)
	if err != nil {
		return fmt.Errorf("failed to generate key: %w", err)
	}
	if n != len(newKey) {
		return errors.New("failed to generate key: EOF")
	}
	return nil
}
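The new BLAKE3-KDF tool can also be exercised directly for a quick smoke test. A sketch only: in real use the tool is driven through a jess session rather than instantiated by hand, and the nonce and key material below are made-up placeholders:

package main

import (
	"encoding/hex"
	"fmt"
	"log"

	"github.com/safing/jess/tools/blake3"
)

func main() {
	kdf := &blake3.KDF{}
	nonce := []byte("example-context")                     // placeholder context
	material := []byte("0123456789abcdef0123456789abcdef") // placeholder key material

	if err := kdf.InitKeyDerivation(nonce, material); err != nil {
		log.Fatal(err)
	}
	k1, err := kdf.DeriveKey(32)
	if err != nil {
		log.Fatal(err)
	}
	k2, err := kdf.DeriveKey(32)
	if err != nil {
		log.Fatal(err)
	}
	// The underlying reader keeps streaming, so the second key differs from the first.
	fmt.Println(hex.EncodeToString(k1))
	fmt.Println(hex.EncodeToString(k2))
}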
@ -3,21 +3,20 @@ package ecdh
import (
	"crypto"
	"crypto/elliptic"
	"fmt"
	"math/big"

	"github.com/safing/jess/tools"
	"github.com/safing/portbase/container"

	"github.com/aead/ecdh"

	"github.com/safing/jess/tools"
	"github.com/safing/structures/container"
)

var (
	nistCurveInfo = &tools.ToolInfo{
		Purpose: tools.PurposeKeyExchange,
		Comment: "FIPS 186",
		Author:  "NIST, 2009",
	}
)
var nistCurveInfo = &tools.ToolInfo{
	Purpose: tools.PurposeKeyExchange,
	Comment: "FIPS 186",
	Author:  "NIST, 2009",
}

func init() {
	tools.Register(&tools.Tool{
@ -117,13 +116,20 @@ func (ec *NistCurve) StoreKey(signet tools.SignetInt) error {
	c.AppendNumber(1)

	// store public key
	curvePoint := pubKey.(ecdh.Point)
	curvePoint, ok := pubKey.(ecdh.Point)
	if !ok {
		return fmt.Errorf("public key of invalid type %T", pubKey)
	}
	c.AppendAsBlock(curvePoint.X.Bytes())
	c.AppendAsBlock(curvePoint.Y.Bytes())

	// store private key
	if !public {
		c.Append(privKey.([]byte))
		privKeyData, ok := privKey.([]byte)
		if !ok {
			return fmt.Errorf("private key of invalid type %T", privKey)
		}
		c.Append(privKeyData)
	}

	signet.SetStoredKey(c.CompileData(), public)
@ -2,11 +2,12 @@ package ecdh

import (
	"crypto"

	"github.com/safing/jess/tools"
	"github.com/safing/portbase/container"
	"fmt"

	"github.com/aead/ecdh"

	"github.com/safing/jess/tools"
	"github.com/safing/structures/container"
)

func init() {
@ -87,10 +88,16 @@ func (ec *X25519Curve) StoreKey(signet tools.SignetInt) error {
	c.AppendNumber(1)

	// store keys
	pubKeyData := pubKey.([32]byte)
	pubKeyData, ok := pubKey.([32]byte)
	if !ok {
		return fmt.Errorf("public key of invalid type %T", pubKey)
	}
	c.Append(pubKeyData[:])
	if !public {
		privKeyData := privKey.([32]byte)
		privKeyData, ok := privKey.([32]byte)
		if !ok {
			return fmt.Errorf("private key of invalid type %T", privKey)
		}
		c.Append(privKeyData[:])
	}

@ -7,7 +7,6 @@ import (
	"github.com/safing/jess/tools"
)

//nolint:dupl
func init() {
	aesCtrInfo := &tools.ToolInfo{
		Purpose: tools.PurposeCipher,
@ -7,7 +7,6 @@ import (
	"github.com/safing/jess/tools"
)

//nolint:dupl
func init() {
	aesGcmInfo := &tools.ToolInfo{
		Purpose: tools.PurposeIntegratedCipher,
@ -3,9 +3,9 @@ package gostdlib
import (
	"crypto/cipher"

	"github.com/safing/jess/tools"

	"golang.org/x/crypto/chacha20poly1305"

	"github.com/safing/jess/tools"
)

func init() {
@ -6,7 +6,7 @@ import (
	"errors"

	"github.com/safing/jess/tools"
	"github.com/safing/portbase/container"
	"github.com/safing/structures/container"
)

func init() {
@ -5,10 +5,10 @@ import (
	"fmt"
	"io"

	"github.com/safing/jess/tools"
	"github.com/safing/portbase/container"

	"golang.org/x/crypto/hkdf"

	"github.com/safing/jess/tools"
	"github.com/safing/structures/container"
)

func init() {
@ -56,12 +56,12 @@ func (keyder *HKDF) DeriveKey(size int) ([]byte, error) {
}

// DeriveKeyWriteTo implements the ToolLogic interface.
func (keyder *HKDF) DeriveKeyWriteTo(new []byte) error {
	n, err := io.ReadFull(keyder.reader, new)
func (keyder *HKDF) DeriveKeyWriteTo(newKey []byte) error {
	n, err := io.ReadFull(keyder.reader, newKey)
	if err != nil {
		return fmt.Errorf("failed to generate key: %s", err)
		return fmt.Errorf("failed to generate key: %w", err)
	}
	if n != len(new) {
	if n != len(newKey) {
		return errors.New("failed to generate key: EOF")
	}
	return nil
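The StoreKey and HKDF hunks above share two small patterns worth spelling out: comma-ok type assertions instead of assertions that can panic, and %w instead of %s so the wrapped error stays inspectable. A standalone illustration in plain Go, independent of the jess APIs:

package main

import (
	"errors"
	"fmt"
	"io"
)

func main() {
	// Comma-ok assertion: report a useful error instead of panicking.
	var pubKey interface{} = "definitely not a [32]byte"
	if _, ok := pubKey.([32]byte); !ok {
		fmt.Printf("public key of invalid type %T\n", pubKey)
	}

	// %w keeps the cause available to errors.Is / errors.As; %s would not.
	err := fmt.Errorf("failed to generate key: %w", io.ErrUnexpectedEOF)
	fmt.Println(errors.Is(err, io.ErrUnexpectedEOF)) // true
}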
Some files were not shown because too many files have changed in this diff.