Add support for embedded text, json and yaml checksums

Daniel 2023-09-26 13:05:31 +02:00
parent ed928f9a3e
commit 9c7f6954e6
7 changed files with 850 additions and 1 deletion

cmd/cmd-checksum.go Normal file

@@ -0,0 +1,111 @@
package main
import (
"errors"
"fmt"
"os"
"path/filepath"
"github.com/spf13/cobra"
"github.com/safing/jess/filesig"
)
func init() {
rootCmd.AddCommand(checksum)
checksum.AddCommand(checksumAdd)
checksum.AddCommand(checksumVerify)
}
var (
checksum = &cobra.Command{
Use: "checksum",
Short: "add or verify embedded checksums",
}
checksumAddUsage = "usage: checksum add <file>"
checksumAdd = &cobra.Command{
Use: "add <file>",
Short: "add an embedded checksum to a file",
Long: "add an embedded checksum to a file (support file types: txt, json, yaml)",
RunE: handleChecksumAdd,
}
checksumVerifyUsage = "usage: checksum verify <file>"
checksumVerify = &cobra.Command{
Use: "verify <file>",
Short: "verify the embedded checksum of a file",
Long: "verify the embedded checksum of a file (support file types: txt, json, yaml)",
RunE: handleChecksumVerify,
}
)
func handleChecksumAdd(cmd *cobra.Command, args []string) error {
// Check args.
if len(args) != 1 {
return errors.New(checksumAddUsage)
}
filename := args[0]
data, err := os.ReadFile(filename)
if err != nil {
return fmt.Errorf("failed to read file: %w", err)
}
switch filepath.Ext(filename) {
case ".json":
data, err = filesig.AddJSONChecksum(data)
case ".yml", ".yaml":
data, err = filesig.AddYAMLChecksum(data, filesig.TextPlacementAfterComment)
case ".txt":
data, err = filesig.AddTextFileChecksum(data, "#", filesig.TextPlacementAfterComment)
default:
return errors.New("unsupported file format")
}
if err != nil {
return err
}
// Write back to disk.
fileInfo, err := os.Stat(filename)
if err != nil {
return fmt.Errorf("failed to stat file: %w", err)
}
err = os.WriteFile(filename, data, fileInfo.Mode().Perm())
if err != nil {
return fmt.Errorf("failed to write back file with checksum: %w", err)
}
fmt.Println("checksum added")
return nil
}
func handleChecksumVerify(cmd *cobra.Command, args []string) error {
// Check args.
if len(args) != 1 {
return errors.New(checksumVerifyUsage)
}
filename := args[0]
data, err := os.ReadFile(filename)
if err != nil {
return fmt.Errorf("failed to read file: %w", err)
}
switch filepath.Ext(filename) {
case ".json":
err = filesig.VerifyJSONChecksum(data)
case ".yml", ".yaml":
err = filesig.VerifyYAMLChecksum(data)
case ".txt":
err = filesig.VerifyTextFileChecksum(data, "#")
default:
return errors.New("unsupported file format")
}
if err != nil {
return err
}
fmt.Println("checksum verified")
return nil
}
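
For context: the checksum command above is a thin wrapper around the new filesig functions introduced in the files below. A minimal sketch of calling that API directly from Go (the file name "config.json" and the error handling are illustrative, not part of this commit):

package main

import (
	"fmt"
	"os"

	"github.com/safing/jess/filesig"
)

func main() {
	// Read a JSON file, embed a checksum under its "_jess-checksum" key,
	// and verify it again.
	data, err := os.ReadFile("config.json")
	if err != nil {
		panic(err)
	}
	withChecksum, err := filesig.AddJSONChecksum(data)
	if err != nil {
		panic(err)
	}
	if err := filesig.VerifyJSONChecksum(withChecksum); err != nil {
		fmt.Println("checksum verification failed:", err)
		return
	}
	fmt.Println("checksum verified")
}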

filesig/json.go Normal file

@@ -0,0 +1,197 @@
package filesig
import (
"errors"
"fmt"
"github.com/tidwall/gjson"
"github.com/tidwall/pretty"
"github.com/tidwall/sjson"
"golang.org/x/exp/slices"
"github.com/safing/jess/lhash"
)
// JSON file metadata keys.
const (
JSONKeyPrefix = "_jess-"
JSONChecksumKey = JSONKeyPrefix + "checksum"
JSONSignatureKey = JSONKeyPrefix + "signature"
)
// AddJSONChecksum adds a checksum to a JSON file.
func AddJSONChecksum(data []byte) ([]byte, error) {
// Extract content and metadata from json.
content, checksums, signatures, err := jsonSplit(data)
if err != nil {
return nil, err
}
// Calculate checksum.
h := lhash.BLAKE2b_256.Digest(content)
checksums = append(checksums, h.Base58())
// Sort and deduplicate checksums and sigs.
slices.Sort[[]string, string](checksums)
checksums = slices.Compact[[]string, string](checksums)
slices.Sort[[]string, string](signatures)
signatures = slices.Compact[[]string, string](signatures)
// Add metadata and return.
return jsonAddMeta(content, checksums, signatures)
}
// VerifyJSONChecksum checks a checksum in a JSON file.
func VerifyJSONChecksum(data []byte) error {
// Extract content and metadata from json.
content, checksums, _, err := jsonSplit(data)
if err != nil {
return err
}
// Verify all checksums.
var checksumsVerified int
for _, checksum := range checksums {
// Parse checksum.
h, err := lhash.FromBase58(checksum)
if err != nil {
return fmt.Errorf("%w: failed to parse labeled hash: %w", ErrChecksumFailed, err)
}
// Verify checksum.
if !h.Matches(content) {
return ErrChecksumFailed
}
checksumsVerified++
}
// Fail when no checksums were verified.
if checksumsVerified == 0 {
return ErrChecksumMissing
}
return nil
}
func jsonSplit(data []byte) (
content []byte,
checksums []string,
signatures []string,
err error,
) {
// Check json.
if !gjson.ValidBytes(data) {
return nil, nil, nil, errors.New("invalid json")
}
content = data
// Get checksums.
result := gjson.GetBytes(content, JSONChecksumKey)
if result.Exists() {
if result.IsArray() {
array := result.Array()
checksums = make([]string, 0, len(array))
for _, result := range array {
if result.Type == gjson.String {
checksums = append(checksums, result.String())
}
}
} else if result.Type == gjson.String {
checksums = []string{result.String()}
}
// Delete key.
content, err = sjson.DeleteBytes(content, JSONChecksumKey)
if err != nil {
return nil, nil, nil, err
}
}
// Get signatures.
result = gjson.GetBytes(content, JSONSignatureKey)
if result.Exists() {
if result.IsArray() {
array := result.Array()
signatures = make([]string, 0, len(array))
for _, result := range array {
if result.Type == gjson.String {
signatures = append(signatures, result.String())
}
}
} else if result.Type == gjson.String {
signatures = []string{result.String()}
}
// Delete key.
content, err = sjson.DeleteBytes(content, JSONSignatureKey)
if err != nil {
return nil, nil, nil, err
}
}
// Format for reproducible checksums and signatures.
content = pretty.PrettyOptions(content, &pretty.Options{
Width: 200, // Must not change!
Prefix: "", // Must not change!
Indent: " ", // Must not change!
SortKeys: true, // Must not change!
})
return content, checksums, signatures, nil
}
func jsonAddMeta(data []byte, checksums, signatures []string) ([]byte, error) {
var (
err error
opts = &sjson.Options{
ReplaceInPlace: true,
}
)
// Add checksums.
switch len(checksums) {
case 0:
// Skip
case 1:
// Add single checksum.
data, err = sjson.SetBytesOptions(
data, JSONChecksumKey, checksums[0], opts,
)
default:
// Add multiple checksums.
data, err = sjson.SetBytesOptions(
data, JSONChecksumKey, checksums, opts,
)
}
if err != nil {
return nil, err
}
// Add signatures.
switch len(signatures) {
case 0:
// Skip
case 1:
// Add single signature.
data, err = sjson.SetBytesOptions(
data, JSONSignatureKey, signatures[0], opts,
)
default:
// Add multiple signatures.
data, err = sjson.SetBytesOptions(
data, JSONSignatureKey, signatures, opts,
)
}
if err != nil {
return nil, err
}
// Final pretty print.
data = pretty.PrettyOptions(data, &pretty.Options{
Width: 200, // Must not change!
Prefix: "", // Must not change!
Indent: " ", // Must not change!
SortKeys: true, // Must not change!
})
return data, nil
}
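
Note on the canonicalization above: because jsonSplit pretty-prints the content with fixed options (sorted keys, two-space indent) before hashing, key order and whitespace in the stored file do not affect verification. A small illustrative sketch, assuming it lives inside the filesig package (the function name is made up; the checksum value is the one used in json_test.go below):

package filesig

import "fmt"

// checksumIsOrderIndependent shows that a reordered, reformatted document
// still verifies against the same embedded checksum.
func checksumIsOrderIndependent() {
	reordered := []byte(`{"c": 1, "a": "b", "_jess-checksum": "ZwtAd75qvioh6uf1NAq64KRgTbqeehFVYmhLmrwu1s7xJo"}`)
	fmt.Println(VerifyJSONChecksum(reordered) == nil) // prints: true
}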

filesig/json_test.go Normal file

@@ -0,0 +1,119 @@
package filesig
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestJSONChecksums(t *testing.T) {
t.Parallel()
// Base test JSON file.
json := `{"a": "b", "c": 1}`
// Test adding and verifying a checksum.
jsonWithChecksum := `{
"_jess-checksum": "ZwtAd75qvioh6uf1NAq64KRgTbqeehFVYmhLmrwu1s7xJo",
"a": "b",
"c": 1
}
`
testJSONWithChecksum, err := AddJSONChecksum([]byte(json))
assert.NoError(t, err, "should be able to add checksum")
assert.Equal(t, jsonWithChecksum, string(testJSONWithChecksum), "should match")
assert.NoError(t,
VerifyJSONChecksum(testJSONWithChecksum),
"checksum should be correct",
)
jsonWithChecksum = `{
"c": 1, "a":"b",
"_jess-checksum": "ZwtAd75qvioh6uf1NAq64KRgTbqeehFVYmhLmrwu1s7xJo"
}`
assert.NoError(t,
VerifyJSONChecksum([]byte(jsonWithChecksum)),
"checksum should be correct",
)
jsonWithMultiChecksum := `{
"_jess-checksum": [
"PTV7S3Ca81aRk2kdNw7q2RfjLfEdPPT5Px5d211nhZedZC",
"PTV7S3Ca81aRk2kdNw7q2RfjLfEdPPT5Px5d211nhZedZC",
"CyDGH55DZUwa556DiYztMXaKZVBDjzWeFETiGmABMbvC3V"
],
"a": "b",
"c": 1
}
`
assert.NoError(t,
VerifyJSONChecksum([]byte(jsonWithMultiChecksum)),
"checksum should be correct",
)
jsonWithMultiChecksumOutput := `{
"_jess-checksum": ["CyDGH55DZUwa556DiYztMXaKZVBDjzWeFETiGmABMbvC3V", "PTV7S3Ca81aRk2kdNw7q2RfjLfEdPPT5Px5d211nhZedZC", "ZwtAd75qvioh6uf1NAq64KRgTbqeehFVYmhLmrwu1s7xJo"],
"a": "b",
"c": 1
}
`
testJSONWithMultiChecksum, err := AddJSONChecksum([]byte(jsonWithMultiChecksum))
assert.NoError(t, err, "should be able to add checksum")
assert.Equal(t, jsonWithMultiChecksumOutput, string(testJSONWithMultiChecksum), "should match")
assert.NoError(t,
VerifyJSONChecksum(testJSONWithMultiChecksum),
"checksum should be correct",
)
// // Test with multiple checksums.
// textWithMultiChecksum := `# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
// #!/bin/bash
// # Initial
// # Comment
// # Block
// # jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
// do_something()
// # jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
// `
// assert.NoError(t,
// VerifyTextFileChecksum([]byte(textWithMultiChecksum), "#"),
// "checksum should be correct",
// )
// textWithMultiChecksumOutput := `#!/bin/bash
// # Initial
// # Comment
// # Block
// # jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
// # jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
// # jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
// # jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
// do_something()
// `
// testTextWithMultiChecksumOutput, err := AddTextFileChecksum([]byte(textWithMultiChecksum), "#", AfterComment)
// assert.NoError(t, err, "should be able to add checksum")
// assert.Equal(t, textWithMultiChecksumOutput, string(testTextWithMultiChecksumOutput), "should match")
// // Test failing checksums.
// textWithFailingChecksums := `#!/bin/bash
// # Initial
// # Comment
// # Block
// # jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
// # jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
// # jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
// # jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjaaaaaaaaaaaaaaaaaaaaa
// do_something()
// `
//
// assert.Error(t, VerifyTextFileChecksum([]byte(textWithFailingChecksums), "#"), "should fail")
}

filesig/text.go Normal file

@@ -0,0 +1,232 @@
package filesig
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"strings"
"golang.org/x/exp/slices"
"github.com/safing/jess/lhash"
)
// Text file metadata keys.
const (
TextKeyPrefix = "jess-"
TextChecksumKey = TextKeyPrefix + "checksum"
TextSignatureKey = TextKeyPrefix + "signature"
)
// Text Operation Errors.
var (
ErrChecksumMissing = errors.New("no checksum found")
ErrChecksumFailed = errors.New("checksum does not match")
ErrSignatureMissing = errors.New("signature not found")
ErrSignatureFailed = errors.New("signature does not match")
)
// TextPlacement signifies where jess metadata is put in text files.
type TextPlacement string
const (
// TextPlacementTop places the metadata at the top of the file.
TextPlacementTop TextPlacement = "top"
// TextPlacementBottom places the metadata at the end of the file.
TextPlacementBottom TextPlacement = "bottom"
// TextPlacementAfterComment places the metadata at end of the top comment
// block, or at the top, if the first line is not a comment.
TextPlacementAfterComment TextPlacement = "after-comment"
defaultMetaPlacement = TextPlacementAfterComment
)
// AddTextFileChecksum adds a checksum to a text file.
func AddTextFileChecksum(data []byte, commentSign string, placement TextPlacement) ([]byte, error) {
// Split text file into content and jess metadata lines.
content, metaLines, err := textSplit(data, commentSign)
if err != nil {
return nil, err
}
// Calculate checksum.
h := lhash.BLAKE2b_256.Digest(content)
metaLines = append(metaLines, TextChecksumKey+": "+h.Base58())
// Sort and deduplicate meta lines.
slices.Sort[[]string, string](metaLines)
metaLines = slices.Compact[[]string, string](metaLines)
// Add meta lines and return.
return textAddMeta(content, metaLines, commentSign, placement)
}
// VerifyTextFileChecksum checks a checksum in a text file.
func VerifyTextFileChecksum(data []byte, commentSign string) error {
// Split text file into content and jess metadata lines.
content, metaLines, err := textSplit(data, commentSign)
if err != nil {
return err
}
// Verify all checksums.
var checksumsVerified int
for _, line := range metaLines {
if strings.HasPrefix(line, TextChecksumKey) {
// Clean key, delimiters and space.
line = strings.TrimPrefix(line, TextChecksumKey)
line = strings.TrimSpace(line) // Spaces and newlines.
line = strings.Trim(line, ":= ") // Delimiters and spaces.
// Parse checksum.
h, err := lhash.FromBase58(line)
if err != nil {
return fmt.Errorf("%w: failed to parse labeled hash: %w", ErrChecksumFailed, err)
}
// Verify checksum.
if !h.Matches(content) {
return ErrChecksumFailed
}
checksumsVerified++
}
}
// Fail when no checksums were verified.
if checksumsVerified == 0 {
return ErrChecksumMissing
}
return nil
}
func textSplit(data []byte, commentSign string) (content []byte, metaLines []string, err error) {
metaLinePrefix := commentSign + " " + TextKeyPrefix
contentBuf := bytes.NewBuffer(make([]byte, 0, len(data)))
metaLines = make([]string, 0, 1)
// Find jess metadata lines.
s := bufio.NewScanner(bytes.NewReader(data))
s.Split(scanRawLines)
for s.Scan() {
if strings.HasPrefix(s.Text(), metaLinePrefix) {
metaLines = append(metaLines, strings.TrimSpace(strings.TrimPrefix(s.Text(), commentSign)))
} else {
_, _ = contentBuf.Write(s.Bytes())
}
}
if s.Err() != nil {
return nil, nil, s.Err()
}
return bytes.TrimSpace(contentBuf.Bytes()), metaLines, nil
}
func detectLineEndFormat(data []byte) (lineEnd string) {
i := bytes.IndexByte(data, '\n')
switch i {
case -1:
// Default to just newline.
return "\n"
case 0:
// File starts with a newline.
return "\n"
default:
// First newline is at second byte or later.
if bytes.Equal(data[i-1:i+1], []byte("\r\n")) {
return "\r\n"
}
return "\n"
}
}
func textAddMeta(data []byte, metaLines []string, commentSign string, position TextPlacement) ([]byte, error) {
// Prepare new buffer.
requiredSize := len(data)
for _, line := range metaLines {
requiredSize += len(line) + len(commentSign) + 3 // space + CRLF
}
contentBuf := bytes.NewBuffer(make([]byte, 0, requiredSize))
// Find line ending.
lineEnd := detectLineEndFormat(data)
// Find jess metadata lines.
if position == "" {
position = defaultMetaPlacement
}
switch position {
case TextPlacementTop:
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
contentBuf.Write(data)
// Add final newline.
contentBuf.WriteString(lineEnd)
case TextPlacementBottom:
contentBuf.Write(data)
// Add two newlines when appending, as the content was whitespace-stripped first.
contentBuf.WriteString(lineEnd)
contentBuf.WriteString(lineEnd)
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
case TextPlacementAfterComment:
metaWritten := false
s := bufio.NewScanner(bytes.NewReader(data))
s.Split(scanRawLines)
for s.Scan() {
switch {
case metaWritten:
_, _ = contentBuf.Write(s.Bytes())
case strings.HasPrefix(s.Text(), commentSign):
_, _ = contentBuf.Write(s.Bytes())
default:
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
metaWritten = true
_, _ = contentBuf.Write(s.Bytes())
}
}
if s.Err() != nil {
return nil, s.Err()
}
// If we have scanned through the file, and meta was not written, write it now.
if !metaWritten {
textWriteMetaLines(metaLines, commentSign, lineEnd, contentBuf)
}
// Add final newline.
contentBuf.WriteString(lineEnd)
}
return contentBuf.Bytes(), nil
}
func textWriteMetaLines(metaLines []string, commentSign string, lineEnd string, writer io.StringWriter) {
for _, line := range metaLines {
_, _ = writer.WriteString(commentSign)
_, _ = writer.WriteString(" ")
_, _ = writer.WriteString(line)
_, _ = writer.WriteString(lineEnd)
}
}
// scanRawLines is a split function for a Scanner that returns each line of
// text, including any trailing end-of-line marker. The returned line may
// be empty. The end-of-line marker is one optional carriage return followed
// by one mandatory newline. In regular expression notation, it is `\r?\n`.
// The last non-empty line of input will be returned even if it has no
// newline.
func scanRawLines(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\n'); i >= 0 {
// We have a full newline-terminated line.
return i + 1, data[0 : i+1], nil
}
// If we're at EOF, we have a final, non-terminated line. Return it.
if atEOF {
return len(data), data, nil
}
// Request more data.
return 0, nil, nil
}
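
Unlike bufio.ScanLines, scanRawLines keeps the end-of-line marker in each token, which is what lets textSplit and textAddMeta preserve the file's original line endings. A quick illustrative sketch of that behavior, assumed to run inside the filesig package since scanRawLines is unexported:

package filesig

import (
	"bufio"
	"bytes"
	"fmt"
)

// printRawLines shows that each token includes its trailing "\n" or "\r\n",
// and that a final unterminated line is still returned.
func printRawLines() {
	s := bufio.NewScanner(bytes.NewReader([]byte("a\r\nb\nc")))
	s.Split(scanRawLines)
	for s.Scan() {
		fmt.Printf("%q\n", s.Text())
	}
	// Prints: "a\r\n", "b\n", "c"
}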

filesig/text_test.go Normal file

@@ -0,0 +1,179 @@
package filesig
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestTextChecksums(t *testing.T) {
t.Parallel()
// Base test text file.
text := `#!/bin/bash
# Initial
# Comment
# Block
do_something()`
// Test with checksum after comment.
textWithChecksumAfterComment := `#!/bin/bash
# Initial
# Comment
# Block
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
do_something()
`
testTextWithChecksumAfterComment, err := AddTextFileChecksum([]byte(text), "#", TextPlacementAfterComment)
assert.NoError(t, err, "should be able to add checksum")
assert.Equal(t, textWithChecksumAfterComment, string(testTextWithChecksumAfterComment), "should match")
assert.NoError(t,
VerifyTextFileChecksum(testTextWithChecksumAfterComment, "#"),
"checksum should be correct",
)
assert.NoError(t,
VerifyTextFileChecksum(append(
[]byte("\n\n \r\n"),
testTextWithChecksumAfterComment...,
), "#"),
"checksum should be correct",
)
assert.NoError(t,
VerifyTextFileChecksum(append(
testTextWithChecksumAfterComment,
[]byte("\r\n \n \n")...,
), "#"),
"checksum should be correct",
)
// Test with checksum at top.
textWithChecksumAtTop := `# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
#!/bin/bash
# Initial
# Comment
# Block
do_something()
`
testTextWithChecksumAtTop, err := AddTextFileChecksum([]byte(text), "#", TextPlacementTop)
assert.NoError(t, err, "should be able to add checksum")
assert.Equal(t, textWithChecksumAtTop, string(testTextWithChecksumAtTop), "should match")
assert.NoError(t,
VerifyTextFileChecksum(testTextWithChecksumAtTop, "#"),
"checksum should be correct",
)
// Test with checksum at bottom.
textWithChecksumAtBottom := `#!/bin/bash
# Initial
# Comment
# Block
do_something()
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
`
testTextWithChecksumAtBottom, err := AddTextFileChecksum([]byte(text), "#", TextPlacementBottom)
assert.NoError(t, err, "should be able to add checksum")
assert.Equal(t, textWithChecksumAtBottom, string(testTextWithChecksumAtBottom), "should match")
assert.NoError(t,
VerifyTextFileChecksum(testTextWithChecksumAtBottom, "#"),
"checksum should be correct",
)
// Test with multiple checksums.
textWithMultiChecksum := `# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
#!/bin/bash
# Initial
# Comment
# Block
# jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
do_something()
# jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
`
assert.NoError(t,
VerifyTextFileChecksum([]byte(textWithMultiChecksum), "#"),
"checksum should be correct",
)
textWithMultiChecksumOutput := `#!/bin/bash
# Initial
# Comment
# Block
# jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
# jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjqrgZuSpVrexeEYttBso5o
do_something()
`
testTextWithMultiChecksumOutput, err := AddTextFileChecksum([]byte(textWithMultiChecksum), "#", TextPlacementAfterComment)
assert.NoError(t, err, "should be able to add checksum")
assert.Equal(t, textWithMultiChecksumOutput, string(testTextWithMultiChecksumOutput), "should match")
// Test failing checksums.
textWithFailingChecksums := `#!/bin/bash
# Initial
# Comment
# Block
# jess-checksum: Cy2TyVDjEStUqX3wCzCCKTfy228KaQK25ZDbHNmKiF8SPf
# jess-checksum: PTNktssvYCYjZXLFL2QoBk7DYoSz1qF7DJd5XNvtptd41B
# jess-checksum: YdgJFzuvFduk1MwRjZ2JkWQ6tCE1wkjn9xubSggKAdJSX5
# jess-checksum: ZwngYUfUBeUn99HSdrNxkWSNjaaaaaaaaaaaaaaaaaaaaa
do_something()
`
assert.Error(t, VerifyTextFileChecksum([]byte(textWithFailingChecksums), "#"), "should fail")
}
func TestLineEndDetection(t *testing.T) {
t.Parallel()
assert.Equal(t,
"\n",
detectLineEndFormat(nil),
"empty data should default to simple lf ending",
)
assert.Equal(t,
"\n",
detectLineEndFormat([]byte("\n")),
"shoud detect lf ending with empty first line",
)
assert.Equal(t,
"\r\n",
detectLineEndFormat([]byte("\r\n")),
"shoud detect crlf ending with empty first line",
)
assert.Equal(t,
"\n",
detectLineEndFormat([]byte("abc\n")),
"shoud detect lf ending with data on single line",
)
assert.Equal(t,
"\r\n",
detectLineEndFormat([]byte("abc\r\n")),
"shoud detect crlf ending with data on single line",
)
assert.Equal(t,
"\n",
detectLineEndFormat([]byte("abc\nabc\r\n")),
"shoud detect lf ending with data on first line",
)
assert.Equal(t,
"\r\n",
detectLineEndFormat([]byte("abc\r\nabc\n")),
"shoud detect crlf ending with data on first line",
)
}

filesig/text_yaml.go Normal file

@@ -0,0 +1,11 @@
package filesig
// AddYAMLChecksum adds a checksum to a yaml file.
func AddYAMLChecksum(data []byte, placement TextPlacement) ([]byte, error) {
return AddTextFileChecksum(data, "#", placement)
}
// VerifyYAMLChecksum checks a checksum in a yaml file.
func VerifyYAMLChecksum(data []byte) error {
return VerifyTextFileChecksum(data, "#")
}
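
The YAML helpers above reuse the generic text implementation with "#" as the comment sign, so the checksum ends up as a "# jess-checksum: ..." comment line after any leading comment block. A minimal usage sketch (the YAML content is illustrative):

package main

import (
	"fmt"

	"github.com/safing/jess/filesig"
)

func main() {
	yamlData := []byte("# example config\nkey: value\n")

	// Embed the checksum after the leading comment block.
	signed, err := filesig.AddYAMLChecksum(yamlData, filesig.TextPlacementAfterComment)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(signed))

	// Verification succeeds as long as the content (ignoring jess metadata
	// lines and surrounding whitespace) is unchanged.
	fmt.Println(filesig.VerifyYAMLChecksum(signed) == nil) // prints: true
}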

go.mod

@@ -1,6 +1,6 @@
module github.com/safing/jess

-go 1.15
+go 1.20

require (
github.com/AlecAivazis/survey/v2 v2.3.6