-package document
-
-// An epoint document is a clear signed utf-8 text of key-value pairs
-// according to OpenPGP RFC 4880. The body contains a content-type
-// MIME header so it can be used in OpenPGP/MIME RFC 3156 emails with
-// the signature detached. The format of the key-value pairs are
-// similar to MIME header fields.
+// Package document implements epoint document parsing and creation.
//
-// TODO: allow visually aligned field values
-// TODO: handling repeated fields
-// TODO: representation of a list (references)
+// An epoint document is an OpenPGP (RFC 4880) clear signed
+// utf-8 text of key-value pairs.
+// The body contains a content-type MIME header so the document
+// can be used in OpenPGP/MIME (RFC 3156) emails.
+// The format of the key-value pairs are similar to MIME header
+// fields: keys and values are separated by ": ", repeated keys
+// are not allowed, long values can be split before a space.
//
// Example:
//
// -----BEGIN PGP SIGNED MESSAGE-----
// Hash: SHA1
//
-// Content-Type: text/plain.epoint.cert; charset=utf-8
+// Content-Type: text/plain.epoint.type; charset=utf-8
//
// Key: Value1
// Another-Key: Value2
+// Last-Key: Long
+// value that spans
+// multiple lines
// -----BEGIN PGP SIGNATURE-----
+//
// pgp signature
// -----END PGP SIGNATURE-----
+package document
+
+// TODO: error types
+// TODO: optional fields
+// TODO: fields of notice (last notice, serial)
+// TODO: space vs not to space
import (
"bytes"
+ "crypto"
+ "crypto/openpgp"
+ "crypto/openpgp/armor"
+ "crypto/openpgp/packet"
+ "encoding/hex"
+ "fmt"
+ "reflect"
"strconv"
+ "strings"
+ "time"
)
-const ClearSignedHeader = "-----BEGIN PGP SIGNED MESSAGE-----\n"
+const ClearSignedHeader = "-----BEGIN PGP SIGNED MESSAGE-----"
-// Non-standard MIME subtype see RFC 2045 and RFC 2046
-// TODO: verify that Content-Transfer-Encoding is not needed
-// TODO: text/epoint.cert would be shorter
+// MIME type for epoint documents, see RFC 4288
var ContentType = map[string]string{
- "cert": "text/plain.epoint.cert; charset=utf-8",
- "draft": "text/plain.epoint.draft; charset=utf-8",
+ "Draft": "text/vnd.epoint.draft; charset=utf-8",
+ "Notice": "text/vnd.epoint.notice; charset=utf-8",
+ "DebitCert": "text/vnd.epoint.debit; charset=utf-8",
+ "CreditCert": "text/vnd.epoint.credit; charset=utf-8",
+ "BounceCert": "text/vnd.epoint.bounce; charset=utf-8",
}
// OpenPGP signed cleartext document representation
-type ClearSigned struct {
+type Signed struct {
+ // Sign and CleanSigned sets Hash for FormatSigned
+ // TODO: CreationDate
Hash string
- // Signed text (no dash escape, no trailing space)
+ // Signed text (no dash escape, no trailing space, \n new lines)
Body []byte
// Armored detached text signature of the Body
- ArmoredSignature []byte
+ Signature []byte
+}
+
+// parsed epoint document
+type Document struct {
+ Type string
+ Fields map[string]string
+ Order []string
+}
+
+var fieldtype = map[string]string{
+ "Amount": "int",
+ "Authorized-By": "id",
+ "Balance": "int",
+ "Beneficiary": "id",
+ "Date": "date",
+ "Debit-Cert": "id",
+ "Denomination": "text",
+ "Difference": "int",
+ "Draft": "id",
+ "Drawer": "id",
+ "Expiry-Date": "date",
+ "Holder": "id",
+ "Issuer": "id",
+ "Last-Cert": "id",
+ "Last-Credit-Serial": "int",
+ "Last-Debit-Serial": "int",
+ "Maturity-Date": "date",
+ "Nonce": "text",
+ "Notes": "text",
+ "References": "ids",
+ "Serial": "int",
+}
+
+var fieldname = map[string]string{
+ "AuthorizedBy": "Authorized-By",
+ "DebitCert": "Debit-Cert",
+ "ExpiryDate": "Expiry-Date",
+ "LastCert": "Last-Cert",
+ "LastCreditSerial": "Last-Credit-Serial",
+ "LastDebitSerial": "Last-Debit-Serial",
+ "MaturityDate": "Maturity-Date",
}
-// Draft document represents an obligation transfer order
type Draft struct {
- Drawer string // ID of the payer (signer of the document)
- Beneficiary string // ID of the payee
- Amount int64 // amount transfered
- // TODO: issuer keys is enough?
+ Drawer string
+ Beneficiary string
+ Amount int64
Denomination string
- // TODO: maturity date is enough if the only constraint is <= now
- IssueDate int64
- MaturityDate int64 // Draft is bounced before this date
- // TODO: implement limits
- Notes string // Arbitrary text notes of the drawer
- // TODO: hack to make signed draft unique (not required for DSA)
- Nonce string // unique number
- // TODO: server ID might change, do we need it?
- Server string // ID of the server (drawee?)
- //TODO: naming: drawee vs issuer
- Drawee string // ID of the obligation issuer
- // TODO: reference cert ids in the draft
- // useful if more strict date of issue information is needed
- //References []string
+ Issuer string
+ AuthorizedBy string
+ MaturityDate int64 // optional
+ ExpiryDate int64 // optional
+ Nonce string // optional
+ Notes string // optional
+}
+
+type Notice struct {
+ Date int64
+ AuthorizedBy string
+ Notes string
+ References []string
}
-// TODO: cert references: fpr+serial, fpr+certID
-// Certification of obligation after a transfer
-// References previous certificate (if any)
-// and the transfer related other documents
-type Cert struct {
- Holder string // ID of the creditor
- Serial uint32 // serial number, number of certs of the holder
- Date int64 // date of issue
- Balance int64 // current obligation value
+type DebitCert struct {
+ Holder string
+ Serial int64
+ Balance int64
Denomination string
- Issuer string // ID of the obligation issuer (drawee?)
- LastDebitSerial uint32 // serial of the last draft cert or 0
- LastCreditSerial uint32 // serial of the last credit cert or 0
- // TODO: move to References?
- LastCert string // ID of the previous cert if any
- // TODO: determine cert type from diff value only?
- // (>0: credit cert, <0: debit cert, ==0: special)
- Difference int64 // difference from previous balance
- // TODO: enough on the debit side
- Draft string // draft ID related to the transfer
- // TODO: credit side, redundant references
- Drawer string // ID of the drawer in the transaction
- DrawerSerial uint32 // serial of the drawer's related debit cert
- DrawerCert string // ID of the drawer's related debit cert
- // TODO: fingerprint?
- References []string // cert IDs for timestamping the system
+ Issuer string
+ Date int64
+ Difference int64
+ Draft string
+ AuthorizedBy string
+ Notes string // optional
+ LastDebitSerial int64 // 0 if none
+ LastCreditSerial int64 // 0 if none
+ LastCert string // ? if serial == 1
+ References []string // may be empty
+}
+
+type CreditCert struct {
+ Holder string
+ Serial int64
+ Balance int64
+ Denomination string
+ Issuer string
+ Date int64
+ Difference int64
+ Draft string
+ Drawer string
+ DebitCert string
+ AuthorizedBy string
+ Notes string // optional
+ LastDebitSerial int64 // 0 if none
+ LastCreditSerial int64 // 0 if none
+ LastCert string // ? if serial == 1
+ References []string // may be empty
+}
+
+type BounceCert struct {
+ Drawer string
+ Draft string
+ LastCert string // optional
+ Balance int64 // 0 if none
+ Denomination string
+ Issuer string
+ Date int64
+ AuthorizedBy string
+ Notes string // optional
+ References []string // may be empty
+}
+
+// parse an epoint document without checking the signature and format details
+func Parse(s []byte) (iv interface{}, c *Signed, err error) {
+ c, err = ParseSigned(s)
+ if err != nil {
+ return
+ }
+ doc, err := ParseDocument(c.Body)
+ if err != nil {
+ return
+ }
+ iv, err = ParseStruct(doc)
+ return
+}
+
+// format and sign an epoint document
+func Format(iv interface{}, key *openpgp.Entity) (s []byte, err error) {
+ doc, err := FormatStruct(iv)
+ if err != nil {
+ return
+ }
+ body, err := FormatDocument(doc)
+ if err != nil {
+ return
+ }
+ c, err := Sign(body, key)
+ if err != nil {
+ return
+ }
+ return FormatSigned(c)
}
-func DecodeClearSigned(s []byte) (c *ClearSigned, err error) {
- hash, body, sig := split(s)
- if len(sig) == 0 {
- // TODO: split errors
+// verify an epoint document, return the cleaned version as well
+func Verify(c *Signed, key *openpgp.Entity) (cleaned []byte, err error) {
+ err = CleanSigned(c)
+ if err != nil {
+ return
+ }
+ err = VerifyCleaned(c, key)
+ if err != nil {
return
}
- c = &ClearSigned{string(hash), trimspace(dashunesc(body)), sig}
+ return FormatSigned(c)
+}
+
+// verify signature of body with given key
+func VerifyCleaned(c *Signed, key *openpgp.Entity) (err error) {
+ kr := openpgp.EntityList{key}
+ msg := bytes.NewBuffer(c.Body)
+ sig := bytes.NewBuffer(c.Signature)
+ _, err = openpgp.CheckArmoredDetachedSignature(kr, msg, sig)
return
}
-func EncodeClearSigned(c *ClearSigned) (data []byte, err error) {
- s := ClearSignedHeader
+// sign body with given secret key
+func Sign(body []byte, key *openpgp.Entity) (c *Signed, err error) {
+ c = new(Signed)
+ c.Hash = "SHA256"
+ c.Body = body
+ w := new(bytes.Buffer)
+ err = openpgp.ArmoredDetachSignText(w, key, bytes.NewBuffer(c.Body))
+ c.Signature = w.Bytes()
+ return
+}
+
+// split a clear signed document into body and armored signature
+func ParseSigned(s []byte) (c *Signed, err error) {
+ // look for clear signed header
+ for !bytes.HasPrefix(s, []byte(ClearSignedHeader)) {
+ _, s = getLine(s)
+ if len(s) == 0 {
+ err = fmt.Errorf("ParseSigned: clear signed header is missing")
+ return
+ }
+ }
+ s = s[len(ClearSignedHeader):]
+ // end of line after the header
+ empty, s := getLine(s)
+ if len(empty) != 0 {
+ err = fmt.Errorf("ParseSigned: bad clear signed header")
+ return
+ }
+ // skip all hash headers, section 7.
+ for bytes.HasPrefix(s, []byte("Hash: ")) {
+ _, s = getLine(s)
+ }
+ // skip empty line
+ empty, s = getLine(s)
+ if len(empty) != 0 {
+ err = fmt.Errorf("ParseSigned: expected an empty line after armor headers")
+ return
+ }
+ lines := [][]byte{}
+ for !bytes.HasPrefix(s, []byte("-----BEGIN")) {
+ var line []byte
+ line, s = getLine(s)
+ // dash unescape, section 7.1.
+ if bytes.HasPrefix(line, []byte("- ")) {
+ line = line[2:]
+ }
+ // empty values are not supported: "Key: \n"
+ lines = append(lines, bytes.TrimRight(line, " \t"))
+ }
+ c = new(Signed)
+ // last line is not closed by \n
+ c.Body = bytes.Join(lines, []byte("\n"))
+ // signature is just the rest of the input data
+ c.Signature = s
+ return
+}
+
+// clean up, check and reencode signature
+// used on drafts before calculating the signed document hash
+func CleanSigned(c *Signed) (err error) {
+ b, err := armor.Decode(bytes.NewBuffer(c.Signature))
+ if err != nil {
+ return
+ }
+ if b.Type != openpgp.SignatureType {
+ err = fmt.Errorf("CleanSigned: invalid armored signature type")
+ return
+ }
+ p, err := packet.Read(b.Body)
+ if err != nil {
+ return
+ }
+ sig, ok := p.(*packet.Signature)
+ if !ok {
+ err = fmt.Errorf("CleanSigned: invalid signature packet")
+ return
+ }
+ // section 5.2.3
+ if sig.SigType != packet.SigTypeText {
+ err = fmt.Errorf("CleanSigned: expected text signature")
+ return
+ }
+ switch sig.Hash {
+ case crypto.SHA1:
+ c.Hash = "SHA1"
+ case crypto.SHA256:
+ c.Hash = "SHA256"
+ default:
+ err = fmt.Errorf("CleanSigned: expected SHA1 or SHA256 signature hash")
+ return
+ }
+ // TODO: check CreationTime and other subpackets
+ if sig.SigLifetimeSecs != nil && *sig.SigLifetimeSecs != 0 {
+ err = fmt.Errorf("CleanSigned: signature must not expire")
+ return
+ }
+ out := new(bytes.Buffer)
+ w, err := armor.Encode(out, openpgp.SignatureType, nil)
+ if err != nil {
+ return
+ }
+ err = sig.Serialize(w)
+ if err != nil {
+ return
+ }
+ err = w.Close()
+ if err != nil {
+ return
+ }
+ c.Signature = out.Bytes()
+ return
+}
+
+// create clear signed document
+func FormatSigned(c *Signed) (data []byte, err error) {
+ s := ClearSignedHeader + "\n"
if c.Hash != "" {
s += "Hash: " + c.Hash + "\n"
}
- // TODO: check if space was trimmed from body before signature
s += "\n"
- s += string(dashesc(c.Body))
+ s += string(c.Body)
s += "\n"
- s += string(c.ArmoredSignature)
+ s += string(c.Signature)
data = []byte(s)
return
}
-func ParseFields(s []byte) (fields map[string]string, rest []byte, err error) {
- // TODO: error
- fields = make(map[string]string)
- rest = s
- for len(rest) > 0 {
- var line []byte
- line, rest = getLine(rest)
- // empty line after the parsed fields (consumed)
- if len(line) == 0 {
+// parse type and fields of a document body
+func ParseDocument(body []byte) (doc *Document, err error) {
+ // parse content type header first
+ fields, s, err := ParseFields(body)
+ if err != nil {
+ return
+ }
+ ctype, ok := fields["Content-Type"]
+ if len(fields) != 1 || !ok {
+ return nil, fmt.Errorf("ParseBody: expected a single Content-Type header field")
+ }
+ doc = new(Document)
+ for k, v := range ContentType {
+ if ctype == v {
+ doc.Type = k
break
}
- i := bytes.Index(line, []byte(": "))
- // TODO: long lines can be broken up in MIME
- if i < 0 {
- return nil, nil, nil
- }
- // TODO: repeated fields
- fields[string(line[:i])] = string(line[i+2:])
+ }
+ if doc.Type == "" {
+ return nil, fmt.Errorf("ParseBody: unknown Content-Type: %s", ctype)
+ }
+ // TODO: doc.Order
+ doc.Fields, s, err = ParseFields(s)
+ if err == nil && len(s) > 0 {
+ err = fmt.Errorf("ParseBody: extra data after fields: %q", s)
}
return
}
-func ParseBody(s []byte) (t string, fields map[string]string, err error) {
- // parse content type header first
- mime, s, err := ParseFields(s)
- if len(mime) != 1 {
+// create document body
+func FormatDocument(doc *Document) (body []byte, err error) {
+ ctype, ok := ContentType[doc.Type]
+ if !ok {
+ err = fmt.Errorf("FormatDocument: unknown document type: %s", doc.Type)
return
}
- for k, v := range ContentType {
- if mime["Content-Type"] == v {
- t = k
- fields, s, err = ParseFields(s)
- if len(s) > 0 {
- fields = nil
- break
+ s := "Content-Type: " + ctype + "\n\n"
+ for _, k := range doc.Order {
+ s += k + ": " + doc.Fields[k] + "\n"
+ }
+ return []byte(s), nil
+}
+
+// parse doc fields into a struct according to the document type
+func ParseStruct(doc *Document) (iv interface{}, err error) {
+ switch doc.Type {
+ case "Draft":
+ iv = new(Draft)
+ case "Notice":
+ iv = new(Notice)
+ case "DebitCert":
+ iv = new(DebitCert)
+ case "CreditCert":
+ iv = new(CreditCert)
+ case "BounceCert":
+ iv = new(BounceCert)
+ default:
+ err = fmt.Errorf("ParseStruct: unkown doc type: %s", doc.Type)
+ return
+ }
+ seen := make(map[string]bool)
+ v := reflect.ValueOf(iv).Elem()
+ t := v.Type()
+ n := v.NumField()
+ for i := 0; i < n; i++ {
+ ft := t.Field(i)
+ fv := v.Field(i)
+ key := fieldname[ft.Name]
+ if key == "" {
+ key = ft.Name
+ }
+ seen[key] = true
+ s, ok := doc.Fields[key]
+ if !ok {
+ return nil, fmt.Errorf("ParseStruct: field %s of %s is missing\n", key, t.Name())
+ }
+ switch fieldtype[key] {
+ case "id":
+ var val string
+ val, err = parseId(s)
+ fv.SetString(val)
+ case "text":
+ var val string
+ val, err = parseString(s)
+ fv.SetString(val)
+ case "int":
+ var val int64
+ val, err = strconv.Atoi64(s)
+ fv.SetInt(val)
+ case "date":
+ var val int64
+ val, err = parseDate(s)
+ fv.SetInt(val)
+ case "ids":
+ ids := strings.Split(s, " ")
+ val := make([]string, len(ids))
+ for j, id := range ids {
+ val[j], err = parseId(id)
+ if err != nil {
+ return
+ }
}
+ fv.Set(reflect.ValueOf(val))
+ default:
+ panic("bad field type " + key + " " + fieldtype[key])
+ }
+ if err != nil {
return
}
}
- // TODO: error
+ if len(doc.Fields) != n {
+ for k := range doc.Fields {
+ if !seen[k] {
+ err = fmt.Errorf("ParseStruct: unknown field %s in %s", k, t.Name())
+ return
+ }
+ }
+ }
return
}
-/* rendering with reflect
-func render(d interface{}) (s []byte, err error) {
- a := []string{}
- v := reflect.ValueOf(d)
+// turn a struct into a document
+func FormatStruct(iv interface{}) (doc *Document, err error) {
+ v := reflect.ValueOf(iv)
+ if v.Kind() != reflect.Ptr || v.IsNil() || v.Elem().Kind() != reflect.Struct {
+ panic("input is not a pointer to struct")
+ }
+ v = v.Elem()
t := v.Type()
n := v.NumField()
+ doc = new(Document)
+ doc.Type = t.Name()
+ doc.Fields = make(map[string]string)
for i := 0; i < n; i++ {
- f := t.Field(i)
+ ft := t.Field(i)
fv := v.Field(i)
- fs := ""
- switch fv.Type() {
- case reflect.String:
- fs = fv.String() // TODO: quote, esc (\n..)
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- fs = strconv.Itoa64(fv.Int())
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- fs = strconv.Uitoa64(fv.Uint())
+ key := fieldname[ft.Name]
+ if key == "" {
+ key = ft.Name
+ }
+ val := ""
+ switch fieldtype[key] {
+ case "id":
+ val = formatId(fv.String())
+ case "text":
+ val = formatString(fv.String())
+ case "int":
+ val = strconv.Itoa64(fv.Int())
+ case "date":
+ val = formatDate(fv.Int())
+ case "ids":
+ k := fv.Len()
+ for j := 0; j < k; j++ {
+ if j > 0 {
+ val += "\n "
+ }
+ val += formatId(fv.Index(j).String())
+ }
default:
- return // TODO: error
+ panic("bad field type " + key + " " + fieldtype[key])
}
- a = append(a, f.Name, ": ", fs, "\n")
+ doc.Fields[key] = val
+ doc.Order = append(doc.Order, key)
}
- s = strings.Join(a, "")
return
}
-*/
-
-func checkID(s string) (string, error) {
- return s, nil
-}
-
-func ParseDraft(s []byte) (draft *Draft, err error) {
- t, fields, err := ParseBody(s)
- if err != nil {
- return
- }
- if t != "draft" {
- return
- }
- draftFields := []string{
- "Drawer",
- "Beneficiary",
- "Amount",
- "Denomination",
- "IssueDate",
- "MaturityDate",
- "Notes",
- "Nonce",
- "Server",
- "Drawee",
- }
- if len(fields) != len(draftFields) {
- return
- }
- for _, f := range draftFields {
- _, ok := fields[f]
- if !ok {
+func ParseFields(s []byte) (fields map[string]string, rest []byte, err error) {
+ rest = s
+ fields = make(map[string]string)
+ lastkey := ""
+ for len(rest) > 0 {
+ var line []byte
+ line, rest = getLine(rest)
+ // empty line after the parsed fields is consumed
+ if len(line) == 0 {
+ break
+ }
+ // TODO: empty line: " \n"
+ if line[0] == ' ' {
+ if lastkey == "" {
+ err = fmt.Errorf("ParseFields: expected a field, not ' '")
+ return
+ }
+ fields[lastkey] += string(line)
+ continue
+ }
+ // TODO: empty value: "Key: \n"
+ i := bytes.Index(line, []byte(": "))
+ if i < 0 {
+ err = fmt.Errorf("ParseFields: missing ': '")
+ break
+ }
+ lastkey = string(line[:i])
+ if _, ok := fields[lastkey]; ok {
+ err = fmt.Errorf("ParseFields: repeated fields are not allowed")
return
}
+ fields[lastkey] = string(line[i+2:])
}
+ return
+}
- draft = new(Draft)
- draft.Drawer, _ = checkID(fields["Drawer"])
- draft.Beneficiary, _ = checkID(fields["Beneficiary"])
- draft.Amount, _ = strconv.Atoi64(fields["Amount"])
- draft.Denomination = fields["Denomination"]
- draft.IssueDate, _ = strconv.Atoi64(fields["IssueDate"])
- draft.MaturityDate, _ = strconv.Atoi64(fields["MaturityDate"])
- draft.Notes = fields["Notes"]
- draft.Nonce = fields["Nonce"]
- draft.Server, _ = checkID(fields["Server"])
- draft.Drawee, _ = checkID(fields["Drawee"])
-
- // more checks..
+// TODO: limit errors
- return
+func parseId(s string) (string, error) {
+ if len(s) != 40 {
+ return "", fmt.Errorf("parseId: expected 40 characters; got %d", len(s))
+ }
+ dst := make([]byte, len(s)/2)
+ _, err := hex.Decode(dst, []byte(s))
+ return s, err
}
-func RenderDraft(draft *Draft) (data []byte, err error) {
- s := "Content-Type: " + ContentType["draft"] + "\n"
- s += "\n"
- s += "Drawer: " + draft.Drawer + "\n"
- s += "Beneficiary: " + draft.Beneficiary + "\n"
- s += "Amount: " + strconv.Itoa64(draft.Amount) + "\n"
- s += "Denomination: " + draft.Denomination + "\n"
- s += "IssueDate: " + strconv.Itoa64(draft.IssueDate) + "\n"
- s += "MaturityDate: " + strconv.Itoa64(draft.MaturityDate) + "\n"
- s += "Notes: " + draft.Notes + "\n"
- s += "Nonce: " + draft.Nonce + "\n"
- s += "Server: " + draft.Server + "\n"
- s += "Drawee: " + draft.Drawee + "\n"
- data = []byte(s)
- return
+func formatId(s string) string {
+ return s
}
-func ParseCert(s []byte) (cert Cert, err error) {
- return
+func parseString(s string) (string, error) {
+ if len(s) > 140 {
+ return "", fmt.Errorf("parseString: 140 chars limit is exceeded")
+ }
+ return s, nil
}
-func RenderCert(cert Cert) (s []byte, err error) {
- return
+func formatString(s string) string {
+ return s
}
-func splitline(s []byte) (line, rest []byte) {
- i := bytes.IndexByte(s, '\n')
- if i < 0 {
- line = s
- } else {
- rest = s[i+1:]
- if i > 0 && s[i-1] == '\r' {
- i--
- }
- line = s[:i]
+func parseDate(s string) (int64, error) {
+ t, err := time.Parse(time.RFC3339, s)
+ if err != nil {
+ return 0, err
}
- return
+ return t.Seconds(), nil
+}
+
+func formatDate(i int64) string {
+ return time.SecondsToUTC(i).Format(time.RFC3339)
}
func getLine(data []byte) (line, rest []byte) {
	i := bytes.IndexByte(data, '\n')
	var j int
	if i < 0 {
		i = len(data)
		j = i
	} else {
		j = i + 1
		if i > 0 && data[i-1] == '\r' {
			i--
		}
	}
	return data[:i], data[j:]
}
-
-func trimspace(s []byte) []byte {
- a := bytes.Split(s, []byte("\n"))
- for i := range a {
- a[i] = bytes.TrimRight(a[i], " \t\r")
- }
- return bytes.Join(a, []byte("\n"))
-}
-
-func dashesc(s []byte) []byte {
- r := bytes.Replace(s, []byte("\n-"), []byte("\n- -"), -1)
- if len(r) > 0 && r[0] == '-' {
- r = append([]byte("- "), r...)
- }
- return r
-}
-
-func dashunesc(s []byte) []byte {
- r := bytes.Replace(s, []byte("\n- "), []byte("\n"), -1)
- if len(r) >= 2 && r[0] == '-' && r[1] == ' ' {
- r = r[2:]
- }
- return r
-}
-
-// RFC 4880 is unclear about multiple Hash header semantics, section 7. says
-// "One or more "Hash" Armor Headers"
-// then
-// "If more than one message digest is used in the signature, the "Hash"
-// armor header contains a comma-delimited list of used message digests."
-// in section 6.2.
-// "there is no limit to the length of Armor Headers. Care should
-// be taken that the Armor Headers are short enough to survive
-// transport. One way to do this is to repeat an Armor Header key
-// multiple times with different values for each so that no one line is
-// overly long."
-// we accept a single Hash header with a list of hash algorithms for now
-// but use the one specified by the signature
-
-func split(s []byte) (hash, body, sig []byte) {
- if !bytes.HasPrefix(s, []byte(ClearSignedHeader)) {
- return
- }
- s = s[len(ClearSignedHeader):]
- // only allow a single Hash: header
- if bytes.HasPrefix(s, []byte("Hash: ")) {
- s = s[len("Hash: "):]
- hash, s = getLine(s)
- }
- // skip empty line
- empty, s := getLine(s)
- if len(empty) != 0 {
- return
- }
- i := bytes.Index(s, []byte("\n-----BEGIN"))
- if i < 0 {
- return
- }
- body, sig = s[:i], s[i+1:]
- if i > 0 && body[i-1] == '\r' {
- body = body[:i-1]
- }
- return
-}