-package document
-
-// An epoint document is a clear signed utf-8 text of key-value pairs
-// according to OpenPGP RFC 4880. The body contains a content-type
-// MIME header so it can be used in OpenPGP/MIME RFC 3156 emails with
-// the signature detached. The format of the key-value pairs are
-// similar to MIME header fields.
+// Package document implements epoint document parsing and creation.
+//
+// An epoint document is an OpenPGP (RFC 4880) clear signed
+// utf-8 text of key-value pairs.
+// The body contains a content-type MIME header so the document
+// can be used in OpenPGP/MIME (RFC 3156) emails.
+// The format of the key-value pairs is similar to MIME header
+// fields: keys and values are separated by ": ", repeated keys
+// are not allowed, long values can be split before a space.
//
// Example:
//
// -----BEGIN PGP SIGNED MESSAGE-----
// Hash: SHA1
//
-// Content-Type: text/plain.epoint.cert; charset=utf-8
+// Content-Type: text/plain.epoint.type; charset=utf-8
//
// Key: Value1
// Another-Key: Value2
+// Last-Key: Long
+// value that spans
+// multiple lines
// -----BEGIN PGP SIGNATURE-----
+//
// pgp signature
// -----END PGP SIGNATURE-----
+package document
+
+// TODO: error wrapper (so reporting to user or creating bounce cert is simple)
+// TODO: optional fields: exact semantics ("" vs "-" vs nil)
+// TODO: trailing space handling in ParseFields
+// TODO: fields of notice (last notice, serial, failure notice,..)
+// TODO: limits and cert type specific input validation
import (
"bytes"
+ "crypto"
+ "crypto/openpgp"
+ "crypto/openpgp/armor"
+ "crypto/openpgp/packet"
+ "crypto/sha1"
"encoding/hex"
"fmt"
"reflect"
"time"
)
-const ClearSignedHeader = "-----BEGIN PGP SIGNED MESSAGE-----\n"
+// limits
+const (
+ MaxFields = 20
+ MaxLineLength = 160 // 1 sha512 + 1 key (without \n)
+ MaxValueLength = 1300 // 20 sha256 space separated (without \n)
+ MaxNonceLength = 20
+ MaxDenominationLength = 100
+)
+
+const ClearSignedHeader = "-----BEGIN PGP SIGNED MESSAGE-----"
-// (non-standard) MIME subtype for epoint documents, see RFC 2045 and RFC 2046
+// MIME type for epoint documents, see RFC 4288
var ContentType = map[string]string{
- "cert": "text/plain.epoint.cert; charset=utf-8",
- "draft": "text/plain.epoint.draft; charset=utf-8",
+ "Draft": "text/vnd.epoint.draft; charset=utf-8",
+ "Notice": "text/vnd.epoint.notice; charset=utf-8",
+ "DebitCert": "text/vnd.epoint.debit; charset=utf-8",
+ "CreditCert": "text/vnd.epoint.credit; charset=utf-8",
+ "BounceCert": "text/vnd.epoint.bounce; charset=utf-8",
}
// OpenPGP signed cleartext document representation
-type ClearSigned struct {
+type Signed struct {
+	// Sign and CleanSigned set Hash for FormatSigned
+ // TODO: CreationDate
Hash string
- // Signed text (no dash escape, no trailing space)
+ // Signed text (no dash escape, no trailing space, \n new lines)
Body []byte
// Armored detached text signature of the Body
- ArmoredSignature []byte
+ Signature []byte
}
-type Field struct {
- Key string
- Value string
+// parsed epoint document
+type Document struct {
+ Type string
+ Fields map[string]string
+ Order []string
+}
+
+var fieldtype = map[string]string{
+ "Amount": "int",
+ "Authorized-By": "id",
+ "Balance": "int",
+ "Beneficiary": "id",
+ "Date": "date",
+ "Debit-Cert": "id",
+ "Denomination": "text",
+ "Difference": "int",
+ "Draft": "id",
+ "Drawer": "id",
+ "Expiry-Date": "date",
+ "Holder": "id",
+ "Issuer": "id",
+ "Last-Cert": "id",
+ "Last-Credit-Serial": "int",
+ "Last-Debit-Serial": "int",
+ "Maturity-Date": "date",
+ "Nonce": "text",
+ "Notes": "text",
+ "References": "ids",
+ "Serial": "int",
+}
+
+var fieldname = map[string]string{
+ "AuthorizedBy": "Authorized-By",
+ "DebitCert": "Debit-Cert",
+ "ExpiryDate": "Expiry-Date",
+ "LastCert": "Last-Cert",
+ "LastCreditSerial": "Last-Credit-Serial",
+ "LastDebitSerial": "Last-Debit-Serial",
+ "MaturityDate": "Maturity-Date",
}
-// Draft document represents an obligation transfer order
type Draft struct {
- Drawer string `marshal:"id"` // ID of the payer (signer of the document)
- Beneficiary string `marshal:"id"` // ID of the payee
- Amount int64 // amount transfered
+ Drawer string
+ Beneficiary string
+ Amount int64
Denomination string
- IssueDate int64 `marshal:"date" key:"Issue-Date"`
- // Draft is bounced before this date
- MaturityDate int64 `marshal:"date" key:"Maturity-Date"`
- Notes string // Arbitrary text notes of the drawer
- Nonce string // unique number
- Server string `marshal:"id"` // ID of the server
- Drawee string `marshal:"id"` // ID of the obligation issuer
- // useful if more strict date of issue information is needed
- //References []string
+ Issuer string
+ AuthorizedBy string
+ MaturityDate *int64 // optional
+ ExpiryDate *int64 // optional
+ Nonce *string // optional
+ Notes *string // optional
}
-// Obligation certificate after a transfer
-// References previous certificate (if any)
-// and the transfer related other documents
-type Cert struct {
- Holder string `marshal:"id"` // ID of the creditor
- Serial uint32 // serial number, number of certs of the holder
- Date int64 `marshal:"date"` // date of issue
- Balance int64 // current obligation value
+type Notice struct {
+ Date int64
+ AuthorizedBy string
+ Notes *string // optional
+ References []string // may be empty (startup notice)
+}
+
+type DebitCert struct {
+ Holder string
+ Serial int64
+ Balance int64
Denomination string
- Server string `marshal:"id"` // ID of the server
- Issuer string `marshal:"id"` // ID of the obligation issuer (drawee?)
- LastDebitSerial uint32 `key:"Last-Debit-Serial"` // serial of the last draft cert or 0
- LastCreditSerial uint32 `key:"Last-Credit-Serial"` // serial of the last credit cert or 0
- LastCert string `marshal:"id" key:"Last-Cert"` // ID of the previous cert if any
- Difference int64 // difference from previous balance
- Draft string `marshal:"id"` // draft ID related to the transfer
- Drawer string `marshal:"id"` // ID of the drawer in the transaction
- DrawerSerial uint32 `key:"Drawer-Serial"` // serial of the drawer's related debit cert
- DrawerCert string `marshal:"id" key:"Drawer-Cert"` // ID of the drawer's related debit cert
- Notes string // Arbitrary text notes of the server (signer)
- References []string `marshal:"idlist"` // cert IDs for timestamping the system
+ Issuer string
+ Date int64
+ Difference int64
+ Draft string
+ Beneficiary string
+ AuthorizedBy string
+ Notes *string // optional
+ LastDebitSerial int64 // 0 if none
+ LastCreditSerial int64 // 0 if none
+ LastCert *string // ? if serial == 1
+ References []string
}
-func DecodeClearSigned(s []byte) (c *ClearSigned, err error) {
- hash, body, sig := split(s)
- if len(sig) == 0 {
- err = fmt.Errorf("DecodeClearSigned could parse the signed document")
- return
- }
- c = &ClearSigned{string(hash), trimspace(dashunesc(body)), sig}
- return
+type CreditCert struct {
+ Holder string
+ Serial int64
+ Balance int64
+ Denomination string
+ Issuer string
+ Date int64
+ Difference int64
+ Draft string
+ Drawer string
+ DebitCert string
+ AuthorizedBy string
+ Notes *string // optional
+ LastDebitSerial int64 // 0 if none
+ LastCreditSerial int64 // 0 if none
+ LastCert *string // ? if serial == 1
+ References []string
}
-func EncodeClearSigned(c *ClearSigned) (data []byte, err error) {
- s := ClearSignedHeader
- if c.Hash != "" {
- s += "Hash: " + c.Hash + "\n"
+type BounceCert struct {
+ Drawer string
+ Draft string
+ LastCert *string // optional
+ Balance int64 // 0 if none
+ Date int64
+ AuthorizedBy string
+ Notes *string // optional
+ References []string
+}
+
+// sha1 sum of the (cleaned) document as uppercase hex string
+func Id(d []byte) string {
+ h := sha1.New()
+ h.Write(d)
+ return fmt.Sprintf("%040X", h.Sum())
+}
+
+// parse an epoint document without checking the signature and format details
+func Parse(s []byte) (iv interface{}, c *Signed, err error) {
+ c, err = ParseSigned(s)
+ if err != nil {
+ return
}
- // TODO: check if space was trimmed from body before signature
- s += "\n"
- s += string(dashesc(c.Body))
- s += "\n"
- s += string(c.ArmoredSignature)
- data = []byte(s)
+ doc, err := ParseDocument(c.Body)
+ if err != nil {
+ return
+ }
+ iv, err = ParseStruct(doc)
return
}
-func ParseFields(s []byte) (fields []Field, rest []byte, err error) {
- rest = s
- for len(rest) > 0 {
- var line []byte
- line, rest = getLine(rest)
- // empty line after the parsed fields (consumed)
- if len(line) == 0 {
- break
- }
- i := bytes.Index(line, []byte(": "))
- if i < 0 {
- err = fmt.Errorf("ParseFields: missing ': '\n")
- break
- }
- fields = append(fields, Field{string(line[:i]), string(line[i+2:])})
+// format and sign an epoint document
+func Format(iv interface{}, key *openpgp.Entity) (s []byte, err error) {
+ doc, err := FormatStruct(iv)
+ if err != nil {
+ return
}
- return
+ body, err := FormatDocument(doc)
+ if err != nil {
+ return
+ }
+ c, err := Sign(body, key)
+ if err != nil {
+ return
+ }
+ return FormatSigned(c)
}
-func ParseBody(s []byte) (doctype string, fields []Field, err error) {
- // parse content type header first
- fs, s, err := ParseFields(s)
+// verify an epoint document, return the cleaned version as well
+func Verify(c *Signed, key openpgp.KeyRing) (cleaned []byte, err error) {
+ err = CleanSigned(c)
if err != nil {
return
}
- if len(fs) != 1 || fs[0].Key != "Content-Type" {
- return "", nil, fmt.Errorf("ParseBody: single Content-Type header was expected\n")
+ err = VerifyCleaned(c, key)
+ if err != nil {
+ return
}
- ctype := fs[0].Value
- for k, v := range ContentType {
- if ctype == v {
- doctype = k
- break
+ return FormatSigned(c)
+}
+
+// verify signature of body with given key
+func VerifyCleaned(c *Signed, key openpgp.KeyRing) (err error) {
+ msg := bytes.NewBuffer(c.Body)
+ sig := bytes.NewBuffer(c.Signature)
+ _, err = openpgp.CheckArmoredDetachedSignature(key, msg, sig)
+ return
+}
+
+// sign body with given secret key
+func Sign(body []byte, key *openpgp.Entity) (c *Signed, err error) {
+ c = new(Signed)
+ c.Hash = "SHA256"
+ c.Body = body
+ w := new(bytes.Buffer)
+ err = openpgp.ArmoredDetachSignText(w, key, bytes.NewBuffer(c.Body))
+ c.Signature = w.Bytes()
+ return
+}
+
+// split a clear signed document into body and armored signature
+func ParseSigned(s []byte) (c *Signed, err error) {
+ // look for clear signed header
+ for !bytes.HasPrefix(s, []byte(ClearSignedHeader)) {
+ _, s = getLine(s)
+ if len(s) == 0 {
+ err = fmt.Errorf("ParseSigned: clear signed header is missing")
+ return
}
}
- if doctype == "" {
- return "", nil, fmt.Errorf("ParseBody: unknown Content-Type: %s", ctype)
+ s = s[len(ClearSignedHeader):]
+ // end of line after the header
+ empty, s := getLine(s)
+ if len(empty) != 0 {
+ err = fmt.Errorf("ParseSigned: bad clear signed header")
+ return
}
- fields, s, err = ParseFields(s)
- if err == nil && len(s) > 0 {
- err = fmt.Errorf("ParseBody: extra data after fields: %q", s)
+ // skip all hash headers, section 7.
+ for bytes.HasPrefix(s, []byte("Hash: ")) {
+ _, s = getLine(s)
}
+ // skip empty line
+ empty, s = getLine(s)
+ if len(empty) != 0 {
+ err = fmt.Errorf("ParseSigned: expected an empty line after armor headers")
+ return
+ }
+ lines := [][]byte{}
+ for !bytes.HasPrefix(s, []byte("-----BEGIN")) {
+ var line []byte
+ line, s = getLine(s)
+ // dash unescape, section 7.1.
+ if bytes.HasPrefix(line, []byte("- ")) {
+ line = line[2:]
+ }
+ // empty values are not supported: "Key: \n"
+ lines = append(lines, bytes.TrimRight(line, " \t"))
+ }
+ c = new(Signed)
+ // last line is not closed by \n
+ c.Body = bytes.Join(lines, []byte("\n"))
+ // signature is just the rest of the input data
+ c.Signature = s
return
}
-func parse(s []byte, doctype string) (v interface{}, err error) {
- t, fields, err := ParseBody(s)
+// clean up, check and reencode signature
+// used on drafts before calculating the signed document hash
+func CleanSigned(c *Signed) (err error) {
+ b, err := armor.Decode(bytes.NewBuffer(c.Signature))
if err != nil {
return
}
- if doctype != t && doctype != "" {
- err = fmt.Errorf("parse: expected doctype %s; got %s", doctype, t)
+ if b.Type != openpgp.SignatureType {
+ err = fmt.Errorf("CleanSigned: invalid armored signature type")
return
}
- switch t {
- case "draft":
- v = new(Draft)
- case "cert":
- v = new(Cert)
- default:
- err = fmt.Errorf("parse: unkown doc type: %s", t)
+ p, err := packet.Read(b.Body)
+ if err != nil {
return
}
- err = unmarshal(fields, v)
- return
-}
-
-// TODO: limit errors
-func render(v interface{}) ([]byte, error) {
- doctype := ""
- switch v.(type) {
- case *Draft:
- doctype = "draft"
- case *Cert:
- doctype = "cert"
+ sig, ok := p.(*packet.Signature)
+ if !ok {
+ err = fmt.Errorf("CleanSigned: invalid signature packet")
+ return
+ }
+ // section 5.2.3
+ if sig.SigType != packet.SigTypeText {
+ err = fmt.Errorf("CleanSigned: expected text signature")
+ return
+ }
+ switch sig.Hash {
+ case crypto.SHA1:
+ c.Hash = "SHA1"
+ case crypto.SHA256:
+ c.Hash = "SHA256"
default:
- panic("reder: unknown type")
+ err = fmt.Errorf("CleanSigned: expected SHA1 or SHA256 signature hash")
+ return
}
- s := "Content-Type: " + ContentType[doctype] + "\n\n"
- fields := marshal(v)
- for _, f := range fields {
- s += f.Key + ": " + f.Value + "\n"
+ // TODO: check CreationTime and other subpackets
+ if sig.SigLifetimeSecs != nil && *sig.SigLifetimeSecs != 0 {
+ err = fmt.Errorf("CleanSigned: signature must not expire")
+ return
}
- return []byte(s), nil
-}
-
-func ParseDraft(s []byte) (draft *Draft, err error) {
- v, err := parse(s, "draft")
+ out := new(bytes.Buffer)
+ w, err := armor.Encode(out, openpgp.SignatureType, nil)
+ if err != nil {
+ return
+ }
+ err = sig.Serialize(w)
+ if err != nil {
+ return
+ }
+ err = w.Close()
if err != nil {
return
}
- draft = v.(*Draft)
+ c.Signature = out.Bytes()
return
}
-func RenderDraft(draft *Draft) ([]byte, error) {
- return render(draft)
+// create clear signed document
+func FormatSigned(c *Signed) (data []byte, err error) {
+ s := ClearSignedHeader + "\n"
+ if c.Hash != "" {
+ s += "Hash: " + c.Hash + "\n"
+ }
+ s += "\n"
+ s += string(c.Body)
+ s += "\n"
+ s += string(c.Signature)
+ data = []byte(s)
+ return
}
-func ParseCert(s []byte) (cert *Cert, err error) {
- v, err := parse(s, "cert")
+// parse type and fields of a document body
+func ParseDocument(body []byte) (doc *Document, err error) {
+ // parse content type header first
+ fields, s, err := ParseFields(body)
if err != nil {
- return nil, err
+ return
+ }
+ ctype, ok := fields["Content-Type"]
+ if len(fields) != 1 || !ok {
+		return nil, fmt.Errorf("ParseDocument: expected a single Content-Type header field")
+ }
+ doc = new(Document)
+ for k, v := range ContentType {
+ if ctype == v {
+ doc.Type = k
+ break
+ }
+ }
+ if doc.Type == "" {
+		return nil, fmt.Errorf("ParseDocument: unknown Content-Type: %s", ctype)
+ }
+ // TODO: doc.Order
+ doc.Fields, s, err = ParseFields(s)
+ if err == nil && len(s) > 0 {
+		err = fmt.Errorf("ParseDocument: extra data after fields: %q", s)
}
- cert = v.(*Cert)
return
}
-func RenderCert(cert *Cert) ([]byte, error) {
- return render(cert)
-}
-
-func formatId(s string) string {
- return fmt.Sprintf("%040X", s)
-}
-
-func parseId(s string) (string, error) {
- dst := make([]byte, 20)
- if len(s) != 40 {
- return "", fmt.Errorf("parseId: expected 40 characters; got %d", len(s))
+// create document body
+func FormatDocument(doc *Document) (body []byte, err error) {
+ ctype, ok := ContentType[doc.Type]
+ if !ok {
+ err = fmt.Errorf("FormatDocument: unknown document type: %s", doc.Type)
+ return
}
- _, err := hex.Decode(dst, []byte(s))
- return string(dst), err
-}
-
-func quoteValue(s string) string {
- return s
-}
-
-func unquoteValue(s string) (string, error) {
- return s, nil
-}
-
-func formatDate(i int64) string {
- return time.SecondsToUTC(i).Format(time.RFC3339)
-}
-
-func parseDate(s string) (int64, error) {
- t, err := time.Parse(time.RFC3339, s)
- if err != nil {
- return 0, err
+ s := "Content-Type: " + ctype + "\n\n"
+ for _, k := range doc.Order {
+ s += k + ": " + doc.Fields[k] + "\n"
}
- return t.Seconds(), nil
+ return []byte(s), nil
}
-func marshal(iv interface{}) []Field {
- v := reflect.ValueOf(iv)
- if v.Kind() != reflect.Ptr || v.IsNil() || v.Elem().Kind() != reflect.Struct {
- panic("unmarshal: input is not a pointer to struct")
+// parse doc fields into a struct according to the document type
+func ParseStruct(doc *Document) (iv interface{}, err error) {
+ switch doc.Type {
+ case "Draft":
+ iv = new(Draft)
+ case "Notice":
+ iv = new(Notice)
+ case "DebitCert":
+ iv = new(DebitCert)
+ case "CreditCert":
+ iv = new(CreditCert)
+ case "BounceCert":
+ iv = new(BounceCert)
+ default:
+		err = fmt.Errorf("ParseStruct: unknown doc type: %s", doc.Type)
+ return
}
- v = v.Elem()
+ seen := make(map[string]bool)
+ v := reflect.ValueOf(iv).Elem()
t := v.Type()
n := v.NumField()
- fields := []Field{}
+ nokey := 0
for i := 0; i < n; i++ {
ft := t.Field(i)
fv := v.Field(i)
- m := ft.Tag.Get("marshal")
- k := ft.Tag.Get("key")
- if k == "" {
- k = ft.Name
+ key := fieldname[ft.Name]
+ if key == "" {
+ key = ft.Name
}
- val := ""
- switch fv.Kind() {
- case reflect.String:
- switch m {
- case "id":
- val = formatId(fv.String())
- case "":
- val = quoteValue(fv.String())
- default:
- panic("bad string field tag")
- }
- case reflect.Int, reflect.Int32, reflect.Int64:
- switch m {
- case "date":
- val = formatDate(fv.Int())
- case "":
- val = strconv.Itoa64(fv.Int())
- default:
- panic("bad int field tag")
+ seen[key] = true
+ s, ok := doc.Fields[key]
+ if !ok {
+ if fv.Kind() == reflect.Ptr {
+ // missing optional key: leave the pointer as nil
+ nokey++
+ continue
}
- case reflect.Uint, reflect.Uint32, reflect.Uint64:
- switch m {
- case "":
- val = strconv.Uitoa64(fv.Uint())
- default:
- panic("bad uint field tag")
+			return nil, fmt.Errorf("ParseStruct: field %s of %s is missing", key, t.Name())
+ }
+ if fv.Kind() == reflect.Ptr {
+ if s == "" || s == "-" {
+ // TODO
+ // empty optional key: same as missing
+ continue
}
- case reflect.Slice:
- switch m {
- case "idlist":
- if fv.Type().Elem().Kind() != reflect.String {
- panic("only string slice is supported")
- }
- k := fv.Len()
- for j := 0; j < k; j++ {
- if j > 0 {
- val += " "
- }
- val += formatId(fv.Index(j).String())
+ fv.Set(reflect.New(fv.Type().Elem()))
+ fv = fv.Elem()
+ }
+ switch fieldtype[key] {
+ case "id":
+ var val string
+ val, err = parseId(s)
+ fv.SetString(val)
+ case "text":
+ var val string
+ val, err = parseString(s)
+ fv.SetString(val)
+ case "int":
+ var val int64
+ val, err = strconv.Atoi64(s)
+ fv.SetInt(val)
+ case "date":
+ var val int64
+ val, err = parseDate(s)
+ fv.SetInt(val)
+ case "ids":
+ // TODO: empty slice?
+ ids := strings.Split(s, " ")
+ val := make([]string, len(ids))
+ for j, id := range ids {
+ val[j], err = parseId(id)
+ if err != nil {
+ return
}
- default:
- panic("bad slice field tag")
}
+ fv.Set(reflect.ValueOf(val))
default:
- panic("bad field type")
+ panic("bad field type " + key + " " + fieldtype[key])
+ }
+ if err != nil {
+ return
+ }
+ }
+ if len(doc.Fields)+nokey != n {
+ for k := range doc.Fields {
+ if !seen[k] {
+ err = fmt.Errorf("ParseStruct: unknown field %s in %s", k, t.Name())
+ return
+ }
}
- fields = append(fields, Field{k, val})
}
- return fields
+ return
}
-func unmarshal(fields []Field, iv interface{}) error {
+// turn a struct into a document
+func FormatStruct(iv interface{}) (doc *Document, err error) {
v := reflect.ValueOf(iv)
if v.Kind() != reflect.Ptr || v.IsNil() || v.Elem().Kind() != reflect.Struct {
- panic("unmarshal: input is not a pointer to struct")
+ panic("input is not a pointer to struct")
}
v = v.Elem()
t := v.Type()
n := v.NumField()
- if len(fields) != n {
- return fmt.Errorf("unmarshal: %s has %d fields, got %d\n", t.Name(), n, len(fields))
- }
+ doc = new(Document)
+ doc.Type = t.Name()
+ doc.Fields = make(map[string]string)
for i := 0; i < n; i++ {
ft := t.Field(i)
fv := v.Field(i)
- m := ft.Tag.Get("marshal")
- k := ft.Tag.Get("key")
- if k == "" {
- k = ft.Name
+ key := fieldname[ft.Name]
+ if key == "" {
+ key = ft.Name
}
- if fields[i].Key != k {
- return fmt.Errorf("unmarshal: field %d of %s (%s) is missing\n", i, t.Name(), k)
- }
- s := fields[i].Value
- var err error
- switch fv.Kind() {
- case reflect.String:
- var val string
- switch m {
- case "id":
- val, err = parseId(s)
- case "":
- val, err = unquoteValue(s)
- default:
- panic("bad string field tag")
- }
- fv.SetString(val)
- case reflect.Int, reflect.Int32, reflect.Int64:
- var val int64
- switch m {
- case "date":
- val, err = parseDate(s)
- case "":
- val, err = strconv.Atoi64(s)
- default:
- panic("bad int field tag")
- }
- fv.SetInt(val)
- case reflect.Uint, reflect.Uint32, reflect.Uint64:
- var val uint64
- switch m {
- case "":
- val, err = strconv.Atoui64(s)
- default:
- panic("bad uint field tag")
+ val := ""
+ if fv.Kind() == reflect.Ptr {
+ if fv.IsNil() {
+ // keep empty optional fields but mark them
+ val = "-"
+ goto setval
}
- fv.SetUint(val)
- case reflect.Slice:
- var val []string
- switch m {
- case "idlist":
- if fv.Type().Elem().Kind() != reflect.String {
- panic("only string slice is supported")
- }
- ids := strings.Split(s, " ")
- val = make([]string, len(ids))
- for j := range val {
- val[j], err = parseId(ids[j])
- if err != nil {
- return err
- }
+ fv = fv.Elem()
+ }
+ switch fieldtype[key] {
+ case "id":
+ val = formatId(fv.String())
+ case "text":
+ val = formatString(fv.String())
+ case "int":
+ val = strconv.Itoa64(fv.Int())
+ case "date":
+ val = formatDate(fv.Int())
+ case "ids":
+ k := fv.Len()
+ for j := 0; j < k; j++ {
+ if j > 0 {
+ val += "\n "
}
- default:
- panic("bad slice field tag")
+ val += formatId(fv.Index(j).String())
}
- fv.Set(reflect.ValueOf(val))
default:
- panic("bad field type")
- }
- if err != nil {
- return err
+ panic("bad field type " + key + " " + fieldtype[key])
}
+ setval:
+ doc.Fields[key] = val
+ doc.Order = append(doc.Order, key)
}
- return nil
+ return
}
-func getLine(data []byte) (line, rest []byte) {
- i := bytes.Index(data, []byte{'\n'})
- j := i + 1
- if i < 0 {
- i = len(data)
- j = i
- } else if i > 0 && data[i-1] == '\r' {
- i--
+func ParseFields(s []byte) (fields map[string]string, rest []byte, err error) {
+ rest = s
+ fields = make(map[string]string)
+ key := ""
+ // \n is optional after the last field and an extra \n is allowed as well
+ for len(rest) > 0 {
+ var line []byte
+ line, rest = getLine(rest)
+ // empty line after the last field is consumed
+ if len(line) == 0 {
+ break
+ }
+ if line[0] == ' ' && key != "" {
+ // "Key: v1\n v2\n" is equivalent to "Key: v1 v2\n"
+ fields[key] += string(line)
+ continue
+ }
+ if line[0] < 'A' || line[0] > 'Z' {
+ err = fmt.Errorf("ParseFields: field name must start with an upper-case ascii letter")
+ return
+ }
+ i := bytes.IndexByte(line, ':')
+ if i < 0 {
+ err = fmt.Errorf("ParseFields: missing ':'")
+ return
+ }
+ key = string(line[:i])
+ if _, ok := fields[key]; ok {
+ err = fmt.Errorf("ParseFields: repeated fields are not allowed")
+ return
+ }
+ fields[key] = string(line[i+1:])
}
- return data[:i], data[j:]
+ for key, v := range fields {
+ // either a single space follows ':' or the value is empty
+ // good: "Key:\n", "Key:\n value\n", "Key: value\n", "Key: v1\n v2\n"
+ // bad: "Key:value\n", "Key: \nvalue\n"
+ // bad but not checked here: "Key: \n", "Key: value \n", "Key:\n \n value\n"
+ if len(v) == 0 {
+ continue
+ }
+ if v[0] != ' ' {
+ err = fmt.Errorf("ParseFields: ':' is not followed by ' '")
+ return
+ }
+ fields[key] = v[1:]
+ }
+ return
}
-func trimspace(s []byte) []byte {
- a := bytes.Split(s, []byte("\n"))
- for i := range a {
- a[i] = bytes.TrimRight(a[i], " \t\r")
- }
- return bytes.Join(a, []byte("\n"))
+// TODO: limit errors
+
+func parseId(s string) (string, error) {
+ // check if hex decodable
+ // TODO: length check
+ dst := make([]byte, len(s)/2)
+ _, err := hex.Decode(dst, []byte(s))
+ return s, err
}
-func dashesc(s []byte) []byte {
- r := bytes.Replace(s, []byte("\n-"), []byte("\n- -"), -1)
- if len(r) > 0 && r[0] == '-' {
- r = append([]byte("- "), r...)
- }
- return r
+func formatId(s string) string {
+ return s
}
-func dashunesc(s []byte) []byte {
- r := bytes.Replace(s, []byte("\n- "), []byte("\n"), -1)
- if len(r) >= 2 && r[0] == '-' && r[1] == ' ' {
- r = r[2:]
+func parseString(s string) (string, error) {
+ if len(s) > MaxValueLength {
+ return "", fmt.Errorf("parseString: length limit is exceeded")
}
- return r
+ return s, nil
}
-// RFC 4880 is unclear about multiple Hash header semantics, section 7. says
-// "One or more "Hash" Armor Headers"
-// then
-// "If more than one message digest is used in the signature, the "Hash"
-// armor header contains a comma-delimited list of used message digests."
-// in section 6.2.
-// "there is no limit to the length of Armor Headers. Care should
-// be taken that the Armor Headers are short enough to survive
-// transport. One way to do this is to repeat an Armor Header key
-// multiple times with different values for each so that no one line is
-// overly long."
-// we accept a single Hash header with a list of hash algorithms for now
-// but use the one specified by the signature
-
-func split(s []byte) (hash, body, sig []byte) {
- if !bytes.HasPrefix(s, []byte(ClearSignedHeader)) {
- return
- }
- s = s[len(ClearSignedHeader):]
- // only allow a single Hash: header
- if bytes.HasPrefix(s, []byte("Hash: ")) {
- s = s[len("Hash: "):]
- hash, s = getLine(s)
- }
- // skip empty line
- empty, s := getLine(s)
- if len(empty) != 0 {
- return
+func formatString(s string) string {
+ return s
+}
+
+func parseDate(s string) (int64, error) {
+ // TODO: fractional seconds?
+ t, err := time.Parse(time.RFC3339, s)
+ if err != nil {
+ return 0, err
}
- i := bytes.Index(s, []byte("\n-----BEGIN"))
+ return t.Seconds(), nil
+}
+
+func formatDate(i int64) string {
+ return time.SecondsToUTC(i).Format(time.RFC3339)
+}
+
+func getLine(data []byte) (line, rest []byte) {
+ i := bytes.IndexByte(data, '\n')
+ j := i + 1
if i < 0 {
- return
- }
- body, sig = s[:i], s[i+1:]
- if i > 0 && body[i-1] == '\r' {
- body = body[:i-1]
+ i = len(data)
+ j = i
+ } else if i > 0 && data[i-1] == '\r' {
+ i--
}
- return
+ return data[:i], data[j:]
}