-
Notifications
You must be signed in to change notification settings - Fork 17
Expand file tree
/
Copy pathmap_elements_encode.go
More file actions
167 lines (140 loc) · 4.79 KB
/
map_elements_encode.go
File metadata and controls
167 lines (140 loc) · 4.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
/*
* Atree - Scalable Arrays and Ordered Maps
*
* Copyright Flow Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package atree
import (
"encoding/binary"
"fmt"
)
// Encode encodes hkeyElements to the given encoder.
//
// CBOR encoded array [
//	0: level (uint)
//	1: hkeys (byte string)
//	2: elements (array)
// ]
func (e *hkeyElements) Encode(enc *Encoder) error {

	if e.level > maxDigestLevel {
		return NewFatalError(fmt.Errorf("hash level %d exceeds max digest level %d", e.level, maxDigestLevel))
	}

	// Encode CBOR array head of 3 elements (level, hkeys, elements)
	const cborArrayHeadOfThreeElements = 0x83
	enc.Scratch[0] = cborArrayHeadOfThreeElements

	// Encode hash level
	enc.Scratch[1] = byte(e.level)

	// Encode hkeys as byte string.
	// The byte string header is encoded manually for fixed-size encoding.
	// TODO: maybe make this header dynamic to reduce size
	// CBOR byte string head 0x59 indicates that the byte string length is
	// encoded in the next 2 bytes.
	const cborByteStringHead = 0x59
	enc.Scratch[2] = cborByteStringHead

	// Casting len(e.hkeys)*8 to uint16 is safe because the number of
	// hkeys per data slab is limited by slab size, which is at most
	// maxThreshold (48KiB with maxSlabSize of 32KiB). Even in the
	// worst case, len(e.hkeys)*8 is well below math.MaxUint16 (65535).
	binary.BigEndian.PutUint16(enc.Scratch[3:], uint16(len(e.hkeys)*8))

	// Write scratch content to encoder
	const totalSize = 5
	err := enc.CBOR.EncodeRawBytes(enc.Scratch[:totalSize])
	if err != nil {
		return NewEncodingError(err)
	}

	// Encode hkeys, one big-endian uint64 (digestSize bytes) per digest.
	for i := range e.hkeys {
		binary.BigEndian.PutUint64(enc.Scratch[:], uint64(e.hkeys[i]))
		err = enc.CBOR.EncodeRawBytes(enc.Scratch[:digestSize])
		if err != nil {
			return NewEncodingError(err)
		}
	}

	// Encode elements array header manually for fixed-size encoding.
	// TODO: maybe make this header dynamic to reduce size
	// CBOR array head 0x99 indicates that the number of array elements is
	// encoded in the next 2 bytes.
	const cborArrayHead = 0x99
	enc.Scratch[0] = cborArrayHead

	// Casting len(e.elems) to uint16 is safe because the number of
	// elements per data slab is limited by slab size, which is at
	// most maxThreshold (48KiB with maxSlabSize of 32KiB), well
	// below math.MaxUint16 (65535).
	binary.BigEndian.PutUint16(enc.Scratch[1:], uint16(len(e.elems)))

	err = enc.CBOR.EncodeRawBytes(enc.Scratch[:3])
	if err != nil {
		return NewEncodingError(err)
	}

	// Encode each element.
	// The loop variable is named elem (not e) to avoid shadowing the
	// method receiver.
	for _, elem := range e.elems {
		err = elem.Encode(enc)
		if err != nil {
			// Don't need to wrap error as external error because err is already categorized by element.Encode().
			return err
		}
	}

	// TODO: is Flush necessary?
	err = enc.CBOR.Flush()
	if err != nil {
		return NewEncodingError(err)
	}

	return nil
}
// Encode encodes singleElements to the given encoder.
//
// CBOR encoded array [
//	0: level (uint)
//	1: hkeys (0 length byte string)
//	2: elements (array)
// ]
func (e *singleElements) Encode(enc *Encoder) error {

	if e.level > maxDigestLevel {
		return NewFatalError(fmt.Errorf("digest level %d exceeds max digest level %d", e.level, maxDigestLevel))
	}

	// Named CBOR heads, consistent with hkeyElements.Encode.
	const (
		// CBOR array head of 3 elements (level, hkeys, elements)
		cborArrayHeadOfThreeElements = 0x83
		// CBOR head of an empty (0 length) byte string
		cborEmptyByteStringHead = 0x40
		// CBOR array head 0x99 indicates that the number of array
		// elements is encoded in the next 2 bytes.
		cborArrayHead = 0x99
	)

	// Encode CBOR array header for 3 elements (level, hkeys, elements)
	enc.Scratch[0] = cborArrayHeadOfThreeElements

	// Encode hash level
	enc.Scratch[1] = byte(e.level)

	// Encode hkeys (empty byte string)
	enc.Scratch[2] = cborEmptyByteStringHead

	// Encode elements array header manually for fixed-size encoding.
	// TODO: maybe make this header dynamic to reduce size
	// Casting len(e.elems) to uint16 is safe because the number of
	// elements per data slab is limited by slab size, which is at
	// most maxThreshold (48KiB with maxSlabSize of 32KiB), well
	// below math.MaxUint16 (65535).
	enc.Scratch[3] = cborArrayHead
	binary.BigEndian.PutUint16(enc.Scratch[4:], uint16(len(e.elems)))

	// Write scratch content to encoder
	const totalSize = 6
	err := enc.CBOR.EncodeRawBytes(enc.Scratch[:totalSize])
	if err != nil {
		return NewEncodingError(err)
	}

	// Encode each element.
	// The loop variable is named elem (not e) to avoid shadowing the
	// method receiver.
	for _, elem := range e.elems {
		err = elem.Encode(enc)
		if err != nil {
			// Don't need to wrap error as external error because err is already categorized by singleElement.Encode().
			return err
		}
	}

	// TODO: is Flush necessary?
	err = enc.CBOR.Flush()
	if err != nil {
		return NewEncodingError(err)
	}

	return nil
}