Line data Source code
1 : //! In-memory index to track the tenant files on the remote storage.
2 : //! Able to restore itself from the storage index parts, which are located in every timeline's remote directory and contain all data about
3 : //! remote timeline layers and their metadata.
4 :
5 : use std::collections::HashMap;
6 :
7 : use chrono::NaiveDateTime;
8 : use pageserver_api::models::AuxFilePolicy;
9 : use serde::{Deserialize, Serialize};
10 : use utils::id::TimelineId;
11 :
12 : use crate::tenant::metadata::TimelineMetadata;
13 : use crate::tenant::storage_layer::LayerName;
14 : use crate::tenant::Generation;
15 : use pageserver_api::shard::ShardIndex;
16 :
17 : use utils::lsn::Lsn;
18 :
19 : /// In-memory representation of an `index_part.json` file
20 : ///
21 : /// Contains data about all files of the timeline that are present remotely, as well as the timeline's metadata.
22 : ///
23 : /// This type needs to be backwards and forwards compatible. When changing the fields,
24 : /// remember to add a test case for the changed version.
25 2850 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
26 : pub struct IndexPart {
27 : /// Debugging aid describing the version of this type.
28 : #[serde(default)]
29 : version: usize,
30 :
31 : #[serde(default)]
32 : #[serde(skip_serializing_if = "Option::is_none")]
33 : pub deleted_at: Option<NaiveDateTime>,
34 :
35 : #[serde(default)]
36 : #[serde(skip_serializing_if = "Option::is_none")]
37 : pub archived_at: Option<NaiveDateTime>,
38 :
39 : /// Per layer file name metadata; an entry can exist for a layer file that is present or missing remotely.
40 : ///
41 : /// Older versions of `IndexPart` either lack this property or store only part of the metadata
42 : /// that the latest version stores.
43 : pub layer_metadata: HashMap<LayerName, LayerFileMetadata>,
44 :
45 : /// Because the legacy "metadata" field is hard to eyeball, we copied the
46 : /// "disk_consistent_lsn" out of it. After version 7 this is no longer needed, but the name cannot be
47 : /// reused.
48 : pub(super) disk_consistent_lsn: Lsn,
49 :
50 : // TODO: rename as "metadata" next week, keep the alias = "metadata_bytes", bump version.
51 : // Adding the "alias = metadata" was forgotten in #7693, so we have to use "rename = metadata_bytes"
52 : // for backwards compatibility.
53 : #[serde(
54 : rename = "metadata_bytes",
55 : alias = "metadata",
56 : with = "crate::tenant::metadata::modern_serde"
57 : )]
58 : pub metadata: TimelineMetadata,
59 :
60 : #[serde(default)]
61 : pub(crate) lineage: Lineage,
62 :
63 : /// Describes the kind of aux files stored in the timeline.
64 : ///
65 : /// The value is updated during file ingestion: the latest wanted value communicated via the tenant config is applied if it is acceptable.
66 : /// For example, a V1 setting is not accepted after V2 files have been committed.
67 : ///
68 : /// `None` means that no aux files had been written to the storage before the point
69 : /// when this flag was introduced.
70 : #[serde(skip_serializing_if = "Option::is_none", default)]
71 : pub(crate) last_aux_file_policy: Option<AuxFilePolicy>,
72 : }
73 :
74 : impl IndexPart {
75 : /// When adding or modifying any parts of `IndexPart`, increment the version so that it can be
76 : /// used to understand later versions.
77 : ///
78 : /// Version is currently informative only.
79 : /// Version history
80 : /// - 2: added `deleted_at`
81 : /// - 3: no longer deserialize `timeline_layers` (serialized format is the same, but timeline_layers
82 : /// is always generated from the keys of `layer_metadata`)
83 : /// - 4: timeline_layers is fully removed.
84 : /// - 5: lineage was added
85 : /// - 6: last_aux_file_policy is added.
86 : /// - 7: metadata_bytes is no longer written, but still read
87 : /// - 8: added `archived_at`
88 : const LATEST_VERSION: usize = 8;
89 :
90 : // Versions we may see when reading from a bucket.
91 : pub const KNOWN_VERSIONS: &'static [usize] = &[1, 2, 3, 4, 5, 6, 7, 8];
92 :
93 : pub const FILE_NAME: &'static str = "index_part.json";
94 :
95 404 : pub(crate) fn empty(metadata: TimelineMetadata) -> Self {
96 404 : IndexPart {
97 404 : version: Self::LATEST_VERSION,
98 404 : layer_metadata: Default::default(),
99 404 : disk_consistent_lsn: metadata.disk_consistent_lsn(),
100 404 : metadata,
101 404 : deleted_at: None,
102 404 : archived_at: None,
103 404 : lineage: Default::default(),
104 404 : last_aux_file_policy: None,
105 404 : }
106 404 : }
107 :
108 0 : pub fn version(&self) -> usize {
109 0 : self.version
110 0 : }
111 :
112 : /// Under normal operation, read this value from `self.metadata` instead:
113 : /// this method is just for the scrubber to use when validating an index.
114 0 : pub fn duplicated_disk_consistent_lsn(&self) -> Lsn {
115 0 : self.disk_consistent_lsn
116 0 : }
117 :
118 18 : pub fn from_s3_bytes(bytes: &[u8]) -> Result<Self, serde_json::Error> {
119 18 : serde_json::from_slice::<IndexPart>(bytes)
120 18 : }
121 :
122 1398 : pub fn to_s3_bytes(&self) -> serde_json::Result<Vec<u8>> {
123 1398 : serde_json::to_vec(self)
124 1398 : }
125 :
126 : #[cfg(test)]
127 12 : pub(crate) fn example() -> Self {
128 12 : Self::empty(TimelineMetadata::example())
129 12 : }
130 :
131 12 : pub(crate) fn last_aux_file_policy(&self) -> Option<AuxFilePolicy> {
132 12 : self.last_aux_file_policy
133 12 : }
134 : }
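// Illustrative sketch, not part of the original source: a round-trip through the S3 byte
// representation, showing that an `IndexPart` produced by `empty()` at `LATEST_VERSION`
// serializes and parses back unchanged. The helper name `index_part_roundtrip` is hypothetical.
#[cfg(test)]
#[allow(dead_code)]
fn index_part_roundtrip(metadata: TimelineMetadata) -> IndexPart {
    let part = IndexPart::empty(metadata);
    // serialization of the latest version should never fail
    let bytes = part.to_s3_bytes().expect("serialize latest version");
    // ...and the latest version must always be able to read back what it wrote
    let parsed = IndexPart::from_s3_bytes(&bytes).expect("parse latest version back");
    assert_eq!(parsed, part);
    parsed
}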
135 :
136 : /// Metadata gathered for each of the layer files.
137 : ///
138 : /// Fields have to be `Option`s because remote [`IndexPart`]s can be from different versions, which
139 : /// might have less or more metadata depending on whether we are upgrading or rolling back an upgrade.
140 12652 : #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
141 : pub struct LayerFileMetadata {
142 : pub file_size: u64,
143 :
144 : #[serde(default = "Generation::none")]
145 : #[serde(skip_serializing_if = "Generation::is_none")]
146 : pub generation: Generation,
147 :
148 : #[serde(default = "ShardIndex::unsharded")]
149 : #[serde(skip_serializing_if = "ShardIndex::is_unsharded")]
150 : pub shard: ShardIndex,
151 : }
152 :
153 : impl LayerFileMetadata {
154 2416 : pub fn new(file_size: u64, generation: Generation, shard: ShardIndex) -> Self {
155 2416 : LayerFileMetadata {
156 2416 : file_size,
157 2416 : generation,
158 2416 : shard,
159 2416 : }
160 2416 : }
161 : }
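// Illustrative sketch, not part of the original source: older index versions serialize only
// `file_size` for a layer, so `generation` and `shard` fall back to the serde defaults declared
// above when deserializing. The helper name `layer_metadata_defaults` is hypothetical.
#[cfg(test)]
#[allow(dead_code)]
fn layer_metadata_defaults() -> LayerFileMetadata {
    // a pre-generation, pre-sharding entry as it would appear in an old index_part.json
    let parsed: LayerFileMetadata = serde_json::from_str(r#"{ "file_size": 25600000 }"#)
        .expect("missing fields are filled with defaults");
    assert_eq!(parsed.generation, Generation::none());
    assert_eq!(parsed.shard, ShardIndex::unsharded());
    parsed
}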
162 :
163 : /// Limited history of earlier ancestors.
164 : ///
165 : /// A timeline can have more than one earlier ancestor, in the rare case that it was repeatedly
166 : /// reparented by having a later timeline be detached from its ancestor.
167 34 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)]
168 : pub(crate) struct Lineage {
169 : /// Whether the `reparenting_history` has been truncated to [`Lineage::REMEMBER_AT_MOST`] entries.
170 : #[serde(skip_serializing_if = "is_false", default)]
171 : reparenting_history_truncated: bool,
172 :
173 : /// Earlier ancestors, truncated when [`Self::reparenting_history_truncated`] is set.
174 : ///
175 : /// These are stored in case we want to support WAL based DR on the timeline. There can be many
176 : /// of these and at most one [`Self::original_ancestor`]. There cannot be more reparentings
177 : /// after [`Self::original_ancestor`] has been set.
178 : #[serde(skip_serializing_if = "Vec::is_empty", default)]
179 : reparenting_history: Vec<TimelineId>,
180 :
181 : /// The ancestor this timeline has been detached from, and when.
182 : ///
183 : /// If you are adding support for detaching from a hierarchy, consider changing the ancestry
184 : /// into a `Vec<(TimelineId, Lsn)>` to be a path instead.
185 : // FIXME: this is insufficient even for a path of two timelines for future WAL recovery
186 : // purposes:
187 : //
188 : // Assume an "old main" which has received most of the WAL and has a branch "new main"
189 : // starting a bit before "old main"'s last_record_lsn. The current version works fine,
190 : // because we will know to replay WAL and branch at the recorded Lsn to do WAL recovery.
191 : //
192 : // Then assume "new main" similarly receives a branch right before its last_record_lsn,
193 : // "new new main". The current implementation would just store ("new main", ancestor_lsn, _)
194 : // here. However, we cannot recover from WAL using only that information; we would need the
195 : // whole ancestry here:
196 : //
197 : // ```json
198 : // [
199 : // ["old main", ancestor_lsn("new main"), _],
200 : // ["new main", ancestor_lsn("new new main"), _]
201 : // ]
202 : // ```
203 : #[serde(skip_serializing_if = "Option::is_none", default)]
204 : original_ancestor: Option<(TimelineId, Lsn, NaiveDateTime)>,
205 : }
206 :
207 5700 : fn is_false(b: &bool) -> bool {
208 5700 : !b
209 5700 : }
210 :
211 : impl Lineage {
212 : const REMEMBER_AT_MOST: usize = 100;
213 :
214 0 : pub(crate) fn record_previous_ancestor(&mut self, old_ancestor: &TimelineId) {
215 0 : if self.reparenting_history.last() == Some(old_ancestor) {
216 : // do not re-record it
217 0 : return;
218 0 : }
219 0 :
220 0 : let drop_oldest = self.reparenting_history.len() + 1 >= Self::REMEMBER_AT_MOST;
221 0 :
222 0 : self.reparenting_history_truncated |= drop_oldest;
223 0 : if drop_oldest {
224 0 : self.reparenting_history.remove(0);
225 0 : }
226 0 : self.reparenting_history.push(*old_ancestor);
227 0 : }
228 :
229 0 : pub(crate) fn record_detaching(&mut self, branchpoint: &(TimelineId, Lsn)) {
230 0 : assert!(self.original_ancestor.is_none());
231 :
232 0 : self.original_ancestor =
233 0 : Some((branchpoint.0, branchpoint.1, chrono::Utc::now().naive_utc()));
234 0 : }
235 :
236 : /// The queried lsn is most likely the basebackup lsn, and this answers the question "is it allowed
237 : /// to start a read/write primary at this lsn".
238 : ///
239 : /// Returns true if the Lsn was previously our branch point.
240 0 : pub(crate) fn is_previous_ancestor_lsn(&self, lsn: Lsn) -> bool {
241 0 : self.original_ancestor
242 0 : .is_some_and(|(_, ancestor_lsn, _)| ancestor_lsn == lsn)
243 0 : }
244 :
245 0 : pub(crate) fn is_detached_from_original_ancestor(&self) -> bool {
246 0 : self.original_ancestor.is_some()
247 0 : }
248 :
249 0 : pub(crate) fn is_reparented(&self) -> bool {
250 0 : !self.reparenting_history.is_empty()
251 0 : }
252 : }
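// Illustrative sketches, not part of the original source; the helper names are hypothetical.
//
// First: repeated reparenting keeps the history bounded, dropping the oldest entries and
// setting the truncation flag once `REMEMBER_AT_MOST` is reached.
#[cfg(test)]
#[allow(dead_code)]
fn lineage_truncation_sketch() {
    let mut lineage = Lineage::default();
    for i in 0..(Lineage::REMEMBER_AT_MOST + 10) {
        // distinct ancestors, so `record_previous_ancestor` does not de-duplicate them
        let ancestor: TimelineId = format!("{:032x}", i).parse().unwrap();
        lineage.record_previous_ancestor(&ancestor);
    }
    assert!(lineage.reparenting_history_truncated);
    assert!(lineage.reparenting_history.len() < Lineage::REMEMBER_AT_MOST);
}

// Second: after detaching, only the recorded branch point answers `is_previous_ancestor_lsn`
// positively, which is what allows starting a read/write primary at that Lsn.
#[cfg(test)]
#[allow(dead_code)]
fn lineage_detach_sketch(original_ancestor: TimelineId, branch_lsn: Lsn) -> Lineage {
    let mut lineage = Lineage::default();
    lineage.record_detaching(&(original_ancestor, branch_lsn));
    assert!(lineage.is_detached_from_original_ancestor());
    assert!(lineage.is_previous_ancestor_lsn(branch_lsn));
    lineage
}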
253 :
254 : #[cfg(test)]
255 : mod tests {
256 : use super::*;
257 : use std::str::FromStr;
258 : use utils::id::TimelineId;
259 :
260 : #[test]
261 2 : fn v1_indexpart_is_parsed() {
262 2 : let example = r#"{
263 2 : "version":1,
264 2 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
265 2 : "layer_metadata":{
266 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
267 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
268 2 : },
269 2 : "disk_consistent_lsn":"0/16960E8",
270 2 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
271 2 : }"#;
272 2 :
273 2 : let expected = IndexPart {
274 2 : // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
275 2 : version: 1,
276 2 : layer_metadata: HashMap::from([
277 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
278 2 : file_size: 25600000,
279 2 : generation: Generation::none(),
280 2 : shard: ShardIndex::unsharded()
281 2 : }),
282 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
283 2 : // serde_json should always parse this but this might be a double with jq for
284 2 : // example.
285 2 : file_size: 9007199254741001,
286 2 : generation: Generation::none(),
287 2 : shard: ShardIndex::unsharded()
288 2 : })
289 2 : ]),
290 2 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
291 2 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
292 2 : deleted_at: None,
293 2 : archived_at: None,
294 2 : lineage: Lineage::default(),
295 2 : last_aux_file_policy: None,
296 2 : };
297 2 :
298 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
299 2 : assert_eq!(part, expected);
300 2 : }
301 :
302 : #[test]
303 2 : fn v1_indexpart_is_parsed_with_optional_missing_layers() {
304 2 : let example = r#"{
305 2 : "version":1,
306 2 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
307 2 : "missing_layers":["This shouldn't fail deserialization"],
308 2 : "layer_metadata":{
309 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
310 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
311 2 : },
312 2 : "disk_consistent_lsn":"0/16960E8",
313 2 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
314 2 : }"#;
315 2 :
316 2 : let expected = IndexPart {
317 2 : // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
318 2 : version: 1,
319 2 : layer_metadata: HashMap::from([
320 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
321 2 : file_size: 25600000,
322 2 : generation: Generation::none(),
323 2 : shard: ShardIndex::unsharded()
324 2 : }),
325 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
326 2 : // serde_json should always parse this but this might be a double with jq for
327 2 : // example.
328 2 : file_size: 9007199254741001,
329 2 : generation: Generation::none(),
330 2 : shard: ShardIndex::unsharded()
331 2 : })
332 2 : ]),
333 2 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
334 2 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
335 2 : deleted_at: None,
336 2 : archived_at: None,
337 2 : lineage: Lineage::default(),
338 2 : last_aux_file_policy: None,
339 2 : };
340 2 :
341 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
342 2 : assert_eq!(part, expected);
343 2 : }
344 :
345 : #[test]
346 2 : fn v2_indexpart_is_parsed_with_deleted_at() {
347 2 : let example = r#"{
348 2 : "version":2,
349 2 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
350 2 : "missing_layers":["This shouldn't fail deserialization"],
351 2 : "layer_metadata":{
352 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
353 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
354 2 : },
355 2 : "disk_consistent_lsn":"0/16960E8",
356 2 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
357 2 : "deleted_at": "2023-07-31T09:00:00.123"
358 2 : }"#;
359 2 :
360 2 : let expected = IndexPart {
361 2 : // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
362 2 : version: 2,
363 2 : layer_metadata: HashMap::from([
364 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
365 2 : file_size: 25600000,
366 2 : generation: Generation::none(),
367 2 : shard: ShardIndex::unsharded()
368 2 : }),
369 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
370 2 : // serde_json should always parse this but this might be a double with jq for
371 2 : // example.
372 2 : file_size: 9007199254741001,
373 2 : generation: Generation::none(),
374 2 : shard: ShardIndex::unsharded()
375 2 : })
376 2 : ]),
377 2 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
378 2 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
379 2 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
380 2 : archived_at: None,
381 2 : lineage: Lineage::default(),
382 2 : last_aux_file_policy: None,
383 2 : };
384 2 :
385 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
386 2 : assert_eq!(part, expected);
387 2 : }
388 :
389 : #[test]
390 2 : fn empty_layers_are_parsed() {
391 2 : let empty_layers_json = r#"{
392 2 : "version":1,
393 2 : "timeline_layers":[],
394 2 : "layer_metadata":{},
395 2 : "disk_consistent_lsn":"0/2532648",
396 2 : "metadata_bytes":[136,151,49,208,0,70,0,4,0,0,0,0,2,83,38,72,1,0,0,0,0,2,83,38,32,1,87,198,240,135,97,119,45,125,38,29,155,161,140,141,255,210,0,0,0,0,2,83,38,72,0,0,0,0,1,73,240,192,0,0,0,0,1,73,240,192,0,0,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
397 2 : }"#;
398 2 :
399 2 : let expected = IndexPart {
400 2 : version: 1,
401 2 : layer_metadata: HashMap::new(),
402 2 : disk_consistent_lsn: "0/2532648".parse::<Lsn>().unwrap(),
403 2 : metadata: TimelineMetadata::from_bytes(&[
404 2 : 136, 151, 49, 208, 0, 70, 0, 4, 0, 0, 0, 0, 2, 83, 38, 72, 1, 0, 0, 0, 0, 2, 83,
405 2 : 38, 32, 1, 87, 198, 240, 135, 97, 119, 45, 125, 38, 29, 155, 161, 140, 141, 255,
406 2 : 210, 0, 0, 0, 0, 2, 83, 38, 72, 0, 0, 0, 0, 1, 73, 240, 192, 0, 0, 0, 0, 1, 73,
407 2 : 240, 192, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
408 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
409 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
410 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
411 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
412 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
413 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
414 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
415 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
416 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
417 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
418 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
419 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
420 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
421 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
422 2 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
423 2 : 0, 0,
424 2 : ])
425 2 : .unwrap(),
426 2 : deleted_at: None,
427 2 : archived_at: None,
428 2 : lineage: Lineage::default(),
429 2 : last_aux_file_policy: None,
430 2 : };
431 2 :
432 2 : let empty_layers_parsed = IndexPart::from_s3_bytes(empty_layers_json.as_bytes()).unwrap();
433 2 :
434 2 : assert_eq!(empty_layers_parsed, expected);
435 2 : }
436 :
437 : #[test]
438 2 : fn v4_indexpart_is_parsed() {
439 2 : let example = r#"{
440 2 : "version":4,
441 2 : "layer_metadata":{
442 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
443 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
444 2 : },
445 2 : "disk_consistent_lsn":"0/16960E8",
446 2 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
447 2 : "deleted_at": "2023-07-31T09:00:00.123"
448 2 : }"#;
449 2 :
450 2 : let expected = IndexPart {
451 2 : version: 4,
452 2 : layer_metadata: HashMap::from([
453 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
454 2 : file_size: 25600000,
455 2 : generation: Generation::none(),
456 2 : shard: ShardIndex::unsharded()
457 2 : }),
458 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
459 2 : // serde_json should always parse this but this might be a double with jq for
460 2 : // example.
461 2 : file_size: 9007199254741001,
462 2 : generation: Generation::none(),
463 2 : shard: ShardIndex::unsharded()
464 2 : })
465 2 : ]),
466 2 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
467 2 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
468 2 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
469 2 : archived_at: None,
470 2 : lineage: Lineage::default(),
471 2 : last_aux_file_policy: None,
472 2 : };
473 2 :
474 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
475 2 : assert_eq!(part, expected);
476 2 : }
477 :
478 : #[test]
479 2 : fn v5_indexpart_is_parsed() {
480 2 : let example = r#"{
481 2 : "version":5,
482 2 : "layer_metadata":{
483 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499":{"file_size":23289856,"generation":1},
484 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619":{"file_size":1015808,"generation":1}},
485 2 : "disk_consistent_lsn":"0/15A7618",
486 2 : "metadata_bytes":[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
487 2 : "lineage":{
488 2 : "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
489 2 : "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
490 2 : }
491 2 : }"#;
492 2 :
493 2 : let expected = IndexPart {
494 2 : version: 5,
495 2 : layer_metadata: HashMap::from([
496 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499".parse().unwrap(), LayerFileMetadata {
497 2 : file_size: 23289856,
498 2 : generation: Generation::new(1),
499 2 : shard: ShardIndex::unsharded(),
500 2 : }),
501 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619".parse().unwrap(), LayerFileMetadata {
502 2 : file_size: 1015808,
503 2 : generation: Generation::new(1),
504 2 : shard: ShardIndex::unsharded(),
505 2 : })
506 2 : ]),
507 2 : disk_consistent_lsn: Lsn::from_str("0/15A7618").unwrap(),
508 2 : metadata: TimelineMetadata::from_bytes(&[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
509 2 : deleted_at: None,
510 2 : archived_at: None,
511 2 : lineage: Lineage {
512 2 : reparenting_history_truncated: false,
513 2 : reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
514 2 : original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
515 2 : },
516 2 : last_aux_file_policy: None,
517 2 : };
518 2 :
519 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
520 2 : assert_eq!(part, expected);
521 2 : }
522 :
523 : #[test]
524 2 : fn v6_indexpart_is_parsed() {
525 2 : let example = r#"{
526 2 : "version":6,
527 2 : "layer_metadata":{
528 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
529 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
530 2 : },
531 2 : "disk_consistent_lsn":"0/16960E8",
532 2 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
533 2 : "deleted_at": "2023-07-31T09:00:00.123",
534 2 : "lineage":{
535 2 : "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
536 2 : "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
537 2 : },
538 2 : "last_aux_file_policy": "V2"
539 2 : }"#;
540 2 :
541 2 : let expected = IndexPart {
542 2 : version: 6,
543 2 : layer_metadata: HashMap::from([
544 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
545 2 : file_size: 25600000,
546 2 : generation: Generation::none(),
547 2 : shard: ShardIndex::unsharded()
548 2 : }),
549 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
550 2 : // serde_json should always parse this but this might be a double with jq for
551 2 : // example.
552 2 : file_size: 9007199254741001,
553 2 : generation: Generation::none(),
554 2 : shard: ShardIndex::unsharded()
555 2 : })
556 2 : ]),
557 2 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
558 2 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
559 2 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
560 2 : archived_at: None,
561 2 : lineage: Lineage {
562 2 : reparenting_history_truncated: false,
563 2 : reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
564 2 : original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
565 2 : },
566 2 : last_aux_file_policy: Some(AuxFilePolicy::V2),
567 2 : };
568 2 :
569 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
570 2 : assert_eq!(part, expected);
571 2 : }
572 :
573 : #[test]
574 2 : fn v7_indexpart_is_parsed() {
575 2 : let example = r#"{
576 2 : "version": 7,
577 2 : "layer_metadata":{
578 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
579 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
580 2 : },
581 2 : "disk_consistent_lsn":"0/16960E8",
582 2 : "metadata": {
583 2 : "disk_consistent_lsn": "0/16960E8",
584 2 : "prev_record_lsn": "0/1696070",
585 2 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
586 2 : "ancestor_lsn": "0/0",
587 2 : "latest_gc_cutoff_lsn": "0/1696070",
588 2 : "initdb_lsn": "0/1696070",
589 2 : "pg_version": 14
590 2 : },
591 2 : "deleted_at": "2023-07-31T09:00:00.123"
592 2 : }"#;
593 2 :
594 2 : let expected = IndexPart {
595 2 : version: 7,
596 2 : layer_metadata: HashMap::from([
597 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
598 2 : file_size: 25600000,
599 2 : generation: Generation::none(),
600 2 : shard: ShardIndex::unsharded()
601 2 : }),
602 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
603 2 : file_size: 9007199254741001,
604 2 : generation: Generation::none(),
605 2 : shard: ShardIndex::unsharded()
606 2 : })
607 2 : ]),
608 2 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
609 2 : metadata: TimelineMetadata::new(
610 2 : Lsn::from_str("0/16960E8").unwrap(),
611 2 : Some(Lsn::from_str("0/1696070").unwrap()),
612 2 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
613 2 : Lsn::INVALID,
614 2 : Lsn::from_str("0/1696070").unwrap(),
615 2 : Lsn::from_str("0/1696070").unwrap(),
616 2 : 14,
617 2 : ).with_recalculated_checksum().unwrap(),
618 2 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
619 2 : archived_at: None,
620 2 : lineage: Default::default(),
621 2 : last_aux_file_policy: Default::default(),
622 2 : };
623 2 :
624 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
625 2 : assert_eq!(part, expected);
626 2 : }
627 :
628 : #[test]
629 2 : fn v8_indexpart_is_parsed() {
630 2 : let example = r#"{
631 2 : "version": 8,
632 2 : "layer_metadata":{
633 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
634 2 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
635 2 : },
636 2 : "disk_consistent_lsn":"0/16960E8",
637 2 : "metadata": {
638 2 : "disk_consistent_lsn": "0/16960E8",
639 2 : "prev_record_lsn": "0/1696070",
640 2 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
641 2 : "ancestor_lsn": "0/0",
642 2 : "latest_gc_cutoff_lsn": "0/1696070",
643 2 : "initdb_lsn": "0/1696070",
644 2 : "pg_version": 14
645 2 : },
646 2 : "deleted_at": "2023-07-31T09:00:00.123",
647 2 : "archived_at": "2023-04-29T09:00:00.123"
648 2 : }"#;
649 2 :
650 2 : let expected = IndexPart {
651 2 : version: 8,
652 2 : layer_metadata: HashMap::from([
653 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
654 2 : file_size: 25600000,
655 2 : generation: Generation::none(),
656 2 : shard: ShardIndex::unsharded()
657 2 : }),
658 2 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
659 2 : file_size: 9007199254741001,
660 2 : generation: Generation::none(),
661 2 : shard: ShardIndex::unsharded()
662 2 : })
663 2 : ]),
664 2 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
665 2 : metadata: TimelineMetadata::new(
666 2 : Lsn::from_str("0/16960E8").unwrap(),
667 2 : Some(Lsn::from_str("0/1696070").unwrap()),
668 2 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
669 2 : Lsn::INVALID,
670 2 : Lsn::from_str("0/1696070").unwrap(),
671 2 : Lsn::from_str("0/1696070").unwrap(),
672 2 : 14,
673 2 : ).with_recalculated_checksum().unwrap(),
674 2 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
675 2 : archived_at: Some(parse_naive_datetime("2023-04-29T09:00:00.123000000")),
676 2 : lineage: Default::default(),
677 2 : last_aux_file_policy: Default::default(),
678 2 : };
679 2 :
680 2 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
681 2 : assert_eq!(part, expected);
682 2 : }
683 :
684 16 : fn parse_naive_datetime(s: &str) -> NaiveDateTime {
685 16 : chrono::NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S.%f").unwrap()
686 16 : }
687 : }