Line data Source code
1 : //! In-memory index to track the tenant files on the remote storage.
2 : //!
3 : //! Able to restore itself from the storage index parts, which are located in every timeline's remote directory and contain all data about
4 : //! the remote timeline layers and their metadata.
5 :
6 : use std::collections::HashMap;
7 :
8 : use chrono::NaiveDateTime;
9 : use pageserver_api::models::AuxFilePolicy;
10 : use serde::{Deserialize, Serialize};
11 : use utils::id::TimelineId;
12 :
13 : use crate::tenant::metadata::TimelineMetadata;
14 : use crate::tenant::storage_layer::LayerName;
15 : use crate::tenant::Generation;
16 : use pageserver_api::shard::ShardIndex;
17 :
18 : use utils::lsn::Lsn;
19 :
20 : /// In-memory representation of an `index_part.json` file
21 : ///
22 : /// Contains the data about all of the timeline's files present remotely, as well as the timeline's metadata.
23 : ///
24 : /// This type needs to be backwards and forwards compatible. When changing the fields,
25 : /// remember to add a test case for the changed version.
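     : ///
     : /// An abbreviated sketch of the serialized form (illustrative only, modeled on the test
     : /// fixtures at the bottom of this file; most optional fields and the full `metadata` object
     : /// are elided):
     : ///
     : /// ```json
     : /// {
     : ///     "version": 9,
     : ///     "layer_metadata": {
     : ///         "<layer file name>": { "file_size": 25600000, "generation": 1 }
     : ///     },
     : ///     "disk_consistent_lsn": "0/16960E8",
     : ///     "metadata": { "disk_consistent_lsn": "0/16960E8", "pg_version": 14 }
     : /// }
     : /// ```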
26 8640 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
27 : pub struct IndexPart {
28 : /// Debugging aid describing the version of this type.
29 : #[serde(default)]
30 : version: usize,
31 :
32 : #[serde(default)]
33 : #[serde(skip_serializing_if = "Option::is_none")]
34 : pub deleted_at: Option<NaiveDateTime>,
35 :
36 : #[serde(default)]
37 : #[serde(skip_serializing_if = "Option::is_none")]
38 : pub archived_at: Option<NaiveDateTime>,
39 :
40 : /// Metadata keyed by layer file name; an entry can exist whether the layer file itself is present or missing.
41 : ///
42 : /// Older versions of `IndexPart` may lack this property, or store only a subset of the metadata
43 : /// that the latest version stores.
44 : pub layer_metadata: HashMap<LayerName, LayerFileMetadata>,
45 :
46 : /// Because the legacy serialized "metadata" field is hard to eyeball, we copied its
47 : /// "disk_consistent_lsn" out here. After version 7 this is no longer needed, but the name cannot be
48 : /// reused.
49 : pub(super) disk_consistent_lsn: Lsn,
50 :
51 : // TODO: rename as "metadata" next week, keep the alias = "metadata_bytes", bump version.
52 : // Adding the `alias = "metadata"` was forgotten in #7693, so we have to use
53 : // `rename = "metadata_bytes"` for backwards compatibility.
54 : #[serde(
55 : rename = "metadata_bytes",
56 : alias = "metadata",
57 : with = "crate::tenant::metadata::modern_serde"
58 : )]
59 : pub metadata: TimelineMetadata,
60 :
61 : #[serde(default)]
62 : pub(crate) lineage: Lineage,
63 :
64 : #[serde(skip_serializing_if = "Option::is_none", default)]
65 : pub(crate) gc_blocking: Option<GcBlocking>,
66 :
67 : /// Describes the kind of aux files stored in the timeline.
68 : ///
69 : /// The value is modified during file ingestion: the latest desired value communicated via the tenant config is applied if it is acceptable.
70 : /// A V1 setting after V2 files have been committed is not accepted.
71 : ///
72 : /// None means no aux files had been written to the storage before the point
73 : /// when this flag was introduced.
74 : #[serde(skip_serializing_if = "Option::is_none", default)]
75 : pub(crate) last_aux_file_policy: Option<AuxFilePolicy>,
76 : }
77 :
78 : impl IndexPart {
79 : /// When adding or modifying any parts of `IndexPart`, increment the version so that it can be
80 : /// used to recognize which format a given `index_part.json` was written with.
81 : ///
82 : /// Version is currently informative only.
83 : /// Version history:
84 : /// - 2: added `deleted_at`
85 : /// - 3: no longer deserialize `timeline_layers` (serialized format is the same, but timeline_layers
86 : /// is always generated from the keys of `layer_metadata`)
87 : /// - 4: timeline_layers is fully removed.
88 : /// - 5: lineage was added
89 : /// - 6: last_aux_file_policy is added.
90 : /// - 7: metadata_bytes is no longer written, but still read
91 : /// - 8: added `archived_at`
92 : /// - 9: +gc_blocking
93 : const LATEST_VERSION: usize = 9;
94 :
95 : // Versions we may see when reading from a bucket.
96 : pub const KNOWN_VERSIONS: &'static [usize] = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
97 :
98 : pub const FILE_NAME: &'static str = "index_part.json";
99 :
100 1260 : pub(crate) fn empty(metadata: TimelineMetadata) -> Self {
101 1260 : IndexPart {
102 1260 : version: Self::LATEST_VERSION,
103 1260 : layer_metadata: Default::default(),
104 1260 : disk_consistent_lsn: metadata.disk_consistent_lsn(),
105 1260 : metadata,
106 1260 : deleted_at: None,
107 1260 : archived_at: None,
108 1260 : lineage: Default::default(),
109 1260 : gc_blocking: None,
110 1260 : last_aux_file_policy: None,
111 1260 : }
112 1260 : }
113 :
114 0 : pub fn version(&self) -> usize {
115 0 : self.version
116 0 : }
117 :
118 : /// If you want this under normal operations, read it from self.metadata:
119 : /// this method is just for the scrubber to use when validating an index.
120 0 : pub fn duplicated_disk_consistent_lsn(&self) -> Lsn {
121 0 : self.disk_consistent_lsn
122 0 : }
123 :
124 60 : pub fn from_s3_bytes(bytes: &[u8]) -> Result<Self, serde_json::Error> {
125 60 : serde_json::from_slice::<IndexPart>(bytes)
126 60 : }
127 :
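     : /// Serializes the whole `IndexPart` into the JSON bytes stored as `index_part.json`.
     : ///
     : /// A minimal round-trip sketch (assumes the test-only [`IndexPart::example`] helper below,
     : /// hence `ignore`):
     : ///
     : /// ```ignore
     : /// let part = IndexPart::example();
     : /// let bytes = part.to_s3_bytes().unwrap();
     : /// assert_eq!(IndexPart::from_s3_bytes(&bytes).unwrap(), part);
     : /// ```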
128 4236 : pub fn to_s3_bytes(&self) -> serde_json::Result<Vec<u8>> {
129 4236 : serde_json::to_vec(self)
130 4236 : }
131 :
132 : #[cfg(test)]
133 36 : pub(crate) fn example() -> Self {
134 36 : Self::empty(TimelineMetadata::example())
135 36 : }
136 :
137 36 : pub(crate) fn last_aux_file_policy(&self) -> Option<AuxFilePolicy> {
138 36 : self.last_aux_file_policy
139 36 : }
140 : }
141 :
142 : /// Metadata gathered for each of the layer files.
143 : ///
144 : /// Fields have to be `Option`s (or carry serde defaults) because remote [`IndexPart`]s can be from a different version, which
145 : /// might have less or more metadata depending on whether we are upgrading or rolling back an upgrade.
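     : ///
     : /// Serialized shape, as seen in the test fixtures below: `{"file_size": 23289856, "generation": 1}`;
     : /// `generation` and `shard` are omitted from the JSON when they have their default values.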
146 37980 : #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
147 : pub struct LayerFileMetadata {
148 : pub file_size: u64,
149 :
150 : #[serde(default = "Generation::none")]
151 : #[serde(skip_serializing_if = "Generation::is_none")]
152 : pub generation: Generation,
153 :
154 : #[serde(default = "ShardIndex::unsharded")]
155 : #[serde(skip_serializing_if = "ShardIndex::is_unsharded")]
156 : pub shard: ShardIndex,
157 : }
158 :
159 : impl LayerFileMetadata {
160 7254 : pub fn new(file_size: u64, generation: Generation, shard: ShardIndex) -> Self {
161 7254 : LayerFileMetadata {
162 7254 : file_size,
163 7254 : generation,
164 7254 : shard,
165 7254 : }
166 7254 : }
167 : }
168 :
169 : /// Limited history of earlier ancestors.
170 : ///
171 : /// A timeline can have more than one earlier ancestor, in the rare case that it was repeatedly
172 : /// reparented by having a later timeline be detached from its ancestor.
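     : ///
     : /// Serialized shape, as seen in the version 5 test fixture below (all fields are optional):
     : /// `{"reparenting_history": ["<timeline id>"], "original_ancestor": ["<timeline id>", "<lsn>", "<timestamp>"]}`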
173 102 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)]
174 : pub(crate) struct Lineage {
175 : /// Has the `reparenting_history` been truncated to [`Lineage::REMEMBER_AT_MOST`].
176 : #[serde(skip_serializing_if = "is_false", default)]
177 : reparenting_history_truncated: bool,
178 :
179 : /// Earlier ancestors, truncated when [`Self::reparenting_history_truncated`] is set.
180 : ///
181 : /// These are stored in case we want to support WAL based DR on the timeline. There can be many
182 : /// of these and at most one [`Self::original_ancestor`]. There cannot be more reparentings
183 : /// after [`Self::original_ancestor`] has been set.
184 : #[serde(skip_serializing_if = "Vec::is_empty", default)]
185 : reparenting_history: Vec<TimelineId>,
186 :
187 : /// The ancestor this timeline has been detached from, and when.
188 : ///
189 : /// If you are adding support for detaching from a hierarchy, consider changing the ancestry
190 : /// into a `Vec<(TimelineId, Lsn)>` to be a path instead.
191 : // FIXME: this is insufficient even for a path of two timelines for future WAL recovery
192 : // purposes:
193 : //
194 : // Assume an "old main" which has received most of the WAL and has a branch "new main",
195 : // starting a bit before "old main"'s last_record_lsn. The current version works fine,
196 : // because we will know to replay WAL and branch at the recorded Lsn to do WAL recovery.
197 : //
198 : // Then assume "new main" similarly receives a branch right before its last_record_lsn,
199 : // "new new main". The current implementation would just store ("new main", ancestor_lsn, _)
200 : // here. However, we cannot recover from WAL using only that information; we would need the
201 : // whole ancestry here:
202 : //
203 : // ```json
204 : // [
205 : // ["old main", ancestor_lsn("new main"), _],
206 : // ["new main", ancestor_lsn("new new main"), _]
207 : // ]
208 : // ```
209 : #[serde(skip_serializing_if = "Option::is_none", default)]
210 : original_ancestor: Option<(TimelineId, Lsn, NaiveDateTime)>,
211 : }
212 :
213 17280 : fn is_false(b: &bool) -> bool {
214 17280 : !b
215 17280 : }
216 :
217 : impl Lineage {
218 : const REMEMBER_AT_MOST: usize = 100;
219 :
220 0 : pub(crate) fn record_previous_ancestor(&mut self, old_ancestor: &TimelineId) -> bool {
221 0 : if self.reparenting_history.last() == Some(old_ancestor) {
222 : // do not re-record it
223 0 : false
224 : } else {
225 : #[cfg(feature = "testing")]
226 : {
227 0 : let existing = self
228 0 : .reparenting_history
229 0 : .iter()
230 0 : .position(|x| x == old_ancestor);
231 0 : assert_eq!(
232 : existing, None,
233 0 : "we cannot reparent onto and off and onto the same timeline twice"
234 : );
235 : }
236 0 : let drop_oldest = self.reparenting_history.len() + 1 >= Self::REMEMBER_AT_MOST;
237 0 :
238 0 : self.reparenting_history_truncated |= drop_oldest;
239 0 : if drop_oldest {
240 0 : self.reparenting_history.remove(0);
241 0 : }
242 0 : self.reparenting_history.push(*old_ancestor);
243 0 : true
244 : }
245 0 : }
246 :
247 : /// Returns true if anything changed.
248 0 : pub(crate) fn record_detaching(&mut self, branchpoint: &(TimelineId, Lsn)) -> bool {
249 0 : if let Some((id, lsn, _)) = self.original_ancestor {
250 0 : assert_eq!(
251 0 : &(id, lsn),
252 : branchpoint,
253 0 : "detaching attempt has to be for the same ancestor we are already detached from"
254 : );
255 0 : false
256 : } else {
257 0 : self.original_ancestor =
258 0 : Some((branchpoint.0, branchpoint.1, chrono::Utc::now().naive_utc()));
259 0 : true
260 : }
261 0 : }
262 :
263 : /// The queried lsn is most likely the basebackup lsn, and this answers the question "is it allowed
264 : /// to start a read/write primary at this lsn?".
265 : ///
266 : /// Returns true if the Lsn was previously our branch point.
267 0 : pub(crate) fn is_previous_ancestor_lsn(&self, lsn: Lsn) -> bool {
268 0 : self.original_ancestor
269 0 : .is_some_and(|(_, ancestor_lsn, _)| ancestor_lsn == lsn)
270 0 : }
271 :
272 : /// Returns true if the timeline originally had an ancestor, and no longer has one.
273 0 : pub(crate) fn is_detached_from_ancestor(&self) -> bool {
274 0 : self.original_ancestor.is_some()
275 0 : }
276 :
277 : /// Returns original ancestor timeline id and lsn that this timeline has been detached from.
278 0 : pub(crate) fn detached_previous_ancestor(&self) -> Option<(TimelineId, Lsn)> {
279 0 : self.original_ancestor.map(|(id, lsn, _)| (id, lsn))
280 0 : }
281 :
282 0 : pub(crate) fn is_reparented(&self) -> bool {
283 0 : !self.reparenting_history.is_empty()
284 0 : }
285 : }
286 :
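     : /// Records why garbage collection is currently blocked for this timeline, and since when.
     : ///
     : /// Serialized shape, as seen in the version 9 test fixture below:
     : /// `{"started_at": "2024-07-19T09:00:00.123", "reasons": ["DetachAncestor"]}`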
287 18 : #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
288 : pub(crate) struct GcBlocking {
289 : pub(crate) started_at: NaiveDateTime,
290 : pub(crate) reasons: enumset::EnumSet<GcBlockingReason>,
291 : }
292 :
293 12 : #[derive(Debug, enumset::EnumSetType, serde::Serialize, serde::Deserialize)]
294 : #[enumset(serialize_repr = "list")]
295 : pub(crate) enum GcBlockingReason {
296 : Manual,
297 : DetachAncestor,
298 : }
299 :
300 : impl GcBlocking {
301 0 : pub(super) fn started_now_for(reason: GcBlockingReason) -> Self {
302 0 : GcBlocking {
303 0 : started_at: chrono::Utc::now().naive_utc(),
304 0 : reasons: enumset::EnumSet::only(reason),
305 0 : }
306 0 : }
307 :
308 : /// Returns true if the given reason is one of the reasons why the gc is blocked.
309 0 : pub(crate) fn blocked_by(&self, reason: GcBlockingReason) -> bool {
310 0 : self.reasons.contains(reason)
311 0 : }
312 :
313 : /// Returns a version of self with the given reason.
314 0 : pub(super) fn with_reason(&self, reason: GcBlockingReason) -> Self {
315 0 : assert!(!self.blocked_by(reason));
316 0 : let mut reasons = self.reasons;
317 0 : reasons.insert(reason);
318 0 :
319 0 : Self {
320 0 : started_at: self.started_at,
321 0 : reasons,
322 0 : }
323 0 : }
324 :
325 : /// Returns a version of self without the given reason. Assumption is that if
326 : /// there are no more reasons, we can unblock the gc by returning `None`.
327 0 : pub(super) fn without_reason(&self, reason: GcBlockingReason) -> Option<Self> {
328 0 : assert!(self.blocked_by(reason));
329 :
330 0 : if self.reasons.len() == 1 {
331 0 : None
332 : } else {
333 0 : let mut reasons = self.reasons;
334 0 : assert!(reasons.remove(reason));
335 0 : assert!(!reasons.is_empty());
336 :
337 0 : Some(Self {
338 0 : started_at: self.started_at,
339 0 : reasons,
340 0 : })
341 : }
342 0 : }
343 : }
344 :
345 : #[cfg(test)]
346 : mod tests {
347 : use super::*;
348 : use std::str::FromStr;
349 : use utils::id::TimelineId;
350 :
351 : #[test]
352 6 : fn v1_indexpart_is_parsed() {
353 6 : let example = r#"{
354 6 : "version":1,
355 6 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
356 6 : "layer_metadata":{
357 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
358 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
359 6 : },
360 6 : "disk_consistent_lsn":"0/16960E8",
361 6 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
362 6 : }"#;
363 6 :
364 6 : let expected = IndexPart {
365 6 : // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
366 6 : version: 1,
367 6 : layer_metadata: HashMap::from([
368 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
369 6 : file_size: 25600000,
370 6 : generation: Generation::none(),
371 6 : shard: ShardIndex::unsharded()
372 6 : }),
373 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
374 6 : // serde_json should always parse this but this might be a double with jq for
375 6 : // example.
376 6 : file_size: 9007199254741001,
377 6 : generation: Generation::none(),
378 6 : shard: ShardIndex::unsharded()
379 6 : })
380 6 : ]),
381 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
382 6 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
383 6 : deleted_at: None,
384 6 : archived_at: None,
385 6 : lineage: Lineage::default(),
386 6 : gc_blocking: None,
387 6 : last_aux_file_policy: None,
388 6 : };
389 6 :
390 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
391 6 : assert_eq!(part, expected);
392 6 : }
393 :
394 : #[test]
395 6 : fn v1_indexpart_is_parsed_with_optional_missing_layers() {
396 6 : let example = r#"{
397 6 : "version":1,
398 6 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
399 6 : "missing_layers":["This shouldn't fail deserialization"],
400 6 : "layer_metadata":{
401 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
402 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
403 6 : },
404 6 : "disk_consistent_lsn":"0/16960E8",
405 6 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
406 6 : }"#;
407 6 :
408 6 : let expected = IndexPart {
409 6 : // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
410 6 : version: 1,
411 6 : layer_metadata: HashMap::from([
412 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
413 6 : file_size: 25600000,
414 6 : generation: Generation::none(),
415 6 : shard: ShardIndex::unsharded()
416 6 : }),
417 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
418 6 : // serde_json should always parse this but this might be a double with jq for
419 6 : // example.
420 6 : file_size: 9007199254741001,
421 6 : generation: Generation::none(),
422 6 : shard: ShardIndex::unsharded()
423 6 : })
424 6 : ]),
425 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
426 6 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
427 6 : deleted_at: None,
428 6 : archived_at: None,
429 6 : lineage: Lineage::default(),
430 6 : gc_blocking: None,
431 6 : last_aux_file_policy: None,
432 6 : };
433 6 :
434 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
435 6 : assert_eq!(part, expected);
436 6 : }
437 :
438 : #[test]
439 6 : fn v2_indexpart_is_parsed_with_deleted_at() {
440 6 : let example = r#"{
441 6 : "version":2,
442 6 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
443 6 : "missing_layers":["This shouldn't fail deserialization"],
444 6 : "layer_metadata":{
445 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
446 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
447 6 : },
448 6 : "disk_consistent_lsn":"0/16960E8",
449 6 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
450 6 : "deleted_at": "2023-07-31T09:00:00.123"
451 6 : }"#;
452 6 :
453 6 : let expected = IndexPart {
454 6 : // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
455 6 : version: 2,
456 6 : layer_metadata: HashMap::from([
457 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
458 6 : file_size: 25600000,
459 6 : generation: Generation::none(),
460 6 : shard: ShardIndex::unsharded()
461 6 : }),
462 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
463 6 : // serde_json should always parse this but this might be a double with jq for
464 6 : // example.
465 6 : file_size: 9007199254741001,
466 6 : generation: Generation::none(),
467 6 : shard: ShardIndex::unsharded()
468 6 : })
469 6 : ]),
470 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
471 6 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
472 6 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
473 6 : archived_at: None,
474 6 : lineage: Lineage::default(),
475 6 : gc_blocking: None,
476 6 : last_aux_file_policy: None,
477 6 : };
478 6 :
479 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
480 6 : assert_eq!(part, expected);
481 6 : }
482 :
483 : #[test]
484 6 : fn empty_layers_are_parsed() {
485 6 : let empty_layers_json = r#"{
486 6 : "version":1,
487 6 : "timeline_layers":[],
488 6 : "layer_metadata":{},
489 6 : "disk_consistent_lsn":"0/2532648",
490 6 : "metadata_bytes":[136,151,49,208,0,70,0,4,0,0,0,0,2,83,38,72,1,0,0,0,0,2,83,38,32,1,87,198,240,135,97,119,45,125,38,29,155,161,140,141,255,210,0,0,0,0,2,83,38,72,0,0,0,0,1,73,240,192,0,0,0,0,1,73,240,192,0,0,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
491 6 : }"#;
492 6 :
493 6 : let expected = IndexPart {
494 6 : version: 1,
495 6 : layer_metadata: HashMap::new(),
496 6 : disk_consistent_lsn: "0/2532648".parse::<Lsn>().unwrap(),
497 6 : metadata: TimelineMetadata::from_bytes(&[
498 6 : 136, 151, 49, 208, 0, 70, 0, 4, 0, 0, 0, 0, 2, 83, 38, 72, 1, 0, 0, 0, 0, 2, 83,
499 6 : 38, 32, 1, 87, 198, 240, 135, 97, 119, 45, 125, 38, 29, 155, 161, 140, 141, 255,
500 6 : 210, 0, 0, 0, 0, 2, 83, 38, 72, 0, 0, 0, 0, 1, 73, 240, 192, 0, 0, 0, 0, 1, 73,
501 6 : 240, 192, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
502 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
503 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
504 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
505 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
506 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
507 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
508 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
509 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
510 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
511 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
512 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
513 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
514 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
515 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
516 6 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
517 6 : 0, 0,
518 6 : ])
519 6 : .unwrap(),
520 6 : deleted_at: None,
521 6 : archived_at: None,
522 6 : lineage: Lineage::default(),
523 6 : gc_blocking: None,
524 6 : last_aux_file_policy: None,
525 6 : };
526 6 :
527 6 : let empty_layers_parsed = IndexPart::from_s3_bytes(empty_layers_json.as_bytes()).unwrap();
528 6 :
529 6 : assert_eq!(empty_layers_parsed, expected);
530 6 : }
531 :
532 : #[test]
533 6 : fn v4_indexpart_is_parsed() {
534 6 : let example = r#"{
535 6 : "version":4,
536 6 : "layer_metadata":{
537 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
538 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
539 6 : },
540 6 : "disk_consistent_lsn":"0/16960E8",
541 6 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
542 6 : "deleted_at": "2023-07-31T09:00:00.123"
543 6 : }"#;
544 6 :
545 6 : let expected = IndexPart {
546 6 : version: 4,
547 6 : layer_metadata: HashMap::from([
548 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
549 6 : file_size: 25600000,
550 6 : generation: Generation::none(),
551 6 : shard: ShardIndex::unsharded()
552 6 : }),
553 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
554 6 : // serde_json should always parse this but this might be a double with jq for
555 6 : // example.
556 6 : file_size: 9007199254741001,
557 6 : generation: Generation::none(),
558 6 : shard: ShardIndex::unsharded()
559 6 : })
560 6 : ]),
561 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
562 6 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
563 6 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
564 6 : archived_at: None,
565 6 : lineage: Lineage::default(),
566 6 : gc_blocking: None,
567 6 : last_aux_file_policy: None,
568 6 : };
569 6 :
570 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
571 6 : assert_eq!(part, expected);
572 6 : }
573 :
574 : #[test]
575 6 : fn v5_indexpart_is_parsed() {
576 6 : let example = r#"{
577 6 : "version":5,
578 6 : "layer_metadata":{
579 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499":{"file_size":23289856,"generation":1},
580 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619":{"file_size":1015808,"generation":1}},
581 6 : "disk_consistent_lsn":"0/15A7618",
582 6 : "metadata_bytes":[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
583 6 : "lineage":{
584 6 : "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
585 6 : "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
586 6 : }
587 6 : }"#;
588 6 :
589 6 : let expected = IndexPart {
590 6 : version: 5,
591 6 : layer_metadata: HashMap::from([
592 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499".parse().unwrap(), LayerFileMetadata {
593 6 : file_size: 23289856,
594 6 : generation: Generation::new(1),
595 6 : shard: ShardIndex::unsharded(),
596 6 : }),
597 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619".parse().unwrap(), LayerFileMetadata {
598 6 : file_size: 1015808,
599 6 : generation: Generation::new(1),
600 6 : shard: ShardIndex::unsharded(),
601 6 : })
602 6 : ]),
603 6 : disk_consistent_lsn: Lsn::from_str("0/15A7618").unwrap(),
604 6 : metadata: TimelineMetadata::from_bytes(&[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
605 6 : deleted_at: None,
606 6 : archived_at: None,
607 6 : lineage: Lineage {
608 6 : reparenting_history_truncated: false,
609 6 : reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
610 6 : original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
611 6 : },
612 6 : gc_blocking: None,
613 6 : last_aux_file_policy: None,
614 6 : };
615 6 :
616 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
617 6 : assert_eq!(part, expected);
618 6 : }
619 :
620 : #[test]
621 6 : fn v6_indexpart_is_parsed() {
622 6 : let example = r#"{
623 6 : "version":6,
624 6 : "layer_metadata":{
625 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
626 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
627 6 : },
628 6 : "disk_consistent_lsn":"0/16960E8",
629 6 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
630 6 : "deleted_at": "2023-07-31T09:00:00.123",
631 6 : "lineage":{
632 6 : "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
633 6 : "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
634 6 : },
635 6 : "last_aux_file_policy": "V2"
636 6 : }"#;
637 6 :
638 6 : let expected = IndexPart {
639 6 : version: 6,
640 6 : layer_metadata: HashMap::from([
641 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
642 6 : file_size: 25600000,
643 6 : generation: Generation::none(),
644 6 : shard: ShardIndex::unsharded()
645 6 : }),
646 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
647 6 : // serde_json should always parse this but this might be a double with jq for
648 6 : // example.
649 6 : file_size: 9007199254741001,
650 6 : generation: Generation::none(),
651 6 : shard: ShardIndex::unsharded()
652 6 : })
653 6 : ]),
654 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
655 6 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
656 6 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
657 6 : archived_at: None,
658 6 : lineage: Lineage {
659 6 : reparenting_history_truncated: false,
660 6 : reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
661 6 : original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
662 6 : },
663 6 : gc_blocking: None,
664 6 : last_aux_file_policy: Some(AuxFilePolicy::V2),
665 6 : };
666 6 :
667 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
668 6 : assert_eq!(part, expected);
669 6 : }
670 :
671 : #[test]
672 6 : fn v7_indexpart_is_parsed() {
673 6 : let example = r#"{
674 6 : "version": 7,
675 6 : "layer_metadata":{
676 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
677 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
678 6 : },
679 6 : "disk_consistent_lsn":"0/16960E8",
680 6 : "metadata": {
681 6 : "disk_consistent_lsn": "0/16960E8",
682 6 : "prev_record_lsn": "0/1696070",
683 6 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
684 6 : "ancestor_lsn": "0/0",
685 6 : "latest_gc_cutoff_lsn": "0/1696070",
686 6 : "initdb_lsn": "0/1696070",
687 6 : "pg_version": 14
688 6 : },
689 6 : "deleted_at": "2023-07-31T09:00:00.123"
690 6 : }"#;
691 6 :
692 6 : let expected = IndexPart {
693 6 : version: 7,
694 6 : layer_metadata: HashMap::from([
695 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
696 6 : file_size: 25600000,
697 6 : generation: Generation::none(),
698 6 : shard: ShardIndex::unsharded()
699 6 : }),
700 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
701 6 : file_size: 9007199254741001,
702 6 : generation: Generation::none(),
703 6 : shard: ShardIndex::unsharded()
704 6 : })
705 6 : ]),
706 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
707 6 : metadata: TimelineMetadata::new(
708 6 : Lsn::from_str("0/16960E8").unwrap(),
709 6 : Some(Lsn::from_str("0/1696070").unwrap()),
710 6 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
711 6 : Lsn::INVALID,
712 6 : Lsn::from_str("0/1696070").unwrap(),
713 6 : Lsn::from_str("0/1696070").unwrap(),
714 6 : 14,
715 6 : ).with_recalculated_checksum().unwrap(),
716 6 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
717 6 : archived_at: None,
718 6 : lineage: Default::default(),
719 6 : gc_blocking: None,
720 6 : last_aux_file_policy: Default::default(),
721 6 : };
722 6 :
723 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
724 6 : assert_eq!(part, expected);
725 6 : }
726 :
727 : #[test]
728 6 : fn v8_indexpart_is_parsed() {
729 6 : let example = r#"{
730 6 : "version": 8,
731 6 : "layer_metadata":{
732 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
733 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
734 6 : },
735 6 : "disk_consistent_lsn":"0/16960E8",
736 6 : "metadata": {
737 6 : "disk_consistent_lsn": "0/16960E8",
738 6 : "prev_record_lsn": "0/1696070",
739 6 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
740 6 : "ancestor_lsn": "0/0",
741 6 : "latest_gc_cutoff_lsn": "0/1696070",
742 6 : "initdb_lsn": "0/1696070",
743 6 : "pg_version": 14
744 6 : },
745 6 : "deleted_at": "2023-07-31T09:00:00.123",
746 6 : "archived_at": "2023-04-29T09:00:00.123"
747 6 : }"#;
748 6 :
749 6 : let expected = IndexPart {
750 6 : version: 8,
751 6 : layer_metadata: HashMap::from([
752 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
753 6 : file_size: 25600000,
754 6 : generation: Generation::none(),
755 6 : shard: ShardIndex::unsharded()
756 6 : }),
757 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
758 6 : file_size: 9007199254741001,
759 6 : generation: Generation::none(),
760 6 : shard: ShardIndex::unsharded()
761 6 : })
762 6 : ]),
763 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
764 6 : metadata: TimelineMetadata::new(
765 6 : Lsn::from_str("0/16960E8").unwrap(),
766 6 : Some(Lsn::from_str("0/1696070").unwrap()),
767 6 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
768 6 : Lsn::INVALID,
769 6 : Lsn::from_str("0/1696070").unwrap(),
770 6 : Lsn::from_str("0/1696070").unwrap(),
771 6 : 14,
772 6 : ).with_recalculated_checksum().unwrap(),
773 6 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
774 6 : archived_at: Some(parse_naive_datetime("2023-04-29T09:00:00.123000000")),
775 6 : lineage: Default::default(),
776 6 : gc_blocking: None,
777 6 : last_aux_file_policy: Default::default(),
778 6 : };
779 6 :
780 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
781 6 : assert_eq!(part, expected);
782 6 : }
783 :
784 : #[test]
785 6 : fn v9_indexpart_is_parsed() {
786 6 : let example = r#"{
787 6 : "version": 9,
788 6 : "layer_metadata":{
789 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
790 6 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
791 6 : },
792 6 : "disk_consistent_lsn":"0/16960E8",
793 6 : "metadata": {
794 6 : "disk_consistent_lsn": "0/16960E8",
795 6 : "prev_record_lsn": "0/1696070",
796 6 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
797 6 : "ancestor_lsn": "0/0",
798 6 : "latest_gc_cutoff_lsn": "0/1696070",
799 6 : "initdb_lsn": "0/1696070",
800 6 : "pg_version": 14
801 6 : },
802 6 : "gc_blocking": {
803 6 : "started_at": "2024-07-19T09:00:00.123",
804 6 : "reasons": ["DetachAncestor"]
805 6 : }
806 6 : }"#;
807 6 :
808 6 : let expected = IndexPart {
809 6 : version: 9,
810 6 : layer_metadata: HashMap::from([
811 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
812 6 : file_size: 25600000,
813 6 : generation: Generation::none(),
814 6 : shard: ShardIndex::unsharded()
815 6 : }),
816 6 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
817 6 : file_size: 9007199254741001,
818 6 : generation: Generation::none(),
819 6 : shard: ShardIndex::unsharded()
820 6 : })
821 6 : ]),
822 6 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
823 6 : metadata: TimelineMetadata::new(
824 6 : Lsn::from_str("0/16960E8").unwrap(),
825 6 : Some(Lsn::from_str("0/1696070").unwrap()),
826 6 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
827 6 : Lsn::INVALID,
828 6 : Lsn::from_str("0/1696070").unwrap(),
829 6 : Lsn::from_str("0/1696070").unwrap(),
830 6 : 14,
831 6 : ).with_recalculated_checksum().unwrap(),
832 6 : deleted_at: None,
833 6 : lineage: Default::default(),
834 6 : gc_blocking: Some(GcBlocking {
835 6 : started_at: parse_naive_datetime("2024-07-19T09:00:00.123000000"),
836 6 : reasons: enumset::EnumSet::from_iter([GcBlockingReason::DetachAncestor]),
837 6 : }),
838 6 : last_aux_file_policy: Default::default(),
839 6 : archived_at: None,
840 6 : };
841 6 :
842 6 : let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
843 6 : assert_eq!(part, expected);
844 6 : }
845 :
846 54 : fn parse_naive_datetime(s: &str) -> NaiveDateTime {
847 54 : chrono::NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S.%f").unwrap()
848 54 : }
849 : }