Commit fcf1c87

catalog-protos: check hashes of object definitions files
This commit changes the existing hash-checking logic, which ensures that the versioned proto files never change, to check the new Rust source files instead.
1 parent: d5db917

2 files changed (+82, -29 lines)

src/catalog-protos/build.rs

Lines changed: 28 additions & 29 deletions
@@ -9,47 +9,46 @@
 
 use std::collections::BTreeMap;
 use std::fs;
-use std::io::{BufReader, Write};
+use std::io::Write;
+use std::path::PathBuf;
 
 use anyhow::Context;
 use md5::{Digest, Md5};
 use serde::{Deserialize, Serialize};
 
-/// The path of a protobuf file and its [`md5`] hash.
+/// The path of an object definition file and its [`md5`] hash.
 ///
 /// We store a hash of all the files to make sure they don't accidentally change, which would
 /// invalidate our snapshotted types, and could silently introduce bugs.
 #[derive(Debug, Clone, Deserialize, Serialize)]
-struct ProtoHash {
+struct ObjectsHash {
     name: String,
     md5: String,
 }
 
-const PROTO_DIRECTORY: &str = "protos";
-const PROTO_HASHES: &str = "protos/hashes.json";
+const OBJECTS_HASHES: &str = "objects_hashes.json";
 
 fn main() -> anyhow::Result<()> {
-    println!("cargo:rerun-if-changed={PROTO_DIRECTORY}");
+    let crate_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
 
     // Read in the persisted hashes from disk.
-    let hashes = fs::File::open(PROTO_HASHES).context("opening proto hashes")?;
-    let reader = BufReader::new(&hashes);
-    let hashes: Vec<ProtoHash> = serde_json::from_reader(reader)?;
+    let hashes_path = crate_root.join(OBJECTS_HASHES);
+    let hashes_json = fs::read_to_string(&hashes_path)?;
+    let hashes: Vec<ObjectsHash> = serde_json::from_str(&hashes_json)?;
     let mut persisted: BTreeMap<String, String> =
         hashes.into_iter().map(|e| (e.name, e.md5)).collect();
 
-    // Discover all of the protobuf files on disk.
-    let protos: BTreeMap<String, String> = fs::read_dir(PROTO_DIRECTORY)?
+    // Discover all of the object definition files on disk.
+    let src_dir = crate_root.join("src");
+    let objects: BTreeMap<String, String> = fs::read_dir(src_dir)?
         // If we fail to read one file, fail everything.
         .collect::<Result<Vec<_>, _>>()?
         .into_iter()
-        // Filter to only files with the .proto extension.
+        // Filter to only files of the form `objects*.rs`.
         .filter(|entry| {
-            entry
-                .path()
-                .extension()
-                .map(|e| e.to_string_lossy().contains("proto"))
-                .unwrap_or(false)
+            let name = entry.file_name();
+            let s = name.to_string_lossy();
+            s.starts_with("objects") && s.ends_with(".rs")
         })
         .map(|file| {
             let path = file.path();
@@ -71,34 +70,34 @@ fn main() -> anyhow::Result<()> {
         })
         .collect();
 
-    // After validating our hashes we'll re-write the file if any new protos
+    // After validating our hashes we'll re-write the file if any new object definitions
     // have been added.
-    let mut to_persist: Vec<ProtoHash> = Vec::new();
+    let mut to_persist: Vec<ObjectsHash> = Vec::new();
     let mut any_new = false;
 
     // Check the persisted hashes against what we just read in from disk.
-    for (name, hash) in protos {
+    for (name, hash) in objects {
         match persisted.remove(&name) {
             // Hashes have changed!
             Some(og_hash) if hash != og_hash => {
                 anyhow::bail!(error_message(og_hash, hash, name));
             }
-            // Found a proto file on disk that we didn't have persisted, we'll just persist it.
+            // Found an objects file on disk that we didn't have persisted, we'll just persist it.
            None => {
-                to_persist.push(ProtoHash { name, md5: hash });
+                to_persist.push(ObjectsHash { name, md5: hash });
                 any_new = true;
             }
             // We match!
-            Some(_) => to_persist.push(ProtoHash { name, md5: hash }),
+            Some(_) => to_persist.push(ObjectsHash { name, md5: hash }),
         }
     }
 
-    // Check if there are any proto files we should have had hashes for, but didn't exist.
+    // Check if there are any objects files we should have had hashes for, but didn't exist.
     if !persisted.is_empty() {
         anyhow::bail!("Have persisted hashes, but no files on disk? {persisted:#?}");
     }
 
-    // Write the hashes back out to disk if and only if there are new protos. We
+    // Write the hashes back out to disk if and only if there are new object definitions. We
     // don't do this unconditionally or we'll get stuck in a rebuild loop:
     // executing this build script will change the mtime on the hashes file,
     // which will force the next compile to rebuild the crate, even if nothing
@@ -107,7 +106,7 @@ fn main() -> anyhow::Result<()> {
         let mut file = fs::File::options()
            .write(true)
            .truncate(true)
-           .open(PROTO_HASHES)
+           .open(hashes_path)
            .context("opening hashes file to write")?;
         serde_json::to_writer_pretty(&mut file, &to_persist).context("persisting hashes")?;
         write!(&mut file, "\n").context("writing newline")?;
@@ -198,12 +197,12 @@ fn main() -> anyhow::Result<()> {
 
 /// A (hopefully) helpful error message that describes what to do when the hashes differ.
 fn error_message(og_hash: String, hash: String, filename: String) -> String {
-    let title = "Hashes changed for the persisted protobuf files!";
+    let title = "Hashes changed for the persisted object definition files!";
     let body1 = format!(
-        "If you changed '{filename}' without first making a snapshot, then you need to copy '{filename}' and rename it with a suffix like '_vX.proto'."
+        "If you changed '{filename}' without first making a snapshot, then you need to copy '{filename}' and rename it with a suffix like '_vX.rs'."
     );
     let body2 = format!(
-        "Otherwise you can update the hash for '{filename}' in '{PROTO_HASHES}' to be '{hash}'."
+        "Otherwise you can update the hash for '{filename}' in '{OBJECTS_HASHES}' to be '{hash}'."
     );
     let hashes = format!("persisted_hash({og_hash}) != current_hash({hash})\nFile: {filename}");

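The diff above elides the body of the `.map(|file| { ... })` closure between the first two hunks, where each matching file is read and hashed into the `(name, hash)` pairs collected into `objects`. Below is a minimal sketch of what that step amounts to, assuming the RustCrypto `md-5` crate behind the `use md5::{Digest, Md5}` import; `hash_file` is a hypothetical helper name for illustration, not code from the commit.

```rust
// Hypothetical sketch of the elided per-file hashing step. Not the exact code
// from build.rs; it only illustrates how a `(file_name, md5_hex)` pair could
// be produced for one `objects*.rs` file.
use std::fs;
use std::path::Path;

use md5::{Digest, Md5};

fn hash_file(path: &Path) -> anyhow::Result<(String, String)> {
    let name = path
        .file_name()
        .map(|n| n.to_string_lossy().into_owned())
        .unwrap_or_default();
    let contents = fs::read(path)?;
    let mut hasher = Md5::new();
    hasher.update(&contents);
    // Render the 16-byte digest as lowercase hex, matching the entries
    // stored in objects_hashes.json.
    let md5 = hasher
        .finalize()
        .iter()
        .map(|byte| format!("{byte:02x}"))
        .collect();
    Ok((name, md5))
}
```

In the build script, pairs of this shape are what get collected into the `objects` map and compared against the persisted hashes.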
src/catalog-protos/objects_hashes.json

Lines changed: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
+[
+  {
+    "name": "objects.rs",
+    "md5": "26ae04e683a5462c3998d8f9d8d6a0fa"
+  },
+  {
+    "name": "objects_v67.rs",
+    "md5": "6f9b22cb3147dd2a685fe27b4cd016ee"
+  },
+  {
+    "name": "objects_v68.rs",
+    "md5": "d218ef6aabce8dc418ffde99d27dec03"
+  },
+  {
+    "name": "objects_v69.rs",
+    "md5": "55cb125ee8c1348acc1e129f41903931"
+  },
+  {
+    "name": "objects_v70.rs",
+    "md5": "767b231f11679065126c8d83ae6a763b"
+  },
+  {
+    "name": "objects_v71.rs",
+    "md5": "767b231f11679065126c8d83ae6a763b"
+  },
+  {
+    "name": "objects_v72.rs",
+    "md5": "767b231f11679065126c8d83ae6a763b"
+  },
+  {
+    "name": "objects_v73.rs",
+    "md5": "6e2c445773820d0579ff3d66f1fac7e8"
+  },
+  {
+    "name": "objects_v74.rs",
+    "md5": "b87d99e30098642f98b0bb1927516baa"
+  },
+  {
+    "name": "objects_v75.rs",
+    "md5": "67a4a7da3e3f7cb1fd01963d5b118b01"
+  },
+  {
+    "name": "objects_v76.rs",
+    "md5": "26ae04e683a5462c3998d8f9d8d6a0fa"
+  },
+  {
+    "name": "objects_v77.rs",
+    "md5": "26ae04e683a5462c3998d8f9d8d6a0fa"
+  },
+  {
+    "name": "objects_v78.rs",
+    "md5": "26ae04e683a5462c3998d8f9d8d6a0fa"
+  }
+]
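Each entry above deserializes into the `ObjectsHash` struct from `build.rs`. As an illustration only (the struct fields are copied from the diff, but the standalone `main` and the hard-coded path are assumptions made for this example), reading the file back looks roughly like:

```rust
// Illustrative only: read objects_hashes.json into the same shape build.rs
// uses and index it by file name, as the build script does before comparing.
use std::collections::BTreeMap;
use std::fs;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ObjectsHash {
    name: String,
    md5: String,
}

fn main() -> anyhow::Result<()> {
    // Path is relative to the repository root; adjust as needed.
    let json = fs::read_to_string("src/catalog-protos/objects_hashes.json")?;
    let hashes: Vec<ObjectsHash> = serde_json::from_str(&json)?;
    let by_name: BTreeMap<String, String> =
        hashes.into_iter().map(|h| (h.name, h.md5)).collect();
    println!("{} persisted hashes", by_name.len());
    println!("objects_v78.rs -> {:?}", by_name.get("objects_v78.rs"));
    Ok(())
}
```

The two-space indentation and trailing newline match what the build script itself produces when it rewrites the file via `serde_json::to_writer_pretty` followed by `write!(&mut file, "\n")`.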
