forked from sqlpage/SQLPage
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathbuild.rs
96 lines (91 loc) · 3.26 KB
/
build.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
use actix_rt::spawn;
use futures_util::StreamExt;
use libflate::gzip;
use std::collections::hash_map::DefaultHasher;
use std::fs::File;
use std::hash::Hasher;
use std::io::Read;
use std::io::{BufRead, BufReader, Write};
use std::path::{Path, PathBuf};
#[actix_rt::main]
async fn main() {
    // Re-run the build script whenever it is itself modified.
    println!("cargo:rerun-if-changed=build.rs");
    // Process every frontend asset concurrently; each task downloads
    // and inlines that asset's remote includes.
    let assets = [
        "sqlpage.js",
        "sqlpage.css",
        "tabler-icons.svg",
        "apexcharts.js",
    ];
    let tasks: Vec<_> = assets
        .iter()
        .copied()
        .map(|asset| spawn(download_deps(asset)))
        .collect();
    for task in tasks {
        task.await.unwrap();
    }
}
/// Creates a file with inlined remote files included
async fn download_deps(filename: &str) {
let path_in = format!("sqlpage/{}", filename);
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
let path_out: PathBuf = out_dir.join(filename);
// Generate outfile by reading infile and interpreting all comments
// like "/* !include https://... */" as a request to include the contents of
// the URL in the generated file.
println!("cargo:rerun-if-changed={}", path_in);
let original = File::open(path_in).unwrap();
process_input_file(&path_out, original).await;
std::fs::write(
format!("{}.filename.txt", path_out.display()),
hashed_filename(&path_out),
)
.unwrap();
}
/// Copies `original` line by line into a gzip-compressed file at `path_out`,
/// replacing every line of the form `/* !include https://... */` with the
/// body downloaded from that URL.
///
/// Panics on any I/O failure, download failure, or non-200 HTTP status —
/// acceptable here because this runs at build time and a broken asset
/// should fail the build loudly.
async fn process_input_file(path_out: &Path, original: File) {
    let client = awc::Client::default();
    // All output is streamed through a gzip encoder; the asset on disk is
    // stored compressed.
    let mut outfile = gzip::Encoder::new(File::create(path_out).unwrap()).unwrap();
    for l in BufReader::new(original).lines() {
        let line = l.unwrap();
        // Only https include directives are recognized, and only when the
        // directive is the entire line.
        if line.starts_with("/* !include https://") {
            // Strip the comment wrapper to recover the bare URL.
            let url = line
                .trim_start_matches("/* !include ")
                .trim_end_matches(" */");
            let mut resp = client.get(url).send().await.expect(
                "We need to download external frontend dependencies to build the static frontend.",
            );
            if resp.status() != 200 {
                panic!("Received {} status code from {}", resp.status(), url);
            }
            // Stream the response body chunk by chunk into the compressed
            // output instead of buffering it whole in memory.
            while let Some(b) = resp.next().await {
                let chunk = b.unwrap_or_else(|_| panic!("Failed to read data from {}", url));
                outfile
                    .write_all(&chunk)
                    .expect("Failed to write external frontend dependency to local file");
            }
            // Terminate the inlined content with a newline, mirroring the
            // line-oriented output of the non-include branch.
            outfile.write_all(b"\n").unwrap();
        } else {
            writeln!(outfile, "{}", line).unwrap();
        }
    }
    // Finish the gzip stream explicitly; dropping the encoder would not
    // surface a write error.
    outfile
        .finish()
        .as_result()
        .expect("Unable to write compressed frontend asset");
}
// Given a filename, creates a new unique filename based on the file contents
fn hashed_filename(path: &Path) -> String {
let mut file = File::open(path).unwrap();
let mut buf = [0u8; 4096];
let mut hasher = DefaultHasher::new();
loop {
let bytes_read = file
.read(&mut buf)
.unwrap_or_else(|e| panic!("error reading '{}': {}", path.display(), e));
if bytes_read == 0 {
break;
}
hasher.write(&buf[..bytes_read]);
}
let hash = hasher.finish();
format!(
"{}.{:x}.{}",
path.file_stem().unwrap().to_str().unwrap(),
hash,
path.extension().unwrap().to_str().unwrap()
)
}