Sanitize album titles correctly
parent aa145f9a6d
commit 614bdc49b5
Cargo.lock (generated): 7 changes
@@ -97,6 +97,7 @@ dependencies = [
  "itertools",
  "mime",
  "protobuf",
+ "sanitise-file-name",
  "ureq",
  "webp",
 ]
@@ -604,6 +605,12 @@ dependencies = [
  "webpki",
 ]
 
+[[package]]
+name = "sanitise-file-name"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19d36299972b96b8ae7e8f04ecbf75fb41a27bf3781af00abcf57609774cb911"
+
 [[package]]
 name = "scoped_threadpool"
 version = "0.1.9"
Cargo.toml: 1 change

@@ -14,6 +14,7 @@ blurhash = "0.1.1"
 base64 = "0.13.0"
 mime = "0.3.16"
 protobuf = "3.1.0"
+sanitise-file-name = "1.0.0"
 
 [dependencies.iota-crypto]
 version = "0.13.0"
src/main.rs: 26 changes
@@ -14,6 +14,7 @@ use errors::AviaryError;
 use itertools::{Itertools, Either};
 use parse::{CreateArgs, Command, DownloadArgs};
 use ::protobuf::Message;
+use sanitise_file_name::sanitise_with_options;
 
 fn trim_url<'a>(base_url: &str, url: &'a str) -> Option<&'a str> {
     if url.starts_with(base_url) {
@@ -156,6 +157,23 @@ fn create(server: &str, args: CreateArgs) {
     }
 }
 
+const SANITIZATION_OPTIONS: sanitise_file_name::Options<fn(char) -> Option<char>> = sanitise_file_name::Options {
+    length_limit: 127,
+    reserve_extra: 0,
+    extension_cleverness: false,
+    most_fs_safe: true,
+    windows_safe: true,
+    url_safe: false,
+    normalise_whitespace: false,
+    trim_spaces_and_full_stops: true,
+    trim_more_punctuation: false,
+    remove_control_characters: true,
+    remove_reordering_characters: false,
+    replace_with: |_| Some('_'),
+    collapse_replacements: true,
+    six_measures_of_barley: "Unnamed-Album"
+};
+
 fn download(server: &str, args: DownloadArgs) {
     let mut download_buffer = Vec::with_capacity(5_000_000);
     let mut decrypt_buffer = Vec::with_capacity(5_000_000);
@@ -173,10 +191,10 @@ fn download(server: &str, args: DownloadArgs) {
         .expect("malformed index");
 
     let dest_dir: Cow<Path> = args.output.map(Cow::Owned)
-        .unwrap_or_else(|| Cow::Borrowed(
-            index.title.as_ref()
-                .map(String::as_str)
-                .unwrap_or("Unnamed-Album").as_ref()));
+        .unwrap_or_else(||
+            index.title.map(|title|
+                Cow::Owned(sanitise_with_options(&title, &SANITIZATION_OPTIONS).into()))
+            .unwrap_or(Cow::Borrowed("Unnamed-Album".as_ref())));
     fs::create_dir_all(&dest_dir).expect("Failed to create destination directory");
     for (indx, image) in index.images.into_iter().enumerate() {
         let path = dest_dir.join(format!("{indx:03}.webp"));
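
For context on what those options do at runtime, here is a minimal, hypothetical check of the configured sanitiser. It assumes it runs in the same module as the SANITIZATION_OPTIONS const added above; the sanitise_with_options call shape (&str in, String out) is taken from this commit, but the sample title and expected output are illustrative. Note that six_measures_of_barley is the crate's whimsically named fallback, used when sanitisation would otherwise produce an empty name, which is why it matches the "Unnamed-Album" default in download().

// Illustrative only: run an album title containing path separators and
// Windows-unsafe characters through the options used by download().
use sanitise_file_name::sanitise_with_options;

fn main() {
    let title = "My Album: Live / 2022?";
    // ':', '/', and '?' are replaced with '_' (replace_with), and runs of
    // replacements are collapsed (collapse_replacements: true).
    let dir_name = sanitise_with_options(title, &SANITIZATION_OPTIONS);
    println!("{dir_name}"); // e.g. "My Album_ Live _ 2022_"; the exact output
                            // depends on the crate's replacement rules
}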
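
Likewise, a small sketch isolating the new dest_dir fallback: sanitise the index title into an owned path when one exists, otherwise borrow a static "Unnamed-Album" path. dest_dir_for and its Option<String> parameter are hypothetical stand-ins for the inline expression over index.title in download(), and SANITIZATION_OPTIONS is assumed to be in scope as above.

use std::borrow::Cow;
use std::path::Path;
use sanitise_file_name::sanitise_with_options;

// Hypothetical helper mirroring the Cow-based expression in download():
// an owned, sanitised PathBuf when a title is present, a borrowed
// &'static Path ("Unnamed-Album") otherwise.
fn dest_dir_for(title: Option<String>) -> Cow<'static, Path> {
    title
        .map(|t| Cow::Owned(sanitise_with_options(&t, &SANITIZATION_OPTIONS).into()))
        .unwrap_or(Cow::Borrowed("Unnamed-Album".as_ref()))
}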