Mirror of https://github.com/mii443/usls.git, synced 2025-08-22 15:45:41 +00:00
chore(deps): Bump dependencies (#95)

* bump ureq to 3
* bump rand to 0.9
* cargo fmt

---------

Co-authored-by: jamjamjon <xxyydzml@outlook.com>
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -20,8 +20,9 @@ ndarray = { version = "0.16.1", features = ["rayon", "serde"] }
 indicatif = { version = "0.17.11" }
 log = "0.4.26"
 minifb = { version = "0.28.0" }
-rand = { version = "0.8.5" }
-ureq = { version = "2", default-features = true, features = [ "socks-proxy" ] }
+rand = { version = "0.9" }
+http = "1.3"
+ureq = { version = "3", default-features = true, features = ["socks-proxy"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 rayon = { version = "1.10.0" }
@@ -32,17 +33,17 @@ geo = "0.30.0"
 chrono = "0.4.40"
 regex = "1.11.1"
 sha2 = "0.10.8"
 tempfile = "3.19.1"
 video-rs = { version = "0.10.3", features = ["ndarray"], optional = true }
-fast_image_resize = { version = "5.1.2", features = ["image"]}
+fast_image_resize = { version = "5.1.2", features = ["image"] }
 ndarray-npy = "0.9.1"
 half = { version = "2.3.1" }
 prost = "0.13.5"
-ort = { version = "2.0.0-rc.9", default-features = false, optional = true , features = [
+ort = { version = "2.0.0-rc.9", default-features = false, optional = true, features = [
     "ndarray",
     "copy-dylibs",
-    "half"
-]}
+    "half",
+] }
 tokenizers = { version = "0.21.1" }
 paste = "1.0.15"
 
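The new http = "1.3" entry goes hand in hand with the ureq bump: ureq 3 exposes responses as http::Response<ureq::Body>, so header names and values are now the typed ones from the http crate rather than plain strings. A minimal sketch of what that looks like on the consumer side (the helper name is illustrative, not code from this repository):

// Sketch, not repository code: read Content-Length from a ureq 3 response.
// ureq 3 returns `http::Response<ureq::Body>`, hence the direct `http` dependency.
fn remote_file_size(url: &str) -> anyhow::Result<Option<u64>> {
    let resp = ureq::get(url).call()?;
    let size = resp
        .headers()                            // http::HeaderMap
        .get(http::header::CONTENT_LENGTH)    // Option<&http::HeaderValue>
        .and_then(|v| v.to_str().ok())        // Option<&str>
        .and_then(|s| s.parse::<u64>().ok()); // Option<u64>
    Ok(size)
}

In ureq 2 the same lookup was a single resp.header("Content-Length") call, which is why both Content-Length call sites in Hub change below.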
@@ -197,8 +197,9 @@ impl Hub {
 
                 pack = pack.with_url(s).with_tag(&tag_).with_file_name(&file_name_);
                 if let Some(n) = retry!(self.max_attempts, Self::fetch_get_response(s))?
-                    .header("Content-Length")
-                    .and_then(|s| s.parse::<u64>().ok())
+                    .headers()
+                    .get(http::header::CONTENT_LENGTH)
+                    .and_then(|v| v.to_str().ok()?.parse::<u64>().ok())
                 {
                     pack = pack.with_file_size(n);
                 }
@@ -242,14 +243,20 @@ impl Hub {
                     } else {
                         for f_ in release.assets.iter() {
                             if f_.name.as_str() == file_name_ {
-                                pack = pack.with_url(&f_.browser_download_url).with_tag(tag_).with_file_name(file_name_).with_file_size(f_.size);
+                                pack = pack
+                                    .with_url(&f_.browser_download_url)
+                                    .with_tag(tag_)
+                                    .with_file_name(file_name_)
+                                    .with_file_size(f_.size);
                                 break;
                             }
                         }
                     }
                 }
 
-                self.to.crate_dir_default_with_subs(&[tag_])?.join(file_name_)
+                self.to
+                    .crate_dir_default_with_subs(&[tag_])?
+                    .join(file_name_)
             }
         }
         _ => anyhow::bail!(
@@ -265,7 +272,8 @@ impl Hub {
         if saveout.is_file() {
             match pack.file_size {
                 None => {
-                    log::warn!("Failed to retrieve the remote file size. \
+                    log::warn!(
+                        "Failed to retrieve the remote file size. \
                     Download will be skipped, which may cause issues. \
                     Please verify your network connection or ensure the local file is valid and complete."
                     );
@@ -315,7 +323,8 @@ impl Hub {
     fn fetch_and_cache_releases(url: &str, cache_path: &Path) -> Result<String> {
         let response = retry!(3, Self::fetch_get_response(url))?;
         let body = response
-            .into_string()
+            .into_body()
+            .read_to_string()
             .context("Failed to read response body")?;
 
         // Ensure cache directory exists
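As the hunk above shows, reading a response body as text also changes: ureq 2's Response::into_string() becomes a two-step into_body() followed by read_to_string() on ureq::Body. A small sketch, assuming a plain-text endpoint (the function name is a placeholder):

// Sketch: fetch a small text document with ureq 3.
fn fetch_text(url: &str) -> anyhow::Result<String> {
    let text = ureq::get(url)
        .call()?            // http::Response<ureq::Body>
        .into_body()        // ureq::Body
        .read_to_string()?; // String (ureq may apply a default body-size limit here)
    Ok(text)
}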
@@ -392,17 +401,18 @@ impl Hub {
     ) -> Result<()> {
         let resp = Self::fetch_get_response(src)?;
         let ntotal = resp
-            .header("Content-Length")
-            .and_then(|s| s.parse::<u64>().ok())
+            .headers()
+            .get(http::header::CONTENT_LENGTH)
+            .and_then(|v| v.to_str().ok()?.parse::<u64>().ok())
             .context("Content-Length header is missing or invalid")?;
         let pb = crate::build_progress_bar(
             ntotal,
             "Fetching",
             Some(message.unwrap_or_default()),
-            "{prefix:.cyan.bold} {msg} |{bar}| ({percent_precise}%, {binary_bytes}/{binary_total_bytes}, {binary_bytes_per_sec})"
+            "{prefix:.cyan.bold} {msg} |{bar}| ({percent_precise}%, {binary_bytes}/{binary_total_bytes}, {binary_bytes_per_sec})",
         )?;
 
-        let mut reader = resp.into_reader();
+        let mut reader = resp.into_body().into_reader();
         let mut buffer = [0; 2048];
         let mut downloaded_bytes = 0usize;
         let mut file = std::fs::File::create(&dst)
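The streaming path follows the same pattern: ureq 2's resp.into_reader() becomes resp.into_body().into_reader(), which still hands back a std::io::Read. A stripped-down sketch of a download loop in that shape, with the progress bar omitted (names and buffer size are illustrative):

use std::io::{Read, Write};

// Sketch: stream a ureq 3 response body to disk in fixed-size chunks.
fn download_to(url: &str, dst: &std::path::Path) -> anyhow::Result<u64> {
    let resp = ureq::get(url).call()?;
    let mut reader = resp.into_body().into_reader(); // impl std::io::Read
    let mut file = std::fs::File::create(dst)?;
    let mut buffer = [0u8; 2048];
    let mut downloaded = 0u64;
    loop {
        let n = reader.read(&mut buffer)?;
        if n == 0 {
            break; // EOF
        }
        file.write_all(&buffer[..n])?;
        downloaded += n as u64;
    }
    Ok(downloaded)
}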
@@ -433,8 +443,12 @@ impl Hub {
         Ok(())
     }
 
-    fn fetch_get_response(url: &str) -> anyhow::Result<ureq::Response> {
-        let agent = ureq::AgentBuilder::new().try_proxy_from_env(true).build();
+    fn fetch_get_response(url: &str) -> anyhow::Result<http::Response<ureq::Body>> {
+        let config = ureq::Agent::config_builder()
+            .proxy(ureq::Proxy::try_from_env())
+            .build();
+        let agent = ureq::Agent::new_with_config(config);
+
         let response = agent
             .get(url)
             .call()
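Agent construction is the other structural change in Hub: ureq 2's AgentBuilder is gone, replaced by a config builder plus Agent::new_with_config, and Proxy::try_from_env() now returns an Option<Proxy> that is handed to the config directly. A sketch of building such an agent and issuing a GET with it, mirroring the new fetch_get_response above (the wrapper function itself is illustrative):

// Sketch: ureq 3 agent that honours proxy-related environment variables.
fn proxied_get(url: &str) -> anyhow::Result<http::Response<ureq::Body>> {
    let config = ureq::Agent::config_builder()
        .proxy(ureq::Proxy::try_from_env()) // Option<ureq::Proxy>
        .build();
    let agent = ureq::Agent::new_with_config(config);
    Ok(agent.get(url).call()?)
}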
@@ -1,7 +1,7 @@
 use aksr::Builder;
 use anyhow::Result;
 use ndarray::{s, Axis};
-use rand::prelude::*;
+use rand::{prelude::*, rng};
 
 use crate::{
     elapsed, Config, DynConf, Engine, Image, Mask, Ops, Polygon, Processor, SamPrompt, Ts, Xs, X, Y,
@@ -259,8 +259,8 @@ impl SAM {
             };
 
             // contours
-            let mut rng = thread_rng();
-            let id = rng.gen_range(0..20);
+            let mut rng = rng();
+            let id = rng.random_range(0..20);
             let mask = Mask::new(&luma, image_width, image_height)?.with_id(id);
             if self.find_contours {
                 for polygon in mask.polygons().into_iter() {
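The rand 0.9 changes are mechanical but touch every call site: thread_rng() becomes rand::rng(), gen_range becomes random_range, and the distributions module is renamed distr. A compact sketch of the new spellings used throughout this commit (the values are arbitrary):

use rand::Rng;

// Sketch: rand 0.9 idioms replacing thread_rng()/gen_range().
fn pick_mask_id_and_color() -> (u32, (u8, u8, u8)) {
    let mut rng = rand::rng();        // was rand::thread_rng()
    let id = rng.random_range(0..20); // was rng.gen_range(0..20)
    let rgb = (
        rng.random_range(0..=255),
        rng.random_range(0..=255),
        rng.random_range(0..=255),
    );
    (id, rgb)
}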
@@ -1,5 +1,5 @@
 use anyhow::Result;
-use rand::distributions::{Distribution, WeightedIndex};
+use rand::distr::{weighted::WeightedIndex, Distribution};
 
 #[derive(Debug, Clone)]
 pub struct LogitsSampler {
@@ -76,7 +76,7 @@ impl LogitsSampler {
         let choices: Vec<usize> = candidates.iter().map(|&(idx, _)| idx).collect();
         let probs: Vec<f32> = candidates.iter().map(|&(_, prob)| prob).collect();
         let dist = WeightedIndex::new(probs)?;
-        let mut rng = rand::thread_rng();
+        let mut rng = rand::rng();
         let token_id = choices[dist.sample(&mut rng)];
         Ok(token_id as u32)
     }
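WeightedIndex moved as part of the same rename: rand::distributions is now rand::distr, with the weighted types under rand::distr::weighted. A self-contained sketch of weighted sampling in the new layout, in the same spirit as LogitsSampler (the weights are made up):

use rand::distr::{weighted::WeightedIndex, Distribution};

// Sketch: sample an index in proportion to its weight with rand 0.9.
fn sample_weighted_index() -> anyhow::Result<usize> {
    let probs = [0.1_f32, 0.2, 0.7];
    let dist = WeightedIndex::new(probs)?; // errors on empty or invalid weights
    let mut rng = rand::rng();             // was rand::thread_rng()
    Ok(dist.sample(&mut rng))
}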
@@ -126,11 +126,11 @@ fn format_bytes_internal(
 }
 
 pub fn generate_random_string(length: usize) -> String {
-    use rand::{distributions::Alphanumeric, thread_rng, Rng};
+    use rand::{distr::Alphanumeric, rng, Rng};
     if length == 0 {
         return String::new();
     }
-    let rng = thread_rng();
+    let rng = rng();
     let mut result = String::with_capacity(length);
     result.extend(rng.sample_iter(&Alphanumeric).take(length).map(char::from));
     result
@@ -165,13 +165,13 @@ impl Color {
     }
 
     pub fn palette_rand(n: usize) -> Vec<Self> {
-        let mut rng = rand::thread_rng();
+        let mut rng = rand::rng();
        let xs: Vec<(u8, u8, u8)> = (0..n)
             .map(|_| {
                 (
-                    rng.gen_range(0..=255),
-                    rng.gen_range(0..=255),
-                    rng.gen_range(0..=255),
+                    rng.random_range(0..=255),
+                    rng.random_range(0..=255),
+                    rng.random_range(0..=255),
                 )
             })
             .collect();