Fullscreen MediaViewer refactor

- Moved media related logic into notedeck instead of the ui crate,
  since they pertain to Images/ImageCache based systems

- Made RenderableMedia owned to make it less of a nightmare
  to work with and the perf should be negligible

- Added an ImageMetadata cache to Images. This is referenced
  whenever we encounter an image so we don't have to
  redo the work all of the time

- Replaced our ad-hoc, hand(vibe?)-coded panning and zoom logic
  with the Scene widget, which is explicitly designed for
  this use case

- Extracted and detangled fullscreen media rendering from inside of note
  rendering.  We instead let the application decide what action they
  want to perform when note media is clicked on.

- We add an on_view_media action to MediaAction for the application to
  handle. The Columns app uses this to toggle the FullscreenMedia app
  option bit whenever we get a MediaAction::ViewMedias(urls).

Signed-off-by: William Casarin <jb55@jb55.com>
This commit is contained in:
William Casarin
2025-07-25 13:45:54 -07:00
parent 96ab4ee681
commit 3d18db8fd2
45 changed files with 1284 additions and 1222 deletions

3
Cargo.lock generated
View File

@@ -3423,10 +3423,12 @@ dependencies = [
"base32",
"bech32",
"bincode",
"blurhash",
"dirs",
"eframe",
"egui",
"egui-winit",
"egui_extras",
"ehttp",
"enostr",
"fluent",
@@ -3576,7 +3578,6 @@ name = "notedeck_ui"
version = "0.5.8"
dependencies = [
"bitflags 2.9.1",
"blurhash",
"eframe",
"egui",
"egui-winit",

View File

@@ -82,6 +82,8 @@ hashbrown = "0.15.2"
openai-api-rs = "6.0.3"
re_memory = "0.23.4"
oot_bitset = "0.1.1"
blurhash = "0.2.3"
[profile.small]
inherits = 'release'

View File

@@ -9,11 +9,13 @@ nostrdb = { workspace = true }
jni = { workspace = true }
url = { workspace = true }
strum = { workspace = true }
blurhash = { workspace = true }
strum_macros = { workspace = true }
dirs = { workspace = true }
enostr = { workspace = true }
nostr = { workspace = true }
egui = { workspace = true }
egui_extras = { workspace = true }
eframe = { workspace = true }
image = { workspace = true }
base32 = { workspace = true }

View File

@@ -1,4 +1,9 @@
use crate::media::gif::ensure_latest_texture_from_cache;
use crate::media::images::ImageType;
use crate::urls::{UrlCache, UrlMimes};
use crate::ImageMetadata;
use crate::ObfuscationType;
use crate::RenderableMedia;
use crate::Result;
use egui::TextureHandle;
use image::{Delay, Frame};
@@ -21,7 +26,7 @@ use tracing::warn;
#[derive(Default)]
pub struct TexturesCache {
cache: hashbrown::HashMap<String, TextureStateInternal>,
pub cache: hashbrown::HashMap<String, TextureStateInternal>,
}
impl TexturesCache {
@@ -141,6 +146,12 @@ pub enum TextureState<'a> {
Loaded(&'a mut TexturedImage),
}
impl<'a> TextureState<'a> {
pub fn is_loaded(&self) -> bool {
matches!(self, Self::Loaded(_))
}
}
impl<'a> From<&'a mut TextureStateInternal> for TextureState<'a> {
fn from(value: &'a mut TextureStateInternal) -> Self {
match value {
@@ -402,6 +413,8 @@ pub struct Images {
pub static_imgs: MediaCache,
pub gifs: MediaCache,
pub urls: UrlMimes,
/// cached imeta data
pub metadata: HashMap<String, ImageMetadata>,
pub gif_states: GifStateMap,
}
@@ -414,6 +427,7 @@ impl Images {
gifs: MediaCache::new(&path, MediaCacheType::Gif),
urls: UrlMimes::new(UrlCache::new(path.join(UrlCache::rel_dir()))),
gif_states: Default::default(),
metadata: Default::default(),
}
}
@@ -422,6 +436,58 @@ impl Images {
self.gifs.migrate_v0()
}
pub fn get_renderable_media(&mut self, url: &str) -> Option<RenderableMedia> {
Self::find_renderable_media(&mut self.urls, &self.metadata, url)
}
pub fn find_renderable_media(
urls: &mut UrlMimes,
imeta: &HashMap<String, ImageMetadata>,
url: &str,
) -> Option<RenderableMedia> {
let media_type = crate::urls::supported_mime_hosted_at_url(urls, url)?;
let obfuscation_type = match imeta.get(url) {
Some(blur) => ObfuscationType::Blurhash(blur.clone()),
None => ObfuscationType::Default,
};
Some(RenderableMedia {
url: url.to_string(),
media_type,
obfuscation_type,
})
}
pub fn latest_texture(
&mut self,
ui: &mut egui::Ui,
url: &str,
img_type: ImageType,
) -> Option<TextureHandle> {
let cache_type = crate::urls::supported_mime_hosted_at_url(&mut self.urls, url)?;
let cache_dir = self.get_cache(cache_type).cache_dir.clone();
let is_loaded = self
.get_cache_mut(cache_type)
.textures_cache
.handle_and_get_or_insert(url, || {
crate::media::images::fetch_img(&cache_dir, ui.ctx(), url, img_type, cache_type)
})
.is_loaded();
if !is_loaded {
return None;
}
let cache = match cache_type {
MediaCacheType::Image => &mut self.static_imgs,
MediaCacheType::Gif => &mut self.gifs,
};
ensure_latest_texture_from_cache(ui, url, &mut self.gif_states, &mut cache.textures_cache)
}
pub fn get_cache(&self, cache_type: MediaCacheType) -> &MediaCache {
match cache_type {
MediaCacheType::Image => &self.static_imgs,
@@ -465,3 +531,35 @@ pub struct GifState {
pub next_frame_time: Option<SystemTime>,
pub last_frame_index: usize,
}
pub struct LatestTexture {
pub texture: TextureHandle,
pub request_next_repaint: Option<SystemTime>,
}
pub fn get_render_state<'a>(
ctx: &egui::Context,
images: &'a mut Images,
cache_type: MediaCacheType,
url: &str,
img_type: ImageType,
) -> RenderState<'a> {
let cache = match cache_type {
MediaCacheType::Image => &mut images.static_imgs,
MediaCacheType::Gif => &mut images.gifs,
};
let texture_state = cache.textures_cache.handle_and_get_or_insert(url, || {
crate::media::images::fetch_img(&cache.cache_dir, ctx, url, img_type, cache_type)
});
RenderState {
texture_state,
gifs: &mut images.gif_states,
}
}
pub struct RenderState<'a> {
pub texture_state: TextureState<'a>,
pub gifs: &'a mut GifStateMap,
}

View File

@@ -23,6 +23,7 @@ impl JobPool {
pub fn new(num_threads: usize) -> Self {
let (tx, rx) = mpsc::channel::<Job>();
// TODO(jb55) why not mpmc here !???
let arc_rx = Arc::new(Mutex::new(rx));
for _ in 0..num_threads {
let arc_rx_clone = arc_rx.clone();

View File

@@ -1,6 +1,6 @@
use crate::JobPool;
use egui::TextureHandle;
use hashbrown::{hash_map::RawEntryMut, HashMap};
use notedeck::JobPool;
use poll_promise::Promise;
#[derive(Default)]

View File

@@ -12,6 +12,8 @@ mod frame_history;
pub mod i18n;
mod imgcache;
mod job_pool;
mod jobs;
pub mod media;
mod muted;
pub mod name;
pub mod note;
@@ -47,10 +49,18 @@ pub use filter::{FilterState, FilterStates, UnifiedSubscription};
pub use fonts::NamedFontFamily;
pub use i18n::{CacheStats, FluentArgs, FluentValue, LanguageIdentifier, Localization};
pub use imgcache::{
Animation, GifState, GifStateMap, ImageFrame, Images, LoadableTextureState, MediaCache,
MediaCacheType, TextureFrame, TextureState, TexturedImage, TexturesCache,
get_render_state, Animation, GifState, GifStateMap, ImageFrame, Images, LatestTexture,
LoadableTextureState, MediaCache, MediaCacheType, RenderState, TextureFrame, TextureState,
TexturedImage, TexturesCache,
};
pub use job_pool::JobPool;
pub use jobs::{
BlurhashParams, Job, JobError, JobId, JobParams, JobParamsOwned, JobState, JobsCache,
};
pub use media::{
compute_blurhash, update_imeta_blurhashes, ImageMetadata, ImageType, MediaAction,
ObfuscationType, PixelDimensions, PointDimensions, RenderableMedia,
};
pub use muted::{MuteFun, Muted};
pub use name::NostrName;
pub use note::{

View File

@@ -0,0 +1,90 @@
use crate::{Images, MediaCacheType, TexturedImage};
use poll_promise::Promise;
/// Actions generated by media ui interactions
pub enum MediaAction {
    /// An image was clicked on in a carousel, we have
    /// the opportunity to open into a fullscreen media viewer
    /// with a list of url values
    ViewMedias(Vec<String>),

    /// An image fetch is in flight: `no_pfp_promise` resolves to the
    /// fetched texture (or an error) for `url`; handlers stash it as
    /// a pending entry in the matching texture cache.
    FetchImage {
        url: String,
        cache_type: MediaCacheType,
        no_pfp_promise: Promise<Option<Result<TexturedImage, crate::Error>>>,
    },

    /// The image at `url` finished loading; handlers move its cache
    /// entry into the loaded state.
    DoneLoading {
        url: String,
        cache_type: MediaCacheType,
    },
}
/// Manual Debug impl because `Promise` is not `Debug`; we report the
/// promise's readiness instead of its contents.
impl std::fmt::Debug for MediaAction {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::ViewMedias(urls) => f.debug_struct("ViewMedias").field("urls", urls).finish(),
            Self::FetchImage {
                url,
                cache_type,
                no_pfp_promise,
            } => f
                // label matches the variant name (was "FetchNoPfpImage",
                // a leftover from before the variant was generalized)
                .debug_struct("FetchImage")
                .field("url", url)
                .field("cache_type", cache_type)
                .field("no_pfp_promise ready", &no_pfp_promise.ready().is_some())
                .finish(),
            Self::DoneLoading { url, cache_type } => f
                .debug_struct("DoneLoading")
                .field("url", url)
                .field("cache_type", cache_type)
                .finish(),
        }
    }
}
impl MediaAction {
    /// Handle view media actions: invokes `handler` with a clone of the
    /// url list when this action is `ViewMedias`; otherwise a no-op.
    pub fn on_view_media(&self, handler: impl FnOnce(Vec<String>)) {
        if let MediaAction::ViewMedias(urls) = self {
            handler(urls.clone())
        }
    }

    /// Default processing logic for Media Actions. We don't handle ViewMedias here since
    /// this may be app specific ?
    pub fn process_default_media_actions(self, images: &mut Images) {
        match self {
            MediaAction::ViewMedias(_urls) => {
                // NOTE(jb55): don't assume we want to show a fullscreen
                // media viewer; we can use on_view_media for that. We
                // also don't want to have a notedeck_ui dependency in
                // the notedeck lib (MediaViewerState)
                //
                // In general our notedeck crate should be pretty
                // agnostic to functionality in general unless it's low
                // level like image rendering.
                //
                //mview_state.set_urls(urls);
            }
            MediaAction::FetchImage {
                url,
                cache_type,
                no_pfp_promise: promise,
            } => {
                // Stash the in-flight promise so the texture cache can
                // poll it until the image arrives
                images
                    .get_cache_mut(cache_type)
                    .textures_cache
                    .insert_pending(&url, promise);
            }
            MediaAction::DoneLoading { url, cache_type } => {
                let cache = match cache_type {
                    MediaCacheType::Image => &mut images.static_imgs,
                    MediaCacheType::Gif => &mut images.gifs,
                };

                // Promote the finished texture from pending to loaded
                cache.textures_cache.move_to_loaded(&url);
            }
        }
    }
}

View File

@@ -5,8 +5,8 @@ use nostrdb::Note;
use crate::jobs::{Job, JobError, JobParamsOwned};
#[derive(Clone)]
pub struct Blur<'a> {
pub blurhash: &'a str,
pub struct ImageMetadata {
pub blurhash: String,
pub dimensions: Option<PixelDimensions>, // width and height in pixels
}
@@ -44,7 +44,7 @@ impl PointDimensions {
}
}
impl Blur<'_> {
impl ImageMetadata {
pub fn scaled_pixel_dimensions(
&self,
ui: &egui::Ui,
@@ -75,9 +75,8 @@ impl Blur<'_> {
}
}
pub fn imeta_blurhashes<'a>(note: &'a Note) -> HashMap<&'a str, Blur<'a>> {
let mut blurs = HashMap::new();
/// Find blurhashes in image metadata and update our cache
pub fn update_imeta_blurhashes(note: &Note, blurs: &mut HashMap<String, ImageMetadata>) {
for tag in note.tags() {
let mut tag_iter = tag.into_iter();
if tag_iter
@@ -93,13 +92,11 @@ pub fn imeta_blurhashes<'a>(note: &'a Note) -> HashMap<&'a str, Blur<'a>> {
continue;
};
blurs.insert(url, blur);
blurs.insert(url.to_string(), blur);
}
blurs
}
fn find_blur(tag_iter: nostrdb::TagIter) -> Option<(&str, Blur)> {
fn find_blur(tag_iter: nostrdb::TagIter<'_>) -> Option<(String, ImageMetadata)> {
let mut url = None;
let mut blurhash = None;
let mut dims = None;
@@ -138,21 +135,21 @@ fn find_blur(tag_iter: nostrdb::TagIter) -> Option<(&str, Blur)> {
});
Some((
url,
Blur {
blurhash,
url.to_string(),
ImageMetadata {
blurhash: blurhash.to_string(),
dimensions,
},
))
}
#[derive(Clone)]
pub enum ObfuscationType<'a> {
Blurhash(Blur<'a>),
pub enum ObfuscationType {
Blurhash(ImageMetadata),
Default,
}
pub(crate) fn compute_blurhash(
pub fn compute_blurhash(
params: Option<JobParamsOwned>,
dims: PixelDimensions,
) -> Result<Job, JobError> {
@@ -185,9 +182,9 @@ fn generate_blurhash_texturehandle(
url: &str,
width: u32,
height: u32,
) -> notedeck::Result<egui::TextureHandle> {
) -> Result<egui::TextureHandle, crate::Error> {
let bytes = blurhash::decode(blurhash, width, height, 1.0)
.map_err(|e| notedeck::Error::Generic(e.to_string()))?;
.map_err(|e| crate::Error::Generic(e.to_string()))?;
let img = egui::ColorImage::from_rgba_unmultiplied([width as usize, height as usize], &bytes);
Ok(ctx.load_texture(url, img, Default::default()))

View File

@@ -3,37 +3,32 @@ use std::{
time::{Instant, SystemTime},
};
use crate::{GifState, GifStateMap, TextureState, TexturedImage, TexturesCache};
use egui::TextureHandle;
use notedeck::{GifState, GifStateMap, TexturedImage};
pub struct LatextTexture<'a> {
pub texture: &'a TextureHandle,
pub request_next_repaint: Option<SystemTime>,
}
/// This is necessary because other repaint calls can effectively steal our repaint request.
/// So we must keep on requesting to repaint at our desired time to ensure our repaint goes through.
/// See [`egui::Context::request_repaint_after`]
pub fn handle_repaint<'a>(ui: &egui::Ui, latest: LatextTexture<'a>) -> &'a TextureHandle {
if let Some(_repaint) = latest.request_next_repaint {
// 24fps for gif is fine
ui.ctx()
.request_repaint_after(std::time::Duration::from_millis(41));
}
latest.texture
}
#[must_use = "caller should pass the return value to `gif::handle_repaint`"]
pub fn retrieve_latest_texture<'a>(
pub fn ensure_latest_texture_from_cache(
ui: &egui::Ui,
url: &str,
gifs: &'a mut GifStateMap,
cached_image: &'a mut TexturedImage,
) -> LatextTexture<'a> {
match cached_image {
TexturedImage::Static(texture) => LatextTexture {
texture,
request_next_repaint: None,
},
gifs: &mut GifStateMap,
textures: &mut TexturesCache,
) -> Option<TextureHandle> {
let tstate = textures.cache.get_mut(url)?;
let TextureState::Loaded(img) = tstate.into() else {
return None;
};
Some(ensure_latest_texture(ui, url, gifs, img))
}
pub fn ensure_latest_texture(
ui: &egui::Ui,
url: &str,
gifs: &mut GifStateMap,
img: &mut TexturedImage,
) -> TextureHandle {
match img {
TexturedImage::Static(handle) => handle.clone(),
TexturedImage::Animated(animation) => {
if let Some(receiver) = &animation.receiver {
loop {
@@ -115,12 +110,12 @@ pub fn retrieve_latest_texture<'a>(
if let Some(req) = request_next_repaint {
tracing::trace!("requesting repaint for {url} after {req:?}");
// 24fps for gif is fine
ui.ctx()
.request_repaint_after(std::time::Duration::from_millis(41));
}
LatextTexture {
texture,
request_next_repaint,
}
texture.clone()
}
}
}

View File

@@ -0,0 +1,475 @@
use crate::{Animation, ImageFrame, MediaCache, MediaCacheType, TextureFrame, TexturedImage};
use egui::{pos2, Color32, ColorImage, Context, Rect, Sense, SizeHint};
use image::codecs::gif::GifDecoder;
use image::imageops::FilterType;
use image::{AnimationDecoder, DynamicImage, FlatSamples, Frame};
use poll_promise::Promise;
use std::collections::VecDeque;
use std::io::Cursor;
use std::path::PathBuf;
use std::path::{self, Path};
use std::sync::mpsc;
use std::sync::mpsc::SyncSender;
use std::thread;
use std::time::Duration;
use tokio::fs;
// NOTE(jb55): chatgpt wrote this because I was too dumb to
/// Paint `texture_id` into the currently available space using an
/// aspect-fill strategy: the texture is scaled (preserving
/// `aspect_ratio` = width/height) until it covers the whole frame,
/// centered, letting any overflow extend past the frame edges.
///
/// Returns the response of the painter allocation made with `sense`.
pub fn aspect_fill(
    ui: &mut egui::Ui,
    sense: Sense,
    texture_id: egui::TextureId,
    aspect_ratio: f32,
) -> egui::Response {
    // Available layout space we want the content to cover
    let frame = ui.available_rect_before_wrap();
    let frame_ratio = frame.width() / frame.height();

    // Scale up whichever dimension is needed so the content covers the
    // frame completely while keeping its aspect ratio
    let (width, height) = if frame_ratio > aspect_ratio {
        // Frame is wider than the content: match widths
        (frame.width(), frame.width() / aspect_ratio)
    } else {
        // Frame is taller than the content: match heights
        (frame.height() * aspect_ratio, frame.height())
    };

    // Center the (possibly overflowing) content rect within the frame
    let content_rect = Rect::from_min_size(
        frame.min
            + egui::vec2(
                (frame.width() - width) / 2.0,
                (frame.height() - height) / 2.0,
            ),
        egui::vec2(width, height),
    );

    // Full texture UV range
    let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));

    let (response, painter) = ui.allocate_painter(ui.available_size(), sense);

    // Paint a window-colored background fill, then the texture over it
    painter.rect_filled(content_rect, 0.0, ui.ctx().style().visuals.window_fill());
    painter.image(texture_id, content_rect, uv, Color32::WHITE);

    response
}
#[profiling::function]
/// Mask a square image into a circle in place, making pixels outside
/// the circle transparent and fading the outermost ~1px ring to
/// antialias the edge.
pub fn round_image(image: &mut ColorImage) {
    // Distance from the center to the rim of the avatar circle
    let radius = image.size[0] as f32 / 2.0;
    let radius_sq = radius * radius;
    let width = image.size[0];

    for (idx, px) in image.pixels.iter_mut().enumerate() {
        // Offsets of this pixel from the circle's center
        let dy = radius - (idx / width) as f32;
        let dx = radius - (idx % width) as f32;
        let dist_sq = dx * dx + dy * dy;

        if dist_sq > radius_sq {
            // Outside the circle entirely
            *px = Color32::TRANSPARENT;
            continue;
        }

        // How far inside the rim this pixel sits; only take the square
        // root once we know we're inside the circle
        let edge_dist = radius - dist_sq.sqrt();

        // Within 1px of the rim: fade linearly, full color at 1px
        // inside, fully transparent right at the edge
        if edge_dist <= 1.0 {
            *px = Color32::from_rgba_premultiplied(
                (px.r() as f32 * edge_dist) as u8,
                (px.g() as f32 * edge_dist) as u8,
                (px.b() as f32 * edge_dist) as u8,
                (px.a() as f32 * edge_dist) as u8,
            );
        }
    }
}
/// Downscale `image` so its longest edge is at most `max_edge`,
/// preserving aspect ratio; returns the image unchanged if it
/// already fits.
fn resize_image_if_too_big(
    image: image::DynamicImage,
    max_edge: u32,
    filter: FilterType,
) -> image::DynamicImage {
    let (w, h) = (image.width(), image.height());
    let longest = std::cmp::max(w, h);

    if longest <= max_edge {
        return image;
    }

    // Uniform scale factor that brings the longest edge to max_edge
    let scale = max_edge as f32 / longest as f32;
    image.resize(
        (w as f32 * scale).round() as u32,
        (h as f32 * scale).round() as u32,
        filter,
    )
}
///
/// Process an image, resizing so we don't blow up video memory or even crash
///
/// For profile pictures, make them round and small to fit the size hint
/// For everything else, either:
///
/// - resize to the size hint
/// - keep the size if the longest dimension is less than MAX_IMG_LENGTH
/// - resize if any larger, using [`resize_image_if_too_big`]
///
#[profiling::function]
fn process_image(imgtyp: ImageType, mut image: image::DynamicImage) -> ColorImage {
    const MAX_IMG_LENGTH: u32 = 512;
    const FILTER_TYPE: FilterType = FilterType::CatmullRom;

    match imgtyp {
        ImageType::Content(size_hint) => {
            let image = match size_hint {
                // no size hint: only cap the longest edge at MAX_IMG_LENGTH
                None => resize_image_if_too_big(image, MAX_IMG_LENGTH, FILTER_TYPE),
                Some((w, h)) => image.resize(w, h, FILTER_TYPE),
            };
            let image_buffer = image.into_rgba8();
            ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            )
        }
        ImageType::Profile(size) => {
            // Crop to a centered square first so the avatar isn't
            // distorted by the square resize below
            let smaller = image.width().min(image.height());

            if image.width() > smaller {
                let excess = image.width() - smaller;
                image = image.crop_imm(excess / 2, 0, image.width() - excess, image.height());
            } else if image.height() > smaller {
                let excess = image.height() - smaller;
                image = image.crop_imm(0, excess / 2, image.width(), image.height() - excess);
            }
            let image = image.resize(size, size, FilterType::CatmullRom); // DynamicImage
            let image_buffer = image.into_rgba8(); // RgbaImage (ImageBuffer)
            let mut color_image = ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            );
            // Avatars are displayed round; mask to a circle with an
            // antialiased edge
            round_image(&mut color_image);
            color_image
        }
    }
}
#[profiling::function]
/// Decode an HTTP response into a `ColorImage`, dispatching on the
/// content-type header: SVGs are rasterized at the hinted size,
/// raster formats are decoded then processed per `imgtyp`, and any
/// other content type is an error.
fn parse_img_response(
    response: ehttp::Response,
    imgtyp: ImageType,
) -> Result<ColorImage, crate::Error> {
    let content_type = response.content_type().unwrap_or_default();
    // Rasterization size hint for SVGs, derived from the image type
    let size_hint = match imgtyp {
        ImageType::Profile(size) => SizeHint::Size(size, size),
        ImageType::Content(Some((w, h))) => SizeHint::Size(w, h),
        ImageType::Content(None) => SizeHint::default(),
    };

    if content_type.starts_with("image/svg") {
        profiling::scope!("load_svg");

        let mut color_image =
            egui_extras::image::load_svg_bytes_with_size(&response.bytes, Some(size_hint))?;
        // NOTE(review): SVGs are rounded unconditionally here, even for
        // non-profile content — confirm this is intended
        round_image(&mut color_image);
        Ok(color_image)
    } else if content_type.starts_with("image/") {
        profiling::scope!("load_from_memory");
        let dyn_image = image::load_from_memory(&response.bytes)?;
        Ok(process_image(imgtyp, dyn_image))
    } else {
        Err(format!("Expected image, found content-type {content_type:?}").into())
    }
}
/// Spawn an async task that loads the image (or gif) at `path` from
/// disk and uploads it as an egui texture, returning a promise that
/// resolves with the result.
fn fetch_img_from_disk(
    ctx: &egui::Context,
    url: &str,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    // Own everything the async task needs
    let (ctx, url, path) = (ctx.clone(), url.to_owned(), path.to_owned());

    Promise::spawn_async(async move {
        let loaded = async_fetch_img_from_disk(ctx, url, &path, cache_type).await;
        Some(loaded)
    })
}
/// Load an image file from `path` and turn it into a [`TexturedImage`].
///
/// Static images are decoded in full and uploaded as a single texture;
/// gifs are handed to [`generate_gif`] which streams frames, without
/// re-writing to disk (we just read the file from disk).
async fn async_fetch_img_from_disk(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Result<TexturedImage, crate::Error> {
    match cache_type {
        MediaCacheType::Image => {
            let data = fs::read(path).await?;
            let image_buffer = image::load_from_memory(&data).map_err(crate::Error::Image)?;

            let img = buffer_to_color_image(
                image_buffer.as_flat_samples_u8(),
                image_buffer.width(),
                image_buffer.height(),
            );
            Ok(TexturedImage::Static(ctx.load_texture(
                &url,
                img,
                Default::default(),
            )))
        }
        MediaCacheType::Gif => {
            let gif_bytes = fs::read(path).await?; // Read entire file into a Vec<u8>
            generate_gif(ctx, url, path, gif_bytes, false, |i| {
                buffer_to_color_image(i.as_flat_samples_u8(), i.width(), i.height())
            })
        }
    }
}
/// Decode gif `data` into an animated [`TexturedImage`].
///
/// The first frame is decoded synchronously so the caller gets a
/// usable texture immediately; remaining frames are decoded on a
/// background thread and streamed to the UI through a bounded channel.
/// When `write_to_disk` is set, processed frames are also forwarded to
/// a second thread that re-encodes the gif into the cache at `path`.
fn generate_gif(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    data: Vec<u8>,
    write_to_disk: bool,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + Copy + 'static,
) -> Result<TexturedImage, crate::Error> {
    let decoder = {
        let reader = Cursor::new(data.as_slice());
        GifDecoder::new(reader)?
    };

    // Bounded channels so the decoder threads can't run unboundedly
    // ahead of their consumers
    let (tex_input, tex_output) = mpsc::sync_channel(4);
    let (maybe_encoder_input, maybe_encoder_output) = if write_to_disk {
        let (inp, out) = mpsc::sync_channel(4);
        (Some(inp), Some(out))
    } else {
        (None, None)
    };

    let mut frames: VecDeque<Frame> = decoder
        .into_frames()
        .collect::<std::result::Result<VecDeque<_>, image::ImageError>>()
        .map_err(|e| crate::Error::Generic(e.to_string()))?;

    // Decode frame 0 on this thread so we can return a texture now
    let first_frame = frames.pop_front().map(|frame| {
        generate_animation_frame(
            &ctx,
            &url,
            0,
            frame,
            maybe_encoder_input.as_ref(),
            process_to_egui,
        )
    });

    let cur_url = url.clone();

    // Remaining frames are decoded off-thread and streamed to the UI.
    // NOTE(review): enumerate restarts at 0 here, so texture names can
    // repeat "{url}0" with the first frame — harmless if egui texture
    // names need not be unique, but confirm
    thread::spawn(move || {
        for (index, frame) in frames.into_iter().enumerate() {
            let texture_frame = generate_animation_frame(
                &ctx,
                &cur_url,
                index,
                frame,
                maybe_encoder_input.as_ref(),
                process_to_egui,
            );
            if tex_input.send(texture_frame).is_err() {
                tracing::debug!("AnimationTextureFrame mpsc stopped abruptly");
                break;
            }
        }
    });

    if let Some(encoder_output) = maybe_encoder_output {
        let path = path.to_owned();

        // Collect every processed frame, then persist the gif to disk
        thread::spawn(move || {
            let mut imgs = Vec::new();

            while let Ok(img) = encoder_output.recv() {
                imgs.push(img);
            }

            if let Err(e) = MediaCache::write_gif(&path, &url, imgs) {
                tracing::error!("Could not write gif to disk: {e}");
            }
        });
    }

    first_frame.map_or_else(
        || {
            Err(crate::Error::Generic(
                "first frame not found for gif".to_owned(),
            ))
        },
        |first_frame| {
            Ok(TexturedImage::Animated(Animation {
                other_frames: Default::default(),
                receiver: Some(tex_output),
                first_frame,
            }))
        },
    )
}
/// Convert one decoded gif `frame` into a [`TextureFrame`], optionally
/// forwarding a copy of the processed image to the disk-encoder thread.
fn generate_animation_frame(
    ctx: &egui::Context,
    url: &str,
    index: usize,
    frame: image::Frame,
    maybe_encoder_input: Option<&SyncSender<ImageFrame>>,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + 'static,
) -> TextureFrame {
    // Inter-frame delay comes from the gif itself
    let delay = Duration::from(frame.delay());
    let img = DynamicImage::ImageRgba8(frame.into_buffer());
    let color_img = process_to_egui(img);

    if let Some(sender) = maybe_encoder_input {
        // Best-effort send to the gif re-encoder; a closed channel is
        // logged but doesn't fail the frame
        if let Err(e) = sender.send(ImageFrame {
            delay,
            image: color_img.clone(),
        }) {
            tracing::error!("ImageFrame mpsc unexpectedly closed: {e}");
        }
    }

    TextureFrame {
        delay,
        // Texture name is url + frame index
        texture: ctx.load_texture(format!("{url}{index}"), color_img, Default::default()),
    }
}
/// Convert raw 8-bit RGBA samples into an egui `ColorImage`.
///
/// # Panics
///
/// Panics if `samples` is `None`, i.e. the decoded image buffer was
/// not 8-bit. Callers in this module only pass buffers produced via
/// `as_flat_samples_u8()` on images loaded by the `image` crate.
fn buffer_to_color_image(
    samples: Option<FlatSamples<&[u8]>>,
    width: u32,
    height: u32,
) -> ColorImage {
    // expect() documents the invariant instead of a bare unwrap
    let flat_samples = samples.expect("image buffer should contain 8-bit samples");
    ColorImage::from_rgba_unmultiplied([width as usize, height as usize], flat_samples.as_slice())
}
pub fn fetch_binary_from_disk(path: PathBuf) -> Result<Vec<u8>, crate::Error> {
std::fs::read(path).map_err(|e| crate::Error::Generic(e.to_string()))
}
/// Controls type-specific handling
#[derive(Debug, Clone, Copy)]
pub enum ImageType {
    /// Profile Image (square size in pixels; cropped to a square,
    /// resized, and rounded by `process_image`)
    Profile(u32),
    /// Content Image with optional (width, height) size hint; without
    /// a hint the longest edge is capped during processing
    Content(Option<(u32, u32)>),
}
/// Fetch the image at `url`, preferring the on-disk cache and falling
/// back to the network when no cached file exists yet.
pub fn fetch_img(
    img_cache_path: &Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    // Cache files are keyed by a hash of the url
    let cached_file = img_cache_path.join(MediaCache::key(url));

    if cached_file.exists() {
        fetch_img_from_disk(ctx, url, &cached_file, cache_type)
    } else {
        fetch_img_from_net(img_cache_path, ctx, url, imgtyp, cache_type)
    }
}
/// Fetch an image over HTTP and decode it into a [`TexturedImage`],
/// delivered through the returned promise.
///
/// Static images are additionally written to the disk cache on a
/// background thread; gifs are persisted by [`generate_gif`] itself
/// (it's called with `write_to_disk = true`).
fn fetch_img_from_net(
    cache_path: &path::Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    let (sender, promise) = Promise::new();
    let request = ehttp::Request::get(url);
    // Owned copies for the fetch callback, which runs off-thread
    let ctx = ctx.clone();
    let cloned_url = url.to_owned();
    let cache_path = cache_path.to_owned();
    ehttp::fetch(request, move |response| {
        let handle = response.map_err(crate::Error::Generic).and_then(|resp| {
            match cache_type {
                MediaCacheType::Image => {
                    let img = parse_img_response(resp, imgtyp);
                    img.map(|img| {
                        let texture_handle =
                            ctx.load_texture(&cloned_url, img.clone(), Default::default());

                        // write to disk
                        std::thread::spawn(move || {
                            MediaCache::write(&cache_path, &cloned_url, img)
                        });

                        TexturedImage::Static(texture_handle)
                    })
                }
                MediaCacheType::Gif => {
                    let gif_bytes = resp.bytes;
                    generate_gif(
                        ctx.clone(),
                        cloned_url,
                        &cache_path,
                        gif_bytes,
                        true,
                        move |img| process_image(imgtyp, img),
                    )
                }
            }
        });

        sender.send(Some(handle)); // send the results back to the UI thread.
        ctx.request_repaint();
    });

    promise
}
/// Build a promise that fetches the fallback "no profile picture"
/// avatar at the standard 128px profile size.
pub fn fetch_no_pfp_promise(
    ctx: &Context,
    cache: &MediaCache,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    let url = crate::profile::no_pfp_url();

    crate::media::images::fetch_img(
        &cache.cache_dir,
        ctx,
        url,
        ImageType::Profile(128),
        MediaCacheType::Image,
    )
}

View File

@@ -0,0 +1 @@

View File

@@ -0,0 +1,14 @@
pub mod action;
pub mod blur;
pub mod gif;
pub mod images;
pub mod imeta;
pub mod renderable;
pub use action::MediaAction;
pub use blur::{
compute_blurhash, update_imeta_blurhashes, ImageMetadata, ObfuscationType, PixelDimensions,
PointDimensions,
};
pub use images::ImageType;
pub use renderable::RenderableMedia;

View File

@@ -0,0 +1,9 @@
use super::ObfuscationType;
use crate::MediaCacheType;
/// Media that is prepared for rendering. Use [`Images::get_renderable_media`] to get these
pub struct RenderableMedia {
    /// Source url of the media
    pub url: String,
    /// Which cache renders this media (static image vs gif)
    pub media_type: MediaCacheType,
    /// How to obfuscate the media before it loads: a blurhash when
    /// imeta provided one, otherwise the default placeholder
    pub obfuscation_type: ObfuscationType,
}

View File

@@ -1,8 +1,7 @@
use super::context::ContextSelection;
use crate::{zaps::NoteZapTargetOwned, Images, MediaCacheType, TexturedImage};
use crate::{zaps::NoteZapTargetOwned, MediaAction};
use egui::Vec2;
use enostr::{NoteId, Pubkey};
use poll_promise::Promise;
#[derive(Debug)]
pub struct ScrollInfo {
@@ -61,62 +60,3 @@ pub struct ZapTargetAmount {
pub target: NoteZapTargetOwned,
pub specified_msats: Option<u64>, // if None use default amount
}
pub enum MediaAction {
FetchImage {
url: String,
cache_type: MediaCacheType,
no_pfp_promise: Promise<Option<Result<TexturedImage, crate::Error>>>,
},
DoneLoading {
url: String,
cache_type: MediaCacheType,
},
}
impl std::fmt::Debug for MediaAction {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::FetchImage {
url,
cache_type,
no_pfp_promise,
} => f
.debug_struct("FetchNoPfpImage")
.field("url", url)
.field("cache_type", cache_type)
.field("no_pfp_promise ready", &no_pfp_promise.ready().is_some())
.finish(),
Self::DoneLoading { url, cache_type } => f
.debug_struct("DoneLoading")
.field("url", url)
.field("cache_type", cache_type)
.finish(),
}
}
}
impl MediaAction {
pub fn process(self, images: &mut Images) {
match self {
MediaAction::FetchImage {
url,
cache_type,
no_pfp_promise: promise,
} => {
images
.get_cache_mut(cache_type)
.textures_cache
.insert_pending(&url, promise);
}
MediaAction::DoneLoading { url, cache_type } => {
let cache = match cache_type {
MediaCacheType::Image => &mut images.static_imgs,
MediaCacheType::Gif => &mut images.gifs,
};
cache.textures_cache.move_to_loaded(&url);
}
}
}
}

View File

@@ -1,7 +1,7 @@
mod action;
mod context;
pub use action::{MediaAction, NoteAction, ScrollInfo, ZapAction, ZapTargetAmount};
pub use action::{NoteAction, ScrollInfo, ZapAction, ZapTargetAmount};
pub use context::{BroadcastContext, ContextSelection, NoteContextSelection};
use crate::Accounts;

View File

@@ -695,6 +695,8 @@ fn chrome_handle_app_action(
ctx.global_wallet,
ctx.zaps,
ctx.img_cache,
&mut columns.view_state,
&mut columns.options,
ui,
);
@@ -750,6 +752,8 @@ fn columns_route_to_profile(
ctx.global_wallet,
ctx.zaps,
ctx.img_cache,
&mut columns.view_state,
&mut columns.options,
ui,
);

View File

@@ -1,6 +1,7 @@
use crate::{
column::Columns,
nav::{RouterAction, RouterType},
options::AppOptions,
route::Route,
timeline::{
thread::{
@@ -8,6 +9,7 @@ use crate::{
},
ThreadSelection, TimelineCache, TimelineKind,
},
view_state::ViewState,
};
use enostr::{NoteId, Pubkey, RelayPool};
@@ -51,6 +53,8 @@ fn execute_note_action(
global_wallet: &mut GlobalWallet,
zaps: &mut Zaps,
images: &mut Images,
view_state: &mut ViewState,
app_options: &mut AppOptions,
router_type: RouterType,
ui: &mut egui::Ui,
col: usize,
@@ -153,7 +157,12 @@ fn execute_note_action(
}
},
NoteAction::Media(media_action) => {
media_action.process(images);
media_action.on_view_media(|medias| {
view_state.media_viewer.urls = medias;
app_options.set(AppOptions::FullscreenMedia, true);
});
media_action.process_default_media_actions(images)
}
}
@@ -180,6 +189,8 @@ pub fn execute_and_process_note_action(
global_wallet: &mut GlobalWallet,
zaps: &mut Zaps,
images: &mut Images,
view_state: &mut ViewState,
app_options: &mut AppOptions,
ui: &mut egui::Ui,
) -> Option<RouterAction> {
let router_type = {
@@ -204,6 +215,8 @@ pub fn execute_and_process_note_action(
global_wallet,
zaps,
images,
view_state,
app_options,
router_type,
ui,
col,

View File

@@ -20,9 +20,12 @@ use enostr::{ClientMessage, PoolRelay, Pubkey, RelayEvent, RelayMessage, RelayPo
use nostrdb::Transaction;
use notedeck::{
tr, ui::is_narrow, Accounts, AppAction, AppContext, DataPath, DataPathType, FilterState,
Localization, UnknownIds,
Images, JobsCache, Localization, UnknownIds,
};
use notedeck_ui::{
media::{MediaViewer, MediaViewerState},
NoteOptions,
};
use notedeck_ui::{jobs::JobsCache, NoteOptions};
use std::collections::{BTreeSet, HashMap};
use std::path::Path;
use std::time::Duration;
@@ -365,12 +368,43 @@ fn render_damus(
render_damus_desktop(damus, app_ctx, ui)
};
fullscreen_media_viewer_ui(
ui,
&mut damus.options,
&mut damus.view_state.media_viewer,
app_ctx.img_cache,
);
// We use this for keeping timestamps and things up to date
ui.ctx().request_repaint_after(Duration::from_secs(5));
app_action
}
/// Present a fullscreen media viewer if the FullscreenMedia AppOptions flag is set. This is
/// typically set by image carousels using a MediaAction's on_view_media callback when
/// an image is clicked
fn fullscreen_media_viewer_ui(
ui: &mut egui::Ui,
options: &mut AppOptions,
viewer_state: &mut MediaViewerState,
img_cache: &mut Images,
) {
if !options.contains(AppOptions::FullscreenMedia) || viewer_state.urls.is_empty() {
return;
}
// Close it?
if ui.input(|i| i.key_pressed(egui::Key::Escape)) {
options.set(AppOptions::FullscreenMedia, false);
return;
}
MediaViewer::new(viewer_state)
.fullscreen(true)
.ui(img_cache, ui);
}
/*
fn determine_key_storage_type() -> KeyStorageType {
#[cfg(target_os = "macos")]

View File

@@ -11,7 +11,7 @@ use sha2::{Digest, Sha256};
use url::Url;
use crate::Error;
use notedeck_ui::images::fetch_binary_from_disk;
use notedeck::media::images::fetch_binary_from_disk;
pub const NOSTR_BUILD_URL: fn() -> Url = || Url::parse("http://nostr.build").unwrap();
const NIP96_WELL_KNOWN: &str = ".well-known/nostr/nip96.json";
@@ -143,7 +143,7 @@ pub fn nip96_upload(
Err(e) => {
return Promise::from_ready(Err(Error::Generic(format!(
"could not read contents of file to upload: {e}"
))))
))));
}
};

View File

@@ -459,6 +459,8 @@ fn process_render_nav_action(
ctx.global_wallet,
ctx.zaps,
ctx.img_cache,
&mut app.view_state,
&mut app.options,
ui,
)
}

View File

@@ -16,6 +16,9 @@ bitflags! {
/// Should we scroll to top on the active column?
const ScrollToTop = 1 << 3;
/// Are we showing fullscreen media?
const FullscreenMedia = 1 << 4;
}
}

View File

@@ -6,8 +6,8 @@ use crate::{
};
use enostr::Pubkey;
use notedeck::NoteContext;
use notedeck_ui::{jobs::JobsCache, NoteOptions};
use notedeck::{JobsCache, NoteContext};
use notedeck_ui::NoteOptions;
#[allow(clippy::too_many_arguments)]
pub fn render_timeline_route(

View File

@@ -14,13 +14,12 @@ use egui::{
};
use enostr::{FilledKeypair, FullKeypair, NoteId, Pubkey, RelayPool};
use nostrdb::{Ndb, Transaction};
use notedeck::media::gif::ensure_latest_texture;
use notedeck::{get_render_state, JobsCache, PixelDimensions, RenderState};
use notedeck_ui::{
app_images,
blur::PixelDimensions,
context_menu::{input_context, PasteBehavior},
gif::{handle_repaint, retrieve_latest_texture},
images::{get_render_state, RenderState},
jobs::JobsCache,
note::render_note_preview,
NoteOptions, ProfilePic,
};
@@ -471,7 +470,7 @@ impl<'a, 'd> PostView<'a, 'd> {
self.note_context.img_cache,
cache_type,
url,
notedeck_ui::images::ImageType::Content(Some((width, height))),
notedeck::ImageType::Content(Some((width, height))),
);
render_post_view_media(
@@ -595,12 +594,10 @@ fn render_post_view_media(
.to_points(ui.pixels_per_point())
.to_vec();
let texture_handle = handle_repaint(
ui,
retrieve_latest_texture(url, render_state.gifs, renderable_media),
);
let texture_handle =
ensure_latest_texture(ui, url, render_state.gifs, renderable_media);
let img_resp = ui.add(
egui::Image::new(texture_handle)
egui::Image::new(&texture_handle)
.max_size(size)
.corner_radius(12.0),
);

View File

@@ -6,8 +6,8 @@ use crate::{
use egui::ScrollArea;
use enostr::{FilledKeypair, NoteId};
use notedeck::NoteContext;
use notedeck_ui::{jobs::JobsCache, NoteOptions};
use notedeck::{JobsCache, NoteContext};
use notedeck_ui::NoteOptions;
pub struct QuoteRepostView<'a, 'd> {
note_context: &'a mut NoteContext<'d>,

View File

@@ -6,8 +6,7 @@ use crate::ui::{
use egui::{Rect, Response, ScrollArea, Ui};
use enostr::{FilledKeypair, NoteId};
use notedeck::NoteContext;
use notedeck_ui::jobs::JobsCache;
use notedeck::{JobsCache, NoteContext};
use notedeck_ui::{NoteOptions, NoteView, ProfilePic};
pub struct PostReplyView<'a, 'd> {

View File

@@ -13,12 +13,11 @@ use crate::{
ui::timeline::{tabs_ui, TimelineTabView},
};
use notedeck::{
name::get_display_name, profile::get_profile_url, IsFollowing, NoteAction, NoteContext,
NotedeckTextStyle,
name::get_display_name, profile::get_profile_url, IsFollowing, JobsCache, NoteAction,
NoteContext, NotedeckTextStyle,
};
use notedeck_ui::{
app_images,
jobs::JobsCache,
profile::{about_section_widget, banner, display_name_widget},
NoteOptions, ProfilePic,
};

View File

@@ -5,11 +5,11 @@ use state::TypingType;
use crate::{timeline::TimelineTab, ui::timeline::TimelineTabView};
use egui_winit::clipboard::Clipboard;
use nostrdb::{Filter, Ndb, Transaction};
use notedeck::{tr, tr_plural, Localization, NoteAction, NoteContext, NoteRef};
use notedeck::{tr, tr_plural, JobsCache, Localization, NoteAction, NoteContext, NoteRef};
use notedeck_ui::{
context_menu::{input_context, PasteBehavior},
icons::search_icon,
jobs::JobsCache,
padding, NoteOptions,
};
use std::time::{Duration, Instant};

View File

@@ -2,8 +2,8 @@ use egui::InnerResponse;
use egui_virtual_list::VirtualList;
use nostrdb::{Note, Transaction};
use notedeck::note::root_note_id_from_selected_id;
use notedeck::JobsCache;
use notedeck::{NoteAction, NoteContext};
use notedeck_ui::jobs::JobsCache;
use notedeck_ui::note::NoteResponse;
use notedeck_ui::{NoteOptions, NoteView};

View File

@@ -3,7 +3,7 @@ use egui::{vec2, Direction, Layout, Pos2, Stroke};
use egui_tabs::TabColor;
use nostrdb::Transaction;
use notedeck::ui::is_narrow;
use notedeck_ui::jobs::JobsCache;
use notedeck::JobsCache;
use std::f32::consts::PI;
use tracing::{error, warn};

View File

@@ -6,8 +6,12 @@ use crate::deck_state::DeckState;
use crate::login_manager::AcquireKeyState;
use crate::ui::search::SearchQueryState;
use enostr::ProfileState;
use notedeck_ui::media::MediaViewerState;
/// Various state for views
///
/// TODO(jb55): we likely want to encapsulate these better,
/// or at least document where they are used
#[derive(Default)]
pub struct ViewState {
pub login: AcquireKeyState,
@@ -16,6 +20,11 @@ pub struct ViewState {
pub id_string_map: HashMap<egui::Id, String>,
pub searches: HashMap<egui::Id, SearchQueryState>,
pub pubkey_to_profile_state: HashMap<Pubkey, ProfileState>,
/// Keeps track of which urls we are actively viewing in the
/// fullscreen media viewer, as well as any other state we want to
/// keep track of
pub media_viewer: MediaViewerState,
}
impl ViewState {

View File

@@ -8,8 +8,7 @@ use egui_wgpu::RenderState;
use enostr::KeypairUnowned;
use futures::StreamExt;
use nostrdb::Transaction;
use notedeck::{AppAction, AppContext};
use notedeck_ui::jobs::JobsCache;
use notedeck::{AppAction, AppContext, JobsCache};
use std::collections::HashMap;
use std::string::ToString;
use std::sync::mpsc::{self, Receiver};

View File

@@ -4,8 +4,10 @@ use crate::{
};
use egui::{Align, Key, KeyboardShortcut, Layout, Modifiers};
use nostrdb::{Ndb, Transaction};
use notedeck::{tr, Accounts, AppContext, Images, Localization, NoteAction, NoteContext};
use notedeck_ui::{app_images, icons::search_icon, jobs::JobsCache, NoteOptions, ProfilePic};
use notedeck::{
tr, Accounts, AppContext, Images, JobsCache, Localization, NoteAction, NoteContext,
};
use notedeck_ui::{app_images, icons::search_icon, NoteOptions, ProfilePic};
/// DaveUi holds all of the data it needs to render itself
pub struct DaveUi<'a> {

View File

@@ -21,5 +21,3 @@ image = { workspace = true }
bitflags = { workspace = true }
enostr = { workspace = true }
hashbrown = { workspace = true }
blurhash = "0.2.3"

View File

@@ -1,510 +1 @@
use egui::{pos2, Color32, ColorImage, Context, Rect, Sense, SizeHint};
use image::codecs::gif::GifDecoder;
use image::imageops::FilterType;
use image::{AnimationDecoder, DynamicImage, FlatSamples, Frame};
use notedeck::{
Animation, GifStateMap, ImageFrame, Images, LoadableTextureState, MediaCache, MediaCacheType,
TextureFrame, TextureState, TexturedImage,
};
use poll_promise::Promise;
use std::collections::VecDeque;
use std::io::Cursor;
use std::path::PathBuf;
use std::path::{self, Path};
use std::sync::mpsc;
use std::sync::mpsc::SyncSender;
use std::thread;
use std::time::Duration;
use tokio::fs;
// NOTE(jb55): chatgpt wrote this because I was too dumb to work out the
// aspect-fill math myself
/// Draw `texture_id` so it completely covers the available frame while
/// preserving `aspect_ratio` (width / height), cropping any overflow.
///
/// The content is centered: whichever dimension overflows the frame is
/// split evenly on both sides. Returns the response of the allocated
/// painter region.
pub fn aspect_fill(
    ui: &mut egui::Ui,
    sense: Sense,
    texture_id: egui::TextureId,
    aspect_ratio: f32,
) -> egui::Response {
    // The space available in the current layout.
    let avail = ui.available_rect_before_wrap();
    let avail_ratio = avail.width() / avail.height();

    // Scale the content until it covers the frame in both dimensions.
    let (w, h) = if avail_ratio > aspect_ratio {
        // Frame is wider than the content: match widths, overflow height.
        (avail.width(), avail.width() / aspect_ratio)
    } else {
        // Frame is taller than the content: match heights, overflow width.
        (avail.height() * aspect_ratio, avail.height())
    };

    // Center the (possibly overflowing) content rect within the frame.
    let content_rect = Rect::from_min_size(
        avail.min + egui::vec2((avail.width() - w) / 2.0, (avail.height() - h) / 2.0),
        egui::vec2(w, h),
    );

    let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));
    let (response, painter) = ui.allocate_painter(ui.available_size(), sense);

    // Clear with the window fill, then paint the texture over it.
    painter.rect_filled(content_rect, 0.0, ui.ctx().style().visuals.window_fill());
    painter.image(texture_id, content_rect, uv, Color32::WHITE);

    response
}
/// Mask a square [`ColorImage`] into a circle, in place.
///
/// Pixels outside the inscribed circle become fully transparent; pixels
/// within one pixel of the circle's edge are faded proportionally to
/// antialias the boundary.
#[profiling::function]
pub fn round_image(image: &mut ColorImage) {
    // Radius of the circle inscribed in the (assumed square) image.
    let radius = image.size[0] as f32 / 2.0;
    let radius_sq = radius * radius;

    for (i, pixel) in image.pixels.iter_mut().enumerate() {
        // Recover the 2D coordinate from the flat pixel index.
        let x = (i % image.size[0]) as f32;
        let y = (i / image.size[0]) as f32;

        let dx = radius - x;
        let dy = radius - y;

        // Squared distance from the circle's center to this pixel.
        let dist_sq = dx * dx + dy * dy;

        if dist_sq > radius_sq {
            // Outside the circle: make it transparent.
            *pixel = Color32::TRANSPARENT;
            continue;
        }

        // How far inside the circle's edge we are, in pixels.
        let edge_dist = radius - dist_sq.sqrt();

        // Fade the outermost one-pixel ring: right on the edge is 0% of
        // the original color, one pixel inward is 100%.
        if edge_dist <= 1.0 {
            *pixel = Color32::from_rgba_premultiplied(
                (pixel.r() as f32 * edge_dist) as u8,
                (pixel.g() as f32 * edge_dist) as u8,
                (pixel.b() as f32 * edge_dist) as u8,
                (pixel.a() as f32 * edge_dist) as u8,
            );
        }
    }
}
/// Downscale `image` so its longest edge is at most `max_edge`,
/// preserving aspect ratio. Images already within bounds are returned
/// unchanged, avoiding a pointless resample.
fn resize_image_if_too_big(
    image: image::DynamicImage,
    max_edge: u32,
    filter: FilterType,
) -> image::DynamicImage {
    let (w, h) = (image.width(), image.height());
    let longest = w.max(h);

    if longest <= max_edge {
        return image;
    }

    // Scale both edges by the same factor so the longest lands on max_edge.
    let scale = max_edge as f32 / longest as f32;
    let new_w = (w as f32 * scale).round() as u32;
    let new_h = (h as f32 * scale).round() as u32;
    image.resize(new_w, new_h, filter)
}
///
/// Decode-side image processing so huge images never blow up video
/// memory (or crash us).
///
/// Content images are resized to the size hint when one is present,
/// otherwise clamped so the longest edge is at most `MAX_IMG_LENGTH`
/// (see [`resize_image_if_too_big`]).
///
/// Profile images are center-cropped square, resized to the requested
/// size, and rounded via [`round_image`].
///
#[profiling::function]
fn process_image(imgtyp: ImageType, mut image: image::DynamicImage) -> ColorImage {
    const MAX_IMG_LENGTH: u32 = 512;
    const FILTER_TYPE: FilterType = FilterType::CatmullRom;

    // Shared conversion: DynamicImage -> egui ColorImage (RGBA, unmultiplied).
    fn to_color_image(image: image::DynamicImage) -> ColorImage {
        let buf = image.into_rgba8();
        ColorImage::from_rgba_unmultiplied(
            [buf.width() as usize, buf.height() as usize],
            buf.as_flat_samples().as_slice(),
        )
    }

    match imgtyp {
        ImageType::Content(size_hint) => {
            let resized = match size_hint {
                Some((w, h)) => image.resize(w, h, FILTER_TYPE),
                None => resize_image_if_too_big(image, MAX_IMG_LENGTH, FILTER_TYPE),
            };
            to_color_image(resized)
        }
        ImageType::Profile(size) => {
            // Center-crop to a square before scaling down.
            let side = image.width().min(image.height());
            if image.width() > side {
                let excess = image.width() - side;
                image = image.crop_imm(excess / 2, 0, image.width() - excess, image.height());
            } else if image.height() > side {
                let excess = image.height() - side;
                image = image.crop_imm(0, excess / 2, image.width(), image.height() - excess);
            }

            let mut color_image = to_color_image(image.resize(size, size, FILTER_TYPE));
            round_image(&mut color_image);
            color_image
        }
    }
}
/// Convert an HTTP response into a [`ColorImage`], dispatching on the
/// response's content-type.
///
/// SVGs are rasterized at the size implied by `imgtyp` and rounded;
/// raster formats go through [`process_image`]. A non-`image/*`
/// content-type is an error.
#[profiling::function]
fn parse_img_response(
    response: ehttp::Response,
    imgtyp: ImageType,
) -> Result<ColorImage, notedeck::Error> {
    let content_type = response.content_type().unwrap_or_default();

    // Rasterization target for SVGs, derived from the requested image type.
    let size_hint = match imgtyp {
        ImageType::Profile(size) => SizeHint::Size(size, size),
        ImageType::Content(Some((w, h))) => SizeHint::Size(w, h),
        ImageType::Content(None) => SizeHint::default(),
    };

    if content_type.starts_with("image/svg") {
        profiling::scope!("load_svg");
        let mut color_image =
            egui_extras::image::load_svg_bytes_with_size(&response.bytes, Some(size_hint))?;
        round_image(&mut color_image);
        Ok(color_image)
    } else if content_type.starts_with("image/") {
        profiling::scope!("load_from_memory");
        let dyn_image = image::load_from_memory(&response.bytes)?;
        Ok(process_image(imgtyp, dyn_image))
    } else {
        Err(format!("Expected image, found content-type {content_type:?}").into())
    }
}
/// Kick off an async decode of an already-cached media file at `path`.
///
/// Thin wrapper over [`async_fetch_img_from_disk`] that clones/owns the
/// inputs so the work can run as a background task, returning a promise
/// that resolves once the decode completes.
fn fetch_img_from_disk(
    ctx: &egui::Context,
    url: &str,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    // Own everything the spawned future captures ('static lifetime).
    let ctx = ctx.clone();
    let url = url.to_owned();
    let path = path.to_owned();
    Promise::spawn_async(async move {
        Some(async_fetch_img_from_disk(ctx, url, &path, cache_type).await)
    })
}
async fn async_fetch_img_from_disk(
ctx: egui::Context,
url: String,
path: &path::Path,
cache_type: MediaCacheType,
) -> Result<TexturedImage, notedeck::Error> {
match cache_type {
MediaCacheType::Image => {
let data = fs::read(path).await?;
let image_buffer = image::load_from_memory(&data).map_err(notedeck::Error::Image)?;
let img = buffer_to_color_image(
image_buffer.as_flat_samples_u8(),
image_buffer.width(),
image_buffer.height(),
);
Ok(TexturedImage::Static(ctx.load_texture(
&url,
img,
Default::default(),
)))
}
MediaCacheType::Gif => {
let gif_bytes = fs::read(path).await?; // Read entire file into a Vec<u8>
generate_gif(ctx, url, path, gif_bytes, false, |i| {
buffer_to_color_image(i.as_flat_samples_u8(), i.width(), i.height())
})
}
}
}
/// Decode gif `data` into a streaming [`TexturedImage::Animated`].
///
/// The first frame is decoded synchronously so the caller immediately has
/// something to show; the remaining frames are decoded on a background
/// thread and streamed through a bounded channel (`receiver` on the
/// returned [`Animation`]). When `write_to_disk` is set, each frame is
/// also forwarded to a second thread that persists the gif via
/// [`MediaCache::write_gif`].
///
/// Errors if the data cannot be decoded as a gif or contains no frames.
fn generate_gif(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    data: Vec<u8>,
    write_to_disk: bool,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + Copy + 'static,
) -> Result<TexturedImage, notedeck::Error> {
    let decoder = {
        let reader = Cursor::new(data.as_slice());
        GifDecoder::new(reader)?
    };
    // Bounded channels so decode backpressure caps memory use.
    let (tex_input, tex_output) = mpsc::sync_channel(4);
    let (maybe_encoder_input, maybe_encoder_output) = if write_to_disk {
        let (inp, out) = mpsc::sync_channel(4);
        (Some(inp), Some(out))
    } else {
        (None, None)
    };

    // Decode all frame metadata eagerly; bail on the first decode error.
    let mut frames: VecDeque<Frame> = decoder
        .into_frames()
        .collect::<std::result::Result<VecDeque<_>, image::ImageError>>()
        .map_err(|e| notedeck::Error::Generic(e.to_string()))?;

    // First frame is converted on the current thread so the UI can paint
    // it right away.
    let first_frame = frames.pop_front().map(|frame| {
        generate_animation_frame(
            &ctx,
            &url,
            0,
            frame,
            maybe_encoder_input.as_ref(),
            process_to_egui,
        )
    });

    let cur_url = url.clone();
    // Background thread: convert the remaining frames and stream them out.
    // NOTE(review): this enumerate restarts at 0, so the second actual frame
    // reuses the first frame's texture name "{url}0" — harmless if egui
    // texture names are debug labels only, but worth confirming.
    thread::spawn(move || {
        for (index, frame) in frames.into_iter().enumerate() {
            let texture_frame = generate_animation_frame(
                &ctx,
                &cur_url,
                index,
                frame,
                maybe_encoder_input.as_ref(),
                process_to_egui,
            );
            // Receiver gone means the animation was dropped; stop decoding.
            if tex_input.send(texture_frame).is_err() {
                tracing::debug!("AnimationTextureFrame mpsc stopped abruptly");
                break;
            }
        }
    });

    // Optional persistence thread: collect every frame, then write once.
    if let Some(encoder_output) = maybe_encoder_output {
        let path = path.to_owned();
        thread::spawn(move || {
            let mut imgs = Vec::new();
            while let Ok(img) = encoder_output.recv() {
                imgs.push(img);
            }
            if let Err(e) = MediaCache::write_gif(&path, &url, imgs) {
                tracing::error!("Could not write gif to disk: {e}");
            }
        });
    }

    // No first frame means the gif had zero frames.
    first_frame.map_or_else(
        || {
            Err(notedeck::Error::Generic(
                "first frame not found for gif".to_owned(),
            ))
        },
        |first_frame| {
            Ok(TexturedImage::Animated(Animation {
                other_frames: Default::default(),
                receiver: Some(tex_output),
                first_frame,
            }))
        },
    )
}
/// Build a single [`TextureFrame`] from a decoded gif frame.
///
/// The frame is converted through `process_to_egui`, optionally mirrored
/// to the disk-encoder channel, and uploaded as a texture named
/// `"{url}{index}"`.
fn generate_animation_frame(
    ctx: &egui::Context,
    url: &str,
    index: usize,
    frame: image::Frame,
    maybe_encoder_input: Option<&SyncSender<ImageFrame>>,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + 'static,
) -> TextureFrame {
    let delay = Duration::from(frame.delay());
    let color_img = process_to_egui(DynamicImage::ImageRgba8(frame.into_buffer()));

    // When persisting this gif, forward a copy of the frame to the
    // encoder thread as well.
    if let Some(encoder) = maybe_encoder_input {
        let encoded = ImageFrame {
            delay,
            image: color_img.clone(),
        };
        if let Err(e) = encoder.send(encoded) {
            tracing::error!("ImageFrame mpsc unexpectedly closed: {e}");
        }
    }

    TextureFrame {
        delay,
        texture: ctx.load_texture(format!("{url}{index}"), color_img, Default::default()),
    }
}
/// Convert raw RGBA8 flat samples into an egui [`ColorImage`].
///
/// # Panics
///
/// Panics when `samples` is `None`, i.e. the decoded image's sample
/// buffer is not `u8`-backed. Call sites feed `as_flat_samples_u8()`
/// results from `image::load_from_memory`; a non-8-bit decode (e.g. a
/// 16-bit PNG) would return `None` here — NOTE(review): worth handling
/// upstream by converting to RGBA8 before calling this.
fn buffer_to_color_image(
    samples: Option<FlatSamples<&[u8]>>,
    width: u32,
    height: u32,
) -> ColorImage {
    // Addresses the old TODO about the bare unwrap: still a panic, but one
    // that states the violated invariant.
    let flat_samples =
        samples.expect("image sample buffer should be u8-backed RGBA (non-8-bit decode?)");
    ColorImage::from_rgba_unmultiplied([width as usize, height as usize], flat_samples.as_slice())
}
pub fn fetch_binary_from_disk(path: PathBuf) -> Result<Vec<u8>, notedeck::Error> {
std::fs::read(path).map_err(|e| notedeck::Error::Generic(e.to_string()))
}
/// Controls type-specific handling
///
/// Selects which processing pipeline an image goes through on decode (see
/// `process_image`): profile pictures are center-cropped square, resized,
/// and rounded; content images are resized to the hint or clamped.
#[derive(Debug, Clone, Copy)]
pub enum ImageType {
    /// Profile Image (size)
    Profile(u32),
    /// Content Image with optional size hint (width, height)
    Content(Option<(u32, u32)>),
}
/// Fetch a media item, preferring the on-disk cache over the network.
///
/// The cache key is derived from `url`; on a cache hit we decode from
/// disk, otherwise we download it (the network path also persists the
/// result for next time).
pub fn fetch_img(
    img_cache_path: &Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let cached_path = img_cache_path.join(MediaCache::key(url));

    if cached_path.exists() {
        fetch_img_from_disk(ctx, url, &cached_path, cache_type)
    } else {
        fetch_img_from_net(img_cache_path, ctx, url, imgtyp, cache_type)
    }
}
/// Download `url` and decode it into a texture, resolving the returned
/// promise when done.
///
/// Decoding happens inside the ehttp callback. Static images are
/// additionally written to the on-disk cache from a spawned thread; gifs
/// are persisted by [`generate_gif`] itself (`write_to_disk = true`).
/// A repaint is requested after the result is sent so the UI picks it up.
fn fetch_img_from_net(
    cache_path: &path::Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let (sender, promise) = Promise::new();
    let request = ehttp::Request::get(url);
    // Own everything the fetch callback captures.
    let ctx = ctx.clone();
    let cloned_url = url.to_owned();
    let cache_path = cache_path.to_owned();

    ehttp::fetch(request, move |response| {
        let handle = response.map_err(notedeck::Error::Generic).and_then(|resp| {
            match cache_type {
                MediaCacheType::Image => {
                    let img = parse_img_response(resp, imgtyp);
                    img.map(|img| {
                        let texture_handle =
                            ctx.load_texture(&cloned_url, img.clone(), Default::default());

                        // write to disk (off the callback thread)
                        std::thread::spawn(move || {
                            MediaCache::write(&cache_path, &cloned_url, img)
                        });

                        TexturedImage::Static(texture_handle)
                    })
                }
                MediaCacheType::Gif => {
                    let gif_bytes = resp.bytes;
                    // generate_gif handles its own disk write here.
                    generate_gif(
                        ctx.clone(),
                        cloned_url,
                        &cache_path,
                        gif_bytes,
                        true,
                        move |img| process_image(imgtyp, img),
                    )
                }
            }
        });

        sender.send(Some(handle)); // send the results back to the UI thread.
        ctx.request_repaint();
    });

    promise
}
/// Look up (or start fetching) the texture for `url`, returning its
/// render state together with the shared gif-animation state map.
pub fn get_render_state<'a>(
    ctx: &Context,
    images: &'a mut Images,
    cache_type: MediaCacheType,
    url: &str,
    img_type: ImageType,
) -> RenderState<'a> {
    // Pick the cache that matches the media type.
    let cache = match cache_type {
        MediaCacheType::Image => &mut images.static_imgs,
        MediaCacheType::Gif => &mut images.gifs,
    };

    // First sight of this url kicks off a fetch; afterwards this simply
    // returns the current texture state.
    let texture_state = cache.textures_cache.handle_and_get_or_insert(url, || {
        crate::images::fetch_img(&cache.cache_dir, ctx, url, img_type, cache_type)
    });

    RenderState {
        texture_state,
        gifs: &mut images.gif_states,
    }
}
/// Like [`RenderState`], but for textures that may still be loading.
pub struct LoadableRenderState<'a> {
    /// Current load state of the texture.
    pub texture_state: LoadableTextureState<'a>,
    /// Shared per-url gif animation state (frame timing etc).
    pub gifs: &'a mut GifStateMap,
}

/// A texture's current state plus the shared gif animation state map
/// needed to advance animated textures each frame.
pub struct RenderState<'a> {
    /// Current state of the texture for the requested url.
    pub texture_state: TextureState<'a>,
    /// Shared per-url gif animation state (frame timing etc).
    pub gifs: &'a mut GifStateMap,
}
/// Start fetching the fallback "no profile picture" avatar.
///
/// Uses the standard 128px profile pipeline so the placeholder is
/// cropped and rounded like any other pfp.
pub fn fetch_no_pfp_promise(
    ctx: &Context,
    cache: &MediaCache,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let url = notedeck::profile::no_pfp_url();
    crate::images::fetch_img(
        &cache.cache_dir,
        ctx,
        url,
        ImageType::Profile(128),
        MediaCacheType::Image,
    )
}

View File

@@ -1,13 +1,11 @@
pub mod anim;
pub mod app_images;
pub mod blur;
pub mod colors;
pub mod constants;
pub mod context_menu;
pub mod gif;
pub mod icons;
pub mod images;
pub mod jobs;
pub mod media;
pub mod mention;
pub mod note;
pub mod profile;

View File

@@ -0,0 +1,3 @@
mod viewer;
pub use viewer::{MediaViewer, MediaViewerState};

View File

@@ -0,0 +1,232 @@
/// Spiral layout for media galleries
use egui::{pos2, vec2, Color32, Rect, Sense, TextureId, Vec2};
/// One gallery entry: an already-uploaded texture plus its aspect ratio.
#[derive(Clone, Copy, Debug)]
pub struct ImageItem {
    pub texture: TextureId,
    /// width / height (must be > 0)
    pub ar: f32,
}
/// A placed image: its texture and the rect it occupies in layout-local
/// coordinates (bounding-box top-left normalized to (0,0)).
///
/// Public because `layout_spiral` is a `pub fn` returning `Vec<Placed>`;
/// a private type in that public interface trips rustc's
/// private-interfaces check (E0446 / `private_interfaces`). Fields are
/// public so callers can actually paint the results.
#[derive(Clone, Debug)]
pub struct Placed {
    pub texture: TextureId,
    pub rect: Rect,
}
/// Tuning knobs for `layout_spiral`.
#[derive(Clone, Copy, Debug)]
pub struct LayoutParams {
    /// Gap between neighboring images, in points.
    pub gutter: f32,
    /// Minimum strip height for rows.
    pub h_min: f32,
    /// Maximum strip height for rows.
    pub h_max: f32,
    /// Minimum strip width for columns.
    pub w_min: f32,
    /// Maximum strip width for columns.
    pub w_max: f32,
    /// When true, the first image is centered as the spiral's seed.
    pub seed_center: bool,
}
/// Lay out `images` in an outward spiral around the origin.
///
/// Strips of images are placed alternately as a column to the right, a
/// row on one side, a column to the left, and a row on the other side of
/// the growing bounding box. Returns the placed rects (normalized so the
/// bounding-box top-left is at (0,0)) together with the total layout size.
pub fn layout_spiral(images: &[ImageItem], params: LayoutParams) -> (Vec<Placed>, Vec2) {
    if images.is_empty() {
        return (Vec::new(), vec2(0.0, 0.0));
    }

    let eps = f32::EPSILON;
    // Sanitize params: non-negative gutter, mins >= 1, maxes >= mins.
    let g = params.gutter.max(0.0);
    let h_min = params.h_min.max(1.0);
    let h_max = params.h_max.max(h_min);
    let w_min = params.w_min.max(1.0);
    let w_max = params.w_max.max(w_min);

    let mut placed = Vec::with_capacity(images.len());

    // Build around origin; normalize at the end.
    let mut x_min = 0.0f32;
    let mut x_max = 0.0f32;
    let mut y_min = 0.0f32;
    let mut y_max = 0.0f32;

    // dir: 0 right-col, 1 top-row, 2 left-col, 3 bottom-row
    // NOTE(review): "top"/"bottom" here read as math coords; egui's y grows
    // downward, so dir 1 visually lands below — confirm intended orientation.
    let mut dir = 0usize;
    let mut i = 0usize;

    // Optional seed: center a single image
    if params.seed_center && i < images.len() {
        let ar = images[i].ar.max(eps);
        let h = ((h_min + h_max) * 0.5).clamp(h_min, h_max);
        let w = ar * h;
        let rect = Rect::from_center_size(pos2(0.0, 0.0), vec2(w, h));
        placed.push(Placed { texture: images[i].texture, rect });
        x_min = rect.min.x;
        x_max = rect.max.x;
        y_min = rect.min.y;
        y_max = rect.max.y;
        i += 1;
        dir = 1; // start by adding a row above
    } else {
        // ensure non-empty bbox for the first strip
        x_min = 0.0; x_max = 1.0; y_min = 0.0; y_max = 1.0;
    }

    // --- helpers -------------------------------------------------------------

    // Choose how many items fit and the strip size S (W for column, H for row).
    //
    // `L` is the bbox edge the strip must span; `weight` maps an image to its
    // extent along that edge per unit of strip size (ar for rows, 1/ar for
    // columns). The chosen size is kept within [s_min, s_max] when possible.
    fn choose_k<F: Fn(&ImageItem) -> f32>(
        images: &[ImageItem],
        L: f32,
        g: f32,
        s_min: f32,
        s_max: f32,
        weight: F,
    ) -> (usize, f32) {
        // prefix sums of weights (sum over first k items)
        let mut pref = Vec::with_capacity(images.len() + 1);
        pref.push(0.0);
        for im in images {
            pref.push(pref.last().copied().unwrap_or(0.0) + weight(im));
        }

        let k_max = images.len().max(1);
        let mut chosen_k = 1usize;
        let mut chosen_s = f32::NAN;

        for k in 1..=k_max {
            // Edge length left over once k-1 gutters are subtracted.
            let L_eff = (L - g * (k as f32 - 1.0)).max(1.0);
            let sum_w = pref[k].max(f32::EPSILON);
            let s = (L_eff / sum_w).max(1.0);

            if s > s_max && k < k_max {
                continue; // too big; add one more to thin the strip
            }
            if s < s_min {
                // prefer one fewer if possible
                if k > 1 {
                    let k2 = k - 1;
                    let L_eff2 = (L - g * (k2 as f32 - 1.0)).max(1.0);
                    let sum_w2 = pref[k2].max(f32::EPSILON);
                    chosen_k = k2;
                    chosen_s = (L_eff2 / sum_w2).max(1.0);
                } else {
                    chosen_k = 1;
                    chosen_s = s_min;
                }
                return (chosen_k, chosen_s);
            }
            return (k, s); // within bounds
        }

        // Fell through: use k_max and clamp
        let L_eff = (L - g * (k_max as f32 - 1.0)).max(1.0);
        let sum_w = pref[k_max].max(f32::EPSILON);
        let s = (L_eff / sum_w).clamp(s_min, s_max);
        (k_max, s)
    }

    // Place a column (top→bottom). Returns the new right/left edge.
    fn place_column(
        placed: &mut Vec<Placed>,
        strip: &[ImageItem],
        W: f32,
        x: f32,
        y_top: f32,
        g: f32,
    ) -> f32 {
        let mut y = y_top;
        for (idx, im) in strip.iter().enumerate() {
            // Height follows from the fixed column width and the image's ar.
            let h = (W / im.ar.max(f32::EPSILON)).max(1.0);
            let rect = Rect::from_min_size(pos2(x, y), vec2(W, h));
            placed.push(Placed { texture: im.texture, rect });
            y += h;
            if idx + 1 != strip.len() { y += g; }
        }
        x + W
    }

    // Place a row (left→right). Returns the new top/bottom edge.
    fn place_row(
        placed: &mut Vec<Placed>,
        strip: &[ImageItem],
        H: f32,
        x_left: f32,
        y: f32,
        g: f32,
    ) -> f32 {
        let mut x = x_left;
        for (idx, im) in strip.iter().enumerate() {
            // Width follows from the fixed row height and the image's ar.
            let w = (im.ar.max(f32::EPSILON) * H).max(1.0);
            let rect = Rect::from_min_size(pos2(x, y), vec2(w, H));
            placed.push(Placed { texture: im.texture, rect });
            x += w;
            if idx + 1 != strip.len() { x += g; }
        }
        y + H
    }

    // --- main loop -----------------------------------------------------------

    while i < images.len() {
        let remaining = &images[i..];

        if dir % 2 == 0 {
            // COLUMN (dir 0: right, 2: left)
            let L = (y_max - y_min).max(1.0);
            let (k, W) = choose_k(
                remaining,
                L, g, w_min, w_max,
                |im| 1.0 / im.ar.max(f32::EPSILON),
            );
            let x = if dir == 0 { x_max + g } else { x_min - g - W };
            let new_edge = place_column(&mut placed, &remaining[..k], W, x, y_min, g);
            if dir == 0 { x_max = new_edge; } else { x_min = x; }
            i += k;
        } else {
            // ROW (dir 1: top, 3: bottom)
            let L = (x_max - x_min).max(1.0);
            let (k, H) = choose_k(
                remaining,
                L, g, h_min, h_max,
                |im| im.ar.max(f32::EPSILON),
            );
            let y = if dir == 1 { y_max + g } else { y_min - g - H };
            let new_edge = place_row(&mut placed, &remaining[..k], H, x_min, y, g);
            if dir == 1 { y_max = new_edge; } else { y_min = y; }
            i += k;
        }

        dir = (dir + 1) % 4;
    }

    // Normalize so bbox top-left is (0,0)
    let shift = vec2(-x_min, -y_min);
    for p in &mut placed {
        p.rect = p.rect.translate(shift);
    }
    let total_size = vec2(x_max - x_min, y_max - y_min);

    (placed, total_size)
}
/// Lay `images` out with [`layout_spiral`] and paint them inside a
/// two-axis scroll area, outlining the full layout bounds in dark gray.
pub fn spiral_gallery(ui: &mut egui::Ui, images: &[ImageItem], params: LayoutParams) {
    use egui::{ScrollArea, Stroke};

    let (placed, size) = layout_spiral(images, params);

    ScrollArea::both().auto_shrink([false, false]).show(ui, |ui| {
        // Reserve the full layout extent so the scroll area can pan over it.
        let (rect, _resp) = ui.allocate_exact_size(size, Sense::hover());
        let painter = ui.painter_at(rect);

        // Outline the gallery's bounding box.
        painter.rect_stroke(
            Rect::from_min_size(rect.min, size),
            0.0,
            Stroke::new(1.0, Color32::DARK_GRAY),
        );

        let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));
        for p in &placed {
            // Translate layout-local rects into screen space.
            let r = Rect::from_min_max(rect.min + p.rect.min.to_vec2(),
                                       rect.min + p.rect.max.to_vec2());
            painter.image(p.texture, r, uv, Color32::WHITE);
        }
    });
}

View File

@@ -0,0 +1,118 @@
use egui::{pos2, Color32, Rect};
use notedeck::{ImageType, Images};
/// State used in the MediaViewer ui widget.
///
/// Owned by the application (e.g. stored in its view state) so pan/zoom
/// and the active url list survive across frames.
#[derive(Default)]
pub struct MediaViewerState {
    /// Urls of the media to display, in tile order.
    pub urls: Vec<String>,
}
/// A panning, scrolling, optionally fullscreen, and tiling media viewer
pub struct MediaViewer<'a> {
    /// Shared viewer state (which urls to show), owned by the caller.
    state: &'a MediaViewerState,
    /// When true, render inside a screen-covering window.
    fullscreen: bool,
}
impl<'a> MediaViewer<'a> {
    /// Create a viewer over `state`, windowed (non-fullscreen) by default.
    pub fn new(state: &'a MediaViewerState) -> Self {
        let fullscreen = false;
        Self { state, fullscreen }
    }

    /// Builder toggle: render inside a screen-covering window when `enable`.
    pub fn fullscreen(mut self, enable: bool) -> Self {
        self.fullscreen = enable;
        self
    }

    /// Render the viewer. In fullscreen mode the content is hosted in a
    /// borderless egui Window pinned to the whole screen rect; otherwise
    /// it renders inline in the current ui.
    pub fn ui(&self, images: &mut Images, ui: &mut egui::Ui) {
        if self.fullscreen {
            egui::Window::new("Media Viewer")
                .title_bar(false)
                .fixed_size(ui.ctx().screen_rect().size())
                .fixed_pos(ui.ctx().screen_rect().min)
                .frame(egui::Frame::NONE)
                .show(ui.ctx(), |ui| self.ui_content(images, ui));
        } else {
            self.ui_content(images, ui);
        }
    }

    /// Shared body: a pannable/zoomable Scene over the image tiles. The
    /// scene rect (pan/zoom state) is persisted in egui temp data keyed by
    /// this ui's id so it survives across frames.
    fn ui_content(&self, images: &mut Images, ui: &mut egui::Ui) {
        let avail_rect = ui.available_rect_before_wrap();

        // TODO: id_salt
        let id = ui.id().with("media_viewer");

        // Restore last frame's scene rect, defaulting to the available
        // area on first show.
        let mut scene_rect = ui.ctx().data(|d| d.get_temp(id)).unwrap_or(avail_rect);
        let prev = scene_rect;

        // Draw background
        ui.painter()
            .rect_filled(avail_rect, 0.0, egui::Color32::from_black_alpha(128));

        egui::Scene::new()
            .zoom_range(0.0..=10.0) // enhance 🔬
            .show(ui, &mut scene_rect, |ui| {
                self.render_image_tiles(images, ui);
            });

        // Only write back when the user actually panned or zoomed.
        if scene_rect != prev {
            ui.ctx().data_mut(|d| d.insert_temp(id, scene_rect));
        }
    }

    ///
    /// Tile a scene with images.
    ///
    /// TODO(jb55): Let's improve image tiling over time, spiraling outward. We
    /// should have a way to click "next" and have the scene smoothly transition and
    /// focus on the next image
    fn render_image_tiles(&self, images: &mut Images, ui: &mut egui::Ui) {
        for url in &self.state.urls {
            // fetch image texture; skip urls that haven't loaded yet
            let Some(texture) = images.latest_texture(ui, url, ImageType::Content(None)) else {
                continue;
            };

            // the area the next image will be put in.
            let mut img_rect = ui.available_rect_before_wrap();

            if !ui.is_rect_visible(img_rect) {
                // just stop rendering images if we're going out of the scene
                // basic culling when we have lots of images
                break;
            }

            {
                // Size the tile to the texture's native dimensions.
                let size = texture.size_vec2();
                img_rect.set_height(size.y);
                img_rect.set_width(size.x);

                let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));

                // image actions
                /*
                let response = ui.interact(
                    render_rect,
                    carousel_id.with("img"),
                    Sense::click(),
                );

                if response.clicked() {
                    ui.data_mut(|data| {
                        data.insert_temp(carousel_id.with("show_popup"), true);
                    });
                } else if background_response.clicked() || response.clicked_elsewhere() {
                    ui.data_mut(|data| {
                        data.insert_temp(carousel_id.with("show_popup"), false);
                    });
                }
                */

                // Paint image
                ui.painter()
                    .image(texture.id(), img_rect, uv, Color32::WHITE);

                ui.advance_cursor_after_rect(img_rect);
            }
        }
    }
}

View File

@@ -1,19 +1,15 @@
use std::cell::OnceCell;
use crate::{
blur::imeta_blurhashes,
jobs::JobsCache,
note::{NoteAction, NoteOptions, NoteResponse, NoteView},
secondary_label,
};
use notedeck::{JobsCache, RenderableMedia};
use egui::{Color32, Hyperlink, RichText};
use nostrdb::{BlockType, Mention, Note, NoteKey, Transaction};
use tracing::warn;
use notedeck::{IsFollowing, NoteCache, NoteContext};
use super::media::{find_renderable_media, image_carousel, RenderableMedia};
use super::media::image_carousel;
use notedeck::{update_imeta_blurhashes, IsFollowing, NoteCache, NoteContext};
pub struct NoteContents<'a, 'd> {
note_context: &'a mut NoteContext<'d>,
@@ -127,11 +123,11 @@ pub fn render_note_preview(
#[allow(clippy::too_many_arguments)]
#[profiling::function]
pub fn render_note_contents(
pub fn render_note_contents<'a>(
ui: &mut egui::Ui,
note_context: &mut NoteContext,
txn: &Transaction,
note: &Note,
note: &'a Note,
options: NoteOptions,
jobs: &mut JobsCache,
) -> NoteResponse {
@@ -152,7 +148,6 @@ pub fn render_note_contents(
}
let mut supported_medias: Vec<RenderableMedia> = vec![];
let blurhashes = OnceCell::new();
let response = ui.horizontal_wrapped(|ui| {
let blocks = if let Ok(blocks) = note_context.ndb.get_blocks_by_key(txn, note_key) {
@@ -223,15 +218,15 @@ pub fn render_note_contents(
let mut found_supported = || -> bool {
let url = block.as_str();
let blurs = blurhashes.get_or_init(|| imeta_blurhashes(note));
if !note_context.img_cache.metadata.contains_key(url) {
update_imeta_blurhashes(note, &mut note_context.img_cache.metadata);
}
let Some(media_type) =
find_renderable_media(&mut note_context.img_cache.urls, blurs, url)
else {
let Some(media) = note_context.img_cache.get_renderable_media(url) else {
return false;
};
supported_medias.push(media_type);
supported_medias.push(media);
true
};
@@ -311,6 +306,7 @@ pub fn render_note_contents(
.key
.pubkey
.bytes();
let trusted_media = is_self
|| note_context
.accounts

View File

@@ -1,24 +1,22 @@
use std::{collections::HashMap, path::Path};
use std::path::Path;
use egui::{
Button, Color32, Context, CornerRadius, FontId, Image, Response, RichText, Sense,
TextureHandle, UiBuilder, Window,
};
use egui::{Button, Color32, Context, CornerRadius, FontId, Image, Response, TextureHandle};
use notedeck::{
fonts::get_font_size, note::MediaAction, show_one_error_message, supported_mime_hosted_at_url,
tr, GifState, GifStateMap, Images, JobPool, Localization, MediaCache, MediaCacheType,
NotedeckTextStyle, TexturedImage, TexturesCache, UrlMimes,
compute_blurhash, fonts::get_font_size, show_one_error_message, tr, BlurhashParams,
GifStateMap, Images, Job, JobId, JobParams, JobPool, JobState, JobsCache, Localization,
MediaAction, MediaCacheType, NotedeckTextStyle, ObfuscationType, PointDimensions,
RenderableMedia, TexturedImage, TexturesCache,
};
use crate::{
app_images,
blur::{compute_blurhash, Blur, ObfuscationType, PointDimensions},
colors::PINK,
gif::{handle_repaint, retrieve_latest_texture},
images::{fetch_no_pfp_promise, get_render_state, ImageType},
jobs::{BlurhashParams, Job, JobId, JobParams, JobState, JobsCache},
AnimationHelper, PulseAlpha,
};
use notedeck::media::gif::ensure_latest_texture;
use notedeck::media::images::{fetch_no_pfp_promise, ImageType};
use crate::{app_images, AnimationHelper, PulseAlpha};
pub enum MediaViewAction {
/// Used to handle escape presses when the media viewer is open
EscapePressed,
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn image_carousel(
@@ -36,7 +34,6 @@ pub(crate) fn image_carousel(
let height = 360.0;
let width = ui.available_width();
let show_popup = get_show_popup(ui, popup_id(carousel_id));
let mut action = None;
//let has_touch_screen = ui.ctx().input(|i| i.has_touch_screen());
@@ -46,6 +43,7 @@ pub(crate) fn image_carousel(
.id_salt(carousel_id)
.show(ui, |ui| {
ui.horizontal(|ui| {
let mut media_action: Option<(usize, MediaUIAction)> = None;
for (i, media) in medias.iter().enumerate() {
let RenderableMedia {
url,
@@ -57,7 +55,6 @@ pub(crate) fn image_carousel(
MediaCacheType::Image => &mut img_cache.static_imgs,
MediaCacheType::Gif => &mut img_cache.gifs,
};
let media_state = get_content_media_render_state(
ui,
job_pool,
@@ -68,7 +65,7 @@ pub(crate) fn image_carousel(
url,
*media_type,
&cache.cache_dir,
blur_type.clone(),
blur_type,
);
if let Some(cur_action) = render_media(
@@ -79,43 +76,25 @@ pub(crate) fn image_carousel(
height,
i18n,
) {
// clicked the media, lets set the active index
if let MediaUIAction::Clicked = cur_action {
set_show_popup(ui, popup_id(carousel_id), true);
set_selected_index(ui, selection_id(carousel_id), i);
}
action = cur_action.to_media_action(
ui.ctx(),
url,
*media_type,
cache,
ImageType::Content(Some((width as u32, height as u32))),
);
media_action = Some((i, cur_action));
}
}
if let Some((i, media_action)) = &media_action {
action = media_action.to_media_action(
ui.ctx(),
medias,
*i,
img_cache,
ImageType::Content(Some((width as u32, height as u32))),
);
}
})
.response
})
.inner
});
if show_popup {
if medias.is_empty() {
return None;
};
let current_image_index = update_selected_image_index(ui, carousel_id, medias.len() as i32);
show_full_screen_media(
ui,
medias,
current_image_index,
img_cache,
carousel_id,
i18n,
);
}
action
}
@@ -130,146 +109,55 @@ impl MediaUIAction {
pub fn to_media_action(
&self,
ctx: &egui::Context,
url: &str,
cache_type: MediaCacheType,
cache: &mut MediaCache,
medias: &[RenderableMedia],
selected: usize,
img_cache: &Images,
img_type: ImageType,
) -> Option<MediaAction> {
match self {
MediaUIAction::Clicked => {
tracing::debug!("{} clicked", url);
None
}
MediaUIAction::Clicked => Some(MediaAction::ViewMedias(
medias.iter().map(|m| m.url.to_owned()).collect(),
)),
MediaUIAction::Unblur => Some(MediaAction::FetchImage {
url: url.to_owned(),
cache_type,
no_pfp_promise: crate::images::fetch_img(
MediaUIAction::Unblur => {
let url = &medias[selected].url;
let cache = img_cache.get_cache(medias[selected].media_type);
let cache_type = cache.cache_type;
let no_pfp_promise = notedeck::media::images::fetch_img(
&cache.cache_dir,
ctx,
url,
img_type,
cache_type,
),
}),
);
Some(MediaAction::FetchImage {
url: url.to_owned(),
cache_type,
no_pfp_promise,
})
}
MediaUIAction::Error => {
if !matches!(img_type, ImageType::Profile(_)) {
return None;
};
let cache = img_cache.get_cache(medias[selected].media_type);
let cache_type = cache.cache_type;
Some(MediaAction::FetchImage {
url: url.to_owned(),
url: medias[selected].url.to_owned(),
cache_type,
no_pfp_promise: fetch_no_pfp_promise(ctx, cache),
})
}
MediaUIAction::DoneLoading => Some(MediaAction::DoneLoading {
url: url.to_owned(),
cache_type,
url: medias[selected].url.to_owned(),
cache_type: img_cache.get_cache(medias[selected].media_type).cache_type,
}),
}
}
}
/// Open the borderless, screen-sized popup window that shows the media at
/// `index` from `medias` full screen.
///
/// If the texture for the selected media has not finished loading yet
/// (anything other than `TextureState::Loaded`), nothing is rendered this
/// frame; the window is shown again on a later frame once the fetch kicked
/// off by `get_render_state` completes.
fn show_full_screen_media(
    ui: &mut egui::Ui,
    medias: &[RenderableMedia],
    index: usize,
    img_cache: &mut Images,
    carousel_id: egui::Id,
    i18n: &mut Localization,
) {
    // Pin the window to cover the whole screen with no chrome.
    let screen = ui.ctx().screen_rect();

    Window::new("image_popup")
        .title_bar(false)
        .fixed_size(screen.size())
        .fixed_pos(screen.min)
        .frame(egui::Frame::NONE)
        .show(ui.ctx(), |ui| {
            ui.centered_and_justified(|ui| {
                let media = &medias[index];

                tracing::trace!(
                    "show_full_screen_media using img {} @ {} for carousel_id {:?}",
                    media.url,
                    index,
                    carousel_id
                );

                // Look up (or start loading) the texture for this media.
                let render_state = get_render_state(
                    ui.ctx(),
                    img_cache,
                    media.media_type,
                    media.url,
                    ImageType::Content(None),
                );

                // Only draw once the texture is actually resident.
                if let notedeck::TextureState::Loaded(textured_image) = render_state.texture_state {
                    render_full_screen_media(
                        ui,
                        medias.len(),
                        index,
                        textured_image,
                        render_state.gifs,
                        media.url,
                        carousel_id,
                        i18n,
                    );
                }
            })
        });
}
/// Remember `index` as the carousel's selected image in egui's temp id map.
fn set_selected_index(ui: &mut egui::Ui, sel_id: egui::Id, index: usize) {
    ui.data_mut(|store| store.insert_temp(sel_id, index));
}
/// Read the carousel's remembered selection, defaulting to the first image
/// when nothing has been stored yet.
fn get_selected_index(ui: &egui::Ui, selection_id: egui::Id) -> usize {
    ui.data(|store| store.get_temp::<usize>(selection_id).unwrap_or_default())
}
/// Checks for left/right navigation input and updates the carousel index.
///
/// Navigation can come from two sources: one-shot swipe flags
/// (`next_image` / `prev_image`, set by the full-screen drag handler and
/// cleared here once consumed) or keyboard input (ArrowRight/`L` and
/// ArrowLeft/`H`, vim-style). Returns the resulting selected index; with a
/// single media there is nothing to navigate, so 0 is returned directly.
fn update_selected_image_index(ui: &mut egui::Ui, carousel_id: egui::Id, num_urls: i32) -> usize {
    if num_urls <= 1 {
        return 0;
    }

    let next_flag_id = carousel_id.with("next_image");
    let prev_flag_id = carousel_id.with("prev_image");

    let (go_next, go_prev) = ui.data(|data| {
        (
            data.get_temp(next_flag_id).unwrap_or_default(),
            data.get_temp(prev_flag_id).unwrap_or_default(),
        )
    });

    // "next" wins when both a flag and a key press are pending.
    if go_next
        || ui.input(|i| i.key_pressed(egui::Key::ArrowRight) || i.key_pressed(egui::Key::L))
    {
        let ind = select_next_media(ui, carousel_id, num_urls, 1);
        tracing::debug!("carousel selecting right {}/{}", ind + 1, num_urls);
        if go_next {
            // consume the one-shot swipe flag
            ui.data_mut(|data| data.remove_temp::<bool>(next_flag_id));
        }
        return ind;
    }

    if go_prev
        || ui.input(|i| i.key_pressed(egui::Key::ArrowLeft) || i.key_pressed(egui::Key::H))
    {
        let ind = select_next_media(ui, carousel_id, num_urls, -1);
        tracing::debug!("carousel selecting left {}/{}", ind + 1, num_urls);
        if go_prev {
            // consume the one-shot swipe flag
            ui.data_mut(|data| data.remove_temp::<bool>(prev_flag_id));
        }
        return ind;
    }

    // No navigation input this frame: keep the current selection.
    get_selected_index(ui, selection_id(carousel_id))
}
#[allow(clippy::too_many_arguments)]
pub fn get_content_media_render_state<'a>(
ui: &mut egui::Ui,
@@ -281,11 +169,11 @@ pub fn get_content_media_render_state<'a>(
url: &'a str,
cache_type: MediaCacheType,
cache_dir: &Path,
obfuscation_type: ObfuscationType<'a>,
obfuscation_type: &'a ObfuscationType,
) -> MediaRenderState<'a> {
let render_type = if media_trusted {
cache.handle_and_get_or_insert_loadable(url, || {
crate::images::fetch_img(
notedeck::media::images::fetch_img(
cache_dir,
ui.ctx(),
url,
@@ -332,7 +220,7 @@ pub fn get_content_media_render_state<'a>(
fn get_obfuscated<'a>(
ui: &mut egui::Ui,
url: &str,
obfuscation_type: ObfuscationType<'a>,
obfuscation_type: &'a ObfuscationType,
job_pool: &'a mut JobPool,
jobs: &'a mut JobsCache,
height: f32,
@@ -342,7 +230,7 @@ fn get_obfuscated<'a>(
};
let params = BlurhashParams {
blurhash: renderable_blur.blurhash,
blurhash: &renderable_blur.blurhash,
url,
ctx: ui.ctx(),
};
@@ -379,336 +267,6 @@ fn get_obfuscated<'a>(
ObfuscatedTexture::Blur(texture_handle)
}
/// Step the remembered carousel selection by `direction`, wrapping to the
/// opposite end when stepping past either boundary, and persist the new
/// index in egui's temp storage. Returns the resulting index.
fn select_next_media(
    ui: &mut egui::Ui,
    carousel_id: egui::Id,
    num_urls: i32,
    direction: i32,
) -> usize {
    let sel_id = selection_id(carousel_id);
    let current = get_selected_index(ui, sel_id) as i32;

    // Wrap: stepping past the last image goes to the first, and vice versa.
    let next = match current + direction {
        stepped if stepped >= num_urls => 0,
        stepped if stepped < 0 => num_urls - 1,
        stepped => stepped,
    };

    // Only write back when the selection actually moved.
    if next != current {
        set_selected_index(ui, sel_id, next as usize);
    }

    next as usize
}
/// Renders the full-screen media overlay: dimmed background, URL top bar,
/// the zoomable/pannable image itself, swipe navigation, and (for multiple
/// medias) a clickable dot indicator bottom bar.
///
/// Zoom, pan, swipe accumulation, and the open/closed state of the popup are
/// all persisted in egui memory under ids derived from `carousel_id`, so the
/// state survives across frames and is per-carousel.
#[allow(clippy::too_many_arguments)]
fn render_full_screen_media(
    ui: &mut egui::Ui,
    num_urls: usize,
    index: usize,
    renderable_media: &mut TexturedImage,
    gifs: &mut HashMap<String, GifState>,
    image_url: &str,
    carousel_id: egui::Id,
    i18n: &mut Localization,
) {
    // Reserved vertical space for the URL bar and the dot indicator bar.
    const TOP_BAR_HEIGHT: f32 = 30.0;
    const BOTTOM_BAR_HEIGHT: f32 = 60.0;

    let screen_rect = ui.ctx().screen_rect();
    let screen_size = screen_rect.size();

    // Escape key closes popup
    if ui.input(|i| i.key_pressed(egui::Key::Escape)) {
        ui.ctx().memory_mut(|mem| {
            mem.data.insert_temp(carousel_id.with("show_popup"), false);
        });
    }

    // Draw background (mostly-opaque black scrim over the whole screen)
    ui.painter()
        .rect_filled(screen_rect, 0.0, Color32::from_black_alpha(230));

    // Click target for "clicked outside the image" → close the popup below.
    let background_response = ui.interact(
        screen_rect,
        carousel_id.with("background"),
        egui::Sense::click(),
    );

    // Zoom & pan state, persisted per carousel in egui memory.
    let zoom_id = carousel_id.with("zoom_level");
    let pan_id = carousel_id.with("pan_offset");

    let mut zoom: f32 = ui
        .ctx()
        .memory(|mem| mem.data.get_temp(zoom_id).unwrap_or(1.0));
    let mut pan_offset = ui
        .ctx()
        .memory(|mem| mem.data.get_temp(pan_id).unwrap_or(egui::Vec2::ZERO));

    // Handle scroll to zoom: 5% per scroll tick, clamped to [0.1, 5.0];
    // zooming back out to <= 1.0 also recenters the pan.
    if ui.input(|i| i.pointer.hover_pos()).is_some() {
        let scroll_delta = ui.input(|i| i.smooth_scroll_delta);
        if scroll_delta.y != 0.0 {
            let zoom_factor = if scroll_delta.y > 0.0 { 1.05 } else { 0.95 };
            zoom = (zoom * zoom_factor).clamp(0.1, 5.0);
            if zoom <= 1.0 {
                pan_offset = egui::Vec2::ZERO;
            }
            ui.ctx().memory_mut(|mem| {
                mem.data.insert_temp(zoom_id, zoom);
                mem.data.insert_temp(pan_id, pan_offset);
            });
        }
    }

    // Fetch image (latest GIF frame for animated media, requesting a repaint
    // when more frames are pending)
    let texture = handle_repaint(
        ui,
        retrieve_latest_texture(image_url, gifs, renderable_media),
    );
    let texture_size = texture.size_vec2();

    // Top bar strip across the full width of the screen.
    let topbar_rect = egui::Rect::from_min_max(
        screen_rect.min + egui::vec2(0.0, 0.0),
        screen_rect.min + egui::vec2(screen_size.x, TOP_BAR_HEIGHT),
    );

    let topbar_response = ui.interact(
        topbar_rect,
        carousel_id.with("topbar"),
        egui::Sense::click(),
    );

    // Any interaction with the chrome keeps the popup open this frame.
    let mut keep_popup_open = false;
    if topbar_response.clicked() {
        keep_popup_open = true;
    }

    // Top bar content: the media URL, right-aligned; interacting with it
    // copies the URL to the clipboard.
    ui.allocate_new_ui(
        UiBuilder::new()
            .max_rect(topbar_rect)
            .layout(egui::Layout::top_down(egui::Align::RIGHT)),
        |ui| {
            let color = ui.style().visuals.noninteractive().fg_stroke.color;
            ui.add_space(10.0);
            ui.horizontal(|ui| {
                let label_reponse = ui
                    .label(RichText::new(image_url).color(color).small())
                    .on_hover_text(image_url);
                // NOTE(review): this condition includes hovered(), so merely
                // hovering the label copies the URL to the clipboard every
                // frame — confirm that is intended rather than click-only.
                if label_reponse.double_clicked()
                    || label_reponse.clicked()
                    || label_reponse.hovered()
                {
                    keep_popup_open = true;
                    ui.ctx().copy_text(image_url.to_owned());
                }
            });
        },
    );

    // Calculate available rect for image (screen minus top/bottom bars)
    let image_rect = egui::Rect::from_min_max(
        screen_rect.min + egui::vec2(0.0, TOP_BAR_HEIGHT),
        screen_rect.max - egui::vec2(0.0, BOTTOM_BAR_HEIGHT),
    );
    let image_area_size = image_rect.size();

    // Fit the texture inside the area (never upscale past 1.0 at zoom 1).
    let scale = (image_area_size.x / texture_size.x)
        .min(image_area_size.y / texture_size.y)
        .min(1.0);
    let scaled_size = texture_size * scale * zoom;

    // The on-screen viewport never exceeds the available area; anything
    // beyond it is reachable by panning.
    let visible_width = scaled_size.x.min(image_area_size.x);
    let visible_height = scaled_size.y.min(image_area_size.y);

    let max_pan_x = ((scaled_size.x - visible_width) / 2.0).max(0.0);
    let max_pan_y = ((scaled_size.y - visible_height) / 2.0).max(0.0);

    // Clamp any stale pan offset to the currently valid range.
    pan_offset.x = if max_pan_x > 0.0 {
        pan_offset.x.clamp(-max_pan_x, max_pan_x)
    } else {
        0.0
    };
    pan_offset.y = if max_pan_y > 0.0 {
        pan_offset.y.clamp(-max_pan_y, max_pan_y)
    } else {
        0.0
    };

    let render_rect = egui::Rect::from_center_size(
        image_rect.center(),
        egui::vec2(visible_width, visible_height),
    );

    // Compute UVs for zoom & pan: select the sub-rectangle of the texture
    // (in 0..1 coordinates) that the viewport currently shows.
    let uv_min = egui::pos2(
        0.5 - (visible_width / scaled_size.x) / 2.0 + pan_offset.x / scaled_size.x,
        0.5 - (visible_height / scaled_size.y) / 2.0 + pan_offset.y / scaled_size.y,
    );
    let uv_max = egui::pos2(
        uv_min.x + visible_width / scaled_size.x,
        uv_min.y + visible_height / scaled_size.y,
    );

    // Paint image
    ui.painter().image(
        texture.id(),
        render_rect,
        egui::Rect::from_min_max(uv_min, uv_max),
        Color32::WHITE,
    );

    // image actions (click to keep open, drag to pan/swipe)
    let response = ui.interact(
        render_rect,
        carousel_id.with("img"),
        Sense::click_and_drag(),
    );

    // Accumulated drag distance for swipe detection, persisted across frames
    // of a single drag gesture.
    let swipe_accum_id = carousel_id.with("swipe_accum");
    let mut swipe_delta = ui.ctx().memory(|mem| {
        mem.data
            .get_temp::<egui::Vec2>(swipe_accum_id)
            .unwrap_or(egui::Vec2::ZERO)
    });

    // Handle pan via drag: dragging moves the viewport opposite to the
    // pointer delta, clamped to the pannable range.
    if response.dragged() {
        let delta = response.drag_delta();
        swipe_delta += delta;
        ui.ctx().memory_mut(|mem| {
            mem.data.insert_temp(swipe_accum_id, swipe_delta);
        });

        pan_offset -= delta;
        pan_offset.x = pan_offset.x.clamp(-max_pan_x, max_pan_x);
        pan_offset.y = pan_offset.y.clamp(-max_pan_y, max_pan_y);
        ui.ctx()
            .memory_mut(|mem| mem.data.insert_temp(pan_id, pan_offset));
    }

    // Double click to reset zoom and pan to the default view
    if response.double_clicked() {
        zoom = 1.0;
        pan_offset = egui::Vec2::ZERO;
        ui.ctx().memory_mut(|mem| {
            mem.data.insert_temp(pan_id, pan_offset);
            mem.data.insert_temp(zoom_id, zoom);
        });
    }

    // When the drag ends, treat a mostly-horizontal gesture past the
    // threshold as a swipe: set the one-shot next/prev flags consumed by
    // update_selected_image_index, then clear the accumulator.
    let swipe_threshold = 50.0;
    if response.drag_stopped() {
        if swipe_delta.x.abs() > swipe_threshold && swipe_delta.y.abs() < swipe_threshold {
            if swipe_delta.x < 0.0 {
                ui.ctx().data_mut(|data| {
                    keep_popup_open = true;
                    data.insert_temp(carousel_id.with("next_image"), true);
                });
            } else if swipe_delta.x > 0.0 {
                ui.ctx().data_mut(|data| {
                    keep_popup_open = true;
                    data.insert_temp(carousel_id.with("prev_image"), true);
                });
            }
        }
        ui.ctx().memory_mut(|mem| {
            mem.data.remove::<egui::Vec2>(swipe_accum_id);
        });
    }

    // bottom bar: one dot per media, active one highlighted; clicking a dot
    // jumps the carousel to that media.
    if num_urls > 1 {
        let bottom_rect = egui::Rect::from_min_max(
            screen_rect.max - egui::vec2(screen_size.x, BOTTOM_BAR_HEIGHT),
            screen_rect.max,
        );
        let full_response = ui.interact(
            bottom_rect,
            carousel_id.with("bottom_bar"),
            egui::Sense::click(),
        );
        if full_response.clicked() {
            keep_popup_open = true;
        }

        let mut clicked_index: Option<usize> = None;

        #[allow(deprecated)]
        ui.allocate_ui_at_rect(bottom_rect, |ui| {
            let dot_radius = 7.0;
            let dot_spacing = 20.0;
            let color_active = PINK;
            let color_inactive: Color32 = ui.style().visuals.widgets.inactive.bg_fill;
            let center = bottom_rect.center();

            for i in 0..num_urls {
                // Dots are laid out symmetrically around the bar's center.
                let distance = egui::vec2(
                    (i as f32 - (num_urls as f32 - 1.0) / 2.0) * dot_spacing,
                    0.0,
                );
                let pos = center + distance;

                let circle_color = if i == index {
                    color_active
                } else {
                    color_inactive
                };

                let circle_rect = egui::Rect::from_center_size(
                    pos,
                    egui::vec2(dot_radius * 2.0, dot_radius * 2.0),
                );
                let resp = ui.interact(circle_rect, carousel_id.with(i), egui::Sense::click());
                ui.painter().circle_filled(pos, dot_radius, circle_color);
                // Hover ring on inactive dots.
                if i != index && resp.hovered() {
                    ui.painter()
                        .circle_stroke(pos, dot_radius + 2.0, (1.0, PINK));
                }

                if resp.clicked() {
                    keep_popup_open = true;
                    if i != index {
                        clicked_index = Some(i);
                    }
                }
            }
        });

        if let Some(new_index) = clicked_index {
            ui.ctx().data_mut(|data| {
                data.insert_temp(selection_id(carousel_id), new_index);
            });
        }
    }

    // Final open/close decision: chrome interaction or an image click keeps
    // the popup open; a click on the scrim (or elsewhere) closes it.
    if keep_popup_open || response.clicked() {
        ui.data_mut(|data| {
            data.insert_temp(carousel_id.with("show_popup"), true);
        });
    } else if background_response.clicked() || response.clicked_elsewhere() {
        ui.data_mut(|data| {
            data.insert_temp(carousel_id.with("show_popup"), false);
        });
    }

    // Right-click context menu offering to copy the media link.
    copy_link(i18n, image_url, &response);
}
fn copy_link(i18n: &mut Localization, url: &str, img_resp: &Response) {
img_resp.context_menu(|ui| {
if ui
@@ -905,12 +463,6 @@ fn render_default_blur_bg(ui: &mut egui::Ui, height: f32, url: &str, shimmer: bo
rect
}
pub(crate) struct RenderableMedia<'a> {
url: &'a str,
media_type: MediaCacheType,
obfuscation_type: ObfuscationType<'a>,
}
pub enum MediaRenderState<'a> {
ActualImage(&'a mut TexturedImage),
Transitioning {
@@ -927,14 +479,15 @@ pub enum ObfuscatedTexture<'a> {
Default,
}
/*
pub(crate) fn find_renderable_media<'a>(
urls: &mut UrlMimes,
blurhashes: &'a HashMap<&'a str, Blur<'a>>,
imeta: &'a HashMap<String, ImageMetadata>,
url: &'a str,
) -> Option<RenderableMedia<'a>> {
) -> Option<RenderableMedia> {
let media_type = supported_mime_hosted_at_url(urls, url)?;
let obfuscation_type = match blurhashes.get(url) {
let obfuscation_type = match imeta.get(url) {
Some(blur) => ObfuscationType::Blurhash(blur.clone()),
None => ObfuscationType::Default,
};
@@ -945,28 +498,7 @@ pub(crate) fn find_renderable_media<'a>(
obfuscation_type,
})
}
#[inline]
fn selection_id(carousel_id: egui::Id) -> egui::Id {
carousel_id.with("sel")
}
/// get the popup carousel window state
#[inline]
fn get_show_popup(ui: &egui::Ui, popup_id: egui::Id) -> bool {
ui.data(|data| data.get_temp(popup_id).unwrap_or(false))
}
/// set the popup carousel window state
#[inline]
fn set_show_popup(ui: &mut egui::Ui, popup_id: egui::Id, show_popup: bool) {
ui.data_mut(|data| data.insert_temp(popup_id, show_popup));
}
#[inline]
fn popup_id(carousel_id: egui::Id) -> egui::Id {
carousel_id.with("show_popup")
}
*/
fn render_success_media(
ui: &mut egui::Ui,
@@ -976,8 +508,8 @@ fn render_success_media(
height: f32,
i18n: &mut Localization,
) -> Response {
let texture = handle_repaint(ui, retrieve_latest_texture(url, gifs, tex));
let img = texture_to_image(texture, height);
let texture = ensure_latest_texture(ui, url, gifs, tex);
let img = texture_to_image(&texture, height);
let img_resp = ui.add(Button::image(img).frame(false));
copy_link(i18n, url, &img_resp);

View File

@@ -4,7 +4,6 @@ pub mod media;
pub mod options;
pub mod reply_description;
use crate::jobs::JobsCache;
use crate::{app_images, secondary_label};
use crate::{
profile::name::one_line_display_name_widget, widgets::x_button, ProfilePic, ProfilePreview,
@@ -14,13 +13,14 @@ use crate::{
pub use contents::{render_note_contents, render_note_preview, NoteContents};
pub use context::NoteContextButton;
use notedeck::get_current_wallet;
use notedeck::note::MediaAction;
use notedeck::note::ZapTargetAmount;
use notedeck::ui::is_narrow;
use notedeck::Accounts;
use notedeck::GlobalWallet;
use notedeck::Images;
use notedeck::JobsCache;
use notedeck::Localization;
use notedeck::MediaAction;
pub use options::NoteOptions;
pub use reply_description::reply_desc;

View File

@@ -2,8 +2,8 @@ use egui::{Label, RichText, Sense};
use nostrdb::{NoteReply, Transaction};
use super::NoteOptions;
use crate::{jobs::JobsCache, note::NoteView, Mention};
use notedeck::{tr, NoteAction, NoteContext};
use crate::{note::NoteView, Mention};
use notedeck::{tr, JobsCache, NoteAction, NoteContext};
// Rich text segment types for internationalized rendering
#[derive(Debug, Clone)]

View File

@@ -113,7 +113,7 @@ pub fn banner(ui: &mut egui::Ui, banner_url: Option<&str>, height: f32) -> egui:
banner_url
.and_then(|url| banner_texture(ui, url))
.map(|texture| {
crate::images::aspect_fill(
notedeck::media::images::aspect_fill(
ui,
egui::Sense::hover(),
texture.id,

View File

@@ -1,8 +1,9 @@
use crate::gif::{handle_repaint, retrieve_latest_texture};
use crate::images::{fetch_no_pfp_promise, get_render_state, ImageType};
use egui::{vec2, InnerResponse, Sense, Stroke, TextureHandle};
use notedeck::note::MediaAction;
use notedeck::get_render_state;
use notedeck::media::gif::ensure_latest_texture;
use notedeck::media::images::{fetch_no_pfp_promise, ImageType};
use notedeck::MediaAction;
use notedeck::{show_one_error_message, supported_mime_hosted_at_url, Images};
pub struct ProfilePic<'cache, 'url> {
@@ -140,12 +141,9 @@ fn render_pfp(
)
}
notedeck::TextureState::Loaded(textured_image) => {
let texture_handle = handle_repaint(
ui,
retrieve_latest_texture(url, cur_state.gifs, textured_image),
);
let texture_handle = ensure_latest_texture(ui, url, cur_state.gifs, textured_image);
egui::InnerResponse::new(None, pfp_image(ui, texture_handle, ui_size, border, sense))
egui::InnerResponse::new(None, pfp_image(ui, &texture_handle, ui_size, border, sense))
}
}
}