mirror of https://github.com/cds-astro/aladin-lite.git
synced 2025-12-24 20:10:30 -08:00

Compare commits: feat-keep-... feat-zoom-

2 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 0887eb839d | |
| | 96096429a5 | |
@@ -88,4 +88,4 @@ codegen-units = 16
rpath = false

[package.metadata.wasm-pack.profile.release]
wasm-opt = true
wasm-opt = false
@@ -713,18 +713,22 @@ impl App {
)?,
}
self.time_start_blending = Time::now();
},
}
// In case of JPEG tile missing, add it to the HiPS because it must be drawn as black
None if cfg.get_format().get_channel() == ChannelType::RGB8U => {
None if cfg.get_format().get_channel()
== ChannelType::RGB8U =>
{
self.request_redraw = true;
match hips {
HiPS::D2(hips) => {
hips.add_tile::<ImageType>(&tile.cell, None, tile.time_req)?
}
HiPS::D2(hips) => hips.add_tile::<ImageType>(
&tile.cell,
None,
tile.time_req,
)?,
HiPS::D3(_) => (),
}
},
_ => ()
}
_ => (),
};
}
}

@@ -1286,8 +1290,7 @@ impl App {
// Set the new meta
// keep the old meta data
let new_img_ext = meta.img_format;
self.layers
.set_layer_cfg(layer.clone(), meta)?;
self.layers.set_layer_cfg(layer.clone(), meta)?;

if old_meta.img_format != new_img_ext {
// The image format has been changed

@@ -1571,7 +1574,7 @@ impl App {
self.camera.get_center_pos_angle()
}

pub(crate) fn set_fov(&mut self, fov: Angle<f64>) {
pub(crate) fn set_fov(&mut self, fov: f64) {
// For the moment, no animation is triggered.
// The fov is directly set
self.camera.set_aperture(fov, &self.projection);

@@ -1579,17 +1582,20 @@ impl App {
self.request_redraw = true;
}

pub(crate) fn set_fov_range(&mut self, min_fov: Option<f64>, max_fov: Option<f64>) {
self.camera.set_fov_range(
min_fov.map(|v| v.to_radians()),
max_fov.map(|v| v.to_radians()),
&self.projection,
);
self.request_for_new_tiles = true;
self.request_redraw = true;
}

pub(crate) fn set_inertia(&mut self, inertia: bool) {
*self.disable_inertia.borrow_mut() = !inertia;
}

/*pub(crate) fn project_line(&self, lon1: f64, lat1: f64, lon2: f64, lat2: f64) -> Vec<Vector2<f64>> {
let v1: Vector3<f64> = LonLatT::new(ArcDeg(lon1).into(), ArcDeg(lat1).into()).vector();
let v2: Vector3<f64> = LonLatT::new(ArcDeg(lon2).into(), ArcDeg(lat2).into()).vector();

line::project_along_great_circles(&v1, &v2, &self.camera, self.projection)
}*/

pub(crate) fn go_from_to(&mut self, s1x: f64, s1y: f64, s2x: f64, s2y: f64) {
// Select the HiPS layer rendered lastly
if let (Some(w1), Some(w2)) = (
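The JS-facing API works in degrees while the camera works in radians, so `App::set_fov_range` maps each optional bound through `to_radians` before handing it to the camera. A minimal JavaScript sketch of that unit handling (the helper name `toRadianRange` is hypothetical, not part of Aladin Lite):

```js
// Hypothetical helper mirroring App::set_fov_range's unit handling:
// each bound is optional and converted from degrees to radians when present.
const degToRad = (deg) => (deg * Math.PI) / 180;

function toRadianRange(minFovDeg, maxFovDeg) {
    return {
        min: minFovDeg != null ? degToRad(minFovDeg) : null,
        max: maxFovDeg != null ? degToRad(maxFovDeg) : null,
    };
}

console.log(toRadianRange(30, 60)); // { min: ~0.5236, max: ~1.0472 }
```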
@@ -1634,13 +1640,19 @@ impl App {
self.camera.get_texture_depth() as i32
}

pub(crate) fn get_clip_zoom_factor(&self) -> f64 {
self.camera.get_clip_zoom_factor()
pub(crate) fn get_zoom_factor(&self) -> f64 {
self.camera.get_zoom_factor()
}

pub(crate) fn set_zoom_factor(&mut self, zoom_factor: f64) {
self.camera.set_zoom_factor(zoom_factor, &self.projection);

self.request_for_new_tiles = true;
self.request_redraw = true;
}

pub(crate) fn get_fov(&self) -> f64 {
let deg: ArcDeg<f64> = self.camera.get_aperture().into();
deg.0
self.camera.get_aperture().to_degrees()
}

pub(crate) fn get_colormaps(&self) -> &Colormaps {

@@ -18,9 +18,12 @@ use crate::math::angle::ToAngle;
use crate::math::{projection::coo_space::XYZWModel, projection::domain::sdf::ProjDef};

use cgmath::{Matrix4, Vector2};
const APERTURE_LOWER_LIMIT_RAD: f64 = (1.0_f64 / 36000.0).to_radians();
const ZOOM_FACTOR_UPPER_LIMIT: f64 = 2.0;

pub struct CameraViewPort {
// The field of view angle
aperture: Angle<f64>,
aperture: f64,
// The rotation of the camera
center: Vector4<f64>,
w2m_rot: Rotation<f64>,
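The new `APERTURE_LOWER_LIMIT_RAD` constant encodes a 0.1 arcsecond floor: one degree contains 3600 arcseconds, so 0.1″ is 1/36000 of a degree. A quick JavaScript check of that arithmetic (purely illustrative, not library code):

```js
// 1 degree = 3600 arcseconds, so 0.1 arcsec = (1 / 36000) degree.
const degToRad = (deg) => (deg * Math.PI) / 180;

const apertureLowerLimitRad = degToRad(1 / 36000);
console.log(apertureLowerLimitRad);  // ~4.85e-8 rad
console.log((1 / 36000) * 3600);     // 0.1 (arcsec)
```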
@@ -43,7 +46,7 @@ pub struct CameraViewPort {

// Internal variable used for projection purposes
ndc_to_clip: Vector2<f64>,
clip_zoom_factor: f64,
zoom_factor: f64,
// The vertices in model space of the camera
// This is useful for computing views according
// to different image surveys

@@ -70,6 +73,11 @@ pub struct CameraViewPort {
gl: WebGlContext,
coo_sys: CooSystem,
reversed_longitude: bool,

// min field of view, by default 0.1 arcsec
pub(crate) min_fov: Option<f64>,
// an optional max field of view
pub(crate) max_fov: Option<f64>,
}
use al_api::coo_system::CooSystem;
use al_core::WebGlContext;

@@ -87,7 +95,6 @@ const MAX_DPI_LIMIT: f32 = 2.0;
use crate::math;
use crate::time::Time;
use crate::Abort;
use crate::ArcDeg;
impl CameraViewPort {
pub fn new(
gl: &WebGlContext,

@@ -96,7 +103,7 @@ impl CameraViewPort {
) -> CameraViewPort {
let last_user_action = UserAction::Starting;

let aperture = Angle(projection.aperture_start());
let aperture = projection.aperture_start().to_radians();

let w2m = Matrix4::identity();
let m2w = w2m;

@@ -122,9 +129,9 @@ impl CameraViewPort {

let aspect = height / width;
let ndc_to_clip = Vector2::new(1.0, (height as f64) / (width as f64));
let clip_zoom_factor = 1.0;
let zoom_factor = 1.0;

let fov = FieldOfView::new(&ndc_to_clip, clip_zoom_factor, &w2m, projection);
let fov = FieldOfView::new(&ndc_to_clip, zoom_factor, &w2m, projection);
let gl = gl.clone();

let is_allsky = true;

@@ -154,7 +161,7 @@ impl CameraViewPort {

// Internal variable used for projection purposes
ndc_to_clip,
clip_zoom_factor,
zoom_factor,
// The field of view
fov,
view_hpx_cells,

@@ -177,6 +184,9 @@ impl CameraViewPort {
coo_sys,
// a flag telling if the viewport has a reversed longitude axis
reversed_longitude,

min_fov: None,
max_fov: None,
}
}

@@ -223,13 +233,13 @@ impl CameraViewPort {

// check the projection
match proj {
ProjectionType::Tan(_) => self.aperture >= 100.0_f64.to_radians().to_angle(),
ProjectionType::Mer(_) => self.aperture >= 120.0_f64.to_radians().to_angle(),
ProjectionType::Stg(_) => self.aperture >= 200.0_f64.to_radians().to_angle(),
ProjectionType::Tan(_) => self.aperture >= 100.0_f64.to_radians(),
ProjectionType::Mer(_) => self.aperture >= 120.0_f64.to_radians(),
ProjectionType::Stg(_) => self.aperture >= 200.0_f64.to_radians(),
ProjectionType::Sin(_) => false,
ProjectionType::Ait(_) => self.aperture >= 100.0_f64.to_radians().to_angle(),
ProjectionType::Mol(_) => self.aperture >= 100.0_f64.to_radians().to_angle(),
ProjectionType::Zea(_) => self.aperture >= 140.0_f64.to_radians().to_angle(),
ProjectionType::Ait(_) => self.aperture >= 100.0_f64.to_radians(),
ProjectionType::Mol(_) => self.aperture >= 100.0_f64.to_radians(),
ProjectionType::Zea(_) => self.aperture >= 140.0_f64.to_radians(),
}
}

@@ -278,12 +288,8 @@ impl CameraViewPort {
// Compute the new clip zoom factor
self.compute_ndc_to_clip_factor(projection);

self.fov.set_aperture(
&self.ndc_to_clip,
self.clip_zoom_factor,
&self.w2m,
projection,
);
self.fov
.set_aperture(&self.ndc_to_clip, self.zoom_factor, &self.w2m, projection);

let proj_area = projection.get_area();
self.is_allsky = !proj_area.is_in(&math::projection::ndc_to_clip_space(

@@ -325,7 +331,74 @@ impl CameraViewPort {
self.set_aperture(self.aperture, proj);
}

pub fn set_aperture(&mut self, aperture: Angle<f64>, proj: &ProjectionType) {
/// Give a FoV range in radians
pub(crate) fn set_fov_range(
&mut self,
mut min_fov: Option<f64>,
mut max_fov: Option<f64>,
proj: &ProjectionType,
) {
// Invert the min and max bounds if min > max
if let (Some(min_fov), Some(max_fov)) = (min_fov.as_mut(), max_fov.as_mut()) {
if *max_fov < *min_fov {
std::mem::swap(max_fov, min_fov);
}
}

self.min_fov = min_fov;
self.max_fov = max_fov;

self.set_aperture(self.aperture, proj);
}

pub(crate) fn at_zoom_boundaries(&self, proj: &ProjectionType) -> bool {
// The zoom factor cannot exceed an upper limit
if self.zoom_factor >= ZOOM_FACTOR_UPPER_LIMIT {
return true;
}

// The field of view cannot go deeper a lower limit
if self.aperture <= APERTURE_LOWER_LIMIT_RAD {
return true;
}

// The field of view might be forced in a user defined range
if let Some(min_fov) = self.min_fov {
if self.aperture <= min_fov {
return true;
}
}
if let Some(max_fov) = self.max_fov {
if self.aperture >= max_fov {
return true;
}
}

let can_unzoom_more = match proj {
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_) => false,
_ => true,
};

if !can_unzoom_more && self.zoom_factor >= 1.0 {
return true;
}

false
}
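`at_zoom_boundaries` is what the JS zoom animation polls to know when to stop. A rough JavaScript transcription of its checks, assuming the same constants and an externally supplied `canUnzoomMore` flag (this is an illustration of the logic, not library code):

```js
// Rough transcription of CameraViewPort::at_zoom_boundaries (illustrative only).
const ZOOM_FACTOR_UPPER_LIMIT = 2.0;
const APERTURE_LOWER_LIMIT_RAD = (1 / 36000) * Math.PI / 180; // 0.1 arcsec

function atZoomBoundaries({ zoomFactor, aperture, minFov, maxFov, canUnzoomMore }) {
    if (zoomFactor >= ZOOM_FACTOR_UPPER_LIMIT) return true;  // cannot unzoom past the factor cap
    if (aperture <= APERTURE_LOWER_LIMIT_RAD) return true;   // cannot zoom below 0.1 arcsec
    if (minFov != null && aperture <= minFov) return true;   // user-defined lower bound
    if (maxFov != null && aperture >= maxFov) return true;   // user-defined upper bound
    if (!canUnzoomMore && zoomFactor >= 1.0) return true;    // e.g. TAN/MER/STG projections
    return false;
}
```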
pub(crate) fn set_aperture(&mut self, mut aperture: f64, proj: &ProjectionType) {
// Force the given aperture by a range given by the user
if let Some(min_fov) = self.min_fov {
aperture = aperture.max(min_fov);
}

if let Some(max_fov) = self.max_fov {
aperture = aperture.min(max_fov);
}

// Limit internally the aperture to 0.1 arcsec
aperture = aperture.max(APERTURE_LOWER_LIMIT_RAD);

// Checking if we are zooming or unzooming
// This is used internaly for the raytracer to compute
// blending between tiles and their parents (or children)

@@ -338,23 +411,15 @@ impl CameraViewPort {
};

let can_unzoom_more = match proj {
ProjectionType::Tan(_)
| ProjectionType::Mer(_)
//| ProjectionType::Air(_)
| ProjectionType::Stg(_) => false,
//| ProjectionType::Car(_)
//| ProjectionType::Cea(_)
//| ProjectionType::Cyp(_)
//| ProjectionType::Hpx(_) => false,
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_) => false,
_ => true,
};

let aperture_start: Angle<f64> = ArcDeg(proj.aperture_start()).into();
let aperture_start: f64 = proj.aperture_start().to_radians();

self.clip_zoom_factor = if aperture > aperture_start {
//al_core::log(&format!("a: {:?}, as: {:?}", aperture, aperture_start));
self.zoom_factor = if aperture > aperture_start {
if can_unzoom_more {
aperture.0 / aperture_start.0
aperture / aperture_start
} else {
1.0
}

@@ -362,35 +427,24 @@
// Compute the new clip zoom factor
let a = aperture.abs();

let v0 = math::lonlat::radec_to_xyzw(-a / 2.0, Angle(0.0));
let v1 = math::lonlat::radec_to_xyzw(a / 2.0, Angle(0.0));
let v0 = math::lonlat::radec_to_xyzw((-a / 2.0).to_angle(), 0.0.to_angle());
let v1 = math::lonlat::radec_to_xyzw((a / 2.0).to_angle(), 0.0.to_angle());

// Vertex in the WCS of the FOV
if self.width < self.height {
if let (Some(p0), Some(p1)) =
(proj.world_to_clip_space(&v0), proj.world_to_clip_space(&v1))
{
(0.5 * (p1.x - p0.x).abs()).min(1.0)
} else {
1.0
}
if let (Some(p0), Some(p1)) =
(proj.world_to_clip_space(&v0), proj.world_to_clip_space(&v1))
{
(0.5 * (p1.x - p0.x).abs()).min(1.0)
} else {
if let (Some(p0), Some(p1)) =
(proj.world_to_clip_space(&v0), proj.world_to_clip_space(&v1))
{
(0.5 * (p1.x - p0.x).abs()).min(1.0)
} else {
1.0
}
1.0
}
};

// Limit the zoom factor to not unzoom too much
self.zoom_factor = self.zoom_factor.min(ZOOM_FACTOR_UPPER_LIMIT);

// Limit later the aperture to aperture_start
self.aperture = aperture.min(aperture_start);
//self.aperture = aperture;

//al_core::log(&format!("zoom factor {:?}", self.clip_zoom_factor));

//console_log(&format!("clip factor {:?}", self.aperture));

// Project this vertex into the screen
self.moved = true;
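In the new code the zoom factor is derived from the aperture by projecting the two FoV edge directions (±a/2 in longitude on the equator) into clip space and taking half of their x-separation, capped at 1. A hedged JavaScript sketch for the orthographic (SIN) case, where the clip x of a point at longitude `lon` on the equator is simply `sin(lon)` (this closed form is an assumption used only for illustration):

```js
// Zoom factor from aperture, sketched for the SIN (orthographic) projection,
// where worldToClipSpace of (lon, 0) has x = sin(lon).
function zoomFactorFromAperture(apertureRad) {
    const p0x = Math.sin(-apertureRad / 2);
    const p1x = Math.sin(apertureRad / 2);
    return Math.min(0.5 * Math.abs(p1x - p0x), 1.0); // = min(sin(a/2), 1)
}

console.log(zoomFactorFromAperture(Math.PI / 3)); // 60° FoV -> 0.5
```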
@@ -398,7 +452,101 @@
self.time_last_move = Time::now();

self.fov
.set_aperture(&self.ndc_to_clip, self.clip_zoom_factor, &self.w2m, proj);
.set_aperture(&self.ndc_to_clip, self.zoom_factor, &self.w2m, proj);

let proj_area = proj.get_area();
self.is_allsky = !proj_area.is_in(&math::projection::ndc_to_clip_space(
&Vector2::new(-1.0, -1.0),
self,
));

self.compute_texture_depth();

// Recompute the scissor with the new aperture
self.recompute_scissor();

// Compute the hpx cells
self.view_hpx_cells.update(
self.texture_depth,
&self.fov,
&self.center,
self.get_coo_system(),
proj,
);
}

pub(crate) fn set_zoom_factor(&mut self, zoom_factor: f64, proj: &ProjectionType) {
// Checking if we are zooming or unzooming
// This is used internaly for the raytracer to compute
// blending between tiles and their parents (or children)
self.last_user_action = if self.zoom_factor > zoom_factor {
UserAction::Zooming
} else if self.zoom_factor < zoom_factor {
UserAction::Unzooming
} else {
self.last_user_action
};

let can_unzoom_more = match proj {
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_) => false,
_ => true,
};

// Set the zoom factor
self.zoom_factor = zoom_factor;
// Limit it to prevent unzooming infinitely
self.zoom_factor = self.zoom_factor.min(ZOOM_FACTOR_UPPER_LIMIT);

let aperture_start = proj.aperture_start().to_radians();

// clamp it to one if we cannot unzoom more (because of the projection)
let aperture = if !can_unzoom_more && zoom_factor >= 1.0 {
self.zoom_factor = 1.0;

aperture_start
} else if can_unzoom_more && zoom_factor >= 1.0 {
aperture_start
} else {
// zoom_factor < 1.0
if let Some((lon, _)) = proj
.clip_to_world_space(&Vector2::new(self.zoom_factor, 0.0))
.map(|xyzw| math::lonlat::xyzw_to_radec(&xyzw))
{
lon.to_radians().abs() * 2.0
} else {
aperture_start
}
};

// Force the given aperture to be in an optional range given by the user
let mut clamped_aperture = aperture;
if let Some(min_fov) = self.min_fov {
clamped_aperture = clamped_aperture.max(min_fov);
}

if let Some(max_fov) = self.max_fov {
clamped_aperture = clamped_aperture.min(max_fov);
}

// The aperture must also be > to a lower limit
clamped_aperture = clamped_aperture.max(APERTURE_LOWER_LIMIT_RAD);

if clamped_aperture != aperture {
// there has been a clamping of the aperture, then we recompute the zoom factor
// with the new clamped aperture
self.set_aperture(clamped_aperture, proj);
return;
}

self.aperture = aperture;

// Project this vertex into the screen
self.moved = true;
self.zoomed = true;
self.time_last_move = Time::now();

self.fov
.set_aperture(&self.ndc_to_clip, self.zoom_factor, &self.w2m, proj);

let proj_area = proj.get_area();
self.is_allsky = !proj_area.is_in(&math::projection::ndc_to_clip_space(
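`set_zoom_factor` goes the other way: for a zoom factor below 1 it deprojects the clip-space point (zoom_factor, 0) back to a longitude and doubles it to obtain the aperture. A hedged JavaScript sketch, again using the SIN projection's closed form `lon = asin(x)` purely for illustration:

```js
// Aperture from zoom factor, sketched for the SIN projection,
// where clipToWorldSpace of (x, 0) maps back to lon = asin(x).
function apertureFromZoomFactor(zoomFactor, apertureStartRad) {
    const z = Math.min(zoomFactor, 2.0);    // ZOOM_FACTOR_UPPER_LIMIT
    if (z >= 1.0) return apertureStartRad;  // cannot open wider than the projection allows
    return 2 * Math.abs(Math.asin(z));      // inverse of zoomFactor = sin(aperture / 2)
}

console.log(apertureFromZoomFactor(0.5, Math.PI)); // ~1.047 rad (60°)
```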
@@ -448,8 +596,7 @@ impl CameraViewPort {
let smallest_cell_size_px = self.dpi as f64;
let mut depth_pixel = 29 as usize;

let hpx_cell_size_rad =
(smallest_cell_size_px / w_screen_px) * self.get_aperture().to_radians();
let hpx_cell_size_rad = (smallest_cell_size_px / w_screen_px) * self.get_aperture();

while depth_pixel > 0 {
if crate::healpix::utils::MEAN_HPX_CELL_RES[depth_pixel] > hpx_cell_size_rad {
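Since `get_aperture` now returns radians directly, the angular size covered by one DPI-scaled pixel is just `(pixel_size / screen_width) * aperture`, and the texture depth is found by walking down the HEALPix depths. A small JavaScript sketch of that search (the continuation of the loop is not shown in the hunk above; this sketch assumes it stops at the first depth whose mean cell resolution exceeds the per-pixel size, and `meanHpxCellRes` stands in for the library's `MEAN_HPX_CELL_RES` table):

```js
// Pick the deepest HEALPix depth whose mean cell resolution is still larger
// than the angular size covered by one (DPI-scaled) pixel.
function texturePixelDepth(apertureRad, screenWidthPx, dpi, meanHpxCellRes) {
    const hpxCellSizeRad = (dpi / screenWidthPx) * apertureRad;
    let depth = meanHpxCellRes.length - 1;
    while (depth > 0 && meanHpxCellRes[depth] <= hpxCellSizeRad) {
        depth -= 1;
    }
    return depth;
}
```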
@@ -573,8 +720,8 @@ impl CameraViewPort {
&self.ndc_to_clip
}

pub fn get_clip_zoom_factor(&self) -> f64 {
self.clip_zoom_factor
pub fn get_zoom_factor(&self) -> f64 {
self.zoom_factor
}

pub fn get_vertices(&self) -> Option<&Vec<XYZWModel<f64>>> {

@@ -615,8 +762,9 @@ impl CameraViewPort {
self.zoomed = false;
}

/// Aperture is given in radians
#[inline]
pub fn get_aperture(&self) -> Angle<f64> {
pub fn get_aperture(&self) -> f64 {
self.aperture
}

@@ -680,7 +828,7 @@ impl SendUniforms for CameraViewPort {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
shader
.attach_uniform("ndc_to_clip", &self.ndc_to_clip) // Send ndc to clip
.attach_uniform("czf", &self.clip_zoom_factor); // Send clip zoom factor
.attach_uniform("czf", &self.zoom_factor); // Send clip zoom factor

shader
}

@@ -497,6 +497,12 @@ impl WebClient {
Ok(fov)
}

/// Get the max aperture of a projection (in degrees)
#[wasm_bindgen(js_name = atZoomBoundaries)]
pub fn get_max_aperture(&self) -> bool {
self.app.camera.at_zoom_boundaries(&self.app.projection)
}

/// Set the field of view
///
/// # Arguments

@@ -504,13 +510,12 @@ impl WebClient {
/// * `fov` - The field of view in degrees
#[wasm_bindgen(js_name = setFieldOfView)]
pub fn set_fov(&mut self, fov: f64) -> Result<(), JsValue> {
let fov = ArcDeg(fov).into();

self.app.set_fov(fov);
self.app.set_fov(fov.to_radians());

Ok(())
}

/// Enable/Disable inertia effect after panning and releasing the mouse
#[wasm_bindgen(js_name = setInertia)]
pub fn set_inertia(&mut self, inertia: bool) -> Result<(), JsValue> {
self.app.set_inertia(inertia);
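On the JavaScript side these bindings surface as `setFieldOfView` (in degrees) and `atZoomBoundaries`. A hedged usage sketch, assuming a `wasm` handle like the one `View.js` keeps:

```js
// Illustrative use of the wasm bindings exposed above (the `wasm` handle is assumed).
wasm.setFieldOfView(60);          // degrees; converted to radians internally
if (wasm.atZoomBoundaries()) {
    // a running zoom animation should stop: the camera hit a FoV/zoom-factor limit
    console.log("zoom boundary reached");
}
```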
@@ -518,6 +523,40 @@ impl WebClient {
Ok(())
}

/// Set a range of FoVs that contrains the zooming in that range
///
/// # Arguments
///
/// * `min_fov` - The minimum field of view value in degrees
/// * `max_fov` - The maximum field of view value in degrees
#[wasm_bindgen(js_name = setFoVRange)]
pub fn set_fov_range(
&mut self,
min_fov: Option<f64>,
max_fov: Option<f64>,
) -> Result<(), JsValue> {
self.app.set_fov_range(min_fov, max_fov);

Ok(())
}

/// Get the FoV range in degrees
#[wasm_bindgen(js_name = getFoVRange)]
pub fn get_fov_range(&self) -> Box<[f64]> {
Box::new([
self.app
.camera
.min_fov
.map(|v| v.to_degrees())
.unwrap_or(-1.0),
self.app
.camera
.max_fov
.map(|v| v.to_degrees())
.unwrap_or(-1.0),
])
}
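`getFoVRange` cannot return `null` through this binding, so an unset bound comes back as the sentinel value `-1.0`; `View.getFoVRange` further down turns that back into `null`. A small JavaScript sketch of that decoding:

```js
// Decode the -1.0 sentinels returned by the wasm getFoVRange binding.
function decodeFoVRange(range) {
    let [minFoV, maxFoV] = range;
    if (minFoV === -1.0) minFoV = null;
    if (maxFoV === -1.0) maxFoV = null;
    return [minFoV, maxFoV]; // degrees, or null when unset
}

console.log(decodeFoVRange([-1.0, 60])); // [null, 60]
```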
/// Set the absolute orientation of the view
///
/// # Arguments

@@ -565,14 +604,20 @@ impl WebClient {
self.app.get_max_fov()
}

/// Get the clip zoom factor of the view
/// Get the zoom factor of the view
///
/// This factor is deduced from the field of view angle.
/// It is a constant which when multiplied to the screen coordinates
/// gives the coordinates in clipping space.
#[wasm_bindgen(js_name = getClipZoomFactor)]
pub fn get_clip_zoom_factor(&self) -> Result<f64, JsValue> {
Ok(self.app.get_clip_zoom_factor())
#[wasm_bindgen(js_name = getZoomFactor)]
pub fn get_zoom_factor(&self) -> Result<f64, JsValue> {
Ok(self.app.get_zoom_factor())
}

/// Set the zoom factor of the view
#[wasm_bindgen(js_name = setZoomFactor)]
pub fn set_zoom_factor(&mut self, zoom_factor: f64) -> Result<(), JsValue> {
Ok(self.app.set_zoom_factor(zoom_factor))
}

/// Set the center of the view in ICRS coosys

@@ -22,7 +22,7 @@ pub mod domain;
use domain::{basic, full::FullScreen};

/* S <-> NDC space conversion methods */
pub fn screen_to_ndc_space(
pub(crate) fn screen_to_ndc_space(
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,
) -> XYNDC<f64> {

@@ -41,7 +41,7 @@ pub fn screen_to_ndc_space(
)
}

pub fn ndc_to_screen_space(
pub(crate) fn ndc_to_screen_space(
pos_normalized_device: &XYNDC<f64>,
camera: &CameraViewPort,
) -> XYScreen<f64> {

@@ -57,9 +57,9 @@ pub fn ndc_to_screen_space(
}

/* NDC <-> CLIP space conversion methods */
pub fn clip_to_ndc_space(pos_clip_space: &XYClip<f64>, camera: &CameraViewPort) -> XYNDC<f64> {
pub(crate) fn clip_to_ndc_space(pos_clip_space: &XYClip<f64>, camera: &CameraViewPort) -> XYNDC<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
let clip_zoom_factor = camera.get_zoom_factor();

Vector2::new(
pos_clip_space.x / (ndc_to_clip.x * clip_zoom_factor),

@@ -67,12 +67,12 @@ pub fn clip_to_ndc_space(pos_clip_space: &XYClip<f64>, camera: &CameraViewPort)
)
}

pub fn ndc_to_clip_space(
pub(crate) fn ndc_to_clip_space(
pos_normalized_device: &XYNDC<f64>,
camera: &CameraViewPort,
) -> XYClip<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
let clip_zoom_factor = camera.get_zoom_factor();

Vector2::new(
pos_normalized_device.x * ndc_to_clip.x * clip_zoom_factor,

@@ -81,7 +81,7 @@ pub fn ndc_to_clip_space(
}

/* S <-> CLIP space conversion methods */
pub fn clip_to_screen_space(
pub(crate) fn clip_to_screen_space(
pos_clip_space: &XYClip<f64>,
camera: &CameraViewPort,
) -> XYScreen<f64> {

@@ -89,7 +89,7 @@ pub fn clip_to_screen_space(
ndc_to_screen_space(&pos_normalized_device, camera)
}

pub fn screen_to_clip_space(
pub(crate) fn screen_to_clip_space(
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,
) -> XYClip<f64> {

@@ -152,7 +152,7 @@ pub enum ProjectionType {

use crate::math::lonlat::LonLat;
impl ProjectionType {
pub fn north_pole_celestial_space(&self, camera: &CameraViewPort) -> LonLatT<f64> {
pub(crate) fn north_pole_celestial_space(&self, camera: &CameraViewPort) -> LonLatT<f64> {
// This is always defined
let np_world = self.north_pole_world_space();

@@ -168,7 +168,7 @@ impl ProjectionType {
///
/// * ``pos_screen_space`` - The position in the screen pixel space (top-left of the screen being the origin
/// * ``camera`` - The camera object
pub fn screen_to_world_space(
pub(crate) fn screen_to_world_space(
&self,
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,

@@ -190,7 +190,7 @@ impl ProjectionType {
///
/// * ``pos_screen_space`` - The position in the screen pixel space (top-left of the screen being the origin
/// * ``camera`` - The camera object
pub fn screen_to_model_space(
pub(crate) fn screen_to_model_space(
&self,
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,

@@ -199,7 +199,7 @@ impl ProjectionType {
.map(|world_pos| camera.get_w2m() * world_pos)
}

pub fn normalized_device_to_model_space(
pub(crate) fn normalized_device_to_model_space(
&self,
ndc_pos: &XYNDC<f64>,
camera: &CameraViewPort,

@@ -208,7 +208,7 @@ impl ProjectionType {
.map(|world_pos| camera.get_w2m() * world_pos)
}

pub fn model_to_screen_space(
pub(crate) fn model_to_screen_space(
&self,
pos_model_space: &XYZWModel<f64>,
camera: &CameraViewPort,

@@ -218,7 +218,7 @@ impl ProjectionType {
self.world_to_screen_space(&pos_world_space, camera)
}

pub fn icrs_celestial_to_screen_space(
pub(crate) fn icrs_celestial_to_screen_space(
&self,
icrs_celestial_pos: &XYZWModel<f64>,
camera: &CameraViewPort,

@@ -227,7 +227,7 @@ impl ProjectionType {
.map(|ndc_pos| crate::ndc_to_screen_space(&ndc_pos, camera))
}

pub fn icrs_celestial_to_normalized_device_space(
pub(crate) fn icrs_celestial_to_normalized_device_space(
&self,
icrs_celestial_pos: &XYZWModel<f64>,
camera: &CameraViewPort,

@@ -240,7 +240,7 @@ impl ProjectionType {
self.world_to_normalized_device_space(&pos_world_space, camera)
}

pub fn model_to_normalized_device_space(
pub(crate) fn model_to_normalized_device_space(
&self,
pos_model_space: &XYZWModel<f64>,
camera: &CameraViewPort,

@@ -250,7 +250,7 @@ impl ProjectionType {
self.world_to_normalized_device_space(&pos_world_space, camera)
}

pub fn model_to_clip_space(
pub(crate) fn model_to_clip_space(
&self,
pos_model_space: &XYZWModel<f64>,
camera: &CameraViewPort,

@@ -268,7 +268,7 @@ impl ProjectionType {
///
/// * `x` - X mouse position in homogenous screen space (between [-1, 1])
/// * `y` - Y mouse position in homogenous screen space (between [-1, 1])
pub fn world_to_normalized_device_space(
pub(crate) fn world_to_normalized_device_space(
&self,
pos_world_space: &XYZWWorld<f64>,
camera: &CameraViewPort,

@@ -277,7 +277,7 @@ impl ProjectionType {
.map(|pos_clip_space| clip_to_ndc_space(&pos_clip_space, camera))
}

pub fn normalized_device_to_world_space(
pub(crate) fn normalized_device_to_world_space(
&self,
ndc_pos: &XYNDC<f64>,
camera: &CameraViewPort,

@@ -286,7 +286,7 @@ impl ProjectionType {
self.clip_to_world_space(&clip_pos)
}

pub fn world_to_screen_space(
pub(crate) fn world_to_screen_space(
&self,
pos_world_space: &XYZWWorld<f64>,
camera: &CameraViewPort,

@@ -295,25 +295,16 @@ impl ProjectionType {
.map(|pos_normalized_device| ndc_to_screen_space(&pos_normalized_device, camera))
}

/*pub(crate) fn is_allsky(&self) -> bool {
match self {
ProjectionType::Sin(_) | ProjectionType::Tan(_) => false,
//| ProjectionType::Feye(_)
//| ProjectionType::Ncp(_) => false,
_ => true,
}
}*/

pub fn bounds_size_ratio(&self) -> f64 {
pub(crate) const fn bounds_size_ratio(&self) -> f64 {
match self {
// Zenithal projections
/* TAN, Gnomonic projection */
ProjectionType::Tan(_) => 1.0,
/* STG, Stereographic projection */
/* STG, Stereographic projection */
ProjectionType::Stg(_) => 1.0,
/* SIN, Orthographic */
/* SIN, Orthographic */
ProjectionType::Sin(_) => 1.0,
/* ZEA, Equal-area */
/* ZEA, Equal-area */
ProjectionType::Zea(_) => 1.0,
/* FEYE, Fish-eyes */
//ProjectionType::Feye(_) => 1.0,

@@ -325,7 +316,6 @@ impl ProjectionType {
//ProjectionType::Arc(_) => 1.0,
/* NCP, */
//ProjectionType::Ncp(_) => 1.0,

// Pseudo-cylindrical projections
/* AIT, Aitoff */
ProjectionType::Ait(_) => 2.0,

@@ -335,7 +325,6 @@ impl ProjectionType {
//ProjectionType::Par(_) => 2.0,
// SFL, */
//ProjectionType::Sfl(_) => 2.0,

// Cylindrical projections
// MER, Mercator */
ProjectionType::Mer(_) => 1.0,

@@ -345,17 +334,15 @@ impl ProjectionType {
//ProjectionType::Cea(_) => 1.0,
// CYP, */
//ProjectionType::Cyp(_) => 1.0,

// Conic projections
// COD, */
//ProjectionType::Cod(_) => 1.0,

// HEALPix hybrid projection
//ProjectionType::Hpx(_) => 2.0,
}
}

pub fn aperture_start(&self) -> f64 {
pub(crate) const fn aperture_start(&self) -> f64 {
match self {
// Zenithal projections
/* TAN, Gnomonic projection */

@@ -365,7 +352,8 @@ impl ProjectionType {
/* SIN, Orthographic */
ProjectionType::Sin(_) => 180.0,
/* ZEA, Equal-area */
ProjectionType::Zea(_) => 360.0,
// FIXME, investigate why 360.0 max aperture for ZEA projection does not work (black screen)
ProjectionType::Zea(_) => 359.999,
/* FEYE, Fish-eyes */
//ProjectionType::Feye(_) => 190.0,
/* AIR, */

@@ -406,7 +394,7 @@ impl ProjectionType {
}
}

pub fn get_area(&self) -> &ProjDefType {
pub(crate) const fn get_area(&self) -> &ProjDefType {
match self {
// Zenithal projections
/* TAN, Gnomonic projection */

@@ -656,7 +644,7 @@ impl UniformType for ProjectionType {
use cgmath::Vector4;

use mapproj::CanonicalProjection;
pub trait Projection {
pub(crate) trait Projection {
/// Perform a clip to the world space deprojection
///
/// # Arguments

@@ -179,7 +179,7 @@ impl Manager {
}

// Private method adding a catalog into the manager
pub fn add_catalog<P: Projection>(
fn add_catalog<P: Projection>(
&mut self,
name: String,
sources: Box<[LonLatT<f32>]>,

@@ -204,7 +204,7 @@ impl Manager {
// at depth 7
}

pub fn remove_catalog<P: Projection>(
fn remove_catalog<P: Projection>(
&mut self,
name: String,
camera: &mut CameraViewPort,

@@ -227,12 +227,11 @@ impl Manager {
})
}

pub fn update(&mut self, camera: &mut CameraViewPort) {
fn update(&mut self, camera: &mut CameraViewPort) {
// Render only the sources in the current field of view
// Cells that are of depth > 7 are not handled by the hashmap (limited to depth 7)
// For these cells, we draw all the sources lying in the ancestor cell of depth 7 containing
// this cell
//if camera.get_aperture() > P::RASTER_THRESHOLD_ANGLE {
if camera.get_field_of_view().is_allsky() {
let cells = crate::healpix::cell::ALLSKY_HPX_CELLS_D0;

@@ -249,7 +248,7 @@ impl Manager {
}
}

pub fn draw(
fn draw(
&self,
gl: &WebGlContext,
shaders: &mut ShaderManager,

@@ -1,2 +1,2 @@
mod manager;
pub use manager::{Catalog, Manager};
pub(crate) use manager::Manager;

@@ -63,7 +63,7 @@ fn sub_valid_domain(
projection: &ProjectionType,
camera: &CameraViewPort,
) -> (XYZModel<f64>, XYZModel<f64>) {
let d_alpha = camera.get_aperture().to_radians() * 0.02;
let d_alpha = camera.get_aperture() * 0.02;

let mut vv = valid_v;
let mut vi = invalid_v;

@@ -60,7 +60,7 @@ pub fn project(lat: f64, mut lon1: f64, lon2: f64, camera: &CameraViewPort, proj
// * valid_lon and invalid_lon are well defined, i.e. they can be between [-PI; PI] or [0, 2PI] depending
// whether they cross or not the zero meridian
fn sub_valid_domain(lat: f64, valid_lon: f64, invalid_lon: f64, projection: &ProjectionType, camera: &CameraViewPort) -> (f64, f64) {
let d_alpha = camera.get_aperture().to_radians() * 0.02;
let d_alpha = camera.get_aperture() * 0.02;

let mut l_valid = valid_lon;
let mut l_invalid = invalid_lon;

@@ -48,7 +48,7 @@ impl MOCHierarchy {
let mut d = self.full_res_depth as usize;

let hpx_cell_size_rad =
(smallest_cell_size_px / w_screen_px) * camera.get_aperture().to_radians();
(smallest_cell_size_px / w_screen_px) * camera.get_aperture();

while d > 0 {
//self.mocs[d].cell_indices_in_view(camera);

@@ -14,8 +14,6 @@ use crate::tile_fetcher::TileFetcherQueue;

use al_core::image::format::ChannelType;

pub use catalog::Manager;

use al_api::color::ColorRGB;
use al_api::hips::HiPSCfg;
use al_api::hips::ImageMetadata;

@@ -225,8 +223,6 @@ impl Layers {
let cdid = self.ids.get(layer).unwrap_abort();

if let Some(hips) = self.hipses.get(cdid) {
let hips_cfg = hips.get_config();

let allsky = hips.is_allsky();
let opaque = meta.opacity == 1.0;

@@ -56,6 +56,7 @@ pub unsafe fn transmute_vec_to_u8<I>(mut s: Vec<I>) -> Vec<u8> {
std::mem::transmute(s)
}

#[allow(dead_code)]
pub unsafe fn transmute_vec<I, O>(mut s: Vec<I>) -> Result<Vec<O>, &'static str> {
if std::mem::size_of::<I>() % std::mem::size_of::<O>() > 0 {
Err("The input type is not a multiple of the output type")

@@ -830,7 +830,7 @@ export let Aladin = (function () {
* aladin.setFoV(60);
*/
Aladin.prototype.setFoV = function (FoV) {
this.view.setZoom(FoV);
this.view.setFoV(FoV);
};

Aladin.prototype.setFov = Aladin.prototype.setFoV;

@@ -1295,18 +1295,16 @@ export let Aladin = (function () {
Aladin.prototype.zoomToFoV = function (fov, duration, complete) {
duration = duration || 5;

this.zoomAnimationParams = null;

var zoomAnimationParams = {};
zoomAnimationParams["start"] = new Date().getTime();
zoomAnimationParams["end"] = new Date().getTime() + 1000 * duration;
var fovArray = this.getFov();
zoomAnimationParams["fovStart"] = Math.max(fovArray[0], fovArray[1]);
zoomAnimationParams["fovEnd"] = fov;
zoomAnimationParams["complete"] = complete;
zoomAnimationParams["running"] = true;

this.zoomAnimationParams = zoomAnimationParams;
this.zoomAnimationParams = {
start: new Date().getTime(),
end: new Date().getTime() + 1000 * duration,
fovStart: Math.max(fovArray[0], fovArray[1]),
fovEnd: fov,
complete: complete,
running: true,
};
doZoomAnimation(this);
};

@@ -2304,15 +2302,6 @@ aladin.on("layerChanged", (layer, layerName, state) => {
* aladin.setCooGrid({ enabled: true });
*/
Aladin.prototype.setCooGrid = function (options) {
if (options.color) {
// 1. the user has maybe given some
options.color = new Color(options.color);
// 3. convert from 0-255 to 0-1
options.color.r /= 255;
options.color.g /= 255;
options.color.b /= 255;
}

this.view.setGridOptions(options);
};
@@ -2557,14 +2546,15 @@ aladin.on("layerChanged", (layer, layerName, state) => {
* Restrict the FoV range between a min and a max value
*
* @memberof Aladin
* @param {number} minFoV - in degrees when zoom in at max. If undefined, the zooming in is not limited
* @param {number} maxFoV - in degrees when zoom out at max. If undefined, the zooming out is not limited
* @param {number} [minFoV=1.0 / 36000.0] - in degrees. By default, the zoom is limited to 0.1 arcsec
* @param {number} maxFoV - in degrees. If undefined, zooming out is not limited
*
* @example
* let aladin = A.aladin('#aladin-lite-div');
* aladin.setFoVRange(30, 60);
*/
Aladin.prototype.setFoVRange = function (minFoV, maxFoV) {
minFoV = minFoV || (1.0 / 36000.0);
this.view.setFoVRange(minFoV, maxFoV);
};

@@ -2707,16 +2697,11 @@ aladin.on("layerChanged", (layer, layerName, state) => {
* and the second element is the FoV height.
*/
Aladin.prototype.getFov = function () {
// can go up to 1000 deg
var fovX = this.view.fov;
var s = this.getSize();

// constrain to the projection definition domain
fovX = Math.min(fovX, this.view.projection.fov);
var fovY = (s[1] / s[0]) * fovX;

fovY = Math.min(fovY, 180);
// TODO : take into account AITOFF projection where fov can be larger than 180

return [fovX, fovY];
};
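`getFov` now returns the width FoV straight from the view and derives the height FoV from the canvas aspect ratio, clamping only the height to 180°. A short illustrative sketch of that computation:

```js
// FoV height from FoV width and canvas size, as getFov does above.
function fovXY(fovXDeg, widthPx, heightPx) {
    let fovY = (heightPx / widthPx) * fovXDeg;
    fovY = Math.min(fovY, 180); // AITOFF-like projections may exceed this; see the TODO above
    return [fovXDeg, fovY];
}

console.log(fovXY(60, 800, 400)); // [60, 30]
```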
@@ -29,26 +29,26 @@
*****************************************************************************/
export let ProjectionEnum = {
// Zenithal
TAN: {id: 1, fov: 150, label: "Tangential"}, /* Gnomonic projection */
STG: {id: 2, fov: 240, label: "Stereographic"}, /* Stereographic projection */
SIN: {id: 3, fov: 1000, label: "Spheric"}, /* Orthographic */
TAN: {id: 1, label: "Tangential"}, /* Gnomonic projection */
STG: {id: 2, label: "Stereographic"}, /* Stereographic projection */
SIN: {id: 3, label: "Spheric"}, /* Orthographic */
// TODO: fix why the projection disappears at fov = 360.0
ZEA: {id: 4, fov: 1000, label: "Zenital equal-area"}, /* Equal-area */
ZEA: {id: 4, label: "Zenital equal-area"}, /* Equal-area */
//FEYE: {id: 5, fov: 190, label: "fish eye"},
//AIR: {id: 6, fov: 360, label: "airy"},
//AZP: {fov: 180},
//ARC: {id: 7, fov: 360, label: "zenital equidistant"},
//NCP: {id: 8, fov: 180, label: "north celestial pole"},
// Cylindrical
MER: {id: 9, fov: 360, label: "Mercator"},
MER: {id: 9, label: "Mercator"},
//CAR: {id: 10, fov: 360, label: "plate carrée"},
//CEA: {id: 11, fov: 360, label: "cylindrical equal area"},
//CYP: {id: 12, fov: 360, label: "cylindrical perspective"},
// Pseudo-cylindrical
AIT: {id: 13, fov: 1000, label: "Hammer-Aïtoff"},
AIT: {id: 13, label: "Hammer-Aïtoff"},
//PAR: {id: 14, fov: 360, label: "parabolic"},
//SFL: {id: 15, fov: 360, label: "sanson-flamsteed"},
MOL: {id: 16, fov: 1000, label: "Mollweide"},
MOL: {id: 16, label: "Mollweide"},
// Conic
//COD: {id: 17, fov: 360, label: "conic equidistant"},
// Hybrid

@@ -194,6 +194,22 @@ Utils.throttle = function (fn, threshhold, scope) {
}
}

// Way of detecting if the computer has trackpad or a regular mouse wheel thanks to that post:
// https://stackoverflow.com/questions/10744645/detect-touchpad-vs-mouse-in-javascript
Utils.detectTrackPad = function (e) {
    var isTrackpad = false;
    if (e.wheelDeltaY) {
        if (e.wheelDeltaY === (e.deltaY * -3)) {
            isTrackpad = true;
        }
    }
    else if (e.deltaMode === 0) {
        isTrackpad = true;
    }

    return isTrackpad
}


/* A LRU cache, inspired by https://gist.github.com/devinus/409353#file-gistfile1-js */
// TODO : utiliser le LRU cache pour les tuiles images
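`Utils.detectTrackPad` is what the new wheel handler in View.js uses to pick a gentler zoom step for trackpads (1.05 per event) than for mouse wheels (1.2 per event). A hedged usage sketch — the listener body below is illustrative, not the actual View.js code:

```js
// Illustrative wheel listener: smaller multiplicative zoom steps on trackpads.
canvas.addEventListener("wheel", (e) => {
    e.preventDefault();

    const factor = Utils.detectTrackPad(e) ? 1.05 : 1.2;
    const delta = e.deltaY || e.detail || -e.wheelDelta;
    // the zoom factor grows when scrolling one way and shrinks the other way
    const newZoomFactor = delta > 0 ? view.zoomFactor * factor : view.zoomFactor / factor;

    view.setZoomFactor(newZoomFactor);
});
```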
src/js/View.js
@@ -49,6 +49,7 @@ import { Layout } from "./gui/Layout.js";
import { SAMPActionButton } from "./gui/Button/SAMP.js";
import { HiPS } from "./HiPS.js";
import { Image } from "./Image.js";
import { Color } from "./Color.js";

export let View = (function () {

@@ -97,6 +98,23 @@ export let View = (function () {
console.error("Problem initializing Aladin Lite. Please contact the support by contacting Matthieu Baumann (baumannmatthieu0@gmail.com) or Thomas Boch (thomas.boch@astro.unistra.fr). You can also open an issue on the Aladin Lite github repository here: https://github.com/cds-astro/aladin-lite. Message error:" + e)
}

Object.defineProperty(this, "fov", {
    get() {
        return this.wasm.getFieldOfView();
    },
    set(newFov) {
        this.setFoV(newFov);
    }
});
Object.defineProperty(this, "zoomFactor", {
    get() {
        return this.wasm.getZoomFactor();
    },
    set(newZoomFactor) {
        this.setZoomFactor(newZoomFactor);
    }
});

// Attach the drag and drop events to the view
this.aladinDiv.ondrop = (event) => {
const files = Utils.getDroppedFilesHandler(event);
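With these accessors, `view.fov` and `view.zoomFactor` become live properties backed by the wasm camera rather than cached fields: reading them queries the backend, assigning them calls `setFoV` / `setZoomFactor`. A short illustrative sketch of how the rest of the code can now use them:

```js
// The properties defined above delegate to the wasm camera.
view.fov = 60;                          // same as view.setFoV(60)
console.log(view.fov);                  // always reflects the backend value, no stale cache

view.zoomFactor = view.zoomFactor / 2;  // zoom in by halving the factor
```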
@@ -175,10 +193,6 @@ export let View = (function () {
this.mustClearCatalog = true;
this.mode = View.PAN;

// 0.1 arcsec
this.minFoV = 1 / 36000;
this.maxFoV = null;

this.healpixGrid = new HealpixGrid();
this.then = Date.now();

@@ -198,7 +212,7 @@ export let View = (function () {
this.setProjection(projName)

// Then set the zoom properly once the projection is defined
this.setZoom(initialFov)
this.fov = initialFov

// Target position settings
this.viewCenter = { lon, lat }; // position of center of view

@@ -266,11 +280,6 @@ export let View = (function () {
init(this);
// listen to window resize and reshape canvases
this.resizeTimer = null;
/*if ('ontouchstart' in window) {
Utils.on(document, 'orientationchange', (e) => {
self.fixLayoutDimensions();
})
} else {*/

this.resizeObserver = new ResizeObserver(() => {
self.fixLayoutDimensions();

@@ -280,24 +289,6 @@ export let View = (function () {

self.fixLayoutDimensions();
self.redraw()

// in some contexts (Jupyter notebook for instance), the parent div changes little time after Aladin Lite creation
// this results in canvas dimension to be incorrect.
// The following line tries to fix this issue
/*setTimeout(function () {
var computedWidth = $(self.aladinDiv).width();
var computedHeight = $(self.aladinDiv).height();

if (self.width !== computedWidth || self.height === computedHeight) {
self.fixLayoutDimensions();
// As the WebGL backend has been resized correctly by
// the previous call, we can get the zoom factor from it

self.setZoom(self.fov); // needed to force recomputation of displayed FoV
}

self.requestRedraw();
}, 1000);*/
};

// different available modes

@@ -348,17 +339,21 @@ export let View = (function () {
};

View.prototype.setFoVRange = function(minFoV, maxFoV) {
if (minFoV && maxFoV && minFoV > maxFoV) {
var tmp = minFoV;
minFoV = maxFoV;
maxFoV = tmp;
this.wasm.setFoVRange(minFoV, maxFoV)
this.updateZoomState();
}

View.prototype.getFoVRange = function() {
let [minFoV, maxFoV] = this.wasm.getFoVRange();
if (minFoV == -1.0) {
minFoV = null;
}

this.minFoV = minFoV || (1.0 / 36000);
this.maxFoV = maxFoV;
if (maxFoV == -1.0) {
maxFoV = null;
}

// reset the field of view
this.setZoom(this.fov);
return [minFoV, maxFoV]
}

// called at startup and when window is resized

@@ -401,7 +396,7 @@ export let View = (function () {
this.imageCtx.canvas.style.width = this.width + "px";
this.imageCtx.canvas.style.height = this.height + "px";
this.wasm.resize(this.width, this.height);
this.setZoom(this.fov)
this.updateZoomState()

pixelateCanvasContext(this.imageCtx, this.aladin.options.pixelateCanvas);

@@ -1006,8 +1001,8 @@ export let View = (function () {

// zoom
const dist = Math.sqrt(Math.pow(e.touches[0].clientX - e.touches[1].clientX, 2) + Math.pow(e.touches[0].clientY - e.touches[1].clientY, 2));
const fov = Math.min(Math.max(view.pinchZoomParameters.initialFov * view.pinchZoomParameters.initialDistance / dist, 0.00002777777), view.projection.fov);
view.setZoom(fov);
const fov = view.pinchZoomParameters.initialFov * view.pinchZoomParameters.initialDistance / dist;
view.setFoV(fov);

return;
}

@@ -1135,10 +1130,6 @@ export let View = (function () {

// disable text selection on IE
//Utils.on(view.aladinDiv, "selectstart", function () { return false; })
/*var eventCount = 0;
var eventCountStart;
var isTouchPad;
let id;*/

Utils.on(view.catalogCanvas, 'wheel', function (e) {
e.preventDefault();

@@ -1169,69 +1160,28 @@ export let View = (function () {
}

view.debounceProgCatOnZoom();
//view.throttledZoomChanged();

// Zoom heuristic
// First detect the device
// See https://stackoverflow.com/questions/10744645/detect-touchpad-vs-mouse-in-javascript
// for detecting the use of a touchpad
/*view.isTouchPadDefined = isTouchPad || typeof isTouchPad !== "undefined";
if (!view.isTouchPadDefined) {
if (eventCount === 0) {
view.delta = 0;
eventCountStart = new Date().getTime();
}

eventCount++;

if (new Date().getTime() - eventCountStart > 100) {
if (eventCount > 10) {
isTouchPad = true;
} else {
isTouchPad = false;
}
view.isTouchPadDefined = true;
}
}*/

// only ensure the touch pad test has been done before zooming
/*if (!view.isTouchPadDefined) {
return false;
}*/

// touch pad defined
view.delta = e.deltaY || e.detail || (-e.wheelDelta);

//if (isTouchPad) {
if (!view.throttledTouchPadZoom) {
view.throttledTouchPadZoom = () => {
const factor = 2.0;
let newFov = view.delta > 0 ? view.fov * factor : view.fov / factor;

// inside case
view.zoom.apply({
stop: newFov,
duration: 100
});
};
}
if (!view.throttledTouchPadZoom) {
view.throttledTouchPadZoom = () => {
const factor = Utils.detectTrackPad(e) ? 1.05 : 1.2;
const currZoomFactor = view.zoom.isZooming ? view.zoom.finalZoom : view.wasm.getZoomFactor();
//const currZoomFactor = view.wasm.getZoomFactor();
let newZoomFactor = view.delta > 0 ? currZoomFactor * factor : currZoomFactor / factor;

view.throttledTouchPadZoom();
/*} else {
if (!view.throttledMouseScrollZoom) {
view.throttledMouseScrollZoom = () => {
const factor = 2
let newFov = view.delta > 0 ? view.fov * factor : view.fov / factor;
// standard mouse wheel zooming
view.zoom.apply({
stop: newFov,
duration: 100
});
};
}

view.throttledMouseScrollZoom()
}*/
// inside case
view.zoom.apply({
stop: newZoomFactor,
duration: 100,
kind: 'zoomFactor'
});
};
}

view.throttledTouchPadZoom();

return false;
});
@@ -1576,37 +1526,29 @@ export let View = (function () {
};

// Called for touchmove events
View.prototype.setZoom = function (fov) {
// limit the fov in function of the projection
fov = Math.min(fov, this.projection.fov);
View.prototype.setZoomFactor = function(zoomFactor) {
this.wasm.setZoomFactor(zoomFactor);
this.updateZoomState();
}

// then clamp the fov between minFov and maxFov
const minFoV = this.minFoV;
const maxFoV = this.maxFoV;

if (minFoV) {
fov = Math.max(fov, minFoV);
}

if (maxFoV) {
fov = Math.min(fov, maxFoV);
}

this.wasm.setFieldOfView(fov);
this.updateZoomState(fov);
};
View.prototype.setFoV = function(fov) {
this.wasm.setFieldOfView(fov)
this.updateZoomState();
}

View.prototype.increaseZoom = function () {
this.zoom.apply({
stop: this.fov / 3,
duration: 300
stop: this.zoomFactor / 2.0,
duration: 300,
kind: 'zoomFactor'
});
}

View.prototype.decreaseZoom = function () {
this.zoom.apply({
stop: this.fov * 3,
duration: 300
stop: this.zoomFactor * 2.0,
duration: 300,
kind: 'zoomFactor'
});
}

@@ -1615,6 +1557,15 @@ export let View = (function () {
}

View.prototype.setGridOptions = function (options) {
if (options.color) {
// 1. the user has maybe given some
options.color = new Color(options.color);
// 3. convert from 0-255 to 0-1
options.color.r /= 255;
options.color.g /= 255;
options.color.b /= 255;
}

this.gridCfg = {...this.gridCfg, ...options};
this.wasm.setGridOptions(this.gridCfg);

@@ -1627,26 +1578,9 @@ export let View = (function () {

View.prototype.getGridOptions = function() {
return this.gridCfg;
}
};

View.prototype.updateZoomState = function (fov) {
// Get the new zoom values from the backend
const newFov = fov || this.wasm.getFieldOfView()

// Disable the coo grid labels if we are too unzoomed
const maxFovGridLabels = 360;
if (this.fov <= maxFovGridLabels && newFov > maxFovGridLabels) {
let gridOptions = this.getGridOptions()
if (gridOptions) {
this.originalShowLabels = gridOptions.showLabels;
this.aladin.setCooGrid({showLabels: false});
}

} else if (this.fov > maxFovGridLabels && newFov <= maxFovGridLabels) {
this.aladin.setCooGrid({showLabels:this.originalShowLabels});
}

this.fov = newFov;
View.prototype.updateZoomState = function () {
this.computeNorder();

let fovX = this.fov;

@@ -1959,8 +1893,7 @@ export let View = (function () {

// Change the projection here
this.wasm.setProjection(projName);
let newProjFov = Math.min(this.fov, this.projection.fov);
this.setZoom(newProjFov)
this.updateZoomState()

const projFn = this.aladin.callbacksByEventName['projectionChanged'];
(typeof projFn === 'function') && projFn(projName);

@@ -36,24 +36,22 @@ import { requestAnimFrame } from "./libs/RequestAnimationFrame.js";
};

Zoom.prototype.apply = function(options) {
let startZoom = options['start'] || this.view.fov;
const kind = options['kind'] || 'fov';

let startZoom = options['start'] || (kind === 'fov' ? this.view.fov : this.view.zoomFactor);
let finalZoom = options['stop'] || undefined;
let interpolationDuration = options['duration'] || 1000; // default to 1seconds
if (!finalZoom)
return;

// clamp the zoom to the view params minFov and maxFov and the projection bounds
//finalZoom = Math.min(finalZoom, this.view.projection.fov);
// then clamp the fov between minFov and maxFov
const minFoV = this.view.minFoV;
const maxFoV = this.view.maxFoV;

if (minFoV) {
finalZoom = Math.max(finalZoom, minFoV);
}

if (maxFoV) {
finalZoom = Math.min(finalZoom, maxFoV);
// Get a relative error for stopping the zooming
const relativeErr = Math.abs(finalZoom - startZoom) * 0.01;
const zoomFn = (zoom) => {
if (kind === 'fov') {
this.view.setFoV(zoom)
} else {
this.view.setZoomFactor(zoom)
}
}

this.finalZoom = finalZoom;
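The animation loop below interpolates with `Zoom.hermiteCubic.f(x, x1, x2, y1, y2, m1, m2)`, i.e. a cubic Hermite segment between `(x1, y1)` and `(x2, y2)` with end slopes `m1`, `m2`. The standard form of that interpolant, sketched in JavaScript (this is the textbook formula, not a copy of Aladin Lite's own implementation):

```js
// Cubic Hermite interpolation on [x1, x2] with values y1, y2 and slopes m1, m2.
function hermiteCubic(x, x1, x2, y1, y2, m1, m2) {
    const h = x2 - x1;
    const t = (x - x1) / h;                  // normalized position in [0, 1]
    const h00 = 2 * t ** 3 - 3 * t ** 2 + 1;
    const h10 = t ** 3 - 2 * t ** 2 + t;
    const h01 = -2 * t ** 3 + 3 * t ** 2;
    const h11 = t ** 3 - t ** 2;
    return h00 * y1 + h10 * h * m1 + h01 * y2 + h11 * h * m2;
}

// Example: ease from zoom factor 1.0 to 0.5 with zero end slopes.
console.log(hermiteCubic(0.5, 0, 1, 1.0, 0.5, 0, 0)); // 0.75
```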
@@ -97,6 +95,7 @@ import { requestAnimFrame } from "./libs/RequestAnimationFrame.js";
if (self.stop) {
self.isZooming = false;
self.stop = false;
self.finalZoom = undefined;
} else {
self.x = ( performance.now() - self.startTime ) / interpolationDuration;
interpolatedZoom = Zoom.hermiteCubic.f(self.x, self.x1, self.x2, self.y1, self.y2, self.m1, self.m2);

@@ -104,13 +103,13 @@ import { requestAnimFrame } from "./libs/RequestAnimationFrame.js";
interpolatedZoom = Math.max(0, interpolatedZoom);

// Apply zoom level to map or perform any necessary rendering
self.view.setZoom(interpolatedZoom);

self.fov = interpolatedZoom;
zoomFn(interpolatedZoom);

if (self.x >= self.x2 || Math.abs(interpolatedZoom - self.finalZoom) < 1e-4) {
self.view.setZoom(self.finalZoom);
if (self.x >= self.x2 || Math.abs(interpolatedZoom - self.finalZoom) <= relativeErr) {
zoomFn(self.finalZoom);

self.isZooming = false;
} else if (self.view.wasm.atZoomBoundaries()) {
self.isZooming = false;
} else {
// Request the next frame