Compare commits

..

4 Commits

29 changed files with 365 additions and 753 deletions

3
.gitignore vendored
View File

@@ -11,6 +11,3 @@ src/core/Cargo.lock
aladin-lite*.tgz
.vscode
deploy.sh
deploy-beta.sh

15
deploy-dbg.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# Deploy a debug/beta build of Aladin Lite.
# Stages dist/ and the packed tarball into a temporary directory on the
# remote "aladin" host, then publishes them under a dated directory and
# points the "beta" symlink at it.
#
# Fail fast: without this, a failed scp would still let the publish step
# run and wipe the existing "beta" symlink.
set -euo pipefail

USER_ALADIN="matthieu.baumann"
DATEUPLOAD="$(date '+%Y-%m-%d')"

# Prepare (and empty) the remote staging directory
ssh "$USER_ALADIN@aladin" 'sg hips -c "mkdir -p /home/matthieu.baumann/al-tmp && rm -rf /home/matthieu.baumann/al-tmp/*"'

# Copy the dist files
scp dist/* "$USER_ALADIN@aladin:~/al-tmp"

# Copy the tgz — normalize the versioned name produced by `npm pack`
# (NOTE: cp with a glob assumes exactly one aladin-l*.tgz is present)
cp aladin-l*.tgz aladin-lite.tgz
scp aladin-lite.tgz "$USER_ALADIN@aladin:~/al-tmp"

# Publish under a fresh dated directory and flip the "beta" symlink
ssh "$USER_ALADIN@aladin" "sg hips -c 'rm -rf /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD &&
mkdir -p /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD &&
cp /home/matthieu.baumann/al-tmp/* /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD &&
rm -rf /home/thomas.boch/AladinLite/www/api/v3/beta &&
ln -s /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD /home/thomas.boch/AladinLite/www/api/v3/beta'"

17
deploy.sh Executable file
View File

@@ -0,0 +1,17 @@
#!/bin/bash
# Deploy a release build of Aladin Lite.
# Stages the UMD bundle and the packed tarball into a temporary directory
# on the remote "aladin" host, publishes them under a dated directory, and
# points the "latest" symlink at it.
#
# Fail fast: without this, a failed scp would still let the publish step
# run and wipe the existing "latest" symlink.
set -euo pipefail

USER_ALADIN="matthieu.baumann"
DATEUPLOAD="$(date '+%Y-%m-%d')"

# Prepare (and empty) the remote staging directory
ssh "$USER_ALADIN@aladin" 'sg hips -c "mkdir -p /home/matthieu.baumann/al-tmp && rm -rf /home/matthieu.baumann/al-tmp/*"'

# Copy the dist files.
# For compatibility with the docs, rename the UMD file into aladin.js
scp dist/aladin.umd.cjs "$USER_ALADIN@aladin:~/al-tmp/aladin.js"

# Copy the tgz — normalize the versioned name produced by `npm pack`
# (NOTE: mv with a glob assumes exactly one aladin-l*.tgz is present)
mv aladin-l*.tgz aladin-lite.tgz
scp aladin-lite.tgz "$USER_ALADIN@aladin:~/al-tmp"

# Publish under a fresh dated directory, flip the "latest" symlink, and
# expose the tarball under its legacy AladinLiteAssets.tar.gz name
ssh "$USER_ALADIN@aladin" "sg hips -c 'rm -rf /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD &&
mkdir -p /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD &&
cp /home/matthieu.baumann/al-tmp/* /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD &&
rm -rf /home/thomas.boch/AladinLite/www/api/v3/latest &&
ln -s /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD /home/thomas.boch/AladinLite/www/api/v3/latest &&
ln -s /home/thomas.boch/AladinLite/www/api/v3/latest/aladin-lite.tgz /home/thomas.boch/AladinLite/www/api/v3/latest/AladinLiteAssets.tar.gz'"

View File

@@ -11,7 +11,7 @@
let aladin;
A.init.then(() => {
// Start up Aladin Lite
aladin = A.aladin('#aladin-lite-div', {target: 'M 82', fov: 0.25, showContextMenu: true});
aladin = A.aladin('#aladin-lite-div', {target: 'M 82', fov: 0.25});
aladin.addCatalog(A.catalogFromSimbad('M 82', 0.1, {onClick: 'showTable'}));
aladin.addCatalog(A.catalogFromNED('09 55 52.4 +69 40 47', 0.1, {onClick: 'showPopup', shape: 'plus'}));

View File

@@ -1,113 +0,0 @@
<!doctype html>
<html>
<head>
</head>
<script type="importmap">
{
"imports": {
"three": "https://unpkg.com/three@0.157.0/build/three.module.js",
"three/addons/": "https://unpkg.com/three@0.157.0/examples/jsm/"
}
}
</script>
<body>
<div id="aladin-lite-div" style="width: 1024px; height: 768px"></div>
<script type="module">
import A from '../src/js/A.js';
import * as THREE from 'three';
let aladin;
A.init.then(() => {
aladin = A.aladin(
'#aladin-lite-div',
{
survey: 'P/DSS2/color', // set a survey
projection: 'TAN', // set a projection
fov: 70, // initial field of view in degrees
target: '338.98958 33.96', // initial target
cooFrame: 'equatorial', // set galactic frame
showCooGrid: true, // set the grid
fullScreen: true,
vr: {animation: animate.bind(renderer)},
}
);
//aladin.setOverlayImageLayer("https://alasky.cds.unistra.fr/JWST/CDS_P_JWST_Stephans-Quintet_NIRCam+MIRI")
initScene(aladin.view.imageCanvas);
aladin.setRenderer(renderer);
});
let renderer = null;
let scene = null;
let camera = null;
let cubeMesh = null;
// let controls = null;
/**
* Initializes a 3D scene, camera, and renderer for virtual reality (VR).
*
* @param {HTMLCanvasElement} canvas - The HTML canvas element to render the
* 3D scene
*/
function initScene(canvas) {
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 1000);
scene.add(camera);
renderer = new THREE.WebGLRenderer({canvas: canvas, context: canvas.getContext('webgl2', {xrCompatible: true})}); // NOTE Une différence ici
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.xr.enabled = true;
// renderer.xr.setReferenceSpaceType('local');
renderer.autoClear = false;
const light = new THREE.PointLight(0xffffff, 10);
light.position.set(0, 2, 1);
scene.add(light);
const planeGeometry = new THREE.PlaneGeometry(10, 10);
const planeMaterial = new THREE.MeshPhongMaterial({ color: 0xff00ff });
const planeMesh = new THREE.Mesh(planeGeometry, planeMaterial);
planeMesh.position.set(0, -1, 0);
planeMesh.rotation.x = -Math.PI / 2;
scene.add(planeMesh);
const cubeGeometry = new THREE.BoxGeometry(1, 1, 1);
const cubeMaterial = new THREE.MeshPhongMaterial({ color: 0x00ff00 });
cubeMesh = new THREE.Mesh(cubeGeometry, cubeMaterial);
cubeMesh.position.set(0, 0, -2);
scene.add(cubeMesh);
}
/**
* Function to animate the 3D scene and rendering it.
*/
function animate() {
cubeMesh.rotation.x += 0.001;
cubeMesh.rotation.y += 0.001;
renderer.render( scene, camera );
}
// /**
// * Initializes a WebGL2 context and handles potential errors.
// */
// function initWebGL2() {
// // canvas = aladin.view.imageCanvas;
// canvas = document.getElementById(aladin.view.imageCanvas);
// // gl = canvas.getContext("webgl2", { alpha: true });
// gl = canvas.getContext('webgl2');
// if (!gl) { // If the gl didn't create properly
// alert('This browser doesn\'t support WebGL2');
// return;
// }
// }
</script>
</body>
</html>

View File

@@ -2,7 +2,7 @@
"homepage": "https://aladin.u-strasbg.fr/",
"name": "aladin-lite",
"type": "module",
"version": "3.2.1",
"version": "3.2.0",
"description": "An astronomical HiPS visualizer in the browser",
"author": "Thomas Boch and Matthieu Baumann",
"license": "GPL-3",
@@ -35,7 +35,6 @@
"wasm": "wasm-pack build ./src/core --target web --release --out-name core -- --features webgl2",
"predeploy": "npm run build && rm -rf aladin-lite.tgz && npm pack",
"deploy": "./deploy.sh",
"deploy:beta": "npm run predeploy && ./deploy-beta.sh",
"build": "npm run wasm && vite build && cp examples/index.html dist/index.html",
"dev": "npm run build && vite",
"serve": "npm run dev",

View File

@@ -9,8 +9,8 @@ pub mod vao {
use crate::object::element_array_buffer::ElementArrayBuffer;
use crate::webgl_ctx::WebGlContext;
use crate::Abort;
use std::collections::HashMap;
use crate::Abort;
pub struct VertexArrayObject {
array_buffer: HashMap<&'static str, ArrayBuffer>,
@@ -88,10 +88,7 @@ pub mod vao {
}*/
pub fn num_elements(&self) -> usize {
self.element_array_buffer
.as_ref()
.unwrap_abort()
.num_elements()
self.element_array_buffer.as_ref().unwrap_abort().num_elements()
}
pub fn num_instances(&self) -> i32 {
@@ -158,7 +155,6 @@ pub mod vao {
pub fn unbind(&self) {
self.vao.gl.bind_vertex_array(None);
self._shader.unbind(&self.vao.gl);
}
}
@@ -174,9 +170,8 @@ pub mod vao {
}
impl<'a, 'b> ShaderVertexArrayObjectBoundRef<'a, 'b> {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) -> &Self {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) {
self.vao.gl.draw_arrays(mode, byte_offset, size);
self
}
pub fn draw_elements_with_i32(
@@ -185,12 +180,11 @@ pub mod vao {
num_elements: Option<i32>,
type_: u32,
byte_offset: i32,
) -> &Self {
) {
let num_elements = num_elements.unwrap_or(self.vao.num_elements() as i32);
self.vao
.gl
.draw_elements_with_i32(mode, num_elements, type_, byte_offset);
self
}
pub fn draw_elements_instanced_with_i32(
@@ -198,7 +192,7 @@ pub mod vao {
mode: u32,
offset_element_idx: i32,
num_instances: i32,
) -> &Self {
) {
self.vao.gl.draw_elements_instanced_with_i32(
mode,
self.vao.num_elements() as i32,
@@ -206,12 +200,10 @@ pub mod vao {
offset_element_idx,
num_instances,
);
self
}
pub fn unbind(&self) {
self.vao.gl.bind_vertex_array(None);
self._shader.unbind(&self.vao.gl);
}
}
@@ -452,10 +444,7 @@ pub mod vao {
}*/
pub fn num_elements(&self) -> usize {
self.element_array_buffer
.as_ref()
.unwrap_abort()
.num_elements()
self.element_array_buffer.as_ref().unwrap_abort().num_elements()
}
pub fn num_instances(&self) -> i32 {
@@ -522,8 +511,7 @@ pub mod vao {
}
pub fn unbind(&self) {
self.vao.gl.bind_vertex_array(None);
self._shader.unbind(&self.vao.gl);
//self.vao.gl.bind_vertex_array(None);
}
}
@@ -540,15 +528,13 @@ pub mod vao {
}
use crate::object::array_buffer::VertexBufferObject;
impl<'a, 'b> ShaderVertexArrayObjectBoundRef<'a, 'b> {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) -> &Self {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
}
self.vao.gl.draw_arrays(mode, byte_offset, size);
self
}
pub fn draw_elements_with_i32(
@@ -557,7 +543,7 @@ pub mod vao {
num_elements: Option<i32>,
type_: u32,
byte_offset: i32,
) -> &Self {
) {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
@@ -569,7 +555,6 @@ pub mod vao {
self.vao
.gl
.draw_elements_with_i32(mode, num_elements, type_, byte_offset);
self
}
pub fn draw_elements_instanced_with_i32(
@@ -577,7 +562,7 @@ pub mod vao {
mode: u32,
offset_element_idx: i32,
num_instances: i32,
) -> &Self {
) {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
@@ -602,12 +587,10 @@ pub mod vao {
offset_element_idx,
num_instances,
);
self
}
pub fn unbind(&self) {
self.vao.gl.bind_vertex_array(None);
self.shader.unbind(&self.vao.gl);
//self.vao.gl.bind_vertex_array(None);
}
}
@@ -733,9 +716,6 @@ pub mod vao {
pub fn unbind(&self) {
//self.vao.gl.bind_vertex_array(None);
self.vao.gl.bind_vertex_array(None);
self.shader.unbind(&self.vao.gl);
}
}

View File

@@ -144,9 +144,8 @@ impl App {
// When it will be supported nearly everywhere, we will need to uncomment this line to
// enable it
//gl.enable(WebGl2RenderingContext::SCISSOR_TEST);
//gl.enable(WebGl2RenderingContext::CULL_FACE);
//gl.cull_face(WebGl2RenderingContext::BACK);
gl.enable(WebGl2RenderingContext::CULL_FACE);
gl.cull_face(WebGl2RenderingContext::BACK);
// The tile buffer responsible for the tile requests
let downloader = Downloader::new();
@@ -631,12 +630,18 @@ impl App {
survey.add_tile(&cell, image, time_req)?;
self.request_redraw = true;
//} else {
// self.downloader.delay_rsc(Resource::Tile(tile));
//}
//}
self.time_start_blending = Time::now();
//self.tile_fetcher.notify(1, &mut self.downloader);
}
}
}
} else {
//self.tile_fetcher
// .notify_tile(&tile, false, true, &mut self.downloader);
self.downloader.delay_rsc(Resource::Tile(tile));
}
}
@@ -698,30 +703,16 @@ impl App {
}
// We fetch when we does not move
/*let has_not_moved_recently =
let has_not_moved_recently =
(Time::now() - self.camera.get_time_of_last_move()) > DeltaTime(100.0);
if has_not_moved_recently && self.inertia.is_none() {
// Triggers the fetching of new queued tiles
self.tile_fetcher.notify(&mut self.downloader);
}*/
// If there is inertia, we do not fetch any new tiles
if self.inertia.is_none() {
let has_not_moved_recently =
(Time::now() - self.camera.get_time_of_last_move()) > DeltaTime(100.0);
let dt = if has_not_moved_recently {
None
} else {
Some(DeltaTime::from_millis(700.0))
};
self.tile_fetcher.notify(&mut self.downloader, dt);
}
}
// The update from the camera
//self.layers.update(&mut self.camera, &self.projection);
self.layers.update(&mut self.camera, &self.projection);
if self.request_for_new_tiles
&& Time::now() - self.last_time_request_for_new_tiles > DeltaTime::from(200.0)
@@ -875,7 +866,7 @@ impl App {
&self.colormaps,
&self.projection,
)?;
/*
// Draw the catalog
//let fbo_view = &self.fbo_view;
//catalogs.draw(&gl, shaders, camera, colormaps, fbo_view)?;
@@ -903,7 +894,7 @@ impl App {
self.line_renderer.draw(&self.camera)?;
//let dpi = self.camera.get_dpi();
//ui.draw(&gl, dpi)?;
*/
// Reset the flags about the user action
self.camera.reset();

View File

@@ -20,6 +20,7 @@ pub struct CameraViewPort {
// The rotation of the camera
rotation_center_angle: Angle<f64>,
w2m_rot: Rotation<f64>,
final_rot: Rotation<f64>,
w2m: Matrix4<f64>,
m2w: Matrix4<f64>,
@@ -102,6 +103,7 @@ impl CameraViewPort {
let zoomed = false;
let w2m_rot = Rotation::zero();
let final_rot = Rotation::zero();
// Get the initial size of the window
let window = web_sys::window().unwrap_abort();
@@ -145,6 +147,7 @@ impl CameraViewPort {
m2w,
dpi,
final_rot,
rotation_center_angle,
// The width over height ratio
aspect,
@@ -508,11 +511,10 @@ impl CameraViewPort {
pub fn set_longitude_reversed(&mut self, reversed_longitude: bool, proj: &ProjectionType) {
if self.reversed_longitude != reversed_longitude {
self.reversed_longitude = reversed_longitude;
self.rotation_center_angle = -self.rotation_center_angle;
self.update_rot_matrices(proj);
}
self.reversed_longitude = reversed_longitude;
// The camera is reversed => it has moved
self.moved = true;
@@ -524,6 +526,18 @@ impl CameraViewPort {
}
// Accessors
pub fn get_rotation(&self) -> &Rotation<f64> {
&self.w2m_rot
}
// This rotation is the final rotation, i.e. a composite of
// two rotations:
// - The current rotation of the sphere
// - The rotation around the center axis of a specific angle
pub fn get_final_rotation(&self) -> &Rotation<f64> {
&self.final_rot
}
pub fn get_w2m(&self) -> &cgmath::Matrix4<f64> {
&self.w2m
}
@@ -640,11 +654,6 @@ impl CameraViewPort {
}
fn update_center(&mut self) {
// Longitude reversed identity matrix
const ID_R: &Matrix4<f64> = &Matrix4::new(
-1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
// The center position is on the 3rd column of the w2m matrix
self.center = self.w2m.z;
@@ -653,12 +662,9 @@ impl CameraViewPort {
// Re-update the model matrix to take into account the rotation
// by theta around the center axis
let final_rot = center_rot * self.w2m_rot;
self.w2m = (&final_rot).into();
if self.reversed_longitude {
self.w2m = self.w2m * ID_R;
}
self.final_rot = center_rot * self.w2m_rot;
self.w2m = (&self.final_rot).into();
self.m2w = self.w2m.transpose();
}
}

View File

@@ -1,19 +1,20 @@
use crate::math::HALF_PI;
use crate::math::PI;
use cgmath::Vector3;
use crate::ProjectionType;
use crate::CameraViewPort;
use crate::LonLatT;
use crate::ProjectionType;
use cgmath::InnerSpace;
use cgmath::Vector3;
use crate::grid::XYScreen;
use crate::math::angle::SerializeFmt;
use crate::math::lonlat::LonLat;
use crate::math::TWICE_PI;
use crate::grid::XYScreen;
use crate::math::lonlat::LonLat;
use crate::math::angle::ToAngle;
use cgmath::Vector2;
use core::ops::Range;
use cgmath::Vector2;
const OFF_TANGENT: f64 = 35.0;
const OFF_BI_TANGENT: f64 = 5.0;
@@ -39,7 +40,7 @@ impl Label {
options: LabelOptions,
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: &SerializeFmt,
fmt: &SerializeFmt
) -> Option<Self> {
let fov = camera.get_field_of_view();
let d = if fov.contains_north_pole() {
@@ -59,15 +60,17 @@ impl Label {
LonLatT::new(lon.to_angle(), lat.to_angle())
}
LabelOptions::OnSide => LonLatT::new(lon.to_angle(), lat.start.to_angle()),
LabelOptions::OnSide => LonLatT::new(lon.to_angle(), lat.start.to_angle())
};
let m1: Vector3<_> = lonlat.vector();
let m2 = (m1 + d * 1e-3).normalize();
//let s1 = projection.model_to_screen_space(&(system.to_icrs_j2000::<f64>() * m1), camera, reversed_longitude)?;
let d1 = projection.model_to_screen_space(&m1.extend(1.0), camera)?;
let d2 = projection.model_to_screen_space(&m2.extend(1.0), camera)?;
//let s2 = projection.model_to_screen_space(&(system.to_icrs_j2000::<f64>() * m2), camera, reversed_longitude)?;
let dt = (d2 - d1).normalize();
let db = Vector2::new(dt.y.abs(), dt.x.abs());
@@ -105,7 +108,7 @@ impl Label {
let lon = camera.get_center().lon();
LonLatT::new(lon, lat.to_angle())
}
LabelOptions::OnSide => LonLatT::new(lon.start.to_angle(), lat.to_angle()),
LabelOptions::OnSide => LonLatT::new(lon.start.to_angle(), lat.to_angle())
};
let m1: Vector3<_> = lonlat.vector();
@@ -138,17 +141,13 @@ impl Label {
};
// rot is between -PI and +PI
let mut angle = dt.y.signum() * dt.x.acos();
let rot = dt.y.signum() * dt.x.acos() + PI;
// Detect if the label is upside-down fix the angle by adding PI
if angle.abs() >= HALF_PI {
angle += PI;
}
Some(Label {
position,
content,
rot: angle,
rot,
})
}
}

View File

@@ -21,7 +21,6 @@ pub mod domain;
use domain::{basic, cod::Cod, full::FullScreen, hpx::Hpx, par::Par};
/* S <-> NDC space conversion methods */
pub fn screen_to_ndc_space(
pos_screen_space: &Vector2<f64>,
camera: &CameraViewPort,
@@ -56,7 +55,6 @@ pub fn ndc_to_screen_space(
pos_screen_space / dpi
}
/* NDC <-> CLIP space conversion methods */
pub fn clip_to_ndc_space(pos_clip_space: &Vector2<f64>, camera: &CameraViewPort) -> Vector2<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
@@ -67,20 +65,6 @@ pub fn clip_to_ndc_space(pos_clip_space: &Vector2<f64>, camera: &CameraViewPort)
)
}
pub fn ndc_to_clip_space(
pos_normalized_device: &Vector2<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_normalized_device.x * ndc_to_clip.x * clip_zoom_factor,
pos_normalized_device.y * ndc_to_clip.y * clip_zoom_factor,
)
}
/* S <-> CLIP space conversion methods */
pub fn clip_to_screen_space(
pos_clip_space: &Vector2<f64>,
camera: &CameraViewPort,
@@ -97,6 +81,19 @@ pub fn screen_to_clip_space(
ndc_to_clip_space(&pos_normalized_device, camera)
}
pub fn ndc_to_clip_space(
pos_normalized_device: &Vector2<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_normalized_device.x * ndc_to_clip.x * clip_zoom_factor,
pos_normalized_device.y * ndc_to_clip.y * clip_zoom_factor,
)
}
use al_api::coo_system::CooSystem;
use cgmath::InnerSpace;
@@ -170,15 +167,21 @@ impl ProjectionType {
let pos_screen_space = *pos_screen_space;
let pos_normalized_device = screen_to_ndc_space(&pos_screen_space, camera);
let pos_clip_space = ndc_to_clip_space(&pos_normalized_device, camera);
self.clip_to_world_space(&pos_clip_space)
/*.map(|mut pos_world_space| {
if camera.get_longitude_reversed() {
pos_world_space.x = -pos_world_space.x;
}
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
pos_world_space.normalize()
})*/
let pos_clip_space = Vector2::new(
pos_normalized_device.x * ndc_to_clip.x * clip_zoom_factor,
pos_normalized_device.y * ndc_to_clip.y * clip_zoom_factor,
);
self.clip_to_world_space(&pos_clip_space)
.map(|mut pos_world_space| {
if camera.get_longitude_reversed() {
pos_world_space.x = -pos_world_space.x;
}
pos_world_space.normalize()
})
}
/// Screen to model space deprojection
@@ -195,7 +198,10 @@ impl ProjectionType {
camera: &CameraViewPort,
) -> Option<Vector4<f64>> {
self.screen_to_world_space(pos_screen_space, camera)
.map(|world_pos| camera.get_w2m() * world_pos)
.map(|world_pos| {
let r = camera.get_final_rotation();
r.rotate(&world_pos)
})
}
pub fn normalized_device_to_model_space(
@@ -204,7 +210,10 @@ impl ProjectionType {
camera: &CameraViewPort,
) -> Option<XYZWModel> {
self.normalized_device_to_world_space(ndc_pos, camera)
.map(|world_pos| camera.get_w2m() * world_pos)
.map(|world_pos| {
let r = camera.get_final_rotation();
r.rotate(&world_pos)
})
}
pub fn model_to_screen_space(
@@ -286,7 +295,18 @@ impl ProjectionType {
camera: &CameraViewPort,
) -> Option<Vector2<f64>> {
self.world_to_clip_space(pos_world_space)
.map(|pos_clip_space| clip_to_ndc_space(&pos_clip_space, camera))
.map(|mut pos_clip_space| {
if camera.get_longitude_reversed() {
pos_clip_space.x = -pos_clip_space.x;
}
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_clip_space.x / (ndc_to_clip.x * clip_zoom_factor),
pos_clip_space.y / (ndc_to_clip.y * clip_zoom_factor),
)
})
}
pub fn normalized_device_to_world_space(
@@ -298,6 +318,24 @@ impl ProjectionType {
self.clip_to_world_space(&clip_pos)
}
/*pub fn world_to_normalized_device_space_unchecked(
&self,
pos_world_space: &Vector4<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
let mut pos_clip_space = self.world_to_clip_space_unchecked(pos_world_space);
if camera.get_longitude_reversed() {
pos_clip_space.x = -pos_clip_space.x;
}
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_clip_space.x / (ndc_to_clip.x * clip_zoom_factor),
pos_clip_space.y / (ndc_to_clip.y * clip_zoom_factor),
)
}*/
pub fn world_to_screen_space(
&self,
pos_world_space: &Vector4<f64>,

View File

@@ -12,11 +12,12 @@ pub mod moc;
use crate::renderable::line::RasterizedLineRenderer;
use super::utils::triangle::Triangle;
use wasm_bindgen::JsValue;
use hierarchy::MOCHierarchy;
use super::utils::Triangle;
use al_api::coo_system::CooSystem;
use al_api::moc::MOC as Cfg;

View File

@@ -19,13 +19,12 @@ use crate::math::{angle::Angle, vector::dist2};
use crate::ProjectionType;
use crate::camera::CameraViewPort;
use crate::renderable::utils::index_patch::DefaultPatchIndexIter;
use crate::renderable::utils::BuildPatchIndicesIter;
use crate::{math::lonlat::LonLatT, utils};
use crate::{shader::ShaderManager, survey::config::HiPSConfig};
use crate::downloader::request::allsky::Allsky;
use crate::healpix::{cell::HEALPixCell, coverage::HEALPixCoverage};
use crate::math::angle::ToAngle;
use crate::math::lonlat::LonLat;
use crate::time::Time;
@@ -43,7 +42,14 @@ use std::fmt::Debug;
use wasm_bindgen::JsValue;
use web_sys::WebGl2RenderingContext;
use super::utils::index_patch::CCWCheckPatchIndexIter;
// Identity matrix
const ID: &Matrix4<f64> = &Matrix4::new(
1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
// Longitude reversed identity matrix
const ID_R: &Matrix4<f64> = &Matrix4::new(
-1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
const M: f64 = 280.0 * 280.0;
const N: f64 = 150.0 * 150.0;
@@ -502,6 +508,9 @@ impl HiPS {
let textures = ImageSurveyTextures::new(gl, config)?;
let gl = gl.clone();
let _depth = 0;
let _depth_tile = 0;
let footprint_moc = None;
// request the allsky texture
Ok(HiPS {
@@ -589,16 +598,9 @@ impl HiPS {
self.textures.contains_tile(cell)
}
pub fn update(
&mut self,
raytracer: &RayTracer,
camera: &mut CameraViewPort,
projection: &ProjectionType,
) {
let raytracing = raytracer.is_rendering(camera);
pub fn update(&mut self, camera: &mut CameraViewPort, projection: &ProjectionType) {
let vertices_recomputation_needed =
!raytracing && (self.textures.reset_available_tiles() | camera.has_moved());
self.textures.reset_available_tiles() | camera.has_moved();
if vertices_recomputation_needed {
self.recompute_vertices(camera, projection);
}
@@ -819,7 +821,7 @@ impl HiPS {
pos.push(ndc);
}
let patch_indices_iter = CCWCheckPatchIndexIter::new(
let patch_indices_iter = BuildPatchIndicesIter::new(
&(0..=n_segments_by_side),
&(0..=n_segments_by_side),
n_vertices_per_segment,
@@ -963,6 +965,7 @@ impl HiPS {
pub fn draw(
&self,
//switch_from_raytrace_to_raster: bool,
shaders: &mut ShaderManager,
colormaps: &Colormaps,
camera: &CameraViewPort,
@@ -975,11 +978,20 @@ impl HiPS {
let hips_frame = hips_cfg.get_frame();
let c = selected_frame.to(hips_frame);
// Get whether the camera mode is longitude reversed
//let longitude_reversed = hips_cfg.longitude_reversed;
let rl = if camera.get_longitude_reversed() {
ID_R
} else {
ID
};
// Retrieve the model and inverse model matrix
let w2v = c * (*camera.get_w2m());
let w2v = c * (*camera.get_w2m()) * rl;
let v2w = w2v.transpose();
let raytracing = raytracer.is_rendering(camera);
let longitude_reversed = camera.get_longitude_reversed();
let config = self.get_config();
self.gl.enable(WebGl2RenderingContext::BLEND);
@@ -999,6 +1011,9 @@ impl HiPS {
blend_cfg.enable(&self.gl, || {
if raytracing {
// Triangle are defined in CCW
self.gl.cull_face(WebGl2RenderingContext::BACK);
let shader = get_raytracer_shader(cmap, &self.gl, shaders, &config)?;
let shader = shader.bind(&self.gl);
@@ -1016,6 +1031,16 @@ impl HiPS {
raytracer.draw(&shader);
} else {
// Depending on if the longitude is reversed, triangles are either defined in:
// - CCW for longitude_reversed = false
// - CW for longitude_reversed = true
// Get the reverse longitude flag
if longitude_reversed {
self.gl.cull_face(WebGl2RenderingContext::FRONT);
} else {
self.gl.cull_face(WebGl2RenderingContext::BACK);
}
// The rasterizer has a buffer containing:
// - The vertices of the HEALPix cells for the most refined survey
// - The starting and ending uv for the blending animation
@@ -1047,8 +1072,17 @@ impl HiPS {
Some(self.num_idx as i32),
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
)
.unbind();
);
}
// Depending on if the longitude is reversed, triangles are either defined in:
// - CCW for longitude_reversed = false
// - CW for longitude_reversed = true
// Get the reverse longitude flag
if longitude_reversed {
self.gl.cull_face(WebGl2RenderingContext::FRONT);
} else {
self.gl.cull_face(WebGl2RenderingContext::BACK);
}
Ok(())

View File

@@ -225,8 +225,7 @@ impl RayTracer {
None,
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
)
.unbind();
);
#[cfg(feature = "webgl2")]
shader
.attach_uniform("position_tex", &self.position_tex)
@@ -237,13 +236,12 @@ impl RayTracer {
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
)
.unbind();
}
pub fn is_rendering(&self, camera: &CameraViewPort) -> bool {
// Check whether the tile depth is 0 for square projection
// definition domains i.e. Mercator
let depth = camera.get_tile_depth();
camera.is_allsky() || depth <= 1
camera.is_allsky() || depth == 0
}
}

View File

@@ -4,7 +4,7 @@ use wcs::ImgXY;
use crate::camera::CameraViewPort;
use crate::math::angle::ToAngle;
use crate::math::projection::ProjectionType;
use crate::renderable::utils::index_patch::CCWCheckPatchIndexIter;
use crate::renderable::utils::BuildPatchIndicesIter;
use al_api::coo_system::CooSystem;
use wcs::WCS;
@@ -215,7 +215,7 @@ pub fn get_grid_vertices(
for idx_x_range in &idx_x_ranges {
for idx_y_range in &idx_y_ranges {
let build_indices_iter =
CCWCheckPatchIndexIter::new(idx_x_range, idx_y_range, num_x_vertices, &pos, camera);
BuildPatchIndicesIter::new(idx_x_range, idx_y_range, num_x_vertices, &pos, camera);
let patch_indices = build_indices_iter
.flatten()

View File

@@ -588,7 +588,7 @@ impl Image {
_ => return Err(JsValue::from_str("Image format type not supported")),
};
//self.gl.disable(WebGl2RenderingContext::CULL_FACE);
self.gl.disable(WebGl2RenderingContext::CULL_FACE);
// 2. Draw it if its opacity is not null
blend_cfg.enable(&self.gl, || {
@@ -612,8 +612,7 @@ impl Image {
Some(num_indices),
WebGl2RenderingContext::UNSIGNED_SHORT,
((off_indices as usize) * std::mem::size_of::<u16>()) as i32,
)
.unbind();
);
off_indices += self.num_indices[idx];
}
@@ -621,7 +620,7 @@ impl Image {
Ok(())
})?;
//self.gl.enable(WebGl2RenderingContext::CULL_FACE);
self.gl.enable(WebGl2RenderingContext::CULL_FACE);
self.gl.disable(WebGl2RenderingContext::BLEND);

View File

@@ -286,7 +286,7 @@ impl RasterizedLineRenderer {
WebGl2RenderingContext::ONE,
);
//self.gl.disable(WebGl2RenderingContext::CULL_FACE);
self.gl.disable(WebGl2RenderingContext::CULL_FACE);
let shader = self.shader.bind(&self.gl);
for meta in self.meta.iter() {
@@ -301,7 +301,7 @@ impl RasterizedLineRenderer {
);
}
//self.gl.enable(WebGl2RenderingContext::CULL_FACE);
self.gl.enable(WebGl2RenderingContext::CULL_FACE);
self.gl.disable(WebGl2RenderingContext::BLEND);
Ok(())

View File

@@ -259,8 +259,7 @@ impl Layers {
None,
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
)
.unbind();
);
}
// The first layer must be paint independently of its alpha channel
@@ -291,7 +290,7 @@ impl Layers {
// 1. Update the survey if necessary
let url = self.urls.get(layer).expect("Url should be found");
if let Some(survey) = self.surveys.get_mut(url) {
survey.update(&self.raytracer, camera, projection);
survey.update(camera, projection);
// 2. Draw it if its opacity is not null
survey.draw(shaders, colormaps, camera, raytracer, draw_opt)?;
@@ -429,8 +428,6 @@ impl Layers {
meta,
} = hips;
let img_ext = meta.img_format;
// 1. Add the layer name
let layer_already_found = self.layers.iter().any(|l| l == &layer);
@@ -443,15 +440,7 @@ impl Layers {
self.layers.insert(idx, layer.to_string());
// 2. Add the meta information of the layer
self.meta.insert(layer.clone(), meta);
// Loop over all the meta for its longitude reversed property
// and set the camera to it if there is at least one
let longitude_reversed = self.meta.values().any(|meta| meta.longitude_reversed);
camera.set_longitude_reversed(longitude_reversed, proj);
// 3. Add the image survey
// 2. Add the image survey
let url = String::from(properties.get_url());
// The layer does not already exist
// Let's check if no other hipses points to the
@@ -460,7 +449,7 @@ impl Layers {
if !url_already_found {
// The url is not processed yet
let cfg = HiPSConfig::new(&properties, img_ext)?;
let cfg = HiPSConfig::new(&properties, meta.img_format)?;
/*if let Some(initial_ra) = properties.get_initial_ra() {
if let Some(initial_dec) = properties.get_initial_dec() {
@@ -481,6 +470,14 @@ impl Layers {
self.urls.insert(layer.clone(), url.clone());
// 3. Add the meta information of the layer
self.meta.insert(layer.clone(), meta);
// Loop over all the meta for its longitude reversed property
// and set the camera to it if there is at least one
let longitude_reversed = self.meta.values().any(|meta| meta.longitude_reversed);
camera.set_longitude_reversed(longitude_reversed, proj);
let hips = self
.surveys
.get(&url)
@@ -615,6 +612,12 @@ impl Layers {
ready
}
pub fn update(&mut self, camera: &mut CameraViewPort, proj: &ProjectionType) {
for survey in self.surveys.values_mut() {
survey.update(camera, proj);
}
}
// Accessors
// HiPSes getters
pub fn get_hips_from_layer(&self, layer: &str) -> Option<&HiPS> {

View File

@@ -0,0 +1,115 @@
use std::ops::RangeInclusive;
use cgmath::BaseFloat;
use crate::CameraViewPort;
// This iterator constructs triangle indices from a set of vertices defining
// a grid patch.
// Triangles that are in a clockwise order will not be rendered,
// whereas counter-clockwise triangles will be.
// NOTE(review): the actual winding accepted also depends on the camera's
// longitude-reversed flag (see `Triangle::is_invalid`) — confirm against the
// renderer's cull-face configuration.
pub struct BuildPatchIndicesIter<'a> {
    // Inclusive column (x) range of the grid patch being triangulated
    pub idx_x_range: RangeInclusive<usize>,
    // Inclusive row (y) range of the grid patch being triangulated
    pub idx_y_range: RangeInclusive<usize>,
    // Row stride: number of vertices per grid row
    pub num_x_vertices: usize,

    // Cursor over the grid cells (advances column-first, then row)
    cur_idx_x: usize,
    cur_idx_y: usize,

    // Projected NDC position of each vertex, indexed by
    // `idx_x + idx_y * num_x_vertices`; `None` when the vertex does not
    // project onto the screen
    ndc: &'a [Option<[f32; 2]>],
    // Queried for the longitude-reversed flag during winding checks
    camera: &'a CameraViewPort,
}
impl<'a> BuildPatchIndicesIter<'a> {
    /// Creates an index-building iterator over the given patch of the vertex grid.
    ///
    /// * `idx_x_range` / `idx_y_range` - inclusive vertex-index ranges of the patch
    /// * `num_x_vertices` - number of vertices per grid row (row stride)
    /// * `ndc` - projected NDC coordinates, `None` for unprojectable vertices
    /// * `camera` - used for the longitude-reversed winding check
    pub fn new(
        idx_x_range: &RangeInclusive<usize>,
        idx_y_range: &RangeInclusive<usize>,
        num_x_vertices: usize,
        ndc: &'a [Option<[f32; 2]>],
        camera: &'a CameraViewPort,
    ) -> Self {
        // Start the cell cursor at the patch origin
        let cur_idx_x = *idx_x_range.start();
        let cur_idx_y = *idx_y_range.start();

        Self {
            idx_x_range: idx_x_range.clone(),
            idx_y_range: idx_y_range.clone(),
            num_x_vertices,
            cur_idx_x,
            cur_idx_y,
            ndc,
            camera,
        }
    }

    // Flattens a (x, y) grid coordinate into a linear vertex index.
    fn get_index_value(&self, idx_x: usize, idx_y: usize) -> usize {
        idx_x + idx_y * self.num_x_vertices
    }
}
impl<'a> Iterator for BuildPatchIndicesIter<'a> {
    // Two triangles (as u16 index triples) per grid cell.
    type Item = [(u16, u16, u16); 2];

    fn next(&mut self) -> Option<Self::Item> {
        // Wrap to the next row once the column cursor reaches the end of the
        // (inclusive) x range; stop once the row cursor reaches the end of
        // the y range. The cursor addresses the top-left corner of a cell,
        // so the last index of each range is only used as a cell corner.
        if self.cur_idx_x == *self.idx_x_range.end() {
            self.cur_idx_x = *self.idx_x_range.start();
            self.cur_idx_y += 1;

            if self.cur_idx_y == *self.idx_y_range.end() {
                return None;
            }
        }

        // Linear indices of the four corners of the current cell:
        // top-left, top-right, bottom-left, bottom-right
        let idx_tl = self.get_index_value(self.cur_idx_x, self.cur_idx_y);
        let idx_tr = self.get_index_value(self.cur_idx_x + 1, self.cur_idx_y);
        let idx_bl = self.get_index_value(self.cur_idx_x, self.cur_idx_y + 1);
        let idx_br = self.get_index_value(self.cur_idx_x + 1, self.cur_idx_y + 1);

        self.cur_idx_x += 1;

        let ndc_tl = &self.ndc[idx_tl];
        let ndc_tr = &self.ndc[idx_tr];
        let ndc_bl = &self.ndc[idx_bl];
        let ndc_br = &self.ndc[idx_br];

        match (ndc_tl, ndc_tr, ndc_bl, ndc_br) {
            (Some(ndc_tl), Some(ndc_tr), Some(ndc_bl), Some(ndc_br)) => {
                // Split the cell into two triangles and check their winding
                let t1 = Triangle::new(&ndc_tl, &ndc_tr, &ndc_bl);
                let t2 = Triangle::new(&ndc_tr, &ndc_br, &ndc_bl);

                // NOTE(review): a cell is emitted only when BOTH triangles are
                // "invalid"; this looks inverted relative to the header
                // comment ("clockwise triangles will not be rendered") —
                // confirm the intended winding convention.
                if !t1.is_invalid(&self.camera) || !t2.is_invalid(&self.camera) {
                    self.next() // crossing projection tri
                } else {
                    Some([
                        (idx_tl as u16, idx_tr as u16, idx_bl as u16),
                        (idx_tr as u16, idx_br as u16, idx_bl as u16)
                    ])
                }
            },
            // NOTE(review): skipping is done by recursing; depth grows with
            // the number of consecutive skipped cells — an explicit loop
            // would avoid stack growth on fully-culled patches.
            _ => self.next() // out of proj
        }
    }
}
/// A 2D triangle referencing three vertices by shared borrow.
pub struct Triangle<'a, S>
where
    S: BaseFloat,
{
    v1: &'a [S; 2],
    v2: &'a [S; 2],
    v3: &'a [S; 2],
}

impl<'a, S> Triangle<'a, S>
where
    S: BaseFloat,
{
    /// Wraps three vertex references into a triangle.
    pub fn new(v1: &'a [S; 2], v2: &'a [S; 2], v3: &'a [S; 2]) -> Self {
        Triangle { v1, v2, v3 }
    }

    /// A triangle is invalid when its winding disagrees with the camera's
    /// longitude orientation.
    pub fn is_invalid(&self, camera: &CameraViewPort) -> bool {
        // (!rev && ccw) || (rev && !ccw)  ⇔  ccw != rev
        self.is_ccw() != camera.get_longitude_reversed()
    }

    /// True when the three vertices are in counter-clockwise order.
    pub fn is_ccw(&self) -> bool {
        crate::math::utils::ccw_tri(&self.v1, &self.v2, &self.v3)
    }
}

View File

@@ -1,128 +0,0 @@
use cgmath::BaseFloat;
use std::ops::RangeInclusive;
use super::triangle::Triangle;
use crate::CameraViewPort;
// This iterator constructs indices from a set of vertices defining
// a grid.
// Triangles that are in clockwise order will not be rendered,
// whereas counter-clockwise triangles will be.
pub struct CCWCheckPatchIndexIter<'a> {
    // Underlying iterator yielding raw quad indices without any winding check.
    patch_iter: DefaultPatchIndexIter,
    // Projected vertex positions; `None` means the vertex fell outside the projection.
    ndc: &'a [Option<[f32; 2]>],
    // Camera, queried for the longitude-reversed flag when checking winding.
    camera: &'a CameraViewPort,
}
impl<'a> CCWCheckPatchIndexIter<'a> {
    // Builds a winding-checking iterator over the given inclusive cell ranges.
    pub fn new(
        idx_x_range: &RangeInclusive<usize>,
        idx_y_range: &RangeInclusive<usize>,
        num_x_vertices: usize,
        ndc: &'a [Option<[f32; 2]>],
        camera: &'a CameraViewPort,
    ) -> Self {
        // Delegate raw index generation to the unchecked iterator.
        let patch_iter = DefaultPatchIndexIter::new(idx_x_range, idx_y_range, num_x_vertices);
        Self {
            patch_iter,
            ndc,
            camera,
        }
    }
}
impl<'a> Iterator for CCWCheckPatchIndexIter<'a> {
    // One grid quad, split into two index triangles.
    type Item = [(u16, u16, u16); 2];

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(indices) = self.patch_iter.next() {
            // Recover the four corner indices of the quad from its two triangles:
            // triangle 0 = (tl, tr, bl), triangle 1 = (tr, br, bl).
            let idx_tl = indices[0].0;
            let idx_tr = indices[0].1;
            let idx_bl = indices[0].2;
            let idx_br = indices[1].1;

            let ndc_tl = &self.ndc[idx_tl as usize];
            let ndc_tr = &self.ndc[idx_tr as usize];
            let ndc_bl = &self.ndc[idx_bl as usize];
            let ndc_br = &self.ndc[idx_br as usize];

            match (ndc_tl, ndc_tr, ndc_bl, ndc_br) {
                // All four corners projected: check triangle winding.
                (Some(ndc_tl), Some(ndc_tr), Some(ndc_bl), Some(ndc_br)) => {
                    let t1 = Triangle::new(&ndc_tl, &ndc_tr, &ndc_bl);
                    let t2 = Triangle::new(&ndc_tr, &ndc_br, &ndc_bl);
                    // NOTE(review): the quad is kept only when BOTH triangles are
                    // reported invalid; also, recursing here deepens the stack by
                    // one frame per skipped quad — verify both points.
                    if !t1.is_invalid(&self.camera) || !t2.is_invalid(&self.camera) {
                        self.next() // crossing projection tri
                    } else {
                        Some(indices)
                    }
                }
                // At least one corner is outside the projection: skip the quad.
                _ => self.next(), // out of proj
            }
        } else {
            None
        }
    }
}
// Iterator yielding index triangles for every cell of a grid patch,
// with no visibility or winding check.
pub struct DefaultPatchIndexIter {
    // Inclusive range of grid cell columns to cover.
    pub idx_x_range: RangeInclusive<usize>,
    // Inclusive range of grid cell rows to cover.
    pub idx_y_range: RangeInclusive<usize>,
    // Row stride: number of vertices per grid row.
    pub num_x_vertices: usize,
    // Current column cursor.
    cur_idx_x: usize,
    // Current row cursor.
    cur_idx_y: usize,
}
impl DefaultPatchIndexIter {
    // Builds an iterator over the given inclusive cell ranges.
    pub fn new(
        idx_x_range: &RangeInclusive<usize>,
        idx_y_range: &RangeInclusive<usize>,
        num_x_vertices: usize,
    ) -> Self {
        // Start the cursors at the beginning of both ranges.
        let cur_idx_x = *idx_x_range.start();
        let cur_idx_y = *idx_y_range.start();
        Self {
            idx_x_range: idx_x_range.clone(),
            idx_y_range: idx_y_range.clone(),
            num_x_vertices,
            cur_idx_x,
            cur_idx_y,
        }
    }

    // Flattens a (column, row) grid coordinate into a linear vertex index (row-major).
    fn get_index_value(&self, idx_x: usize, idx_y: usize) -> usize {
        idx_x + idx_y * self.num_x_vertices
    }
}
impl Iterator for DefaultPatchIndexIter {
    /// One grid quad, split into two index triangles.
    type Item = [(u16, u16, u16); 2];

    fn next(&mut self) -> Option<Self::Item> {
        // Wrap to the next row once the column cursor hits the end of its range;
        // stop when the row cursor reaches the end of its range.
        if self.cur_idx_x == *self.idx_x_range.end() {
            self.cur_idx_x = *self.idx_x_range.start();
            self.cur_idx_y += 1;
            if self.cur_idx_y == *self.idx_y_range.end() {
                return None;
            }
        }

        // Snapshot the current cell, then advance the column cursor.
        let (x, y) = (self.cur_idx_x, self.cur_idx_y);
        self.cur_idx_x += 1;

        // Corner vertex indices of the cell, narrowed to u16 for the index buffer.
        let tl = self.get_index_value(x, y) as u16;
        let tr = self.get_index_value(x + 1, y) as u16;
        let bl = self.get_index_value(x, y + 1) as u16;
        let br = self.get_index_value(x + 1, y + 1) as u16;

        Some([(tl, tr, bl), (tr, br, bl)])
    }
}

View File

@@ -1,2 +0,0 @@
// Grid-patch index construction (quad → triangle index iterators).
pub mod index_patch;
// Triangle winding checks used to cull back-facing / seam-crossing quads.
pub mod triangle;

View File

@@ -1,31 +0,0 @@
use crate::CameraViewPort;
use cgmath::BaseFloat;
// A 2D triangle referencing three vertices by shared borrow.
pub struct Triangle<'a, S>
where
    S: BaseFloat,
{
    v1: &'a [S; 2],
    v2: &'a [S; 2],
    v3: &'a [S; 2],
}

impl<'a, S> Triangle<'a, S>
where
    S: BaseFloat,
{
    // Wraps three vertex references into a triangle.
    pub fn new(v1: &'a [S; 2], v2: &'a [S; 2], v3: &'a [S; 2]) -> Self {
        Self { v1, v2, v3 }
    }

    // A triangle is invalid when its winding disagrees with the camera's
    // longitude orientation (equivalent to `tri_ccw != reversed_longitude`).
    pub fn is_invalid(&self, camera: &CameraViewPort) -> bool {
        let tri_ccw = self.is_ccw();
        let reversed_longitude = camera.get_longitude_reversed();
        (!reversed_longitude && tri_ccw) || (reversed_longitude && !tri_ccw)
    }

    // True when the three vertices are in counter-clockwise order.
    pub fn is_ccw(&self) -> bool {
        crate::math::utils::ccw_tri(&self.v1, &self.v2, &self.v3)
    }
}

View File

@@ -1,9 +1,12 @@
use crate::downloader::{query, Downloader};
use crate::renderable::HiPS;
use crate::time::{DeltaTime, Time};
use crate::Abort;
use std::collections::VecDeque;
use std::collections::{VecDeque};
const MAX_NUM_TILE_FETCHING: isize = 8;
const MAX_QUERY_QUEUE_LENGTH: usize = 100;
@@ -12,18 +15,15 @@ pub struct TileFetcherQueue {
// A stack of queries to fetch
queries: VecDeque<query::Tile>,
base_tile_queries: Vec<query::Tile>,
tiles_fetched_time: Time,
}
impl TileFetcherQueue {
pub fn new() -> Self {
let queries = VecDeque::new();
let base_tile_queries = Vec::new();
let tiles_fetched_time = Time::now();
Self {
queries,
base_tile_queries,
tiles_fetched_time,
}
}
@@ -48,19 +48,8 @@ impl TileFetcherQueue {
self.base_tile_queries.push(query);
}
pub fn notify(&mut self, downloader: &mut Downloader, dt: Option<DeltaTime>) {
// notify all the x ms
let now = Time::now();
if let Some(dt) = dt {
if now - self.tiles_fetched_time >= dt {
self.tiles_fetched_time = now;
self.fetch(downloader);
}
} else {
self.tiles_fetched_time = now;
self.fetch(downloader);
}
pub fn notify(&mut self, downloader: &mut Downloader) {
self.fetch(downloader);
}
fn fetch(&mut self, downloader: &mut Downloader) {

View File

@@ -50,7 +50,6 @@ import { ContextMenu } from "./gui/ContextMenu.js";
import { ALEvent } from "./events/ALEvent.js";
import { Color } from './Color.js';
import { ImageFITS } from "./ImageFITS.js";
import { VRButton } from "./VRButton.js";
import { DefaultActionsForContextMenu } from "./DefaultActionsForContextMenu.js";
import A from "./A.js";
@@ -459,8 +458,7 @@ export let Aladin = (function () {
//this.discoverytree = new DiscoveryTree(this);
//}
// [ ] That might pose problems
//this.view.redraw();
this.view.redraw();
// go to full screen ?
if (options.fullScreen) {
@@ -473,11 +471,6 @@ export let Aladin = (function () {
this.contextMenu = new ContextMenu(this);
this.contextMenu.attachTo(this.view.catalogCanvas, DefaultActionsForContextMenu.getDefaultActions(this));
}
// initialize the VR button
if (options.vr) {
this.aladinDiv.appendChild(VRButton.createButton(this.view));
}
};
/**** CONSTANTS ****/
@@ -496,7 +489,7 @@ export let Aladin = (function () {
target: "0 +0",
cooFrame: "J2000",
fov: 60,
backgroundColor: "rgb(60, 60, 60)",
backgroundColor: "rgb(0, 0, 0)",
showReticle: true,
showZoomControl: true,
showFullscreenControl: true,
@@ -678,11 +671,6 @@ export let Aladin = (function () {
});
};
// @API
Aladin.prototype.setRenderer = function(renderer) {
this.options.vr.renderer = renderer;
}
Aladin.prototype.setFrame = function (frameName) {
if (!frameName) {
return;

View File

@@ -70,6 +70,7 @@ HiPSProperties.fetchFromID = async function(ID) {
}
HiPSProperties.fetchFromUrl = async function(urlOrId) {
let addTextExt = false;
try {
urlOrId = new URL(urlOrId);
} catch (e) {
@@ -79,7 +80,7 @@ HiPSProperties.fetchFromUrl = async function(urlOrId) {
urlOrId = new URL(urlOrId);
addTextExt = true;
} catch(e) {
throw e;
}
@@ -96,6 +97,10 @@ HiPSProperties.fetchFromUrl = async function(urlOrId) {
}
url = url + '/properties';
if (addTextExt) {
url = url + '.txt';
}
// make URL absolute
url = Utils.getAbsoluteURL(url);
// fix for HTTPS support --> will work for all HiPS served by CDS

View File

@@ -1,252 +0,0 @@
/**
* This is an adaptation of the original VRButton.
* Original at:
* https://github.com/mrdoob/three.js/blob/dev/examples/jsm/webxr/VRButton.js
*/
/**
 * VRButton class that handles the creation of a VR session
 *
 * @class VRButton
 */
class VRButton {
    /**
     * Constructs a VR mode button, or a fallback link when WebXR is
     * unavailable.
     *
     * @static
     * @param {View} view - The aladin view
     * @return {HTMLButtonElement|HTMLAnchorElement} The VR mode button or an
     * error message
     */
    static createButton(view) {
        const button = document.createElement('button');

        /**
         * Function for handling the process of entering VR mode.
         * Wires the button to start/end an immersive-vr XR session;
         * `currentSession` is the closure-shared session state.
         */
        function showEnterVR(/* device*/) {
            let currentSession = null;

            /**
             * Callback function to handle when the XR session is started
             *
             * @param {XRSession} session - The XR session that has been started
             */
            async function onSessionStarted(session) {
                session.addEventListener('end', onSessionEnded);
                // The WebGL2 context must be made XR-compatible before it can
                // back the session's base layer.
                let gl = view.imageCanvas.getContext('webgl2');
                await gl.makeXRCompatible();
                session.updateRenderState({
                    baseLayer: new XRWebGLLayer(session, gl)
                });
                await view.options.vr.renderer.xr.setSession(session);
                button.textContent = 'EXIT VR';
                // view.options.vr.renderer.setAnimationLoop(view.redrawVR.bind(view));
                // Kick off the XR render loop once a reference space is granted;
                // subsequent frames are rescheduled inside view.redrawVR.
                session.requestReferenceSpace('local-floor').then((refSpace) => {
                    const xrRefSpace = refSpace;
                    session.requestAnimationFrame((t, frame) => {view.redrawVR(t, frame, xrRefSpace)});
                });
                currentSession = session;
            }

            /**
             * Function to render the whole scene
             */
            // NOTE: to be removed (unused — the loop above reschedules itself)
            function onXRAnimationFrame(t, xrFrame) {
                currentSession.requestAnimationFrame(onXRAnimationFrame);
                view.redrawVR();
            }

            /**
             * Callback function to handle when the XR session ends
             */
            function onSessionEnded(/* event*/) {
                currentSession.removeEventListener('end', onSessionEnded);
                button.textContent = 'ENTER VR';
                currentSession = null;
            }

            // Make the button visible and interactive.
            button.style.display = '';
            button.style.cursor = 'pointer';
            button.style.left = 'calc(50% - 50px)';
            button.style.width = '100px';
            button.textContent = 'ENTER VR';
            button.onmouseenter = function() {
                button.style.opacity = '1.0';
            };
            button.onmouseleave = function() {
                button.style.opacity = '0.5';
            };
            // Toggle behavior: start a session when none is active, end it otherwise.
            button.onclick = function() {
                if (currentSession === null) {
                    // WebXR's requestReferenceSpace only works if the corresponding
                    // feature was requested at session creation time. For simplicity,
                    // just ask for the interesting ones as optional features, but be
                    // aware that the requestReferenceSpace call will fail if it turns
                    // out to be unavailable.
                    // ('local' is always available for immersive sessions and doesn't
                    // need to be requested separately.)
                    const sessionInit = {optionalFeatures: ['local-floor']};
                    navigator.xr.requestSession(
                        'immersive-vr', sessionInit).then(onSessionStarted);
                } else {
                    currentSession.end();
                }
            };
        }

        /**
         * Function for disabling the VR mode button
         *
         * @param {HTMLButtonElement} button - The VR mode button element to
         * be disabled
         */
        function disableButton() {
            button.style.display = '';
            button.style.cursor = 'auto';
            button.style.left = 'calc(50% - 75px)';
            button.style.width = '150px';
            button.onmouseenter = null;
            button.onmouseleave = null;
            button.onclick = null;
        }

        /**
         * Function for handling the case where WebXR is not supported
         *
         * @description This function disables the VR mode button and displays a
         * message indicating that VR is not supported
         *
         * @param {HTMLButtonElement} button - The VR mode button element to be
         * disabled and updated with a message
         */
        function showWebXRNotFound() {
            disableButton();
            button.textContent = 'VR NOT SUPPORTED';
        }

        /**
         * Function for handling the case where VR is not allowed due to an
         * exception
         *
         * @description This function disables the VR mode button, logs an
         * exception to the console, and displays a message indicating that VR
         * is not allowed
         *
         * @param {any} exception - The exception object or error that indicates
         * why VR is not allowed
         * @param {HTMLButtonElement} button - The VR mode button element to be
         * disabled and updated with a message
         */
        function showVRNotAllowed(exception) {
            disableButton();
            console.warn('Exception when trying to call xr.isSessionSupported',
                exception);
            button.textContent = 'VR NOT ALLOWED';
        }

        /**
         * Function for styling an HTML element with specific CSS properties
         *
         * @param {HTMLElement} element - The HTML element to be styled
         */
        function stylizeElement(element) {
            element.style.position = 'absolute';
            element.style.bottom = '20px';
            element.style.padding = '12px 6px';
            element.style.border = '1px solid #fff';
            element.style.borderRadius = '4px';
            element.style.background = 'rgba(0,0,0,0.1)';
            element.style.color = '#fff';
            element.style.font = 'normal 13px sans-serif';
            element.style.textAlign = 'center';
            element.style.opacity = '0.5';
            element.style.outline = 'none';
            element.style.zIndex = '999';
        }

        if ('xr' in navigator) {
            button.id = 'VRButton';
            // Hidden until support is confirmed asynchronously below.
            button.style.display = 'none';
            stylizeElement(button);
            navigator.xr.isSessionSupported('immersive-vr').then(function(supported) {
                supported ? showEnterVR() : showWebXRNotFound();
                // Auto-enter VR when a session was already granted (e.g. page reload
                // inside an XR browser).
                if (supported && VRButton.xrSessionIsGranted) {
                    button.click();
                }
            }).catch(showVRNotAllowed);
            return button;
        } else {
            // No WebXR at all: return an explanatory link instead of a button.
            const message = document.createElement('a');
            if (window.isSecureContext === false) {
                message.href = document.location.href.replace(/^http:/, 'https:');
                message.innerHTML = 'WEBXR NEEDS HTTPS';
            } else {
                message.href = 'https://immersiveweb.dev/';
                message.innerHTML = 'WEBXR NOT AVAILABLE';
            }
            message.style.left = 'calc(50% - 90px)';
            message.style.width = '180px';
            message.style.textDecoration = 'none';
            stylizeElement(message);
            return message;
        }
    }

    /**
     * Registers a listener for the "sessiongranted" event to track the XR
     * session being granted.
     *
     * @description This method checks if the WebXR API is available and
     * registers a listener for the "sessiongranted" event to track when an
     * XR session is granted. It sets the `VRButton.xrSessionIsGranted`
     * property to `true` when the event is triggered.
     */
    static registerSessionGrantedListener() {
        if ('xr' in navigator) {
            // WebXRViewer (based on Firefox) has a bug where addEventListener
            // throws a silent exception and aborts execution entirely.
            if (/WebXRViewer\//i.test(navigator.userAgent)) return;
            navigator.xr.addEventListener('sessiongranted', () => {
                VRButton.xrSessionIsGranted = true;
            });
        }
    }
}
// Static flag, flipped by the "sessiongranted" listener registered at load time.
VRButton.xrSessionIsGranted = false;
VRButton.registerSessionGrantedListener();
export {VRButton};

View File

@@ -370,7 +370,7 @@ export let View = (function () {
}
this.computeNorder();
//this.redraw();
this.redraw();
};
var pixelateCanvasContext = function (ctx, pixelateFlag) {
@@ -1059,41 +1059,6 @@ export let View = (function () {
View.FPS_INTERVAL = 1000 / 140;
View.prototype.redrawVR = function (t, frame, xrRefSpace) {
const session = frame.session;
session.requestAnimationFrame((t, frame) => {this.redrawVR(t, frame, xrRefSpace)});
let pose = frame.getViewerPose(xrRefSpace);
if (!pose) return;
// Elapsed time since last loop
const now = Date.now();
const elapsedTime = now - this.then;
// If enough time has elapsed, draw the next frame
//if (elapsedTime >= View.FPS_INTERVAL) {
// Get ready for next frame by setting then=now, but also adjust for your
// specified fpsInterval not being a multiple of RAF's interval (16.7ms)
// Drawing code
try {
this.moving = this.wasm.update(elapsedTime);
} catch (e) {
console.warn(e)
}
////// 2. Draw catalogues////////
const isViewRendering = this.wasm.isRendering();
if (isViewRendering || this.needRedraw) {
this.drawAllOverlays();
}
this.needRedraw = false;
this.options.vr.animation();
}
/**
* redraw the whole view
*/

View File

@@ -25,7 +25,6 @@ export default defineConfig({
},
rollupOptions: {},
//formats: ["es"],
target: ["es2015", "chrome58", "edge16", "firefox57", "node12", "safari11"],
//target: ["es2015"],
// Relative to the root
outDir: resolve(__dirname, 'dist'),