Compare commits


29 Commits

Author SHA1 Message Date
Matthieu Baumann
90305de2de test: unbind the layer shaders 2023-10-31 10:58:10 +01:00
ftheurel
1a2451f4d3 Add context to renderer 2023-10-30 15:23:38 +00:00
Matthieu Baumann
f43c4273a1 bugfix: remove direct call of redraw inside fixLayoutDimensions 2023-10-30 15:44:55 +01:00
ftheurel
b201fb69fc Lower level animation implementation
The setAnimationLoop is replaced by a call to requestAnimationFrame on the xrSession.
2023-10-30 14:31:25 +00:00
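
For context, a minimal sketch of the pattern this commit describes, assuming a three.js renderer and an active xrSession as in examples/al-vr.html; the callback name onXRFrame is hypothetical:

// Sketch: instead of renderer.setAnimationLoop(animate), schedule frames
// directly on the XR session with XRSession.requestAnimationFrame.
function onXRFrame(time, frame) {
    // queue the next frame on the session itself
    frame.session.requestAnimationFrame(onXRFrame);
    animate(); // update and render the scene (see examples/al-vr.html)
}
// once the immersive session has started:
// xrSession.requestAnimationFrame(onXRFrame);
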
ftheurel
c2047278e4 Test commit 2023-10-23 09:15:25 +00:00
ftheurel
a03fc7a947 Fix Animation
Add bind for the renderer
2023-10-20 12:31:25 +00:00
ftheurel
db74147021 Update al-vr.html 2023-10-19 10:23:15 +00:00
ftheurel
4e0c2e69c5 Add setup VR animation loop 2023-10-19 10:03:50 +00:00
ftheurel
ed135d1306 Replace parameters
Replace the renderer and the animation parameters with the aladin view
2023-10-19 06:50:41 +00:00
ftheurel
b7642b4a81 Add import map and basic scene 2023-10-19 06:48:00 +00:00
ftheurel
6beca88785 Add VRButton to the view
The button is displayed but still can't launch the VR session
2023-10-18 14:15:04 +00:00
Matthieu Baumann
3bd1d4c1f0 first commit on vr branch 2023-10-18 14:50:24 +02:00
Matthieu Baumann
bda3e50a8d do not call update 2x on the hipses, render the raytracer with depth=1 too 2023-09-27 16:23:24 +02:00
Matthieu Baumann
1981721b05 change default background color to 'rgb(60, 60, 60)', fix #106 2023-09-27 15:45:52 +02:00
Matthieu Baumann
09d35f5918 Important commit: modify bundling config to add support for older browsers. Should correspond to 2017 2023-09-27 15:37:41 +02:00
Matthieu Baumann
9137effc3a factorize code by handling reversed_longitude directly when projecting. Should fix #115 2023-09-27 15:33:19 +02:00
Matthieu Baumann
0f619156e6 fix animation test by fetching tiles every 700ms when moving. Fetching remains disabled when inertia is on 2023-09-26 16:52:22 +02:00
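
A rough JavaScript illustration of the throttling behaviour described here (the actual change is in the Rust TileFetcherQueue further down this diff; the 700 ms value comes from this message, all names below are hypothetical):

// Sketch: fetch queued tiles at most once every 700 ms while moving,
// and never while inertia is running.
let lastFetch = 0;
function maybeFetchTiles(now, moving, inertiaActive, fetch) {
    if (inertiaActive) return;        // fetching stays disabled during inertia
    const delay = moving ? 700 : 0;   // throttle only while the view moves
    if (now - lastFetch >= delay) {
        lastFetch = now;
        fetch();                      // trigger the queued tile requests
    }
}
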
Matthieu Baumann
d62720d0bd change version to a bugfix one 2023-09-26 14:30:38 +02:00
Matthieu Baumann
94cad9b878 remove deploy script on git 2023-09-26 11:32:48 +02:00
Matthieu Baumann
dcafbd7b91 revert seeking properties with a txt extension 2023-09-26 11:25:14 +02:00
Matthieu Baumann
c42ac03836 fix polyfill in sin proj 2023-09-22 17:04:51 +02:00
Matthieu Baumann
13a6037882 Polyline: add HPX code to avoid drawing lines between two different Collignon zones 2023-09-22 17:04:51 +02:00
szpetny
5a90d9705c all sky with polygons HPX MOL - examples added 2023-09-22 17:04:51 +02:00
szpetny
7dec2fd9be A proposal for a new feature - let the polygon be filled with a given color and a given transparency 2023-09-22 17:04:51 +02:00
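
A short usage sketch of the proposed option, mirroring the example added later in this changeset (the option names fill, fillColor and opacity are taken from that example):

// Add a filled, semi-transparent polygon footprint to an overlay.
var overlay = A.graphicOverlay({color: '#ee2345', lineWidth: 3});
aladin.addOverlay(overlay);
overlay.addFootprints([A.polygon(
    [[264.375, -35.685], [258.75, -30.0], [264.375, -24.624], [270.0, -30.0]],
    {color: '#808080', fillColor: '#808080', opacity: 0.4, lineWidth: 1, fill: true})]);
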
szpetny
43a8bf0e6e examples fixed 2023-09-20 19:28:42 +02:00
szpetny
01aff09511 expose mouse coords in callbacks for source/footprint click and hover 2023-09-20 19:28:42 +02:00
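
A minimal usage sketch of the extended callback signature (it mirrors the handlers updated later in this diff; xyMouseCoords carries the pixel position of the event):

// Callbacks now receive the mouse coordinates as a second argument.
aladin.on('objectClicked', function (object, xyMouseCoords) {
    if (object) {
        console.log('clicked', object.data.name, 'at pixel',
                    xyMouseCoords.x, xyMouseCoords.y);
    }
});
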
Matthieu Baumann
7a52a9f962 Fix #123, difficulty to get the radius in pixels of a circle at the edge of a projection. My solution: I project 4 vertices of the circle lying at the cardinal points N, S, E, W, and I take as radius the minimal distance between the circle center and one of these vertices lying on the circle 2023-09-14 06:20:07 +02:00
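
A sketch of the approach described here, close to the Circle.js change shown near the end of this diff (AladinUtils.radecToViewXy appears there; the standalone helper below is hypothetical):

// Project the 4 cardinal points of the circle and keep the smallest
// on-screen distance to the centre as the pixel radius.
function circleRadiusInPixels(center, centerRaDec, radiusDegrees, view) {
    let radius = Number.POSITIVE_INFINITY;
    [[-1, 0], [1, 0], [0, -1], [0, 1]].forEach(([dRa, dDec]) => {
        const v = AladinUtils.radecToViewXy(
            centerRaDec[0] + dRa * radiusDegrees,
            centerRaDec[1] + dDec * radiusDegrees, view);
        if (v) {
            const dx = v[0] - center.x, dy = v[1] - center.y;
            radius = Math.min(Math.sqrt(dx * dx + dy * dy), radius);
        }
    });
    return Number.isFinite(radius) ? radius : null; // null: no point projects into the view
}
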
Matthieu Baumann
3bda0fcd75 Merge pull request #118 from szpetny/draw-function-fixes
Small changes regarding drawing a footprint
2023-08-31 16:55:53 +02:00
szpetny
98b0d0dff6 setOverlay added to Footprint because the draw function fails when it is lacking;
improvement - read lineWidth from the overlay if not specified in the shape
2023-08-22 16:29:06 +02:00
40 changed files with 24474 additions and 472 deletions

.gitignore vendored

@@ -11,3 +11,6 @@ src/core/Cargo.lock
aladin-lite*.tgz
.vscode
deploy.sh
deploy-beta.sh


@@ -1,15 +0,0 @@
USER_ALADIN="matthieu.baumann"
DATEUPLOAD="$(date '+%Y-%m-%d')"
ssh $USER_ALADIN@aladin 'sg hips -c "mkdir -p /home/matthieu.baumann/al-tmp && rm -rf /home/matthieu.baumann/al-tmp/*"'
# Copy the dist files
scp dist/* $USER_ALADIN@aladin:~/al-tmp
# Copy the tgz
cp aladin-l*.tgz aladin-lite.tgz
scp aladin-lite.tgz $USER_ALADIN@aladin:~/al-tmp
ssh $USER_ALADIN@aladin "sg hips -c 'rm -rf /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD &&
mkdir -p /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD &&
cp /home/matthieu.baumann/al-tmp/* /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD &&
rm -rf /home/thomas.boch/AladinLite/www/api/v3/beta &&
ln -s /home/thomas.boch/AladinLite/www/api/v3/d$DATEUPLOAD /home/thomas.boch/AladinLite/www/api/v3/beta'"


@@ -1,17 +0,0 @@
USER_ALADIN="matthieu.baumann"
DATEUPLOAD="$(date '+%Y-%m-%d')"
ssh $USER_ALADIN@aladin 'sg hips -c "mkdir -p /home/matthieu.baumann/al-tmp && rm -rf /home/matthieu.baumann/al-tmp/*"'
# Copy the dist files
# For compatibility with the docs, rename the UMD file into aladin.js
scp dist/aladin.umd.cjs $USER_ALADIN@aladin:~/al-tmp/aladin.js
# Copy the tgz
mv aladin-l*.tgz aladin-lite.tgz
scp aladin-lite.tgz $USER_ALADIN@aladin:~/al-tmp
ssh $USER_ALADIN@aladin "sg hips -c 'rm -rf /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD &&
mkdir -p /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD &&
cp /home/matthieu.baumann/al-tmp/* /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD &&
rm -rf /home/thomas.boch/AladinLite/www/api/v3/latest &&
ln -s /home/thomas.boch/AladinLite/www/api/v3/$DATEUPLOAD /home/thomas.boch/AladinLite/www/api/v3/latest &&
ln -s /home/thomas.boch/AladinLite/www/api/v3/latest/aladin-lite.tgz /home/thomas.boch/AladinLite/www/api/v3/latest/AladinLiteAssets.tar.gz'"

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,34 @@
<!doctype html>
<html>
<head>
</head>
<body>
<div id="aladin-lite-div" style="width: 500px; height: 400px"></div>
<script type="module">
import A from '../src/js/A.js';
let aladin;
A.init.then(() => {
// Start up Aladin Lite
aladin = A.aladin('#aladin-lite-div', {survey: "CDS/P/DSS2/color", target: 'M 1', fov: 0.2, showContextMenu: true, fullScreen: true});
var overlay = A.graphicOverlay({color: '#ee2345', lineWidth: 3});
aladin.addOverlay(overlay);
overlay.add(A.circle(83.66067, 22.03081, 40.0, {color: 'cyan'})); // radius in degrees
aladin.on("footprintClicked", (footprint) => {
console.log("footprint clicked catched", footprint)
})
aladin.on("objectClicked", (object) => {
console.log("object clicked catched", object)
})
aladin.on("footprintHovered", (footprint) => {
console.log("footprint hovered catched", footprint)
})
aladin.on("objectHoveredStop", (object) => {
console.log("Object hovered stopped", object)
})
});
</script>
</body>
</html>


@@ -14,7 +14,7 @@
<!--link rel="stylesheet" href="css/grids-responsive-min.css"-->
<link rel="stylesheet" href="https://unpkg.com/purecss@2.0.3/build/buttons.css">
<link rel="stylesheet" href="https://unpkg.com/purecss@2.0.3/build/buttons-core.css">
<!--meta name="viewport" content="initial-scale=1.0, user-scalable=no"-->
<meta name="viewport" content="viewport-fit=cover, width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no">
</head>
@@ -81,15 +81,15 @@
<div id="coo_epoca">
<a class="pure-button nav-button nav-goto" href="#">De Epoca</a><br>
&nbsp;&nbsp;<a class="pure-button nav-button nav-flyto" href="#">Move</a>
</div>
</div>
<div id="coo_legende">
<a class="pure-button nav-button nav-goto" href="#">Légende</a><br>
&nbsp;&nbsp;<a class="pure-button nav-button nav-flyto" href="#">Move</a>
</div>
</div>
<div id="coo_orion">
<a class="pure-button nav-button nav-goto" href="#">Orion</a><br>
&nbsp;&nbsp;<a class="pure-button nav-button nav-flyto" href="#">Move</a>
</div>
</div>
<div id="coo_magellan">
<a class="pure-button nav-button nav-goto" href="#">Magellan</a><br>
&nbsp;&nbsp;<a class="pure-button nav-button nav-flyto" href="#">Move</a>
@@ -97,7 +97,7 @@
<div id="coo_halley">
<a class="pure-button nav-button nav-goto" href="#">Halley</a><br>
&nbsp;&nbsp;<a class="pure-button nav-button nav-flyto" href="#">Move</a>
</div>
</div>
</div>
<style type="text/css"> .aladin-reticleColor { color: rgb(178, 50, 178); font-weight:bold;} </style>
@@ -106,27 +106,27 @@
html, body {
height: 100%;
}
body {
display: flex;
flex-direction: column;
}
.aladin-zoomControl {
top: 10% !important;
left: unset !important;
right: 4px !important;
}
.aladin-zoomControl a {
font-size: 24px !important;
padding: 22px !important;
}
#aladin{
flex: 1 1 0;
}
#explain {
padding: 4px;
top: 30%;
@@ -134,51 +134,51 @@
font-size: 11pt;
overflow: scroll;
}
#explain tbody tr:nth-child(even) {
background-color: #ffffff;
}
#explain tbody tr:nth-child(odd) {
background-color: #ccdaeb;
}
#layersControlLeft {
padding: 10px;
right: unset;
left: 4px;
top: 20vh;
}
#layersCL2 {
padding: 10px;
right: unset;
left: 4px;
top: 90vh;
}
#layersControlRight {
padding: 4px;
left: unset;
right: 4px;
top: 25vh;
}
#layersControlLeft, #layersControlRight, #layersCL2 input {
margin-right: 5px;
}
.img-hips {
padding: 5px;
margin: 5px;
}
#opacity-slider {
-webkit-appearance: none !important; /* Override default CSS styles */
width: 220px;
height: 25px;
}
#opacity-slider::-webkit-slider-thumb {
-webkit-appearance: none !important; /* Override default look */
appearance: none;
@@ -187,18 +187,18 @@
background: #4CAF50; /* Green background */
cursor: pointer; /* Cursor on hover */
}
#opacity-slider::-moz-range-thumb {
width: 25px; /* Set a specific slider handle width */
height: 25px; /* Slider handle height */
background: #4CAF50; /* Green background */
cursor: pointer; /* Cursor on hover */
}
.aladin-box {
font-size: 12px !important;
}
#calibCircle {
position: fixed;
border: 8px solid red;
@@ -210,16 +210,16 @@
z-index: 1000;
pointer-events: none;
}
.pure-table {
font-size: small;
}
.catcoro {
display: inline;
vertical-align: middle;
}
.coro-star {
vertical-align: middle;
}
@@ -360,7 +360,7 @@
});
// listen to click on objects
aladin.on('objectClicked', function(source) {
aladin.on('objectClicked', function(source, xyMouseCoords) {
var html = '<table class="pure-table">';
if (curSelectedSource != null) {
@@ -509,7 +509,7 @@
points[points.length-1].push(Utils.relMouseCoords(drawOverlayCanvas.imageCanvas, e));
drawOverlayCtx.beginPath();
for (var k=0; k<points.length; k++) {
drawOverlayCtx.moveTo(points[k][0].x, points[k][0].y);
for (var i = 1; i < points[k].length; i++) {


@@ -11,7 +11,7 @@
let aladin;
A.init.then(() => {
// Start up Aladin Lite
aladin = A.aladin('#aladin-lite-div', {target: 'M 82', fov: 0.25});
aladin = A.aladin('#aladin-lite-div', {target: 'M 82', fov: 0.25, showContextMenu: true});
aladin.addCatalog(A.catalogFromSimbad('M 82', 0.1, {onClick: 'showTable'}));
aladin.addCatalog(A.catalogFromNED('09 55 52.4 +69 40 47', 0.1, {onClick: 'showPopup', shape: 'plus'}));


@@ -17,9 +17,10 @@
var msg;
// define function triggered when a source is hovered
aladin.on('objectHovered', function(object) {
aladin.on('objectHovered', function(object, xyMouseCoords) {
if (object) {
msg = 'You hovered object ' + object.data.name + ' located at ' + object.ra + ', ' + object.dec;
msg = 'You hovered object ' + object.data.name + ' located at ' + object.ra + ', ' + object.dec + '; mouse coords - x: '
+ xyMouseCoords.x + ', y: ' + xyMouseCoords.y;
}
else {
msg = 'No object hovered';
@@ -27,20 +28,22 @@
$('#infoDiv').html(msg);
});
aladin.on('objectHoveredStop', function(object) {
aladin.on('objectHoveredStop', function(object, xyMouseCoords) {
if (object) {
msg = 'You stopped hove object ' + object.data.name + ' located at ' + object.ra + ', ' + object.dec;
msg = 'You stopped hove object ' + object.data.name + ' located at ' + object.ra + ', ' + object.dec + '; mouse coords - x: '
+ xyMouseCoords.x + ', y: ' + xyMouseCoords.y;
}
$('#infoDiv').html(msg);
});
// define function triggered when an object is clicked
var objClicked;
aladin.on('objectClicked', function(object) {
aladin.on('objectClicked', function(object, xyMouseCoords) {
if (object) {
objClicked = object;
object.select();
msg = 'You clicked object ' + object.data.name + ' located at ' + object.ra + ', ' + object.dec;
object.select();
msg = 'You clicked object ' + object.data.name + ' located at ' + object.ra + ', ' + object.dec + '; mouse coords - x: '
+ xyMouseCoords.x + ', y: ' + xyMouseCoords.y;
}
else {
objClicked.deselect();
@@ -51,4 +54,4 @@
});
</script>
</body>
</html>
</html>


@@ -0,0 +1,26 @@
<!doctype html>
<html>
<head>
</head>
<body>
<div id="aladin-lite-div" style="width: 500px; height: 400px"></div>
<script type="module">
import A from '../src/js/A.js';
let aladin;
A.init.then(() => {
// Start up Aladin Lite
aladin = A.aladin('#aladin-lite-div', {fov: 122, showContextMenu: true, fullScreen: true});
var overlay = A.graphicOverlay({color: '#ee2345', lineWidth: 3});
aladin.addOverlay(overlay);
overlay.addFootprints([A.polygon(
[[264.375,-35.68533471265207], [258.75,-30.000000000000018], [264.375,-24.624318352164085], [270,-30.000000000000018]],
{color: '#808080', fillColor: '#808080', opacity: .4, lineWidth: 1, fill: true})]);
aladin.gotoRaDec(264.375,-24.624318352164085);
});
</script>
</body>
</html>


@@ -21,17 +21,17 @@
]);
overlay.add(A.circle(83.66067, 22.03081, 0.04, {color: 'cyan'})); // radius in degrees
aladin.on("footprintClicked", (footprint) => {
console.log("footprint clicked catched", footprint)
aladin.on("footprintClicked", (footprint, xyMouseCoords) => {
console.log("footprint clicked catched: ", footprint, "mouse coords xy: ", xyMouseCoords.x, xyMouseCoords.y);
})
aladin.on("objectClicked", (object) => {
console.log("object clicked catched", object)
aladin.on("objectClicked", (object, xyMouseCoords) => {
console.log("object clicked catched: ", object, "mouse coords xy: ", xyMouseCoords.x, xyMouseCoords.y);
})
aladin.on("footprintHovered", (footprint) => {
console.log("footprint hovered catched", footprint)
aladin.on("footprintHovered", (footprint, xyMouseCoords) => {
console.log("footprint hovered catched: ", footprint, "mouse coords xy: ", xyMouseCoords.x, xyMouseCoords.y);
})
aladin.on("objectHoveredStop", (object) => {
console.log("Object hovered stopped", object)
aladin.on("objectHoveredStop", (object, xyMouseCoords) => {
console.log("Object hovered stopped: ", object, "mouse coords xy: ", xyMouseCoords.x, xyMouseCoords.y);
})
const cat = A.catalogFromVizieR('B/assocdata/obscore', 'M 1', 100, {onClick: 'showTable', limit: 1000});


@@ -88,7 +88,7 @@
});
// listen to click on objects
aladin.on('objectClicked', function (source) {
aladin.on('objectClicked', function (source, xyMouseCoords) {
var html = '<table class="pure-table">';
if (curSelectedSource != null) {
@@ -142,4 +142,4 @@
});
});
</script>
</html>
</html>

examples/al-vr.html Normal file

@@ -0,0 +1,113 @@
<!doctype html>
<html>
<head>
</head>
<script type="importmap">
{
"imports": {
"three": "https://unpkg.com/three@0.157.0/build/three.module.js",
"three/addons/": "https://unpkg.com/three@0.157.0/examples/jsm/"
}
}
</script>
<body>
<div id="aladin-lite-div" style="width: 1024px; height: 768px"></div>
<script type="module">
import A from '../src/js/A.js';
import * as THREE from 'three';
let aladin;
A.init.then(() => {
aladin = A.aladin(
'#aladin-lite-div',
{
survey: 'P/DSS2/color', // set a survey
projection: 'TAN', // set a projection
fov: 70, // initial field of view in degrees
target: '338.98958 33.96', // initial target
cooFrame: 'equatorial', // set galactic frame
showCooGrid: true, // set the grid
fullScreen: true,
vr: {animation: animate.bind(renderer)},
}
);
//aladin.setOverlayImageLayer("https://alasky.cds.unistra.fr/JWST/CDS_P_JWST_Stephans-Quintet_NIRCam+MIRI")
initScene(aladin.view.imageCanvas);
aladin.setRenderer(renderer);
});
let renderer = null;
let scene = null;
let camera = null;
let cubeMesh = null;
// let controls = null;
/**
* Initializes a 3D scene, camera, and renderer for virtual reality (VR).
*
* @param {HTMLCanvasElement} canvas - The HTML canvas element to render the
* 3D scene
*/
function initScene(canvas) {
scene = new THREE.Scene();
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 1000);
scene.add(camera);
renderer = new THREE.WebGLRenderer({canvas: canvas, context: canvas.getContext('webgl2', {xrCompatible: true})}); // NOTE A difference here
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.xr.enabled = true;
// renderer.xr.setReferenceSpaceType('local');
renderer.autoClear = false;
const light = new THREE.PointLight(0xffffff, 10);
light.position.set(0, 2, 1);
scene.add(light);
const planeGeometry = new THREE.PlaneGeometry(10, 10);
const planeMaterial = new THREE.MeshPhongMaterial({ color: 0xff00ff });
const planeMesh = new THREE.Mesh(planeGeometry, planeMaterial);
planeMesh.position.set(0, -1, 0);
planeMesh.rotation.x = -Math.PI / 2;
scene.add(planeMesh);
const cubeGeometry = new THREE.BoxGeometry(1, 1, 1);
const cubeMaterial = new THREE.MeshPhongMaterial({ color: 0x00ff00 });
cubeMesh = new THREE.Mesh(cubeGeometry, cubeMaterial);
cubeMesh.position.set(0, 0, -2);
scene.add(cubeMesh);
}
/**
* Function to animate the 3D scene and rendering it.
*/
function animate() {
cubeMesh.rotation.x += 0.001;
cubeMesh.rotation.y += 0.001;
renderer.render( scene, camera );
}
// /**
// * Initializes a WebGL2 context and handles potential errors.
// */
// function initWebGL2() {
// // canvas = aladin.view.imageCanvas;
// canvas = document.getElementById(aladin.view.imageCanvas);
// // gl = canvas.getContext("webgl2", { alpha: true });
// gl = canvas.getContext('webgl2');
// if (!gl) { // If the gl didn't create properly
// alert('This browser doesn\'t support WebGL2');
// return;
// }
// }
</script>
</body>
</html>


@@ -2,7 +2,7 @@
"homepage": "https://aladin.u-strasbg.fr/",
"name": "aladin-lite",
"type": "module",
"version": "3.2.0",
"version": "3.2.1",
"description": "An astronomical HiPS visualizer in the browser",
"author": "Thomas Boch and Matthieu Baumann",
"license": "GPL-3",
@@ -35,6 +35,7 @@
"wasm": "wasm-pack build ./src/core --target web --release --out-name core -- --features webgl2",
"predeploy": "npm run build && rm -rf aladin-lite.tgz && npm pack",
"deploy": "./deploy.sh",
"deploy:beta": "npm run predeploy && ./deploy-beta.sh",
"build": "npm run wasm && vite build && cp examples/index.html dist/index.html",
"dev": "npm run build && vite",
"serve": "npm run dev",


@@ -9,8 +9,8 @@ pub mod vao {
use crate::object::element_array_buffer::ElementArrayBuffer;
use crate::webgl_ctx::WebGlContext;
use std::collections::HashMap;
use crate::Abort;
use std::collections::HashMap;
pub struct VertexArrayObject {
array_buffer: HashMap<&'static str, ArrayBuffer>,
@@ -88,7 +88,10 @@ pub mod vao {
}*/
pub fn num_elements(&self) -> usize {
self.element_array_buffer.as_ref().unwrap_abort().num_elements()
self.element_array_buffer
.as_ref()
.unwrap_abort()
.num_elements()
}
pub fn num_instances(&self) -> i32 {
@@ -155,6 +158,7 @@ pub mod vao {
pub fn unbind(&self) {
self.vao.gl.bind_vertex_array(None);
self._shader.unbind(&self.vao.gl);
}
}
@@ -170,8 +174,9 @@ pub mod vao {
}
impl<'a, 'b> ShaderVertexArrayObjectBoundRef<'a, 'b> {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) -> &Self {
self.vao.gl.draw_arrays(mode, byte_offset, size);
self
}
pub fn draw_elements_with_i32(
@@ -180,11 +185,12 @@ pub mod vao {
num_elements: Option<i32>,
type_: u32,
byte_offset: i32,
) {
) -> &Self {
let num_elements = num_elements.unwrap_or(self.vao.num_elements() as i32);
self.vao
.gl
.draw_elements_with_i32(mode, num_elements, type_, byte_offset);
self
}
pub fn draw_elements_instanced_with_i32(
@@ -192,7 +198,7 @@ pub mod vao {
mode: u32,
offset_element_idx: i32,
num_instances: i32,
) {
) -> &Self {
self.vao.gl.draw_elements_instanced_with_i32(
mode,
self.vao.num_elements() as i32,
@@ -200,10 +206,12 @@ pub mod vao {
offset_element_idx,
num_instances,
);
self
}
pub fn unbind(&self) {
self.vao.gl.bind_vertex_array(None);
self._shader.unbind(&self.vao.gl);
}
}
@@ -444,7 +452,10 @@ pub mod vao {
}*/
pub fn num_elements(&self) -> usize {
self.element_array_buffer.as_ref().unwrap_abort().num_elements()
self.element_array_buffer
.as_ref()
.unwrap_abort()
.num_elements()
}
pub fn num_instances(&self) -> i32 {
@@ -511,7 +522,8 @@ pub mod vao {
}
pub fn unbind(&self) {
//self.vao.gl.bind_vertex_array(None);
self.vao.gl.bind_vertex_array(None);
self._shader.unbind(&self.vao.gl);
}
}
@@ -528,13 +540,15 @@ pub mod vao {
}
use crate::object::array_buffer::VertexBufferObject;
impl<'a, 'b> ShaderVertexArrayObjectBoundRef<'a, 'b> {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) -> &Self {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
}
self.vao.gl.draw_arrays(mode, byte_offset, size);
self
}
pub fn draw_elements_with_i32(
@@ -543,7 +557,7 @@ pub mod vao {
num_elements: Option<i32>,
type_: u32,
byte_offset: i32,
) {
) -> &Self {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
@@ -555,6 +569,7 @@ pub mod vao {
self.vao
.gl
.draw_elements_with_i32(mode, num_elements, type_, byte_offset);
self
}
pub fn draw_elements_instanced_with_i32(
@@ -562,7 +577,7 @@ pub mod vao {
mode: u32,
offset_element_idx: i32,
num_instances: i32,
) {
) -> &Self {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
@@ -587,10 +602,12 @@ pub mod vao {
offset_element_idx,
num_instances,
);
self
}
pub fn unbind(&self) {
//self.vao.gl.bind_vertex_array(None);
self.vao.gl.bind_vertex_array(None);
self.shader.unbind(&self.vao.gl);
}
}
@@ -716,6 +733,9 @@ pub mod vao {
pub fn unbind(&self) {
//self.vao.gl.bind_vertex_array(None);
self.vao.gl.bind_vertex_array(None);
self.shader.unbind(&self.vao.gl);
}
}


@@ -144,8 +144,9 @@ impl App {
// When it will be supported nearly everywhere, we will need to uncomment this line to
// enable it
//gl.enable(WebGl2RenderingContext::SCISSOR_TEST);
gl.enable(WebGl2RenderingContext::CULL_FACE);
gl.cull_face(WebGl2RenderingContext::BACK);
//gl.enable(WebGl2RenderingContext::CULL_FACE);
//gl.cull_face(WebGl2RenderingContext::BACK);
// The tile buffer responsible for the tile requests
let downloader = Downloader::new();
@@ -630,18 +631,12 @@ impl App {
survey.add_tile(&cell, image, time_req)?;
self.request_redraw = true;
//} else {
// self.downloader.delay_rsc(Resource::Tile(tile));
//}
//}
self.time_start_blending = Time::now();
//self.tile_fetcher.notify(1, &mut self.downloader);
}
}
}
} else {
//self.tile_fetcher
// .notify_tile(&tile, false, true, &mut self.downloader);
self.downloader.delay_rsc(Resource::Tile(tile));
}
}
@@ -703,16 +698,30 @@ impl App {
}
// We fetch when we does not move
let has_not_moved_recently =
/*let has_not_moved_recently =
(Time::now() - self.camera.get_time_of_last_move()) > DeltaTime(100.0);
if has_not_moved_recently && self.inertia.is_none() {
// Triggers the fetching of new queued tiles
self.tile_fetcher.notify(&mut self.downloader);
}*/
// If there is inertia, we do not fetch any new tiles
if self.inertia.is_none() {
let has_not_moved_recently =
(Time::now() - self.camera.get_time_of_last_move()) > DeltaTime(100.0);
let dt = if has_not_moved_recently {
None
} else {
Some(DeltaTime::from_millis(700.0))
};
self.tile_fetcher.notify(&mut self.downloader, dt);
}
}
// The update from the camera
self.layers.update(&mut self.camera, &self.projection);
//self.layers.update(&mut self.camera, &self.projection);
if self.request_for_new_tiles
&& Time::now() - self.last_time_request_for_new_tiles > DeltaTime::from(200.0)
@@ -866,7 +875,7 @@ impl App {
&self.colormaps,
&self.projection,
)?;
/*
// Draw the catalog
//let fbo_view = &self.fbo_view;
//catalogs.draw(&gl, shaders, camera, colormaps, fbo_view)?;
@@ -894,7 +903,7 @@ impl App {
self.line_renderer.draw(&self.camera)?;
//let dpi = self.camera.get_dpi();
//ui.draw(&gl, dpi)?;
*/
// Reset the flags about the user action
self.camera.reset();


@@ -20,7 +20,6 @@ pub struct CameraViewPort {
// The rotation of the camera
rotation_center_angle: Angle<f64>,
w2m_rot: Rotation<f64>,
final_rot: Rotation<f64>,
w2m: Matrix4<f64>,
m2w: Matrix4<f64>,
@@ -103,7 +102,6 @@ impl CameraViewPort {
let zoomed = false;
let w2m_rot = Rotation::zero();
let final_rot = Rotation::zero();
// Get the initial size of the window
let window = web_sys::window().unwrap_abort();
@@ -147,7 +145,6 @@ impl CameraViewPort {
m2w,
dpi,
final_rot,
rotation_center_angle,
// The width over height ratio
aspect,
@@ -511,10 +508,11 @@ impl CameraViewPort {
pub fn set_longitude_reversed(&mut self, reversed_longitude: bool, proj: &ProjectionType) {
if self.reversed_longitude != reversed_longitude {
self.reversed_longitude = reversed_longitude;
self.rotation_center_angle = -self.rotation_center_angle;
self.update_rot_matrices(proj);
}
self.reversed_longitude = reversed_longitude;
// The camera is reversed => it has moved
self.moved = true;
@@ -526,18 +524,6 @@ impl CameraViewPort {
}
// Accessors
pub fn get_rotation(&self) -> &Rotation<f64> {
&self.w2m_rot
}
// This rotation is the final rotation, i.e. a composite of
// two rotations:
// - The current rotation of the sphere
// - The rotation around the center axis of a specific angle
pub fn get_final_rotation(&self) -> &Rotation<f64> {
&self.final_rot
}
pub fn get_w2m(&self) -> &cgmath::Matrix4<f64> {
&self.w2m
}
@@ -654,6 +640,11 @@ impl CameraViewPort {
}
fn update_center(&mut self) {
// Longitude reversed identity matrix
const ID_R: &Matrix4<f64> = &Matrix4::new(
-1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
// The center position is on the 3rd column of the w2m matrix
self.center = self.w2m.z;
@@ -662,9 +653,12 @@ impl CameraViewPort {
// Re-update the model matrix to take into account the rotation
// by theta around the center axis
self.final_rot = center_rot * self.w2m_rot;
let final_rot = center_rot * self.w2m_rot;
self.w2m = (&final_rot).into();
if self.reversed_longitude {
self.w2m = self.w2m * ID_R;
}
self.w2m = (&self.final_rot).into();
self.m2w = self.w2m.transpose();
}
}


@@ -1,20 +1,19 @@
use crate::math::HALF_PI;
use crate::math::PI;
use cgmath::Vector3;
use crate::ProjectionType;
use crate::CameraViewPort;
use crate::LonLatT;
use crate::ProjectionType;
use cgmath::InnerSpace;
use cgmath::Vector3;
use crate::math::angle::SerializeFmt;
use crate::math::TWICE_PI;
use crate::grid::XYScreen;
use crate::math::angle::SerializeFmt;
use crate::math::lonlat::LonLat;
use crate::math::TWICE_PI;
use crate::math::angle::ToAngle;
use core::ops::Range;
use cgmath::Vector2;
use core::ops::Range;
const OFF_TANGENT: f64 = 35.0;
const OFF_BI_TANGENT: f64 = 5.0;
@@ -40,7 +39,7 @@ impl Label {
options: LabelOptions,
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: &SerializeFmt
fmt: &SerializeFmt,
) -> Option<Self> {
let fov = camera.get_field_of_view();
let d = if fov.contains_north_pole() {
@@ -60,17 +59,15 @@ impl Label {
LonLatT::new(lon.to_angle(), lat.to_angle())
}
LabelOptions::OnSide => LonLatT::new(lon.to_angle(), lat.start.to_angle())
LabelOptions::OnSide => LonLatT::new(lon.to_angle(), lat.start.to_angle()),
};
let m1: Vector3<_> = lonlat.vector();
let m2 = (m1 + d * 1e-3).normalize();
//let s1 = projection.model_to_screen_space(&(system.to_icrs_j2000::<f64>() * m1), camera, reversed_longitude)?;
let d1 = projection.model_to_screen_space(&m1.extend(1.0), camera)?;
let d2 = projection.model_to_screen_space(&m2.extend(1.0), camera)?;
//let s2 = projection.model_to_screen_space(&(system.to_icrs_j2000::<f64>() * m2), camera, reversed_longitude)?;
let dt = (d2 - d1).normalize();
let db = Vector2::new(dt.y.abs(), dt.x.abs());
@@ -108,7 +105,7 @@ impl Label {
let lon = camera.get_center().lon();
LonLatT::new(lon, lat.to_angle())
}
LabelOptions::OnSide => LonLatT::new(lon.start.to_angle(), lat.to_angle())
LabelOptions::OnSide => LonLatT::new(lon.start.to_angle(), lat.to_angle()),
};
let m1: Vector3<_> = lonlat.vector();
@@ -141,13 +138,17 @@ impl Label {
};
// rot is between -PI and +PI
let rot = dt.y.signum() * dt.x.acos() + PI;
let mut angle = dt.y.signum() * dt.x.acos();
// Detect if the label is upside-down fix the angle by adding PI
if angle.abs() >= HALF_PI {
angle += PI;
}
Some(Label {
position,
content,
rot,
rot: angle,
})
}
}


@@ -21,6 +21,7 @@ pub mod domain;
use domain::{basic, cod::Cod, full::FullScreen, hpx::Hpx, par::Par};
/* S <-> NDC space conversion methods */
pub fn screen_to_ndc_space(
pos_screen_space: &Vector2<f64>,
camera: &CameraViewPort,
@@ -55,6 +56,7 @@ pub fn ndc_to_screen_space(
pos_screen_space / dpi
}
/* NDC <-> CLIP space conversion methods */
pub fn clip_to_ndc_space(pos_clip_space: &Vector2<f64>, camera: &CameraViewPort) -> Vector2<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
@@ -65,6 +67,20 @@ pub fn clip_to_ndc_space(pos_clip_space: &Vector2<f64>, camera: &CameraViewPort)
)
}
pub fn ndc_to_clip_space(
pos_normalized_device: &Vector2<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_normalized_device.x * ndc_to_clip.x * clip_zoom_factor,
pos_normalized_device.y * ndc_to_clip.y * clip_zoom_factor,
)
}
/* S <-> CLIP space conversion methods */
pub fn clip_to_screen_space(
pos_clip_space: &Vector2<f64>,
camera: &CameraViewPort,
@@ -81,19 +97,6 @@ pub fn screen_to_clip_space(
ndc_to_clip_space(&pos_normalized_device, camera)
}
pub fn ndc_to_clip_space(
pos_normalized_device: &Vector2<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_normalized_device.x * ndc_to_clip.x * clip_zoom_factor,
pos_normalized_device.y * ndc_to_clip.y * clip_zoom_factor,
)
}
use al_api::coo_system::CooSystem;
use cgmath::InnerSpace;
@@ -167,21 +170,15 @@ impl ProjectionType {
let pos_screen_space = *pos_screen_space;
let pos_normalized_device = screen_to_ndc_space(&pos_screen_space, camera);
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
let pos_clip_space = Vector2::new(
pos_normalized_device.x * ndc_to_clip.x * clip_zoom_factor,
pos_normalized_device.y * ndc_to_clip.y * clip_zoom_factor,
);
let pos_clip_space = ndc_to_clip_space(&pos_normalized_device, camera);
self.clip_to_world_space(&pos_clip_space)
.map(|mut pos_world_space| {
if camera.get_longitude_reversed() {
pos_world_space.x = -pos_world_space.x;
}
/*.map(|mut pos_world_space| {
if camera.get_longitude_reversed() {
pos_world_space.x = -pos_world_space.x;
}
pos_world_space.normalize()
})
pos_world_space.normalize()
})*/
}
/// Screen to model space deprojection
@@ -198,10 +195,7 @@ impl ProjectionType {
camera: &CameraViewPort,
) -> Option<Vector4<f64>> {
self.screen_to_world_space(pos_screen_space, camera)
.map(|world_pos| {
let r = camera.get_final_rotation();
r.rotate(&world_pos)
})
.map(|world_pos| camera.get_w2m() * world_pos)
}
pub fn normalized_device_to_model_space(
@@ -210,10 +204,7 @@ impl ProjectionType {
camera: &CameraViewPort,
) -> Option<XYZWModel> {
self.normalized_device_to_world_space(ndc_pos, camera)
.map(|world_pos| {
let r = camera.get_final_rotation();
r.rotate(&world_pos)
})
.map(|world_pos| camera.get_w2m() * world_pos)
}
pub fn model_to_screen_space(
@@ -295,18 +286,7 @@ impl ProjectionType {
camera: &CameraViewPort,
) -> Option<Vector2<f64>> {
self.world_to_clip_space(pos_world_space)
.map(|mut pos_clip_space| {
if camera.get_longitude_reversed() {
pos_clip_space.x = -pos_clip_space.x;
}
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_clip_space.x / (ndc_to_clip.x * clip_zoom_factor),
pos_clip_space.y / (ndc_to_clip.y * clip_zoom_factor),
)
})
.map(|pos_clip_space| clip_to_ndc_space(&pos_clip_space, camera))
}
pub fn normalized_device_to_world_space(
@@ -318,24 +298,6 @@ impl ProjectionType {
self.clip_to_world_space(&clip_pos)
}
/*pub fn world_to_normalized_device_space_unchecked(
&self,
pos_world_space: &Vector4<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
let mut pos_clip_space = self.world_to_clip_space_unchecked(pos_world_space);
if camera.get_longitude_reversed() {
pos_clip_space.x = -pos_clip_space.x;
}
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
Vector2::new(
pos_clip_space.x / (ndc_to_clip.x * clip_zoom_factor),
pos_clip_space.y / (ndc_to_clip.y * clip_zoom_factor),
)
}*/
pub fn world_to_screen_space(
&self,
pos_world_space: &Vector4<f64>,


@@ -12,12 +12,11 @@ pub mod moc;
use crate::renderable::line::RasterizedLineRenderer;
use super::utils::triangle::Triangle;
use wasm_bindgen::JsValue;
use hierarchy::MOCHierarchy;
use super::utils::Triangle;
use al_api::coo_system::CooSystem;
use al_api::moc::MOC as Cfg;


@@ -19,12 +19,13 @@ use crate::math::{angle::Angle, vector::dist2};
use crate::ProjectionType;
use crate::camera::CameraViewPort;
use crate::renderable::utils::BuildPatchIndicesIter;
use crate::renderable::utils::index_patch::DefaultPatchIndexIter;
use crate::{math::lonlat::LonLatT, utils};
use crate::{shader::ShaderManager, survey::config::HiPSConfig};
use crate::downloader::request::allsky::Allsky;
use crate::healpix::{cell::HEALPixCell, coverage::HEALPixCoverage};
use crate::math::angle::ToAngle;
use crate::math::lonlat::LonLat;
use crate::time::Time;
@@ -42,14 +43,7 @@ use std::fmt::Debug;
use wasm_bindgen::JsValue;
use web_sys::WebGl2RenderingContext;
// Identity matrix
const ID: &Matrix4<f64> = &Matrix4::new(
1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
// Longitude reversed identity matrix
const ID_R: &Matrix4<f64> = &Matrix4::new(
-1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
use super::utils::index_patch::CCWCheckPatchIndexIter;
const M: f64 = 280.0 * 280.0;
const N: f64 = 150.0 * 150.0;
@@ -508,9 +502,6 @@ impl HiPS {
let textures = ImageSurveyTextures::new(gl, config)?;
let gl = gl.clone();
let _depth = 0;
let _depth_tile = 0;
let footprint_moc = None;
// request the allsky texture
Ok(HiPS {
@@ -598,9 +589,16 @@ impl HiPS {
self.textures.contains_tile(cell)
}
pub fn update(&mut self, camera: &mut CameraViewPort, projection: &ProjectionType) {
pub fn update(
&mut self,
raytracer: &RayTracer,
camera: &mut CameraViewPort,
projection: &ProjectionType,
) {
let raytracing = raytracer.is_rendering(camera);
let vertices_recomputation_needed =
self.textures.reset_available_tiles() | camera.has_moved();
!raytracing && (self.textures.reset_available_tiles() | camera.has_moved());
if vertices_recomputation_needed {
self.recompute_vertices(camera, projection);
}
@@ -821,7 +819,7 @@ impl HiPS {
pos.push(ndc);
}
let patch_indices_iter = BuildPatchIndicesIter::new(
let patch_indices_iter = CCWCheckPatchIndexIter::new(
&(0..=n_segments_by_side),
&(0..=n_segments_by_side),
n_vertices_per_segment,
@@ -965,7 +963,6 @@ impl HiPS {
pub fn draw(
&self,
//switch_from_raytrace_to_raster: bool,
shaders: &mut ShaderManager,
colormaps: &Colormaps,
camera: &CameraViewPort,
@@ -978,20 +975,11 @@ impl HiPS {
let hips_frame = hips_cfg.get_frame();
let c = selected_frame.to(hips_frame);
// Get whether the camera mode is longitude reversed
//let longitude_reversed = hips_cfg.longitude_reversed;
let rl = if camera.get_longitude_reversed() {
ID_R
} else {
ID
};
// Retrieve the model and inverse model matrix
let w2v = c * (*camera.get_w2m()) * rl;
let w2v = c * (*camera.get_w2m());
let v2w = w2v.transpose();
let raytracing = raytracer.is_rendering(camera);
let longitude_reversed = camera.get_longitude_reversed();
let config = self.get_config();
self.gl.enable(WebGl2RenderingContext::BLEND);
@@ -1011,9 +999,6 @@ impl HiPS {
blend_cfg.enable(&self.gl, || {
if raytracing {
// Triangle are defined in CCW
self.gl.cull_face(WebGl2RenderingContext::BACK);
let shader = get_raytracer_shader(cmap, &self.gl, shaders, &config)?;
let shader = shader.bind(&self.gl);
@@ -1031,16 +1016,6 @@ impl HiPS {
raytracer.draw(&shader);
} else {
// Depending on if the longitude is reversed, triangles are either defined in:
// - CCW for longitude_reversed = false
// - CW for longitude_reversed = true
// Get the reverse longitude flag
if longitude_reversed {
self.gl.cull_face(WebGl2RenderingContext::FRONT);
} else {
self.gl.cull_face(WebGl2RenderingContext::BACK);
}
// The rasterizer has a buffer containing:
// - The vertices of the HEALPix cells for the most refined survey
// - The starting and ending uv for the blending animation
@@ -1072,17 +1047,8 @@ impl HiPS {
Some(self.num_idx as i32),
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
);
}
// Depending on if the longitude is reversed, triangles are either defined in:
// - CCW for longitude_reversed = false
// - CW for longitude_reversed = true
// Get the reverse longitude flag
if longitude_reversed {
self.gl.cull_face(WebGl2RenderingContext::FRONT);
} else {
self.gl.cull_face(WebGl2RenderingContext::BACK);
)
.unbind();
}
Ok(())


@@ -225,7 +225,8 @@ impl RayTracer {
None,
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
);
)
.unbind();
#[cfg(feature = "webgl2")]
shader
.attach_uniform("position_tex", &self.position_tex)
@@ -236,12 +237,13 @@ impl RayTracer {
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
)
.unbind();
}
pub fn is_rendering(&self, camera: &CameraViewPort) -> bool {
// Check whether the tile depth is 0 for square projection
// definition domains i.e. Mercator
let depth = camera.get_tile_depth();
camera.is_allsky() || depth == 0
camera.is_allsky() || depth <= 1
}
}


@@ -4,7 +4,7 @@ use wcs::ImgXY;
use crate::camera::CameraViewPort;
use crate::math::angle::ToAngle;
use crate::math::projection::ProjectionType;
use crate::renderable::utils::BuildPatchIndicesIter;
use crate::renderable::utils::index_patch::CCWCheckPatchIndexIter;
use al_api::coo_system::CooSystem;
use wcs::WCS;
@@ -215,7 +215,7 @@ pub fn get_grid_vertices(
for idx_x_range in &idx_x_ranges {
for idx_y_range in &idx_y_ranges {
let build_indices_iter =
BuildPatchIndicesIter::new(idx_x_range, idx_y_range, num_x_vertices, &pos, camera);
CCWCheckPatchIndexIter::new(idx_x_range, idx_y_range, num_x_vertices, &pos, camera);
let patch_indices = build_indices_iter
.flatten()


@@ -588,7 +588,7 @@ impl Image {
_ => return Err(JsValue::from_str("Image format type not supported")),
};
self.gl.disable(WebGl2RenderingContext::CULL_FACE);
//self.gl.disable(WebGl2RenderingContext::CULL_FACE);
// 2. Draw it if its opacity is not null
blend_cfg.enable(&self.gl, || {
@@ -612,7 +612,8 @@ impl Image {
Some(num_indices),
WebGl2RenderingContext::UNSIGNED_SHORT,
((off_indices as usize) * std::mem::size_of::<u16>()) as i32,
);
)
.unbind();
off_indices += self.num_indices[idx];
}
@@ -620,7 +621,7 @@ impl Image {
Ok(())
})?;
self.gl.enable(WebGl2RenderingContext::CULL_FACE);
//self.gl.enable(WebGl2RenderingContext::CULL_FACE);
self.gl.disable(WebGl2RenderingContext::BLEND);


@@ -286,7 +286,7 @@ impl RasterizedLineRenderer {
WebGl2RenderingContext::ONE,
);
self.gl.disable(WebGl2RenderingContext::CULL_FACE);
//self.gl.disable(WebGl2RenderingContext::CULL_FACE);
let shader = self.shader.bind(&self.gl);
for meta in self.meta.iter() {
@@ -301,7 +301,7 @@ impl RasterizedLineRenderer {
);
}
self.gl.enable(WebGl2RenderingContext::CULL_FACE);
//self.gl.enable(WebGl2RenderingContext::CULL_FACE);
self.gl.disable(WebGl2RenderingContext::BLEND);
Ok(())


@@ -259,7 +259,8 @@ impl Layers {
None,
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
);
)
.unbind();
}
// The first layer must be paint independently of its alpha channel
@@ -290,7 +291,7 @@ impl Layers {
// 1. Update the survey if necessary
let url = self.urls.get(layer).expect("Url should be found");
if let Some(survey) = self.surveys.get_mut(url) {
survey.update(camera, projection);
survey.update(&self.raytracer, camera, projection);
// 2. Draw it if its opacity is not null
survey.draw(shaders, colormaps, camera, raytracer, draw_opt)?;
@@ -428,6 +429,8 @@ impl Layers {
meta,
} = hips;
let img_ext = meta.img_format;
// 1. Add the layer name
let layer_already_found = self.layers.iter().any(|l| l == &layer);
@@ -440,7 +443,15 @@ impl Layers {
self.layers.insert(idx, layer.to_string());
// 2. Add the image survey
// 2. Add the meta information of the layer
self.meta.insert(layer.clone(), meta);
// Loop over all the meta for its longitude reversed property
// and set the camera to it if there is at least one
let longitude_reversed = self.meta.values().any(|meta| meta.longitude_reversed);
camera.set_longitude_reversed(longitude_reversed, proj);
// 3. Add the image survey
let url = String::from(properties.get_url());
// The layer does not already exist
// Let's check if no other hipses points to the
@@ -449,7 +460,7 @@ impl Layers {
if !url_already_found {
// The url is not processed yet
let cfg = HiPSConfig::new(&properties, meta.img_format)?;
let cfg = HiPSConfig::new(&properties, img_ext)?;
/*if let Some(initial_ra) = properties.get_initial_ra() {
if let Some(initial_dec) = properties.get_initial_dec() {
@@ -470,14 +481,6 @@ impl Layers {
self.urls.insert(layer.clone(), url.clone());
// 3. Add the meta information of the layer
self.meta.insert(layer.clone(), meta);
// Loop over all the meta for its longitude reversed property
// and set the camera to it if there is at least one
let longitude_reversed = self.meta.values().any(|meta| meta.longitude_reversed);
camera.set_longitude_reversed(longitude_reversed, proj);
let hips = self
.surveys
.get(&url)
@@ -612,12 +615,6 @@ impl Layers {
ready
}
pub fn update(&mut self, camera: &mut CameraViewPort, proj: &ProjectionType) {
for survey in self.surveys.values_mut() {
survey.update(camera, proj);
}
}
// Accessors
// HiPSes getters
pub fn get_hips_from_layer(&self, layer: &str) -> Option<&HiPS> {


@@ -1,115 +0,0 @@
use std::ops::RangeInclusive;
use cgmath::BaseFloat;
use crate::CameraViewPort;
// This iterator construct indices from a set of vertices defining
// a grid.
// Triangles that are in a clockwise order will not be renderer
// Whereas other counter-clockwise triangle will be
pub struct BuildPatchIndicesIter<'a> {
pub idx_x_range: RangeInclusive<usize>,
pub idx_y_range: RangeInclusive<usize>,
pub num_x_vertices: usize,
cur_idx_x: usize,
cur_idx_y: usize,
ndc: &'a [Option<[f32; 2]>],
camera: &'a CameraViewPort,
}
impl<'a> BuildPatchIndicesIter<'a> {
pub fn new(idx_x_range: &RangeInclusive<usize>, idx_y_range: &RangeInclusive<usize>, num_x_vertices: usize, ndc: &'a [Option<[f32; 2]>], camera: &'a CameraViewPort) -> Self {
let cur_idx_x = *idx_x_range.start();
let cur_idx_y = *idx_y_range.start();
Self {
idx_x_range: idx_x_range.clone(),
idx_y_range: idx_y_range.clone(),
num_x_vertices,
cur_idx_x,
cur_idx_y,
ndc,
camera,
}
}
fn get_index_value(&self, idx_x: usize, idx_y: usize) -> usize {
idx_x + idx_y * self.num_x_vertices
}
}
impl<'a> Iterator for BuildPatchIndicesIter<'a> {
type Item = [(u16, u16, u16); 2];
fn next(&mut self) -> Option<Self::Item> {
if self.cur_idx_x == *self.idx_x_range.end() {
self.cur_idx_x = *self.idx_x_range.start();
self.cur_idx_y += 1;
if self.cur_idx_y == *self.idx_y_range.end() {
return None;
}
}
let idx_tl = self.get_index_value(self.cur_idx_x, self.cur_idx_y);
let idx_tr = self.get_index_value(self.cur_idx_x + 1, self.cur_idx_y);
let idx_bl = self.get_index_value(self.cur_idx_x, self.cur_idx_y + 1);
let idx_br = self.get_index_value(self.cur_idx_x + 1, self.cur_idx_y + 1);
self.cur_idx_x += 1;
let ndc_tl = &self.ndc[idx_tl];
let ndc_tr = &self.ndc[idx_tr];
let ndc_bl = &self.ndc[idx_bl];
let ndc_br = &self.ndc[idx_br];
match (ndc_tl, ndc_tr, ndc_bl, ndc_br) {
(Some(ndc_tl), Some(ndc_tr), Some(ndc_bl), Some(ndc_br)) => {
let t1 = Triangle::new(&ndc_tl, &ndc_tr, &ndc_bl);
let t2 = Triangle::new(&ndc_tr, &ndc_br, &ndc_bl);
if !t1.is_invalid(&self.camera) || !t2.is_invalid(&self.camera) {
self.next() // crossing projection tri
} else {
Some([
(idx_tl as u16, idx_tr as u16, idx_bl as u16),
(idx_tr as u16, idx_br as u16, idx_bl as u16)
])
}
},
_ => self.next() // out of proj
}
}
}
pub struct Triangle<'a, S>
where
S: BaseFloat
{
v1: &'a [S; 2],
v2: &'a [S; 2],
v3: &'a [S; 2],
}
impl<'a, S> Triangle<'a, S>
where
S: BaseFloat
{
pub fn new(v1: &'a [S; 2], v2: &'a [S; 2], v3: &'a [S; 2]) -> Self {
Self { v1, v2, v3 }
}
pub fn is_invalid(&self, camera: &CameraViewPort) -> bool {
let tri_ccw = self.is_ccw();
let reversed_longitude = camera.get_longitude_reversed();
(!reversed_longitude && tri_ccw) || (reversed_longitude && !tri_ccw)
}
pub fn is_ccw(&self) -> bool {
crate::math::utils::ccw_tri(&self.v1, &self.v2, &self.v3)
}
}


@@ -0,0 +1,128 @@
use cgmath::BaseFloat;
use std::ops::RangeInclusive;
use super::triangle::Triangle;
use crate::CameraViewPort;
// This iterator construct indices from a set of vertices defining
// a grid.
// Triangles that are in a clockwise order will not be renderer
// Whereas other counter-clockwise triangle will be
pub struct CCWCheckPatchIndexIter<'a> {
patch_iter: DefaultPatchIndexIter,
ndc: &'a [Option<[f32; 2]>],
camera: &'a CameraViewPort,
}
impl<'a> CCWCheckPatchIndexIter<'a> {
pub fn new(
idx_x_range: &RangeInclusive<usize>,
idx_y_range: &RangeInclusive<usize>,
num_x_vertices: usize,
ndc: &'a [Option<[f32; 2]>],
camera: &'a CameraViewPort,
) -> Self {
let patch_iter = DefaultPatchIndexIter::new(idx_x_range, idx_y_range, num_x_vertices);
Self {
patch_iter,
ndc,
camera,
}
}
}
impl<'a> Iterator for CCWCheckPatchIndexIter<'a> {
type Item = [(u16, u16, u16); 2];
fn next(&mut self) -> Option<Self::Item> {
if let Some(indices) = self.patch_iter.next() {
let idx_tl = indices[0].0;
let idx_tr = indices[0].1;
let idx_bl = indices[0].2;
let idx_br = indices[1].1;
let ndc_tl = &self.ndc[idx_tl as usize];
let ndc_tr = &self.ndc[idx_tr as usize];
let ndc_bl = &self.ndc[idx_bl as usize];
let ndc_br = &self.ndc[idx_br as usize];
match (ndc_tl, ndc_tr, ndc_bl, ndc_br) {
(Some(ndc_tl), Some(ndc_tr), Some(ndc_bl), Some(ndc_br)) => {
let t1 = Triangle::new(&ndc_tl, &ndc_tr, &ndc_bl);
let t2 = Triangle::new(&ndc_tr, &ndc_br, &ndc_bl);
if !t1.is_invalid(&self.camera) || !t2.is_invalid(&self.camera) {
self.next() // crossing projection tri
} else {
Some(indices)
}
}
_ => self.next(), // out of proj
}
} else {
None
}
}
}
pub struct DefaultPatchIndexIter {
pub idx_x_range: RangeInclusive<usize>,
pub idx_y_range: RangeInclusive<usize>,
pub num_x_vertices: usize,
cur_idx_x: usize,
cur_idx_y: usize,
}
impl DefaultPatchIndexIter {
pub fn new(
idx_x_range: &RangeInclusive<usize>,
idx_y_range: &RangeInclusive<usize>,
num_x_vertices: usize,
) -> Self {
let cur_idx_x = *idx_x_range.start();
let cur_idx_y = *idx_y_range.start();
Self {
idx_x_range: idx_x_range.clone(),
idx_y_range: idx_y_range.clone(),
num_x_vertices,
cur_idx_x,
cur_idx_y,
}
}
fn get_index_value(&self, idx_x: usize, idx_y: usize) -> usize {
idx_x + idx_y * self.num_x_vertices
}
}
impl Iterator for DefaultPatchIndexIter {
type Item = [(u16, u16, u16); 2];
fn next(&mut self) -> Option<Self::Item> {
if self.cur_idx_x == *self.idx_x_range.end() {
self.cur_idx_x = *self.idx_x_range.start();
self.cur_idx_y += 1;
if self.cur_idx_y == *self.idx_y_range.end() {
return None;
}
}
let idx_tl = self.get_index_value(self.cur_idx_x, self.cur_idx_y);
let idx_tr = self.get_index_value(self.cur_idx_x + 1, self.cur_idx_y);
let idx_bl = self.get_index_value(self.cur_idx_x, self.cur_idx_y + 1);
let idx_br = self.get_index_value(self.cur_idx_x + 1, self.cur_idx_y + 1);
self.cur_idx_x += 1;
Some([
(idx_tl as u16, idx_tr as u16, idx_bl as u16),
(idx_tr as u16, idx_br as u16, idx_bl as u16),
])
}
}


@@ -0,0 +1,2 @@
pub mod index_patch;
pub mod triangle;


@@ -0,0 +1,31 @@
use crate::CameraViewPort;
use cgmath::BaseFloat;
pub struct Triangle<'a, S>
where
S: BaseFloat,
{
v1: &'a [S; 2],
v2: &'a [S; 2],
v3: &'a [S; 2],
}
impl<'a, S> Triangle<'a, S>
where
S: BaseFloat,
{
pub fn new(v1: &'a [S; 2], v2: &'a [S; 2], v3: &'a [S; 2]) -> Self {
Self { v1, v2, v3 }
}
pub fn is_invalid(&self, camera: &CameraViewPort) -> bool {
let tri_ccw = self.is_ccw();
let reversed_longitude = camera.get_longitude_reversed();
(!reversed_longitude && tri_ccw) || (reversed_longitude && !tri_ccw)
}
pub fn is_ccw(&self) -> bool {
crate::math::utils::ccw_tri(&self.v1, &self.v2, &self.v3)
}
}


@@ -1,12 +1,9 @@
use crate::downloader::{query, Downloader};
use crate::renderable::HiPS;
use crate::time::{DeltaTime, Time};
use crate::Abort;
use std::collections::{VecDeque};
use std::collections::VecDeque;
const MAX_NUM_TILE_FETCHING: isize = 8;
const MAX_QUERY_QUEUE_LENGTH: usize = 100;
@@ -15,15 +12,18 @@ pub struct TileFetcherQueue {
// A stack of queries to fetch
queries: VecDeque<query::Tile>,
base_tile_queries: Vec<query::Tile>,
tiles_fetched_time: Time,
}
impl TileFetcherQueue {
pub fn new() -> Self {
let queries = VecDeque::new();
let base_tile_queries = Vec::new();
let tiles_fetched_time = Time::now();
Self {
queries,
base_tile_queries,
tiles_fetched_time,
}
}
@@ -48,8 +48,19 @@ impl TileFetcherQueue {
self.base_tile_queries.push(query);
}
pub fn notify(&mut self, downloader: &mut Downloader) {
self.fetch(downloader);
pub fn notify(&mut self, downloader: &mut Downloader, dt: Option<DeltaTime>) {
// notify all the x ms
let now = Time::now();
if let Some(dt) = dt {
if now - self.tiles_fetched_time >= dt {
self.tiles_fetched_time = now;
self.fetch(downloader);
}
} else {
self.tiles_fetched_time = now;
self.fetch(downloader);
}
}
fn fetch(&mut self, downloader: &mut Downloader) {


@@ -50,6 +50,7 @@ import { ContextMenu } from "./gui/ContextMenu.js";
import { ALEvent } from "./events/ALEvent.js";
import { Color } from './Color.js';
import { ImageFITS } from "./ImageFITS.js";
import { VRButton } from "./VRButton.js";
import { DefaultActionsForContextMenu } from "./DefaultActionsForContextMenu.js";
import A from "./A.js";
@@ -458,7 +459,8 @@ export let Aladin = (function () {
//this.discoverytree = new DiscoveryTree(this);
//}
this.view.redraw();
// [ ] That might pose problems
//this.view.redraw();
// go to full screen ?
if (options.fullScreen) {
@@ -471,6 +473,11 @@ export let Aladin = (function () {
this.contextMenu = new ContextMenu(this);
this.contextMenu.attachTo(this.view.catalogCanvas, DefaultActionsForContextMenu.getDefaultActions(this));
}
// initialize the VR button
if (options.vr) {
this.aladinDiv.appendChild(VRButton.createButton(this.view));
}
};
/**** CONSTANTS ****/
@@ -489,7 +496,7 @@ export let Aladin = (function () {
target: "0 +0",
cooFrame: "J2000",
fov: 60,
backgroundColor: "rgb(0, 0, 0)",
backgroundColor: "rgb(60, 60, 60)",
showReticle: true,
showZoomControl: true,
showFullscreenControl: true,
@@ -671,6 +678,11 @@ export let Aladin = (function () {
});
};
// @API
Aladin.prototype.setRenderer = function(renderer) {
this.options.vr.renderer = renderer;
}
Aladin.prototype.setFrame = function (frameName) {
if (!frameName) {
return;


@@ -163,31 +163,48 @@ export let Circle = (function() {
x: centerXyview[0],
y: centerXyview[1],
};
// compute value of radius in pixels in current projection
var ra = this.centerRaDec[0];
var dec = this.centerRaDec[1] + (ra>0 ? - this.radiusDegrees : this.radiusDegrees);
// First check, the point in the circle is defined
let circlePtXyView = AladinUtils.radecToViewXy(ra, dec, view);
if (!circlePtXyView) {
// the circle border goes out of the projection
// we do not draw it
return;
let hidden = true;
var ra, dec, vertOnCircle, dx, dy;
if (view.fov > 90) {
this.radius = Number.POSITIVE_INFINITY;
// Project 4 points lying on the circle and take the minimal dist with the center as radius
[[-1, 0], [1, 0], [0, -1], [0, 1]].forEach(([cardDirRa, cardDirDec]) => {
ra = this.centerRaDec[0] + cardDirRa * this.radiusDegrees;
dec = this.centerRaDec[1] + cardDirDec * this.radiusDegrees;
vertOnCircle = AladinUtils.radecToViewXy(ra, dec, view);
if (vertOnCircle) {
dx = vertOnCircle[0] - this.center.x;
dy = vertOnCircle[1] - this.center.y;
this.radius = Math.min(Math.sqrt(dx*dx + dy*dy), this.radius);
hidden = false;
}
});
} else {
ra = this.centerRaDec[0] + this.radiusDegrees;
dec = this.centerRaDec[1];
vertOnCircle = AladinUtils.radecToViewXy(ra, dec, view);
if (vertOnCircle) {
dx = vertOnCircle[0] - this.center.x;
dy = vertOnCircle[1] - this.center.y;
this.radius = Math.sqrt(dx*dx + dy*dy);
hidden = false;
}
}
// Second check, the radius is not too big in the clipping space
let [x1c, y1c] = AladinUtils.viewXyToClipXy(this.center.x, this.center.y, view);
let [x2c, y2c] = AladinUtils.viewXyToClipXy(circlePtXyView[0], circlePtXyView[1], view);
let mag2 = (x1c - x2c)*(x1c - x2c) + (y1c - y2c)*(y1c - y2c);
if (mag2 > 0.2) {
if (hidden) {
return;
}
// Then we can draw
var dx = circlePtXyView[0] - this.center.x;
var dy = circlePtXyView[1] - this.center.y;
this.radius = Math.sqrt(dx*dx + dy*dy);
var baseColor = this.color;
if (! baseColor && this.overlay) {


@@ -46,13 +46,15 @@ export let Footprint= (function() {
this.shapes = shapes;
this.isShowing = true;
this.overlay = null;
};
Footprint.prototype.setCatalog = function(catalog) {
if (this.source) {
this.source.setCatalog(catalog);
}
};
};
Footprint.prototype.show = function() {
if (this.isShowing) {
@@ -91,7 +93,7 @@ export let Footprint= (function() {
Footprint.prototype.setSelectionColor = function(color) {
this.shapes.forEach((shape) => shape.setSelectionColor(color))
};
Footprint.prototype.isFootprint = function() {
return true;
}
@@ -125,6 +127,10 @@ export let Footprint= (function() {
return this.source && this.source.catalog;
};
Footprint.prototype.setOverlay = function(overlay) {
this.overlay = overlay;
};
Footprint.prototype.intersectsBBox = function(x, y, w, h, view) {
if(this.source) {
let s = this.source;


@@ -70,7 +70,6 @@ HiPSProperties.fetchFromID = async function(ID) {
}
HiPSProperties.fetchFromUrl = async function(urlOrId) {
let addTextExt = false;
try {
urlOrId = new URL(urlOrId);
} catch (e) {
@@ -80,7 +79,7 @@ HiPSProperties.fetchFromUrl = async function(urlOrId) {
urlOrId = new URL(urlOrId);
addTextExt = true;
} catch(e) {
throw e;
}
@@ -97,10 +96,6 @@ HiPSProperties.fetchFromUrl = async function(urlOrId) {
}
url = url + '/properties';
if (addTextExt) {
url = url + '.txt';
}
// make URL absolute
url = Utils.getAbsoluteURL(url);
// fix for HTTPS support --> will work for all HiPS served by CDS


@@ -41,11 +41,48 @@ import { ProjectionEnum, projectionNames } from "./ProjectionEnum.js";
export let Polyline= (function() {
function _calculateMag2ForNoSinProjections(line, view) {
// check if the line is too big (in the clip space) to be drawn
const [x1, y1] = AladinUtils.viewXyToClipXy(line.x1, line.y1, view);
const [x2, y2] = AladinUtils.viewXyToClipXy(line.x2, line.y2, view);
const mag2 = (x1 - x2)*(x1 - x2) + (y1 - y2)*(y1 - y2);
return mag2;
}
function _isAcrossCollignonZoneForHpxProjection(line, view) {
const [x1, y1] = AladinUtils.viewXyToClipXy(line.x1, line.y1, view);
const [x2, y2] = AladinUtils.viewXyToClipXy(line.x2, line.y2, view);
// x, y, between -1 and 1
let triIdxCollignionZone = function(x, y) {
let xZone = Math.floor((x * 0.5 + 0.5) * 4.0);
return xZone + 4 * (y > 0.0);
};
let isInCollignionZone = function(x, y) {
return Math.abs(y) > 0.5;
};
if (isInCollignionZone(x1, y1) && isInCollignionZone(x2, y2)) {
if (triIdxCollignionZone(x1, y1) === triIdxCollignionZone(x2, y2)) {
return false;
} else {
return true;
}
}
return false;
}
// constructor
let Polyline = function(radecArray, options) {
options = options || {};
this.color = options['color'] || undefined;
this.lineWidth = options["lineWidth"] || 2;
this.fill = options['fill'] || false;
this.fillColor = options['fillColor'] || undefined;
this.opacity = options['opacity'] || undefined;
this.lineWidth = options["lineWidth"] || undefined;
if (options["closed"]) {
this.closed = options["closed"];
@@ -138,7 +175,7 @@ export let Polyline= (function() {
this.overlay.reportChange();
}
};
Polyline.prototype.isFootprint = function() {
// The polyline is a footprint if it describes a polygon (i.e. a closed polyline)
return this.closed;
@@ -163,6 +200,10 @@ export let Polyline= (function() {
baseColor = '#ff0000';
}
if (!this.lineWidth) {
this.lineWidth = this.overlay.lineWidth || 2;
}
if (this.isSelected) {
if(this.selectionColor) {
ctx.strokeStyle = this.selectionColor;
@@ -204,69 +245,116 @@ export let Polyline= (function() {
}
let drawLine;
let fillPoly;
if (view.projection === ProjectionEnum.SIN) {
drawLine = (v0, v1) => {
const line = new Line(v0.x, v0.y, v1.x, v1.y);
if (line.isInsideView(view.width, view.height)) {
line.draw(ctx);
}
};
} else {
if (this.closed && this.fill) {
fillPoly = (v0, v1, index) => {
const line = new Line(v0.x, v0.y, v1.x, v1.y);
if (index === 0) {
ctx.beginPath();
ctx.moveTo(line.x1, line.y1);
} else {
ctx.lineTo(line.x1, line.y1);
}
return true;
};
}
} else if (view.projection === ProjectionEnum.HPX) {
drawLine = (v0, v1) => {
const line = new Line(v0.x, v0.y, v1.x, v1.y);
if (_isAcrossCollignonZoneForHpxProjection(line, view)) {
return;
}
if (line.isInsideView(view.width, view.height)) {
// check if the line is too big (in the clip space) to be drawn
const [x1, y1] = AladinUtils.viewXyToClipXy(line.x1, line.y1, view);
const [x2, y2] = AladinUtils.viewXyToClipXy(line.x2, line.y2, view);
const mag2 = (x1 - x2)*(x1 - x2) + (y1 - y2)*(y1 - y2);
const mag2 = _calculateMag2ForNoSinProjections(line, view);
if (mag2 < 0.1) {
line.draw(ctx);
}
}
};
}
// 3. Check whether the polygon does not cross the view
let nSegment = this.closed ? len : len - 1;
/*
let v0 = this.closed ? len - 1 : 0;
let v1 = this.closed ? 0 : 1;
let v2 = this.closed ? 1 : 2;
if (this.closed && this.fill) {
fillPoly = (v0, v1, index) => {
const line = new Line(v0.x, v0.y, v1.x, v1.y);
let drawPolygon = true;
for (var k = 0; k < nSegment; k++) {
let ccwTriOrder = ccwOrder(xyView[v0], xyView[v1], xyView[v2])
if (_isAcrossCollignonZoneForHpxProjection(line, view)) {
return;
}
if (ccwGoodOrder != ccwTriOrder) {
// if it crosses the view, we end up here
drawPolygon = false;
const mag2 = _calculateMag2ForNoSinProjections(line, view);
return;
if (mag2 < 0.1) {
if (index === 0) {
ctx.beginPath();
ctx.moveTo(line.x1, line.y1);
} else {
ctx.lineTo(line.x1, line.y1);
}
return true;
} else {
return false;
}
};
}
} else {
drawLine = (v0, v1) => {
const line = new Line(v0.x, v0.y, v1.x, v1.y);
v0 = v1;
v1 = v2;
v2 = (v2 + 1) % len;
if (line.isInsideView(view.width, view.height)) {
const mag2 = _calculateMag2ForNoSinProjections(line, view);
if (mag2 < 0.1) {
line.draw(ctx);
}
}
};
if (this.closed && this.fill) {
fillPoly = (v0, v1, index) => {
const line = new Line(v0.x, v0.y, v1.x, v1.y);
const mag2 = _calculateMag2ForNoSinProjections(line, view);
if (mag2 < 0.1) {
if (index === 0) {
ctx.beginPath();
ctx.moveTo(line.x1, line.y1);
} else {
ctx.lineTo(line.x1, line.y1);
}
return true;
} else {
return false;
}
};
}
}
if (!drawPolygon) {
return;
}*/
// 4. Finally, draw the whole polygon, segment by segment
let nSegment = this.closed ? len : len - 1;
let v0 = this.closed ? len - 1 : 0;
let v1 = this.closed ? 0 : 1;
ctx.lineWidth = this.lineWidth;
ctx.beginPath();
for (var k = 0; k < nSegment; k++) {
drawLine(xyView[v0], xyView[v1])
drawLine(xyView[v0], xyView[v1]);
v0 = v1;
v1 = v1 + 1;
@@ -275,6 +363,28 @@ export let Polyline= (function() {
if (!noStroke) {
ctx.stroke();
}
if (this.fill && this.closed) {
v0 = len - 1;
v1 = 0;
let index = 0;
for (var k = 0; k < nSegment; k++) {
if (fillPoly(xyView[v0], xyView[v1], index)) {
index++;
}
v0 = v1;
v1 = v1 + 1;
}
ctx.globalAlpha = 1;
ctx.save();
ctx.fillStyle = this.fillColor;
ctx.globalAlpha = this.opacity;
ctx.fill();
ctx.restore();
}
};
Polyline.prototype.isInStroke = function(ctx, view, x, y) {
@@ -303,7 +413,7 @@ export let Polyline= (function() {
if(this.closed) {
const line = new Line(pointXY[lastPointIdx].x, pointXY[lastPointIdx].y, pointXY[0].x, pointXY[0].y); // new segment
line.draw(ctx, true);
if (ctx.isPointInStroke(x, y)) { // x,y is on line?
return true;
}
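From the API side, the fill-related options read in the Polyline constructor above (fill, fillColor, opacity) would typically be passed when building a closed polyline; a minimal sketch, assuming the usual A.graphicOverlay / A.polygon helpers and an existing aladin instance:

    // Hypothetical usage: a polygon footprint filled with a semi-transparent color.
    const overlay = A.graphicOverlay({color: '#ee2345', lineWidth: 3});
    aladin.addOverlay(overlay);
    overlay.add(A.polygon(
        [[83.82, -5.41], [83.79, -5.44], [83.77, -5.39]],  // [ra, dec] vertices in degrees
        {fill: true, fillColor: '#00ff00', opacity: 0.3}    // options handled by the diff above
    ));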

src/js/VRButton.js (new file, 252 lines)

@@ -0,0 +1,252 @@
/**
* This is an adaptation of the original VRButton.
* Original at:
* https://github.com/mrdoob/three.js/blob/dev/examples/jsm/webxr/VRButton.js
*/
/**
* VRButton class that handles the creation of a VR session
*
* @class VRButton
*/
class VRButton {
/**
* Constructs a VRButton
*
* @static
* @param {View} view - The aladin view
* @return {HTMLButtonElement|HTMLAnchorElement} The VR mode button or an
* error message
*/
static createButton(view) {
const button = document.createElement('button');
/**
* Function for handling the process of entering VR mode.
*/
function showEnterVR(/* device*/) {
let currentSession = null;
/**
* Callback function to handle when the XR session is started
*
* @param {XRSession} session - The XR session that has been started
*/
async function onSessionStarted(session) {
session.addEventListener('end', onSessionEnded);
let gl = view.imageCanvas.getContext('webgl2');
await gl.makeXRCompatible();
session.updateRenderState({
baseLayer: new XRWebGLLayer(session, gl)
});
await view.options.vr.renderer.xr.setSession(session);
button.textContent = 'EXIT VR';
// view.options.vr.renderer.setAnimationLoop(view.redrawVR.bind(view));
session.requestReferenceSpace('local-floor').then((refSpace) => {
const xrRefSpace = refSpace;
session.requestAnimationFrame((t, frame) => {view.redrawVR(t, frame, xrRefSpace)});
});
currentSession = session;
}
/**
* Function to render the whole scene
*/
// NOTE: to be removed
function onXRAnimationFrame(t, xrFrame) {
currentSession.requestAnimationFrame(onXRAnimationFrame);
view.redrawVR();
}
/**
* Callback function to handle when the XR session ends
*/
function onSessionEnded(/* event*/) {
currentSession.removeEventListener('end', onSessionEnded);
button.textContent = 'ENTER VR';
currentSession = null;
}
//
button.style.display = '';
button.style.cursor = 'pointer';
button.style.left = 'calc(50% - 50px)';
button.style.width = '100px';
button.textContent = 'ENTER VR';
button.onmouseenter = function() {
button.style.opacity = '1.0';
};
button.onmouseleave = function() {
button.style.opacity = '0.5';
};
button.onclick = function() {
if (currentSession === null) {
// WebXR's requestReferenceSpace only works if the corresponding
// feature was requested at session creation time. For simplicity,
// just ask for the interesting ones as optional features, but be
// aware that the requestReferenceSpace call will fail if it turns
// out to be unavailable.
// ('local' is always available for immersive sessions and doesn't
// need to be requested separately.)
const sessionInit = {optionalFeatures: ['local-floor']};
navigator.xr.requestSession(
'immersive-vr', sessionInit).then(onSessionStarted);
} else {
currentSession.end();
}
};
}
/**
* Function for disabling the VR mode button
*
* @param {HTMLButtonElement} button - The VR mode button element to
* be disabled
*/
function disableButton() {
button.style.display = '';
button.style.cursor = 'auto';
button.style.left = 'calc(50% - 75px)';
button.style.width = '150px';
button.onmouseenter = null;
button.onmouseleave = null;
button.onclick = null;
}
/**
* Function for handling the case where WebXR is not supported
*
* @description This function disables the VR mode button and displays a
* message indicating that VR is not supported
*
* @param {HTMLButtonElement} button - The VR mode button element to be
* disabled and updated with a message
*/
function showWebXRNotFound() {
disableButton();
button.textContent = 'VR NOT SUPPORTED';
}
/**
* Function for handling the case where VR is not allowed due to an
* exception
*
* @description This function disables the VR mode button, logs an
* exception to the console, and displays a message indicating that VR
* is not allowed
*
* @param {any} exception - The exception object or error that indicates
* why VR is not allowed
* @param {HTMLButtonElement} button - The VR mode button element to be
* disabled and updated with a message
*/
function showVRNotAllowed(exception) {
disableButton();
console.warn('Exception when trying to call xr.isSessionSupported',
exception);
button.textContent = 'VR NOT ALLOWED';
}
/**
* Function for styling an HTML element with specific CSS properties
*
* @param {HTMLElement} element - The HTML element to be styled
*/
function stylizeElement(element) {
element.style.position = 'absolute';
element.style.bottom = '20px';
element.style.padding = '12px 6px';
element.style.border = '1px solid #fff';
element.style.borderRadius = '4px';
element.style.background = 'rgba(0,0,0,0.1)';
element.style.color = '#fff';
element.style.font = 'normal 13px sans-serif';
element.style.textAlign = 'center';
element.style.opacity = '0.5';
element.style.outline = 'none';
element.style.zIndex = '999';
}
if ('xr' in navigator) {
button.id = 'VRButton';
button.style.display = 'none';
stylizeElement(button);
navigator.xr.isSessionSupported('immersive-vr').then(function(supported) {
supported ? showEnterVR() : showWebXRNotFound();
if (supported && VRButton.xrSessionIsGranted) {
button.click();
}
}).catch(showVRNotAllowed);
return button;
} else {
const message = document.createElement('a');
if (window.isSecureContext === false) {
message.href = document.location.href.replace(/^http:/, 'https:');
message.innerHTML = 'WEBXR NEEDS HTTPS';
} else {
message.href = 'https://immersiveweb.dev/';
message.innerHTML = 'WEBXR NOT AVAILABLE';
}
message.style.left = 'calc(50% - 90px)';
message.style.width = '180px';
message.style.textDecoration = 'none';
stylizeElement(message);
return message;
}
}
/**
* Registers a listener for the "sessiongranted" event to track the XR
* session being granted.
*
* @description This method checks if the WebXR API is available and
* registers a listener for the "sessiongranted" event to track when an
* XR session is granted. It sets the `VRButton.xrSessionIsGranted`
* property to `true` when the event is triggered.
*/
static registerSessionGrantedListener() {
if ('xr' in navigator) {
// WebXRViewer (based on Firefox) has a bug where addEventListener
// throws a silent exception and aborts execution entirely.
if (/WebXRViewer\//i.test(navigator.userAgent)) return;
navigator.xr.addEventListener('sessiongranted', () => {
VRButton.xrSessionIsGranted = true;
});
}
}
}
VRButton.xrSessionIsGranted = false;
VRButton.registerSessionGrantedListener();
export {VRButton};
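Like the upstream three.js helper it adapts, the returned element is meant to be created once and appended to the page; a minimal sketch, assuming the view is reachable as aladin.view (an assumption, not shown in this diff):

    // Hypothetical wiring of the VR button into a page hosting Aladin Lite.
    import {VRButton} from './VRButton.js';

    const vrButton = VRButton.createButton(aladin.view);  // a <button>, or an <a> fallback when WebXR is unavailable
    document.body.appendChild(vrButton);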


@@ -370,7 +370,7 @@ export let View = (function () {
}
this.computeNorder();
this.redraw();
//this.redraw();
};
var pixelateCanvasContext = function (ctx, pixelateFlag) {
@@ -708,13 +708,13 @@ export let View = (function () {
}
var objClickedFunction = view.aladin.callbacksByEventName['objectClicked'];
(typeof objClickedFunction === 'function') && objClickedFunction(o);
(typeof objClickedFunction === 'function') && objClickedFunction(o, xymouse);
if (o.isFootprint()) {
var footprintClickedFunction = view.aladin.callbacksByEventName['footprintClicked'];
if (typeof footprintClickedFunction === 'function' && o != view.lastClickedObject) {
var ret = footprintClickedFunction(o);
var ret = footprintClickedFunction(o, xymouse);
}
}
@@ -738,7 +738,7 @@ export let View = (function () {
}
var objClickedFunction = view.aladin.callbacksByEventName['objectClicked'];
(typeof objClickedFunction === 'function') && objClickedFunction(null);
(typeof objClickedFunction === 'function') && objClickedFunction(null, xymouse);
view.lastClickedObject = null;
}
@@ -867,12 +867,12 @@ export let View = (function () {
view.setCursor('pointer');
if (typeof objHoveredFunction === 'function' && o != lastHoveredObject) {
var ret = objHoveredFunction(o);
var ret = objHoveredFunction(o, xymouse);
}
if (o.isFootprint()) {
if (typeof footprintHoveredFunction === 'function' && o != lastHoveredObject) {
var ret = footprintHoveredFunction(o);
var ret = footprintHoveredFunction(o, xymouse);
}
}
@@ -888,7 +888,7 @@ export let View = (function () {
if (typeof objHoveredStopFunction === 'function') {
// call callback function to notify we left the hovered object
var ret = objHoveredStopFunction(lastHoveredObject);
var ret = objHoveredStopFunction(lastHoveredObject, xymouse);
}
}
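On the caller side, the extra xymouse argument threaded through these callbacks is simply received as a second parameter; a sketch assuming the usual aladin.on registration and that xymouse holds the view pixel coordinates as {x, y}:

    // Hypothetical registration; these are the event names read from callbacksByEventName above.
    aladin.on('objectClicked', function(object, xymouse) {
        if (object === null) return;                     // click outside any object also fires the callback
        console.log('object clicked at pixel', xymouse.x, xymouse.y);
    });
    aladin.on('footprintClicked', function(footprint, xymouse) {
        console.log('footprint clicked at pixel', xymouse.x, xymouse.y);
    });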
@@ -1059,6 +1059,41 @@ export let View = (function () {
View.FPS_INTERVAL = 1000 / 140;
View.prototype.redrawVR = function (t, frame, xrRefSpace) {
const session = frame.session;
session.requestAnimationFrame((t, frame) => {this.redrawVR(t, frame, xrRefSpace)});
let pose = frame.getViewerPose(xrRefSpace);
if (!pose) return;
// Elapsed time since last loop
const now = Date.now();
const elapsedTime = now - this.then;
// If enough time has elapsed, draw the next frame
//if (elapsedTime >= View.FPS_INTERVAL) {
// Get ready for next frame by setting then=now, but also adjust for your
// specified fpsInterval not being a multiple of RAF's interval (16.7ms)
// Drawing code
try {
this.moving = this.wasm.update(elapsedTime);
} catch (e) {
console.warn(e)
}
////// 2. Draw catalogues////////
const isViewRendering = this.wasm.isRendering();
if (isViewRendering || this.needRedraw) {
this.drawAllOverlays();
}
this.needRedraw = false;
this.options.vr.animation();
}
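redrawVR above follows the standard WebXR frame-loop pattern: each callback re-arms itself on the XRSession (not on window) and bails out when no viewer pose is available. Stripped of the Aladin-specific drawing, the skeleton looks roughly like this (a sketch only; xrRefSpace is the reference space requested at session start):

    // Generic WebXR render-loop skeleton (illustrative, not the actual implementation).
    function onXRFrame(t, frame) {
        const session = frame.session;
        session.requestAnimationFrame(onXRFrame);      // schedule the next frame first
        const pose = frame.getViewerPose(xrRefSpace);  // reference space from requestReferenceSpace()
        if (!pose) return;                             // no tracking yet: skip this frame
        // ... update the scene and draw into the session's XRWebGLLayer ...
    }
    session.requestAnimationFrame(onXRFrame);          // kick off the loop once the session starts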
/**
* redraw the whole view
*/


@@ -25,6 +25,7 @@ export default defineConfig({
},
rollupOptions: {},
//formats: ["es"],
target: ["es2015", "chrome58", "edge16", "firefox57", "node12", "safari11"],
//target: ["es2015"],
// Relative to the root
outDir: resolve(__dirname, 'dist'),