Compare commits

...

24 Commits

Author SHA1 Message Date
Matthieu Baumann
e89769c87d update changelog for v3.4.2-beta 2024-07-02 18:57:51 +02:00
Matthieu Baumann
253c272262 remove some rust warnings 2024-07-02 18:55:38 +02:00
Matthieu Baumann
05c3eb5911 handle rotation for ICRS frame when exporting the WCS. CRVAL on the equator and Galactic frame with cylindrical projection are not handled. This targets issue https://github.com/cds-astro/aladin-lite/issues/170 2024-07-02 18:50:10 +02:00
Matthieu Baumann
631b2cdf4b WIP: polyline catalog renderer 2024-07-02 18:50:10 +02:00
Matthieu Baumann
3fee4a345d propose a removeHiPSFromFavorites method on the aladin object. Targets issue: https://github.com/cds-astro/aladin-lite/issues/171 2024-07-02 18:50:10 +02:00
Matthieu Baumann
2b69ae8a0d move grid rendering from line rasterizer to grid module 2024-07-02 18:50:10 +02:00
Matthieu Baumann
957f2b2414 rename coverage -> moc 2024-07-02 18:50:09 +02:00
Matthieu Baumann
d8cb01ddef cache the 12 base cell textures 2024-07-02 18:50:09 +02:00
Matthieu Baumann
1ad97180f3 move rendering part from line rasterizer to the moc renderable 2024-07-02 18:50:09 +02:00
Matthieu Baumann
8d9ca2e2b9 store shaders in the wasm, add a build:dbg vite bundle mode, projections on the gpu 2024-07-02 18:50:09 +02:00
Matthieu Baumann
776cd36969 use of instancing and impl inside the line rasterizer. Lyon is not needed anymore for plotting lines, but still used for plotting filled paths 2024-07-02 18:50:09 +02:00
Matthieu Baumann
49061a746b first commit 2024-07-02 18:50:06 +02:00
Xen0Xys
1065dbe714 Add new function to get view as different data formats 2024-06-14 10:35:53 +02:00
Xen0Xys
dcbefb6daa 📝 Update changelog 2024-06-11 08:51:12 +02:00
Xen0Xys
4d1a3f08d7 🐛 Update Aladin#getViewWCS to have RADESYS instead of RADECSYS that is now deprecated for the fits standard 2024-06-11 08:51:12 +02:00
Matthieu Baumann
2b5f8a751a add some docs on UI customization, a jsdoc conf file, fix some class links in the doc 2024-06-11 08:32:05 +02:00
Matthieu Baumann
6df2ee9757 clean rust warnings 2024-06-11 08:32:05 +02:00
Matthieu Baumann
813589bfdc restore the use of the latest version of moclibrust and cdshealpix. Still need to wait for a proper version release so that we can point towards them instead of the github repo. This should fix https://github.com/cds-astro/aladin-lite/issues/163 and https://github.com/cds-astro/aladin-lite/issues/150 2024-06-11 08:32:05 +02:00
Simon Torres
51a1c514ae Add selection color to 2024-06-10 23:42:53 +10:00
MARCHAND MANON
c881d1c01c maint: upgrade vite-top-level-await 2024-05-29 13:44:59 +02:00
Matthieu Baumann
93a7c7c642 points to moc/healpix released versions + multi selection 2024-05-29 19:26:35 +10:00
Matthieu Baumann
bada1dcecb fix: restore context after drawing overlay + rename Overlay -> GraphicOverlay 2024-05-21 17:40:22 +10:00
Philip Matsson
e080f9f7d0 Add documentation for the lineDash option 2024-05-21 17:40:22 +10:00
Philip Matsson
fea04ae118 Add line dash option to Overlay class 2024-05-21 17:40:22 +10:00
187 changed files with 3619 additions and 4095 deletions

View File

@@ -1,5 +1,12 @@
# Changelogs
## 3.4.2-beta
* [impr] Improve `WCS` view export with third Euler rotation encoding: <https://github.com/cds-astro/aladin-lite/issues/170>. Some cases remain to be handled, e.g. CRVAL on the equator or a cylindrical projection with a galactic frame rotation.
* [fixed] Change `RADECSYS` to `RADESYS` in `Aladin#getViewWCS`, following the FITS standard deprecation
* [feat] Add new method `Aladin#getViewImageBuffer` to get the current view as a PNG buffer
* [feat] New line rasterizer using GL instancing. This enhances the rendering speed of MOCs.
## 3.3.3
* [feat] UI: add a basic HiPS filter that filters the given `hipsList`
@@ -353,4 +360,4 @@ New in the API:
### End of 2013
* added progressive catalogue
* added on select, objectClicked, objectHovered
* added on select, objectClicked, objectHovered

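For orientation, here is a minimal JavaScript sketch exercising the user-facing additions listed in this changelog. Only the names (`Aladin#getViewWCS`, `Aladin#getViewImageBuffer`, the `lineDash` overlay option) are taken from the commits and diffs in this compare; the return shapes and option semantics noted in the comments are assumptions, not confirmed signatures.

```js
// Hedged usage sketch for the v3.4.2-beta additions (assumptions flagged inline).
A.init.then(() => {
    const aladin = A.aladin('#aladin-lite-div', { survey: 'CDS/P/DSS2/color', fov: 1 });

    // Dashed footprint strokes via the new lineDash option (see the overlay example below);
    // assumption: the array alternates dash/gap lengths in pixels, as with canvas setLineDash.
    const overlay = A.graphicOverlay({ color: 'cyan', lineWidth: 2, lineDash: [4, 4] });
    aladin.addOverlay(overlay);

    // The WCS export now writes RADESYS instead of the deprecated RADECSYS keyword;
    // assumption: getViewWCS() returns a plain object of FITS WCS keywords.
    const wcs = aladin.getViewWCS();
    console.log(wcs.RADESYS);

    // New PNG export of the current view; the exact return type (buffer vs. Promise)
    // is not shown in this compare, so treat this call as illustrative only.
    const png = aladin.getViewImageBuffer();
    console.log(png);
});
```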
View File

@@ -9,9 +9,10 @@
let aladin;
A.init.then(() => {
aladin = A.aladin("#aladin-lite-div", {
target: "12 25 41.512 +12 48 47.2",
inertia: false,
fov: 1,
target: "03 36 31.65 -35 17 43.1",
survey: "CDS/P/DES-DR2/ColorIRG",
fov: 3 / 60,
fullScreen: true,
showContextMenu: true,
showZoomControl: true,
showSettingsControl: true,
@@ -20,7 +21,7 @@
});
// define custom draw function
var hips = A.catalogHiPS(
/*var hips = A.catalogHiPS(
"https://axel.u-strasbg.fr/HiPSCatService/Simbad",
{
onClick: "showTable",
@@ -38,7 +39,24 @@
return A.ellipse(s.ra, s.dec, a / 60, b / 60, theta, { color: "cyan" });
},
}
);
);*/
var hips = A.catalogHiPS(
"https://axel.cds.unistra.fr/HiPSCatService/II/371/des_dr2",
{
onClick: "showTable",
name: "Simbad",
color: "cyan",
hoverColor: "red",
shape: (s) => {
let a = +s.data['Aimg']/3600;
let b = +s.data['Bimg']/3600;
let theta = +s.data['PA'];
return A.ellipse(s.ra, s.dec, a, b, theta, { color: "cyan" });
},
}
)
aladin.addCatalog(hips);
});
</script>

View File

@@ -1,7 +1,8 @@
<!doctype html>
<html>
<head>
</head>
<head>
<meta name="viewport" content="width=device-width, height=device-height, maximum-scale=1.0, initial-scale=1.0, user-scalable=no">
</head>
<body>
@@ -22,6 +23,7 @@
reticleColor: '#ff89ff', // change reticle color
reticleSize: 64, // change reticle size
showContextMenu: true,
fullScreen: true,
}
);
@@ -89,16 +91,12 @@
.myButton {
position: absolute;
bottom: 0;
bottom: 100px;
left: 0;
background-color: pink;
}
.aladin-cooFrame {
position: absolute;
top: 10rem;
}
</style>
</body>
</html>

View File

@@ -26,7 +26,6 @@ A.init.then(() => {
aladin.addCatalog(A.catalogFromURL('https://vizier.u-strasbg.fr/viz-bin/votable?-source=HIP2&-c=LMC&-out.add=_RAJ,_DEJ&-oc.form=dm&-out.meta=DhuL&-out.max=9999&-c.rm=180', {sourceSize:12, color: '#f08080'}));
aladin.addCatalog(A.catalogFromURL(vmc_cepheids, {onClick: 'showTable', sourceSize:14, color: '#fff080'}));

View File

@@ -19,11 +19,11 @@
overlay.addFootprints([
A.polygon([[83.64287, 22.01713], [83.59872, 22.01692], [83.59852, 21.97629], [83.64295, 21.97629]]),
A.polygon([[83.62807, 22.06330], [83.58397, 22.02280], [83.62792, 22.02258]]),
A.ellipse(10.6833, 41.2669, 3.33333/2, 1.1798333/2, 35, {color: 'cyan'}),
A.ellipse(10.6833, 41.2669, 3.33333/2, 1.1798333/2, 10, {color: 'cyan'}),
// NGC 3048
A.ellipse(180.470842, -18.867589, 5.2/120, 3.1/120, 80, {color: 'cyan'}),
A.ellipse(180.470842, -18.867589, 5.2/120, 3.1/120, 10, {color: 'cyan'}),
// NGC 3049
A.ellipse(180.4742, -18.8850, 3.1/120, 1.6/120, 50, {color: 'cyan'}),
A.ellipse(180.4742, -18.8850, 3.1/120, 1.6/120, 10, {color: 'cyan'}),
]);
//overlay.add(); // radius in degrees
});

View File

@@ -12,7 +12,7 @@
A.init.then(() => {
// Start up Aladin Lite
aladin = A.aladin('#aladin-lite-div', {survey: "CDS/P/DSS2/color", target: 'M 1', fov: 0.2, showContextMenu: true, fullScreen: true});
var overlay = A.graphicOverlay({color: '#ee2345', lineWidth: 3});
var overlay = A.graphicOverlay({color: '#ee2345', lineWidth: 3, lineDash: [2, 2]});
aladin.addOverlay(overlay);
overlay.addFootprints([
A.polygon([[83.64287, 22.01713], [83.59872, 22.01692], [83.59852, 21.97629], [83.64295, 21.97629]], {hoverColor: 'green'}),

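The hunk above enables dashed strokes for the whole overlay. A hedged variation, assuming `lineDash` follows the Canvas `setLineDash` convention (alternating dash and gap lengths in pixels, with an empty array meaning solid lines):

```js
// Assumed semantics only: [dash, gap] in pixels, as with CanvasRenderingContext2D.setLineDash.
var dotted = A.graphicOverlay({ color: '#ee2345', lineWidth: 3, lineDash: [1, 3] });
var solid = A.graphicOverlay({ color: '#ee2345', lineWidth: 3, lineDash: [] });
```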
View File

@@ -1,6 +1,8 @@
<!doctype html>
<html>
<head>
<meta name="viewport" content="width=device-width, height=device-height, initial-scale=1.0, user-scalable=no">
</head>
<body>
<div id="aladin-lite-div" style="width: 1024px; height: 768px"></div>
@@ -12,7 +14,7 @@
A.init.then(() => {
aladin = A.aladin('#aladin-lite-div', {showReticle: true, showSurveyStackControl: true, showOverlayStackControl: false, projection: "TAN", target: '15 16 57.636 -60 55 7.49', showProjectionControl: true, realFullscreen: true, showZoomControl: true, showSimbadPointerControl: true, showShareControl: true, showContextMenu: true, showCooGridControl: true, fullScreen: true, showCooGrid: true, fov: 90});
var moc_0_99 = A.MOCFromURL("./data//gw/gw_0.9.fits",{ name: "GW 90%", color: "#ff0000", opacity: 0.0, lineWidth: 3, fill: false, perimeter: true});
var moc_0_99 = A.MOCFromURL("./data//gw/gw_0.9.fits",{ name: "GW 90%", color: "#ff0000", opacity: 0.0, lineWidth: 10, fill: false, perimeter: true});
var moc_0_95 = A.MOCFromURL("./data/gw/gw_0.6.fits",{ name: "GW 60%", color: "#00ff00", opacity: 0.5, lineWidth: 3, fill: true, perimeter: true});
var moc_0_5 = A.MOCFromURL("./data/gw/gw_0.3.fits",{ name: "GW 30%", color: "#00ffff", opacity: 0.5, lineWidth: 3, fill: true, perimeter: true});
var moc_0_2 = A.MOCFromURL("./data/gw/gw_0.1.fits",{ name: "GW 10%", color: "#ff00ff", opacity: 0.5, lineWidth: 3, fill: true, perimeter: true});

View File

@@ -1,12 +1,13 @@
<!doctype html>
<html>
<head>
</head>
<head>
<meta name="viewport" content="width=device-width, height=device-height, maximum-scale=1.0, initial-scale=1.0, user-scalable=no">
</head>
<body>
<div id="aladin-lite-div" style="width: 1024px; height: 768px"></div>
<script> let aladin;
</script>
<div id="aladin-lite-div" style="width: 768px; height: 512px"></div>
<script>let aladin;</script>
<script type="module">
import A from '../src/js/A.js';
A.init.then(() => {
@@ -17,26 +18,17 @@
projection: 'AIT', // set a projection
fov: 1.5, // initial field of view in degrees
target: 'NGC 2175', // initial target
cooFrame: 'galactic', // set galactic frame
reticleColor: '#00ff00', // change reticle color
reticleSize: 40, // change reticle size
gridOptions: {color: 'pink'},
showCooGrid: false, // set the grid
fullScreen: true,
inertia: false,
showStatusBar: true,
showShareControl: true,
showSettingsControl: true,
showLayersControl: true,
showZoomControl: true,
cooFrame: 'icrs', // set icrs frame
reticleColor: '#ff89ff', // change reticle color
reticleSize: 64, // change reticle size
showContextMenu: true,
showCooGridControl: true,
//showSimbadPointerControl: true,
showFullscreenControl: true,
showCooGrid: true,
showFrame: true,
}
);
});
</script>
<style>
</style>
</body>
</html>
</html>

View File

@@ -1,7 +1,8 @@
<!doctype html>
<html>
<head>
</head>
<head>
<meta name="viewport" content="width=device-width, height=device-height, maximum-scale=1.0, initial-scale=1.0, user-scalable=no">
</head>
<body>
<div id="aladin-lite-div" style="width: 1024px; height: 768px">
<div id="ui" class="ui">
@@ -11,13 +12,13 @@
import A from '../src/js/A.js';
let aladin;
A.init.then(() => {
aladin = A.aladin('#aladin-lite-div', {target: '00 00 00 +07 00 00', fov: 130, survey: 'P/Mellinger/color'});
var moc11 = A.MOCFromURL('http://skies.esac.esa.int/HST/NICMOS/Moc.fits', {color: '#84f', lineWidth: 3, perimeter: true}, (moc) => {
aladin = A.aladin('#aladin-lite-div', {target: '00 00 00 +07 00 00', fov: 130, survey: 'P/Mellinger/color', showContextMenu: true});
var moc11 = A.MOCFromURL('http://skies.esac.esa.int/HST/NICMOS/Moc.fits', {color: '#84f', lineWidth: 3}, (moc) => {
// moc is ready
console.log(moc.contains(205.9019247, +2.4492764));
console.log(moc.contains(-205.9019247, +2.4492764));
});
var moc10 = A.MOCFromURL('https://alasky.unistra.fr/MocServer/query?ivorn=ivo%3A%2F%2FCDS%2FV%2F139%2Fsdss9&get=moc&order=11&fmt=fits', {color: '#ffffff', perimeter: true, fillColor: '#aabbcc', opacity: 0.1, lineWidth: 3});
var moc10 = A.MOCFromURL('https://alasky.unistra.fr/MocServer/query?ivorn=ivo%3A%2F%2FCDS%2FV%2F139%2Fsdss9&get=moc&order=11&fmt=fits', {color: '#ffffff', perimeter: true, fillColor: '#aabbcc', opacity: 0.3, lineWidth: 3});
var moc9 = A.MOCFromURL('https://alasky.unistra.fr/MocServer/query?ivorn=ivo%3A%2F%2FCDS%2FV%2F139%2Fsdss9&get=moc&order=4&fmt=fits', {color: '#00ff00', opacity: 0.5, lineWidth: 3, perimeter: true});
aladin.addMOC(moc11);

View File

@@ -16,6 +16,10 @@
var overlay = A.graphicOverlay({lineWidth: 2});
aladin.addOverlay(overlay);
overlay.add(A.polyline([ [2.29452158, 59.14978110], [10.12683778, 56.53733116], [14.1772154, 60.7167403], [21.45396446, 60.23528403], [28.59885697, 63.67010079] ], {color: 'green'}));
aladin.select('rect', (s) => {
console.log(s)
})
});
</script>
</body>

View File

@@ -25,6 +25,8 @@
survey3.setColormap('cubehelix', {stretch: 'asinh'});
aladin.setImageLayer(survey2);
aladin.removeHiPSFromFavorites(survey3);
});
</script>

View File

@@ -11,7 +11,7 @@
A.init.then(() => {
// Start up Aladin Lite
let aladin = A.aladin('#aladin-lite-div', {survey: "CDS/P/DSS2/color", target: 'Sgr a*', fov: 0.5, showContextMenu: true});
let aladin = A.aladin('#aladin-lite-div', {survey: "CDS/P/DSS2/color", target: 'Sgr a*', fov: 0.5, showContextMenu: true, showCooGrid: true});
// This table contains a s_region column containing stcs expressed regions
// that are automatically parsed
aladin.addCatalog(A.catalogFromURL('https://aladin.cds.unistra.fr/AladinLite/doc/API/examples/data/alma-footprints.xml', {name: 'ALMA footprints', onClick: 'showTable', hoverColor: 'lightgreen'}));

22
jsdoc.json Normal file
View File

@@ -0,0 +1,22 @@
{
"plugins": [],
"recurseDepth": 10,
"source": {
"includePattern": ".+\\.js(doc|x)?$",
"excludePattern": "(^|\\/|\\\\)_"
},
"sourceType": "module",
"tags": {
"allowUnknownTags": true,
"dictionaries": ["jsdoc","closure"]
},
"templates": {
"cleverLinks": true,
"monospaceLinks": true
},
"opts": {
"readme": "./README.md",
"destination": "./docs/",
"tutorials": "./tutorials"
}
}

View File

@@ -33,16 +33,20 @@
],
"scripts": {
"wasm": "wasm-pack build ./src/core --target web --release --out-name core -- --features webgl2 -Z build-std=panic_abort,std -Z build-std-features=panic_immediate_abort ",
"wasm:dbg": "wasm-pack build --dev ./src/core --target web --out-name core -- --features=webgl2,dbg -Z build-std=panic_abort,std -Z build-std-features=panic_immediate_abort ",
"predeploy": "npm run build && rm -rf aladin-lite*.tgz && npm pack",
"deploy": "python3 deploy/deploy.py",
"build": "npm run wasm && vite build && cp examples/index.html dist/index.html",
"build:dbg": "npm run wasm:dbg && vite build && cp examples/index.html dist/index.html",
"dev": "npm run build && vite",
"dev:dbg": "npm run build:dbg && vite",
"serve": "npm run dev",
"serve:dbg": "npm run dev:dbg",
"preview": "vite preview",
"test:build": "cd src/core && cargo test --release --features webgl2",
"test:unit": "vitest run",
"doc": "jsdoc -d doc --readme README.md src/js && cp aladin-logo.png doc/",
"doc:dev": "npm run doc && open doc/index.html"
"doc": "jsdoc -c jsdoc.json src/js src/js/shapes && cp aladin-logo.png docs/",
"doc:dev": "npm run doc && open docs/index.html"
},
"devDependencies": {
"happy-dom": "^10.11.0",
@@ -50,7 +54,7 @@
"vite": "^4.3.8",
"vite-plugin-css-injected-by-js": "^3.1.1",
"vite-plugin-glsl": "^1.1.2",
"vite-plugin-top-level-await": "^1.3.1",
"vite-plugin-top-level-await": "^1.4.1",
"vite-plugin-wasm": "^3.2.2",
"vite-plugin-wasm-pack": "^0.1.12",
"vitest": "^0.32.2"

View File

@@ -29,24 +29,27 @@ mapproj = "0.3.0"
fitsrs = "0.2.9"
wcs = "0.2.8"
colorgrad = "0.6.2"
lyon = "1.0.1"
#lyon = "1.0.1"
console_error_panic_hook = {version = "0.1.7", optional = true}
[features]
webgl1 = [ "al-core/webgl1", "al-api/webgl1", "web-sys/WebGlRenderingContext", "web-sys/AngleInstancedArrays", "web-sys/ExtSRgb", "web-sys/OesTextureFloat",]
webgl2 = [ "al-core/webgl2", "al-api/webgl2", "web-sys/WebGl2RenderingContext", "web-sys/WebGlVertexArrayObject", "web-sys/ExtColorBufferFloat",]
dbg = ['dep:console_error_panic_hook']
[dev-dependencies]
rand = "0.8"
[dependencies.healpix]
package = "cdshealpix"
git = "https://github.com/bmatthieu3/cds-healpix-rust"
branch = "polygonIntersectVertices"
git = "https://github.com/cds-astro/cds-healpix-rust"
branch = "master"
[dependencies.moclib]
package = "moc"
#path = "../../../cds-moc-rust/"
git = "https://github.com/bmatthieu3/cds-moc-rust"
branch = "cellsWithUnidirectionalNeigs"
branch = "overlap"
[dependencies.serde]
version = "^1.0.183"
@@ -68,16 +71,12 @@ version = "0.24.2"
default-features = false
features = [ "jpeg", "png",]
[build-dependencies]
# Shader preprocessing
walkdir = "2.3.2"
[profile.dev]
opt-level = "z"
debug = true
debug-assertions = true
overflow-checks = true
lto = true
panic = "unwind"
incremental = true
codegen-units = 256
rpath = false
[profile.release]
opt-level = "z"

View File

@@ -47,6 +47,7 @@ pub struct HiPSProperties {
tile_size: i32,
formats: Vec<ImageExt>,
#[allow(unused)]
is_planetary_body: Option<bool>,
bitpix: Option<i32>,
@@ -58,7 +59,9 @@ pub struct HiPSProperties {
hips_initial_dec: Option<f64>,
// Parametrable by the user
#[allow(unused)]
min_cutout: Option<f32>,
#[allow(unused)]
max_cutout: Option<f32>,
creator_did: String,

View File

@@ -6,6 +6,7 @@ use crate::webgl_ctx::WebGlContext;
pub struct ArrayBufferInstanced {
buffer: WebGlBuffer,
len: usize,
num_packed_data: usize,
offset_idx: u32,
@@ -39,7 +40,7 @@ impl ArrayBufferInstanced {
offset_idx: u32,
stride: usize,
sizes: &[usize],
_offsets: &[usize],
offsets: &[usize],
usage: u32,
data: B,
) -> ArrayBufferInstanced {
@@ -49,29 +50,43 @@ impl ArrayBufferInstanced {
let num_f32_in_buf = data.len() as i32;
let num_instances = num_f32_in_buf / (num_f32_per_instance as i32);
let len = data.len();
let buffer = gl.create_buffer().ok_or("failed to create buffer").unwrap_abort();
let buffer = gl
.create_buffer()
.ok_or("failed to create buffer")
.unwrap_abort();
// Bind the buffer
gl.bind_buffer(WebGlRenderingCtx::ARRAY_BUFFER, Some(buffer.as_ref()));
// Pass the vertices data to the buffer
f32::buffer_data_with_array_buffer_view(gl, data, WebGlRenderingCtx::ARRAY_BUFFER, usage);
// Link to the shader
let idx = offset_idx;
for (idx, (size, offset)) in sizes.iter().zip(offsets.iter()).enumerate() {
let idx = (idx as u32) + offset_idx;
f32::vertex_attrib_pointer_with_i32(gl, idx, *sizes.first().unwrap_abort() as i32, 0, 0);
gl.enable_vertex_attrib_array(idx);
f32::vertex_attrib_pointer_with_i32(
gl,
idx,
*size as i32,
stride as i32,
*offset as i32,
);
#[cfg(feature = "webgl2")]
gl.vertex_attrib_divisor(idx, 1);
#[cfg(feature = "webgl1")]
gl.ext.angles.vertex_attrib_divisor_angle(idx, 1);
gl.enable_vertex_attrib_array(idx);
#[cfg(feature = "webgl2")]
gl.vertex_attrib_divisor(idx, 1);
#[cfg(feature = "webgl1")]
gl.ext.angles.vertex_attrib_divisor_angle(idx, 1);
}
let num_packed_data = sizes.len();
let gl = gl.clone();
// Returns an instance that keeps only the buffer
ArrayBufferInstanced {
buffer,
len,
num_packed_data,
offset_idx,
@@ -119,13 +134,30 @@ impl ArrayBufferInstanced {
self.gl.disable_vertex_attrib_array(loc as u32);
}
pub fn update<'a, B: BufferDataStorage<'a, f32>>(&self, buffer: B) {
pub fn update<'a, B: BufferDataStorage<'a, f32>>(&mut self, usage: u32, data: B) {
self.bind();
f32::buffer_sub_data_with_i32_and_array_buffer_view(
if self.len >= data.len() {
f32::buffer_sub_data_with_i32_and_array_buffer_view(
&self.gl,
data,
WebGlRenderingCtx::ARRAY_BUFFER,
);
} else {
self.len = data.len();
f32::buffer_data_with_array_buffer_view(
&self.gl,
data,
WebGlRenderingCtx::ARRAY_BUFFER,
usage,
);
}
/*f32::buffer_sub_data_with_i32_and_array_buffer_view(
&self.gl,
buffer,
WebGlRenderingCtx::ARRAY_BUFFER,
);
);*/
/*self.gl.buffer_sub_data_with_i32_and_array_buffer_view(
WebGlRenderingCtx::ARRAY_BUFFER,
0,

View File

@@ -41,3 +41,20 @@ where
self.0.as_ptr()
}
}
impl<'a, T> BufferDataStorage<'a, T> for &'a [T]
where
T: VertexAttribPointerType,
{
fn get_slice(&self) -> &[T] {
self
}
fn len(&self) -> usize {
self.as_ref().len()
}
fn ptr(&self) -> *const T {
self.as_ptr()
}
}

View File

@@ -6,9 +6,6 @@ pub mod buffer_data;
pub mod element_array_buffer;
pub mod vertex_array_object;
pub use array_buffer::ArrayBuffer;
pub use array_buffer::VertexAttribPointerType;
pub use framebuffer::FrameBufferObject;
pub use vertex_array_object::vao::{
ShaderVertexArrayObjectBound, ShaderVertexArrayObjectBoundRef, VertexArrayObject,
};

View File

@@ -9,8 +9,8 @@ pub mod vao {
use crate::object::element_array_buffer::ElementArrayBuffer;
use crate::webgl_ctx::WebGlContext;
use std::collections::HashMap;
use crate::Abort;
use std::collections::HashMap;
pub struct VertexArrayObject {
array_buffer: HashMap<&'static str, ArrayBuffer>,
@@ -88,7 +88,10 @@ pub mod vao {
}*/
pub fn num_elements(&self) -> usize {
self.element_array_buffer.as_ref().unwrap_abort().num_elements()
self.element_array_buffer
.as_ref()
.unwrap_abort()
.num_elements()
}
pub fn num_instances(&self) -> i32 {
@@ -143,13 +146,14 @@ pub mod vao {
pub fn update_instanced_array<B: BufferDataStorage<'a, f32>>(
&mut self,
attr: &'static str,
usage: u32,
array_data: B,
) -> &mut Self {
self.vao
.array_buffer_instanced
.get_mut(attr)
.unwrap_abort()
.update(array_data);
.update(usage, array_data);
self
}
@@ -333,13 +337,14 @@ pub mod vao {
pub fn update_instanced_array<B: BufferDataStorage<'a, f32>>(
&mut self,
attr: &'static str,
usage: u32,
array_data: B,
) -> &mut Self {
self.vao
.array_buffer_instanced
.get_mut(attr)
.unwrap_abort()
.update(array_data);
.update(usage, array_data);
self
}
@@ -444,7 +449,10 @@ pub mod vao {
}*/
pub fn num_elements(&self) -> usize {
self.element_array_buffer.as_ref().unwrap_abort().num_elements()
self.element_array_buffer
.as_ref()
.unwrap_abort()
.num_elements()
}
pub fn num_instances(&self) -> i32 {
@@ -694,13 +702,14 @@ pub mod vao {
pub fn update_instanced_array<B: BufferDataStorage<'a, f32>>(
&mut self,
attr: &'static str,
usage: u32,
array_data: B,
) -> &mut Self {
self.vao
.array_buffer_instanced
.get_mut(attr)
.expect("cannot get attribute from the array buffer")
.update(array_data);
.update(usage, array_data);
self
}

View File

@@ -1,8 +1,8 @@
use web_sys::{WebGlProgram, WebGlShader, WebGlUniformLocation};
use wasm_bindgen::JsValue;
use web_sys::{WebGlProgram, WebGlShader, WebGlUniformLocation};
use crate::Colormaps;
use crate::webgl_ctx::WebGlRenderingCtx;
use crate::Colormaps;
fn compile_shader(
gl: &WebGlContext,
shader_type: u32,
@@ -289,17 +289,6 @@ impl UniformType for TransferFunction {
}
}
/*use al_api::hips::GrayscaleParameter;
impl SendUniforms for GrayscaleParameter {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
shader
.attach_uniforms_from(&self.h)
.attach_uniform("min_value", &self.min_value)
.attach_uniform("max_value", &self.max_value);
shader
}
}*/
use al_api::hips::HiPSColor;
use al_api::hips::ImageMetadata;
@@ -314,7 +303,7 @@ impl SendUniforms for ImageMetadata {
}
impl SendUniforms for HiPSColor {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
let reversed = self.reversed as u8 as f32;
shader
@@ -326,14 +315,17 @@ impl SendUniforms for HiPSColor {
.attach_uniform("k_brightness", &self.k_brightness)
.attach_uniform("k_contrast", &self.k_contrast)
.attach_uniform("reversed", &reversed);
shader
}
}
impl SendUniformsWithParams<Colormaps> for HiPSColor {
fn attach_uniforms_with_params<'a>(&self, shader: &'a ShaderBound<'a>, cmaps: &Colormaps) -> &'a ShaderBound<'a> {
fn attach_uniforms_with_params<'a>(
&self,
shader: &'a ShaderBound<'a>,
cmaps: &Colormaps,
) -> &'a ShaderBound<'a> {
let reversed = self.reversed as u8 as f32;
let cmap = cmaps.get(&self.cmap_name.as_ref());
@@ -347,7 +339,7 @@ impl SendUniformsWithParams<Colormaps> for HiPSColor {
.attach_uniform("k_brightness", &self.k_brightness)
.attach_uniform("k_contrast", &self.k_contrast)
.attach_uniform("reversed", &reversed);
shader
}
}
@@ -375,7 +367,11 @@ impl<'a> ShaderBound<'a> {
self
}
pub fn attach_uniforms_with_params_from<P, T: SendUniformsWithParams<P>>(&'a self, t: &T, params: &P) -> &'a Self {
pub fn attach_uniforms_with_params_from<P, T: SendUniformsWithParams<P>>(
&'a self,
t: &T,
params: &P,
) -> &'a Self {
t.attach_uniforms_with_params(self, params);
self
@@ -422,5 +418,9 @@ pub trait SendUniforms {
}
pub trait SendUniformsWithParams<T> {
fn attach_uniforms_with_params<'a>(&self, shader: &'a ShaderBound<'a>, params: &T) -> &'a ShaderBound<'a>;
fn attach_uniforms_with_params<'a>(
&self,
shader: &'a ShaderBound<'a>,
params: &T,
) -> &'a ShaderBound<'a>;
}

105
src/core/build.rs Normal file
View File

@@ -0,0 +1,105 @@
use std::{error::Error, fs};
use walkdir::WalkDir;
extern crate walkdir;
use std::io::BufRead;
// All my shaders reside in the 'src/shaders' directory
fn generate_shaders() -> std::result::Result<(), Box<dyn Error>> {
println!("generate shaders");
let mut shaders = HashMap::new();
for entry in WalkDir::new("../glsl/webgl2/")
.into_iter()
.filter_map(|e| e.ok())
{
if entry.file_type().is_file() {
let path = entry.path();
if let Some(ext) = path.extension() {
if ext == "vert" || ext == "frag" {
let file_name = path.file_name().unwrap().to_str().unwrap();
let out_file_name = path
.strip_prefix("../glsl/webgl2/")
.unwrap()
//.with_extension("")
.to_string_lossy()
.to_owned()
.replace("/", "_");
//let out_name = format!("{}/{}", OUT_PATH, out_file_name);
let src = read_shader(path)?;
shaders.insert(out_file_name, src);
//fs::write(&out_name, result)?;
println!("cargo:rerun-if-changed=src/shaders/{}", file_name);
}
}
}
}
write("src/shaders.rs".into(), shaders)?;
Ok(())
}
fn read_shader<P: AsRef<std::path::Path>>(path: P) -> std::io::Result<String> {
let path = path.as_ref();
let file = fs::File::open(path.to_str().unwrap())?;
let shader_src = std::io::BufReader::new(file)
.lines()
.flatten()
.map(|l| {
if l.starts_with("#include") {
let incl_file_names: Vec<_> = l.split_terminator(&[';', ' '][..]).collect();
let incl_file_name_rel = incl_file_names[1];
let incl_file_name = path.parent().unwrap().join(incl_file_name_rel);
read_shader(incl_file_name.to_str().unwrap()).unwrap()
} else {
l
}
})
.collect::<Vec<_>>()
.join("\n");
Ok(shader_src)
}
use std::collections::HashMap;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
pub fn write(path: PathBuf, entries: HashMap<String, String>) -> Result<(), Box<dyn Error>> {
let mut all_the_files = File::create(&path)?;
writeln!(&mut all_the_files, r#"use std::collections::HashMap;"#,)?;
writeln!(&mut all_the_files, r#""#,)?;
writeln!(&mut all_the_files, r#"#[allow(dead_code)]"#,)?;
writeln!(
&mut all_the_files,
r#"pub fn get_all() -> HashMap<&'static str, &'static str> {{"#,
)?;
writeln!(&mut all_the_files, r#" let mut out = HashMap::new();"#,)?;
for (name, content) in entries {
writeln!(
&mut all_the_files,
r##" out.insert("{name}", r#"{content}"#);"##,
)?;
}
writeln!(&mut all_the_files, r#" out"#,)?;
writeln!(&mut all_the_files, r#"}}"#,)?;
Ok(())
}
fn main() {
if let Err(err) = generate_shaders() {
// panic here for a nicer error message, otherwise it will
// be flattened to one line for some reason
panic!("Unable to generate shaders\n{}", err);
}
}

View File

@@ -2,26 +2,22 @@ use crate::{
//async_task::{BuildCatalogIndex, ParseTableTask, TaskExecutor, TaskResult, TaskType},
camera::CameraViewPort,
downloader::Downloader,
grid::ProjetedGrid,
healpix::coverage::HEALPixCoverage,
inertia::Inertia,
math::{
self,
angle::{Angle, ArcDeg, ToAngle},
angle::{Angle, ArcDeg},
lonlat::{LonLat, LonLatT},
},
renderable::grid::ProjetedGrid,
renderable::Layers,
renderable::{
catalog::Manager, coverage::MOCRenderer, line::RasterizedLineRenderer, ImageCfg, Renderer,
catalog::Manager, line::RasterizedLineRenderer, moc::MOCRenderer, ImageCfg, Renderer,
},
shader::ShaderManager,
tile_fetcher::TileFetcherQueue,
time::DeltaTime,
};
use al_core::{
info, inforec,
log::{self, console_log},
};
use wasm_bindgen::prelude::*;
@@ -56,7 +52,7 @@ pub struct App {
//ui: GuiRef,
shaders: ShaderManager,
camera: CameraViewPort,
pub camera: CameraViewPort,
downloader: Downloader,
tile_fetcher: TileFetcherQueue,
@@ -98,7 +94,7 @@ pub struct App {
colormaps: Colormaps,
projection: ProjectionType,
pub projection: ProjectionType,
// Async data receivers
fits_send: async_channel::Sender<ImageCfg>,
@@ -111,7 +107,7 @@ pub struct App {
}
use cgmath::{Vector2, Vector3};
use futures::{io::BufReader, stream::StreamExt}; // for `next`
use futures::io::BufReader; // for `next`
use crate::math::projection::*;
pub const BLENDING_ANIM_DURATION: DeltaTime = DeltaTime::from_millis(200.0); // in ms
@@ -149,6 +145,7 @@ impl App {
//gl.enable(WebGl2RenderingContext::CULL_FACE);
//gl.cull_face(WebGl2RenderingContext::BACK);
//gl.enable(WebGl2RenderingContext::CULL_FACE);
// The tile buffer responsible for the tile requests
let downloader = Downloader::new();
@@ -169,7 +166,7 @@ impl App {
let manager = Manager::new(&gl, &mut shaders, &camera, &resources)?;
// Grid definition
let grid = ProjetedGrid::new(aladin_div)?;
let grid = ProjetedGrid::new(gl.clone(), aladin_div)?;
// Variable storing the location to move to
let inertia = None;
@@ -194,7 +191,7 @@ impl App {
let request_for_new_tiles = true;
let moc = MOCRenderer::new()?;
let moc = MOCRenderer::new(&gl)?;
gl.clear_color(0.15, 0.15, 0.15, 1.0);
let (fits_send, fits_recv) = async_channel::unbounded::<ImageCfg>();
@@ -268,7 +265,7 @@ impl App {
// Move the views of the different active surveys
self.tile_fetcher.clear();
// Loop over the surveys
let raytracer = self.layers.get_raytracer();
let _raytracer = self.layers.get_raytracer();
for survey in self.layers.values_mut_hips() {
if self.camera.get_texture_depth() == 0
@@ -408,7 +405,7 @@ use al_api::cell::HEALPixCellProjeted;
use crate::downloader::request::tile::Tile;
use crate::healpix::cell::HEALPixCell;
use crate::renderable::coverage::moc::MOC;
use al_api::color::ColorRGB;
impl App {
@@ -504,7 +501,7 @@ impl App {
pub(crate) fn add_moc(
&mut self,
mut cfg: al_api::moc::MOC,
cfg: al_api::moc::MOC,
moc: HEALPixCoverage,
) -> Result<(), JsValue> {
self.moc
@@ -526,12 +523,7 @@ impl App {
pub(crate) fn set_moc_cfg(&mut self, cfg: al_api::moc::MOC) -> Result<(), JsValue> {
self.moc
.set_cfg(
cfg,
&mut self.camera,
&self.projection,
&mut self.line_renderer,
)
.set_cfg(cfg, &mut self.camera, &self.projection, &mut self.shaders)
.ok_or_else(|| JsValue::from_str("MOC not found"))?;
self.request_redraw = true;
@@ -654,7 +646,7 @@ impl App {
};
use al_core::image::ImageType;
use fitsrs::fits::Fits;
use std::{io::Cursor, rc::Rc};
use std::io::Cursor;
if let Some(image) = image.as_ref() {
match &*image.lock().unwrap_abort() {
Some(ImageType::FitsImage {
@@ -755,7 +747,7 @@ impl App {
Resource::PixelMetadata(metadata) => {
if let Some(hips) = self.layers.get_mut_hips_from_cdid(&metadata.hips_cdid)
{
let mut cfg = hips.get_config_mut();
let cfg = hips.get_config_mut();
if let Some(metadata) = *metadata.value.lock().unwrap_abort() {
cfg.blank = metadata.blank;
@@ -857,16 +849,6 @@ impl App {
Ok(has_camera_moved)
}
pub(crate) fn reset_north_orientation(&mut self) {
// Reset the rotation around the center if there is one
self.camera
.set_rotation_around_center(Angle(0.0), &self.projection);
// Reset the camera position to its current position
// this will keep the current position but reset the orientation
// so that the north pole is at the top of the center.
self.set_center(&self.get_center());
}
pub(crate) fn read_pixel(&self, pos: &Vector2<f64>, layer: &str) -> Result<JsValue, JsValue> {
if let Some(lonlat) = self.screen_to_world(pos) {
if let Some(survey) = self.layers.get_hips_from_layer(layer) {
@@ -882,7 +864,7 @@ impl App {
}
pub(crate) fn draw_grid_labels(&mut self) -> Result<(), JsValue> {
self.grid.draw_labels(&self.camera)
self.grid.draw_labels()
}
pub(crate) fn draw(&mut self, force_render: bool) -> Result<(), JsValue> {
@@ -966,26 +948,24 @@ impl App {
//let fbo_view = &self.fbo_view;
//catalogs.draw(&gl, shaders, camera, colormaps, fbo_view)?;
//catalogs.draw(&gl, shaders, camera, colormaps, None, self.projection)?;
self.line_renderer.begin();
//Time::measure_perf("moc draw", || {
self.moc.draw(
&mut self.shaders,
&mut self.camera,
&self.projection,
&mut self.line_renderer,
);
&mut self.shaders,
//&mut self.line_renderer,
)?;
self.line_renderer.begin();
//Time::measure_perf("moc draw", || {
// Ok(())
//})?;
self.grid.draw(
&self.camera,
&mut self.shaders,
&self.projection,
&mut self.line_renderer,
)?;
self.grid
.draw(&self.camera, &self.projection, &mut self.shaders)?;
self.line_renderer.end();
self.line_renderer.draw(&self.camera)?;
self.line_renderer
.draw(&mut self.shaders, &self.camera, &self.projection)?;
//let dpi = self.camera.get_dpi();
//ui.draw(&gl, dpi)?;
@@ -1320,9 +1300,9 @@ impl App {
self.camera.get_longitude_reversed()
}
pub(crate) fn add_catalog(&mut self, name: String, table: JsValue, _colormap: String) {
pub(crate) fn add_catalog(&mut self, _name: String, table: JsValue, _colormap: String) {
//let mut exec_ref = self.exec.borrow_mut();
let table = table;
let _table = table;
/*exec_ref
.spawner()
@@ -1415,10 +1395,10 @@ impl App {
pub(crate) fn world_to_screen(&self, ra: f64, dec: f64) -> Option<Vector2<f64>> {
let lonlat = LonLatT::new(ArcDeg(ra).into(), ArcDeg(dec).into());
let model_pos_xyz = lonlat.vector();
let icrs_pos = lonlat.vector();
self.projection
.view_to_screen_space(&model_pos_xyz, &self.camera)
.icrs_celestial_to_screen_space(&icrs_pos, &self.camera)
}
pub(crate) fn screen_to_world(&self, pos: &Vector2<f64>) -> Option<LonLatT<f64>> {
@@ -1449,11 +1429,11 @@ impl App {
LonLatT::new(ra, dec)
}
/// lonlat must be given in icrs frame
pub(crate) fn set_center(&mut self, lonlat: &LonLatT<f64>) {
self.prev_cam_position = self.camera.get_center().truncate();
self.camera
.set_center(lonlat, CooSystem::ICRS, &self.projection);
self.camera.set_center(lonlat, &self.projection);
self.request_for_new_tiles = true;
// And stop the current inertia as well if there is one
@@ -1544,17 +1524,17 @@ impl App {
self.inertia = Some(Inertia::new(ampl.to_radians(), axis))
}
pub(crate) fn rotate_around_center(&mut self, theta: ArcDeg<f64>) {
pub(crate) fn set_view_center_pos_angle(&mut self, theta: ArcDeg<f64>) {
self.camera
.set_rotation_around_center(theta.into(), &self.projection);
.set_view_center_pos_angle(theta.into(), &self.projection);
// New tiles can be needed and some tiles can be removed
self.request_for_new_tiles = true;
self.request_redraw = true;
}
pub(crate) fn get_rotation_around_center(&self) -> &Angle<f64> {
self.camera.get_rotation_around_center()
pub(crate) fn get_north_shift_angle(&self) -> Angle<f64> {
self.camera.get_north_shift_angle()
}
pub(crate) fn set_fov(&mut self, fov: Angle<f64>) {

View File

@@ -5,17 +5,16 @@ use crate::math::projection::coo_space::{XYZWModel, XYZWWorld, XYNDC};
use crate::math::sph_geom::region::{Intersection, PoleContained, Region};
use crate::math::{projection::Projection, sph_geom::bbox::BoundingBox};
use crate::LonLatT;
use cgmath::Vector3;
use crate::ProjectionType;
use std::iter;
fn ndc_to_world(
ndc_coo: &[XYNDC],
ndc_coo: &[XYNDC<f64>],
ndc_to_clip: &Vector2<f64>,
clip_zoom_factor: f64,
projection: &ProjectionType,
) -> Option<Vec<XYZWWorld>> {
) -> Option<Vec<XYZWWorld<f64>>> {
// Deproject the FOV from ndc to the world space
let mut world_coo = Vec::with_capacity(ndc_coo.len());
@@ -35,7 +34,7 @@ fn ndc_to_world(
Some(world_coo)
}
fn world_to_model(world_coo: &[XYZWWorld], w2m: &Matrix4<f64>) -> Vec<XYZWModel> {
fn world_to_model(world_coo: &[XYZWWorld<f64>], w2m: &Matrix4<f64>) -> Vec<XYZWModel<f64>> {
let mut model_coo = Vec::with_capacity(world_coo.len());
for w in world_coo.iter() {
@@ -61,9 +60,9 @@ const NUM_VERTICES: usize = 4 + 2 * NUM_VERTICES_WIDTH + 2 * NUM_VERTICES_HEIGHT
// This struct belongs to the CameraViewPort
pub struct FieldOfView {
// Vertices
ndc_vertices: Vec<XYNDC>,
world_vertices: Option<Vec<XYZWWorld>>,
model_vertices: Option<Vec<XYZWModel>>,
ndc_vertices: Vec<XYNDC<f64>>,
world_vertices: Option<Vec<XYZWWorld<f64>>>,
model_vertices: Option<Vec<XYZWModel<f64>>>,
reg: Region,
}
@@ -163,9 +162,9 @@ impl FieldOfView {
self.reg.intersects_meridian(lon)
}
pub fn intersects_great_circle(&self, n: &Vector3<f64>) -> Intersection {
/*pub fn intersects_great_circle(&self, n: &Vector3<f64>) -> Intersection {
self.reg.intersects_great_circle(n)
}
}*/
pub fn intersects_great_circle_arc(
&self,
@@ -183,7 +182,7 @@ impl FieldOfView {
}
}
pub fn get_vertices(&self) -> Option<&Vec<XYZWModel>> {
pub fn get_vertices(&self) -> Option<&Vec<XYZWModel<f64>>> {
self.model_vertices.as_ref()
}

View File

@@ -1,7 +1,7 @@
pub mod viewport;
use crate::math::lonlat::LonLat;
use crate::math::projection::coo_space::XYZWModel;
pub use viewport::{CameraViewPort, UserAction};
pub use viewport::CameraViewPort;
pub mod fov;
pub use fov::FieldOfView;
@@ -14,7 +14,7 @@ use crate::ProjectionType;
pub fn build_fov_coverage(
depth: u8,
fov: &FieldOfView,
camera_center: &XYZWModel,
camera_center: &XYZWModel<f64>,
camera_frame: CooSystem,
frame: CooSystem,
proj: &ProjectionType,

View File

@@ -1,63 +1,11 @@
use crate::healpix::cell::HEALPixCell;
use crate::healpix::cell::MAX_HPX_DEPTH;
use crate::camera::XYZWModel;
use crate::healpix::cell::HEALPixCell;
use crate::math::projection::*;
use crate::HEALPixCoverage;
use std::ops::Range;
use al_api::cell::HEALPixCellProjeted;
use al_core::log::console_log;
pub fn project(
cell: HEALPixCellProjeted,
camera: &CameraViewPort,
projection: &ProjectionType,
) -> Option<HEALPixCellProjeted> {
match projection {
/*ProjectionType::Hpx(_) => {
let tri_idx_in_collignon_zone = |x: f64, y: f64| -> u8 {
let zoom_factor = camera.get_clip_zoom_factor() as f32;
let x = (((x as f32) / camera.get_width()) - 0.5) * zoom_factor;
let y = (((y as f32) / camera.get_height()) - 0.5) * zoom_factor;
let x_zone = ((x + 0.5) * 4.0).floor() as u8;
x_zone + 4 * ((y > 0.0) as u8)
};
let is_in_collignon = |_x: f64, y: f64| -> bool {
let y = (((y as f32) / camera.get_height()) - 0.5)
* (camera.get_clip_zoom_factor() as f32);
!(-0.25..=0.25).contains(&y)
};
if is_in_collignon(cell.vx[0], cell.vy[0])
&& is_in_collignon(cell.vx[1], cell.vy[1])
&& is_in_collignon(cell.vx[2], cell.vy[2])
&& is_in_collignon(cell.vx[3], cell.vy[3])
{
let all_vertices_in_same_collignon_region =
tri_idx_in_collignon_zone(cell.vx[0], cell.vy[0])
== tri_idx_in_collignon_zone(cell.vx[1], cell.vy[1])
&& (tri_idx_in_collignon_zone(cell.vx[0], cell.vy[0])
== tri_idx_in_collignon_zone(cell.vx[2], cell.vy[2]))
&& (tri_idx_in_collignon_zone(cell.vx[0], cell.vy[0])
== tri_idx_in_collignon_zone(cell.vx[3], cell.vy[3]));
if !all_vertices_in_same_collignon_region {
None
} else {
Some(cell)
}
} else {
Some(cell)
}
}*/
_ => Some(cell),
}
}
use moclib::moc::{range::op::degrade::degrade, RangeMOCIterator};
pub(super) struct ViewHpxCells {
hpx_cells: [HpxCells; NUM_COOSYSTEM],
@@ -82,7 +30,7 @@ impl ViewHpxCells {
&mut self,
camera_depth: u8,
fov: &FieldOfView,
center: &XYZWModel,
center: &XYZWModel<f64>,
camera_frame: CooSystem,
proj: &ProjectionType,
// survey frame
@@ -100,7 +48,7 @@ impl ViewHpxCells {
&mut self,
camera_depth: u8,
fov: &FieldOfView,
center: &XYZWModel,
center: &XYZWModel<f64>,
camera_frame: CooSystem,
proj: &ProjectionType,
// survey frame
@@ -120,7 +68,7 @@ impl ViewHpxCells {
&mut self,
camera_depth: u8,
fov: &FieldOfView,
center: &XYZWModel,
center: &XYZWModel<f64>,
camera_frame: CooSystem,
proj: &ProjectionType,
) {
@@ -132,28 +80,38 @@ impl ViewHpxCells {
}
}
pub(super) fn get_cells<'a>(
&'a mut self,
depth: u8,
frame: CooSystem,
) -> impl Iterator<Item = &'a HEALPixCell> {
pub(super) fn get_cells(&self, depth: u8, frame: CooSystem) -> Vec<HEALPixCell> {
self.hpx_cells[frame as usize].get_cells(depth)
}
pub(super) fn get_cov(&self, frame: CooSystem) -> &HEALPixCoverage {
self.hpx_cells[frame as usize].get_cov()
}
/*pub(super) fn has_changed(&mut self) -> bool {
let mut c = false;
for (frame, num_req) in self.reg_frames.iter().enumerate() {
// if there are surveys/camera requesting the coverage
if *num_req > 0 {
c |= self.hpx_cells[frame].has_view_changed();
}
}
c
}*/
}
// Contains the cells being in the FOV for a specific
pub struct HpxCells {
frame: CooSystem,
// the set of cells all depth
cells: Vec<HEALPixCell>,
//cells: Vec<HEALPixCell>,
// An index vector referring to the indices of each depth cells
idx_rng: [Option<Range<usize>>; MAX_HPX_DEPTH as usize + 1],
//idx_rng: [Option<Range<usize>>; MAX_HPX_DEPTH as usize + 1],
// Coverage created in the frame
cov: HEALPixCoverage,
// boolean refering to if the cells in the view has changed
//new_cells: bool,
}
impl Default for HpxCells {
@@ -162,24 +120,24 @@ impl Default for HpxCells {
}
}
use crate::camera::CameraViewPort;
use al_api::coo_system::{CooSystem, NUM_COOSYSTEM};
use moclib::moc::RangeMOCIntoIterator;
use super::FieldOfView;
impl HpxCells {
pub fn new(frame: CooSystem) -> Self {
let cells = Vec::new();
//let cells = Vec::new();
let cov = HEALPixCoverage::empty(29);
let idx_rng = Default::default();
//let idx_rng = Default::default();
Self {
cells,
idx_rng,
//cells,
//idx_rng,
cov,
frame,
//new_cells: true,
}
}
@@ -191,7 +149,7 @@ impl HpxCells {
&mut self,
camera_depth: u8,
fov: &FieldOfView,
center: &XYZWModel,
center: &XYZWModel<f64>,
camera_frame: CooSystem,
proj: &ProjectionType,
) {
@@ -200,63 +158,72 @@ impl HpxCells {
super::build_fov_coverage(camera_depth, fov, center, camera_frame, self.frame, proj);
// Clear the old cells
self.cells.clear();
/*let r = self.idx_rng[camera_depth as usize]
.as_ref()
.unwrap_or(&(0..0));
let old_cells = &self.cells[r.clone()];
self.idx_rng = Default::default();
let mut new_cells = false;
// Compute the cells at the tile_depth
let tile_depth_cells_iter = self
let cells = self
.cov
.flatten_to_fixed_depth_cells()
.map(|idx| HEALPixCell(camera_depth, idx));
.enumerate()
.map(|(j, idx)| {
let c = HEALPixCell(camera_depth, idx);
let num_past = self.cells.len();
self.cells.extend(tile_depth_cells_iter);
if j >= old_cells.len() || old_cells[j] != c {
new_cells = true;
}
c
})
.collect::<Vec<_>>();
if cells.len() != old_cells.len() {
new_cells = true;
}
self.cells = cells;
let num_cur = self.cells.len();
self.idx_rng[camera_depth as usize] = Some(0..num_cur);
self.idx_rng[camera_depth as usize] = Some(num_past..num_cur);
if new_cells {
self.new_cells = true;
}*/
}
// Accessors
// depth MUST be < to camera tile depth
pub fn get_cells<'a>(&'a mut self, depth: u8) -> impl Iterator<Item = &'a HEALPixCell> {
let Range { start, end } = if let Some(idx) = self.idx_rng[depth as usize].as_ref() {
idx.start..idx.end
pub fn get_cells(&self, depth: u8) -> Vec<HEALPixCell> {
let cov_depth = self.cov.depth_max();
if depth == cov_depth {
self.cov
.flatten_to_fixed_depth_cells()
.map(move |idx| HEALPixCell(depth, idx))
.collect()
} else if depth > self.cov.depth_max() {
let cov_d = self.cov.depth_max();
let dd = depth - cov_d;
// compute the cells from the coverage
let cells_iter = self
.cov
self.cov
.flatten_to_fixed_depth_cells()
.map(|idx| {
.flat_map(move |idx| {
// idx is at depth_max
HEALPixCell(cov_d, idx).get_children_cells(dd)
})
.flatten();
// add them and store the cells for latter reuse
let num_past = self.cells.len();
self.cells.extend(cells_iter);
let num_cur = self.cells.len();
self.idx_rng[depth as usize] = Some(num_past..num_cur);
num_past..num_cur
.collect()
} else {
// compute the cells from the coverage
let degraded_moc = self.cov.degraded(depth);
let cells_iter = degraded_moc
degrade((&self.cov.0).into_range_moc_iter(), depth)
.flatten_to_fixed_depth_cells()
.map(|idx| HEALPixCell(depth, idx));
// add them and store the cells for latter reuse
let num_past = self.cells.len();
self.cells.extend(cells_iter);
let num_cur = self.cells.len();
self.idx_rng[depth as usize] = Some(num_past..num_cur);
num_past..num_cur
};
self.cells[start..end].iter()
.map(move |idx| HEALPixCell(depth, idx))
.collect()
}
}
/*
@@ -301,8 +268,9 @@ impl HpxCells {
}*/
/*#[inline]
pub fn has_view_changed(&self) -> bool {
//self.new_cells.is_there_new_cells_added()
!self.view_unchanged
pub fn has_view_changed(&mut self) -> bool {
let new_cells = self.new_cells;
self.new_cells = false;
new_cells
}*/
}

View File

@@ -6,22 +6,25 @@ pub enum UserAction {
Starting = 4,
}
// Longitude reversed identity matrix
const ID_R: &Matrix4<f64> = &Matrix4::new(
-1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
use super::{fov::FieldOfView, view_hpx_cells::ViewHpxCells};
use crate::healpix::cell::HEALPixCell;
use crate::healpix::coverage::HEALPixCoverage;
use crate::math::angle::ToAngle;
use crate::math::{projection::coo_space::XYZWModel, projection::domain::sdf::ProjDef};
use al_core::log::console_log;
use al_core::{info, inforec, log};
use cgmath::{Matrix4, Vector2};
pub struct CameraViewPort {
// The field of view angle
aperture: Angle<f64>,
center: Vector4<f64>,
// The rotation of the camera
rotation_center_angle: Angle<f64>,
center: Vector4<f64>,
w2m_rot: Rotation<f64>,
center_rot: Angle<f64>,
w2m: Matrix4<f64>,
m2w: Matrix4<f64>,
@@ -98,8 +101,8 @@ impl CameraViewPort {
let w2m = Matrix4::identity();
let m2w = w2m;
let center = Vector4::new(0.0, 0.0, 1.0, 1.0);
let center_rot = Angle(0.0);
let center = Vector4::new(0.0, 0.0, 0.0, 1.0);
let moved = false;
let zoomed = false;
@@ -119,9 +122,6 @@ impl CameraViewPort {
let width = width * dpi;
let height = height * dpi;
//let dpi = 1.0;
//gl.scissor(0, 0, width as i32, height as i32);
let aspect = height / width;
let ndc_to_clip = Vector2::new(1.0, (height as f64) / (width as f64));
let clip_zoom_factor = 1.0;
@@ -131,7 +131,6 @@ impl CameraViewPort {
let is_allsky = true;
let time_last_move = Time::now();
let rotation_center_angle = Angle(0.0);
let reversed_longitude = false;
let texture_depth = 0;
@@ -140,6 +139,7 @@ impl CameraViewPort {
CameraViewPort {
// The field of view angle
aperture,
center_rot,
center,
// The rotation of the camera
w2m_rot,
@@ -147,7 +147,6 @@ impl CameraViewPort {
m2w,
dpi,
rotation_center_angle,
// The width over height ratio
aspect,
// The width of the screen in pixels
@@ -206,15 +205,15 @@ impl CameraViewPort {
);
}
/*pub fn has_new_hpx_cells(&mut self) -> bool {
self.view_hpx_cells.has_changed()
}*/
pub fn get_cov(&self, frame: CooSystem) -> &HEALPixCoverage {
self.view_hpx_cells.get_cov(frame)
}
pub fn get_hpx_cells<'a>(
&'a mut self,
mut depth: u8,
frame: CooSystem,
) -> impl Iterator<Item = &'a HEALPixCell> {
pub fn get_hpx_cells(&self, depth: u8, frame: CooSystem) -> Vec<HEALPixCell> {
self.view_hpx_cells.get_cells(depth, frame)
}
@@ -228,12 +227,12 @@ impl CameraViewPort {
// check the projection
match proj {
ProjectionType::Tan(_) => self.aperture >= 100.0_f64.to_radians().to_angle(),
ProjectionType::Mer(_) => self.aperture >= 200.0_f64.to_radians().to_angle(),
ProjectionType::Mer(_) => self.aperture >= 120.0_f64.to_radians().to_angle(),
ProjectionType::Stg(_) => self.aperture >= 200.0_f64.to_radians().to_angle(),
ProjectionType::Sin(_) => false,
ProjectionType::Ait(_) => false,
ProjectionType::Mol(_) => false,
ProjectionType::Zea(_) => false,
ProjectionType::Ait(_) => self.aperture >= 100.0_f64.to_radians().to_angle(),
ProjectionType::Mol(_) => self.aperture >= 100.0_f64.to_radians().to_angle(),
ProjectionType::Zea(_) => self.aperture >= 140.0_f64.to_radians().to_angle(),
}
}
@@ -341,7 +340,7 @@ impl CameraViewPort {
self.last_user_action
};
let can_unzoom_more = match proj {
let _can_unzoom_more = match proj {
ProjectionType::Tan(_)
| ProjectionType::Mer(_)
//| ProjectionType::Air(_)
@@ -443,7 +442,7 @@ impl CameraViewPort {
(smallest_cell_size_px / w_screen_px) * self.get_aperture().to_radians();
while depth_pixel > 0 {
if (crate::healpix::utils::MEAN_HPX_CELL_RES[depth_pixel] > hpx_cell_size_rad) {
if crate::healpix::utils::MEAN_HPX_CELL_RES[depth_pixel] > hpx_cell_size_rad {
break;
}
@@ -474,10 +473,11 @@ impl CameraViewPort {
self.update_rot_matrices(proj);
}
pub fn set_center(&mut self, lonlat: &LonLatT<f64>, coo_sys: CooSystem, proj: &ProjectionType) {
/// lonlat must be given in icrs frame
pub fn set_center(&mut self, lonlat: &LonLatT<f64>, proj: &ProjectionType) {
let icrs_pos: Vector4<_> = lonlat.vector();
let view_pos = coosys::apply_coo_system(coo_sys, self.get_coo_system(), &icrs_pos);
let view_pos = CooSystem::ICRS.to(self.get_coo_system()) * icrs_pos;
let rot = Rotation::from_sky_position(&view_pos);
// Apply the rotation to the camera to go
@@ -524,13 +524,8 @@ impl CameraViewPort {
if self.reversed_longitude != reversed_longitude {
self.reversed_longitude = reversed_longitude;
self.rotation_center_angle = -self.rotation_center_angle;
self.update_rot_matrices(proj);
}
// The camera is reversed => it has moved
self.moved = true;
self.time_last_move = Time::now();
}
pub fn get_longitude_reversed(&self) -> bool {
@@ -558,7 +553,7 @@ impl CameraViewPort {
self.clip_zoom_factor
}
pub fn get_vertices(&self) -> Option<&Vec<XYZWModel>> {
pub fn get_vertices(&self) -> Option<&Vec<XYZWModel<f64>>> {
self.fov.get_vertices()
}
@@ -596,14 +591,17 @@ impl CameraViewPort {
self.zoomed = false;
}
#[inline]
pub fn get_aperture(&self) -> Angle<f64> {
self.aperture
}
#[inline]
pub fn get_center(&self) -> &Vector4<f64> {
&self.center
}
#[inline]
pub fn is_allsky(&self) -> bool {
self.is_allsky
}
@@ -616,13 +614,14 @@ impl CameraViewPort {
self.coo_sys
}
pub fn set_rotation_around_center(&mut self, theta: Angle<f64>, proj: &ProjectionType) {
self.rotation_center_angle = theta;
pub fn set_view_center_pos_angle(&mut self, phi: Angle<f64>, proj: &ProjectionType) {
self.center_rot = phi;
self.update_rot_matrices(proj);
}
pub fn get_rotation_around_center(&self) -> &Angle<f64> {
&self.rotation_center_angle
pub fn get_north_shift_angle(&self) -> Angle<f64> {
(self.w2m.x.y).atan2(self.w2m.y.y).to_angle()
}
}
use crate::ProjectionType;
@@ -654,21 +653,14 @@ impl CameraViewPort {
}
fn update_center(&mut self) {
// Longitude reversed identity matrix
const ID_R: &Matrix4<f64> = &Matrix4::new(
-1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
);
// The center position is on the 3rd column of the w2m matrix
self.center = self.w2m.z;
let axis = &self.center.truncate();
let center_rot = Rotation::from_axis_angle(axis, self.rotation_center_angle);
// The center position is on the 3rd column of the w2m matrix
let center_axis = &self.center.truncate();
// Re-update the model matrix to take into account the rotation
// by theta around the center axis
let final_rot = center_rot * self.w2m_rot;
self.w2m = (&final_rot).into();
let r = Rotation::from_axis_angle(center_axis, self.center_rot) * self.w2m_rot;
self.w2m = (&r).into();
if self.reversed_longitude {
self.w2m = self.w2m * ID_R;
}
@@ -681,15 +673,8 @@ use al_core::shader::{SendUniforms, ShaderBound};
impl SendUniforms for CameraViewPort {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
shader
//.attach_uniforms_from(&self.last_user_action)
//.attach_uniform("to_icrs", &self.system.to_icrs_j2000::<f32>())
//.attach_uniform("to_galactic", &self.system.to_gal::<f32>())
//.attach_uniform("model", &self.w2m)
//.attach_uniform("inv_model", &self.m2w)
.attach_uniform("ndc_to_clip", &self.ndc_to_clip) // Send ndc to clip
.attach_uniform("czf", &self.clip_zoom_factor) // Send clip zoom factor
.attach_uniform("window_size", &self.get_screen_size()) // Window size
.attach_uniform("fov", &self.aperture);
.attach_uniform("czf", &self.clip_zoom_factor); // Send clip zoom factor
shader
}

View File

@@ -3,7 +3,7 @@ use cgmath::{BaseFloat, Vector4};
use al_api::coo_system::CooBaseFloat;
use al_api::coo_system::CooSystem;
use crate::math::lonlat::LonLat;
/// This is a conversion method returning a transformation
/// matrix when the system requested by the user is not
/// icrs j2000.

View File

@@ -1,7 +1,7 @@
pub mod query;
pub mod request;
use crate::renderable::Url;
use std::collections::HashSet;
use query::QueryId;

View File

@@ -11,7 +11,7 @@ pub trait Query: Sized {
pub type QueryId = String;
use al_core::image::format::ImageFormatType;
use al_core::log::console_log;
#[derive(Eq, Hash, PartialEq, Clone)]
pub struct Tile {
pub cell: HEALPixCell,

View File

@@ -28,7 +28,6 @@ use crate::renderable::Url;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, RequestMode, Response};
use crate::downloader::query::Query;
use al_core::{image::raw::ImageBuffer, texture::pixel::Pixel};
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;

View File

@@ -38,7 +38,6 @@ impl From<PixelMetadataRequest> for RequestType {
}
}
use crate::downloader::query::Query;
use crate::renderable::Url;
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;

View File

@@ -2,7 +2,7 @@ use crate::downloader::query;
use crate::renderable::CreatorDid;
use super::{Request, RequestType};
use crate::downloader::QueryId;
use crate::healpix::coverage::Smoc;
use moclib::deser::fits::MocType;
use moclib::qty::Hpx;
@@ -37,7 +37,7 @@ pub fn from_fits_hpx<T: Idx>(moc: MocType<T, Hpx<T>, Cursor<&[u8]>>) -> Smoc {
}
}
}
use crate::downloader::query::Query;
use crate::healpix::coverage::HEALPixCoverage;
use crate::Abort;
use moclib::deser::fits::MocIdxType;

View File

@@ -87,7 +87,7 @@ pub enum RequestType {
PixelMetadata(PixelMetadataRequest),
Moc(MOCRequest), //..
}
use super::query::Url;
use crate::downloader::QueryId;
impl RequestType {
pub fn id(&self) -> &QueryId {

View File

@@ -6,7 +6,6 @@ use crate::downloader::query;
use al_core::image::ImageType;
use super::{Request, RequestType};
use crate::downloader::query::Query;
use crate::downloader::QueryId;
pub struct TileRequest {

View File

@@ -6,7 +6,6 @@ pub struct HEALPixCell(pub u8, pub u64);
#[derive(Debug)]
pub struct CellVertices {
pub vertices: Vec<Box<[(f64, f64)]>>,
pub closed: bool,
}
const BIT_MASK_ALL_ONE_EXCEPT_FIRST: u32 = !0x1;
@@ -281,26 +280,21 @@ impl HEALPixCell {
self.path_along_cell_side(Cardinal::S, Cardinal::E, false, *se),
self.path_along_cell_side(Cardinal::E, Cardinal::N, true, *ne),
],
closed: true,
}),
// no edges
(None, None, None, None) => None,
// 1 edge found
(Some(s), None, None, None) => Some(CellVertices {
vertices: vec![self.path_along_cell_side(Cardinal::N, Cardinal::W, true, *s)],
closed: false,
}),
(None, Some(s), None, None) => Some(CellVertices {
vertices: vec![self.path_along_cell_side(Cardinal::W, Cardinal::S, true, *s)],
closed: false,
}),
(None, None, Some(s), None) => Some(CellVertices {
vertices: vec![self.path_along_cell_side(Cardinal::S, Cardinal::E, true, *s)],
closed: false,
}),
(None, None, None, Some(s)) => Some(CellVertices {
vertices: vec![self.path_along_cell_side(Cardinal::E, Cardinal::N, true, *s)],
closed: false,
}),
// 2 edges cases
(Some(nw), Some(sw), None, None) => Some(CellVertices {
@@ -308,42 +302,36 @@ impl HEALPixCell {
&[Cardinal::N, Cardinal::W, Cardinal::S],
&[*nw, *sw],
)],
closed: false,
}),
(Some(nw), None, Some(se), None) => Some(CellVertices {
vertices: vec![
self.path_along_cell_side(Cardinal::N, Cardinal::W, true, *nw),
self.path_along_cell_side(Cardinal::S, Cardinal::E, true, *se),
],
closed: false,
}),
(Some(nw), None, None, Some(ne)) => Some(CellVertices {
vertices: vec![chain_edge_vertices(
&[Cardinal::E, Cardinal::N, Cardinal::W],
&[*ne, *nw],
)],
closed: false,
}),
(None, Some(sw), Some(se), None) => Some(CellVertices {
vertices: vec![chain_edge_vertices(
&[Cardinal::W, Cardinal::S, Cardinal::E],
&[*sw, *se],
)],
closed: false,
}),
(None, Some(sw), None, Some(ne)) => Some(CellVertices {
vertices: vec![
self.path_along_cell_side(Cardinal::W, Cardinal::S, true, *sw),
self.path_along_cell_side(Cardinal::E, Cardinal::N, true, *ne),
],
closed: false,
}),
(None, None, Some(se), Some(ne)) => Some(CellVertices {
vertices: vec![chain_edge_vertices(
&[Cardinal::S, Cardinal::E, Cardinal::N],
&[*se, *ne],
)],
closed: false,
}),
// 3 edges cases
(Some(nw), Some(sw), Some(se), None) => Some(CellVertices {
@@ -351,28 +339,24 @@ impl HEALPixCell {
&[Cardinal::N, Cardinal::W, Cardinal::S, Cardinal::E],
&[*nw, *sw, *se],
)],
closed: false,
}),
(Some(nw), Some(sw), None, Some(ne)) => Some(CellVertices {
vertices: vec![chain_edge_vertices(
&[Cardinal::E, Cardinal::N, Cardinal::W, Cardinal::S],
&[*ne, *nw, *sw],
)],
closed: false,
}),
(Some(nw), None, Some(se), Some(ne)) => Some(CellVertices {
vertices: vec![chain_edge_vertices(
&[Cardinal::S, Cardinal::E, Cardinal::N, Cardinal::W],
&[*se, *ne, *nw],
)],
closed: false,
}),
(None, Some(sw), Some(se), Some(ne)) => Some(CellVertices {
vertices: vec![chain_edge_vertices(
&[Cardinal::W, Cardinal::S, Cardinal::E, Cardinal::N],
&[*sw, *se, *ne],
)],
closed: false,
}),
}
}
@@ -468,6 +452,7 @@ pub fn nside2depth(nside: u32) -> u8 {
crate::math::utils::log_2_unchecked(nside) as u8
}
#[cfg(test)]
mod tests {
use super::HEALPixCell;

View File

@@ -2,8 +2,12 @@ use crate::math::lonlat::LonLatT;
use crate::math::PI;
use crate::math::{self, lonlat::LonLat};
use cgmath::{Vector3, Vector4};
use moclib::{moc::range::RangeMOC, qty::Hpx, ranges::SNORanges};
use cgmath::{Vector4};
use moclib::{
moc::range::{CellSelection, RangeMOC},
qty::Hpx,
ranges::SNORanges,
};
pub type Smoc = RangeMOC<u64, Hpx<u64>>;
use crate::healpix::cell::HEALPixCell;
@@ -29,8 +33,12 @@ impl HEALPixCoverage {
.collect::<Vec<_>>();
let LonLatT(in_lon, in_lat) = inside.lonlat();
let moc =
RangeMOC::from_polygon_with_control_point(&lonlat[..], (in_lon.0, in_lat.0), depth);
let moc = RangeMOC::from_polygon_with_control_point(
&lonlat[..],
(in_lon.0, in_lat.0),
depth,
CellSelection::All,
);
HEALPixCoverage(moc)
}
@@ -64,6 +72,7 @@ impl HEALPixCoverage {
rad,
depth,
0,
CellSelection::All,
))
}
}

View File

@@ -64,6 +64,7 @@ impl IdxVec {
}
// Create an index vector from a list of segments
#[allow(unused)]
pub fn from_great_circle_arc(arcs: &mut [GreatCircleArc]) -> Self {
arcs.sort_unstable_by(|a1, a2| {
let bbox1 = a1.get_containing_hpx_cell();

View File

@@ -30,6 +30,7 @@ pub fn vertices_lonlat<S: BaseFloat>(cell: &HEALPixCell) -> [LonLatT<S>; 4] {
}
use crate::Abort;
/// Get the grid
#[allow(dead_code)]
pub fn grid_lonlat<S: BaseFloat>(cell: &HEALPixCell, n_segments_by_side: u16) -> Vec<LonLatT<S>> {
debug_assert!(n_segments_by_side > 0);
healpix::nested::grid(cell.depth(), cell.idx(), n_segments_by_side)

View File

@@ -1,2 +0,0 @@
pub mod fits;
pub mod jpg;

View File

@@ -16,7 +16,8 @@
//extern crate itertools_num;
//extern crate num;
//extern crate num_traits;
use crate::time::Time;
//use crate::time::Time;
#[cfg(feature = "dbg")]
use std::panic;
pub trait Abort {
@@ -73,9 +74,8 @@ extern "C" {
#[macro_use]
mod utils;
use al_core::log::console_log;
use math::projection::*;
use renderable::coverage::moc::MOC;
//use votable::votable::VOTableWrapper;
use wasm_bindgen::prelude::*;
use web_sys::HtmlElement;
@@ -85,11 +85,11 @@ use crate::math::angle::ToAngle;
mod app;
pub mod async_task;
mod camera;
mod shaders;
mod coosys;
mod downloader;
mod fifo_cache;
mod grid;
mod healpix;
mod inertia;
pub mod math;
@@ -141,7 +141,6 @@ pub struct WebClient {
use al_api::hips::ImageMetadata;
use std::convert::TryInto;
#[wasm_bindgen]
impl WebClient {
/// Create the Aladin Lite webgl backend
@@ -154,23 +153,19 @@ impl WebClient {
#[wasm_bindgen(constructor)]
pub fn new(
aladin_div: &HtmlElement,
shaders: JsValue,
//_shaders: JsValue,
resources: JsValue,
) -> Result<WebClient, JsValue> {
//panic::set_hook(Box::new(console_error_panic_hook::hook));
#[cfg(feature = "dbg")]
panic::set_hook(Box::new(console_error_panic_hook::hook));
let shaders = serde_wasm_bindgen::from_value(shaders)?;
//let shaders = serde_wasm_bindgen::from_value(shaders)?;
let resources = serde_wasm_bindgen::from_value(resources)?;
let gl = WebGlContext::new(aladin_div)?;
let shaders = ShaderManager::new(&gl, shaders).unwrap_abort();
let shaders = ShaderManager::new().unwrap_abort();
// Event listeners callbacks
//let callback_position_changed = js_sys::Function::new_no_args("");
let app = App::new(
&gl, aladin_div, shaders, resources,
//callback_position_changed,
)?;
let app = App::new(&gl, aladin_div, shaders, resources)?;
let dt = DeltaTime::zero();
@@ -499,20 +494,30 @@ impl WebClient {
/// # Arguments
///
/// * `theta` - The rotation angle in degrees
#[wasm_bindgen(js_name = setRotationAroundCenter)]
pub fn rotate_around_center(&mut self, theta: f64) -> Result<(), JsValue> {
#[wasm_bindgen(js_name = setViewCenterPosAngle)]
pub fn set_view_center_pos_angle(&mut self, theta: f64) -> Result<(), JsValue> {
let theta = ArcDeg(theta);
self.app.rotate_around_center(theta);
self.app.set_view_center_pos_angle(theta);
Ok(())
}
/// Get the absolute orientation angle of the view
#[wasm_bindgen(js_name = getRotationAroundCenter)]
pub fn get_rotation_around_center(&mut self) -> Result<f64, JsValue> {
let theta = self.app.get_rotation_around_center();
#[wasm_bindgen(js_name = getViewCenterFromNorthPoleAngle)]
pub fn get_north_shift_angle(&mut self) -> Result<f64, JsValue> {
let phi = self.app.get_north_shift_angle();
Ok(phi.to_degrees())
}
Ok(theta.0 * 360.0 / (2.0 * std::f64::consts::PI))
#[wasm_bindgen(js_name = getNorthPoleCelestialPosition)]
pub fn get_north_pole_celestial_position(&mut self) -> Result<Box<[f64]>, JsValue> {
let np = self
.app
.projection
.north_pole_celestial_space(&self.app.camera);
let (lon, lat) = (np.lon().to_degrees(), np.lat().to_degrees());
Ok(Box::new([lon, lat]))
}
/// Get if the longitude axis is reversed
@@ -576,12 +581,6 @@ impl WebClient {
Ok(Box::new([lon_deg.0, lat_deg.0]))
}
/// Reset the north pole orientation to the top of the screen
#[wasm_bindgen(js_name = resetNorthOrientation)]
pub fn reset_north_orientation(&mut self) {
self.app.reset_north_orientation();
}
/// Go from a location to another one
///
/// # Arguments
@@ -971,7 +970,7 @@ impl WebClient {
}
#[wasm_bindgen(js_name = parseVOTable)]
pub fn parse_votable(&mut self, s: &str) -> Result<JsValue, JsValue> {
pub fn parse_votable(&mut self, _s: &str) -> Result<JsValue, JsValue> {
/*let votable: VOTableWrapper<votable::impls::mem::InMemTableDataRows> =
votable::votable::VOTableWrapper::from_ivoa_xml_str(s)
.map_err(|err| JsValue::from_str(&format!("Error parsing votable: {:?}", err)))?;
@@ -1052,7 +1051,6 @@ impl WebClient {
ra_deg: &[f64],
dec_deg: &[f64],
) -> Result<(), JsValue> {
use cgmath::InnerSpace;
let tile_d = self.app.get_norder();
let pixel_d = tile_d + 9;
@@ -1118,14 +1116,14 @@ impl WebClient {
#[wasm_bindgen]
#[derive(Clone, Copy)]
struct LonLat {
pub struct LonLat {
pub lon: f64,
pub lat: f64,
}
#[wasm_bindgen]
#[derive(Clone, Copy)]
struct HPXVertices {
pub struct HPXVertices {
pub v1: LonLat,
pub v2: LonLat,
pub v3: LonLat,

View File

@@ -24,6 +24,7 @@ where
}
use cgmath::{Deg, Rad};
use serde::Deserialize;
// Convert a Rad<T> to an ArcDeg<T>
impl<T> From<Rad<T>> for ArcDeg<T>
where
@@ -244,7 +245,7 @@ pub enum SerializeFmt {
DMS,
HMS,
DMM,
DD
DD,
}
use al_api::angle_fmt::AngleSerializeFmt;
@@ -362,7 +363,8 @@ impl FormatType for HMS {
}
}
#[derive(Clone, Copy, Debug, Eq, Hash)]
#[derive(Clone, Copy, Debug, Eq, Hash, Deserialize)]
#[serde(rename_all = "camelCase")]
#[repr(C)]
pub struct Angle<S: BaseFloat>(pub S);
impl<S> Angle<S>
@@ -452,14 +454,14 @@ where
pub trait ToAngle<S>
where
S: BaseFloat
S: BaseFloat,
{
fn to_angle(self) -> Angle<S>;
}
impl<S> ToAngle<S> for S
where
S: BaseFloat
S: BaseFloat,
{
fn to_angle(self) -> Angle<S> {
Angle(self)

View File

@@ -8,9 +8,11 @@ pub trait LonLat<S: BaseFloat> {
fn lonlat(&self) -> LonLatT<S>;
fn from_lonlat(lonlat: &LonLatT<S>) -> Self;
}
use crate::math::angle::Angle;
#[derive(Clone, Copy, Debug)]
use serde::Deserialize;
#[derive(Clone, Copy, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
#[repr(C)]
pub struct LonLatT<S: BaseFloat>(pub Angle<S>, pub Angle<S>);
impl<S> LonLatT<S>
@@ -107,11 +109,10 @@ where
let theta = lonlat.lon();
let delta = lonlat.lat();
Vector3::<S>::new(
delta.cos() * theta.sin(),
delta.sin(),
delta.cos() * theta.cos(),
)
let (dc, ds) = (delta.cos(), delta.sin());
let (tc, ts) = (theta.cos(), theta.sin());
Vector3::<S>::new(dc * ts, ds, dc * tc)
}
}
@@ -180,21 +181,17 @@ pub fn xyzw_to_radec<S: BaseFloat>(v: &Vector4<S>) -> (Angle<S>, Angle<S>) {
#[inline]
pub fn radec_to_xyz<S: BaseFloat>(theta: Angle<S>, delta: Angle<S>) -> Vector3<S> {
Vector3::<S>::new(
delta.cos() * theta.sin(),
delta.sin(),
delta.cos() * theta.cos(),
)
let (dc, ds) = (delta.cos(), delta.sin());
let (tc, ts) = (theta.cos(), theta.sin());
Vector3::<S>::new(dc * ts, ds, dc * tc)
}
#[inline]
pub fn radec_to_xyzw<S: BaseFloat>(theta: Angle<S>, delta: Angle<S>) -> Vector4<S> {
Vector4::<S>::new(
delta.cos() * theta.sin(),
delta.sin(),
delta.cos() * theta.cos(),
S::one(),
)
let xyz = radec_to_xyz(theta, delta);
Vector4::<S>::new(xyz.x, xyz.y, xyz.z, S::one())
}
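// Illustrative check, not from the diffed sources: with the Y-up convention used by
// `radec_to_xyz` above, (lon, lat) = (0, 0) maps to +Z and the north pole
// (lat = +90 deg) maps to +Y. The module path of `ToAngle` is assumed.
#[cfg(test)]
mod radec_convention_sketch {
    use super::*;
    use crate::math::angle::ToAngle;

    #[test]
    fn north_pole_is_plus_y() {
        let v = radec_to_xyz(0.0_f64.to_angle(), std::f64::consts::FRAC_PI_2.to_angle());
        assert!(v.x.abs() < 1e-9 && (v.y - 1.0).abs() < 1e-9 && v.z.abs() < 1e-9);
    }
}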
#[inline]
@@ -225,14 +222,14 @@ pub fn proj(
lonlat: &LonLatT<f64>,
projection: &ProjectionType,
camera: &CameraViewPort,
) -> Option<XYNDC> {
) -> Option<XYNDC<f64>> {
let xyzw = lonlat.vector();
projection.model_to_normalized_device_space(&xyzw, camera)
}
#[inline]
pub fn unproj(
ndc_xy: &XYNDC,
ndc_xy: &XYNDC<f64>,
projection: &ProjectionType,
camera: &CameraViewPort,
) -> Option<LonLatT<f64>> {
@@ -246,14 +243,14 @@ pub fn proj_to_screen(
lonlat: &LonLatT<f64>,
projection: &ProjectionType,
camera: &CameraViewPort,
) -> Option<XYScreen> {
) -> Option<XYScreen<f64>> {
let xyzw = lonlat.vector();
projection.model_to_screen_space(&xyzw, camera)
}
#[inline]
pub fn unproj_from_screen(
xy: &XYScreen,
xy: &XYScreen<f64>,
projection: &ProjectionType,
camera: &CameraViewPort,
) -> Option<LonLatT<f64>> {

View File

@@ -1,13 +1,18 @@
use cgmath::{
Vector2,
Vector3,
Vector4,
};
use cgmath::{Vector2, Vector3, Vector4};
pub type XYScreen = Vector2<f64>;
pub type XYNDC = Vector2<f64>;
pub type XYClip = Vector2<f64>;
pub type XYZWorld = Vector3<f64>;
pub type XYZWWorld = Vector4<f64>;
pub type XYZWModel = Vector4<f64>;
pub type XYZModel = Vector3<f64>;
pub type XYScreen<S> = Vector2<S>;
pub type XYNDC<S> = Vector2<S>;
pub type XYClip<S> = Vector2<S>;
pub type XYZWorld<S> = Vector3<S>;
pub type XYZModel<S> = Vector3<S>;
pub type XYZWWorld<S> = Vector4<S>;
pub type XYZWModel<S> = Vector4<S>;
pub enum CooSpace {
Screen,
NDC,
Clip,
World,
Model,
LonLat,
}

View File

@@ -1,14 +1,14 @@
use crate::math::projection::coo_space::XYClip;
pub struct Disk {
pub radius: f64
pub radius: f64,
}
use cgmath::InnerSpace;
use super::super::sdf::ProjDef;
use cgmath::InnerSpace;
impl ProjDef for Disk {
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
xy.magnitude() - self.radius
}
}
}
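// Worked example, not from the diffed sources: the sign convention of `sdf` is
// negative inside the projection's definition domain and positive outside.
#[cfg(test)]
mod disk_sdf_sketch {
    use super::*;
    use cgmath::Vector2;

    #[test]
    fn sign_convention() {
        let disk = Disk { radius: 1.0 };
        assert!(disk.sdf(&Vector2::new(0.5, 0.0)) < 0.0); // 0.5 - 1.0 = -0.5, inside
        assert!(disk.sdf(&Vector2::new(2.0, 0.0)) > 0.0); // 2.0 - 1.0 = +1.0, outside
    }
}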

View File

@@ -8,13 +8,13 @@ pub struct Ellipse {
pub b: f64,
}
use cgmath::InnerSpace;
use super::super::sdf::ProjDef;
use cgmath::InnerSpace;
impl ProjDef for Ellipse {
fn sdf(&self, xy: &XYClip) -> f64 {
let mut p = Vector2::new( xy.x.abs(), xy.y.abs() );
let mut ab = Vector2::new( self.a, self.b );
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let mut p = Vector2::new(xy.x.abs(), xy.y.abs());
let mut ab = Vector2::new(self.a, self.b);
let sdf = if p.x == 0.0 {
-(self.b - p.y)
@@ -25,44 +25,44 @@ impl ProjDef for Ellipse {
p = Vector2::new(p.y, p.x);
ab = Vector2::new(ab.y, ab.x);
}
let l = ab.y*ab.y - ab.x*ab.x;
let m = ab.x*p.x/l;
let m2 = m*m;
let n = ab.y*p.y/l;
let n2 = n*n;
let c = (m2 + n2 - 1.0)/3.0;
let c3 = c*c*c;
let q = c3 + m2*n2*2.0;
let d = c3 + m2*n2;
let g = m + m*n2;
let l = ab.y * ab.y - ab.x * ab.x;
let m = ab.x * p.x / l;
let m2 = m * m;
let n = ab.y * p.y / l;
let n2 = n * n;
let c = (m2 + n2 - 1.0) / 3.0;
let c3 = c * c * c;
let q = c3 + m2 * n2 * 2.0;
let d = c3 + m2 * n2;
let g = m + m * n2;
let co = if d < 0.0 {
let p = (q/c3).acos()/3.0;
let p = (q / c3).acos() / 3.0;
let s = p.cos();
let t = p.sin()*(3.0_f64).sqrt();
let rx = ( -c*(s + t + 2.0) + m2 ).sqrt();
let ry = ( -c*(s - t + 2.0) + m2 ).sqrt();
( ry + (l).signum()*rx + ((g).abs()/(rx*ry)) - m)/2.0
let t = p.sin() * (3.0_f64).sqrt();
let rx = (-c * (s + t + 2.0) + m2).sqrt();
let ry = (-c * (s - t + 2.0) + m2).sqrt();
(ry + (l).signum() * rx + ((g).abs() / (rx * ry)) - m) / 2.0
} else {
let h = 2.0*m*n*(( d ).sqrt());
let s = (q+h).signum()*( (q+h).abs() ).powf( 1.0/3.0 );
let u = (q-h).signum()*( (q-h).abs() ).powf( 1.0/3.0 );
let rx = -s - u - c*4.0 + 2.0*m2;
let ry = (s - u)*(3.0_f64).sqrt();
let rm = ( rx*rx + ry*ry ).sqrt();
let p = ry/((rm-rx).sqrt());
(p + (2.0*g/rm) - m)/2.0
let h = 2.0 * m * n * ((d).sqrt());
let s = (q + h).signum() * ((q + h).abs()).powf(1.0 / 3.0);
let u = (q - h).signum() * ((q - h).abs()).powf(1.0 / 3.0);
let rx = -s - u - c * 4.0 + 2.0 * m2;
let ry = (s - u) * (3.0_f64).sqrt();
let rm = (rx * rx + ry * ry).sqrt();
let p = ry / ((rm - rx).sqrt());
(p + (2.0 * g / rm) - m) / 2.0
};
let si = ( 1.0 - co*co ).sqrt();
let q = Vector2::new( ab.x*co, ab.y*si );
(q-p).magnitude() * (p.y-q.y).signum()
let si = (1.0 - co * co).sqrt();
let q = Vector2::new(ab.x * co, ab.y * si);
(q - p).magnitude() * (p.y - q.y).signum()
};
sdf
}
}
}

View File

@@ -3,14 +3,14 @@ use crate::math::projection::coo_space::XYClip;
use cgmath::Vector2;
pub struct Parabola {
// Quadratic coefficient
pub k: f64
pub k: f64,
}
use super::super::sdf::ProjDef;
use cgmath::InnerSpace;
impl ProjDef for Parabola {
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let mut xy = *xy;
// There is a singularity around x == 0
@@ -20,21 +20,17 @@ impl ProjDef for Parabola {
xy.x += 1e-4;
}
xy.x = xy.x.abs();
let ik = 1.0/self.k;
let p = ik*(xy.y - 0.5*ik)/3.0;
let q = 0.25*ik*ik*xy.x;
let h = q*q - p*p*p;
let ik = 1.0 / self.k;
let p = ik * (xy.y - 0.5 * ik) / 3.0;
let q = 0.25 * ik * ik * xy.x;
let h = q * q - p * p * p;
let r = h.abs().sqrt();
let x = if h>0.0 {
(q+r).powf(1.0/3.0) - (q-r).abs().powf(1.0/3.0)*(r-q).signum()
let x = if h > 0.0 {
(q + r).powf(1.0 / 3.0) - (q - r).abs().powf(1.0 / 3.0) * (r - q).signum()
} else {
2.0*(r.atan2(q)/3.0).cos()*p.sqrt()
2.0 * (r.atan2(q) / 3.0).cos() * p.sqrt()
};
let a = if xy.x - x < 0.0 {
-1.0
} else {
1.0
};
(xy-Vector2::new(x, self.k*x*x)).magnitude() * a
let a = if xy.x - x < 0.0 { -1.0 } else { 1.0 };
(xy - Vector2::new(x, self.k * x * x)).magnitude() * a
}
}
}

View File

@@ -2,22 +2,19 @@ use crate::math::projection::coo_space::XYClip;
use cgmath::Vector2;
pub struct Rect {
pub dim: Vector2<f64>
pub dim: Vector2<f64>,
}
use super::super::sdf::ProjDef;
use cgmath::InnerSpace;
impl ProjDef for Rect {
fn sdf(&self, xy: &XYClip) -> f64 {
let d = Vector2::new(
xy.x.abs() - self.dim.x,
xy.y.abs() - self.dim.y
);
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let d = Vector2::new(xy.x.abs() - self.dim.x, xy.y.abs() - self.dim.y);
let a = Vector2::new(d.x.max(0.0), d.y.max(0.0));
let b = (d.x.max(d.y)).min(0.0);
a.magnitude() + b
}
}
}

View File

@@ -10,30 +10,27 @@ pub struct Triangle {
use super::super::sdf::ProjDef;
use cgmath::InnerSpace;
impl ProjDef for Triangle {
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let e0 = self.p1 - self.p0;
let e1 = self.p2 - self.p1;
let e2 = self.p0 - self.p2;
let v0 = xy - self.p0;
let v1 = xy - self.p1;
let v2 = xy - self.p2;
let pq0 = v0 - e0 * ( v0.dot(e0) / e0.dot(e0) ).clamp( 0.0, 1.0 );
let pq1 = v1 - e1 * ( v1.dot(e1) / e1.dot(e1) ).clamp( 0.0, 1.0 );
let pq2 = v2 - e2 * ( v2.dot(e2) / e2.dot(e2) ).clamp( 0.0, 1.0 );
let s = e0.x*e2.y - e0.y*e2.x;
let d1 = Vector2::new(pq0.dot( pq0 ), s*(v0.x*e0.y-v0.y*e0.x));
let d2 = Vector2::new(pq1.dot( pq1 ), s*(v1.x*e1.y-v1.y*e1.x));
let d3 = Vector2::new(pq2.dot( pq2 ), s*(v2.x*e2.y-v2.y*e2.x));
let d = Vector2::new(
d1.x.min(d2.x.min(d3.x)),
d1.y.min(d2.y.min(d3.y))
);
-d.x.sqrt()*(d.y.signum())
let pq0 = v0 - e0 * (v0.dot(e0) / e0.dot(e0)).clamp(0.0, 1.0);
let pq1 = v1 - e1 * (v1.dot(e1) / e1.dot(e1)).clamp(0.0, 1.0);
let pq2 = v2 - e2 * (v2.dot(e2) / e2.dot(e2)).clamp(0.0, 1.0);
let s = e0.x * e2.y - e0.y * e2.x;
let d1 = Vector2::new(pq0.dot(pq0), s * (v0.x * e0.y - v0.y * e0.x));
let d2 = Vector2::new(pq1.dot(pq1), s * (v1.x * e1.y - v1.y * e1.x));
let d3 = Vector2::new(pq2.dot(pq2), s * (v2.x * e2.y - v2.y * e2.x));
let d = Vector2::new(d1.x.min(d2.x.min(d3.x)), d1.y.min(d2.y.min(d3.y)));
-d.x.sqrt() * (d.y.signum())
}
}
}

View File

@@ -1,16 +1,13 @@
use crate::math::projection::coo_space::XYClip;
use cgmath::Vector2;
use crate::math::HALF_PI;
use crate::math::angle::PI;
use super::{
sdf::ProjDef,
basic::{ellipse::Ellipse, triangle::Triangle},
op::{Diff, Translate},
basic::{
triangle::Triangle,
ellipse::Ellipse,
}
sdf::ProjDef,
};
use crate::math::angle::PI;
use crate::math::HALF_PI;
pub struct Cod {
pub r_max: f64,
@@ -41,7 +38,7 @@ impl Cod {
}
}
fn to_clip(&self, xy: &Vector2<f64>) -> XYClip {
fn to_clip(&self, xy: &Vector2<f64>) -> XYClip<f64> {
let x = (xy.x - self.x_min) / (self.x_max - self.x_min);
let y = (xy.y - self.y_min) / (self.y_max - self.y_min);
@@ -50,20 +47,26 @@ impl Cod {
}
impl ProjDef for Cod {
fn sdf(&self, xy: &XYClip) -> f64 {
let y_mean = (self.y_min + self.y_max)*0.5;
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let y_mean = (self.y_min + self.y_max) * 0.5;
let center_ellipse = self.to_clip(&Vector2::new(0.0, self.y0 + y_mean));
// Big frontier ellipse
let a = 1.0;
let b = 2.0 * (2.356194490192345 + self.y0) / (2.356194490192345 + 3.0328465566001492);
let e = b / a;
let ext_ellipse = Translate { off: center_ellipse, def: Ellipse { a: a, b: b } };
let ext_ellipse = Translate {
off: center_ellipse,
def: Ellipse { a: a, b: b },
};
// Small ellipse where projection is not defined
let b_int = 2.0 * self.r_min / (2.356194490192345 + 3.0328465566001492);
let a_int = b_int / e;
let int_ellipse = Translate { off: center_ellipse, def: Ellipse { a: a_int, b: b_int } };
let int_ellipse = Translate {
off: center_ellipse,
def: Ellipse { a: a_int, b: b_int },
};
// The top edges
let gamma = PI * self.c - HALF_PI;
@@ -75,9 +78,9 @@ impl ProjDef for Cod {
let tri = Triangle {
p0: center_ellipse,
p1: self.to_clip(&b),
p2: self.to_clip(&c)
p2: self.to_clip(&c),
};
Diff::new(Diff::new(ext_ellipse, int_ellipse), tri).sdf(xy)
}
}
}

View File

@@ -3,12 +3,12 @@ use cgmath::Vector2;
pub struct FullScreen;
use super::{
basic::rect::Rect,
sdf::ProjDef,
};
use super::{basic::rect::Rect, sdf::ProjDef};
impl ProjDef for FullScreen {
fn sdf(&self, xy: &XYClip) -> f64 {
Rect { dim: Vector2::new(1.0, 1.0) }.sdf(xy)
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
Rect {
dim: Vector2::new(1.0, 1.0),
}
.sdf(xy)
}
}

View File

@@ -5,58 +5,57 @@ pub struct Hpx;
use super::sdf::ProjDef;
use super::{
basic::{rect::Rect, triangle::Triangle},
op::Union,
basic::{
triangle::Triangle,
rect::Rect
}
};
impl ProjDef for Hpx {
fn sdf(&self, xy: &XYClip) -> f64 {
let rect = Rect { dim: Vector2::new(1.0, 0.5) };
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let rect = Rect {
dim: Vector2::new(1.0, 0.5),
};
let t1 = Triangle {
p0: Vector2::new(1.0, 0.5),
p1: Vector2::new(0.5, 0.5),
p2: Vector2::new(0.75, 1.0)
p2: Vector2::new(0.75, 1.0),
};
let t2 = Triangle {
p0: Vector2::new(0.5, 0.5),
p1: Vector2::new(0.0, 0.5),
p2: Vector2::new(0.25, 1.0)
p2: Vector2::new(0.25, 1.0),
};
let t3 = Triangle {
p0: Vector2::new(-1.0, 0.5),
p1: Vector2::new(-0.5, 0.5),
p2: Vector2::new(-0.75, 1.0)
p2: Vector2::new(-0.75, 1.0),
};
let t4 = Triangle {
p0: Vector2::new(-0.5, 0.5),
p1: Vector2::new(-0.0, 0.5),
p2: Vector2::new(-0.25, 1.0)
p2: Vector2::new(-0.25, 1.0),
};
let t5 = Triangle {
p0: Vector2::new(-1.0, -0.5),
p1: Vector2::new(-0.5, -0.5),
p2: Vector2::new(-0.75, -1.0)
p2: Vector2::new(-0.75, -1.0),
};
let t6 = Triangle {
p0: Vector2::new(-0.5, -0.5),
p1: Vector2::new(-0.0, -0.5),
p2: Vector2::new(-0.25, -1.0)
p2: Vector2::new(-0.25, -1.0),
};
let t7 = Triangle {
p0: Vector2::new(1.0, -0.5),
p1: Vector2::new(0.5, -0.5),
p2: Vector2::new(0.75, -1.0)
p2: Vector2::new(0.75, -1.0),
};
let t8 = Triangle {
p0: Vector2::new(0.5, -0.5),
p1: Vector2::new(0.0, -0.5),
p2: Vector2::new(0.25, -1.0)
p2: Vector2::new(0.25, -1.0),
};
let t12 = Union::new(t1, t2);

View File

@@ -1,10 +1,10 @@
use super::sdf::ProjDef;
use crate::math::projection::XYClip;
use cgmath::Vector2;
use super::sdf::ProjDef;
pub struct Scale<T>
where
T: ProjDef
T: ProjDef,
{
pub scale: Vector2<f64>,
pub def: T,
@@ -12,17 +12,18 @@ where
impl<T> ProjDef for Scale<T>
where
T: ProjDef
T: ProjDef,
{
/// Signed distance function to the definition domain region
fn sdf(&self, xy: &XYClip) -> f64 {
self.def.sdf(&Vector2::new(xy.x / self.scale.x, xy.y / self.scale.y))
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
self.def
.sdf(&Vector2::new(xy.x / self.scale.x, xy.y / self.scale.y))
}
}
pub struct Translate<T>
where
T: ProjDef
T: ProjDef,
{
pub off: Vector2<f64>,
pub def: T,
@@ -30,10 +31,10 @@ where
impl<T> ProjDef for Translate<T>
where
T: ProjDef
T: ProjDef,
{
/// Signed distance function to the definition domain region
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
self.def.sdf(&(*xy - self.off))
}
}
@@ -42,7 +43,7 @@ where
pub struct Union<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
sdf1: T,
sdf2: U,
@@ -51,23 +52,20 @@ where
impl<T, U> Union<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
pub fn new(sdf1: T, sdf2: U) -> Self {
Self {
sdf1,
sdf2,
}
Self { sdf1, sdf2 }
}
}
impl<T, U> ProjDef for Union<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
/// Signed distance function to the definition domain region
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let s1 = self.sdf1.sdf(xy);
let s2 = self.sdf2.sdf(xy);
@@ -79,7 +77,7 @@ where
pub struct Inter<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
sdf1: T,
sdf2: U,
@@ -88,23 +86,20 @@ where
impl<T, U> Inter<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
pub fn new(sdf1: T, sdf2: U) -> Self {
Self {
sdf1,
sdf2,
}
Self { sdf1, sdf2 }
}
}
impl<T, U> ProjDef for Inter<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
/// Signed distance function to the definition domain region
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let s1 = self.sdf1.sdf(xy);
let s2 = self.sdf2.sdf(xy);
@@ -116,7 +111,7 @@ where
pub struct Diff<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
sdf1: T,
sdf2: U,
@@ -125,27 +120,24 @@ where
impl<T, U> Diff<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
pub fn new(sdf1: T, sdf2: U) -> Self {
Self {
sdf1,
sdf2,
}
Self { sdf1, sdf2 }
}
}
impl<T, U> ProjDef for Diff<T, U>
where
T: ProjDef,
U: ProjDef
U: ProjDef,
{
/// Signed distance function to the definition domain region
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let s1 = self.sdf1.sdf(xy);
let s2 = self.sdf2.sdf(xy);
// intersection
(-s2).max(s1)
}
}
}

View File

@@ -4,17 +4,23 @@ use cgmath::Vector2;
pub struct Par;
use super::{
sdf::ProjDef,
basic::parabola::Parabola,
op::{Translate, Inter}
op::{Inter, Translate},
sdf::ProjDef,
};
impl ProjDef for Par {
fn sdf(&self, xy: &XYClip) -> f64 {
fn sdf(&self, xy: &XYClip<f64>) -> f64 {
let xy = Vector2::new(xy.y, xy.x);
let p1 = Translate { off: Vector2::new(0.0, -1.0), def: Parabola { k: 1.0 } };
let p2 = Translate { off: Vector2::new(0.0, 1.0), def: Parabola { k: -1.0 } };
let p1 = Translate {
off: Vector2::new(0.0, -1.0),
def: Parabola { k: 1.0 },
};
let p2 = Translate {
off: Vector2::new(0.0, 1.0),
def: Parabola { k: -1.0 },
};
Inter::new(p1, p2).sdf(&xy)
}
}
}

View File

@@ -3,28 +3,28 @@ use crate::math::projection::coo_space::XYClip;
#[enum_dispatch(ProjDefType)]
pub trait ProjDef {
fn is_in(&self, xy: &XYClip) -> bool {
fn is_in(&self, xy: &XYClip<f64>) -> bool {
self.sdf(xy) <= 0.0
}
/// Signed distance function to the definition domain region
fn sdf(&self, xy: &XYClip) -> f64;
fn sdf(&self, xy: &XYClip<f64>) -> f64;
}
use crate::math::vector::NormedVector2;
/// Project a vertex on a valid region defined by a Signed Distance Function (SDF)
///
///
/// # Arguments
///
/// * `p` - A vertex in the clipping space
/// * `p` - A vertex in the clipping space
/// * `dir` - A direction of the normed vector
/// * `valid_reg` - The projection definition region
pub fn ray_marching<P>(p: &XYClip, dir: &NormedVector2, valid_reg: &P) -> Option<XYClip>
pub fn ray_marching<P>(p: &XYClip<f64>, dir: &NormedVector2, valid_reg: &P) -> Option<XYClip<f64>>
where
P: ProjDef
P: ProjDef,
{
// This is done so that we get further a little bit
let in_clip_space = |p: &XYClip| -> bool {
let in_clip_space = |p: &XYClip<f64>| -> bool {
((-1.0)..=1.0).contains(&p.x) && ((-1.0)..=1.0).contains(&p.y)
};
@@ -49,13 +49,7 @@ where
}
}
use super::{
basic::disk::Disk,
full::FullScreen,
hpx::Hpx,
par::Par,
cod::Cod
};
use super::{basic::disk::Disk, cod::Cod, full::FullScreen, hpx::Hpx, par::Par};
// List of all the footprints
// found in Aladin Lite

View File

@@ -9,23 +9,23 @@
// World space
use crate::camera::CameraViewPort;
use crate::domain::sdf::ProjDefType;
use crate::LonLatT;
use coo_space::XYZWModel;
//use crate::num_traits::FloatConst;
use crate::math::PI;
use crate::math::{rotation::Rotation, HALF_PI};
use cgmath::Vector2;
pub mod coo_space;
pub mod domain;
use domain::{basic, cod::Cod, full::FullScreen, hpx::Hpx, par::Par};
use domain::{basic, full::FullScreen};
/* S <-> NDC space conversion methods */
pub fn screen_to_ndc_space(
pos_screen_space: &Vector2<f64>,
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
) -> XYNDC<f64> {
// Screen space in pixels to homogeneous screen space (values between [-1, 1])
let window_size = camera.get_screen_size();
let window_size = Vector2::new(window_size.x as f64, window_size.y as f64);
@@ -42,9 +42,9 @@ pub fn screen_to_ndc_space(
}
pub fn ndc_to_screen_space(
pos_normalized_device: &Vector2<f64>,
pos_normalized_device: &XYNDC<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
) -> XYScreen<f64> {
let window_size = camera.get_screen_size();
let dpi = camera.get_dpi() as f64;
@@ -57,7 +57,7 @@ pub fn ndc_to_screen_space(
}
/* NDC <-> CLIP space conversion methods */
pub fn clip_to_ndc_space(pos_clip_space: &Vector2<f64>, camera: &CameraViewPort) -> Vector2<f64> {
pub fn clip_to_ndc_space(pos_clip_space: &XYClip<f64>, camera: &CameraViewPort) -> XYNDC<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
@@ -68,9 +68,9 @@ pub fn clip_to_ndc_space(pos_clip_space: &Vector2<f64>, camera: &CameraViewPort)
}
pub fn ndc_to_clip_space(
pos_normalized_device: &Vector2<f64>,
pos_normalized_device: &XYNDC<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
) -> XYClip<f64> {
let ndc_to_clip = camera.get_ndc_to_clip();
let clip_zoom_factor = camera.get_clip_zoom_factor();
@@ -82,23 +82,22 @@ pub fn ndc_to_clip_space(
/* S <-> CLIP space conversion methods */
pub fn clip_to_screen_space(
pos_clip_space: &Vector2<f64>,
pos_clip_space: &XYClip<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
) -> XYScreen<f64> {
let pos_normalized_device = clip_to_ndc_space(pos_clip_space, camera);
ndc_to_screen_space(&pos_normalized_device, camera)
}
pub fn screen_to_clip_space(
pos_screen_space: &Vector2<f64>,
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
) -> XYClip<f64> {
let pos_normalized_device = screen_to_ndc_space(pos_screen_space, camera);
ndc_to_clip_space(&pos_normalized_device, camera)
}
use al_api::coo_system::CooSystem;
use cgmath::InnerSpace;
use crate::coo_space::{XYClip, XYZWWorld};
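// Illustrative sketch, not from the diffed sources: how the typed aliases introduced
// in coo_space.rs chain together when deprojecting a mouse position. The helper name
// `screen_to_model_sketch` is hypothetical; it simply mirrors what
// `screen_to_model_space` does further down in this file.
#[allow(dead_code)]
fn screen_to_model_sketch(
    pos: &XYScreen<f64>,
    proj: &ProjectionType,
    camera: &CameraViewPort,
) -> Option<XYZWModel<f64>> {
    let ndc: XYNDC<f64> = screen_to_ndc_space(pos, camera); // pixels -> [-1, 1]
    let clip: XYClip<f64> = ndc_to_clip_space(&ndc, camera); // apply the zoom factor & aspect ratio
    let world: XYZWWorld<f64> = proj.clip_to_world_space(&clip)?; // deprojection, None outside the map
    Some(camera.get_w2m() * world) // world -> model (celestial) frame
}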
@@ -151,7 +150,16 @@ pub enum ProjectionType {
//Hpx(mapproj::hybrid::hpx::Hpx),
}
use crate::math::lonlat::LonLat;
impl ProjectionType {
pub fn north_pole_celestial_space(&self, camera: &CameraViewPort) -> LonLatT<f64> {
// This is always defined
let np_world = self.north_pole_world_space();
let np_celestial = camera.get_w2m() * np_world;
np_celestial.lonlat()
}
/// Screen to model space deprojection
/// Perform a screen to the world space deprojection
@@ -162,9 +170,9 @@ impl ProjectionType {
/// * ``camera`` - The camera object
pub fn screen_to_world_space(
&self,
pos_screen_space: &Vector2<f64>,
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,
) -> Option<Vector4<f64>> {
) -> Option<XYZWWorld<f64>> {
// Change the screen position according to the dpi
//let dpi = camera.get_dpi();
let pos_screen_space = *pos_screen_space;
@@ -172,13 +180,6 @@ impl ProjectionType {
let pos_clip_space = ndc_to_clip_space(&pos_normalized_device, camera);
self.clip_to_world_space(&pos_clip_space)
/*.map(|mut pos_world_space| {
if camera.get_longitude_reversed() {
pos_world_space.x = -pos_world_space.x;
}
pos_world_space.normalize()
})*/
}
/// Screen to model space deprojection
@@ -191,72 +192,59 @@ impl ProjectionType {
/// * ``camera`` - The camera object
pub fn screen_to_model_space(
&self,
pos_screen_space: &Vector2<f64>,
pos_screen_space: &XYScreen<f64>,
camera: &CameraViewPort,
) -> Option<Vector4<f64>> {
) -> Option<XYZWModel<f64>> {
self.screen_to_world_space(pos_screen_space, camera)
.map(|world_pos| camera.get_w2m() * world_pos)
}
pub fn normalized_device_to_model_space(
&self,
ndc_pos: &XYNDC,
ndc_pos: &XYNDC<f64>,
camera: &CameraViewPort,
) -> Option<XYZWModel> {
) -> Option<XYZWModel<f64>> {
self.normalized_device_to_world_space(ndc_pos, camera)
.map(|world_pos| camera.get_w2m() * world_pos)
}
pub fn model_to_screen_space(
&self,
pos_model_space: &Vector4<f64>,
pos_model_space: &XYZWModel<f64>,
camera: &CameraViewPort,
) -> Option<Vector2<f64>> {
) -> Option<XYScreen<f64>> {
let m2w = camera.get_m2w();
let pos_world_space = m2w * pos_model_space;
self.world_to_screen_space(&pos_world_space, camera)
}
pub fn view_to_screen_space(
pub fn icrs_celestial_to_screen_space(
&self,
pos_model_space: &Vector4<f64>,
icrs_celestial_pos: &XYZWModel<f64>,
camera: &CameraViewPort,
) -> Option<Vector2<f64>> {
self.view_to_normalized_device_space(pos_model_space, camera)
) -> Option<XYScreen<f64>> {
self.icrs_celestial_to_normalized_device_space(icrs_celestial_pos, camera)
.map(|ndc_pos| crate::ndc_to_screen_space(&ndc_pos, camera))
}
pub fn view_to_normalized_device_space(
pub fn icrs_celestial_to_normalized_device_space(
&self,
pos_view_space: &Vector4<f64>,
icrs_celestial_pos: &XYZWModel<f64>,
camera: &CameraViewPort,
) -> Option<Vector2<f64>> {
) -> Option<XYNDC<f64>> {
let view_coosys = camera.get_coo_system();
let c = CooSystem::ICRS.to::<f64>(view_coosys);
let m2w = camera.get_m2w();
let pos_world_space = m2w * c * pos_view_space;
let pos_world_space = m2w * c * icrs_celestial_pos;
self.world_to_normalized_device_space(&pos_world_space, camera)
}
/*pub fn view_to_normalized_device_space_unchecked(
&self,
pos_view_space: &Vector4<f64>,
camera: &CameraViewPort,
) -> Vector2<f64> {
let view_coosys = camera.get_coo_system();
let c = CooSystem::ICRS.to::<f64>(view_coosys);
let m2w = camera.get_m2w();
let pos_world_space = m2w * c * pos_view_space;
self.world_to_normalized_device_space_unchecked(&pos_world_space, camera)
}*/
pub fn model_to_normalized_device_space(
&self,
pos_model_space: &XYZWModel,
pos_model_space: &XYZWModel<f64>,
camera: &CameraViewPort,
) -> Option<XYNDC> {
) -> Option<XYNDC<f64>> {
let m2w = camera.get_m2w();
let pos_world_space = m2w * pos_model_space;
self.world_to_normalized_device_space(&pos_world_space, camera)
@@ -264,9 +252,9 @@ impl ProjectionType {
pub fn model_to_clip_space(
&self,
pos_model_space: &XYZWModel,
pos_model_space: &XYZWModel<f64>,
camera: &CameraViewPort,
) -> Option<XYClip> {
) -> Option<XYClip<f64>> {
let m2w = camera.get_m2w();
let pos_world_space = m2w * pos_model_space;
self.world_to_clip_space(&pos_world_space)
@@ -282,39 +270,39 @@ impl ProjectionType {
/// * `y` - Y mouse position in homogenous screen space (between [-1, 1])
pub fn world_to_normalized_device_space(
&self,
pos_world_space: &Vector4<f64>,
pos_world_space: &XYZWWorld<f64>,
camera: &CameraViewPort,
) -> Option<Vector2<f64>> {
) -> Option<XYNDC<f64>> {
self.world_to_clip_space(pos_world_space)
.map(|pos_clip_space| clip_to_ndc_space(&pos_clip_space, camera))
}
pub fn normalized_device_to_world_space(
&self,
ndc_pos: &XYNDC,
ndc_pos: &XYNDC<f64>,
camera: &CameraViewPort,
) -> Option<XYZWWorld> {
) -> Option<XYZWWorld<f64>> {
let clip_pos = ndc_to_clip_space(ndc_pos, camera);
self.clip_to_world_space(&clip_pos)
}
pub fn world_to_screen_space(
&self,
pos_world_space: &Vector4<f64>,
pos_world_space: &XYZWWorld<f64>,
camera: &CameraViewPort,
) -> Option<Vector2<f64>> {
) -> Option<XYScreen<f64>> {
self.world_to_normalized_device_space(pos_world_space, camera)
.map(|pos_normalized_device| ndc_to_screen_space(&pos_normalized_device, camera))
}
pub(crate) fn is_allsky(&self) -> bool {
/*pub(crate) fn is_allsky(&self) -> bool {
match self {
ProjectionType::Sin(_) | ProjectionType::Tan(_) => false,
//| ProjectionType::Feye(_)
//| ProjectionType::Ncp(_) => false,
_ => true,
}
}
}*/
pub fn bounds_size_ratio(&self) -> f64 {
match self {
@@ -524,7 +512,7 @@ impl ProjectionType {
impl Projection for ProjectionType {
/// Deprojection
fn clip_to_world_space(&self, xy: &XYClip) -> Option<XYZWWorld> {
fn clip_to_world_space(&self, xy: &XYClip<f64>) -> Option<XYZWWorld<f64>> {
match self {
// Zenithal projections
/* TAN, Gnomonic projection */
@@ -580,7 +568,7 @@ impl Projection for ProjectionType {
}
// Projection
fn world_to_clip_space(&self, xyzw: &XYZWWorld) -> Option<XYClip> {
fn world_to_clip_space(&self, xyzw: &XYZWWorld<f64>) -> Option<XYClip<f64>> {
match self {
// Zenithal projections
/* TAN, Gnomonic projection */
@@ -636,6 +624,35 @@ impl Projection for ProjectionType {
}
}
use al_core::shader::UniformType;
use al_core::WebGlContext;
use web_sys::WebGlUniformLocation;
impl UniformType for ProjectionType {
fn uniform(gl: &WebGlContext, location: Option<&WebGlUniformLocation>, value: &Self) {
match value {
// Zenithal projections
/* TAN, Gnomonic projection */
ProjectionType::Tan(_) => gl.uniform1i(location, 0),
/* STG, Stereographic projection */
ProjectionType::Stg(_) => gl.uniform1i(location, 1),
/* SIN, Orthographic */
ProjectionType::Sin(_) => gl.uniform1i(location, 2),
/* ZEA, Equal-area */
ProjectionType::Zea(_) => gl.uniform1i(location, 3),
// Pseudo-cylindrical projections
/* AIT, Aitoff */
ProjectionType::Ait(_) => gl.uniform1i(location, 4),
// MOL, Mollweide */
ProjectionType::Mol(_) => gl.uniform1i(location, 5),
// Cylindrical projections
// MER, Mercator */
ProjectionType::Mer(_) => gl.uniform1i(location, 6),
}
}
}
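// Note, an assumed reading rather than something shown in this hunk: the integer
// codes written by `uniform1i` above have to match the projection switch used on the
// GPU side; the shader half of that mapping is not part of this excerpt.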
use cgmath::Vector4;
use mapproj::CanonicalProjection;
@@ -645,20 +662,37 @@ pub trait Projection {
/// # Arguments
///
    /// * ``pos_clip_space`` - The position in the clipping space (orthonormalized space)
fn clip_to_world_space(&self, xy_clip: &XYClip) -> Option<XYZWWorld>;
fn clip_to_world_space(&self, xy_clip: &XYClip<f64>) -> Option<XYZWWorld<f64>>;
/// World to the clipping space deprojection
///
/// # Arguments
///
/// * ``pos_world_space`` - The position in the world space
fn world_to_clip_space(&self, pos_world_space: &XYZWWorld) -> Option<XYClip>;
fn world_to_clip_space(&self, pos_world_space: &XYZWWorld<f64>) -> Option<XYClip<f64>>;
/// (`alpha_p`, `delta_p`) in the WCS II paper from Mark Calabretta.
#[inline]
fn north_pole_world_space(&self) -> XYZWWorld<f64> {
// This is always defined
self.clip_to_world_space(&XYClip::new(0.0, 1.0 - 1e-5))
.unwrap()
}
#[inline]
fn south_pole_world_space(&self) -> XYZWWorld<f64> {
// This is always defined
self.clip_to_world_space(&XYClip::new(0.0, -1.0 + 1e-5))
.unwrap()
}
}
use mapproj::ProjXY;
use self::coo_space::XYScreen;
use self::coo_space::XYNDC;
use super::angle::ToAngle;
use super::lonlat::LonLatT;
impl<'a, P> Projection for &'a P
where
P: CanonicalProjection,
@@ -668,7 +702,7 @@ where
/// # Arguments
///
    /// * ``pos_clip_space`` - The position in the clipping space (orthonormalized space)
fn clip_to_world_space(&self, xy_clip: &XYClip) -> Option<XYZWWorld> {
fn clip_to_world_space(&self, xy_clip: &XYClip<f64>) -> Option<XYZWWorld<f64>> {
let proj_bounds = self.bounds();
// Scale the xy_clip space so that it maps the proj definition domain of mapproj
let xy_mapproj = {
@@ -701,7 +735,7 @@ where
/// # Arguments
///
/// * ``pos_world_space`` - The position in the world space
fn world_to_clip_space(&self, pos_world_space: &XYZWWorld) -> Option<XYClip> {
fn world_to_clip_space(&self, pos_world_space: &XYZWWorld<f64>) -> Option<XYClip<f64>> {
// Xmpp <-> Zal
// -Ympp <-> Xal
// Zmpp <-> Yal
@@ -734,10 +768,12 @@ where
}
}
#[cfg(test)]
mod tests {
#[test]
fn generate_maps() {
use super::*;
use cgmath::InnerSpace;
use cgmath::Vector2;
use image_decoder::{Rgb, RgbImage};

View File

@@ -1,6 +1,6 @@
use crate::math;
use cgmath::Quaternion;
use cgmath::{BaseFloat, InnerSpace};
use cgmath::{Euler, Quaternion};
use cgmath::{Vector3, Vector4};
#[derive(Clone, Copy, Debug)]
@@ -136,6 +136,31 @@ where
m2w * pos_model_space
}
pub fn euler(&self) -> Euler<Rad<S>> {
self.0.into()
}
    /// Extract the 3 Euler angles from the quaternion.
    /// Aladin Lite's rotation basis is formed by the Z, X and Y axes:
    /// * Z axis is pointing towards us
    /// * Y is pointing upward
    /// * X is defined from the right-hand rule to form a basis
    ///
    /// The first Euler angle describes the longitude (rotation around the Y axis) <=> pitch
    /// The second Euler angle describes the latitude (rotation around the X' modified axis) <=> yaw
    /// The third Euler angle describes a rotation deviation from the north pole (rotation around the Z'' modified axis) <=> roll
///
/// Equations come from this paper (Appendix 6):
/// https://ntrs.nasa.gov/api/citations/19770024290/downloads/19770024290.pdf
pub fn euler_yxz(&self) -> (Angle<S>, Angle<S>, Angle<S>) {
let m: Matrix4<S> = self.0.into();
let a = m.x.z.atan2(m.z.z);
let b = (-m.z.y).atan2((S::one() - m.z.y * m.z.y).sqrt());
let c = m.x.y.atan2(m.y.y);
(Angle(a), Angle(b), Angle(c))
}
}
use std::ops::Mul;
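// Illustrative check, not from the diffed sources: for a rotation about the Y (up)
// axis only, the matrix elements used by `euler_yxz` give a zero latitude and a zero
// position-angle roll, and a longitude equal to +/- the rotation angle (the sign
// depends on the quaternion convention wrapped by `Rotation`).
#[cfg(test)]
mod euler_yxz_sketch {
    use super::*;
    use cgmath::Rad;

    #[test]
    fn pure_longitude_rotation() {
        let q = Quaternion::from(Euler::new(Rad(0.0_f64), Rad(0.3), Rad(0.0)));
        let (lon, lat, pa) = Rotation(q).euler_yxz();
        assert!((lon.0.abs() - 0.3).abs() < 1e-9);
        assert!(lat.0.abs() < 1e-9 && pa.0.abs() < 1e-9);
    }
}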

View File

@@ -38,11 +38,11 @@ pub enum Intersection {
// The segment does not intersect the region
Empty,
// The segment does intersect the region
Intersect { vertices: Box<[XYZWModel]> },
Intersect { vertices: Box<[XYZWModel<f64>]> },
}
impl Region {
pub fn from_vertices(vertices: &[XYZWModel], control_point: &XYZWModel) -> Self {
pub fn from_vertices(vertices: &[XYZWModel<f64>], control_point: &XYZWModel<f64>) -> Self {
let (vertices, (lon, lat)): (Vec<_>, (Vec<_>, Vec<_>)) = vertices
.iter()
.map(|v| {
@@ -99,7 +99,7 @@ impl Region {
Region::AllSky => Intersection::Included,
Region::Polygon { polygon, .. } => {
let vertices = polygon
.intersect_parallel(lat)
.intersect_parallel_all(lat)
.iter()
.map(|v| XYZWModel::new(v.y(), v.z(), v.x(), 1.0))
.collect::<Vec<_>>();
@@ -134,7 +134,7 @@ impl Region {
Coo3D::from_sph_coo(lonlat2.lon().to_radians(), lonlat2.lat().to_radians());
let vertices: Vec<cgmath::Vector4<f64>> = polygon
.intersect_great_circle_arc(&coo1, &coo2)
.intersect_great_circle_arc_all(&coo1, &coo2)
.iter()
.map(|v| XYZWModel::new(v.y(), v.z(), v.x(), 1.0))
.collect::<Vec<_>>();
@@ -160,13 +160,13 @@ impl Region {
self.intersects_great_circle_arc(&s_pole_lonlat, &n_pole_lonlat)
}
pub fn intersects_great_circle(&self, n: &Vector3<f64>) -> Intersection {
fn intersects_great_circle(&self, n: &Vector3<f64>) -> Intersection {
match self {
// The polygon is included inside the region
Region::AllSky => Intersection::Included,
Region::Polygon { polygon, .. } => {
let vertices: Vec<cgmath::Vector4<f64>> = polygon
.intersect_great_circle(&UnitVect3::new_unsafe(n.z, n.x, n.y))
.intersect_great_circle_all(&UnitVect3::new_unsafe(n.z, n.x, n.y))
.iter()
.map(|v| XYZWModel::new(v.y(), v.z(), v.x(), 1.0))
.collect::<Vec<_>>();

View File

@@ -1,4 +1,3 @@
use crate::survey::texture::Texture;
use crate::ShaderManager;
use al_api::coo_system::CooSystem;
@@ -7,9 +6,7 @@ use al_api::resources::Resources;
use al_core::colormap::Colormap;
use al_core::Colormaps;
use al_core::FrameBufferObject;
use al_core::{Texture2D, VecData, VertexArrayObject, WebGlContext};
use al_core::image::format::{R8UI, RGBA8U};
use al_core::{VecData, VertexArrayObject, WebGlContext};
use crate::ProjectionType;
use std::collections::HashMap;
@@ -29,7 +26,8 @@ impl From<Error> for JsValue {
}
}
const NUM_SHAPES: usize = 5;
// Num of shapes
const _NUM_SHAPES: usize = 5;
pub struct Manager {
gl: WebGlContext,
//kernels: HashMap<&'static str, Texture2D>,
@@ -50,8 +48,8 @@ impl Manager {
resources: &Resources,
) -> Result<Self, JsValue> {
// Load the texture of the gaussian kernel
let kernel_filename = resources.get_filename("kernel").unwrap_abort();
let params = &[
let _kernel_filename = resources.get_filename("kernel").unwrap_abort();
let _params = &[
(
WebGl2RenderingContext::TEXTURE_MIN_FILTER,
WebGl2RenderingContext::LINEAR,
@@ -104,10 +102,10 @@ impl Manager {
-1.0_f32, -1.0_f32, 0.0_f32, 0.0_f32, 1.0_f32, -1.0_f32, 1.0_f32, 0.0_f32, 1.0_f32,
1.0_f32, 1.0_f32, 1.0_f32, -1.0_f32, 1.0_f32, 0.0_f32, 1.0_f32,
];
let position = [
let _position = [
-1.0_f32, -1.0_f32, 1.0_f32, -1.0_f32, 1.0_f32, 1.0_f32, -1.0_f32, 1.0_f32,
];
let uv = [
let _uv = [
0.0_f32, 0.0_f32, 1.0_f32, 0.0_f32, 1.0_f32, 1.0_f32, 0.0_f32, 1.0_f32,
];
@@ -243,10 +241,7 @@ impl Manager {
}
} else {
let depth = camera.get_texture_depth().min(7);
let cells: Vec<_> = camera
.get_hpx_cells(depth, CooSystem::ICRS)
.cloned()
.collect();
let cells = camera.get_hpx_cells(depth, CooSystem::ICRS);
for catalog in self.catalogs.values_mut() {
catalog.update(&cells);
@@ -460,7 +455,11 @@ impl Catalog {
#[cfg(feature = "webgl2")]
self.vertex_array_object_catalog
.bind_for_update()
.update_instanced_array("center", VecData(&sources));
.update_instanced_array(
"center",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData(&sources),
);
}
fn draw(

View File

@@ -1,376 +0,0 @@
use al_api::moc::MOC as Cfg;
use std::cmp::Ordering;
use std::ops::Range;
use std::vec;
use crate::camera::CameraViewPort;
use crate::healpix::cell::CellVertices;
use crate::healpix::coverage::HEALPixCoverage;
use crate::math::projection::ProjectionType;
use crate::renderable::coverage::mode::RenderMode;
use crate::renderable::coverage::Angle;
use crate::renderable::coverage::IdxVec;
use crate::renderable::line::PathVertices;
use crate::renderable::line::RasterizedLineRenderer;
use al_api::color::ColorRGBA;
use al_api::coo_system::CooSystem;
use super::mode::Node;
use cgmath::Vector2;
use wasm_bindgen::prelude::*;
pub struct MOC {
pub sky_fraction: f32,
pub max_order: u8,
inner: [Option<MOCIntern>; 3],
}
impl MOC {
pub(super) fn new(moc: &HEALPixCoverage, cfg: &Cfg) -> Self {
let sky_fraction = moc.sky_fraction() as f32;
let max_order = moc.depth_max();
let inner = [
if cfg.perimeter {
// draw only perimeter
Some(MOCIntern::new(
moc,
RenderModeType::Perimeter {
thickness: cfg.line_width,
color: cfg.color,
},
))
} else {
None
},
if cfg.filled {
// change color
let fill_color = cfg.fill_color;
// draw the edges
Some(MOCIntern::new(
moc,
RenderModeType::Filled { color: fill_color },
))
} else {
None
},
if cfg.edges {
Some(MOCIntern::new(
moc,
RenderModeType::Edge {
thickness: cfg.line_width,
color: cfg.color,
},
))
} else {
None
},
];
Self {
inner,
max_order,
sky_fraction,
}
}
pub(super) fn cell_indices_in_view(&mut self, camera: &mut CameraViewPort) {
for render in &mut self.inner {
if let Some(render) = render.as_mut() {
render.cell_indices_in_view(camera);
}
}
}
pub(super) fn num_cells_in_view(&self, camera: &mut CameraViewPort) -> usize {
self.inner
.iter()
.filter_map(|moc| moc.as_ref())
.map(|moc| moc.num_cells_in_view(camera))
.sum()
}
/*pub(super) fn num_vertices_in_view(&self, camera: &mut CameraViewPort) -> usize {
let mut num_vertices = 0;
for render in &self.0 {
if let Some(render) = render.as_ref() {
num_vertices += render.num_vertices_in_view(camera);
}
}
num_vertices
}*/
pub fn sky_fraction(&self) -> f32 {
self.sky_fraction
}
pub fn max_order(&self) -> u8 {
self.max_order
}
pub(super) fn draw(
&self,
camera: &mut CameraViewPort,
proj: &ProjectionType,
rasterizer: &mut RasterizedLineRenderer,
) {
for render in &self.inner {
if let Some(render) = render.as_ref() {
render.draw(camera, proj, rasterizer)
}
}
}
}
struct MOCIntern {
// HEALPix index vector
// Used for fast HEALPix cell retrieval
hpx_idx_vec: IdxVec,
// Node indices in view
indices: Vec<Range<usize>>,
nodes: Vec<Node>,
mode: RenderModeType,
}
#[derive(Clone)]
pub enum RenderModeType {
Perimeter { thickness: f32, color: ColorRGBA },
Edge { thickness: f32, color: ColorRGBA },
Filled { color: ColorRGBA },
}
impl MOCIntern {
fn new(moc: &HEALPixCoverage, mode: RenderModeType) -> Self {
let nodes = match mode {
RenderModeType::Edge { .. } => super::mode::edge::Edge::build(moc),
RenderModeType::Filled { .. } => super::mode::filled::Fill::build(moc),
RenderModeType::Perimeter { .. } => super::mode::perimeter::Perimeter::build(moc),
};
let hpx_idx_vec = IdxVec::from_hpx_cells(nodes.iter().map(|n| &n.cell));
Self {
nodes,
hpx_idx_vec,
indices: vec![],
mode,
}
}
fn cell_indices_in_view(&mut self, camera: &mut CameraViewPort) {
// Cache it for several reuse during the same frame
let view_depth = camera.get_texture_depth();
let cells_iter = camera.get_hpx_cells(view_depth, CooSystem::ICRS);
if self.nodes.is_empty() {
self.indices = vec![0..0];
return;
}
let indices: Vec<_> = if view_depth > 7 {
// Binary search version, we are using this alternative for retrieving
// MOC's cells to render for deep fields of view
let first_cell_rng = &self.nodes[0].cell.z_29_rng();
let last_cell_rng = &self.nodes[self.nodes.len() - 1].cell.z_29_rng();
cells_iter
.filter_map(|cell| {
let cell_rng = cell.z_29_rng();
// Quick rejection test
if cell_rng.end <= first_cell_rng.start || cell_rng.start >= last_cell_rng.end {
None
} else {
let contains_val = |hash_z29: u64| -> Result<usize, usize> {
self.nodes.binary_search_by(|node| {
let node_cell_rng = node.cell.z_29_rng();
if hash_z29 < node_cell_rng.start {
// the node cell range lies entirely after hash_z29
Ordering::Greater
} else if hash_z29 >= node_cell_rng.end {
Ordering::Less
} else {
// the node cell range contains hash_z29
Ordering::Equal
}
})
};
let start_idx = contains_val(cell_rng.start);
let end_idx = contains_val(cell_rng.end);
let cell_indices = match (start_idx, end_idx) {
(Ok(l), Ok(r)) => {
if l == r {
l..(r + 1)
} else {
l..r
}
}
(Err(l), Ok(r)) => l..r,
(Ok(l), Err(r)) => l..r,
(Err(l), Err(r)) => l..r,
};
Some(cell_indices)
}
})
.collect()
} else {
// Index Vector 7 order version
cells_iter
.map(|cell| self.hpx_idx_vec.get_item_indices_inside_hpx_cell(&cell))
.collect()
};
let indices = crate::utils::merge_overlapping_intervals(indices);
self.indices = indices;
}
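// Illustrative sketch, not from the diffed sources: stripped of the HEALPix
// machinery, the `contains_val` closure above is a plain binary search over a sorted
// list of disjoint ranges. `ranges` is assumed sorted by `start` and non-overlapping,
// which is what the z-order-29 ranges of the MOC nodes guarantee.
#[allow(dead_code)]
fn find_range_containing(ranges: &[std::ops::Range<u64>], value: u64) -> Result<usize, usize> {
    ranges.binary_search_by(|r| {
        if value < r.start {
            std::cmp::Ordering::Greater // this range lies entirely after the value
        } else if value >= r.end {
            std::cmp::Ordering::Less // this range lies entirely before the value
        } else {
            std::cmp::Ordering::Equal // this range contains the value
        }
    })
}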
/*fn num_vertices_in_view(&self, camera: &CameraViewPort) -> usize {
self.cells_in_view(camera)
.filter_map(|n| n.vertices.as_ref())
.map(|n_vertices| {
n_vertices
.vertices
.iter()
.map(|edge| edge.len())
.sum::<usize>()
})
.sum()
}*/
fn num_cells_in_view(&self, _camera: &CameraViewPort) -> usize {
self.indices
.iter()
.map(|range| range.end - range.start)
.sum()
}
fn cells_in_view<'a>(&'a self, _camera: &CameraViewPort) -> impl Iterator<Item = &'a Node> {
let nodes = &self.nodes;
self.indices
.iter()
.map(move |indices| nodes[indices.start..indices.end].iter())
.flatten()
}
fn vertices_in_view<'a>(
&'a self,
camera: &mut CameraViewPort,
_projection: &ProjectionType,
) -> impl Iterator<Item = &'a CellVertices> {
self.cells_in_view(camera)
.filter_map(move |node| node.vertices.as_ref())
}
fn draw(
&self,
camera: &mut CameraViewPort,
proj: &ProjectionType,
rasterizer: &mut RasterizedLineRenderer,
) {
// Determine if the view may lead to crossing edges/triangles
// This is dependent on the projection used
let crossing_edges_testing = if proj.is_allsky() {
let sky_percent_covered = camera.get_cov(CooSystem::ICRS).sky_fraction();
//al_core::info!("sky covered: ", sky_percent_covered);
sky_percent_covered > 0.80
} else {
// The projection is not allsky.
false
};
let camera_coosys = camera.get_coo_system();
let paths_iter = self
.vertices_in_view(camera, proj)
.filter_map(|cell_vertices| {
let vertices = &cell_vertices.vertices[..];
let mut ndc: Vec<[f32; 2]> = vec![];
for i in 0..vertices.len() {
let line_vertices = &vertices[i];
for k in 0..line_vertices.len() {
let (lon, lat) = line_vertices[k];
let xyzw = crate::math::lonlat::radec_to_xyzw(Angle(lon), Angle(lat));
let xyzw =
crate::coosys::apply_coo_system(CooSystem::ICRS, camera_coosys, &xyzw);
if let Some(p) = proj.model_to_normalized_device_space(&xyzw, camera) {
if ndc.len() > 0 && crossing_edges_testing {
let mag2 = crate::math::vector::dist2(
crate::math::projection::ndc_to_clip_space(&p, camera).as_ref(),
crate::math::projection::ndc_to_clip_space(
&Vector2::new(
ndc[ndc.len() - 1][0] as f64,
ndc[ndc.len() - 1][1] as f64,
),
camera,
)
.as_ref(),
);
//al_core::info!("mag", i, mag2);
if mag2 > 0.1 {
return None;
}
}
ndc.push([p.x as f32, p.y as f32]);
} else {
return None;
}
}
}
// Check the last
if cell_vertices.closed && crossing_edges_testing {
let mag2 = crate::math::vector::dist2(
crate::math::projection::ndc_to_clip_space(
&Vector2::new(ndc[0][0] as f64, ndc[0][1] as f64),
camera,
)
.as_ref(),
crate::math::projection::ndc_to_clip_space(
&Vector2::new(
ndc[ndc.len() - 1][0] as f64,
ndc[ndc.len() - 1][1] as f64,
),
camera,
)
.as_ref(),
);
if mag2 > 0.1 {
return None;
}
}
Some(PathVertices {
vertices: ndc,
closed: cell_vertices.closed,
})
});
match self.mode {
RenderModeType::Perimeter { thickness, color }
| RenderModeType::Edge { thickness, color } => {
rasterizer.add_stroke_paths(
paths_iter,
thickness,
&color,
&super::line::Style::None,
);
}
RenderModeType::Filled { color } => rasterizer.add_fill_paths(paths_iter, &color),
}
}
}
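// Illustrative sketch, not from the diffed sources: the seam test used in `draw`
// above. Two consecutive vertices of a cell edge that land far apart in clip space
// (squared distance above 0.1 here) have almost certainly wrapped around the
// projection discontinuity, so the whole path is dropped rather than drawing a
// spurious line across the view.
#[allow(dead_code)]
fn crosses_projection_seam(prev_clip: [f64; 2], curr_clip: [f64; 2]) -> bool {
    let dx = curr_clip[0] - prev_clip[0];
    let dy = curr_clip[1] - prev_clip[1];
    dx * dx + dy * dy > 0.1
}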

View File

@@ -1,323 +0,0 @@
use crate::renderable::coverage::moc::MOC;
use crate::{
healpix::{cell::HEALPixCell, coverage::HEALPixCoverage, index_vector::IdxVec},
math::angle::Angle,
CameraViewPort, ShaderManager,
};
mod graph;
pub mod mode;
pub mod hierarchy;
pub mod moc;
use crate::renderable::line::RasterizedLineRenderer;
use super::utils::triangle::Triangle;
use wasm_bindgen::JsValue;
use hierarchy::MOCHierarchy;
use al_api::coo_system::CooSystem;
use al_api::moc::MOC as Cfg;
pub struct MOCRenderer {
mocs: Vec<MOCHierarchy>,
cfgs: Vec<Cfg>,
}
use cgmath::Vector2;
fn is_crossing_projection(
cell: &HEALPixCell,
camera: &CameraViewPort,
projection: &ProjectionType,
) -> bool {
let vertices = cell
.path_along_cell_edge(1)
.iter()
.filter_map(|(lon, lat)| {
let xyzw = crate::math::lonlat::radec_to_xyzw(Angle(*lon), Angle(*lat));
let xyzw =
crate::coosys::apply_coo_system(CooSystem::ICRS, camera.get_coo_system(), &xyzw);
projection
.model_to_normalized_device_space(&xyzw, camera)
.map(|v| [v.x as f32, v.y as f32])
})
.collect::<Vec<_>>();
let cell_inside = vertices.len() == 4;
if cell_inside {
let c0 = &vertices[0];
let c1 = &vertices[1];
let c2 = &vertices[2];
let c3 = &vertices[3];
let t0 = Triangle::new(c0, c1, c2);
let t2 = Triangle::new(c2, c3, c0);
t0.is_invalid(camera) || t2.is_invalid(camera)
} else {
true
}
}
use al_api::cell::HEALPixCellProjeted;
pub fn rasterize_hpx_cell(
cell: &HEALPixCell,
n_segment_by_side: usize,
camera: &CameraViewPort,
idx_off: &mut u32,
proj: &ProjectionType,
) -> Option<(Vec<f32>, Vec<u32>)> {
let n_vertices_per_segment = n_segment_by_side + 1;
let vertices = cell
.grid(n_segment_by_side as u32)
.iter()
.filter_map(|(lon, lat)| {
let xyzw = crate::math::lonlat::radec_to_xyzw(Angle(*lon), Angle(*lat));
let xyzw =
crate::coosys::apply_coo_system(CooSystem::ICRS, camera.get_coo_system(), &xyzw);
proj.model_to_normalized_device_space(&xyzw, camera)
.map(|v| [v.x as f32, v.y as f32])
})
.flatten()
.collect::<Vec<_>>();
let cell_inside = vertices.len() == 2 * (n_segment_by_side + 1) * (n_segment_by_side + 1);
if cell_inside {
// Generate the iterator: idx_off + 1, idx_off + 1, .., idx_off + 4*n_segment - 1, idx_off + 4*n_segment - 1
let mut indices = Vec::with_capacity(n_segment_by_side * n_segment_by_side * 6);
let num_vertices = (n_segment_by_side + 1) * (n_segment_by_side + 1);
let longitude_reversed = camera.get_longitude_reversed();
let invalid_tri = |tri_ccw: bool, reversed_longitude: bool| -> bool {
(!reversed_longitude && !tri_ccw) || (reversed_longitude && tri_ccw)
};
for i in 0..n_segment_by_side {
for j in 0..n_segment_by_side {
let idx_0 = j + i * n_vertices_per_segment;
let idx_1 = j + 1 + i * n_vertices_per_segment;
let idx_2 = j + (i + 1) * n_vertices_per_segment;
let idx_3 = j + 1 + (i + 1) * n_vertices_per_segment;
let c0 = crate::math::projection::ndc_to_screen_space(
&Vector2::new(vertices[2 * idx_0] as f64, vertices[2 * idx_0 + 1] as f64),
camera,
);
let c1 = crate::math::projection::ndc_to_screen_space(
&Vector2::new(vertices[2 * idx_1] as f64, vertices[2 * idx_1 + 1] as f64),
camera,
);
let c2 = crate::math::projection::ndc_to_screen_space(
&Vector2::new(vertices[2 * idx_2] as f64, vertices[2 * idx_2 + 1] as f64),
camera,
);
let c3 = crate::math::projection::ndc_to_screen_space(
&Vector2::new(vertices[2 * idx_3] as f64, vertices[2 * idx_3 + 1] as f64),
camera,
);
let first_tri_ccw = !crate::math::vector::ccw_tri(&c0, &c1, &c2);
let second_tri_ccw = !crate::math::vector::ccw_tri(&c1, &c3, &c2);
if invalid_tri(first_tri_ccw, longitude_reversed)
|| invalid_tri(second_tri_ccw, longitude_reversed)
{
return None;
}
let vx = [c0.x, c1.x, c2.x, c3.x];
let vy = [c0.y, c1.y, c2.y, c3.y];
let projeted_cell = HEALPixCellProjeted {
ipix: cell.idx(),
vx,
vy,
};
crate::camera::view_hpx_cells::project(projeted_cell, camera, proj)?;
indices.push(*idx_off + idx_0 as u32);
indices.push(*idx_off + idx_1 as u32);
indices.push(*idx_off + idx_2 as u32);
indices.push(*idx_off + idx_1 as u32);
indices.push(*idx_off + idx_3 as u32);
indices.push(*idx_off + idx_2 as u32);
}
}
*idx_off += num_vertices as u32;
Some((vertices, indices))
} else {
None
}
}
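// Illustrative sketch, not from the diffed sources: the index layout produced by the
// loop in `rasterize_hpx_cell`. For n segments per side there are (n + 1) x (n + 1)
// vertices stored row by row, and each quad is split into the two triangles
// (0, 1, 2) and (1, 3, 2), exactly as pushed above.
#[allow(dead_code)]
fn quad_indices_sketch(n_segment_by_side: usize) -> Vec<u32> {
    let stride = n_segment_by_side + 1;
    let mut indices = Vec::with_capacity(n_segment_by_side * n_segment_by_side * 6);
    for i in 0..n_segment_by_side {
        for j in 0..n_segment_by_side {
            let idx_0 = (j + i * stride) as u32;
            let idx_1 = idx_0 + 1;
            let idx_2 = (j + (i + 1) * stride) as u32;
            let idx_3 = idx_2 + 1;
            indices.extend_from_slice(&[idx_0, idx_1, idx_2, idx_1, idx_3, idx_2]);
        }
    }
    indices
}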
use crate::ProjectionType;
use super::line;
impl MOCRenderer {
pub fn new() -> Result<Self, JsValue> {
// layout (location = 0) in vec2 ndc_pos;
//let vertices = vec![0.0; MAX_NUM_FLOATS_TO_DRAW];
//let indices = vec![0_u16; MAX_NUM_INDICES_TO_DRAW];
//let vertices = vec![];
/*let position = vec![];
let indices = vec![];
#[cfg(feature = "webgl2")]
vao.bind_for_update()
.add_array_buffer_single(
2,
"ndc_pos",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&position),
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<u32>(&indices),
)
.unbind();
#[cfg(feature = "webgl1")]
vao.bind_for_update()
.add_array_buffer(
2,
"ndc_pos",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&position),
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<u32>(&indices),
)
.unbind();
*/
let mocs = Vec::new();
let cfgs = Vec::new();
Ok(Self { mocs, cfgs })
}
pub fn push_back(
&mut self,
moc: HEALPixCoverage,
cfg: Cfg,
camera: &mut CameraViewPort,
proj: &ProjectionType,
) {
self.mocs.push(MOCHierarchy::from_full_res_moc(moc, &cfg));
self.cfgs.push(cfg);
camera.register_view_frame(CooSystem::ICRS, proj);
//self.layers.push(key);
}
pub fn get_hpx_coverage(&self, cfg: &Cfg) -> Option<&HEALPixCoverage> {
let name = cfg.get_uuid();
if let Some(idx) = self.cfgs.iter().position(|cfg| cfg.get_uuid() == name) {
Some(&self.mocs[idx].get_full_moc())
} else {
None
}
}
pub fn remove(
&mut self,
cfg: &Cfg,
camera: &mut CameraViewPort,
proj: &ProjectionType,
) -> Option<Cfg> {
let name = cfg.get_uuid();
if let Some(idx) = self.cfgs.iter().position(|cfg| cfg.get_uuid() == name) {
self.mocs.remove(idx);
camera.unregister_view_frame(CooSystem::ICRS, proj);
Some(self.cfgs.remove(idx))
} else {
None
}
}
pub fn set_cfg(
&mut self,
cfg: Cfg,
camera: &mut CameraViewPort,
projection: &ProjectionType,
line_renderer: &mut RasterizedLineRenderer,
) -> Option<Cfg> {
let name = cfg.get_uuid();
if let Some(idx) = self.cfgs.iter().position(|cfg| cfg.get_uuid() == name) {
let old_cfg = self.cfgs[idx].clone();
self.cfgs[idx] = cfg;
self.update(camera, projection, line_renderer);
Some(old_cfg)
} else {
// the cfg has not been found
None
}
}
/*pub fn get(&self, cfg: &Cfg) -> Option<&HEALPixCoverage> {
let key = cfg.get_uuid();
self.mocs.get(key).map(|coverage| coverage.get_full_moc())
}*/
fn update(
&mut self,
camera: &mut CameraViewPort,
proj: &ProjectionType,
line_renderer: &mut RasterizedLineRenderer,
) {
for (hmoc, cfg) in self.mocs.iter_mut().zip(self.cfgs.iter()) {
if cfg.show {
let moc = hmoc.select_moc_from_view(camera);
moc.draw(camera, proj, line_renderer);
}
}
/*self.vao.bind_for_update()
.update_array(
"ndc_pos",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData(&self.position),
)
.update_element_array(
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<u32>(&self.indices),
);*/
}
pub fn is_empty(&self) -> bool {
self.cfgs.is_empty()
}
pub fn draw(
&mut self,
_shaders: &mut ShaderManager,
camera: &mut CameraViewPort,
projection: &ProjectionType,
line_renderer: &mut RasterizedLineRenderer,
) {
if self.is_empty() {
return;
}
self.update(camera, projection, line_renderer);
}
}

View File

@@ -6,9 +6,9 @@ use crate::ProjectionType;
use cgmath::InnerSpace;
use cgmath::Vector3;
use crate::grid::XYScreen;
use crate::math::angle::SerializeFmt;
use crate::math::lonlat::LonLat;
use crate::math::projection::coo_space::XYScreen;
use crate::math::TWICE_PI;
use crate::math::angle::ToAngle;
@@ -26,7 +26,7 @@ pub enum LabelOptions {
#[derive(Debug)]
pub struct Label {
// The position
pub position: XYScreen,
pub position: XYScreen<f64>,
// the string content
pub content: String,
// in radians
@@ -39,7 +39,7 @@ impl Label {
options: LabelOptions,
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: &SerializeFmt,
_fmt: &SerializeFmt,
) -> Option<Self> {
let fov = camera.get_field_of_view();
let d = if fov.contains_north_pole() {

View File

@@ -7,7 +7,7 @@ use core::ops::Range;
use crate::math::MINUS_HALF_PI;
use crate::ProjectionType;
use crate::grid::angle::SerializeFmt;
use super::angle::SerializeFmt;
use crate::math::HALF_PI;
pub fn get_intersecting_meridian(

View File

@@ -2,23 +2,22 @@ pub mod label;
pub mod meridian;
pub mod parallel;
use crate::grid::parallel::Parallel;
use crate::math::projection::coo_space::XYScreen;
use crate::shader::ShaderManager;
use crate::Abort;
use al_core::VecData;
use parallel::Parallel;
use crate::camera::CameraViewPort;
use crate::math::angle;
use crate::math::HALF_PI;
use crate::renderable::line;
use crate::renderable::line::PathVertices;
use crate::renderable::Renderer;
use crate::ProjectionType;
use al_api::color::ColorRGBA;
use al_api::grid::GridCfg;
use cgmath::InnerSpace;
use al_core::VertexArrayObject;
use al_core::WebGlContext;
use web_sys::WebGl2RenderingContext;
use crate::grid::label::Label;
use label::Label;
pub struct ProjetedGrid {
// Properties
pub color: ColorRGBA,
@@ -31,23 +30,22 @@ pub struct ProjetedGrid {
text_renderer: TextRenderManager,
fmt: angle::SerializeFmt,
line_style: line::Style,
//line_style: line::Style,
meridians: Vec<Meridian>,
parallels: Vec<Parallel>,
vao: VertexArrayObject,
gl: WebGlContext,
}
use crate::shader::ShaderManager;
use wasm_bindgen::JsValue;
use crate::renderable::line::RasterizedLineRenderer;
use crate::renderable::text::TextRenderManager;
use crate::renderable::Renderer;
use wasm_bindgen::JsValue;
use web_sys::HtmlElement;
use self::meridian::Meridian;
impl ProjetedGrid {
pub fn new(aladin_div: &HtmlElement) -> Result<ProjetedGrid, JsValue> {
pub fn new(gl: WebGlContext, aladin_div: &HtmlElement) -> Result<ProjetedGrid, JsValue> {
let text_renderer = TextRenderManager::new(aladin_div)?;
let color = ColorRGBA {
@@ -59,15 +57,44 @@ impl ProjetedGrid {
let show_labels = true;
let enabled = false;
let label_scale = 1.0;
let line_style = line::Style::None;
//let line_style = line::Style::None;
let fmt = angle::SerializeFmt::DMS;
let thickness = 2.0;
let meridians = Vec::new();
let parallels = Vec::new();
let mut vao = VertexArrayObject::new(&gl);
vao.bind_for_update()
// Store the two NDC endpoints of each grid line segment in an instanced VBO
.add_instanced_array_buffer(
"ndc_pos",
4 * std::mem::size_of::<f32>(),
&[2, 2],
&[0, 2 * std::mem::size_of::<f32>()],
WebGl2RenderingContext::DYNAMIC_DRAW,
&[] as &[f32],
)
.add_array_buffer(
"vertices",
2 * std::mem::size_of::<f32>(),
&[2],
&[0],
WebGl2RenderingContext::STATIC_DRAW,
&[
0_f32, -0.5_f32, 1_f32, -0.5_f32, 1_f32, 0.5_f32, 0_f32, 0.5_f32,
] as &[f32],
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::STATIC_DRAW,
&[0_u16, 1_u16, 2_u16, 0_u16, 2_u16, 3_u16] as &[u16],
)
// Unbind the buffer
.unbind();
let grid = ProjetedGrid {
color,
line_style,
//line_style,
show_labels,
enabled,
label_scale,
@@ -77,6 +104,9 @@ impl ProjetedGrid {
meridians,
parallels,
fmt,
vao,
gl,
};
// Initialize the vertices & labels
//grid.force_update(camera, projection, line_renderer);
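
A note on the instanced buffers set up just above: the static `vertices` quad spans x in [0, 1] and y in [-0.5, 0.5], and each `ndc_pos` instance carries the two NDC endpoints of one grid segment. The thick-line expansion itself happens in the instanced vertex shader (`line_inst_ndc.vert`, the one bound in the draw call further down); the following standalone sketch only illustrates the math such a shader is assumed to perform and is not code from this changeset.

```rust
// Standalone illustration (not from this changeset): how a unit quad plus
// per-instance segment endpoints becomes a thick line segment on screen.

/// The static quad used as the instanced mesh: x in [0, 1], y in [-0.5, 0.5].
const QUAD: [[f32; 2]; 4] = [[0.0, -0.5], [1.0, -0.5], [1.0, 0.5], [0.0, 0.5]];

/// Expand one quad corner along the segment a -> b with the given width,
/// mimicking what an instanced line vertex shader typically does.
fn expand(corner: [f32; 2], a: [f32; 2], b: [f32; 2], width: f32) -> [f32; 2] {
    let dir = [b[0] - a[0], b[1] - a[1]];
    let len = (dir[0] * dir[0] + dir[1] * dir[1]).sqrt();
    // Unit normal of the segment.
    let n = [-dir[1] / len, dir[0] / len];
    // corner.x in [0, 1] interpolates along the segment,
    // corner.y in [-0.5, 0.5] offsets across it by `width`.
    [
        a[0] + corner[0] * dir[0] + corner[1] * width * n[0],
        a[1] + corner[0] * dir[1] + corner[1] * width * n[1],
    ]
}

fn main() {
    // One instance: a segment from (-0.2, 0.0) to (0.4, 0.3) in NDC, 0.01 wide.
    let (a, b, w) = ([-0.2_f32, 0.0], [0.4_f32, 0.3], 0.01);
    for corner in QUAD {
        println!("{:?}", expand(corner, a, b, w));
    }
}
```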
@@ -144,90 +174,7 @@ impl ProjetedGrid {
Ok(())
}
// Update the grid whenever the camera moved
fn update(
&mut self,
camera: &CameraViewPort,
projection: &ProjectionType,
rasterizer: &mut RasterizedLineRenderer,
) -> Result<(), JsValue> {
let fov = camera.get_field_of_view();
let bbox = fov.get_bounding_box();
let max_dim_px = camera.get_width().max(camera.get_height()) as f64;
let step_line_px = max_dim_px * 0.2;
// update meridians
self.meridians = {
// Select a suitable step with a binary search
let step_lon_precised =
(bbox.get_lon_size() as f64) * step_line_px / (camera.get_width() as f64);
let step_lon = select_fixed_step(step_lon_precised);
// Add meridians
let start_lon = bbox.lon_min() - (bbox.lon_min() % step_lon);
let mut stop_lon = bbox.lon_max();
if bbox.all_lon() {
stop_lon -= 1e-3;
}
let mut meridians = vec![];
let mut lon = start_lon;
while lon < stop_lon {
if let Some(p) =
meridian::get_intersecting_meridian(lon, camera, projection, &self.fmt)
{
meridians.push(p);
}
lon += step_lon;
}
meridians
};
self.parallels = {
let step_lat_precised =
(bbox.get_lat_size() as f64) * step_line_px / (camera.get_height() as f64);
let step_lat = select_fixed_step(step_lat_precised);
let mut start_lat = bbox.lat_min() - (bbox.lat_min() % step_lat);
if start_lat == -HALF_PI {
start_lat += step_lat;
}
let stop_lat = bbox.lat_max();
let mut lat = start_lat;
let mut parallels = vec![];
while lat < stop_lat {
if let Some(p) = parallel::get_intersecting_parallel(lat, camera, projection) {
parallels.push(p);
}
lat += step_lat;
}
parallels
};
// update the line buffers
let paths = self
.meridians
.iter()
.map(|meridian| meridian.get_lines_vertices())
.chain(
self.parallels
.iter()
.map(|parallel| parallel.get_lines_vertices()),
)
.flatten()
.map(|vertices| PathVertices {
closed: false,
vertices,
});
let m = camera.get_screen_size().magnitude();
rasterizer.add_stroke_paths(paths, self.thickness, &self.color, &self.line_style);
Ok(())
}
pub fn draw_labels(&mut self, camera: &CameraViewPort) -> Result<(), JsValue> {
pub fn draw_labels(&mut self) -> Result<(), JsValue> {
if self.enabled && self.show_labels {
let labels = self
.meridians
@@ -235,7 +182,7 @@ impl ProjetedGrid {
.filter_map(|m| m.get_label())
.chain(self.parallels.iter().filter_map(|p| p.get_label()));
let dpi = camera.get_dpi();
//let dpi = camera.get_dpi();
self.text_renderer.begin();
for Label {
content,
@@ -256,12 +203,107 @@ impl ProjetedGrid {
pub fn draw(
&mut self,
camera: &CameraViewPort,
_shaders: &mut ShaderManager,
projection: &ProjectionType,
rasterizer: &mut RasterizedLineRenderer,
shaders: &mut ShaderManager,
) -> Result<(), JsValue> {
if self.enabled {
self.update(camera, projection, rasterizer)?;
let fov = camera.get_field_of_view();
let bbox = fov.get_bounding_box();
let max_dim_px = camera.get_width().max(camera.get_height()) as f64;
let step_line_px = max_dim_px * 0.2;
// update meridians
self.meridians = {
// Select a suitable step with a binary search
let step_lon_precised =
(bbox.get_lon_size() as f64) * step_line_px / (camera.get_width() as f64);
let step_lon = select_fixed_step(step_lon_precised);
// Add meridians
let start_lon = bbox.lon_min() - (bbox.lon_min() % step_lon);
let mut stop_lon = bbox.lon_max();
if bbox.all_lon() {
stop_lon -= 1e-3;
}
let mut meridians = vec![];
let mut lon = start_lon;
while lon < stop_lon {
if let Some(p) =
meridian::get_intersecting_meridian(lon, camera, projection, &self.fmt)
{
meridians.push(p);
}
lon += step_lon;
}
meridians
};
self.parallels = {
let step_lat_precised =
(bbox.get_lat_size() as f64) * step_line_px / (camera.get_height() as f64);
let step_lat = select_fixed_step(step_lat_precised);
let mut start_lat = bbox.lat_min() - (bbox.lat_min() % step_lat);
if start_lat == -HALF_PI {
start_lat += step_lat;
}
let stop_lat = bbox.lat_max();
let mut lat = start_lat;
let mut parallels = vec![];
while lat < stop_lat {
if let Some(p) = parallel::get_intersecting_parallel(lat, camera, projection) {
parallels.push(p);
}
lat += step_lat;
}
parallels
};
// update the line buffers
let paths = self
.meridians
.iter()
.map(|meridian| meridian.get_lines_vertices())
.chain(
self.parallels
.iter()
.map(|parallel| parallel.get_lines_vertices()),
)
.flatten();
let mut buf: Vec<f32> = vec![];
for vertices in paths {
let vertices = vertices.as_ref();
let path_vertices_buf_iter = vertices
.iter()
.zip(vertices.iter().skip(1))
.map(|(a, b)| [a[0], a[1], b[0], b[1]])
.flatten();
buf.extend(path_vertices_buf_iter);
}
self.vao.bind_for_update().update_instanced_array(
"ndc_pos",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData(&buf),
);
let num_instances = buf.len() / 4;
crate::shader::get_shader(&self.gl, shaders, "line_inst_ndc.vert", "line_base.frag")?
.bind(&self.gl)
.attach_uniform("u_color", &self.color)
.attach_uniform("u_width", &self.thickness)
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_instanced_with_i32(
WebGl2RenderingContext::TRIANGLES,
0,
num_instances as i32,
);
}
Ok(())
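
`select_fixed_step` itself is not part of this diff; judging from its call sites it converts the "ideal" angular step (about 20% of the largest screen dimension, expressed as a longitude or latitude span) into a round value. Below is a hedged, standalone sketch of that idea with an assumed step table; the real table and the binary search mentioned in the comment above are not reproduced here.

```rust
// Standalone sketch (assumption): snap a precise angular step to a "round"
// grid step, as a function like `select_fixed_step` is expected to do.
fn select_round_step(step_rad: f64) -> f64 {
    // Candidate steps in degrees, from coarse to fine (illustrative values).
    const STEPS_DEG: [f64; 12] = [
        90.0, 45.0, 30.0, 15.0, 10.0, 5.0, 2.0, 1.0, 0.5, 0.25, 0.1, 0.05,
    ];
    let step_deg = step_rad.to_degrees();
    // Keep the first candidate that is not larger than the precise step,
    // falling back to the finest one.
    STEPS_DEG
        .iter()
        .copied()
        .find(|&s| s <= step_deg)
        .unwrap_or(STEPS_DEG[STEPS_DEG.len() - 1])
        .to_radians()
}

fn main() {
    // Example: a 40 deg wide view on a 1024 px wide screen.
    // step_line_px = 0.2 * 1024 = 204.8 px, so the "precise" longitude step is
    // 40 deg * 204.8 / 1024 = 8 deg, which snaps down to 5 deg here.
    let step_lon_precised = 40_f64.to_radians() * (0.2 * 1024.0) / 1024.0;
    println!("{} deg", select_round_step(step_lon_precised).to_degrees());
}
```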

View File

@@ -7,12 +7,12 @@ use al_api::hips::ImageMetadata;
use al_core::colormap::Colormap;
use al_core::colormap::Colormaps;
use al_core::image::format::ChannelType;
use al_core::image::format::ImageFormatType;
use al_core::image::Image;
use al_core::log::console_log;
use al_core::shader::Shader;
use al_core::webgl_ctx::GlWrapper;
use al_core::Texture2DArray;
use al_core::VecData;
use al_core::VertexArrayObject;
use al_core::WebGlContext;
@@ -21,16 +21,16 @@ use crate::math::{angle::Angle, vector::dist2};
use crate::ProjectionType;
use crate::camera::CameraViewPort;
use crate::renderable::utils::index_patch::DefaultPatchIndexIter;
use crate::{math::lonlat::LonLatT, utils};
use crate::{shader::ShaderManager, survey::config::HiPSConfig};
use crate::downloader::request::allsky::Allsky;
use crate::healpix::{cell::HEALPixCell, coverage::HEALPixCoverage};
use crate::math::angle::ToAngle;
use crate::math::lonlat::LonLat;
use crate::renderable::utils::index_patch::DefaultPatchIndexIter;
use crate::time::Time;
use al_core::log;
use std::collections::HashSet;
// Recursively compute the number of subdivision needed for a cell
@@ -42,14 +42,12 @@ use crate::survey::texture::Texture;
use raytracing::RayTracer;
use uv::{TileCorner, TileUVW};
use cgmath::{Matrix, Matrix4};
use cgmath::Matrix;
use std::fmt::Debug;
use std::rc::Rc;
use wasm_bindgen::JsValue;
use web_sys::WebGl2RenderingContext;
use super::utils::index_patch::CCWCheckPatchIndexIter;
const M: f64 = 280.0 * 280.0;
const N: f64 = 150.0 * 150.0;
const RAP: f64 = 0.7;
@@ -60,7 +58,7 @@ fn is_too_large(cell: &HEALPixCell, camera: &CameraViewPort, projection: &Projec
.iter()
.filter_map(|(lon, lat)| {
let vertex = crate::math::lonlat::radec_to_xyzw(Angle(*lon), Angle(*lat));
projection.view_to_screen_space(&vertex, camera)
projection.icrs_celestial_to_screen_space(&vertex, camera)
})
.collect::<Vec<_>>();
@@ -100,7 +98,7 @@ fn num_subdivision(cell: &HEALPixCell, camera: &CameraViewPort, projection: &Pro
let skewed_factor = (center_to_vertex_dist - smallest_center_to_vertex_dist)
/ (largest_center_to_vertex_dist - smallest_center_to_vertex_dist);
if is_too_large(cell, camera, projection) || cell.is_on_pole() || skewed_factor > 0.25 {
if skewed_factor > 0.25 || is_too_large(cell, camera, projection) || cell.is_on_pole() {
num_sub += 1;
}
@@ -286,28 +284,33 @@ pub fn get_raster_shader<'a>(
config: &HiPSConfig,
) -> Result<&'a Shader, JsValue> {
if config.get_format().is_colored() && cmap.label() == "native" {
crate::shader::get_shader(gl, shaders, "RasterizerVS", "RasterizerColorFS")
crate::shader::get_shader(
gl,
shaders,
"hips_rasterizer_raster.vert",
"hips_rasterizer_color.frag",
)
} else {
if config.tex_storing_unsigned_int {
crate::shader::get_shader(
gl,
shaders,
"RasterizerVS",
"RasterizerGrayscale2ColormapUnsignedFS",
"hips_rasterizer_raster.vert",
"hips_rasterizer_grayscale_to_colormap_u.frag",
)
} else if config.tex_storing_integers {
crate::shader::get_shader(
gl,
shaders,
"RasterizerVS",
"RasterizerGrayscale2ColormapIntegerFS",
"hips_rasterizer_raster.vert",
"hips_rasterizer_grayscale_to_colormap_i.frag",
)
} else {
crate::shader::get_shader(
gl,
shaders,
"RasterizerVS",
"RasterizerGrayscale2ColormapFS",
"hips_rasterizer_raster.vert",
"hips_rasterizer_grayscale_to_colormap.frag",
)
}
}
@@ -321,24 +324,34 @@ pub fn get_raytracer_shader<'a>(
) -> Result<&'a Shader, JsValue> {
//let colored_hips = config.is_colored();
if config.get_format().is_colored() && cmap.label() == "native" {
crate::shader::get_shader(gl, shaders, "RayTracerVS", "RayTracerColorFS")
crate::shader::get_shader(
gl,
shaders,
"hips_raytracer_raytracer.vert",
"hips_raytracer_color.frag",
)
} else {
if config.tex_storing_unsigned_int {
crate::shader::get_shader(
gl,
shaders,
"RayTracerVS",
"RayTracerGrayscale2ColormapUnsignedFS",
"hips_raytracer_raytracer.vert",
"hips_raytracer_grayscale_to_colormap_u.frag",
)
} else if config.tex_storing_integers {
crate::shader::get_shader(
gl,
shaders,
"RayTracerVS",
"RayTracerGrayscale2ColormapIntegerFS",
"hips_raytracer_raytracer.vert",
"hips_raytracer_grayscale_to_colormap_i.frag",
)
} else {
crate::shader::get_shader(gl, shaders, "RayTracerVS", "RayTracerGrayscale2ColormapFS")
crate::shader::get_shader(
gl,
shaders,
"hips_raytracer_raytracer.vert",
"hips_raytracer_grayscale_to_colormap.frag",
)
}
}
}
@@ -380,6 +393,9 @@ pub struct HiPS {
//min_depth_tile: u8,
footprint_moc: Option<HEALPixCoverage>,
// A buffer storing the cells in the view
hpx_cells_in_view: Vec<HEALPixCell>,
}
impl HiPS {
@@ -499,6 +515,7 @@ impl HiPS {
let gl = gl.clone();
let footprint_moc = None;
let hpx_cells_in_view = vec![];
// request the allsky texture
Ok(HiPS {
// The image survey texture buffer
@@ -517,14 +534,15 @@ impl HiPS {
m1,
idx_vertices,
//min_depth_tile,
footprint_moc,
hpx_cells_in_view,
})
}
pub fn look_for_new_tiles<'a>(
&'a mut self,
camera: &'a mut CameraViewPort,
camera: &'a CameraViewPort,
proj: &ProjectionType,
) -> Option<impl Iterator<Item = HEALPixCell> + 'a> {
// do not add tiles if the view is already at depth 0
@@ -559,9 +577,9 @@ impl HiPS {
// let texture_cell = cell.get_texture_cell(delta_depth);
// texture_cell.get_tile_cells(delta_depth)
//})
.into_iter()
.flat_map(move |tile_cell| {
let tex_cell = tile_cell.get_texture_cell(dd);
//console_log(&format!("{:?}, dd:{:?}", tex_cell, dd));
tex_cell.get_tile_cells(dd)
})
.filter(move |tile_cell| {
@@ -607,13 +625,42 @@ impl HiPS {
pub fn update(&mut self, camera: &mut CameraViewPort, projection: &ProjectionType) {
let raytracing = camera.is_raytracing(projection);
let vertices_recomputation_needed =
!raytracing && (self.textures.reset_available_tiles() | camera.has_moved());
if vertices_recomputation_needed {
if raytracing {
return;
}
// rasterizer mode
let available_tiles = self.textures.reset_available_tiles();
let new_cells_in_view = self.retrieve_cells_in_camera(camera);
if new_cells_in_view || available_tiles {
self.recompute_vertices(camera, projection);
}
}
// Returns true if the set of HEALPix cells in the view has changed since the last frame
pub fn retrieve_cells_in_camera(&mut self, camera: &CameraViewPort) -> bool {
let cfg = self.textures.config();
// Get the coo system transformation matrix
let hips_frame = cfg.get_frame();
let depth = camera.get_texture_depth().min(cfg.get_max_depth_texture());
let hpx_cells_in_view = camera.get_hpx_cells(depth, hips_frame);
let new_cells = if hpx_cells_in_view.len() != self.hpx_cells_in_view.len() {
true
} else {
!self
.hpx_cells_in_view
.iter()
.zip(hpx_cells_in_view.iter())
.all(|(&a, &b)| a == b)
};
self.hpx_cells_in_view = hpx_cells_in_view;
new_cells
}
#[inline]
pub fn set_moc(&mut self, moc: HEALPixCoverage) {
self.footprint_moc = Some(moc);
@@ -695,35 +742,29 @@ impl HiPS {
let cfg = self.textures.config();
// Get the coo system transformation matrix
let selected_frame = camera.get_coo_system();
let channel = cfg.get_format().get_channel();
let hips_frame = cfg.get_frame();
// Retrieve the model and inverse model matrix
let mut off_indices = 0;
let depth = camera.get_texture_depth().min(cfg.get_max_depth_texture());
let view_cells: Vec<_> = camera.get_hpx_cells(depth, hips_frame).cloned().collect();
for cell in &view_cells {
for cell in &self.hpx_cells_in_view {
// filter textures that are not in the moc
let cell = if let Some(moc) = self.footprint_moc.as_ref() {
if moc.intersects_cell(cell) {
Some(cell)
if moc.intersects_cell(&cell) {
Some(&cell)
} else {
if channel == ChannelType::RGB8U {
// The rasterizer does not render tiles that are outside the MOC.
// This is not a problem for HiPS rendered with transparency (FITS or PNG),
// but JPEG tiles are opaque and show black where no pixel data is found,
// so we must still draw the tiles outside the HiPS MOC in black.
Some(cell)
Some(&cell)
} else {
None
}
}
} else {
Some(cell)
Some(&cell)
};
if let Some(cell) = cell {
@@ -811,22 +852,20 @@ impl HiPS {
let n_vertices_per_segment = n_segments_by_side + 1;
let mut pos = vec![];
for (idx, lonlat) in
crate::healpix::utils::grid_lonlat::<f64>(cell, n_segments_by_side as u16)
.iter()
.enumerate()
{
let lon = lonlat.lon();
let lat = lonlat.lat();
let mut pos = Vec::with_capacity((n_segments_by_side + 1) * 4);
let xyzw = crate::math::lonlat::radec_to_xyzw(lon, lat);
let xyzw =
crate::coosys::apply_coo_system(hips_frame, selected_frame, &xyzw);
let grid_lonlat =
healpix::nested::grid(cell.depth(), cell.idx(), n_segments_by_side as u16);
let grid_lonlat_iter = grid_lonlat.into_iter();
let ndc = projection
.model_to_normalized_device_space(&xyzw, camera)
.map(|v| [v.x as f32, v.y as f32]);
for (idx, &(lon, lat)) in grid_lonlat_iter.enumerate() {
//let xyzw = crate::math::lonlat::radec_to_xyzw(lon, lat);
//let xyzw =
// crate::coosys::apply_coo_system(hips_frame, selected_frame, &xyzw);
//let ndc = projection
// .model_to_normalized_device_space(&xyzw, camera)
// .map(|v| [v.x as f32, v.y as f32]);
let i: usize = idx / n_vertices_per_segment;
let j: usize = idx % n_vertices_per_segment;
@@ -857,15 +896,13 @@ impl HiPS {
self.m1.push(miss_1);
self.time_tile_received.push(start_time);
pos.push(ndc);
pos.push([lon as f32, lat as f32]);
}
let patch_indices_iter = CCWCheckPatchIndexIter::new(
let patch_indices_iter = DefaultPatchIndexIter::new(
&(0..=n_segments_by_side),
&(0..=n_segments_by_side),
n_vertices_per_segment,
&pos,
camera,
)
.flatten()
.map(|indices| {
@@ -883,7 +920,7 @@ impl HiPS {
// Replace options with an arbitrary vertex
let position_iter = pos
.into_iter()
.map(|ndc| ndc.unwrap_or([0.0, 0.0]))
//.map(|ndc| ndc.unwrap_or([0.0, 0.0]))
.flatten();
self.position.extend(position_iter);
}
@@ -1017,10 +1054,6 @@ impl HiPS {
let hips_frame = hips_cfg.get_frame();
let c = selected_frame.to(hips_frame);
// Retrieve the model and inverse model matrix
let w2v = c * (*camera.get_w2m());
let v2w = w2v.transpose();
let raytracing = camera.is_raytracing(proj);
let config = self.get_config();
@@ -1041,6 +1074,8 @@ impl HiPS {
blend_cfg.enable(&self.gl, || {
if raytracing {
let w2v = c * (*camera.get_w2m());
let shader = get_raytracer_shader(cmap, &self.gl, shaders, &config)?;
let shader = shader.bind(&self.gl);
@@ -1051,13 +1086,14 @@ impl HiPS {
.attach_uniforms_with_params_from(cmap, colormaps)
.attach_uniforms_from(color)
.attach_uniform("model", &w2v)
.attach_uniform("inv_model", &v2w)
.attach_uniform("current_time", &utils::get_current_time())
.attach_uniform("opacity", opacity)
.attach_uniforms_from(colormaps);
raytracer.draw(&shader);
} else {
let v2w = (*camera.get_m2w()) * c.transpose();
// The rasterizer has a buffer containing:
// - The vertices of the HEALPix cells for the most refined survey
// - The starting and ending uv for the blending animation
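
About the `w2v` / `v2w` changes in the two hunks above: the old code built `v2w` as `w2v.transpose()`, the new one builds it as `(*camera.get_m2w()) * c.transpose()`. Both rely on the same fact, namely that the inverse of a rotation is its transpose, so (c * w2m)^-1 = m2w * c^T. Here is a small self-contained check of that identity with plain 3x3 arrays; the crate uses 4x4 cgmath matrices, but the argument is the same as long as they are pure rotations.

```rust
// Minimal check that for rotation matrices R, S: (R * S)^-1 == S^T * R^T,
// which is why `v2w` can be built as m2w * c.transpose().
type Mat3 = [[f64; 3]; 3];

fn mul(a: &Mat3, b: &Mat3) -> Mat3 {
    let mut m = [[0.0; 3]; 3];
    for i in 0..3 {
        for j in 0..3 {
            for k in 0..3 {
                m[i][j] += a[i][k] * b[k][j];
            }
        }
    }
    m
}

fn transpose(a: &Mat3) -> Mat3 {
    let mut m = [[0.0; 3]; 3];
    for i in 0..3 {
        for j in 0..3 {
            m[i][j] = a[j][i];
        }
    }
    m
}

fn rot_z(t: f64) -> Mat3 {
    [[t.cos(), -t.sin(), 0.0], [t.sin(), t.cos(), 0.0], [0.0, 0.0, 1.0]]
}

fn rot_x(t: f64) -> Mat3 {
    [[1.0, 0.0, 0.0], [0.0, t.cos(), -t.sin()], [0.0, t.sin(), t.cos()]]
}

fn main() {
    let c = rot_z(0.3); // frame conversion (e.g. ICRS -> galactic), a rotation
    let w2m = rot_x(1.1); // world -> model, also a rotation
    let w2v = mul(&c, &w2m);
    // m2w is the inverse of w2m, i.e. its transpose for a rotation.
    let v2w = mul(&transpose(&w2m), &transpose(&c));
    // (w2v) * (v2w) should be the identity.
    let id = mul(&w2v, &v2w);
    for i in 0..3 {
        for j in 0..3 {
            let expected = if i == j { 1.0 } else { 0.0 };
            assert!((id[i][j] - expected).abs() < 1e-12);
        }
    }
    println!("ok: v2w = m2w * c^T inverts w2v = c * w2m");
}
```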
@@ -1073,15 +1109,15 @@ impl HiPS {
let shader = get_raster_shader(cmap, &self.gl, shaders, &config)?.bind(&self.gl);
shader
.attach_uniforms_from(camera)
.attach_uniforms_from(&self.textures)
// send the cmap appart from the color config
.attach_uniforms_with_params_from(cmap, colormaps)
.attach_uniforms_from(color)
.attach_uniform("model", &w2v)
.attach_uniforms_from(camera)
.attach_uniform("inv_model", &v2w)
.attach_uniform("current_time", &utils::get_current_time())
.attach_uniform("opacity", opacity)
.attach_uniform("u_proj", proj)
.attach_uniforms_from(colormaps)
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_with_i32(

View File

@@ -1,8 +1,8 @@
use crate::domain::sdf::ProjDefType;
use crate::survey::config::HiPSConfig;
use crate::{camera::CameraViewPort, math::projection::Projection};
use al_api::hips::HiPSCfg;
use crate::{math::projection::Projection};
use al_core::VecData;
use al_core::{shader::ShaderBound, Texture2D, VertexArrayObject, WebGlContext};

View File

@@ -238,8 +238,6 @@ pub fn get_grid_vertices(
#[cfg(test)]
mod tests {
use wcs::ImgXY;
#[test]
fn test_grid_vertices() {
let (x, y) = super::get_grid_params(&(0.0, 0.0), &(40.0, 40.0), 20, 4);

View File

@@ -572,19 +572,30 @@ impl Image {
} = cfg;
let shader = match self.channel {
ChannelType::R32F => crate::shader::get_shader(&self.gl, shaders, "FitsVS", "FitsFS")?,
#[cfg(feature = "webgl2")]
ChannelType::R32I => {
crate::shader::get_shader(&self.gl, shaders, "FitsVS", "FitsFSInteger")?
ChannelType::R32F => {
crate::shader::get_shader(&self.gl, shaders, "fits_base.vert", "fits_sampler.frag")?
}
#[cfg(feature = "webgl2")]
ChannelType::R16I => {
crate::shader::get_shader(&self.gl, shaders, "FitsVS", "FitsFSInteger")?
}
ChannelType::R32I => crate::shader::get_shader(
&self.gl,
shaders,
"fits_base.vert",
"fits_isampler.frag",
)?,
#[cfg(feature = "webgl2")]
ChannelType::R8UI => {
crate::shader::get_shader(&self.gl, shaders, "FitsVS", "FitsFSUnsigned")?
}
ChannelType::R16I => crate::shader::get_shader(
&self.gl,
shaders,
"fits_base.vert",
"fits_isampler.frag",
)?,
#[cfg(feature = "webgl2")]
ChannelType::R8UI => crate::shader::get_shader(
&self.gl,
shaders,
"fits_base.vert",
"fits_usampler.frag",
)?,
_ => return Err(JsValue::from_str("Image format type not supported")),
};

View File

@@ -1,17 +1,12 @@
use cgmath::Vector3;
use crate::ProjectionType;
use crate::CameraViewPort;
use crate::ProjectionType;
use cgmath::Vector3;
use cgmath::InnerSpace;
use crate::math::angle::ToAngle;
use cgmath::InnerSpace;
use crate::coo_space::XYNDC;
use crate::coo_space::XYZModel;
use crate::coo_space::XYNDC;
use crate::LonLatT;
const MAX_ITERATION: usize = 5;
@@ -21,7 +16,14 @@ const MAX_ITERATION: usize = 5;
// * Longitudes between [0; 2\pi[
// * (lon1 - lon2).abs() < PI so that it can only cross either the primary meridian or the opposite primary meridian
// (the latter case is handled thanks to the longitude intervals)
pub fn project(lon1: f64, lat1: f64, lon2: f64, lat2: f64, camera: &CameraViewPort, projection: &ProjectionType) -> Vec<XYNDC> {
pub fn project(
lon1: f64,
lat1: f64,
lon2: f64,
lat2: f64,
camera: &CameraViewPort,
projection: &ProjectionType,
) -> Vec<XYNDC<f64>> {
let mut vertices = vec![];
let lonlat1 = LonLatT::new(lon1.to_angle(), lat1.to_angle());
@@ -36,18 +38,16 @@ pub fn project(lon1: f64, lat1: f64, lon2: f64, lat2: f64, camera: &CameraViewPo
match (p1, p2) {
(Some(_), Some(_)) => {
project_line(&mut vertices, &v1, &v2, camera, projection, 0);
},
}
(None, Some(_)) => {
let (v1, v2) = sub_valid_domain(v2, v1, projection, camera);
project_line(&mut vertices, &v1, &v2, camera, projection, 0);
},
}
(Some(_), None) => {
let (v1, v2) = sub_valid_domain(v1, v2, projection, camera);
project_line(&mut vertices, &v1, &v2, camera, projection, 0);
},
(None, None) => {
}
(None, None) => {}
}
vertices
@@ -57,7 +57,12 @@ pub fn project(lon1: f64, lat1: f64, lon2: f64, lat2: f64, camera: &CameraViewPo
// * angular distance between valid_lon and invalid_lon is < PI
// * valid_lon and invalid_lon are well defined, i.e. they can lie in [-PI; PI] or [0, 2PI] depending
// on whether or not they cross the zero meridian
fn sub_valid_domain(valid_v: XYZModel, invalid_v: XYZModel, projection: &ProjectionType, camera: &CameraViewPort) -> (XYZModel, XYZModel) {
fn sub_valid_domain(
valid_v: XYZModel<f64>,
invalid_v: XYZModel<f64>,
projection: &ProjectionType,
camera: &CameraViewPort,
) -> (XYZModel<f64>, XYZModel<f64>) {
let d_alpha = camera.get_aperture().to_radians() * 0.02;
let mut vv = valid_v;
@@ -77,9 +82,9 @@ fn sub_valid_domain(valid_v: XYZModel, invalid_v: XYZModel, projection: &Project
}
fn project_line(
vertices: &mut Vec<XYNDC>,
v1: &XYZModel,
v2: &XYZModel,
vertices: &mut Vec<XYNDC<f64>>,
v1: &XYZModel<f64>,
v2: &XYZModel<f64>,
camera: &CameraViewPort,
projection: &ProjectionType,
iter: usize,
@@ -91,25 +96,14 @@ fn project_line(
// Project them. We are always facing the camera
let vm = (v1 + v2).normalize();
let pm = projection.model_to_normalized_device_space(&vm.extend(1.0), camera);
match (p1, pm, p2) {
(Some(p1), Some(pm), Some(p2)) => {
let d12 = crate::math::vector::angle3(v1, v2).to_radians();
// Subdivide while the arc spans more than 30 degrees
if d12 > 30.0_f64.to_radians() {
subdivide(
vertices,
v1,
v2,
&vm,
p1,
p2,
pm,
camera,
projection,
iter
);
subdivide(vertices, v1, v2, &vm, p1, p2, pm, camera, projection, iter);
} else {
// enough to stop the recursion
let ab = pm - p1;
@@ -131,7 +125,7 @@ fn project_line(
// not colinear but enough to stop
vertices.push(p1);
vertices.push(pm);
vertices.push(pm);
vertices.push(p2);
}
@@ -151,65 +145,39 @@ fn project_line(
}
} else {
// Subdivide a->b and b->c
subdivide(
vertices,
v1,
v2,
&vm,
p1,
p2,
pm,
camera,
projection,
iter
);
subdivide(vertices, v1, v2, &vm, p1, p2, pm, camera, projection, iter);
}
}
}
true
},
_ => false
}
_ => false,
}
} else {
false
}
}
fn subdivide(
vertices: &mut Vec<XYNDC>,
v1: &XYZModel,
v2: &XYZModel,
vm: &XYZModel,
p1: XYNDC,
p2: XYNDC,
pm: XYNDC,
vertices: &mut Vec<XYNDC<f64>>,
v1: &XYZModel<f64>,
v2: &XYZModel<f64>,
vm: &XYZModel<f64>,
p1: XYNDC<f64>,
p2: XYNDC<f64>,
pm: XYNDC<f64>,
camera: &CameraViewPort,
projection: &ProjectionType,
iter: usize
iter: usize,
) {
// Subdivide a->b and b->c
if !project_line(
vertices,
v1,
vm,
camera,
projection,
iter + 1
) {
if !project_line(vertices, v1, vm, camera, projection, iter + 1) {
vertices.push(p1);
vertices.push(pm);
}
if !project_line(
vertices,
vm,
v2,
camera,
projection,
iter + 1
) {
if !project_line(vertices, vm, v2, camera, projection, iter + 1) {
vertices.push(pm);
vertices.push(p2);
}
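
`project_line` splits a great-circle arc at its spherical midpoint (`(v1 + v2).normalize()`) while the arc is longer than 30 degrees, and additionally stops when the projected points become nearly colinear or after `MAX_ITERATION` levels. The standalone sketch below only reproduces the midpoint and arc-length part of that recursion, with plain 3-vectors instead of the crate's `XYZModel` type; the colinearity test and the iteration cap are left out.

```rust
// Sketch of the recursion driver used for great-circle arcs: split at the
// normalized midpoint while the arc is longer than a threshold.
fn normalize(v: [f64; 3]) -> [f64; 3] {
    let n = (v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).sqrt();
    [v[0] / n, v[1] / n, v[2] / n]
}

fn dot(a: [f64; 3], b: [f64; 3]) -> f64 {
    a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
}

/// Angular length of the arc between two unit vectors, in radians.
fn arc_len(a: [f64; 3], b: [f64; 3]) -> f64 {
    dot(a, b).clamp(-1.0, 1.0).acos()
}

/// Collect the unit vectors sampling the arc a -> b so that every piece
/// spans at most `max_len` radians.
fn sample_arc(a: [f64; 3], b: [f64; 3], max_len: f64, out: &mut Vec<[f64; 3]>) {
    if arc_len(a, b) <= max_len {
        out.push(a);
        out.push(b);
    } else {
        // Spherical midpoint: normalize the chord midpoint.
        let m = normalize([a[0] + b[0], a[1] + b[1], a[2] + b[2]]);
        sample_arc(a, m, max_len, out);
        sample_arc(m, b, max_len, out);
    }
}

fn main() {
    // A 100 deg arc in the equatorial plane, subdivided into <= 30 deg pieces.
    let a = [1.0, 0.0, 0.0];
    let t = 100_f64.to_radians();
    let b = [t.cos(), t.sin(), 0.0];
    let mut samples = Vec::new();
    sample_arc(a, b, 30_f64.to_radians(), &mut samples);
    // 100 deg -> 2 x 50 deg -> 4 x 25 deg, so 4 pieces and 8 endpoints.
    println!("{} segment endpoints", samples.len());
}
```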

View File

@@ -2,8 +2,9 @@
pub mod great_circle_arc;
pub mod parallel_arc;
use crate::Abort;
use al_core::shader::Shader;
use crate::math::projection::ProjectionType;
use crate::shader::ShaderManager;
use al_api::coo_system::CooSystem;
use al_core::VertexArrayObject;
use al_core::WebGlContext;
@@ -11,16 +12,12 @@ use super::Renderer;
use al_api::color::ColorRGBA;
use al_core::SliceData;
use lyon::algorithms::{
math::point,
measure::{PathMeasurements, SampleType},
path::Path,
};
struct Meta {
color: ColorRGBA,
thickness: f32,
off_indices: usize,
num_indices: usize,
coo_space: CooSpace,
}
#[derive(Clone)]
@@ -32,12 +29,16 @@ pub enum Style {
pub struct RasterizedLineRenderer {
gl: WebGlContext,
shader: Shader,
vao: VertexArrayObject,
vao_idx: usize,
vertices: Vec<f32>,
indices: Vec<u32>,
meta: Vec<Meta>,
instanced_line_vaos: Vec<VertexArrayObject>,
meta_instanced: Vec<Meta>,
}
use wasm_bindgen::JsValue;
@@ -46,14 +47,14 @@ use web_sys::WebGl2RenderingContext;
use crate::camera::CameraViewPort;
use lyon::tessellation::*;
use crate::coo_space::CooSpace;
pub struct PathVertices<T>
#[repr(C)]
pub struct PathVertices<V>
where
T: AsRef<[[f32; 2]]>,
V: AsRef<[[f32; 2]]>,
{
pub vertices: T,
pub closed: bool,
pub vertices: V,
}
impl RasterizedLineRenderer {
@@ -62,11 +63,6 @@ impl RasterizedLineRenderer {
let vertices = vec![];
let indices = vec![];
// Create the VAO for the screen
let shader = Shader::new(
&gl,
include_str!("../../../../glsl/webgl2/line/line_vertex.glsl"),
include_str!("../../../../glsl/webgl2/line/line_frag.glsl"),
)?;
let mut vao = VertexArrayObject::new(&gl);
vao.bind_for_update()
@@ -86,10 +82,15 @@ impl RasterizedLineRenderer {
.unbind();
let meta = vec![];
let meta_instanced = vec![];
let gl = gl.clone();
let instanced_line_vaos = vec![];
Ok(Self {
gl,
shader,
vao_idx: 0,
instanced_line_vaos,
meta_instanced,
vao,
meta,
vertices,
@@ -97,12 +98,13 @@ impl RasterizedLineRenderer {
})
}
pub fn add_fill_paths<T>(
/*pub fn add_fill_paths<V>(
&mut self,
paths: impl Iterator<Item = PathVertices<T>>,
paths: impl Iterator<Item = PathVertices<V>>,
color: &ColorRGBA,
coo_space: CooSpace,
) where
T: AsRef<[[f32; 2]]>,
V: AsRef<[[f32; 2]]>,
{
let mut num_indices = 0;
let off_indices = self.indices.len();
@@ -114,9 +116,11 @@ impl RasterizedLineRenderer {
for path in paths {
let mut path_builder = Path::builder();
let PathVertices { vertices, closed } = path;
let PathVertices {
vertices, /*, closed */
} = path;
let line: &[[f32; 2]] = vertices.as_ref();
let line = vertices.as_ref();
if !line.is_empty() {
let v = &line[0];
@@ -127,7 +131,7 @@ impl RasterizedLineRenderer {
path_builder.line_to(point(v[0], v[1]));
}
path_builder.end(closed);
path_builder.end(false);
}
// Create the destination vertex and index buffers.
@@ -164,28 +168,66 @@ impl RasterizedLineRenderer {
self.meta.push(Meta {
off_indices,
num_indices,
thickness: 1.0,
color: color.clone(),
coo_space,
});
}*/
fn create_instanced_vao(&mut self) {
let mut vao = VertexArrayObject::new(&self.gl);
vao.bind_for_update()
// Store the two NDC endpoints of each line segment in an instanced VBO
.add_instanced_array_buffer(
"ndc_pos",
4 * std::mem::size_of::<f32>(),
&[2, 2],
&[0, 2 * std::mem::size_of::<f32>()],
WebGl2RenderingContext::DYNAMIC_DRAW,
&[] as &[f32],
)
.add_array_buffer(
"vertices",
2 * std::mem::size_of::<f32>(),
&[2],
&[0],
WebGl2RenderingContext::STATIC_DRAW,
&[
0_f32, -0.5_f32, 1_f32, -0.5_f32, 1_f32, 0.5_f32, 0_f32, 0.5_f32,
] as &[f32],
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::STATIC_DRAW,
&[0_u16, 1_u16, 2_u16, 0_u16, 2_u16, 3_u16] as &[u16],
)
// Unbind the buffer
.unbind();
self.instanced_line_vaos.push(vao);
}
pub fn add_stroke_paths<T>(
pub fn add_stroke_paths<V>(
&mut self,
paths: impl Iterator<Item = PathVertices<T>>,
paths: impl Iterator<Item = PathVertices<V>>,
thickness: f32,
color: &ColorRGBA,
style: &Style,
_style: &Style,
coo_space: CooSpace,
) where
T: AsRef<[[f32; 2]]>,
V: AsRef<[[f32; 2]]>,
{
let num_vertices = (self.vertices.len() / 2) as u32;
//let num_vertices = (self.vertices.len() / 2) as u32;
let mut path_builder = Path::builder();
/*let mut path_builder = Path::builder();
match &style {
Style::None => {
for path in paths {
let PathVertices { vertices, closed } = path;
for PathVertices {
vertices, /* , closed */
} in paths
{
let line: &[[f32; 2]] = vertices.as_ref();
if !line.is_empty() {
//let v = clamp_ndc_vertex(&line[0]);
@@ -197,7 +239,7 @@ impl RasterizedLineRenderer {
path_builder.line_to(point(v[0], v[1]));
}
path_builder.end(closed);
path_builder.end(false);
}
}
@@ -205,7 +247,9 @@ impl RasterizedLineRenderer {
}
Style::Dashed => {
for path in paths {
let PathVertices { vertices, closed } = path;
let PathVertices {
vertices, /* , closed */
} = path;
let line: &[[f32; 2]] = vertices.as_ref();
if !line.is_empty() {
@@ -220,7 +264,7 @@ impl RasterizedLineRenderer {
line_path_builder.line_to(point(v[0], v[1]));
}
line_path_builder.end(closed);
line_path_builder.end(false);
let path = line_path_builder.build();
// Build the acceleration structure.
@@ -262,22 +306,51 @@ impl RasterizedLineRenderer {
.unwrap_abort();
}
let VertexBuffers { vertices, indices } = geometry;
let VertexBuffers { vertices, indices } = geometry;*/
if self.vao_idx == self.instanced_line_vaos.len() {
// create a vao
self.create_instanced_vao();
}
let num_indices = indices.len();
let off_indices = self.indices.len();
let vao = &mut self.instanced_line_vaos[self.vao_idx];
self.vao_idx += 1;
self.vertices.extend(vertices.iter().flatten());
self.indices.extend(indices.iter());
let mut buf: Vec<f32> = vec![];
self.meta.push(Meta {
off_indices,
num_indices,
for PathVertices { vertices } in paths {
let vertices = vertices.as_ref();
let path_vertices_buf_iter = vertices
.iter()
.zip(vertices.iter().skip(1))
.map(|(a, b)| [a[0], a[1], b[0], b[1]])
.flatten();
buf.extend(path_vertices_buf_iter);
}
vao.bind_for_update().update_instanced_array(
"ndc_pos",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData(&buf),
);
let num_instances = buf.len() / 4;
self.meta_instanced.push(Meta {
off_indices: 0,
thickness,
num_indices: num_instances,
color: color.clone(),
coo_space,
});
}
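
A remark on the lifecycle implied by `vao_idx`: `begin` (see the `Renderer` impl further down) only rewinds the index without dropping the VAOs, and `add_stroke_paths` creates a new instanced VAO only when the index runs past the ones created on previous frames, so the per-path VAOs are allocated once and then reused. A toy, WebGL-free model of that bookkeeping (not code from this changeset):

```rust
// Toy model of the VAO-reuse bookkeeping in RasterizedLineRenderer:
// `begin` only rewinds the index, so the per-path VAOs created on a previous
// frame are reused instead of being re-allocated.
struct ToyRenderer {
    vaos: Vec<String>, // stands in for the WebGL VertexArrayObjects
    vao_idx: usize,
}

impl ToyRenderer {
    fn new() -> Self {
        Self { vaos: Vec::new(), vao_idx: 0 }
    }

    fn begin(&mut self) {
        // Mirrors Renderer::begin: rewind, do not drop the allocated VAOs.
        self.vao_idx = 0;
    }

    fn add_stroke_paths(&mut self, label: &str) {
        if self.vao_idx == self.vaos.len() {
            // Lazily allocate a new VAO, as create_instanced_vao does.
            self.vaos.push(format!("vao for {label}"));
        }
        // Here the real renderer uploads the instance buffer into this VAO.
        self.vao_idx += 1;
    }
}

fn main() {
    let mut r = ToyRenderer::new();
    for frame in 0..3 {
        r.begin();
        r.add_stroke_paths("grid");
        r.add_stroke_paths("overlay");
        println!("frame {frame}: {} VAOs allocated", r.vaos.len());
    }
    // Prints "2 VAOs allocated" on every frame: allocation happens once.
}
```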
pub fn draw(&mut self, _camera: &CameraViewPort) -> Result<(), JsValue> {
pub fn draw(
&mut self,
shaders: &mut ShaderManager,
camera: &CameraViewPort,
proj: &ProjectionType,
) -> Result<(), JsValue> {
self.gl.enable(WebGl2RenderingContext::BLEND);
self.gl.blend_func_separate(
WebGl2RenderingContext::SRC_ALPHA,
@@ -287,21 +360,71 @@ impl RasterizedLineRenderer {
);
//self.gl.disable(WebGl2RenderingContext::CULL_FACE);
let shader = self.shader.bind(&self.gl);
for meta in self.meta.iter() {
shader
.attach_uniform("u_color", &meta.color) // Strengh of the kernel
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_with_i32(
WebGl2RenderingContext::TRIANGLES,
Some(meta.num_indices as i32),
WebGl2RenderingContext::UNSIGNED_INT,
((meta.off_indices as usize) * std::mem::size_of::<u32>()) as i32,
);
{
let shader =
crate::shader::get_shader(&self.gl, shaders, "line_base.vert", "line_base.frag")?
.bind(&self.gl);
for meta in self.meta.iter() {
shader
.attach_uniform("u_color", &meta.color) // Strengh of the kernel
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_with_i32(
WebGl2RenderingContext::TRIANGLES,
Some(meta.num_indices as i32),
WebGl2RenderingContext::UNSIGNED_INT,
((meta.off_indices as usize) * std::mem::size_of::<u32>()) as i32,
);
}
}
//self.gl.enable(WebGl2RenderingContext::CULL_FACE);
// draw the instanced lines
for (idx, meta) in self.meta_instanced.iter().enumerate() {
match meta.coo_space {
CooSpace::NDC => {
crate::shader::get_shader(
&self.gl,
shaders,
"line_inst_ndc.vert",
"line_base.frag",
)?
.bind(&self.gl)
.attach_uniform("u_color", &meta.color)
.attach_uniform("u_width", &meta.thickness)
.bind_vertex_array_object_ref(&self.instanced_line_vaos[idx])
.draw_elements_instanced_with_i32(
WebGl2RenderingContext::TRIANGLES,
0,
meta.num_indices as i32,
);
}
CooSpace::LonLat => {
let icrs2view = CooSystem::ICRS.to(camera.get_coo_system());
let view2world = camera.get_m2w();
let icrs2world = view2world * icrs2view;
crate::shader::get_shader(
&self.gl,
shaders,
"line_inst_lonlat.vert",
"line_base.frag",
)?
.bind(&self.gl)
.attach_uniforms_from(camera)
.attach_uniform("u_2world", &icrs2world)
.attach_uniform("u_color", &meta.color)
.attach_uniform("u_width", &meta.thickness)
.attach_uniform("u_proj", proj)
.bind_vertex_array_object_ref(&self.instanced_line_vaos[idx])
.draw_elements_instanced_with_i32(
WebGl2RenderingContext::TRIANGLES,
0,
meta.num_indices as i32,
);
}
_ => (),
}
}
self.gl.disable(WebGl2RenderingContext::BLEND);
Ok(())
@@ -312,8 +435,10 @@ impl Renderer for RasterizedLineRenderer {
fn begin(&mut self) {
self.vertices.clear();
self.indices.clear();
self.meta.clear();
self.meta_instanced.clear();
self.vao_idx = 0;
}
fn end(&mut self) {

View File

@@ -1,13 +1,9 @@
use crate::healpix::coverage::HEALPixCoverage;
//use moclib::moc::range::CellAndNeighs;
use moclib::elem::cell::Cell;
use moclib::moc::range::CellAndNeighs;
use moclib::moc::RangeMOCIntoIterator;
use moclib::moc::RangeMOCIterator;
/*use crate::renderable::coverage::HEALPixCell;
use crate::renderable::coverage::HEALPixCell;
use healpix::compass_point::Ordinal;
use healpix::compass_point::{MainWind, Ordinal};
#[derive(Debug)]
pub(super) struct EdgeNeigs {
// Indices of the neighbors in the stack
@@ -91,11 +87,11 @@ impl NodeEdgeNeigs {
1 << delta_depth
}
}
pub(super) struct G {
*/
/*pub(super) struct G {
nodes: Vec<NodeEdgeNeigs>,
}
use crate::renderable::coverage::mode::Node;
impl G {
pub(super) fn new(moc: &HEALPixCoverage) -> Self {
let mut nodes: Vec<_> = (&moc.0)
@@ -294,3 +290,4 @@ fn find_neig_dir(mut cell: HEALPixCell, mut neig: HEALPixCell) -> Option<Ordinal
None
}
*/

View File

@@ -1,4 +1,4 @@
use super::moc::MOC;
use super::MOC;
use crate::{camera::CameraViewPort, HEALPixCoverage};
use al_api::moc::MOC as Cfg;
@@ -6,23 +6,21 @@ pub struct MOCHierarchy {
full_res_depth: u8,
// MOC at different resolution
mocs: Vec<MOC>,
coverage: HEALPixCoverage,
}
use al_core::WebGlContext;
impl MOCHierarchy {
pub fn from_full_res_moc(full_res_moc: HEALPixCoverage, cfg: &Cfg) -> Self {
pub fn from_full_res_moc(gl: WebGlContext, full_res_moc: HEALPixCoverage, cfg: &Cfg) -> Self {
let full_res_depth = full_res_moc.depth();
let mut mocs: Vec<_> = (0..full_res_depth)
.map(|d| MOC::new(&HEALPixCoverage(full_res_moc.degraded(d)), cfg))
.map(|d| MOC::new(gl.clone(), HEALPixCoverage(full_res_moc.degraded(d)), cfg))
.collect();
mocs.push(MOC::new(&full_res_moc, cfg));
mocs.push(MOC::new(gl, full_res_moc, cfg));
Self {
mocs,
full_res_depth,
coverage: full_res_moc,
}
}
@@ -53,9 +51,9 @@ impl MOCHierarchy {
(smallest_cell_size_px / w_screen_px) * camera.get_aperture().to_radians();
while d > 0 {
self.mocs[d].cell_indices_in_view(camera);
//self.mocs[d].cell_indices_in_view(camera);
if (crate::healpix::utils::MEAN_HPX_CELL_RES[d] > hpx_cell_size_rad) {
if crate::healpix::utils::MEAN_HPX_CELL_RES[d] > hpx_cell_size_rad {
break;
}
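
`select_moc_from_view` walks down from the full-resolution MOC and stops at the first depth whose mean HEALPix cell size exceeds the chosen on-screen angular size (a few pixels converted through the aperture). The initialisation of `d` lies outside this hunk, so the standalone sketch below is a hedged reconstruction of that selection rule with a made-up resolution table instead of `MEAN_HPX_CELL_RES`.

```rust
// Illustrative sketch (assumption about the parts of select_moc_from_view not
// shown in this hunk): pick the deepest MOC level whose cells are not smaller
// than a chosen on-screen angular size.
fn select_depth(mean_cell_res_rad: &[f64], full_res_depth: usize, target_rad: f64) -> usize {
    let mut d = full_res_depth;
    while d > 0 {
        if mean_cell_res_rad[d] > target_rad {
            break;
        }
        d -= 1;
    }
    d
}

fn main() {
    // Fake per-depth mean cell sizes (radians), roughly halving at each depth.
    let mut res = vec![1.0_f64; 11];
    for d in 1..res.len() {
        res[d] = res[d - 1] / 2.0;
    }
    // Target: cells should cover at least ~5 px on a 1024 px wide, 60 deg view.
    let target = (5.0 / 1024.0) * 60_f64.to_radians();
    let depth = select_depth(&res, 10, target);
    println!("draw the MOC degraded to depth {depth}");
}
```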
@@ -66,7 +64,7 @@ impl MOCHierarchy {
}
pub fn get_full_moc(&self) -> &HEALPixCoverage {
&self.coverage
&self.mocs.last().unwrap().moc
}
pub fn get_full_res_depth(&self) -> u8 {

View File

@@ -0,0 +1,654 @@
mod graph;
mod mode;
pub mod hierarchy;
pub mod renderer;
pub use renderer::MOCRenderer;
use crate::camera::CameraViewPort;
use crate::healpix::coverage::HEALPixCoverage;
use crate::math::projection::ProjectionType;
use crate::renderable::WebGl2RenderingContext;
use crate::shader::ShaderManager;
use al_api::moc::MOC as Cfg;
use wasm_bindgen::JsValue;
use crate::WebGlContext;
use al_core::VertexArrayObject;
use al_api::color::ColorRGBA;
use al_api::coo_system::CooSystem;
use moclib::elem::cell::Cell;
use moclib::moc::range::CellAndEdges;
use moclib::moc::RangeMOCIterator;
use crate::HEALPixCell;
use al_core::VecData;
pub struct MOC {
pub sky_fraction: f32,
pub max_order: u8,
inner: [Option<MOCIntern>; 3],
pub moc: HEALPixCoverage,
}
impl MOC {
pub(super) fn new(gl: WebGlContext, moc: HEALPixCoverage, cfg: &Cfg) -> Self {
let sky_fraction = moc.sky_fraction() as f32;
let max_order = moc.depth_max();
let inner = [
if cfg.perimeter {
// draw only perimeter
Some(MOCIntern::new(
gl.clone(),
RenderModeType::Perimeter {
thickness: cfg.line_width,
color: cfg.color,
},
))
} else {
None
},
if cfg.filled {
// change color
let fill_color = cfg.fill_color;
// draw the edges
Some(MOCIntern::new(
gl.clone(),
RenderModeType::Filled { color: fill_color },
))
} else {
None
},
if cfg.edges {
Some(MOCIntern::new(
gl,
RenderModeType::Edge {
thickness: cfg.line_width,
color: cfg.color,
},
))
} else {
None
},
];
Self {
inner,
max_order,
sky_fraction,
moc,
}
}
/*pub(super) fn cell_indices_in_view(&mut self, camera: &mut CameraViewPort) {
for render in &mut self.inner {
if let Some(render) = render.as_mut() {
render.cell_indices_in_view(camera);
}
}
}*/
/*pub(super) fn num_cells_in_view(&self, camera: &mut CameraViewPort) -> usize {
self.inner
.iter()
.filter_map(|moc| moc.as_ref())
.map(|moc| moc.num_cells_in_view(camera))
.sum()
}*/
/*pub(super) fn num_vertices_in_view(&self, camera: &mut CameraViewPort) -> usize {
let mut num_vertices = 0;
for render in &self.0 {
if let Some(render) = render.as_ref() {
num_vertices += render.num_vertices_in_view(camera);
}
}
num_vertices
}*/
pub fn sky_fraction(&self) -> f32 {
self.sky_fraction
}
pub fn max_order(&self) -> u8 {
self.max_order
}
pub(super) fn draw(
&mut self,
camera: &mut CameraViewPort,
proj: &ProjectionType,
shaders: &mut ShaderManager,
) -> Result<(), JsValue> {
for render in &mut self.inner {
if let Some(render) = render.as_mut() {
render.draw(&self.moc, camera, proj, shaders)?
}
}
Ok(())
}
}
struct MOCIntern {
// HEALPix index vector
// Used for fast HEALPix cell retrieval
//hpx_idx_vec: IdxVec,
// Node indices in view
//indices: Vec<Range<usize>>,
mode: RenderModeType,
gl: WebGlContext,
vao: VertexArrayObject,
}
#[derive(Clone)]
pub enum RenderModeType {
Perimeter { thickness: f32, color: ColorRGBA },
Edge { thickness: f32, color: ColorRGBA },
Filled { color: ColorRGBA },
}
impl MOCIntern {
fn new(gl: WebGlContext, mode: RenderModeType) -> Self {
let lonlat = vec![];
let vertices = [
0_f32, -0.5_f32, 1_f32, -0.5_f32, 1_f32, 0.5_f32, 0_f32, 0.5_f32,
];
let indices = [0_u16, 1_u16, 2_u16, 0_u16, 2_u16, 3_u16];
let vao = match mode {
RenderModeType::Perimeter { .. } | RenderModeType::Edge { .. } => {
let mut vao = VertexArrayObject::new(&gl);
vao.bind_for_update()
// Store the two (lon, lat) endpoints of each cell edge in an instanced VBO
.add_instanced_array_buffer(
"lonlat",
4 * std::mem::size_of::<f32>(),
&[2, 2],
&[0, 2 * std::mem::size_of::<f32>()],
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&lonlat),
)
.add_array_buffer(
"vertices",
2 * std::mem::size_of::<f32>(),
&[2],
&[0],
WebGl2RenderingContext::STATIC_DRAW,
&vertices as &[f32],
)
// Set the element buffer
.add_element_buffer(WebGl2RenderingContext::STATIC_DRAW, &indices as &[u16])
// Unbind the buffer
.unbind();
vao
}
RenderModeType::Filled { .. } => {
let mut vao = VertexArrayObject::new(&gl);
let indices = vec![];
vao.bind_for_update()
// Store the (lon, lat) cell vertices in a VBO
.add_array_buffer(
"lonlat",
2 * std::mem::size_of::<f32>(),
&[2],
&[0],
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&lonlat),
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<u32>(&indices),
)
// Unbind the buffer
.unbind();
vao
}
};
/*let hpx_idx_vec =
IdxVec::from_hpx_cells((&moc.0).into_range_moc_iter().cells().flat_map(|cell| {
let cell = HEALPixCell(cell.depth, cell.idx);
let dd = if 3 >= cell.depth() {
3 - cell.depth()
} else {
0
};
cell.get_tile_cells(dd)
}));
*/
Self {
//nodes,
//moc,
//hpx_idx_vec,
//indices: vec![],
vao,
gl,
mode,
}
}
/*fn cell_indices_in_view(&mut self, moc: &HEALPixCoverage, camera: &mut CameraViewPort) {
// Cache it for several reuse during the same frame
let view_depth = camera.get_texture_depth();
let cells_iter = camera.get_hpx_cells(view_depth, CooSystem::ICRS);
if moc.is_empty() {
self.indices = vec![0..0];
return;
}
/*let indices: Vec<_> = if view_depth > 7 {
// Binary search version, we are using this alternative for retrieving
// MOC's cells to render for deep fields of view
let first_cell_rng = &self.nodes[0].cell.z_29_rng();
let last_cell_rng = &self.nodes[self.nodes.len() - 1].cell.z_29_rng();
cells_iter
.filter_map(|cell| {
let cell_rng = cell.z_29_rng();
// Quick rejection test
if cell_rng.end <= first_cell_rng.start || cell_rng.start >= last_cell_rng.end {
None
} else {
let contains_val = |hash_z29: u64| -> Result<usize, usize> {
self.nodes.binary_search_by(|node| {
let node_cell_rng = node.cell.z_29_rng();
if hash_z29 < node_cell_rng.start {
// the node cell range contains hash_z29
Ordering::Greater
} else if hash_z29 >= node_cell_rng.end {
Ordering::Less
} else {
Ordering::Equal
}
})
};
let start_idx = contains_val(cell_rng.start);
let end_idx = contains_val(cell_rng.end);
let cell_indices = match (start_idx, end_idx) {
(Ok(l), Ok(r)) => {
if l == r {
l..(r + 1)
} else {
l..r
}
}
(Err(l), Ok(r)) => l..r,
(Ok(l), Err(r)) => l..r,
(Err(l), Err(r)) => l..r,
};
Some(cell_indices)
}
})
.collect()
} else {
// Index Vector 7 order version
cells_iter
.map(|cell| self.hpx_idx_vec.get_item_indices_inside_hpx_cell(&cell))
.collect()
};*/
let indices = cells_iter
.map(|cell| self.hpx_idx_vec.get_item_indices_inside_hpx_cell(&cell))
.collect();
let indices = crate::utils::merge_overlapping_intervals(indices);
self.indices = indices;
}*/
/*fn num_vertices_in_view(&self, camera: &CameraViewPort) -> usize {
self.cells_in_view(camera)
.filter_map(|n| n.vertices.as_ref())
.map(|n_vertices| {
n_vertices
.vertices
.iter()
.map(|edge| edge.len())
.sum::<usize>()
})
.sum()
}*/
/*fn num_cells_in_view(&self, _camera: &CameraViewPort) -> usize {
self.indices
.iter()
.map(|range| range.end - range.start)
.sum()
}*/
/*fn cells_in_view<'a>(&'a self, _camera: &CameraViewPort) -> impl Iterator<Item = Node> {
let nodes = &self.nodes;
self.indices
.iter()
.map(move |indices| nodes[indices.start..indices.end].iter())
.flatten()
}*/
fn vertices_in_view<'a>(
&self,
moc: &'a HEALPixCoverage,
camera: &'a mut CameraViewPort,
) -> impl Iterator<Item = [(f64, f64); 4]> + 'a {
let view_moc = camera.get_cov(CooSystem::ICRS);
//self.cells_in_view(camera)
// .filter_map(move |node| node.vertices.as_ref())
moc.overlapped_by_iter(view_moc)
.cells()
.flat_map(|cell| {
let Cell { idx, depth } = cell;
let cell = HEALPixCell(depth, idx);
let dd = if 3 >= cell.depth() {
3 - cell.depth()
} else {
0
};
cell.get_tile_cells(dd)
})
.map(|hpx_cell| hpx_cell.vertices())
}
fn draw(
&mut self,
moc: &HEALPixCoverage,
camera: &mut CameraViewPort,
proj: &ProjectionType,
shaders: &mut ShaderManager,
) -> Result<(), JsValue> {
//let _ = crate::Time::measure_perf("rasterize moc", move || {
match self.mode {
RenderModeType::Perimeter { thickness, color } => {
let moc_in_view = moc
.overlapped_by_iter(&camera.get_cov(CooSystem::ICRS))
.into_range_moc();
let perimeter_vertices_iter = moc_in_view
.border_elementary_edges()
.filter_map(|CellAndEdges { uniq, edges }| {
if edges.is_empty() {
None
} else {
let mut paths = vec![];
let c = Cell::from_uniq_hpx(uniq);
let cell = HEALPixCell(c.depth, c.idx);
let v = cell.vertices();
if edges.get(moclib::moc::range::Ordinal::SE) {
paths.extend([
v[0].0 as f32,
v[0].1 as f32,
v[1].0 as f32,
v[1].1 as f32,
]);
}
if edges.get(moclib::moc::range::Ordinal::NE) {
paths.extend([
v[1].0 as f32,
v[1].1 as f32,
v[2].0 as f32,
v[2].1 as f32,
]);
}
if edges.get(moclib::moc::range::Ordinal::NW) {
paths.extend([
v[2].0 as f32,
v[2].1 as f32,
v[3].0 as f32,
v[3].1 as f32,
]);
}
if edges.get(moclib::moc::range::Ordinal::SW) {
paths.extend([
v[3].0 as f32,
v[3].1 as f32,
v[0].0 as f32,
v[0].1 as f32,
])
}
Some(paths)
}
})
.flatten();
let mut buf: Vec<_> = vec![];
buf.extend(perimeter_vertices_iter);
self.vao.bind_for_update().update_instanced_array(
"lonlat",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&buf),
);
let num_instances = buf.len() / 4;
let icrs2view = CooSystem::ICRS.to(camera.get_coo_system());
let view2world = camera.get_m2w();
let icrs2world = view2world * icrs2view;
crate::shader::get_shader(
&self.gl,
shaders,
"line_inst_lonlat.vert",
"line_base.frag",
)?
.bind(&self.gl)
.attach_uniforms_from(camera)
.attach_uniform("u_2world", &icrs2world)
.attach_uniform("u_color", &color)
.attach_uniform("u_width", &thickness)
.attach_uniform("u_proj", proj)
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_instanced_with_i32(
WebGl2RenderingContext::TRIANGLES,
0,
num_instances as i32,
);
}
RenderModeType::Edge { thickness, color } => {
let mut buf: Vec<_> = vec![];
buf.extend(self.compute_edge_paths_iter(moc, camera));
//let mut buf = self.compute_edge_paths_iter(moc, camera).collect();
self.vao.bind_for_update().update_instanced_array(
"lonlat",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&buf),
);
let num_instances = buf.len() / 4;
let icrs2view = CooSystem::ICRS.to(camera.get_coo_system());
let view2world = camera.get_m2w();
let icrs2world = view2world * icrs2view;
crate::shader::get_shader(
&self.gl,
shaders,
"line_inst_lonlat.vert",
"line_base.frag",
)?
.bind(&self.gl)
.attach_uniforms_from(camera)
.attach_uniform("u_2world", &icrs2world)
.attach_uniform("u_color", &color)
.attach_uniform("u_width", &thickness)
.attach_uniform("u_proj", proj)
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_instanced_with_i32(
WebGl2RenderingContext::TRIANGLES,
0,
num_instances as i32,
);
/*rasterizer.add_stroke_paths(
,
thickness,
&color,
&super::line::Style::None,
CooSpace::LonLat,
);*/
}
RenderModeType::Filled { color } => {
let mut off_idx = 0;
let mut indices: Vec<u32> = vec![];
let vertices = self
.vertices_in_view(moc, camera)
.map(|v| {
let vertices = [
v[0].0 as f32,
v[0].1 as f32,
v[1].0 as f32,
v[1].1 as f32,
v[2].0 as f32,
v[2].1 as f32,
v[3].0 as f32,
v[3].1 as f32,
];
indices.extend_from_slice(&[
off_idx + 1,
off_idx + 0,
off_idx + 3,
off_idx + 1,
off_idx + 3,
off_idx + 2,
]);
off_idx += 4;
vertices
})
.flatten()
.collect();
let num_idx = indices.len() as i32;
self.vao
.bind_for_update()
.update_array(
"lonlat",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData(&vertices),
)
.update_element_array(WebGl2RenderingContext::DYNAMIC_DRAW, VecData(&indices));
let icrs2view = CooSystem::ICRS.to(camera.get_coo_system());
let view2world = camera.get_m2w();
let icrs2world = view2world * icrs2view;
self.gl.enable(WebGl2RenderingContext::BLEND);
crate::shader::get_shader(&self.gl, shaders, "moc_base.vert", "moc_base.frag")?
.bind(&self.gl)
.attach_uniforms_from(camera)
.attach_uniform("u_2world", &icrs2world)
.attach_uniform("u_color", &color)
.attach_uniform("u_proj", proj)
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_with_i32(
WebGl2RenderingContext::TRIANGLES,
Some(num_idx),
WebGl2RenderingContext::UNSIGNED_INT,
0,
);
self.gl.disable(WebGl2RenderingContext::BLEND);
}
}
Ok(())
//});
}
fn compute_edge_paths_iter<'a>(
&self,
moc: &'a HEALPixCoverage,
camera: &'a mut CameraViewPort,
) -> impl Iterator<Item = f32> + 'a {
/*self.vertices_in_view(view_moc, moc, camera)
.filter_map(move |cell_vertices| {
let mut ndc: [[f32; 2]; 5] =
[[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]];
let vertices = cell_vertices;
for i in 0..4 {
let line_vertices = vertices[i];
//for k in 0..line_vertices.len() {
let (lon, lat) = line_vertices;
let xyzw = crate::math::lonlat::radec_to_xyzw(Angle(lon), Angle(lat));
let xyzw =
crate::coosys::apply_coo_system(CooSystem::ICRS, camera_coosys, &xyzw);
if let Some(p) = proj.model_to_normalized_device_space(&xyzw, camera) {
if i > 0 && crossing_edges_testing {
let mag2 = crate::math::vector::dist2(
crate::math::projection::ndc_to_clip_space(&p, camera).as_ref(),
crate::math::projection::ndc_to_clip_space(
&Vector2::new(ndc[i - 1][0] as f64, ndc[i - 1][1] as f64),
camera,
)
.as_ref(),
);
//al_core::info!("mag", i, mag2);
if mag2 > 0.1 {
return None;
}
}
ndc[i] = [p.x as f32, p.y as f32];
} else {
return None;
}
//ndc[i] = [xyzw.x as f32, xyzw.y as f32];
//ndc[i] = [lon as f32, lat as f32];
}
ndc[4] = ndc[0].clone();
Some(PathVertices { vertices: ndc })
})*/
self.vertices_in_view(moc, camera)
.map(|v| {
let vertices = [
v[0].0 as f32,
v[0].1 as f32,
v[1].0 as f32,
v[1].1 as f32,
v[1].0 as f32,
v[1].1 as f32,
v[2].0 as f32,
v[2].1 as f32,
v[2].0 as f32,
v[2].1 as f32,
v[3].0 as f32,
v[3].1 as f32,
v[3].0 as f32,
v[3].1 as f32,
v[0].0 as f32,
v[0].1 as f32,
];
vertices
})
.flatten()
}
}
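
`vertices_in_view` above does not emit MOC cells directly: a cell shallower than depth 3 is first replaced by its depth-3 descendants (`dd = 3 - depth`, hence `4^dd` sub-cells), so that large cells get enough edge segments to follow a curved projection. A tiny illustration of that count (`get_tile_cells` itself is not reproduced here):

```rust
// How many depth-3 sub-cells a MOC cell contributes before its edges are
// turned into line instances.
fn num_subcells(cell_depth: u8) -> u32 {
    let dd = if cell_depth <= 3 { 3 - cell_depth } else { 0 };
    4_u32.pow(dd as u32)
}

fn main() {
    for depth in 0_u8..=5 {
        println!("depth {depth}: {} sub-cell(s)", num_subcells(depth));
    }
    // depth 0 -> 64, depth 1 -> 16, depth 2 -> 4, depth 3 and deeper -> 1
}
```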

View File

@@ -1,13 +1,3 @@
use super::super::graph;
use super::Node;
use super::RenderMode;
use crate::HEALPixCoverage;
use healpix::{
compass_point::{Ordinal, OrdinalMap},
};
@@ -16,6 +6,7 @@ use healpix::{
/*
pub struct Edge;
@@ -198,3 +189,4 @@ impl RenderMode for Edge {
.collect()
}
}
*/

View File

@@ -1,11 +1,12 @@
use super::super::graph::NodeEdgeNeigs;
use super::Node;
use super::RenderMode;
use crate::HEALPixCoverage;
use healpix::compass_point::{Ordinal, OrdinalMap};
/*
use super::super::graph::G;
pub struct Fill;
impl RenderMode for Fill {
@@ -189,3 +190,4 @@ impl RenderMode for Fill {
.collect()
}
}
*/

View File

@@ -1,5 +1,5 @@
use crate::healpix::cell::CellVertices;
use crate::renderable::coverage::HEALPixCell;
use crate::HEALPixCell;
use crate::HEALPixCoverage;
pub mod edge;
@@ -7,7 +7,7 @@ pub mod filled;
pub mod perimeter;
pub(super) trait RenderMode {
fn build(moc: &HEALPixCoverage) -> Vec<Node>;
fn build(moc: &HEALPixCoverage) -> impl Iterator<Item = Node>;
}
#[derive(Debug)]

View File

@@ -1,21 +1,16 @@
use super::Node;
use super::RenderMode;
use crate::healpix::cell::HEALPixCell;
use healpix::{
compass_point::{Ordinal, OrdinalMap},
};
use healpix::compass_point::{Ordinal, OrdinalMap};
use moclib::elem::cell::Cell;
use crate::HEALPixCoverage;
use moclib::moc::range::CellAndEdges;
pub struct Perimeter;
impl RenderMode for Perimeter {
fn build(moc: &HEALPixCoverage) -> Vec<Node> {
fn build(moc: &HEALPixCoverage) -> impl Iterator<Item = Node> {
moc.0
.border_elementary_edges()
.map(|CellAndEdges { uniq, edges }| {
@@ -23,16 +18,16 @@ impl RenderMode for Perimeter {
let cell = HEALPixCell(c.depth, c.idx);
let mut map = OrdinalMap::new();
if edges.get(Ordinal::SE) {
if edges.get(moclib::moc::range::Ordinal::SE) {
map.put(Ordinal::SE, 1);
}
if edges.get(Ordinal::SW) {
if edges.get(moclib::moc::range::Ordinal::SW) {
map.put(Ordinal::SW, 1);
}
if edges.get(Ordinal::NE) {
if edges.get(moclib::moc::range::Ordinal::NE) {
map.put(Ordinal::NE, 1);
}
if edges.get(Ordinal::NW) {
if edges.get(moclib::moc::range::Ordinal::NW) {
map.put(Ordinal::NW, 1);
}
@@ -40,6 +35,5 @@ impl RenderMode for Perimeter {
Node { cell, vertices }
})
.collect()
}
}

View File

@@ -0,0 +1,160 @@
use crate::{healpix::coverage::HEALPixCoverage, CameraViewPort, ShaderManager};
use web_sys::WebGl2RenderingContext;
use al_core::WebGlContext;
use wasm_bindgen::JsValue;
use super::hierarchy::MOCHierarchy;
use al_api::coo_system::CooSystem;
use al_api::moc::MOC as Cfg;
pub struct MOCRenderer {
mocs: Vec<MOCHierarchy>,
cfgs: Vec<Cfg>,
gl: WebGlContext,
}
use crate::ProjectionType;
impl MOCRenderer {
pub fn new(gl: &WebGlContext) -> Result<Self, JsValue> {
// layout (location = 0) in vec2 ndc_pos;
//let vertices = vec![0.0; MAX_NUM_FLOATS_TO_DRAW];
//let indices = vec![0_u16; MAX_NUM_INDICES_TO_DRAW];
//let vertices = vec![];
/*let position = vec![];
let indices = vec![];
#[cfg(feature = "webgl2")]
vao.bind_for_update()
.add_array_buffer_single(
2,
"ndc_pos",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&position),
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<u32>(&indices),
)
.unbind();
#[cfg(feature = "webgl1")]
vao.bind_for_update()
.add_array_buffer(
2,
"ndc_pos",
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<f32>(&position),
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::DYNAMIC_DRAW,
VecData::<u32>(&indices),
)
.unbind();
*/
let mocs = Vec::new();
let cfgs = Vec::new();
Ok(Self {
gl: gl.clone(),
mocs,
cfgs,
})
}
pub fn push_back(
&mut self,
moc: HEALPixCoverage,
cfg: Cfg,
camera: &mut CameraViewPort,
proj: &ProjectionType,
) {
self.mocs
.push(MOCHierarchy::from_full_res_moc(self.gl.clone(), moc, &cfg));
self.cfgs.push(cfg);
camera.register_view_frame(CooSystem::ICRS, proj);
//self.layers.push(key);
}
pub fn get_hpx_coverage(&self, cfg: &Cfg) -> Option<&HEALPixCoverage> {
let name = cfg.get_uuid();
if let Some(idx) = self.cfgs.iter().position(|cfg| cfg.get_uuid() == name) {
Some(&self.mocs[idx].get_full_moc())
} else {
None
}
}
pub fn remove(
&mut self,
cfg: &Cfg,
camera: &mut CameraViewPort,
proj: &ProjectionType,
) -> Option<Cfg> {
let name = cfg.get_uuid();
if let Some(idx) = self.cfgs.iter().position(|cfg| cfg.get_uuid() == name) {
self.mocs.remove(idx);
camera.unregister_view_frame(CooSystem::ICRS, proj);
Some(self.cfgs.remove(idx))
} else {
None
}
}
pub fn set_cfg(
&mut self,
cfg: Cfg,
camera: &mut CameraViewPort,
projection: &ProjectionType,
shaders: &mut ShaderManager,
) -> Option<Cfg> {
let name = cfg.get_uuid();
if let Some(idx) = self.cfgs.iter().position(|cfg| cfg.get_uuid() == name) {
let old_cfg = self.cfgs[idx].clone();
self.cfgs[idx] = cfg;
let _ = self.draw(camera, projection, shaders);
Some(old_cfg)
} else {
// the cfg has not been found
None
}
}
pub fn is_empty(&self) -> bool {
self.cfgs.is_empty()
}
pub fn draw(
&mut self,
camera: &mut CameraViewPort,
proj: &ProjectionType,
shaders: &mut ShaderManager,
) -> Result<(), JsValue> {
if !self.is_empty() {
self.gl.enable(WebGl2RenderingContext::CULL_FACE);
for (hmoc, cfg) in self.mocs.iter_mut().zip(self.cfgs.iter()) {
if cfg.show {
let moc = hmoc.select_moc_from_view(camera);
moc.draw(camera, proj, shaders)?;
}
}
self.gl.disable(WebGl2RenderingContext::CULL_FACE);
}
Ok(())
}
}
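
For reference, the bookkeeping of this new `MOCRenderer` mirrors the one shown at the top of this changeset: hierarchies and their configs live in two parallel vectors and are addressed by the config's UUID. A toy, WebGL-free model of that pattern (the coverage names and UUIDs below are placeholders, not real data):

```rust
// Toy model (not from this changeset) of MOCRenderer's bookkeeping: the
// hierarchies and their configs live in two parallel vectors, looked up by
// the config's UUID.
struct ToyMocRenderer {
    mocs: Vec<&'static str>, // stands in for Vec<MOCHierarchy>
    cfg_uuids: Vec<String>,  // stands in for Vec<Cfg>, keyed by get_uuid()
}

impl ToyMocRenderer {
    fn new() -> Self {
        Self { mocs: Vec::new(), cfg_uuids: Vec::new() }
    }

    fn push_back(&mut self, moc: &'static str, uuid: &str) {
        self.mocs.push(moc);
        self.cfg_uuids.push(uuid.to_owned());
    }

    fn remove(&mut self, uuid: &str) -> Option<&'static str> {
        // Same pattern as MOCRenderer::remove: find the index of the uuid,
        // then remove the entry from both vectors at that index.
        let idx = self.cfg_uuids.iter().position(|u| u.as_str() == uuid)?;
        self.cfg_uuids.remove(idx);
        Some(self.mocs.remove(idx))
    }
}

fn main() {
    let mut r = ToyMocRenderer::new();
    r.push_back("coverage A", "moc-1");
    r.push_back("coverage B", "moc-2");
    assert_eq!(r.remove("moc-1"), Some("coverage A"));
    assert_eq!(r.remove("moc-1"), None);
    println!("{} MOC(s) left", r.mocs.len());
}
```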

View File

@@ -1,16 +1,18 @@
pub mod catalog;
pub mod coverage;
pub mod final_pass;
pub mod grid;
pub mod hips;
pub mod image;
pub mod line;
pub mod moc;
pub mod shape;
pub mod text;
pub mod utils;
use crate::renderable::image::Image;
use al_core::image::format::ChannelType;
use al_core::Texture2DArray;
pub use hips::HiPS;
pub use catalog::Manager;
@@ -21,9 +23,8 @@ use al_api::hips::ImageMetadata;
use al_api::image::ImageParams;
use al_core::colormap::Colormaps;
use al_core::image::format::NUM_CHANNELS;
use al_core::shader::Shader;
use al_core::SliceData;
use al_core::VertexArrayObject;
use al_core::WebGlContext;
@@ -38,9 +39,8 @@ use crate::{shader::ShaderManager, survey::config::HiPSConfig};
use hips::raytracing::RayTracer;
use std::borrow::Cow;
use std::collections::HashMap;
use std::rc::Rc;
use wasm_bindgen::JsValue;
use web_sys::WebGl2RenderingContext;
@@ -79,16 +79,19 @@ const DEFAULT_BACKGROUND_COLOR: ColorRGB = ColorRGB {
b: 0.05,
};
fn get_backgroundcolor_shader<'a>(gl: &WebGlContext, shaders: &'a mut ShaderManager) -> &'a Shader {
fn get_backgroundcolor_shader<'a>(
gl: &WebGlContext,
shaders: &'a mut ShaderManager,
) -> Result<&'a Shader, JsValue> {
shaders
.get(
gl,
&ShaderId(
Cow::Borrowed("RayTracerFontVS"),
Cow::Borrowed("RayTracerFontFS"),
ShaderId(
"hips_raytracer_backcolor.vert",
"hips_raytracer_backcolor.frag",
),
)
.unwrap_abort()
.map_err(|e| e.into())
}
pub struct ImageCfg {
@@ -136,29 +139,12 @@ impl Layers {
2,
"pos_clip_space",
WebGl2RenderingContext::STATIC_DRAW,
SliceData::<f32>(&[-1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0]),
&[-1.0_f32, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0] as &[f32],
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::STATIC_DRAW,
SliceData::<u16>(&[0, 1, 2, 0, 2, 3]),
)
// Unbind the buffer
.unbind();
#[cfg(feature = "webgl1")]
screen_vao
.bind_for_update()
.add_array_buffer(
2,
"pos_clip_space",
WebGl2RenderingContext::STATIC_DRAW,
SliceData::<f32>(&[-1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0]),
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::STATIC_DRAW,
SliceData::<u16>(&[0, 1, 2, 0, 2, 3]),
&[0_u16, 1, 2, 0, 2, 3] as &[u16],
)
// Unbind the buffer
.unbind();
@@ -182,7 +168,7 @@ impl Layers {
}
pub fn set_survey_url(&mut self, cdid: &CreatorDid, new_url: String) -> Result<(), JsValue> {
if let Some(mut survey) = self.surveys.get_mut(cdid) {
if let Some(survey) = self.surveys.get_mut(cdid) {
// update the root_url
survey.get_config_mut().set_root_url(new_url.clone());
@@ -258,7 +244,7 @@ impl Layers {
&self.screen_vao
};
get_backgroundcolor_shader(&self.gl, shaders)
get_backgroundcolor_shader(&self.gl, shaders)?
.bind(&self.gl)
.attach_uniforms_from(camera)
.attach_uniform("color", &background_color)

View File

@@ -0,0 +1 @@

View File

View File

@@ -0,0 +1,62 @@
use crate::math::{angle::Angle, lonlat::LonLatT};
use al_api::color::ColorRGBA;
use serde::Deserialize;
mod circle;
mod ellipsis;
mod image;
mod polyline;
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum Shape {
Box {
/// Center of the box
c: LonLatT<f32>,
/// Size following the RA axis
ra_w: Angle<f32>,
/// Size following the Dec axis
dec_h: Angle<f32>,
/// Rotation of the box in the RA-Dec space
rot: Angle<f32>,
},
Circle {
/// Center of the circle
c: LonLatT<f32>,
/// Radius of the circle
rad: Angle<f32>,
},
PolyLine(Box<[LonLatT<f32>]>),
Ellipsis {
/// Center of the ellipsis
c: LonLatT<f32>,
/// Semi-major axis
a: Angle<f32>,
/// Semi-minor axis
b: Angle<f32>,
    /// Rotation angle of the ellipsis. A zero angle aligns the ellipsis' major axis with the north pole; positive angles rotate towards the east.
rot: Angle<f32>,
},
// TODO
Image,
}
#[derive(Debug, Deserialize)]
pub enum Style {
None,
Dashed,
Dotted,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Footprint {
shapes: Vec<Shape>,
/// Some styling meta data
color: ColorRGBA,
filled: bool,
thickness: f32,
style: Style,
}
pub type Catalog = Footprint;
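
For illustration only: a standalone sketch of how a `Box` footprint of this kind could be expanded into polyline corners. The helper name, the use of plain `(lon, lat)` tuples in radians instead of the crate's `LonLatT<f32>`/`Angle<f32>` wrappers, and the flat tangent-plane approximation are all assumptions made for the example, not part of this diff.

```rust
/// Hypothetical helper (not in the crate): approximate the four corners of a
/// box footprint. Works on plain radian tuples and a local tangent-plane
/// approximation, so it is only meaningful for small boxes away from the poles.
fn box_corners(
    (lon0, lat0): (f32, f32), // center, radians
    ra_w: f32,                // size along the RA axis, radians
    dec_h: f32,               // size along the Dec axis, radians
    rot: f32,                 // rotation in the RA-Dec plane, radians
) -> [(f32, f32); 4] {
    let (s, c) = rot.sin_cos();
    let half_sizes = [
        (-0.5 * ra_w, -0.5 * dec_h),
        (0.5 * ra_w, -0.5 * dec_h),
        (0.5 * ra_w, 0.5 * dec_h),
        (-0.5 * ra_w, 0.5 * dec_h),
    ];
    half_sizes.map(|(dx, dy)| {
        // rotate the local offset, then map it back onto (lon, lat)
        let x = dx * c - dy * s;
        let y = dx * s + dy * c;
        (lon0 + x / lat0.cos(), lat0 + y)
    })
}
```

Such corners could then be closed into a `Shape::PolyLine` and fed to the same segment-flattening path used by the renderer below.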

View File

@@ -0,0 +1,210 @@
use crate::math::projection::ProjectionType;
use crate::shader::ShaderManager;
use al_api::coo_system::CooSystem;
use al_core::VertexArrayObject;
use al_core::WebGlContext;
use al_api::color::ColorRGBA;
pub struct PolylineRenderer {
gl: WebGlContext,
vao: VertexArrayObject,
color: ColorRGBA,
thickness: f32,
num_instances: usize,
}
use wasm_bindgen::JsValue;
use al_core::VecData;
use web_sys::WebGl2RenderingContext;
use crate::camera::CameraViewPort;
use super::Shape;
use super::Catalog;
impl PolylineRenderer {
/// Init the buffers, VAO and shader
pub fn new<'a>(gl: &WebGlContext, catalog: &Catalog) -> Result<Self, JsValue> {
let lines = catalog
.shapes
.iter()
.flat_map(|s| {
let mut v = vec![];
match s {
Shape::PolyLine(vertices) => {
for (v1, v2) in vertices.iter().zip(vertices.iter().skip(1)) {
v.extend_from_slice(&[
v1.lon().to_radians(),
v1.lat().to_radians(),
v2.lon().to_radians(),
v2.lat().to_radians(),
])
}
}
_ => (),
}
v
})
.collect::<Vec<_>>();
let num_instances = lines.len() / 4;
// Create the VAO for the screen
let mut vao = VertexArrayObject::new(&gl);
vao.bind_for_update()
// Store the endpoints of each line segment (lon/lat, in radians) in an instanced VBO
.add_instanced_array_buffer(
"line",
9 * std::mem::size_of::<f32>(),
&[2, 2],
&[0, 2 * std::mem::size_of::<f32>()],
WebGl2RenderingContext::STATIC_DRAW,
VecData::<f32>(&lines),
)
.add_array_buffer(
"vertices",
2 * std::mem::size_of::<f32>(),
&[2],
&[0],
WebGl2RenderingContext::STATIC_DRAW,
&[
0_f32, -0.5_f32, 1_f32, -0.5_f32, 1_f32, 0.5_f32, 0_f32, 0.5_f32,
] as &[f32],
)
// Set the element buffer
.add_element_buffer(
WebGl2RenderingContext::STATIC_DRAW,
&[0_u16, 1_u16, 2_u16, 0_u16, 2_u16, 3_u16] as &[u16],
)
// Unbind the buffer
.unbind();
let gl = gl.clone();
Ok(Self {
gl,
vao,
color: catalog.color,
thickness: catalog.thickness,
num_instances,
})
}
/*pub fn add_fill_paths<V>(
&mut self,
paths: impl Iterator<Item = PathVertices<V>>,
color: &ColorRGBA,
coo_space: CooSpace,
) where
V: AsRef<[[f32; 2]]>,
{
let mut num_indices = 0;
let off_indices = self.indices.len();
let mut geometry: VertexBuffers<[f32; 2], u32> = VertexBuffers::new();
let mut tessellator = FillTessellator::new();
//let mut num_vertices = 0;
for path in paths {
let mut path_builder = Path::builder();
let PathVertices {
vertices, /*, closed */
} = path;
let line = vertices.as_ref();
if !line.is_empty() {
let v = &line[0];
path_builder.begin(point(v[0], v[1]));
for v in line.iter().skip(1) {
//let v = clamp_ndc_vertex(v);
path_builder.line_to(point(v[0], v[1]));
}
path_builder.end(false);
}
// Create the destination vertex and index buffers.
let p = path_builder.build();
// Let's use our own custom vertex type instead of the default one.
// Will contain the result of the tessellation.
let num_vertices = (self.vertices.len() / 2) as u32;
// Compute the tessellation.
tessellator
.tessellate_with_ids(
p.id_iter(),
&p,
Some(&p),
&FillOptions::default()
.with_intersections(false)
.with_fill_rule(FillRule::NonZero)
.with_tolerance(5e-3),
&mut BuffersBuilder::new(&mut geometry, |vertex: FillVertex| {
vertex.position().to_array()
})
.with_vertex_offset(num_vertices),
)
.unwrap_abort();
}
let VertexBuffers { vertices, indices } = geometry;
num_indices += indices.len();
self.vertices.extend(vertices.iter().flatten());
self.indices.extend(indices.iter());
//al_core::info!("num vertices fill", nv);
self.meta.push(Meta {
off_indices,
num_indices,
thickness: 1.0,
color: color.clone(),
coo_space,
});
}*/
pub fn draw(
&mut self,
shaders: &mut ShaderManager,
camera: &CameraViewPort,
proj: &ProjectionType,
) -> Result<(), JsValue> {
self.gl.enable(WebGl2RenderingContext::BLEND);
self.gl.blend_func_separate(
WebGl2RenderingContext::SRC_ALPHA,
WebGl2RenderingContext::ONE_MINUS_SRC_ALPHA,
WebGl2RenderingContext::ONE,
WebGl2RenderingContext::ONE,
);
// draw the instanced lines
let icrs2view = CooSystem::ICRS.to(camera.get_coo_system());
let view2world = camera.get_m2w();
let icrs2world = view2world * icrs2view;
crate::shader::get_shader(&self.gl, shaders, "line_inst_lonlat.vert", "line_base.frag")?
.bind(&self.gl)
.attach_uniforms_from(camera)
.attach_uniform("u_2world", &icrs2world)
.attach_uniform("u_color", &self.color)
.attach_uniform("u_width", &self.thickness)
.attach_uniform("u_proj", proj)
.bind_vertex_array_object_ref(&self.vao)
.draw_elements_instanced_with_i32(
WebGl2RenderingContext::TRIANGLES,
0,
self.num_instances as i32,
);
self.gl.disable(WebGl2RenderingContext::BLEND);
Ok(())
}
}
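
As a side note on the instancing scheme used above, here is a standalone sketch of the segment-flattening step, equivalent to the `flat_map` in `PolylineRenderer::new` but written over plain `(lon, lat)` tuples assumed to be already in radians (the diff converts from the catalog's units with `to_radians()`); the function name is illustrative, not part of the crate.

```rust
/// Every consecutive vertex pair of a polyline becomes one instance, stored as
/// [lon1, lat1, lon2, lat2] in the instanced VBO.
fn flatten_polyline(vertices: &[(f32, f32)]) -> Vec<f32> {
    vertices
        .windows(2)
        .flat_map(|seg| {
            let (lon1, lat1) = seg[0];
            let (lon2, lat2) = seg[1];
            [lon1, lat1, lon2, lat2]
        })
        .collect()
}
```

With N vertices this yields N - 1 instances (`num_instances = lines.len() / 4` above); the unit quad `[(0, -0.5), (1, -0.5), (1, 0.5), (0, 0.5)]` is then presumably stretched between the two projected endpoints and thickened by the `u_width` uniform in `line_inst_lonlat.vert`, which is not shown in this diff.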

View File

@@ -1,10 +1,9 @@
use super::Renderer;
use al_core::log::console_log;
use web_sys::CanvasRenderingContext2d;
pub struct TextRenderManager {
// The text canvas
canvas: HtmlCanvasElement,
ctx: CanvasRenderingContext2d,
color: JsValue,
font_size: u32,
@@ -13,9 +12,7 @@ pub struct TextRenderManager {
use cgmath::{Rad, Vector2};
use wasm_bindgen::JsValue;
use crate::camera::CameraViewPort;
use al_api::color::{ColorRGB, ColorRGBA};
use web_sys::HtmlCanvasElement;
use al_api::color::ColorRGBA;
use crate::Abort;
use wasm_bindgen::JsCast;
@@ -42,7 +39,6 @@ impl TextRenderManager {
Ok(Self {
font_size,
color,
canvas,
ctx,
})
}

View File

@@ -1,4 +1,3 @@
use cgmath::BaseFloat;
use std::ops::RangeInclusive;
use super::triangle::Triangle;

View File

@@ -1,17 +1,15 @@
use al_core::shader::Shader;
use al_core::WebGlContext;
pub type VertId = Cow<'static, str>;
pub type FragId = Cow<'static, str>;
type FileId = Cow<'static, str>;
pub type VertId = &'static str;
pub type FragId = &'static str;
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
pub struct ShaderId(pub VertId, pub FragId);
pub struct ShaderManager {
// Compiled shaders stored in an HashMap
shaders: HashMap<ShaderId, Shader>,
// Shaders sources coming from the javascript
src: HashMap<FileId, String>,
src: HashMap<&'static str, &'static str>,
}
#[derive(Debug)]
@@ -20,6 +18,7 @@ pub enum Error {
ShaderNotFound { message: &'static str },
ShaderCompilingLinking { message: JsValue },
FileNotFound { message: &'static str },
Io { message: String },
}
use wasm_bindgen::JsValue;
@@ -35,7 +34,8 @@ impl From<Error> for JsValue {
Error::FileNotFound { message } => {
JsValue::from_str(&format!("Shader not found: {:?}", message))
}
Error::ShaderCompilingLinking { message } => message
Error::ShaderCompilingLinking { message } => message,
Error::Io { message } => message.into(),
}
}
}
@@ -49,15 +49,40 @@ pub struct FileSrc {
use std::collections::hash_map::Entry;
use std::collections::HashMap;
impl ShaderManager {
pub fn new(_gl: &WebGlContext, files: Vec<FileSrc>) -> Result<ShaderManager, Error> {
let src = files
.into_iter()
.map(|file| {
let FileSrc { id, content } = file;
(Cow::Owned(id), content)
})
.collect::<HashMap<_, _>>();
pub fn new() -> Result<ShaderManager, Error> {
let src = crate::shaders::get_all();
// Loop over the entries in the directory
/*let _src = std::fs::read_dir("./shaders")
.map_err(|e| Error::Io {
message: e.to_string(),
})?
.into_iter()
.filter_map(|entry| {
let entry = entry.ok()?;
let path = entry.path();
console_log(&format!("aaa"));
if path.is_file() {
let file_name = path.to_str()?;
console_log(&format!("{}", file_name));
// read the file into a bufreader
let file = File::open(file_name).ok()?;
let mut reader = std::io::BufReader::new(file);
let mut content = String::new();
reader.read_to_string(&mut content).ok()?;
Some((Cow::Owned(file_name.to_owned()), content))
} else {
None
}
})
.collect::<HashMap<_, _>>();*/
Ok(ShaderManager {
shaders: HashMap::new(),
@@ -65,21 +90,23 @@ impl ShaderManager {
})
}
pub fn get(&mut self, gl: &WebGlContext, id: &ShaderId) -> Result<&Shader, Error> {
pub fn get(&mut self, gl: &WebGlContext, id: ShaderId) -> Result<&Shader, Error> {
let shader = match self.shaders.entry(id.clone()) {
Entry::Occupied(o) => o.into_mut(),
Entry::Vacant(v) => {
let ShaderId(vert_id, frag_id) = id;
let vert_src = self.src.get(vert_id).ok_or(Error::FileNotFound {
message: "Vert not found",
})?;
let frag_src = self.src.get(frag_id).ok_or(Error::FileNotFound {
message: "Frag not found",
})?;
let shader = Shader::new(gl, vert_src, frag_src).map_err(|err| Error::ShaderCompilingLinking {
message: err,
})?;
let &vert_src = self
.src
.get(vert_id)
.ok_or(Error::FileNotFound { message: vert_id })?;
let &frag_src = self
.src
.get(frag_id)
.ok_or(Error::FileNotFound { message: frag_id })?;
let shader = Shader::new(gl, vert_src, frag_src)
.map_err(|err| Error::ShaderCompilingLinking { message: err })?;
v.insert(shader)
}
};
@@ -87,60 +114,14 @@ impl ShaderManager {
Ok(shader)
}
}
use std::borrow::Cow;
/*use paste::paste;
macro_rules! define_shader_getter {
($renderer_type:ident, $shader_type:ident, $vert_key:tt, $frag_key:tt) => {
paste! {
pub fn [< get_ $renderer_type _shader_ $shader_type >]<'a>(
gl: &WebGlContext,
shaders: &'a mut ShaderManager
) -> &'a Shader {
shaders.get(
gl,
&ShaderId(
Cow::Borrowed($vert_key),
Cow::Borrowed($frag_key),
),
)
.unwrap_abort()
}
}
}
pub(crate) fn get_shader<'a>(
gl: &WebGlContext,
shaders: &'a mut ShaderManager,
vert: &'static str,
frag: &'static str,
) -> Result<&'a Shader, JsValue> {
shaders
.get(gl, ShaderId(vert, frag))
.map_err(|err| err.into())
}
/* Raytracer shaders */
define_shader_getter!(raytracer, color, "RayTracerVS", "RayTracerColorFS");
define_shader_getter!(raytracer, gray2colormap, "RayTracerVS", "RayTracerGrayscale2ColormapFS");
define_shader_getter!(raytracer, gray2color, "RayTracerVS", "RayTracerGrayscale2ColorFS");
define_shader_getter!(raytracer, gray2colormap_integer, "RayTracerVS", "RayTracerGrayscale2ColormapIntegerFS");
define_shader_getter!(raytracer, gray2color_integer, "RayTracerVS", "RayTracerGrayscale2ColorIntegerFS");
define_shader_getter!(raytracer, gray2colormap_unsigned, "RayTracerVS", "RayTracerGrayscale2ColormapUnsignedFS");
define_shader_getter!(raytracer, gray2color_unsigned, "RayTracerVS", "RayTracerGrayscale2ColorUnsignedFS");
/* Rasterizer shaders */
define_shader_getter!(raster, color, "RasterizerVS", "RasterizerColorFS");
define_shader_getter!(raster, gray2colormap, "RasterizerVS", "RasterizerGrayscale2ColormapFS");
define_shader_getter!(raster, gray2color, "RasterizerVS", "RasterizerGrayscale2ColorFS");
define_shader_getter!(raster, gray2colormap_integer, "RasterizerVS", "RasterizerGrayscale2ColormapIntegerFS");
define_shader_getter!(raster, gray2color_integer, "RasterizerVS", "RasterizerGrayscale2ColorIntegerFS");
define_shader_getter!(raster, gray2colormap_unsigned, "RasterizerVS", "RasterizerGrayscale2ColormapUnsignedFS");
define_shader_getter!(raster, gray2color_unsigned, "RasterizerVS", "RasterizerGrayscale2ColorUnsignedFS");
/* Pass shaders */
define_shader_getter!(pass, post, "PostVS", "PostFS");
/* Catalog shaders */
define_shader_getter!(catalog, ait, "CatalogAitoffVS", "CatalogFS");
define_shader_getter!(catalog, mol, "CatalogMollVS", "CatalogFS");
define_shader_getter!(catalog, arc, "CatalogArcVS", "CatalogFS");
define_shader_getter!(catalog, hpx, "CatalogHEALPixVS", "CatalogFS");
define_shader_getter!(catalog, mer, "CatalogMercatVS", "CatalogFS");
define_shader_getter!(catalog, ort, "CatalogOrthoVS", "CatalogOrthoFS");
define_shader_getter!(catalog, tan, "CatalogTanVS", "CatalogFS");*/
pub(crate) fn get_shader<'a>(gl: &WebGlContext, shaders: &'a mut ShaderManager, vert: &'static str, frag: &'static str) -> Result<&'a Shader, JsValue> {
shaders.get(
gl,
&ShaderId(Cow::Borrowed(vert), Cow::Borrowed(frag)),
).map_err(|err| err.into())
}
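
The rewrite above drops the `FileSrc` sources passed in from JavaScript in favor of `crate::shaders::get_all()`, i.e. the GLSL sources are now embedded in the wasm binary and looked up by file name, with each (vert, frag) pair compiled lazily and cached by `ShaderId`. The generator behind `get_all()` is not part of this diff; below is a minimal hand-written sketch of what such a table could look like. The relative paths and the selection of shader names are assumptions for the example (the real table may well be produced by a build script or macro).

```rust
use std::collections::HashMap;

/// Minimal sketch of a `shaders::get_all()`-style table: GLSL sources are baked
/// into the wasm at compile time with `include_str!` and keyed by file name.
pub fn get_all() -> HashMap<&'static str, &'static str> {
    let mut src = HashMap::new();
    src.insert(
        "line_inst_lonlat.vert",
        include_str!("../shaders/line_inst_lonlat.vert"),
    );
    src.insert("line_base.frag", include_str!("../shaders/line_base.frag"));
    src.insert(
        "hips_raytracer_backcolor.vert",
        include_str!("../shaders/hips_raytracer_backcolor.vert"),
    );
    src.insert(
        "hips_raytracer_backcolor.frag",
        include_str!("../shaders/hips_raytracer_backcolor.frag"),
    );
    src
}
```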

View File

@@ -1,10 +1,9 @@
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use std::collections::HashMap;
use std::rc::Rc;
use al_core::image::format::ChannelType;
use al_core::log::console_log;
use cgmath::Vector3;
use al_api::hips::ImageExt;
@@ -134,7 +133,7 @@ pub struct ImageSurveyTextures {
size: usize,
pub textures: HashMap<HEALPixCell, Texture>,
//pub base_textures: [Texture; NUM_HPX_TILES_DEPTH_ZERO],
pub base_textures: [Texture; NUM_HPX_TILES_DEPTH_ZERO],
//pub cutoff_values_tile: Rc<RefCell<HashMap<HEALPixCell, (f32, f32)>>>,
// Array of 2D textures
@@ -169,15 +168,15 @@ fn create_texture_array<F: ImageFormat>(
impl ImageSurveyTextures {
pub fn new(gl: &WebGlContext, config: HiPSConfig) -> Result<ImageSurveyTextures, JsValue> {
let size = config.num_textures();
let size = config.num_textures() - NUM_HPX_TILES_DEPTH_ZERO;
// Ensures there is at least space for the 12
// root textures
debug_assert!(size >= NUM_HPX_TILES_DEPTH_ZERO);
//debug_assert!(size >= NUM_HPX_TILES_DEPTH_ZERO);
let heap = HEALPixCellHeap::with_capacity(size);
let textures = HashMap::with_capacity(size);
let now = Time::now();
/*let base_textures = [
let base_textures = [
Texture::new(&HEALPixCell(0, 0), 0, now),
Texture::new(&HEALPixCell(0, 1), 1, now),
Texture::new(&HEALPixCell(0, 2), 2, now),
@@ -190,7 +189,7 @@ impl ImageSurveyTextures {
Texture::new(&HEALPixCell(0, 9), 9, now),
Texture::new(&HEALPixCell(0, 10), 10, now),
Texture::new(&HEALPixCell(0, 11), 11, now),
];*/
];
let channel = config.get_format().get_channel();
let texture_2d_array = match channel {
@@ -221,7 +220,7 @@ impl ImageSurveyTextures {
size,
//num_root_textures_available,
textures,
//base_textures,
base_textures,
//num_base_textures,
texture_2d_array,
available_tiles_during_frame,
@@ -253,7 +252,7 @@ impl ImageSurveyTextures {
};
let now = Time::now();
/*self.base_textures = [
self.base_textures = [
Texture::new(&HEALPixCell(0, 0), 0, now),
Texture::new(&HEALPixCell(0, 1), 1, now),
Texture::new(&HEALPixCell(0, 2), 2, now),
@@ -266,7 +265,7 @@ impl ImageSurveyTextures {
Texture::new(&HEALPixCell(0, 9), 9, now),
Texture::new(&HEALPixCell(0, 10), 10, now),
Texture::new(&HEALPixCell(0, 11), 11, now),
];*/
];
self.heap.clear();
self.textures.clear();
@@ -315,11 +314,12 @@ impl ImageSurveyTextures {
time_request: Time,
) -> Result<(), JsValue> {
if !self.contains_tile(cell) {
let dd = self.config.delta_depth();
// Get the texture cell in which the tile has to be
let tex_cell = cell.get_texture_cell(self.config.delta_depth());
let tex_cell = cell.get_texture_cell(dd);
let tex_cell_is_root = tex_cell.is_root(self.config.delta_depth());
if !self.textures.contains_key(&tex_cell) {
let tex_cell_is_root = tex_cell.is_root(dd);
if !tex_cell_is_root && !self.textures.contains_key(&tex_cell) {
// The texture is not among the essential ones
// (i.e. is not a root texture)
let texture = if self.is_heap_full() {
@@ -346,6 +346,7 @@ impl ImageSurveyTextures {
// The heap buffer is not full, let's create a new
// texture with a unique idx
// The idx is computed based on the current size of the buffer
/*let idx = if tex_cell_is_root {
self.num_base_textures += 1;
tex_cell.idx() as usize
@@ -353,7 +354,8 @@ impl ImageSurveyTextures {
//NUM_HPX_TILES_DEPTH_ZERO + (self.heap.len() - self.num_base_textures)
self.heap.len()
};*/
let idx = self.heap.len();
//let idx = NUM_HPX_TILES_DEPTH_ZERO + (self.heap.len() - self.num_base_textures);
let idx = NUM_HPX_TILES_DEPTH_ZERO + self.heap.len();
Texture::new(&tex_cell, idx as i32, time_request)
};
@@ -370,14 +372,14 @@ impl ImageSurveyTextures {
// We can safely push it
// First get the texture
let texture = //if !tex_cell_is_root {
let texture = if !tex_cell_is_root {
self.textures
.get_mut(&tex_cell)
.expect("the cell has to be in the tile buffer");
/* } else {
.expect("the cell has to be in the tile buffer")
} else {
let HEALPixCell(_, idx) = tex_cell;
&mut self.base_textures[idx as usize]
};*/
};
let missing = image.is_none();
send_to_gpu(
@@ -445,22 +447,23 @@ impl ImageSurveyTextures {
// For that purpose, we first need to verify that its
// texture ancestor exists and then, whether it contains the tile
pub fn contains_tile(&self, cell: &HEALPixCell) -> bool {
let texture_cell = cell.get_texture_cell(self.config.delta_depth());
let dd = self.config.delta_depth();
let texture_cell = cell.get_texture_cell(dd);
//let tex_cell_is_root = texture_cell.is_root(self.config.delta_depth());
//if tex_cell_is_root {
// let HEALPixCell(_, idx) = texture_cell;
// self.base_textures[idx as usize].contains(cell)
//} else {
if let Some(texture) = self.get(&texture_cell) {
// The texture is present in the buffer
// We must check whether it contains the tile
texture.contains(cell)
let tex_cell_is_root = texture_cell.is_root(dd);
if tex_cell_is_root {
let HEALPixCell(_, idx) = texture_cell;
self.base_textures[idx as usize].contains(cell)
} else {
// The texture in which cell should be is not present
false
if let Some(texture) = self.get(&texture_cell) {
// The texture is present in the buffer
// We must check whether it contains the tile
texture.contains(cell)
} else {
// The texture in which cell should be is not present
false
}
}
//}
}
// Update the priority of the texture containing the tile
@@ -469,10 +472,11 @@ impl ImageSurveyTextures {
debug_assert!(self.contains_tile(cell));
// Get the texture cell in which the tile has to be
let texture_cell = cell.get_texture_cell(self.config.delta_depth());
//if texture_cell.is_root(self.config().delta_depth()) {
// return;
//}
let dd = self.config.delta_depth();
let texture_cell = cell.get_texture_cell(dd);
if texture_cell.is_root(dd) {
return;
}
let texture = self
.textures
@@ -552,33 +556,33 @@ impl ImageSurveyTextures {
/// Accessors
pub fn get(&self, texture_cell: &HEALPixCell) -> Option<&Texture> {
//if texture_cell.is_root(self.config().delta_depth()) {
// let HEALPixCell(_, idx) = texture_cell;
// Some(&self.base_textures[*idx as usize])
//} else {
self.textures.get(texture_cell)
//}
if texture_cell.is_root(self.config().delta_depth()) {
let HEALPixCell(_, idx) = texture_cell;
Some(&self.base_textures[*idx as usize])
} else {
self.textures.get(texture_cell)
}
}
// Get the nearest parent tile found in the CPU buffer
pub fn get_nearest_parent(&self, cell: &HEALPixCell) -> Option<HEALPixCell> {
let dd = self.config.delta_depth();
/*if cell.is_root(dd) {
if cell.is_root(dd) {
// Root cells are in the buffer by definition
*cell
} else {*/
let mut parent_cell = cell.parent();
while !self.contains(&parent_cell) && !parent_cell.is_root(dd) {
parent_cell = parent_cell.parent();
}
if self.contains(&parent_cell) {
Some(parent_cell)
Some(*cell)
} else {
None
let mut parent_cell = cell.parent();
while !self.contains(&parent_cell) && !parent_cell.is_root(dd) {
parent_cell = parent_cell.parent();
}
if self.contains(&parent_cell) {
Some(parent_cell)
} else {
None
}
}
//}
}
pub fn config(&self) -> &HiPSConfig {
@@ -680,14 +684,14 @@ impl SendUniforms for ImageSurveyTextures {
for idx in 0..NUM_HPX_TILES_DEPTH_ZERO {
let cell = HEALPixCell(0, idx as u64);
if let Some(texture) = self.get(&cell) {
let texture_uniforms = TextureUniforms::new(texture, idx as i32);
shader.attach_uniforms_from(&texture_uniforms);
} else {
let texture = self.get(&cell).unwrap();
let texture_uniforms = TextureUniforms::new(texture, idx as i32);
shader.attach_uniforms_from(&texture_uniforms);
/*else {
let texture = &Texture::new(&cell, idx as i32, Time::now());
let texture_uniforms = TextureUniforms::new(texture, idx as i32);
shader.attach_uniforms_from(&texture_uniforms);
}
}*/
}
//}
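
To summarize the caching change above: the 12 HEALPix depth-0 ("base") cell textures now live in a fixed `base_textures` array, so `contains_tile`, `get` and `get_nearest_parent` resolve root cells directly and the eviction heap only manages the remaining slots, which start right after the 12 reserved layers (`idx = NUM_HPX_TILES_DEPTH_ZERO + self.heap.len()`). A tiny standalone sketch of that slot arithmetic, with illustrative names that are not the crate's API:

```rust
/// Number of HEALPix cells at depth 0.
const NUM_HPX_TILES_DEPTH_ZERO: usize = 12;

/// Illustrative helper (not in the crate): pick the layer index inside the
/// texture 2D array for a cell. Root cells map to their own fixed slots 0..12
/// and are never evicted; dynamic cells are appended after them, in the order
/// the eviction heap grew.
fn texture_slot(is_root: bool, root_idx: usize, heap_len: usize) -> usize {
    if is_root {
        debug_assert!(root_idx < NUM_HPX_TILES_DEPTH_ZERO);
        root_idx
    } else {
        NUM_HPX_TILES_DEPTH_ZERO + heap_len
    }
}
```

This is also why the heap is now sized with `config.num_textures() - NUM_HPX_TILES_DEPTH_ZERO`: the 12 reserved layers are no longer counted in the dynamic budget.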

View File

@@ -1,5 +1,5 @@
use al_api::hips::ImageExt;
use al_core::log::console_log;
use al_core::{image::format::ImageFormat, image::raw::ImageBuffer};
#[derive(Debug)]

View File

@@ -188,7 +188,6 @@ impl<'a> TextureUniforms<'a> {
}
use al_core::{
log::console_log,
shader::{SendUniforms, ShaderBound},
};
impl<'a> SendUniforms for TextureUniforms<'a> {

View File

@@ -65,6 +65,7 @@ pub unsafe fn transmute_vec<I, O>(mut s: Vec<I>) -> Result<Vec<O>, &'static str>
}
}
#[allow(unused)]
pub(super) fn merge_overlapping_intervals(mut intervals: Vec<Range<usize>>) -> Vec<Range<usize>> {
intervals.sort_unstable_by(|a, b| {
let cmp = a.start.cmp(&b.start);

View File

@@ -10,6 +10,8 @@
/* media query on the aladin lite container. not supported everywhere.
There can be a more supported alternative here: https://caniuse.com/?search=grid-template-columns */
/*container-type: inline-size;*/
font-size: 1rem;
}
.aladin-imageCanvas {
@@ -104,7 +106,9 @@
}
.aladin-measurement-div table tr td a {
display: block;
color: green;
}
.aladin-measurement-div table tr td a:hover {
color: greenyellow;
}
@@ -839,6 +843,7 @@ canvas {
cursor: pointer;
font-family: monospace;
font-size: 1rem;
width: 100%;
/* <option> colors */
/* Remove focus outline */
/* Remove IE arrow */
@@ -1095,6 +1100,7 @@ canvas {
font-size: 1rem;
border-radius: 5px;
height: 1.7rem;
width: 5rem;
}
/*
@@ -1116,14 +1122,10 @@ canvas {
padding: 0.5rem;
}
.aladin-stack-box .aladin-input-select {
width: 100%;
}
.aladin-location {
position: absolute;
top: 0.2rem;
left: 6.9rem;
left: 5.4rem;
font-family: monospace;
color: white;

View File

@@ -1,31 +0,0 @@
precision lowp float;
attribute vec2 offset;
attribute vec2 uv;
attribute vec3 center;
uniform float current_time;
uniform mat4 model;
uniform mat4 inv_model;
uniform vec2 ndc_to_clip;
uniform float czf;
uniform vec2 kernel_size;
varying vec2 out_uv;
varying vec3 out_p;
@import ../hips/projection;
void main() {
vec3 p = vec3(inv_model * vec4(center, 1.0));
//p = check_inversed_longitude(p);
vec2 center_pos_clip_space = world2clip_aitoff(p);
vec2 pos_clip_space = center_pos_clip_space;
gl_Position = vec4((pos_clip_space / (ndc_to_clip * czf)) + offset * kernel_size , 0.0, 1.0);
out_uv = uv;
out_p = p;
}

View File

@@ -1,29 +0,0 @@
precision lowp float;
attribute vec2 offset;
attribute vec2 uv;
attribute vec3 center;
uniform float current_time;
uniform mat4 inv_model;
uniform vec2 ndc_to_clip;
uniform float czf;
uniform vec2 kernel_size;
varying vec2 out_uv;
varying vec3 out_p;
@import ../hips/projection;
void main() {
vec3 p = vec3(inv_model * vec4(center, 1.0));
//p = check_inversed_longitude(p);
vec2 center_pos_clip_space = world2clip_arc(p);
vec2 pos_clip_space = center_pos_clip_space;
gl_Position = vec4((pos_clip_space / (ndc_to_clip * czf)) + offset * kernel_size , 0.0, 1.0);
out_uv = uv;
out_p = p;
}

View File

@@ -1,14 +0,0 @@
precision lowp float;
varying vec2 out_uv;
varying vec3 out_p;
uniform sampler2D kernel_texture;
uniform float fov;
uniform float strength;
void main() {
vec4 color = texture2D(kernel_texture, out_uv) / max(log2(fov*100.0), 1.0);
color.r *= strength;
gl_FragColor = color;
}

View File

@@ -1,30 +0,0 @@
precision lowp float;
attribute vec2 offset;
attribute in vec2 uv;
attribute in vec3 center;
uniform float current_time;
uniform mat4 inv_model;
uniform vec2 ndc_to_clip;
uniform float czf;
uniform vec2 kernel_size;
varying vec2 out_uv;
varying vec3 out_p;
@import ../hips/projection;
void main() {
vec3 p = vec3(inv_model * vec4(center, 1.0));
//p = check_inversed_longitude(p);
vec2 center_pos_clip_space = world2clip_mercator(p);
vec2 pos_clip_space = center_pos_clip_space;
gl_Position = vec4((pos_clip_space / (ndc_to_clip * czf)) + offset * kernel_size , 0.0, 1.0);
out_uv = uv;
out_p = p;
}

View File

@@ -1,29 +0,0 @@
precision lowp float;
attribute vec2 offset;
attribute vec2 uv;
attribute vec3 center;
uniform float current_time;
uniform mat4 inv_model;
uniform vec2 ndc_to_clip;
uniform float czf;
uniform vec2 kernel_size;
out vec2 out_uv;
out vec3 out_p;
@import ../hips/projection;
void main() {
vec3 p = vec3(inv_model * vec4(center, 1.0));
//p = check_inversed_longitude(p);
vec2 center_pos_clip_space = world2clip_mollweide(p);
vec2 pos_clip_space = center_pos_clip_space;
gl_Position = vec4((pos_clip_space / (ndc_to_clip * czf)) + offset * kernel_size , 0.0, 1.0);
out_uv = uv;
out_p = p;
}

View File

@@ -1,18 +0,0 @@
precision lowp float;
varying vec2 out_uv;
varying vec3 out_p;
uniform sampler2D kernel_texture;
uniform float fov;
uniform float strength;
void main() {
if (out_p.z < 0.0) {
discard;
}
vec4 color = texture2D(kernel_texture, out_uv).rgba / max(log2(fov*100.0), 1.0);
color.r *= strength;
gl_FragColor = color;
}

Some files were not shown because too many files have changed in this diff.