wip fits support

This commit is contained in:
Matthieu BAUMANN
2023-01-26 01:51:59 -08:00
parent f8c74f275f
commit fd1393b639
35 changed files with 2364 additions and 15160 deletions

1
.gitignore vendored
View File

@@ -2,6 +2,7 @@ node_modules
dist
examples/.DS_Store
examples/fits
.DS_Store
package-lock.json
src/core/target/

File diff suppressed because one or more lines are too long

View File

@@ -14,10 +14,16 @@
<script type="text/javascript">
let aladin;
A.init.then(() => {
aladin = A.aladin('#aladin-lite-div', {target: "0 0", cooFrame: "gal"});
aladin = A.aladin('#aladin-lite-div', {target: "m51", cooFrame: "icrs"});
aladin.displayFITS(
'./cutout-CDS_P_HST_PHAT_F475W.fits',
//'./fits/HorseHead.fits'
//'./fits/neowise.fits'
//'./fits/irac.fits'
'./fits/cutout-CDS_P_HST_PHAT_F475W.fits',
//'./fits/FOCx38i0101t_c0f.fits'
//'./fits/ngc1316o.fits'
//'./fits/panstarrs-rotated-around-orion.fits'
);
});
</script>

View File

@@ -12,7 +12,6 @@ members = [
"al-core",
"al-api",
"al-task-exec",
#"al-ui"
]
[lib]
@@ -29,15 +28,15 @@ serde = { version = "^1.0.59", features = ["derive"] }
serde_json = "1.0"
serde-wasm-bindgen = "0.4"
console_error_panic_hook = "0.1.7"
fitsrs = { package = "fitsrs", git = 'https://github.com/cds-astro/fitsrs', branch = 'master' }
fitsrs = "0.1.1"
enum_dispatch = "0.3.8"
wasm-bindgen = "0.2.79"
wasm-streams = "0.3.0"
al-core = { path = "./al-core" }
#al-ui = { path = "./al-ui" }
al-task-exec = { path = "./al-task-exec" }
al-api = { path = "./al-api" }
mapproj = "0.3.0"
wcs = { path = "./../../../wcs" }
[features]
webgl1 = [

View File

@@ -20,7 +20,7 @@ impl Default for BlendCfg {
fn default() -> Self {
Self {
src_color_factor: BlendFactor::SrcAlpha,
dst_color_factor: BlendFactor::OneMinusConstantAlpha,
dst_color_factor: BlendFactor::OneMinusSrcAlpha,
func: BlendFunc::FuncAdd,
}
}

View File

@@ -9,7 +9,7 @@ js-sys = "0.3.47"
cgmath = "*"
jpeg-decoder = "0.3.0"
png = "0.17.6"
fitsrs = { package = "fitsrs", git = 'https://github.com/cds-astro/fitsrs', branch = 'master' }
fitsrs = "0.1.1"
al-api = { path = "../al-api" }
serde = { version = "^1.0.59", features = ["derive"] }
serde_json = "1.0"

View File

@@ -34,7 +34,7 @@ impl<'a> Fits<'a> {
let height = header.get_axis_size(2)
.ok_or_else(|| JsValue::from_str("NAXIS2 not found in the fits"))?;
let data = match data {
fitsrs::hdu::data::DataBorrowed::U8(slice) => {
Data::U8(Cow::Borrowed(slice))
@@ -234,40 +234,16 @@ impl Image for Fits<'_> {
}
}
use wasm_bindgen::JsValue;
use crate::image::format::ImageFormat;
pub trait FitsImageFormat: ImageFormat {
type Type: Clone;
type ArrayBufferView: AsRef<js_sys::Object>;
const BITPIX: i8;
/// Creates a JS typed array which is a view into wasm's linear memory at the slice specified.
/// This function returns a new typed array which is a view into wasm's memory. This view does not copy the underlying data.
///
/// # Safety
///
/// Views into WebAssembly memory are only valid so long as the backing buffer isn't resized in JS. Once this function is called any future calls to Box::new (or malloc of any form) may cause the returned value here to be invalidated. Use with caution!
///
/// Additionally the returned object can be safely mutated but the input slice isn't guaranteed to be mutable.
///
/// Finally, the returned object is disconnected from the input slice's lifetime, so there's no guarantee that the data is read at the right time.
unsafe fn view(s: &[Self::Type]) -> Self::ArrayBufferView;
}
use crate::image::R32F;
impl FitsImageFormat for R32F {
const BITPIX: i8 = -32;
type Type = f32;
type ArrayBufferView = js_sys::Float32Array;
unsafe fn view(s: &[Self::Type]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
@@ -275,45 +251,17 @@ use crate::image::{R16I, R32I, R8UI, R64F};
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R64F {
const BITPIX: i8 = -64;
type Type = f64;
type ArrayBufferView = js_sys::Float64Array;
unsafe fn view(s: &[Self::Type]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R32I {
const BITPIX: i8 = 32;
type Type = i32;
type ArrayBufferView = js_sys::Int32Array;
unsafe fn view(s: &[Self::Type]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R16I {
const BITPIX: i8 = 16;
type Type = i16;
type ArrayBufferView = js_sys::Int16Array;
unsafe fn view(s: &[Self::Type]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R8UI {
const BITPIX: i8 = 8;
type Type = u8;
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[Self::Type]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}

View File

@@ -7,6 +7,7 @@ pub enum Bytes<'a> {
pub trait ImageFormat {
type P: Pixel;
type ArrayBufferView: AsRef<js_sys::Object>;
const NUM_CHANNELS: usize;
const EXT: &'static str;
@@ -15,6 +16,18 @@ pub trait ImageFormat {
const INTERNAL_FORMAT: i32;
const TYPE: u32;
/// Creates a JS typed array which is a view into wasm's linear memory at the slice specified.
/// This function returns a new typed array which is a view into wasm's memory. This view does not copy the underlying data.
///
/// # Safety
///
/// Views into WebAssembly memory are only valid so long as the backing buffer isn't resized in JS. Once this function is called any future calls to Box::new (or malloc of any form) may cause the returned value here to be invalidated. Use with caution!
///
/// Additionally the returned object can be safely mutated but the input slice isn't guaranteed to be mutable.
///
/// Finally, the returned object is disconnected from the input slice's lifetime, so there's no guarantee that the data is read at the right time.
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str>;
}
use crate::webgl_ctx::WebGlRenderingCtx;
@@ -36,6 +49,12 @@ impl ImageFormat for RGB8U {
Ok(Bytes::Owned(bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
@@ -57,6 +76,12 @@ impl ImageFormat for RGBA8U {
Ok(Bytes::Owned(bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl1")]
impl ImageFormat for RGBA8U {
@@ -75,6 +100,12 @@ impl ImageFormat for RGBA8U {
Ok(Bytes::Owned(bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
@@ -97,6 +128,12 @@ impl ImageFormat for RGBA32F {
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Float32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
@@ -118,6 +155,12 @@ impl ImageFormat for RGB32F {
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Float32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
@@ -143,6 +186,12 @@ impl ImageFormat for R32F {
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Float32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
@@ -171,6 +220,12 @@ impl ImageFormat for R64F {
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Float32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
@@ -190,6 +245,12 @@ impl ImageFormat for R8UI {
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
@@ -209,6 +270,12 @@ impl ImageFormat for R16I {
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Int16Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
@@ -228,6 +295,12 @@ impl ImageFormat for R32I {
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Int32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]

View File

@@ -302,6 +302,18 @@ impl SendUniforms for GrayscaleParameter {
}*/
use al_api::colormap::Colormap;
use al_api::hips::HiPSColor;
use al_api::hips::ImageSurveyMeta;
impl SendUniforms for ImageSurveyMeta {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
shader
.attach_uniforms_from(&self.color)
.attach_uniform("opacity", &self.opacity);
shader
}
}
impl SendUniforms for HiPSColor {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
match self {

View File

@@ -130,45 +130,28 @@ impl Texture2D {
width: i32,
height: i32,
tex_params: &'static [(u32, u32)],
pixels: Option<&[u8]>,
data: Option<&[<F::P as Pixel>::Item]>,
) -> Result<Texture2D, JsValue> {
let texture = gl.create_texture();
gl.bind_texture(WebGlRenderingCtx::TEXTURE_2D, texture.as_ref());
for (pname, param) in tex_params.iter() {
gl.tex_parameteri(WebGlRenderingCtx::TEXTURE_2D, *pname, *param as i32);
}
gl.tex_image_2d_with_i32_and_i32_and_i32_and_format_and_type_and_opt_u8_array(
WebGlRenderingCtx::TEXTURE_2D,
0,
F::INTERNAL_FORMAT,
let texture = Texture2D::create_empty_with_format::<F>(
gl,
width,
height,
0,
F::FORMAT,
F::TYPE,
pixels,
)
.expect("Texture 2D");
tex_params
)?;
let gl = gl.clone();
let metadata = Some(Rc::new(RefCell::new(Texture2DMeta {
width: width as u32,
height: height as u32,
internal_format: F::INTERNAL_FORMAT,
format: F::FORMAT,
type_: F::TYPE,
})));
if let Some(data) = data {
let buf_data = unsafe { F::view(data) };
texture.bind()
.tex_sub_image_2d_with_i32_and_i32_and_u32_and_type_and_opt_array_buffer_view(
0,
0,
width,
height,
Some(buf_data.as_ref()),
);
}
Ok(Texture2D {
texture,
gl,
metadata,
})
Ok(texture)
}
pub fn create_empty_unsized(
@@ -578,4 +561,4 @@ impl<'a> Texture2DBoundMut<'a> {
height: height as u32,
})));
}
}
}

View File

@@ -28,7 +28,7 @@ impl Texture2DArray {
) -> Result<Texture2DArray, JsValue> {
let textures: Result<Vec<_>, _> = (0..num_slices)
.map(|_| {
Texture2D::create_from_raw_pixels::<F>(gl, width, height, tex_params, None)
Texture2D::create_empty_with_format::<F>(gl, width, height, tex_params)
})
.collect();

View File

@@ -12,10 +12,11 @@ use crate::{
catalog::{Manager, Source},
grid::ProjetedGrid,
moc::MOC,
image::FitsImage,
},
healpix::coverage::HEALPixCoverage,
shader::ShaderManager,
survey::ImageSurveys,
renderable::Layers,
tile_fetcher::TileFetcherQueue,
time::DeltaTime,
};
@@ -51,7 +52,7 @@ pub struct App {
downloader: Downloader,
tile_fetcher: TileFetcherQueue,
surveys: ImageSurveys,
layers: Layers,
time_start_blending: Time,
request_redraw: bool,
@@ -86,6 +87,8 @@ pub struct App {
colormaps: Colormaps,
projection: ProjectionType,
images: Vec<FitsImage>,
}
use cgmath::{Vector2, Vector3};
@@ -159,7 +162,7 @@ impl App {
let _fbo_ui = FrameBufferObject::new(&gl, screen_size.x as usize, screen_size.y as usize)?;
// The surveys storing the textures of the resolved tiles
let surveys = ImageSurveys::new(&gl, &projection);
let layers = Layers::new(&gl, &projection)?;
let time_start_blending = Time::now();
@@ -194,6 +197,7 @@ impl App {
gl.clear_color(0.15, 0.15, 0.15, 1.0);
let images = vec![];
Ok(App {
gl,
start_time_frame,
@@ -205,7 +209,7 @@ impl App {
last_time_request_for_new_tiles,
request_for_new_tiles,
downloader,
surveys,
layers,
time_start_blending,
rendering,
@@ -233,7 +237,8 @@ impl App {
tile_fetcher,
colormaps,
projection
projection,
images,
})
}
@@ -241,7 +246,7 @@ impl App {
// Move the views of the different active surveys
self.tile_fetcher.clear();
// Loop over the surveys
for (_, survey) in self.surveys.iter_mut() {
for survey in self.layers.values_mut_hips() {
// do not add tiles if the view is already at depth 0
let view = survey.get_view();
let depth_tile = view.get_depth();
@@ -477,7 +482,7 @@ use crate::downloader::request::tile::Tile;
impl App {
pub(crate) fn set_background_color(&mut self, color: ColorRGB) {
self.surveys.set_background_color(color);
self.layers.set_background_color(color);
self.request_redraw = true;
}
@@ -510,7 +515,7 @@ impl App {
}
pub(crate) fn is_ready(&self) -> Result<bool, JsValue> {
let res = self.surveys.is_ready();
let res = self.layers.is_ready();
Ok(res)
}
@@ -540,6 +545,12 @@ impl App {
Ok(())
}
pub(crate) fn add_fits_image(&mut self, raw_bytes: &[u8]) -> Result<(), JsValue> {
self.images.push(FitsImage::new(&self.gl, raw_bytes)?);
Ok(())
}
pub(crate) fn update(&mut self, _dt: DeltaTime) -> Result<(), JsValue> {
//let available_tiles = self.run_tasks(dt)?;
if let Some(InertiaAnimation {
@@ -597,7 +608,7 @@ impl App {
tile_copied = true;
let is_tile_root = tile.is_root;
if let Some(survey) = self.surveys.get_mut(&tile.get_hips_url()) {
if let Some(survey) = self.layers.get_mut_hips(&tile.get_hips_url()) {
if is_tile_root {
let is_missing = tile.missing();
let Tile {
@@ -661,7 +672,7 @@ impl App {
Resource::Allsky(allsky) => {
let hips_url = allsky.get_hips_url();
if let Some(survey) = self.surveys.get_mut(hips_url) {
if let Some(survey) = self.layers.get_mut_hips(hips_url) {
let is_missing = allsky.missing();
if is_missing {
// The allsky image is missing so we download all the tiles contained into
@@ -684,8 +695,8 @@ impl App {
}
},
Resource::PixelMetadata(metadata) => {
if let Some(survey) = self.surveys.get_mut(&metadata.hips_url) {
let mut cfg = survey.get_config_mut();
if let Some(hips) = self.layers.get_mut_hips(&metadata.hips_url) {
let mut cfg = hips.get_config_mut();
if let Some(metadata) = *metadata.value.lock().unwrap_abort() {
cfg.blank = metadata.blank;
@@ -697,14 +708,14 @@ impl App {
Resource::Moc(moc) => {
let moc_url = moc.get_url();
let url = &moc_url[..moc_url.find("/Moc.fits").unwrap_abort()];
if let Some(survey) = self.surveys.get_mut(url) {
if let Some(hips) = self.layers.get_mut_hips(url) {
let request::moc::Moc {
moc,
..
} = moc;
if let Some(moc) = &*moc.lock().unwrap_abort() {
survey.set_moc(moc.clone());
hips.set_moc(moc.clone());
self.request_for_new_tiles = true;
self.request_redraw = true;
@@ -722,14 +733,14 @@ impl App {
.notify(num_tile_received, &mut self.downloader);
self.time_start_blending = Time::now();
}
//self.surveys.add_resolved_tiles(resolved_tiles);
//self.layers.add_resolved_tiles(resolved_tiles);
// 3. Try sending new tile requests after
//self.downloader.try_sending_tile_requests()?;
}
// Then, check for new tiles
if has_camera_moved {
self.surveys.refresh_views(&mut self.camera);
self.layers.refresh_views(&mut self.camera);
}
if self.request_for_new_tiles && Time::now() - self.last_time_request_for_new_tiles > DeltaTime::from(500_f32) {
@@ -744,8 +755,8 @@ impl App {
(Time::now().0 - self.time_start_blending.0) < BLENDING_ANIM_DURATION;
let mut start_fading = false;
for survey in self.surveys.values() {
if let Some(start_time) = survey.get_ready_time() {
for hips in self.layers.values_hips() {
if let Some(start_time) = hips.get_ready_time() {
start_fading |= Time::now().0 - start_time.0 < BLENDING_ANIM_DURATION;
if start_fading {
break;
@@ -760,10 +771,14 @@ impl App {
// Finally update the camera that reset the flag camera changed
if has_camera_moved {
// Catalogues update
/*if let Some(view) = self.surveys.get_view() {
/*if let Some(view) = self.layers.get_view() {
self.manager.update(&self.camera, view);
}*/
self.grid.update(&self.camera, &self.projection);
// Update the fits images buffers
for image in &mut self.images {
image.update_buffers(&self.camera, &self.projection)?;
}
// MOCs update
self.moc.update(&self.camera, &self.projection);
}
@@ -797,9 +812,8 @@ impl App {
pub(crate) fn read_pixel(&self, pos: &Vector2<f64>, layer_id: &str) -> Result<JsValue, JsValue> {
if let Some(lonlat) = self.screen_to_world(pos) {
let survey = self
.surveys
.get_from_layer(layer_id)
let survey = self.layers
.get_hips_from_layer(layer_id)
.ok_or_else(|| JsValue::from_str("Survey not found"))?;
survey.read_pixel(&lonlat, &self.camera)
@@ -820,7 +834,7 @@ impl App {
let camera = &self.camera;
let grid = &mut self.grid;
let surveys = &mut self.surveys;
let layers = &mut self.layers;
let catalogs = &self.manager;
let colormaps = &self.colormaps;
let fbo_view = &self.fbo_view;
@@ -831,7 +845,7 @@ impl App {
gl.clear_color(0.00, 0.00, 0.00, 1.0);
gl.clear(WebGl2RenderingContext::COLOR_BUFFER_BIT);
surveys.draw(camera, shaders, colormaps);
layers.draw(camera, shaders, colormaps);
// Draw the catalog
catalogs.draw(&gl, shaders, camera, colormaps, fbo_view)?;
@@ -867,7 +881,7 @@ impl App {
self.final_rendering_pass.draw_on_screen(&self.fbo_ui);
}
self.surveys.reset_frame();*/
self.layers.reset_frame();*/
let scene_redraw = self.rendering | force_render;
//let mut ui = self.ui.lock();
@@ -877,7 +891,7 @@ impl App {
let shaders = &mut self.shaders;
let grid = &mut self.grid;
let surveys = &mut self.surveys;
let layers = &mut self.layers;
//let catalogs = &self.manager;
let colormaps = &self.colormaps;
let camera = &self.camera;
@@ -885,9 +899,13 @@ impl App {
// Clear all the screen first (only the region set by the scissor)
self.gl.clear(web_sys::WebGl2RenderingContext::COLOR_BUFFER_BIT);
surveys.draw(camera, shaders, colormaps, &self.projection)?;
layers.draw(camera, shaders, colormaps, &self.projection)?;
self.moc.draw(shaders, camera);
for image in &self.images {
image.draw(shaders, colormaps)?;
}
// Draw the catalog
//let fbo_view = &self.fbo_view;
//catalogs.draw(&gl, shaders, camera, colormaps, fbo_view)?;
@@ -901,7 +919,7 @@ impl App {
self.camera.reset();
if self.rendering {
self.surveys.reset_frame();
self.layers.reset_frame();
self.moc.reset_frame();
}
}
@@ -910,10 +928,10 @@ impl App {
}
pub(crate) fn set_image_surveys(&mut self, hipses: Vec<SimpleHiPS>) -> Result<(), JsValue> {
self.surveys.set_image_surveys(hipses, &self.gl, &mut self.camera, &self.projection)?;
self.layers.set_image_surveys(hipses, &self.gl, &mut self.camera, &self.projection)?;
for survey in self.surveys.surveys.values_mut() {
let cfg = survey.get_config();
for hips in self.layers.values_hips() {
let cfg = hips.get_config();
// Request for the allsky first
// The allsky is not mandatory present in a HiPS service but it is better to first try to search for it
self.downloader.fetch(query::PixelMetadata::new(cfg));
@@ -944,8 +962,8 @@ impl App {
Ok(())
}
pub(crate) fn get_image_survey_color_cfg(&self, layer: &str) -> Result<ImageSurveyMeta, JsValue> {
self.surveys.get_image_survey_color_cfg(layer)
pub(crate) fn get_layer_cfg(&self, layer: &str) -> Result<ImageSurveyMeta, JsValue> {
self.layers.get_layer_cfg(layer)
}
pub(crate) fn set_image_survey_color_cfg(
@@ -955,11 +973,11 @@ impl App {
) -> Result<(), JsValue> {
self.request_redraw = true;
self.surveys.set_image_survey_color_cfg(layer, meta, &self.camera, &self.projection)
self.layers.set_layer_cfg(layer, meta, &self.camera, &self.projection)
}
pub(crate) fn set_image_survey_img_format(&mut self, layer: String, format: HiPSTileFormat) -> Result<(), JsValue> {
let survey = self.surveys.get_mut_from_layer(&layer)
let survey = self.layers.get_mut_hips_from_layer(&layer)
.ok_or_else(|| JsValue::from_str("Layer not found"))?;
survey.set_img_format(format)?;
// Request for the allsky first
@@ -997,16 +1015,18 @@ impl App {
}
// Width and height given are in pixels
pub(crate) fn set_projection(&mut self, projection: ProjectionType) {
pub(crate) fn set_projection(&mut self, projection: ProjectionType) -> Result<(), JsValue> {
self.projection = projection;
// Recompute the ndc_to_clip
self.camera.set_projection(&self.projection);
// Recompute clip zoom factor
self.surveys.set_projection(&self.projection);
self.layers.set_projection(&self.projection)?;
self.request_for_new_tiles = true;
self.request_redraw = true;
Ok(())
}
pub(crate) fn get_max_fov(&self) -> f64 {
@@ -1059,23 +1079,9 @@ impl App {
}
pub(crate) fn set_survey_url(&mut self, past_url: String, new_url: String) -> Result<(), JsValue> {
self.surveys.set_survey_url(past_url, new_url)
self.layers.set_survey_url(past_url, new_url)
}
/*pub(crate) fn set_catalog_colormap(&mut self, name: String, colormap: String) -> Result<(), JsValue> {
let colormap = self.colormaps.get(&colormap);
let catalog = self.manager.get_mut_catalog(&name).map_err(|e| {
let err: JsValue = e.into();
err
})?;
catalog.set_colormap(colormap);
self.request_redraw = true;
Ok(())
}*/
pub(crate) fn set_catalog_opacity(&mut self, name: String, opacity: f32) -> Result<(), JsValue> {
let catalog = self.manager.get_mut_catalog(&name).map_err(|e| {
let err: JsValue = e.into();
@@ -1281,7 +1287,7 @@ impl App {
}
pub(crate) fn get_norder(&self) -> i32 {
self.surveys.get_depth() as i32
self.layers.get_depth() as i32
}
pub(crate) fn get_clip_zoom_factor(&self) -> f64 {

View File

@@ -1,7 +1,7 @@
pub mod query;
pub mod request;
use crate::survey::Url;
use crate::renderable::Url;
use std::collections::HashSet;
use query::QueryId;

View File

@@ -26,7 +26,7 @@ impl From<AllskyRequest> for RequestType {
}
}
use crate::survey::Url;
use crate::renderable::Url;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, RequestMode, Response};

View File

@@ -40,7 +40,7 @@ impl From<PixelMetadataRequest> for RequestType {
}
use crate::survey::Url;
use crate::renderable::Url;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, RequestMode, Response};
use crate::downloader::query::Query;

View File

@@ -18,7 +18,7 @@ impl From<MOCRequest> for RequestType {
RequestType::Moc(request)
}
}
use crate::survey::Url;
use crate::renderable::Url;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, RequestMode, Response};
use wasm_bindgen::JsCast;

View File

@@ -31,7 +31,7 @@ impl From<TileRequest> for RequestType {
}
use al_core::image::html::HTMLImage;
use wasm_bindgen::JsValue;
use crate::survey::Url;
use crate::renderable::Url;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, RequestMode, Response};
use wasm_bindgen::JsCast;

View File

@@ -19,6 +19,7 @@
extern crate wasm_streams;
extern crate console_error_panic_hook;
extern crate mapproj;
extern crate wcs;
use std::panic;
pub trait Abort {
@@ -86,7 +87,6 @@ mod survey;
mod tile_fetcher;
mod time;
mod fifo_cache;
mod wcs;
use crate::{
camera::CameraViewPort, colormap::Colormaps, math::lonlat::LonLatT, shader::ShaderManager, time::DeltaTime,
@@ -224,18 +224,17 @@ impl WebClient {
let mut mol_proj = mapproj::pseudocyl::mol::Mol::new();
mol_proj.set_n_iter(10);
mol_proj.set_epsilon(1e-12);
self.app.set_projection(ProjectionType::Mol(mol_proj));
self.app.set_projection(ProjectionType::Mol(mol_proj))
},
// Conic
"COD" => self.app.set_projection(ProjectionType::Cod(mapproj::conic::cod::Cod::new())),
// Hybrid
"HPX" => self.app.set_projection(ProjectionType::Hpx(mapproj::hybrid::hpx::Hpx::new())),
_ => {
return Err(JsValue::from_str("Not a valid projection name. AIT, ARC, SIN, TAN, MOL, HPX and MER are accepted"));
},
Err(JsValue::from_str("Not a valid projection name. AIT, ARC, SIN, TAN, MOL, HPX and MER are accepted"))
}
}
Ok(())
}
/// Check whether the app is ready
@@ -318,8 +317,8 @@ impl WebClient {
}
#[wasm_bindgen(js_name = getImageSurveyMeta)]
pub fn get_survey_color_cfg(&self, layer: String) -> Result<ImageSurveyMeta, JsValue> {
self.app.get_image_survey_color_cfg(&layer)
pub fn get_layer_cfg(&self, layer: String) -> Result<ImageSurveyMeta, JsValue> {
self.app.get_layer_cfg(&layer)
}
// Set a new color associated with a layer
@@ -780,22 +779,12 @@ impl WebClient {
Ok(())
}
/*#[wasm_bindgen(js_name = addFITSImage)]
#[wasm_bindgen(js_name = addFITSImage)]
pub fn add_fits_image(&mut self, raw_bytes: &[u8]) -> Result<(), JsValue> {
use al_core::image::fits::Fits;
let fits = Fits::new(raw_bytes)?;
self.app.add_fits_image(raw_bytes)?;
use crate::wcs::WCS2;
let wcs = WCS2::new(&fits).map_err(|e| JsValue::from_str(e))?;
use crate::math::lonlat::LonLat;
use crate::math::angle::Angle;
let xyz = LonLatT::new(Angle(0.19283736400376558), Angle(0.726503953787)).vector();
let p = wcs.proj(&xyz)?
.unwrap();
al_core::info!(wcs, p);
Ok(())
}*/
}
#[wasm_bindgen(js_name = removeMoc)]
pub fn remove_moc(&mut self, params: &al_api::moc::MOC) -> Result<(), JsValue> {

View File

@@ -340,6 +340,10 @@ where
pub fn max_value() -> Self {
Angle(S::max_value())
}
pub fn to_radians(&self) -> S {
self.0
}
}
pub trait ToAngle<S>

File diff suppressed because it is too large Load Diff

View File

@@ -1,16 +1,15 @@
pub mod triangulation;
use crate::{camera::CameraViewPort, math::projection::Projection};
use crate::domain::sdf::ProjDefType;
use al_core::VecData;
use al_core::{shader::ShaderBound, Texture2D, VertexArrayObject, WebGlContext};
pub use super::triangulation::Triangulation;
pub trait RayTracingProjection {
fn get_raytracer_vertex_array_object(raytracer: &RayTracer) -> &VertexArrayObject;
}
pub use triangulation::Triangulation;
use crate::domain::sdf::ProjDefType;
fn create_vertices_array(proj_area: &ProjDefType) -> (Vec<f32>, Vec<u16>) {
let Triangulation { vertices, idx } = Triangulation::build(proj_area);
@@ -95,37 +94,7 @@ fn generate_xyz_position(projection: &ProjectionType) -> Vec<f32> {
data
}
/*
fn generate_lonlat_position<P: Projection>() -> Vec<f32> {
let (w, h) = (SIZE_POSITION_TEX as f64, SIZE_POSITION_TEX as f64);
let mut data = vec![];
for y in 0..(h as u32) {
for x in 0..(w as u32) {
let xy = Vector2::new(x, y);
let clip_xy = Vector2::new(
2.0 * ((xy.x as f64) / (w as f64)) - 1.0,
2.0 * ((xy.y as f64) / (h as f64)) - 1.0,
);
if let Some(pos) = P::clip_to_world_space(&clip_xy) {
let pos = pos.truncate().normalize();
let (lon, lat) = crate::math::lonlat::xyz_to_radec::<f64>(&pos);
/*let mut d: u32 = 0;
d |= 3 << 30;
d |= (((pos.z * 0.5 + 0.5) * (1024.0 as f64)) as u32) << 20;
d |= (((pos.y * 0.5 + 0.5) * (1024.0 as f64)) as u32) << 10;
d |= ((pos.x * 0.5 + 0.5) * (1024.0 as f64)) as u32;
data.push(d);*/
data.extend(&[lon.0 as f32, lat.0 as f32, 1.0]);
} else {
data.extend(&[1.0, 1.0, 1.0]);
}
}
}
data
}
*/
#[cfg(feature = "webgl1")]
use cgmath::Rad;
#[cfg(feature = "webgl1")]
@@ -150,55 +119,10 @@ fn generate_hash_dxdy<P: Projection>(depth: u8) -> Vec<f32> {
data
}
use crate::Abort;
fn create_f32_texture_from_raw(
gl: &WebGlContext,
width: i32,
height: i32,
data: &[f32],
) -> Texture2D {
let tex = Texture2D::create_empty_with_format::<al_core::image::format::RGB32F>(
gl,
width,
height,
&[
(
WebGl2RenderingContext::TEXTURE_MIN_FILTER,
WebGl2RenderingContext::NEAREST,
),
(
WebGl2RenderingContext::TEXTURE_MAG_FILTER,
WebGl2RenderingContext::NEAREST,
),
// Prevents s-coordinate wrapping (repeating)
(
WebGl2RenderingContext::TEXTURE_WRAP_S,
WebGl2RenderingContext::CLAMP_TO_EDGE,
),
// Prevents t-coordinate wrapping (repeating)
(
WebGl2RenderingContext::TEXTURE_WRAP_T,
WebGl2RenderingContext::CLAMP_TO_EDGE,
),
],
)
.unwrap_abort();
let buf_data = unsafe { js_sys::Float32Array::view(data) };
tex.bind()
.tex_sub_image_2d_with_i32_and_i32_and_u32_and_type_and_opt_array_buffer_view(
0,
0,
width,
height,
Some(&buf_data),
);
tex
}
use crate::ProjectionType;
use wasm_bindgen::JsValue;
impl RayTracer {
pub fn new(gl: &WebGlContext, proj: &ProjectionType) -> RayTracer {
pub fn new(gl: &WebGlContext, proj: &ProjectionType) -> Result<RayTracer, JsValue> {
let proj_area = proj.get_area();
let (vertices, idx) = create_vertices_array(proj_area);
@@ -239,12 +163,32 @@ impl RayTracer {
.unbind();
// create position data
let data = generate_xyz_position(proj);
let position_tex = create_f32_texture_from_raw(
let position_tex = Texture2D::create_from_raw_pixels::<al_core::image::format::RGB32F>(
gl,
SIZE_POSITION_TEX as i32,
SIZE_POSITION_TEX as i32,
&data,
);
&[
(
WebGl2RenderingContext::TEXTURE_MIN_FILTER,
WebGl2RenderingContext::NEAREST,
),
(
WebGl2RenderingContext::TEXTURE_MAG_FILTER,
WebGl2RenderingContext::NEAREST,
),
// Prevents s-coordinate wrapping (repeating)
(
WebGl2RenderingContext::TEXTURE_WRAP_S,
WebGl2RenderingContext::CLAMP_TO_EDGE,
),
// Prevents t-coordinate wrapping (repeating)
(
WebGl2RenderingContext::TEXTURE_WRAP_T,
WebGl2RenderingContext::CLAMP_TO_EDGE,
),
],
Some(&data),
)?;
// create ang2pix texture for webgl1 app
#[cfg(feature = "webgl1")]
@@ -258,13 +202,14 @@ impl RayTracer {
)
};
RayTracer {
Ok(RayTracer {
vao,
position_tex,
#[cfg(feature = "webgl1")]
ang2pix_tex,
}
})
}
pub fn get_vao(&self) -> &VertexArrayObject {

View File

@@ -15,60 +15,13 @@ impl Triangulation {
pub(super) fn build(proj_def: &ProjDefType) -> Triangulation {
let (mut vertices, mut idx) = (Vec::new(), Vec::new());
match proj_def {
/*ProjDefType::Hpx(_) => {
// The HEALPix 2d projection space is not convex
// We can define it by creating triangles from the projection
// of the HEALPix cells at order 2
let mut off_idx = 0_u16;
vertices = HEALPixCell::allsky(3)
.flat_map(|cell| {
idx.extend([
off_idx,
off_idx + 1,
off_idx + 2,
off_idx + 3,
off_idx + 4,
off_idx + 5,
]);
let (c_ra, c_dec) = cell.center();
let v = cell.vertices().map(|(ra, dec)| {
let ra = lerp(ra, c_ra, 1e-6);
let dec = lerp(dec, c_dec, 1e-6);
let v = math::lonlat::radec_to_xyzw(Angle(ra), Angle(dec));
ProjectionType::Hpx(mapproj::hybrid::hpx::Hpx)
.world_to_clip_space(&v)
.unwrap_abort()
});
let mut vertices = [v[0], v[3], v[2], v[2], v[1], v[0]];
if !crate::math::vector::ccw_tri(&vertices[3], &vertices[4], &vertices[5]) {
// triangles are crossing
vertices[3].x = 1.0;
vertices[5].x = 1.0;
}
off_idx += 6;
vertices
})
.collect::<Vec<_>>();
},*/
_ => {
// get the validity domain
let root = Face::new(Vector2::new(-1_f64, -1_f64), Vector2::new(1_f64, 1_f64));
let children = root.split(2);
let root = Face::new(Vector2::new(-1_f64, -1_f64), Vector2::new(1_f64, 1_f64));
let children = root.split(2);
let depth = 3;
for child in children {
recursive_triangulation(&child, &mut vertices, &mut idx, depth, proj_def);
}
}
let depth = 3;
for child in children {
recursive_triangulation(&child, &mut vertices, &mut idx, depth, proj_def);
}
Triangulation { vertices, idx }

View File

@@ -0,0 +1,484 @@
use std::vec;
use al_api::hips::ImageSurveyMeta;
use moclib::moc::range::RangeMOC;
use moclib::qty::Hpx;
use moclib::elem::cell::Cell;
use moclib::moc::{RangeMOCIterator, RangeMOCIntoIterator};
use web_sys::WebGl2RenderingContext;
use al_api::cell::HEALPixCellProjeted;
use al_api::coo_system::CooSystem;
use al_api::blend::BlendCfg;
use al_api::colormap::Colormap;
use al_api::hips::{TransferFunction, GrayscaleColor, HiPSColor};
use al_core::{VertexArrayObject, Texture2D};
use al_core::WebGlContext;
use al_core::VecData;
use al_core::webgl_ctx::GlWrapper;
use crate::math::projection::coo_space::XYNDC;
use crate::camera::CameraViewPort;
use crate::ProjectionType;
use crate::healpix::cell::HEALPixCell;
use crate::ShaderManager;
use crate::Colormaps;
use fitsrs::{
fits::Fits,
hdu::{
HDU,
data::DataBorrowed
}
};
use wcs::ImgXY;
use wcs::WCS;
use wasm_bindgen::JsValue;
// A single FITS image rendered on the sphere.
//
// The pixel values are uploaded once as a R32F texture; `update_buffers`
// re-tessellates the image footprint (its HEALPix coverage) into NDC
// triangles each time the view changes, and `draw` renders them.
pub struct FitsImage {
    // The vertex array object of the screen in NDC
    vao: VertexArrayObject,
    // HEALPix coverage (MOC) of the image footprint; drives which cells are rasterized
    moc: RangeMOC<u64, Hpx<u64>>,
    // World Coordinate System parsed from the FITS header (pixel <-> sky mapping)
    wcs: WCS,
    // CPU-side NDC positions, 2 floats per vertex; refilled by `update_buffers`
    pos: Vec<f32>,
    // CPU-side texture coordinates, 2 floats per vertex
    uv: Vec<f32>,
    // Triangle indices into `pos`/`uv`
    indices: Vec<u32>,
    gl: WebGlContext,
    // R32F texture holding the raw FITS pixel values
    texture: Texture2D,
    // Rendering metadata: color/transfer function, opacity, blending
    cfg: ImageSurveyMeta,
    // FITS BLANK/BSCALE/BZERO keywords, passed as uniforms to the shader
    blank: f32,
    scale: f32,
    offset: f32,
}
use al_core::{inforec, log};
impl FitsImage {
    /// Build a `FitsImage` from the raw bytes of a FITS file.
    ///
    /// Parses the primary HDU, uploads the pixel values as a `R32F` texture,
    /// computes the HEALPix coverage (MOC) of the image footprint from its
    /// WCS and derives automatic min/max cuts from the 1st/99th percentiles
    /// of the valid (non-blank, non-NaN) pixel values.
    ///
    /// # Errors
    ///
    /// Returns an error when the FITS stream cannot be parsed, when the
    /// header carries no valid WCS, or when one of the image corners cannot
    /// be unprojected to the sky.
    pub fn new<'a>(
        gl: &WebGlContext,
        raw_bytes: &'a [u8],
    ) -> Result<Self, JsValue> {
        // Load the fits file
        let Fits { hdu: HDU { header, data } } = Fits::from_reader(raw_bytes)
            .map_err(|_| JsValue::from_str("Fits cannot be parsed"))?;

        // BSCALE/BZERO/BLANK are optional cards; fall back to their FITS
        // defaults both when the card is missing AND when it is malformed
        // (the previous `.unwrap()` panicked on a malformed card).
        let scale = header
            .get_parsed::<f64>(b"BSCALE ")
            .unwrap_or(Ok(1.0))
            .unwrap_or(1.0) as f32;
        let offset = header
            .get_parsed::<f64>(b"BZERO ")
            .unwrap_or(Ok(0.0))
            .unwrap_or(0.0) as f32;
        let blank = header
            .get_parsed::<f64>(b"BLANK ")
            .unwrap_or(Ok(std::f64::NAN))
            .unwrap_or(std::f64::NAN) as f32;

        // Create a WCS from a specific header unit
        let wcs = WCS::new(&header).map_err(|_| JsValue::from_str("Failed to parse the WCS"))?;

        let (w, h) = wcs.img_dimensions();
        let width = w as f64;
        let height = h as f64;

        let tex_params = &[
            (
                WebGl2RenderingContext::TEXTURE_MIN_FILTER,
                WebGl2RenderingContext::NEAREST,
            ),
            (
                WebGl2RenderingContext::TEXTURE_MAG_FILTER,
                WebGl2RenderingContext::NEAREST,
            ),
            // Prevents s-coordinate wrapping (repeating)
            (
                WebGl2RenderingContext::TEXTURE_WRAP_S,
                WebGl2RenderingContext::CLAMP_TO_EDGE,
            ),
            // Prevents t-coordinate wrapping (repeating)
            (
                WebGl2RenderingContext::TEXTURE_WRAP_T,
                WebGl2RenderingContext::CLAMP_TO_EDGE,
            ),
        ];

        // Convert every supported BITPIX variant to a flat f32 buffer
        let values: Vec<f32> = match data {
            DataBorrowed::U8(data) => {
                data.into_iter().map(|v| *v as f32).collect()
            },
            DataBorrowed::I16(data) => {
                data.into_iter().map(|v| *v as f32).collect()
            },
            DataBorrowed::I32(data) => {
                data.into_iter().map(|v| *v as f32).collect()
            },
            DataBorrowed::I64(data) => {
                data.into_iter().map(|v| *v as f32).collect()
            },
            DataBorrowed::F32(data) => {
                data.into_iter().copied().collect()
            },
            DataBorrowed::F64(data) => {
                data.into_iter().map(|v| *v as f32).collect()
            },
        };

        let texture = Texture2D::create_from_raw_pixels::<al_core::image::format::R32F>(gl, w as i32, h as i32, tex_params, Some(&values))?;

        // Unproject the four corners and the center of the image to build
        // its sky footprint as a polygon
        let bl = wcs.unproj_lonlat(&ImgXY::new(0.0, 0.0)).ok_or(JsValue::from_str("(0, 0) px cannot be unprojected"))?;
        let br = wcs.unproj_lonlat(&ImgXY::new(width - 1.0, 0.0)).ok_or(JsValue::from_str("(w - 1, 0) px cannot be unprojected"))?;
        let tr = wcs.unproj_lonlat(&ImgXY::new(width - 1.0, height - 1.0)).ok_or(JsValue::from_str("(w - 1, h - 1) px cannot be unprojected"))?;
        let tl = wcs.unproj_lonlat(&ImgXY::new(0.0, height - 1.0)).ok_or(JsValue::from_str("(0, h - 1) px cannot be unprojected"))?;
        let control_point = wcs.unproj_lonlat(&ImgXY::new(width / 2.0, height / 2.0)).ok_or(JsValue::from_str("(w / 2, h / 2) px cannot be unprojected"))?;

        // Decrease the MOC depth until the footprint is covered by a small
        // number of cells (<= 5) so that the tessellation stays cheap
        let mut num_moc_cells = std::usize::MAX;
        let mut depth = 11;
        let mut moc = RangeMOC::new_empty(0);
        while num_moc_cells > 5 && depth > 3 {
            depth = depth - 1;

            moc = RangeMOC::from_polygon_with_control_point(
                &[
                    (bl.lon(), bl.lat()),
                    (br.lon(), br.lat()),
                    (tr.lon(), tr.lat()),
                    (tl.lon(), tl.lat()),
                ],
                (control_point.lon(), control_point.lat()),
                depth
            );
            num_moc_cells = (&moc).into_range_moc_iter().cells().count();
        }

        al_core::info!(depth);
        let pos = vec![];
        let uv = vec![];
        let indices = vec![];
        // Define the buffers
        let vao = {
            let mut vao = VertexArrayObject::new(gl);

            #[cfg(feature = "webgl2")]
            vao.bind_for_update()
                // layout (location = 0) in vec2 ndc_pos;
                .add_array_buffer_single(
                    2,
                    "ndc_pos",
                    WebGl2RenderingContext::DYNAMIC_DRAW,
                    VecData::<f32>(&pos),
                )
                .add_array_buffer_single(
                    2,
                    "uv",
                    WebGl2RenderingContext::DYNAMIC_DRAW,
                    VecData::<f32>(&uv),
                )
                // Set the element buffer
                .add_element_buffer(
                    WebGl2RenderingContext::DYNAMIC_DRAW,
                    VecData::<u32>(&indices),
                )
                .unbind();
            #[cfg(feature = "webgl1")]
            vao.bind_for_update()
                .add_array_buffer_single(
                    2,
                    "ndc_pos",
                    WebGl2RenderingContext::DYNAMIC_DRAW,
                    VecData::<f32>(&pos),
                )
                .add_array_buffer_single(
                    2,
                    "uv",
                    WebGl2RenderingContext::DYNAMIC_DRAW,
                    VecData::<f32>(&uv),
                )
                // Set the element buffer
                .add_element_buffer(
                    WebGl2RenderingContext::DYNAMIC_DRAW,
                    VecData::<u32>(&indices),
                )
                .unbind();

            vao
        };

        // Automatic cuts: 1st and 99th percentiles of the valid pixel values
        let mut values = values.into_iter()
            .filter(|x| !x.is_nan() && *x != blank)
            .collect::<Vec<_>>();
        // NaNs have been filtered out just above, so partial_cmp cannot fail
        values.sort_by(|a, b| a.partial_cmp(b).unwrap());

        // Guard against a fully blank/NaN image (previously panicked on an
        // out-of-bounds index); also clamp the percentile indices to the
        // last element to stay safe against float rounding.
        let (min_val, max_val) = if values.is_empty() {
            (0.0, 1.0)
        } else {
            let last = values.len() - 1;
            let idx_1_percent = (((values.len() as f32) * 0.01) as usize).min(last);
            let idx_99_percent = (((values.len() as f32) * 0.99) as usize).min(last);
            (values[idx_1_percent], values[idx_99_percent])
        };

        let cfg = ImageSurveyMeta {
            // Color config
            color: HiPSColor::Grayscale {
                tf: TransferFunction::Asinh,
                min_cut: Some(min_val),
                max_cut: Some(max_val),
                color: GrayscaleColor::Colormap {
                    name: Colormap::Spectral,
                    reversed: false,
                },
            },
            blend_cfg: BlendCfg::default(),
            opacity: 0.8,
            longitude_reversed: false,
        };

        let gl = gl.clone();
        let image = FitsImage {
            vao,
            wcs,
            moc,
            gl,

            pos,
            uv,
            indices,

            texture,
            cfg,

            scale,
            offset,
            blank,
        };

        Ok(image)
    }

    /// Re-tessellate the image footprint for the current camera/projection.
    ///
    /// Every MOC cell is rasterized into NDC positions, texture UVs and
    /// triangle indices (cells shallower than depth 3 are first subdivided
    /// to depth-3 children to limit distortion), then the GPU buffers are
    /// updated in one pass.
    pub fn update_buffers(&mut self, camera: &CameraViewPort, projection: &ProjectionType) -> Result<(), JsValue> {
        self.indices.clear();
        self.uv.clear();
        self.pos.clear();

        let mut idx_off = 0;
        let depth_max = self.moc.depth_max();
        for Cell { depth, idx, .. } in (&self.moc).into_range_moc_iter().cells() {
            let delta_depth = (depth_max as i32 - depth as i32).max(0);
            let n_segment_by_side = (1 << delta_depth) as usize;

            let cell = HEALPixCell(depth, idx);
            if depth < 3 {
                // Big cells are too distorted: rasterize their depth-3
                // children instead
                let mut ndc_cells_d3 = vec![];
                let mut uv_cells_d3 = vec![];

                let depth_sub_cell = 3;
                let delta_depth_sub_cell = depth_max - depth_sub_cell;
                let n_segment_by_side_sub_cell = (1 << delta_depth_sub_cell) as usize;

                for sub_cell in cell.get_children_cells(3 - depth) {
                    if let Some((ndc_sub_cell, uv_sub_cell, indices_sub_cell)) = self::rasterize_hpx_cell(
                        &sub_cell,
                        n_segment_by_side_sub_cell,
                        &mut idx_off,
                        camera,
                        projection,
                        &self.wcs
                    ) {
                        self.indices.extend(indices_sub_cell);
                        ndc_cells_d3.extend(ndc_sub_cell);
                        uv_cells_d3.extend(uv_sub_cell);
                    }
                }

                self.pos.extend(&ndc_cells_d3);
                self.uv.extend(&uv_cells_d3);
            } else if let Some((ndc_cell, uv_cell, indices_cell)) = self::rasterize_hpx_cell(
                &cell,
                n_segment_by_side,
                &mut idx_off,
                camera,
                projection,
                &self.wcs,
            ) {
                // Generate the iterator: idx_off + 1, idx_off + 1, .., idx_off + 4*n_segment - 1, idx_off + 4*n_segment - 1
                self.indices.extend(indices_cell);
                self.pos.extend(&ndc_cell);
                self.uv.extend(&uv_cell);
            }
        }

        // vertices contains ndc positions and texture UVs
        self.vao.bind_for_update()
            .update_array(
                "ndc_pos",
                WebGl2RenderingContext::DYNAMIC_DRAW,
                VecData(&self.pos),
            )
            .update_array(
                "uv",
                WebGl2RenderingContext::DYNAMIC_DRAW,
                VecData(&self.uv),
            )
            .update_element_array(
                WebGl2RenderingContext::DYNAMIC_DRAW,
                VecData::<u32>(&self.indices),
            );

        Ok(())
    }

    /// Draw the image with its current color/blending configuration.
    ///
    /// No-op when the layer is not visible. BSCALE/BZERO/BLANK are forwarded
    /// to the fragment shader as uniforms so the raw texture values can be
    /// rescaled on the GPU.
    pub fn draw(&self, shaders: &mut ShaderManager, colormaps: &Colormaps) -> Result<(), JsValue> {
        if self.cfg.visible() {
            self.gl.enable(WebGl2RenderingContext::BLEND);

            let ImageSurveyMeta {
                color,
                opacity,
                blend_cfg,
                ..
            } = self.cfg;

            // 2. Draw it if its opacity is not null
            blend_cfg.enable(&self.gl, || {
                let shader = crate::shader::get_shader(&self.gl, shaders, "FitsVS", "FitsFS")?;
                shader
                    .bind(&self.gl)
                    .attach_uniforms_from(colormaps)
                    .attach_uniforms_from(&color)
                    .attach_uniform("opacity", &opacity)
                    .attach_uniform("tex", &self.texture)
                    .attach_uniform("scale", &self.scale)
                    .attach_uniform("offset", &self.offset)
                    .attach_uniform("blank", &self.blank)
                    .bind_vertex_array_object_ref(&self.vao)
                    .draw_elements_with_i32(
                        WebGl2RenderingContext::TRIANGLES,
                        Some(self.indices.len() as i32),
                        WebGl2RenderingContext::UNSIGNED_INT,
                        0,
                    );

                Ok(())
            })?;

            self.gl.disable(WebGl2RenderingContext::BLEND);
        }

        Ok(())
    }
}
use crate::math::angle::ToAngle;
/// Rasterize one HEALPix cell into NDC positions, texture UVs and triangle indices.
///
/// The cell is sampled on a `(n_segment_by_side + 1)^2` grid. A grid vertex is
/// kept only when it both projects onto the screen
/// (`model_to_normalized_device_space`) and falls inside the FITS image
/// (`wcs.proj`).
///
/// Returns `None` when at least one grid vertex was rejected, when a quad's
/// winding does not match the current longitude orientation (crossing/flipped
/// triangles), or when a sub-quad is discarded by `survey::view::project`.
/// On success, `idx_off` is advanced by the number of emitted vertices so the
/// caller can accumulate several cells into the same buffers.
fn rasterize_hpx_cell(cell: &HEALPixCell, n_segment_by_side: usize, idx_off: &mut u32, camera: &CameraViewPort, projection: &ProjectionType, wcs: &WCS) -> Option<(Vec<f32>, Vec<f32>, Vec<u32>)> {
    let n_vertices_per_segment = n_segment_by_side + 1;

    let (w, h) = wcs.img_dimensions();
    let w = w as f64;
    let h = h as f64;

    let mut uv = vec![];
    let mut ndc_pos = vec![];
    for (lon, lat) in cell.grid(n_segment_by_side as u32).iter() {
        let xyzw = crate::math::lonlat::radec_to_xyzw(lon.to_angle(), lat.to_angle());
        let xyzw = crate::coosys::apply_coo_system(&CooSystem::ICRSJ2000, camera.get_system(), &xyzw);

        // `.and_then` instead of `.map(..).flatten()`: keep the vertex only
        // if it is both projectable on screen and inside the image
        let vertex = projection
            .model_to_normalized_device_space(&xyzw, camera)
            .and_then(|v| {
                wcs.proj(&wcs::LonLat::new(*lon, *lat))
                    .map(|xy| {
                        let uv = ImgXY::new(xy.x() / w, xy.y() / h);
                        (
                            [v.x as f32, v.y as f32],
                            [uv.x() as f32, uv.y() as f32]
                        )
                    })
            });

        if let Some((pos_vert, uv_vert)) = vertex {
            ndc_pos.extend(pos_vert);
            uv.extend(uv_vert);
        }
    }

    // The cell is usable only if every grid vertex was kept (2 floats each)
    let cell_inside = ndc_pos.len() == 2 * n_vertices_per_segment * n_vertices_per_segment;
    if !cell_inside {
        return None;
    }

    // Generate the iterator: idx_off + 1, idx_off + 1, .., idx_off + 4*n_segment - 1, idx_off + 4*n_segment - 1
    let mut indices = Vec::with_capacity(n_segment_by_side * n_segment_by_side * 6);
    let num_vertices = n_vertices_per_segment * n_vertices_per_segment;

    let longitude_reversed = camera.get_longitude_reversed();
    // A triangle is invalid when its winding disagrees with the current
    // longitude orientation of the view
    let invalid_tri = |tri_ccw: bool, reversed_longitude: bool| -> bool {
        (!reversed_longitude && !tri_ccw) || (reversed_longitude && tri_ccw)
    };
    for i in 0..n_segment_by_side {
        for j in 0..n_segment_by_side {
            // Corner indices of the (i, j) quad in the vertex grid
            let idx_0 = j + i * n_vertices_per_segment;
            let idx_1 = j + 1 + i * n_vertices_per_segment;
            let idx_2 = j + (i + 1) * n_vertices_per_segment;
            let idx_3 = j + 1 + (i + 1) * n_vertices_per_segment;

            let c0 = crate::math::projection::ndc_to_screen_space(&XYNDC::new(ndc_pos[2*idx_0] as f64, ndc_pos[2*idx_0 + 1] as f64), camera);
            let c1 = crate::math::projection::ndc_to_screen_space(&XYNDC::new(ndc_pos[2*idx_1] as f64, ndc_pos[2*idx_1 + 1] as f64), camera);
            let c2 = crate::math::projection::ndc_to_screen_space(&XYNDC::new(ndc_pos[2*idx_2] as f64, ndc_pos[2*idx_2 + 1] as f64), camera);
            let c3 = crate::math::projection::ndc_to_screen_space(&XYNDC::new(ndc_pos[2*idx_3] as f64, ndc_pos[2*idx_3 + 1] as f64), camera);

            let first_tri_ccw = !crate::math::vector::ccw_tri(&c0, &c1, &c2);
            let second_tri_ccw = !crate::math::vector::ccw_tri(&c1, &c3, &c2);

            if invalid_tri(first_tri_ccw, longitude_reversed) || invalid_tri(second_tri_ccw, longitude_reversed) {
                return None;
            }

            let vx = [c0.x, c1.x, c2.x, c3.x];
            let vy = [c0.y, c1.y, c2.y, c3.y];

            let projeted_cell = HEALPixCellProjeted {
                ipix: cell.idx(),
                vx,
                vy
            };
            // `?` on Option: bail out of the whole cell when the quad is rejected
            crate::survey::view::project(projeted_cell, camera, projection)?;

            // Two triangles per quad
            indices.push(*idx_off + idx_0 as u32);
            indices.push(*idx_off + idx_1 as u32);
            indices.push(*idx_off + idx_2 as u32);

            indices.push(*idx_off + idx_1 as u32);
            indices.push(*idx_off + idx_3 as u32);
            indices.push(*idx_off + idx_2 as u32);
        }
    }

    *idx_off += num_vertices as u32;

    Some((ndc_pos, uv, indices))
}

View File

@@ -97,7 +97,7 @@ fn path_along_edge(cell: &HEALPixCell, n_segment_by_side: usize, camera: &Camera
}
}
use al_api::cell::HEALPixCellProjeted;
fn rasterize_hpx_cell(cell: &HEALPixCell, n_segment_by_side: usize, camera: &CameraViewPort, idx_off: &mut u32, projection: &ProjectionType) -> Option<(Vec<f32>, Vec<u32>)> {
pub fn rasterize_hpx_cell(cell: &HEALPixCell, n_segment_by_side: usize, camera: &CameraViewPort, idx_off: &mut u32, projection: &ProjectionType) -> Option<(Vec<f32>, Vec<u32>)> {
let n_vertices_per_segment = n_segment_by_side + 1;
let vertices = cell
@@ -108,7 +108,9 @@ fn rasterize_hpx_cell(cell: &HEALPixCell, n_segment_by_side: usize, camera: &Cam
let xyzw = crate::coosys::apply_coo_system(&CooSystem::ICRSJ2000, camera.get_system(), &xyzw);
projection.model_to_normalized_device_space(&xyzw, camera)
.map(|v| [v.x as f32, v.y as f32])
.map(|v| {
[v.x as f32, v.y as f32]
})
})
.flatten()
.collect::<Vec<_>>();
@@ -462,7 +464,7 @@ impl MOC {
})
.flatten()
.collect::<Vec<_>>();
self.first_idx.push(self.indices.len());
self.num_indices.push(indices_moc.len());

View File

@@ -3,8 +3,502 @@ pub mod final_pass;
pub mod grid;
pub mod labels;
pub mod moc;
pub mod image;
pub mod hips;
pub use hips::HiPS;
pub use labels::TextRenderManager;
pub use catalog::Manager;
pub use grid::ProjetedGrid;
use al_api::hips::ImageSurveyMeta;
use al_api::color::ColorRGB;
use al_core::VertexArrayObject;
use al_core::SliceData;
use al_core::shader::Shader;
use al_core::WebGlContext;
use al_core::image::format::ImageFormatType;
use al_core::webgl_ctx::GlWrapper;
use crate::Abort;
use crate::ProjectionType;
use crate::renderable::image::FitsImage;
use crate::camera::CameraViewPort;
use crate::colormap::Colormaps;
use crate::shader::ShaderId;
use crate::{shader::ShaderManager, survey::config::HiPSConfig};
use crate::SimpleHiPS;
// Recursively compute the number of subdivision needed for a cell
// to not be too much skewed
use hips::raytracing::RayTracer;
use web_sys::{WebGl2RenderingContext};
use wasm_bindgen::JsValue;
use std::borrow::Cow;
use std::collections::{HashSet, HashMap};
pub(crate) type Url = String;
type LayerId = String;
// The stack of renderable layers: HiPS surveys and standalone FITS images,
// drawn in the order given by `ids`.
pub struct Layers {
    // Surveys to query
    surveys: HashMap<Url, HiPS>,
    // Standalone FITS images, also keyed by their url
    images: HashMap<Url, FitsImage>,
    // The meta data associated with a layer
    meta: HashMap<LayerId, ImageSurveyMeta>,
    // Hashmap between urls and layers
    urls: HashMap<LayerId, Url>,
    // Layers given in a specific order to draw
    ids: Vec<LayerId>,

    // Url of the survey with the deepest max order among those loaded
    most_precise_survey: Url,

    // Full-view mesh renderer used for large fields of view
    raytracer: RayTracer,

    // A vao that takes all the screen
    screen_vao: VertexArrayObject,

    // Color painted behind the surveys when none fully covers the view
    background_color: ColorRGB,
    // Deepest HEALPix depth among the currently displayed surveys
    depth: u8,

    gl: WebGlContext,
}
const DEFAULT_BACKGROUND_COLOR: ColorRGB = ColorRGB { r: 0.05, g: 0.05, b: 0.05 };

// Fetch (compiling on first use) the shader that paints the plain
// background color behind the surveys.
fn get_backgroundcolor_shader<'a>(gl: &WebGlContext, shaders: &'a mut ShaderManager) -> &'a Shader {
    let shader_id = ShaderId(
        Cow::Borrowed("RayTracerFontVS"),
        Cow::Borrowed("RayTracerFontFS"),
    );

    shaders.get(gl, &shader_id).unwrap_abort()
}
impl Layers {
pub fn new(
gl: &WebGlContext,
projection: &ProjectionType
) -> Result<Self, JsValue> {
let surveys = HashMap::new();
let images = HashMap::new();
let meta = HashMap::new();
let urls = HashMap::new();
let ids = Vec::new();
// - The raytracer is a mesh covering the view. Each pixel of this mesh
// is unprojected to get its (ra, dec). Then we query ang2pix to get
// the HEALPix cell in which it is located.
// We get the texture from this cell and draw the pixel
// This mode of rendering is used for big FoVs
let raytracer = RayTracer::new(gl, &projection)?;
let gl = gl.clone();
let most_precise_survey = String::new();
let mut screen_vao = VertexArrayObject::new(&gl);
#[cfg(feature = "webgl2")]
screen_vao.bind_for_update()
.add_array_buffer_single(
2,
"pos_clip_space",
WebGl2RenderingContext::STATIC_DRAW,
SliceData::<f32>(&[
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
]),
)
// Set the element buffer
.add_element_buffer(WebGl2RenderingContext::STATIC_DRAW, SliceData::<u16>(&[0, 1, 2, 0, 2, 3]))
// Unbind the buffer
.unbind();
#[cfg(feature = "webgl1")]
screen_vao.bind_for_update()
.add_array_buffer(
2,
"pos_clip_space",
WebGl2RenderingContext::STATIC_DRAW,
SliceData::<f32>(&[
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
]),
)
// Set the element buffer
.add_element_buffer(WebGl2RenderingContext::STATIC_DRAW, SliceData::<u16>(&[0, 1, 2, 0, 2, 3]))
// Unbind the buffer
.unbind();
let depth = 0;
let background_color = DEFAULT_BACKGROUND_COLOR;
Ok(Layers {
surveys,
images,
meta,
urls,
ids,
most_precise_survey,
raytracer,
depth,
background_color,
screen_vao,
gl,
})
}
pub fn set_survey_url(&mut self, past_url: String, new_url: String) -> Result<(), JsValue> {
if let Some(mut survey) = self.surveys.remove(&past_url) {
// update the root_url
survey.get_config_mut()
.set_root_url(new_url.clone());
self.surveys.insert(new_url.clone(), survey);
// update all the layer urls
for url in self.urls.values_mut() {
if *url == past_url {
*url = new_url.clone();
}
}
if self.most_precise_survey == past_url {
self.most_precise_survey = new_url.clone();
}
Ok(())
} else {
Err(JsValue::from_str("Survey not found"))
}
}
pub fn reset_frame(&mut self) {
for survey in self.surveys.values_mut() {
survey.reset_frame();
}
}
pub fn set_projection(&mut self, projection: &ProjectionType) -> Result<(), JsValue> {
// Recompute the raytracer
self.raytracer = RayTracer::new(&self.gl, &projection)?;
Ok(())
}
pub fn set_background_color(&mut self, color: ColorRGB) {
self.background_color = color;
}
pub fn draw(
&mut self,
camera: &CameraViewPort,
shaders: &mut ShaderManager,
colormaps: &Colormaps,
projection: &ProjectionType
) -> Result<(), JsValue> {
let raytracer = &self.raytracer;
let raytracing = raytracer.is_rendering(camera/* , depth_texture*/);
// The first layer must be paint independently of its alpha channel
self.gl.enable(WebGl2RenderingContext::BLEND);
// Check whether a survey to plot is allsky
// if neither are, we draw a font
// if there are, we do not draw nothing
if !self.surveys.is_empty() {
let not_render_transparency_font = self.ids.iter()
.any(|layer| {
let meta = self.meta.get(layer).unwrap_abort();
let url = self.urls.get(layer).unwrap_abort();
let survey = self.surveys.get(url).unwrap_abort();
let hips_cfg = survey.get_config();
(survey.is_allsky() || hips_cfg.get_format() == ImageFormatType::RGB8U) && meta.opacity == 1.0
});
// Need to render transparency font
if !not_render_transparency_font {
let opacity = self.surveys.values()
.fold(std::f32::MAX, |mut a, s| {
a = a.min(s.get_fading_factor()); a
});
let background_color = &self.background_color * opacity;
let vao = if raytracing {
raytracer.get_vao()
} else {
// define a vao that consists of 2 triangles for the screen
&self.screen_vao
};
get_backgroundcolor_shader(&self.gl, shaders).bind(&self.gl).attach_uniforms_from(camera)
.attach_uniform("color", &background_color)
.attach_uniform("opacity", &opacity)
.bind_vertex_array_object_ref(vao)
.draw_elements_with_i32(
WebGl2RenderingContext::TRIANGLES,
None,
WebGl2RenderingContext::UNSIGNED_SHORT,
0,
);
}
}
// Pre loop over the layers to see if a HiPS is entirely covering those behind
// so that we do not have to render those
let mut idx_start_layer = 0;
for (idx_layer, layer) in self.ids.iter().enumerate().skip(1) {
let meta = self.meta.get(layer).expect("Meta should be found");
let url = self.urls.get(layer).expect("Url should be found");
let survey = self.surveys.get_mut(url).unwrap_abort();
let hips_cfg = survey.get_config();
let fully_covering_survey = (survey.is_allsky() || hips_cfg.get_format() == ImageFormatType::RGB8U) && meta.opacity == 1.0;
if fully_covering_survey {
idx_start_layer = idx_layer;
}
}
let rendered_layers = &self.ids[idx_start_layer..];
for layer in rendered_layers {
let meta = self.meta.get(layer).expect("Meta should be found");
if meta.visible() {
// 1. Update the survey if necessary
let url = self.urls.get(layer).expect("Url should be found");
let survey = self.surveys.get_mut(url).unwrap_abort();
survey.update(camera, projection);
let ImageSurveyMeta {
color,
opacity,
blend_cfg,
..
} = meta;
// 2. Draw it if its opacity is not null
blend_cfg.enable(&self.gl, || {
survey.draw(
raytracer,
shaders,
camera,
color,
*opacity,
colormaps,
)?;
Ok(())
})?;
}
}
self.gl.blend_func_separate(
WebGl2RenderingContext::SRC_ALPHA,
WebGl2RenderingContext::ONE,
WebGl2RenderingContext::ONE,
WebGl2RenderingContext::ONE,
);
self.gl.disable(WebGl2RenderingContext::BLEND);
Ok(())
}
pub fn set_image_surveys(
&mut self,
hipses: Vec<SimpleHiPS>,
gl: &WebGlContext,
camera: &mut CameraViewPort,
projection: &ProjectionType
) -> Result<(), JsValue> {
// 1. Check if layer duplicated have been given
for i in 0..hipses.len() {
for j in 0..i {
if hipses[i].get_layer() == hipses[j].get_layer() {
let layer = &hipses[i].get_layer();
return Err(JsValue::from_str(&format!(
"{:?} layer name are duplicates",
layer
)));
}
}
}
let mut current_needed_surveys = HashSet::new();
for hips in hipses.iter() {
let url = hips.get_properties().get_url();
current_needed_surveys.insert(url);
}
// Remove surveys that are not needed anymore
self.surveys = self
.surveys
.drain()
.filter(|(_, m)| current_needed_surveys.contains(&m.get_config().root_url))
.collect();
// Create the new surveys
let mut max_depth_among_surveys = 0;
self.meta.clear();
self.ids.clear();
self.urls.clear();
let _num_surveys = hipses.len();
let mut longitude_reversed = false;
for SimpleHiPS {
layer,
properties,
meta,
img_format,
..
} in hipses.into_iter()
{
let config = HiPSConfig::new(&properties, img_format)?;
//camera.set_longitude_reversed(meta.longitude_reversed);
// Get the most precise survey from all the ones given
let url = properties.get_url();
let max_order = properties.get_max_order();
if max_order > max_depth_among_surveys {
max_depth_among_surveys = max_order;
self.most_precise_survey = url.clone();
}
// Add the new surveys
if !self.surveys.contains_key(&url) {
let survey = HiPS::new(config, gl, camera)?;
self.surveys.insert(url.clone(), survey);
// A new survey has been added and it is lonely
/*if num_surveys == 1 {
if let Some(initial_ra) = properties.get_initial_ra() {
if let Some(initial_dec) = properties.get_initial_dec() {
camera.set_center::<P>(&LonLatT(Angle((initial_ra).to_radians()), Angle((initial_dec).to_radians())), &properties.get_frame());
}
}
if let Some(initial_fov) = properties.get_initial_fov() {
camera.set_aperture::<P>(Angle((initial_fov).to_radians()));
}
}*/
}
longitude_reversed |= meta.longitude_reversed;
self.meta.insert(layer.clone(), meta);
self.urls.insert(layer.clone(), url);
self.ids.push(layer);
}
camera.set_longitude_reversed(longitude_reversed, &projection);
Ok(())
}
pub fn get_layer_cfg(&self, layer: &str) -> Result<ImageSurveyMeta, JsValue> {
self.meta
.get(layer)
.cloned()
.ok_or_else(|| JsValue::from(js_sys::Error::new("Survey not found")))
}
pub fn set_layer_cfg(
&mut self,
layer: String,
meta: ImageSurveyMeta,
camera: &CameraViewPort,
projection: &ProjectionType,
) -> Result<(), JsValue> {
if let Some(meta_old) = self.meta.get(&layer) {
if !meta_old.visible() && meta.visible() {
if let Some(survey) = self.get_mut_hips_from_layer(&layer) {
survey.recompute_vertices(camera, projection);
}
if let Some(image) = self.get_mut_image_from_layer(&layer) {
image.update_buffers(camera, projection)?;
}
}
}
// Expect the image survey to be found in the hash map
self.meta.insert(layer.clone(), meta).ok_or_else(|| {
JsValue::from(js_sys::Error::new(&format!("{:?} layer not found", layer)))
})?;
Ok(())
}
pub fn is_ready(&self) -> bool {
let ready = self
.surveys
.iter()
.map(|(_, survey)| survey.is_ready())
.fold(true, |acc, x| acc & x);
ready
}
pub fn refresh_views(&mut self, camera: &mut CameraViewPort) {
self.depth = 0;
for survey in self.surveys.values_mut() {
survey.refresh_view(camera);
self.depth = self.depth.max(survey.get_depth());
}
}
// Accessors
pub fn get_depth(&self) -> u8 {
self.depth
}
// HiPSes getters
pub fn get_hips_from_layer(&self, id: &str) -> Option<&HiPS> {
self.urls.get(id).map(|url| self.surveys.get(url).unwrap_abort())
}
pub fn get_mut_hips_from_layer(&mut self, id: &str) -> Option<&mut HiPS> {
let url = self.urls.get_mut(id);
if let Some(url) = url {
self.surveys.get_mut(url)
} else {
None
}
}
pub fn get_mut_hips(&mut self, root_url: &str) -> Option<&mut HiPS> {
self.surveys.get_mut(root_url)
}
pub fn values_hips(&self) -> impl Iterator<Item = &HiPS> {
self.surveys.values()
}
pub fn values_mut_hips(&mut self) -> impl Iterator<Item = &mut HiPS> {
self.surveys.values_mut()
}
// Fits images getters
pub fn get_mut_image_from_layer(&mut self, id: &str) -> Option<&mut FitsImage> {
let url = self.urls.get_mut(id);
if let Some(url) = url {
self.images.get_mut(url)
} else {
None
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,2 +0,0 @@
pub mod rasterizer;
pub mod ray_tracer;

View File

@@ -1 +0,0 @@
pub mod uv;

View File

@@ -1,494 +0,0 @@
/*use fitsrs::PrimaryHeader;
use cgmath::{Matrix2, Matrix4, Matrix, Vector2, Vector4, SquareMatrix};
use al_core::image::fits::FitsBorrowed;
use crate::{Gnomonic, Orthographic};
use crate::math::projection::Projection;
// Number of axis
const NAXIS: u8 = 2;
/* Implementation of the paper from M. R. Calabretta and E. W. Greisen
Representations of celestial coordinates in FITS */
#[derive(Debug)]
pub struct WCS2 {
// Pixel coordinates of a coordinate reference point (r_j)
crpix: Vector2<f64>,
// Pixel to interm coordianates keywords
pixel_interm_params: Pixel2IntermParams,
// Coordinate value at reference point [deg]
crval: Vector2<f64>,
// Coordinate reference (radec, galactic lonlat, ...)
ctype_coo_ref: CTYPE_COO_REF,
// Spherical projection
ctype_proj: CTYPE_PROJ,
// Native longitude of celestial pole [deg]
lonpole: f64,
// Native latitude of celestial pole [deg]
latpole: f64,
// Coordinate system
radesys: RADESYS,
// Coordinate rotation matrix coding the native to celestial spherical coordinates
r: Matrix4<f64>,
// Inv of r
r_inv: Matrix4<f64>
}
#[derive(Debug)]
enum Pixel2IntermParams {
CDELT_PC {
// Coordinate increment at reference point [deg] (s_i)
cdelt: Vector2<f64>,
// Linear transformation matrix (m_ij)
pc: Matrix2<f64>,
// Inv of the linear transformation matrix (m_ij)
pc_inv: Matrix2<f64>,
},
CD {
// Linear transformation matrix (s_i x m_ij)
cd: Matrix2<f64>,
// Inv of linear transformation matrix (s_i x m_ij)
cd_inv: Matrix2<f64>,
},
CROTA
}
use fitsrs::{FITSCardValue, FITSCard};
fn has_specific_card<'a>(hdu: &'a PrimaryHeader<'a>, keyword: &'static str) -> bool {
hdu.get(keyword).is_some()
}
fn get_card_float_value<'a>(hdu: &'a PrimaryHeader<'a>, keyword: &'static str, default: f64) -> Result<f64, &'static str> {
match hdu.get(keyword) {
None => Ok(default),
Some(FITSCard::Other { value: FITSCardValue::FloatingPoint(v), .. }) => Ok(*v),
_ => Err("Keyword does not refer to a floating point value"),
}
}
fn get_card_str_value<'a>(hdu: &'a PrimaryHeader<'a>, keyword: &'static str) -> Result<&'a str, &'static str> {
match hdu.get(keyword) {
Some(FITSCard::Other { value: FITSCardValue::CharacterString(s), .. }) => Ok(s),
_ => Err("Keyword does not refer to a string"),
}
}
// (Y-X'-Y'') Euler angles matrix
// See: https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix
fn create_yxy_euler_rotation_matrix(alpha: f64, delta: f64, phi: f64) -> Matrix4<f64> {
let (s1, c1) = phi.sin_cos();
let (s2, c2) = delta.sin_cos();
let (s3, c3) = alpha.sin_cos();
let r11 = c1*c3 - c2*s1*s3;
let r12 = s1*s2;
let r13 = c1*s3 + c2*c3*s1;
let r21 = s2*s3;
let r22 = c2;
let r23 = -c3*s2;
let r31 = -c3*s1 - c1*c2*s3;
let r32 = c1*s2;
let r33 = c1*c2*c3 - s1*s3;
Matrix4::new(
r11, r21, r31, 0.0,
r12, r22, r32, 0.0,
r13, r23, r33, 0.0,
0.0, 0.0, 0.0, 1.0
)
}
impl WCS2 {
pub fn new<'a>(fits: &FitsBorrowed<'a>) -> Result<Self, &'static str> {
let hdu = fits.get_header();
let crpix1 = get_card_float_value(hdu, "CRPIX1", 0.0)?;
let crpix2 = get_card_float_value(hdu, "CRPIX2", 0.0)?;
let pixel_interm_params = (if has_specific_card(hdu, "CDELT1") {
let cdelt1 = get_card_float_value(hdu, "CDELT1", 1.0)?;
let cdelt2 = get_card_float_value(hdu, "CDELT2", 1.0)?;
// Linear transformation matrix (m_ij)
let pc11 = get_card_float_value(hdu, "PC1_1", 1.0)?;
let pc12 = get_card_float_value(hdu, "PC1_2", 0.0)?;
let pc21 = get_card_float_value(hdu, "PC2_1", 0.0)?;
let pc22 = get_card_float_value(hdu, "PC2_2", 1.0)?;
let pc = Matrix2::new(pc11, pc21, pc12, pc22);
let pc_inv = pc.transpose();
Ok(Pixel2IntermParams::CDELT_PC {
cdelt: Vector2::new(cdelt1.to_radians(), cdelt2.to_radians()),
pc: pc,
pc_inv: pc_inv
})
} else if has_specific_card(hdu, "CD1_1") {
// Linear transformation matrix (m_ij * s_i)
let cd11 = get_card_float_value(hdu, "CD1_1", 1.0)?;
let cd12 = get_card_float_value(hdu, "CD1_2", 0.0)?;
let cd21 = get_card_float_value(hdu, "CD2_1", 0.0)?;
let cd22 = get_card_float_value(hdu, "CD2_2", 1.0)?;
let cd = Matrix2::new(cd11, cd21, cd12, cd22);
let cd_inv = cd.transpose();
Ok(Pixel2IntermParams::CD {
cd,
cd_inv
})
} else if has_specific_card(hdu, "CROTA") {
Err("CROTA not implemented")
} else {
// Default case
let pc = Matrix2::identity();
Ok(Pixel2IntermParams::CDELT_PC {
cdelt: Vector2::new(1.0_f64.to_radians(), 1.0_f64.to_radians()),
pc_inv: pc.clone(),
pc
})
})?;
let crval1 = get_card_float_value(hdu, "CRVAL1", 0.0)?;
let crval2 = get_card_float_value(hdu, "CRVAL2", 0.0)?;
let ctype1 = get_card_str_value(hdu, "CTYPE1")?;
let ctype2 = get_card_str_value(hdu, "CTYPE2")?;
let ctype_proj = match &ctype1[5..8] {
/* Zenithal projections */
// zenithal/azimuthal perspective
"AZP" => Ok(CTYPE_PROJ::AZP),
// slant zenithal perspective
"SZP" => Ok(CTYPE_PROJ::SZP),
// Gnomonic
"TAN" => Ok(CTYPE_PROJ::TAN),
// Orthographic
"SIN" => Ok(CTYPE_PROJ::SIN),
/* Cylindrical projections */
// Mollweides projection
"MOL" => Ok(CTYPE_PROJ::MOL),
// Hammer-Aitoff
"AIT" => Ok(CTYPE_PROJ::AIT),
_ => Err("CTYPE last 3-character not recognized")
}?;
let ctype_coo_ref = match (&ctype1[0..4], &ctype2[0..4]) {
("RA--", "DEC-") => Ok(CTYPE_COO_REF::RA_DEC),
("GLON", "GLAT") => Ok(CTYPE_COO_REF::G_LON_LAT),
("ELON", "ELAT") => Ok(CTYPE_COO_REF::E_LON_LAT),
("RLON", "RLAT") => Ok(CTYPE_COO_REF::R_LON_LAT),
("HLON", "HLAT") => Ok(CTYPE_COO_REF::H_LON_LAT),
_ => Err("CTYPE first 4-character not recognized")
}?;
let default_lonpole = if ctype_proj.is_zenithal() {
// For zenithal projection, as theta_0, the latitude of the fiducial point
// corresponds to the native point, then lonpole = 0, unless delta_0 equals 90.0
if crval2 == 90.0 {
0.0
} else {
180.0
}
} else if ctype_proj.is_cylindrical() {
if crval2 >= 0.0 {
0.0
} else {
180.0
}
} else {
return Err("Other from cylindrical and zenithal projections not yet implemented!");
};
let lonpole = get_card_float_value(hdu, "LONPOLE", default_lonpole)?;
let latpole = get_card_float_value(hdu, "LATPOLE", 0.0)?;
let radesys = match get_card_str_value(hdu, "RADESYS")? {
// International Celestial Reference System
"ICRS" => Ok(RADESYS::ICRS),
// Mean place, new (IAU 1984) system
"FK5" => Ok(RADESYS::FK5),
// Mean place, old (Nessell-Newcomb) system
"FK4" => Ok(RADESYS::FK4),
// Mean place, old system without e-terms
"FK4_NO_E" => Ok(RADESYS::FK4_NO_E),
// Geocentric apparent place, IAU 1984 system
"GAPPT" => Ok(RADESYS::GAPPT),
_ => Err("Reference system not recognized")
}?;
// Native to celestial coordinates matrix
let (alpha_p, delta_p, phi_p) = if ctype_proj.is_zenithal() {
Ok((crval1.to_radians(), crval2.to_radians(), lonpole.to_radians()))
} else {
Err("cylindrical alpha_p, delta_p, phi_p not yet implemented. See p1080 of the reference paper")
}?;
// (Y-X'-Y'') Euler angles matrix
let r = create_yxy_euler_rotation_matrix(-alpha_p, delta_p, -phi_p + crate::math::PI);
let r_inv = r.transpose();
Ok(WCS2 {
crpix: Vector2::new(crpix1, crpix2),
crval: Vector2::new(crval1.to_radians(), crval2.to_radians()),
pixel_interm_params,
r,
r_inv,
lonpole: lonpole.to_radians(),
latpole: latpole.to_radians(),
ctype_coo_ref,
ctype_proj,
radesys
})
}
/* Pixel <=> projection plane coordinates transformation */
fn pixel_to_interm_world_coordinates(&self, p: &Vector2<f64>) -> Result<Vector2<f64>, &'static str> {
let p_off = p - self.crpix;
match &self.pixel_interm_params {
Pixel2IntermParams::CDELT_PC { pc, cdelt, .. } => {
let p_rot = pc * p_off;
Ok(Vector2::new(
-p_rot.x * cdelt.x,
p_rot.y * cdelt.y,
))
},
Pixel2IntermParams::CD { cd, .. } => {
let p_rot = cd * p_off;
Ok(Vector2::new(
-p_rot.x,
p_rot.y,
))
},
_ => Err("CROTA not implemented")
}
}
fn interm_world_to_pixel_coordinates(&self, x: &Vector2<f64>) -> Result<Vector2<f64>, &'static str> {
let p_off = (match &self.pixel_interm_params {
Pixel2IntermParams::CDELT_PC { pc_inv, cdelt, .. } => {
let p_rot = Vector2::new(
-x.x / cdelt.x,
x.y / cdelt.y,
);
Ok(pc_inv * p_rot)
},
Pixel2IntermParams::CD { cd_inv, .. } => {
let p_rot = Vector2::new(
-x.x ,
x.y,
);
Ok(cd_inv * p_rot)
},
_ => Err("CROTA not implemented")
})?;
Ok(p_off + self.crpix)
}
/* Projection plane <=> native spherical coordinates transformation */
/// Deproject intermediate world (projection-plane) coordinates onto the
/// native sphere. Returns `Ok(None)` when the point is outside the
/// projection's valid domain; only TAN and SIN are implemented so far.
fn interm_world_to_native_spherical_coordinates(&self, x: &Vector2<f64>) -> Result<Option<Vector4<f64>>, &'static str> {
    match self.ctype_proj {
        // Gnomonic (zenithal tangent-plane)
        CTYPE_PROJ::TAN => Ok(Gnomonic.clip_to_world_space(x)),
        // Orthographic
        CTYPE_PROJ::SIN => Ok(Orthographic.clip_to_world_space(x)),
        // Remaining projection codes are recognized but not yet supported.
        CTYPE_PROJ::AZP => Err("AZP not implemented yet!"),
        CTYPE_PROJ::SZP => Err("SZP not implemented yet!"),
        CTYPE_PROJ::MOL => Err("MOL not implemented yet!"),
        CTYPE_PROJ::AIT => Err("AIT not implemented yet!"),
    }
}
/// Project native spherical coordinates onto the intermediate world
/// (projection) plane. Returns `Ok(None)` when the point cannot be projected
/// (e.g. behind the tangent plane); only TAN and SIN are implemented so far.
fn native_spherical_to_interm_world_coordinates(&self, xyz: &Vector4<f64>) -> Result<Option<Vector2<f64>>, &'static str> {
    match self.ctype_proj {
        // Gnomonic (zenithal tangent-plane)
        CTYPE_PROJ::TAN => Ok(Gnomonic.world_to_clip_space(xyz)),
        // Orthographic
        CTYPE_PROJ::SIN => Ok(Orthographic.world_to_clip_space(xyz)),
        // Remaining projection codes are recognized but not yet supported.
        CTYPE_PROJ::AZP => Err("AZP not implemented yet!"),
        CTYPE_PROJ::SZP => Err("SZP not implemented yet!"),
        CTYPE_PROJ::MOL => Err("MOL not implemented yet!"),
        CTYPE_PROJ::AIT => Err("AIT not implemented yet!"),
    }
}
/* Native <=> celestial spherical coordinates transformation */
// Rotate native spherical coordinates into celestial ones by applying the
// inverse of the (Y-X'-Y'') Euler rotation built in `new()`.
fn native_to_celestial_spherical_coordinates(&self, xyz: &Vector4<f64>) -> Vector4<f64> {
    self.r_inv * xyz
}
// Rotate celestial spherical coordinates into the projection's native frame
// by applying the (Y-X'-Y'') Euler rotation built in `new()`.
fn celestial_to_native_spherical_coordinates(&self, xyz: &Vector4<f64>) -> Vector4<f64> {
    self.r * xyz
}
/// Project a celestial position (unit vector) onto image pixel coordinates.
///
/// Pipeline: celestial -> native sphere -> projection plane -> pixels.
/// Returns `Ok(None)` when the position is not projectable with the
/// current projection, and `Err` for unsupported WCS configurations.
pub fn proj(&self, xyz: &Vector4<f64>) -> Result<Option<Vector2<f64>>, &'static str> {
    let native = self.celestial_to_native_spherical_coordinates(xyz);

    match self.native_spherical_to_interm_world_coordinates(&native)? {
        Some(interm) => {
            let pixel = self.interm_world_to_pixel_coordinates(&interm)?;
            Ok(Some(pixel))
        }
        None => Ok(None),
    }
}
/// Deproject a pixel position into a celestial position (unit vector).
///
/// Pipeline: pixels -> projection plane -> native sphere -> celestial.
/// Returns `Ok(None)` when the pixel falls outside the projection's valid
/// domain, and `Err` for unsupported WCS configurations.
pub fn deproj(&self, p: &Vector2<f64>) -> Result<Option<Vector4<f64>>, &'static str> {
    let interm = self.pixel_to_interm_world_coordinates(p)?;

    match self.interm_world_to_native_spherical_coordinates(&interm)? {
        Some(native) => Ok(Some(self.native_to_celestial_spherical_coordinates(&native))),
        None => Ok(None),
    }
}
}
// Celestial coordinate system identified from the CTYPEi axis-name prefix.
#[derive(Debug)]
enum CTYPE_COO_REF {
    // Equatorial coordinates (right ascension / declination)
    RA_DEC,
    // Galactic longitude / latitude
    G_LON_LAT,
    // Ecliptic longitude / latitude
    E_LON_LAT,
    // NOTE(review): presumably RLON/RLAT axis names — confirm which frame
    // this prefix denotes in the parsing code.
    R_LON_LAT,
    // NOTE(review): presumably helioecliptic (HLON/HLAT) — TODO confirm
    H_LON_LAT,
}
// Sky projection algorithm, from the 3-letter projection code carried by
// the CTYPEi FITS keyword.
#[derive(Debug)]
enum CTYPE_PROJ {
    // Zenithal/azimuthal perspective
    AZP,
    // Slant zenithal perspective
    SZP,
    // Orthographic
    SIN,
    // Gnomonic (tangent-plane)
    TAN,
    // Mollweide's projection
    MOL,
    // Hammer-Aitoff
    AIT,
}
impl CTYPE_PROJ {
    /// True for the zenithal/azimuthal family of projections
    /// (used by `new()` to pick the fiducial-point formulas).
    fn is_zenithal(&self) -> bool {
        matches!(
            self,
            CTYPE_PROJ::AZP | CTYPE_PROJ::SZP | CTYPE_PROJ::SIN | CTYPE_PROJ::TAN
        )
    }

    /// True for the projections this code treats as cylindrical.
    // NOTE(review): MOL and AIT are usually classified as pseudo-cylindrical /
    // all-sky rather than strictly cylindrical — confirm the intended grouping.
    fn is_cylindrical(&self) -> bool {
        matches!(self, CTYPE_PROJ::MOL | CTYPE_PROJ::AIT)
    }
}
// Reference frame of the equatorial coordinates (FITS RADESYS keyword).
#[derive(Debug)]
enum RADESYS {
    // International Celestial Reference System
    ICRS,
    // Mean place, new (IAU 1984) system
    FK5,
    // Mean place, old (Bessell-Newcomb) system
    FK4,
    // Mean place, old system without the e-terms of aberration
    FK4_NO_E,
    // Geocentric apparent place, IAU 1984 system
    GAPPT,
}
#[cfg(test)] // only build the test module (and its file I/O) for `cargo test`
mod tests {
    use std::fs::File;
    use std::io::Read;

    use cgmath::Vector2;

    use crate::math::angle::Angle;
    use crate::wcs::WCS2;
    use crate::ArcDeg;
    use crate::LonLatT;
    use al_core::image::fits::FitsBorrowed;

    // Compare two floats to an absolute tolerance. Unlike the previous
    // `if … { assert!(false); }` form, this reports the offending values
    // when the assertion fails.
    macro_rules! assert_delta {
        ($x:expr, $y:expr, $d:expr) => {{
            let (x, y, d) = ($x, $y, $d);
            assert!((x - y).abs() <= d, "|{} - {}| > {}", x, y, d);
        }};
    }

    // NOTE(review): this commit moves the example FITS files under
    // `examples/fits/` (see .gitignore and examples/index.html) — confirm
    // this path still resolves from the crate root when tests run.
    const CUTOUT_PATH: &str = "../../examples/cutout-CDS_P_HST_PHAT_F475W.fits";

    // Read the whole test cutout file into memory.
    fn read_cutout() -> Vec<u8> {
        let mut f = File::open(CUTOUT_PATH).unwrap();
        let mut buf = Vec::new();
        f.read_to_end(&mut buf).unwrap();
        buf
    }

    #[test]
    fn proj_deproj_round_trip() {
        // pixel -> sky -> pixel must give back the starting pixel.
        let buf = read_cutout();
        let fits = FitsBorrowed::new(&buf).unwrap();
        let wcs = WCS2::new(&fits).unwrap();

        let p = Vector2::new(0.0, 0.0);
        let xyz = wcs.deproj(&p).unwrap().unwrap();
        let p_prim = wcs.proj(&xyz).unwrap().unwrap();

        assert_delta!(p.x, p_prim.x, 1e-6);
        assert_delta!(p.y, p_prim.y, 1e-6);
    }

    #[test]
    fn deproj() {
        // The reference pixel must deproject onto CRVAL.
        let buf = read_cutout();
        let fits = FitsBorrowed::new(&buf).unwrap();
        let wcs = WCS2::new(&fits).unwrap();

        let p = Vector2::new(1500.0, 1500.0);
        let xyz = wcs.deproj(&p).unwrap().unwrap();
        let (Angle(lon), Angle(lat)) = crate::math::lonlat::xyzw_to_radec(&xyz);

        assert_delta!(lon, wcs.crval.x, 1e-6);
        assert_delta!(lat, wcs.crval.y, 1e-6);
    }

    #[test]
    fn proj() {
        // CRVAL must project back onto the reference pixel.
        let buf = read_cutout();
        let fits = FitsBorrowed::new(&buf).unwrap();
        let wcs = WCS2::new(&fits).unwrap();

        let xyz = LonLatT::new(Angle(wcs.crval.x), Angle(wcs.crval.y)).vector();
        let p = wcs.proj(&xyz).unwrap().unwrap();

        assert_delta!(p.x, 1500.0, 1e-6);
        assert_delta!(p.y, 1500.0, 1e-6);
    }
}
*/

View File

@@ -0,0 +1,43 @@
#version 300 es
// Fragment shader rendering a single-channel FITS image:
// raw texel -> linear scale/offset -> transfer function -> colormap.
precision highp float;
precision highp sampler2D;
precision highp isampler2D;
precision mediump int;

out vec4 out_frag_color;

in vec2 frag_uv;

// Texture holding the FITS pixel values (read from the red channel).
uniform sampler2D tex;

// fits values
// Linear scaling of the raw texel value — presumably the FITS BSCALE/BZERO
// pair from the header; TODO confirm against the uniform-upload code.
uniform float scale;
uniform float offset;
// Sentinel marking undefined pixels (FITS BLANK); rendered fully transparent.
uniform float blank;

// stretch values
uniform float min_value;
uniform float max_value;
// Selector for the transfer function applied by transfer_func().
uniform int H;
uniform float opacity;

@include "../colormaps/colormap.glsl"
@include "../hips/transfer_funcs.glsl"

void main() {
    // Draw only texels whose UV falls inside the image; discard the rest.
    if (frag_uv.x >= 0.0 && frag_uv.x <= 1.0 && frag_uv.y >= 0.0 && frag_uv.y <= 1.0) {
        float x = texture(tex, frag_uv).r;
        // Physical value = raw * scale + offset.
        float alpha = x * scale + offset;
        // Map the value into [0, 1] through the selected stretch.
        alpha = transfer_func(H, alpha, min_value, max_value);
        // Branchless select: transparent for BLANK or NaN samples,
        // colormapped color otherwise.
        out_frag_color = mix(
            colormap_f(alpha),
            vec4(0.0),
            float(x == blank || isnan(x))
        );
        out_frag_color.a = out_frag_color.a * opacity;
    } else {
        discard;
    }
}

View File

@@ -0,0 +1,14 @@
#version 300 es
// Vertex shader for the FITS image pass: forwards a quad already expressed
// in normalized device coordinates, plus its texture coordinates, to the
// fragment stage. (An unused `uniform sampler2D tex;` was removed — sampling
// happens only in the fragment shader.)
precision highp float;
precision mediump int;

layout (location = 0) in vec2 ndc_pos;
layout (location = 1) in vec2 uv;

out vec2 frag_uv;

void main() {
    gl_Position = vec4(ndc_pos, 0.0, 1.0);
    frag_uv = uv;
}

View File

@@ -1570,13 +1570,42 @@ Aladin.prototype.displayFITS = function (url, options, successCallback, errorCal
}
let self = this;
/*fetch(url)
fetch(url)
.then((resp) => resp.arrayBuffer())
.then((arrayBuffer) => {
console.log('received fits', arrayBuffer)
self.view.aladin.webglAPI.addFITSImage(new Uint8Array(arrayBuffer));
console.log("parsed")
});*/
});
};
// @API
/*
* Creates remotely a HiPS from a JPEG or PNG image with astrometry info
* and display it
*/
Aladin.prototype.displayJPG = Aladin.prototype.displayPNG = function (url, options, successCallback, errorCallback) {
options = options || {};
options.color = true;
options.label = "JPG/PNG image";
options.outputFormat = 'png';
options = options || {};
var data = { url: url };
if (options.color) {
data.color = true;
}
if (options.outputFormat) {
data.format = options.outputFormat;
}
if (options.order) {
data.order = options.order;
}
if (options.nocache) {
data.nocache = options.nocache;
}
let self = this;
const request = ( url, params = {}, method = 'GET' ) => {
let options = {
@@ -1624,19 +1653,7 @@ Aladin.prototype.displayFITS = function (url, options, successCallback, errorCal
// This has to be fixed in the backend but a fast fix is just to wait
// before setting a new image survey
});
};
// @API
/*
* Creates remotely a HiPS from a JPEG or PNG image with astrometry info
* and display it
*/
// Convenience wrapper: display a full-color JPG or PNG image by forcing the
// relevant options and delegating to displayFITS.
Aladin.prototype.displayJPG = Aladin.prototype.displayPNG = function (url, options, successCallback, errorCallback) {
    options = options || {}; // tolerate a missing options object
    options.color = true; // JPG/PNG are treated as full-color images
    options.label = "JPG/PNG image";
    options.outputFormat = 'png';
    // displayFITS performs the actual request and layer creation.
    this.displayFITS(url, options, successCallback, errorCallback);
};
Aladin.prototype.setReduceDeformations = function (reduce) {

View File

@@ -45,6 +45,10 @@ import RasterizerGrayscale2ColormapUnsignedFS from '../glsl/webgl2/hips/rasteriz
import PostVS from '../glsl/webgl2/passes/post_vertex_100es.glsl'
import PostFS from '../glsl/webgl2/passes/post_fragment_100es.glsl'
// Shader fits image
import FitsVS from '../glsl/webgl2/fits/vert.glsl'
import FitsFS from '../glsl/webgl2/fits/frag.glsl'
let shaders = [
// Catalog shaders
{
@@ -185,6 +189,15 @@ let shaders = [
id: "PostFS",
content: PostFS,
},
// Fits
{
id: "FitsVS",
content: FitsVS,
},
{
id: "FitsFS",
content: FitsFS,
},
];
export function loadShadersWebGL2() {