Compare commits


1 Commit

Author SHA1 Message Date
bmatthieu3
945672a846 Reverse the longitude axis globally
This commit adds:
* a reverseLongitude method on the aladin object
* a longitudeReversed flag in AladinOptions, to reverse the longitude
axis globally when the aladin view is created
* BREAKS the HiPS options API: the longitudeReversed option is removed
at the HiPS level and replaced with a global flag on the Aladin Lite view
* fix: when adding the same survey twice, its HiPS options could not be
changed

This commit also implements #191 (see the usage sketch below).
2025-04-09 16:03:55 +02:00
215 changed files with 5158 additions and 5041 deletions
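A minimal usage sketch of the two new entry points described in the commit message. Only the `longitudeReversed` option and the `reverseLongitude()` method are confirmed by this diff (see the changed example files below); the div id, survey and fov values are illustrative placeholders borrowed from other examples in this changeset.

```js
// Sketch only: the new global longitudeReversed option and the
// reverseLongitude() method added by this commit.
import A from '../src/js/A.js';

A.init.then(() => {
    // Reverse the longitude axis globally when the view is created
    const aladin = A.aladin('#aladin-lite-div', {
        survey: 'P/allWISE/color',   // placeholder survey
        fov: 60,
        longitudeReversed: true,     // new global flag (replaces the HiPS-level option)
    });

    // ...or toggle it afterwards on the aladin object
    aladin.reverseLongitude(false);
});
```

Because the flag is now global to the view rather than attached to a single HiPS layer, the former HiPS-level longitudeReversed option disappears, which is the breaking change noted above.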

View File

@@ -25,10 +25,6 @@ jobs:
- name: "Install wasm-pack"
run: |
curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -y
- name: "Run cargo clippy"
run: cd src/core && cargo clippy --all-targets --features=webgl2 -- -D warnings
- name: Format check
run: cd src/core && cargo fmt --all -- --check
- name: "Install dependencies"
run: |
npm install

View File

@@ -15,13 +15,7 @@ A new [API technical documentation](https://cds-astro.github.io/aladin-lite/) is
[![API Documentation](https://img.shields.io/badge/API-documentation-blue.svg)](https://cds-astro.github.io/aladin-lite)
[![Release page](https://img.shields.io/badge/Release-download-yellow.svg)](https://aladin.cds.unistra.fr/AladinLite/doc/release/)
Try Aladin Lite [here](https://aladin.u-strasbg.fr/AladinLite).
Aladin Lite is made possible thanks to pure Rust core libraries:
* [cdshealpix](https://github.com/cds-astro/cds-healpix-rust) - for HEALPix projection and unprojection to/from sky coordinates
* [mapproj](https://github.com/cds-astro/cds-mapproj-rust) - for computing (un)projections described by a WCS
* [fitsrs](https://github.com/cds-astro/fitsrs) - for reading and parsing FITS images
* [moc](https://github.com/cds-astro/cds-moc-rust) - for parsing, manipulating, and serializing multi-order HEALPix coverage maps
Aladin Lite is available [at this link](https://aladin.u-strasbg.fr/AladinLite).
## Running & editable JS examples
@@ -108,15 +102,14 @@ Aladin Lite can be imported with:
* [X] FITS images support
* [X] WCS parsing, displaying an (JPEG/PNG) image in aladin lite view
* [X] Display customized shapes (e.g. proper motions) from astronomical catalog data
* [X] AVM tags parsing support inside JPEG
* [X] AVM tags parsing support
* [X] Easy sharing of current « view »
* [ ] All VOTable serializations
* [ ] FITS tables
* [X] Creating HiPS instance from an URL
* [X] Local HiPS loading
* [X] Multiple mirrors handling for HiPS tile retrieval
* [X] HiPS cube
* [ ] HiPS3D
* [ ] HiPS cube
## Licence

View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
<rect x="0" fill="none" width="20" height="20"/>
<g>

Before: 529 B (deleted SVG icon, preview not shown)

View File

@@ -14,7 +14,7 @@
aladin = A.aladin('#aladin-lite-div', {showSettingsControl: true, survey: "P/PanSTARRS/DR1/color-z-zg-g", showReticle: false, projection: "AIT", cooFrame: 'icrs', target: "stephan's quintet", fov: startFov, showGotoControl: false, showFrame: false, fullScreen: true, showLayersControl: true, showCooGridControl: false});
const chft = aladin.createImageSurvey('CFHT', "CFHT deep view of NGC7331 and Stephan's quintet u+g+r", "https://cds.unistra.fr/~derriere/PR_HiPS/2022_Duc/", null, null, {imgFormat: 'png'});
const nircamJWST = aladin.createImageSurvey('Nircam', "Stephans Quintet NIRCam+MIRI", "https://alasky.cds.unistra.fr/JWST/CDS_P_JWST_Stephans-Quintet_NIRCam-MIRI/", null, null, {imgFormat: 'png', colormap: "viridis"});
const nircamJWST = aladin.createImageSurvey('Nircam', "Stephans Quintet NIRCam+MIRI", "http://alasky.cds.unistra.fr/JWST/CDS_P_JWST_Stephans-Quintet_NIRCam+MIRI/", null, null, {imgFormat: 'png', colormap: "viridis"});
aladin.setOverlayImageLayer("CFHT", "CFHT");
aladin.setOverlayImageLayer("Nircam", "Nircam");

View File

@@ -10,6 +10,8 @@ import A from '../src/js/A.js';
A.init.then(() => {
let aladin = A.aladin('#aladin-lite-div', {projection: "TAN", survey: "P/HSC/DR2/deep/g", target: '02 21 36.529 -05 31 20.16', fov: 0.1});
aladin.reverseLongitude(true)
let hscGreenSurvey = aladin.getBaseImageLayer();
hscGreenSurvey.setImageFormat("fits");
hscGreenSurvey.setColormap("green", { stretch: "asinh" });

View File

@@ -26,17 +26,8 @@
limit: 1000,
//orderBy: 'nb_ref',
onClick: 'showTable',
onlyFootprints: false,
color: (s) => {
let coo = A.coo();
coo.parse(s.data['RAJ2000'] + ' ' + s.data['DEJ2000'])
let a = (0.1 * Math.pow(10, +s.data.logD25)) / 60;
let b = (1.0 / Math.pow(10, +s.data.logR25)) * a
return `rgb(${s.data["logR25"]*255.0}, ${s.data["logR25"]*255.0}, 255)`
},
hoverColor: 'red',
color: 'yellow',
hoverColor: 'blue',
shape: (s) => {
let coo = A.coo();
coo.parse(s.data['RAJ2000'] + ' ' + s.data['DEJ2000'])

View File

@@ -31,21 +31,15 @@
hoverColor: 'yellow',
selectionColor: 'white',
// Footprint associated to sources
color: (s) => {
// discard drawing a vector for big pm
let totalPmSquared = s.data.pmra*s.data.pmra + s.data.pmdec*s.data.pmdec;
if (totalPmSquared > 6) {
return;
}
return rainbowColorMap((totalPmSquared - 2.5) / 2)
},
shape: (s) => {
// discard drawing a vector for big pm
let totalPmSquared = s.data.pmra*s.data.pmra + s.data.pmdec*s.data.pmdec;
if (totalPmSquared > 6) {
return;
}
let color = rainbowColorMap((totalPmSquared - 2.5) / 2)
// Compute the mean of pm over the catalog sources
if (!pmraMean || !pmdecMean) {
pmraMean = 0, pmdecMean = 0;
@@ -68,24 +62,13 @@
s.dec,
s.ra + dra,
s.dec + ddec,
{color}
)
}
},
() => {
aladin.addCatalog(pmCat);
pmCat.select((s) => {
let totalPmSquared = s.data.pmra*s.data.pmra + s.data.pmdec*s.data.pmdec;
if (totalPmSquared > 6) {
return false;
}
return totalPmSquared < 3.0;
});
});
aladin.addCatalog(pmCat);
});
function rainbowColorMap(value) {
// Ensure value is within range [0, 1]
value = Math.max(0, Math.min(1, value));

View File

@@ -64,10 +64,6 @@
console.log(proj)
});
aladin.on('positionChanged', function(pos) {
console.log(pos)
});
aladin.on('layerChanged', function(imageLayer, layer, state){
console.log(imageLayer, layer, state)
});

View File

@@ -8,11 +8,11 @@
import A from '../src/js/A.js';
A.init.then(() => {
let aladin = A.aladin('#aladin-lite-div', {fov: 30, target: "280 +0", projection: "AIT", showShareControl:true, showSettingsControl: true, showContextMenu:true});
aladin.setOverlayImageLayer(A.image(
"https://www.virtualastronomy.org/files/avm_examples/spitzer/ssc2005-24a1.jpg",
"https://www.virtualastronomy.org/images/sig05-013.jpg",
{
name: "spitzer-ssc2005-24a1",
name: "sig05-017",
successCallback: (ra, dec, fov, image) => {
console.log(ra, dec)
aladin.gotoRaDec(ra, dec);

View File

@@ -14,7 +14,7 @@
{
name: "M61",
wcs: {
NAXIS: 2, // Minimal header
NAXIS: 0, // Minimal header
CTYPE1: 'RA---TAN', // TAN (gnomic) projection
CTYPE2: 'DEC--TAN', // TAN (gnomic) projection
EQUINOX: 2000.0, // Equatorial coordinates definition (yr)

View File

@@ -14,7 +14,7 @@
'#aladin-lite-div',
{
showSimbadPointerControl: true,
survey: 'https://skies.esac.esa.int/AKARI/color/', // set initial image survey
survey: 'P/allWISE/color', // set initial image survey
projection: 'AIT', // set a projection
fov: 360, // initial field of view in degrees
target: 'orion', // initial target

View File

@@ -12,7 +12,7 @@
import A from '../src/js/A.js';
let aladin;
A.init.then(() => {
aladin = A.aladin('#aladin-lite-div', {inertia: true, target: '00 00 00 +07 00 00', fov: 130, survey: 'P/Mellinger/color', showContextMenu: true, fullScreen: true});
aladin = A.aladin('#aladin-lite-div', {inertia: false, target: '00 00 00 +07 00 00', fov: 130, survey: 'P/Mellinger/color', showContextMenu: true, fullScreen: true});
var moc11 = A.MOCFromURL('http://skies.esac.esa.int/HST/NICMOS/Moc.fits', {color: '#84f', lineWidth: 3}, (moc) => {
// moc is ready
console.log(moc.contains(205.9019247, +2.4492764));

View File

@@ -1,123 +0,0 @@
<!doctype html>
<html>
<head>
<script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.9.4/Chart.js"></script>
</head>
<body>
<div id="aladin-lite-div" style="width: 768px; height: 512px"></div>
<canvas id="myChart" style="width:100%;max-width:600px"></canvas>
<script>let aladin;</script>
<script type="module">
function getPixelsOnLine(startX, startY, endX, endY){
const pixelCols = [];
var x = Math.floor(startX);
var y = Math.floor(startY);
const xx = Math.floor(endX);
const yy = Math.floor(endY);
const dx = Math.abs(xx - x);
const sx = x < xx ? 1 : -1;
const dy = -Math.abs(yy - y);
const sy = y < yy ? 1 : -1;
var err = dx + dy;
var e2;
var end = false;
while (!end) {
pixelCols.push([x,y]);
if ((x === xx && y === yy)) {
end = true;
} else {
e2 = 2 * err;
if (e2 >= dy) {
err += dy;
x += sx;
}
if (e2 <= dx) {
err += dx;
y += sy;
}
}
}
return pixelCols;
}
import A from '../src/js/A.js';
A.init.then(() => {
aladin = A.aladin(
'#aladin-lite-div',
{
showSimbadPointerControl: true,
survey: 'P/allWISE/color', // set initial image survey
projection: 'AIT', // set a projection
fov: 360, // initial field of view in degrees
target: 'orion', // initial target
cooFrame: 'icrs', // set galactic frame
reticleColor: '#ff89ff', // change reticle color
reticleSize: 64, // change reticle size
showContextMenu: true,
showShareControl: true,
showFrame: true,
showZoomControl:true,
showSettingsControl:true,
showColorPickerControl: true,
showCooGrid: true,
fullScreen: true,
samp: true,
realFullscreen: true,
}
);
let base = aladin.getBaseImageLayer();
aladin.select('line', p => {
let xValues = [];
let rValues = [];
let gValues = [];
let bValues = [];
let i = 0;
for(var [r, g, b] of base.probe({type: 'line', x1: p.a.x, y1: p.a.y, x2: p.b.x, y2: p.b.y})) {
xValues.push(i)
rValues.push(r)
gValues.push(g)
bValues.push(b)
i++;
}
new Chart("myChart", {
type: "line",
data: {
labels: xValues,
datasets: [{
fill: false,
lineTension: 0,
backgroundColor: "rgba(255,0,0,1.0)",
data: rValues
},
{
fill: false,
lineTension: 0,
backgroundColor: "rgba(0,255,0,1.0)",
data: gValues
},
{
fill: false,
lineTension: 0,
backgroundColor: "rgba(0,0,255,1.0)",
data: bValues
}]
},
options: {
legend: {display: false},
scales: {
yAxes: [{ticks: {min: 0, max:255}}],
}
}
});
})
});
</script>
</body>
</html>

View File

@@ -33,16 +33,10 @@ var myFilterFunction = function(source) {
return color>colorThreshold;
}
aladin = A.aladin('#aladin-lite-div', {target: 'M 81', fov: 0.5, survey: 'CDS/P/SDSS9/color'});
var cat = A.catalogFromSimbad('M 81', 0.25, {
shape: (s) => {
return A.circle(s.ra, s.dec, 0.003, {lineWidth: 3});
},
onClick: 'showTable', verbosity: 3, filter: myFilterFunction
aladin = A.aladin('#aladin-lite-div', {target: 'M 81', fov: 0.5, survey: 'CDS/P/SDSS9/color'});
var cat = A.catalogFromSimbad('M 81', 0.25, {onClick: 'showTable', verbosity: 3, filter: myFilterFunction});
aladin.addCatalog(cat);
});
aladin.addCatalog(cat);
});
</script>
</body>

View File

@@ -17,9 +17,6 @@
aladin.on("zoomChanged", () => {
console.log("zoomChanged")
})
aladin.on("rotationChanged", (rotation) => {
console.log("Rotation just changed to ", rotation);
})
aladin.on("positionChanged", ({ra, dec}) => {
console.log('call to aladin', aladin.pix2world(300, 300))
console.log('positionChanged in icrs', ra, dec)
@@ -28,7 +25,6 @@
aladin.gotoRaDec(0, 20);
aladin.on('rightClickMove', (x, y) => {
aladin.setRotation(aladin.getRotation() + 2)
console.log("right click move", x, y)
})
});

Binary files not shown: 11 new image files added (40–58 KiB each).

View File

@@ -26,7 +26,8 @@ wasm-bindgen = "=0.2.92"
wasm-streams = "0.3.0"
async-channel = "1.8.0"
mapproj = "0.3.0"
fitsrs = "0.3.4"
fitsrs = "0.2.11"
wcs = "0.3.1"
colorgrad = "0.6.2"
[features]
@@ -64,7 +65,7 @@ path = "./al-api"
[dependencies.web-sys]
version = "0.3.56"
features = [ "console", "CssStyleDeclaration", "Document", "Element", "HtmlCollection", "HtmlElement", "HtmlImageElement", "HtmlCanvasElement", "Blob", "ImageBitmap", "ImageData", "CanvasRenderingContext2d", "WebGlBuffer", "WebGlContextAttributes", "WebGlFramebuffer", "WebGlProgram", "WebGlShader", "WebGlUniformLocation", "WebGlTexture", "WebGlActiveInfo", "Headers", "Window", "Request", "RequestInit", "RequestMode", "RequestCredentials", "Response", "XmlHttpRequest", "XmlHttpRequestResponseType", "PerformanceTiming", "Performance", "Url", "ReadableStream", "File", "FileList",]
features = [ "console", "CssStyleDeclaration", "Document", "Element", "HtmlCollection", "HtmlElement", "HtmlImageElement", "HtmlCanvasElement", "Blob", "ImageBitmap", "ImageData", "CanvasRenderingContext2d", "WebGlBuffer", "WebGlContextAttributes", "WebGlFramebuffer", "WebGlProgram", "WebGlShader", "WebGlUniformLocation", "WebGlTexture", "WebGlActiveInfo", "Headers", "Window", "Request", "RequestInit", "RequestMode", "Response", "XmlHttpRequest", "XmlHttpRequestResponseType", "PerformanceTiming", "Performance", "Url", "ReadableStream", "File", "FileList",]
[dev-dependencies.image-decoder]
package = "image"

View File

@@ -1,6 +1,6 @@
[package]
name = "al-api"
version = "3.7.0"
version = "3.6.5"
authors = ["baumannmatthieu0@gmail.com", "matthieu.baumann@astro.unistra.fr"]
edition = "2018"

View File

@@ -32,5 +32,5 @@ use std::cmp::Eq;
#[wasm_bindgen]
pub enum Formatter {
Sexagesimal,
Decimal,
}
Decimal
}

View File

@@ -4,6 +4,8 @@ use wasm_bindgen::prelude::wasm_bindgen;
#[cfg(feature = "webgl2")]
pub type WebGlRenderingCtx = web_sys::WebGl2RenderingContext;
#[cfg(feature = "webgl1")]
pub type WebGlRenderingCtx = web_sys::WebGlRenderingContext;
#[derive(Deserialize, Debug, Clone, Copy)]
#[serde(rename_all = "camelCase")]
@@ -92,7 +94,7 @@ impl fmt::Display for BlendFactor {
BlendFactor::OneMinusSrcAlpha => "OneMinusSrcAlpha",
BlendFactor::OneMinusConstantColor => "OneMinusConstantColor",
};
write!(f, "{str}")
write!(f, "{}", str)
}
}
impl fmt::Display for BlendFunc {
@@ -111,6 +113,6 @@ impl fmt::Display for BlendFunc {
#[cfg(feature = "webgl2")]
BlendFunc::Max => "Max",*/
};
write!(f, "{str}")
write!(f, "{}", str)
}
}

View File

@@ -1,8 +1,9 @@
use serde::{Deserialize, Serialize};
use serde::{Serialize, Deserialize};
#[derive(Clone, Debug, Deserialize, Serialize)]
#[derive(Clone, Debug)]
#[derive(Deserialize, Serialize)]
pub struct HEALPixCellProjeted {
pub ipix: u64,
pub vx: [f64; 4],
pub vy: [f64; 4],
}
}

View File

@@ -34,7 +34,7 @@ pub struct ColorRGBA {
}
use std::ops::Mul;
impl Mul<f32> for &ColorRGB {
impl<'a> Mul<f32> for &'a ColorRGB {
// The multiplication of rational numbers is a closed operation.
type Output = ColorRGB;

View File

@@ -1,30 +1,39 @@
use cgmath::Matrix3;
const GAL2ICRS: &Matrix3<f64> = &Matrix3::new(
-0.444_829_721_222_053_7,
0.746_982_183_984_509_4,
0.494_109_437_197_107_65,
-0.198_076_337_275_070_57,
0.455_983_813_691_152_4,
-0.867_666_137_557_162_6,
-0.873_437_051_955_779_1,
-0.483_835_073_616_418_37,
-0.054_875_657_712_619_68,
const GAL2ICRS: &'static Matrix3<f64> = &Matrix3::new(
-0.44482972122205372312012370920248,
0.74698218398450941835110635824212,
0.49410943719710765017955928850141,
-0.19807633727507056817237662907031,
0.45598381369115237931077906137440,
-0.86766613755716255824577781583414,
-0.87343705195577915249273984034980,
-0.48383507361641838378786914298189,
-0.05487565771261968232908806948676,
);
const ICRS2GAL: &Matrix3<f64> = &Matrix3::new(
-0.444_829_721_222_053_7,
-0.198_076_337_275_070_57,
-0.873_437_051_955_779_1,
0.746_982_183_984_509_4,
0.455_983_813_691_152_4,
-0.483_835_073_616_418_37,
0.494_109_437_197_107_65,
-0.867_666_137_557_162_6,
-0.054_875_657_712_619_68,
const ICRS2GAL: &'static Matrix3<f64> = &Matrix3::new(
-0.44482972122205372312012370920248,
-0.19807633727507056817237662907031,
-0.87343705195577915249273984034980,
0.74698218398450941835110635824212,
0.45598381369115237931077906137440,
-0.48383507361641838378786914298189,
0.49410943719710765017955928850141,
-0.86766613755716255824577781583414,
-0.05487565771261968232908806948676,
);
const ID: &'static Matrix3<f64> = &Matrix3::new(
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
);
const ID: &Matrix3<f64> = &Matrix3::new(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0);
use serde::Deserialize;
use wasm_bindgen::prelude::*;

View File

@@ -57,9 +57,6 @@ pub struct HiPSProperties {
max_cutout: Option<f32>,
creator_did: String,
request_credentials: String,
request_mode: String,
}
impl HiPSProperties {
@@ -127,16 +124,6 @@ impl HiPSProperties {
pub fn get_initial_dec(&self) -> Option<f64> {
self.hips_initial_dec
}
#[inline(always)]
pub fn get_request_credentials(&self) -> &str {
&self.request_credentials
}
#[inline(always)]
pub fn get_request_mode(&self) -> &str {
&self.request_mode
}
}
#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -163,10 +150,9 @@ impl std::fmt::Display for ImageExt {
use serde::Serialize;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
#[derive(Clone, Copy, PartialEq, Debug, Deserialize, Serialize, Default)]
#[derive(Clone, Copy, PartialEq, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum TransferFunction {
#[default]
Linear,
Sqrt,
Log,
@@ -190,6 +176,12 @@ impl TransferFunction {
}
}
impl Default for TransferFunction {
fn default() -> Self {
TransferFunction::Linear
}
}
impl From<String> for TransferFunction {
fn from(id: String) -> Self {
TransferFunction::new(&id)

View File

@@ -2,29 +2,27 @@
It is used by al-ui and any javascript application calling
the WASM core of aladin lite v3
*/
pub mod angle;
pub mod blend;
pub mod cell;
pub mod color;
pub mod colormap;
pub mod coo_system;
pub mod fov;
pub mod grid;
pub mod hips;
pub mod image;
pub mod moc;
pub mod resources;
pub mod cell;
pub mod fov;
pub mod image;
pub mod angle;
pub trait Abort {
type Item;
fn unwrap_abort(self) -> Self::Item
where
Self: Sized;
fn unwrap_abort(self) -> Self::Item where Self: Sized;
}
impl<T> Abort for Option<T> {
type Item = T;
#[inline]
fn unwrap_abort(self) -> Self::Item {
use std::process;

View File

@@ -1,6 +1,7 @@
use wasm_bindgen::prelude::wasm_bindgen;
use super::color::{Color, ColorRGBA};
#[derive(Clone, Debug)]
#[wasm_bindgen]
pub struct MOCOptions {
@@ -18,7 +19,6 @@ use crate::{color::ColorRGB, Abort};
use std::convert::TryInto;
#[wasm_bindgen]
impl MOCOptions {
#[allow(clippy::too_many_arguments)]
#[wasm_bindgen(constructor)]
pub fn new(
uuid: String,

View File

@@ -1,6 +1,6 @@
[package]
name = "al-core"
version = "3.7.0"
version = "3.6.5"
authors = ["baumannmatthieu0@gmail.com", "matthieu.baumann@astro.unistra.fr"]
edition = "2018"
@@ -9,7 +9,7 @@ js-sys = "0.3.47"
cgmath = "*"
jpeg-decoder = "0.3.0"
png = "0.17.6"
fitsrs = "0.3.4"
fitsrs = "0.2.10"
al-api = { path = "../al-api" }
serde = { version = "^1.0.59", features = ["derive"] }
serde_json = "1.0"

View File

@@ -2,13 +2,13 @@ use std::collections::HashMap;
use colorgrad::Color;
use crate::shader::SendUniformsWithParams;
use crate::Texture2D;
use crate::WebGlContext;
use crate::image::format;
use crate::shader::SendUniformsWithParams;
use crate::texture::format::RGBA8U;
use crate::webgl_ctx::WebGlRenderingCtx;
use wasm_bindgen::JsValue;
use crate::webgl_ctx::WebGlRenderingCtx;
const WIDTH_CMAP_TEX: usize = 256;
@@ -20,10 +20,7 @@ pub struct Colormap {
}
impl Colormap {
pub fn new(label: &str, grad: colorgrad::Gradient) -> Self {
Self {
label: label.to_string(),
grad,
}
Self { label: label.to_string(), grad }
}
pub fn label(&self) -> &Label {
@@ -32,20 +29,18 @@ impl Colormap {
}
fn build_cmaps_texture(gl: &WebGlContext, cmaps: &[Colormap]) -> Result<Texture2D, JsValue> {
let tex_bytes: Vec<u8> = cmaps
.iter()
.flat_map(|cmap| {
let tex_bytes: Vec<u8> = cmaps.iter()
.map(|cmap| {
let mut values = [0_u8; 1024];
for ix in 0..WIDTH_CMAP_TEX {
let rgba = cmap.grad.at(ix as f64 / WIDTH_CMAP_TEX as f64).to_rgba8();
let ptr = values[4 * ix..].as_mut_ptr() as *mut [u8; 4];
unsafe {
*ptr = rgba;
}
let ptr = values[4*ix..].as_mut_ptr() as *mut [u8; 4];
unsafe { *ptr = rgba; }
}
values
})
.flatten()
.collect();
let tex_params = &[
(
@@ -68,12 +63,12 @@ fn build_cmaps_texture(gl: &WebGlContext, cmaps: &[Colormap]) -> Result<Texture2
),
];
Texture2D::create_from_raw_pixels::<RGBA8U>(
Texture2D::create_from_raw_pixels::<format::RGBA8U>(
gl,
WIDTH_CMAP_TEX as i32,
cmaps.len() as i32,
tex_params,
Some(&tex_bytes[..]),
Some(&tex_bytes[..])
)
}
@@ -92,38 +87,20 @@ use crate::Abort;
impl Colormaps {
pub fn new(gl: &WebGlContext) -> Result<Self, JsValue> {
let labels: Vec<_> = [
"blues",
"cividis",
"cubehelix",
"eosb",
"grayscale",
"inferno",
"magma",
"native",
"parula",
"plasma",
"rainbow",
"rdbu",
"rdylbu",
"redtemperature",
"sinebow",
"spectral",
"summer",
"viridis",
"ylgnbu",
"ylorbr",
"red",
"green",
"blue",
"blues", "cividis", "cubehelix", "eosb",
"grayscale", "inferno", "magma", "native",
"parula", "plasma", "rainbow", "rdbu",
"rdylbu", "redtemperature", "sinebow", "spectral", "summer",
"viridis", "ylgnbu", "ylorbr", "red", "green", "blue"
]
.iter()
.map(|cmap_name| cmap_name.to_string())
.collect();
let indices = labels
.iter()
.enumerate()
.map(|(id, label)| (label.clone(), id as i32))
let indices = labels.iter().enumerate()
.map(|(id, label)| {
(label.clone(), id as i32)
})
.collect();
let cmaps = vec![
@@ -134,14 +111,14 @@ impl Colormaps {
Colormap::new("grayscale", {
colorgrad::CustomGradient::new()
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
Colormap::new("inferno", colorgrad::inferno()),
Colormap::new("magma", colorgrad::magma()),
Colormap::new("native", {
colorgrad::CustomGradient::new()
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
Colormap::new("parula", {
colorgrad::CustomGradient::new()
@@ -155,7 +132,7 @@ impl Colormaps {
Color::from_rgba8(249, 250, 20, 255),
])
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
Colormap::new("plasma", colorgrad::plasma()),
Colormap::new("rainbow", {
@@ -173,7 +150,7 @@ impl Colormaps {
Color::from_rgba8(255, 0, 0, 255),
])
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
Colormap::new("rdbu", colorgrad::rd_bu()),
Colormap::new("rdylbu", colorgrad::rd_yl_bu()),
@@ -186,7 +163,7 @@ impl Colormaps {
Color::new(1.0, 1.0, 1.0, 1.0),
])
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
Colormap::new("sinebow", colorgrad::sinebow()),
Colormap::new("spectral", colorgrad::spectral()),
@@ -201,7 +178,7 @@ impl Colormaps {
Color::new(1.0, 0.0, 0.0, 1.0),
])
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
Colormap::new("green", {
colorgrad::CustomGradient::new()
@@ -210,7 +187,7 @@ impl Colormaps {
Color::new(0.0, 1.0, 0.0, 1.0),
])
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
Colormap::new("blue", {
colorgrad::CustomGradient::new()
@@ -219,20 +196,14 @@ impl Colormaps {
Color::new(0.0, 0.0, 1.0, 1.0),
])
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?
}),
];
let cmaps_tex = build_cmaps_texture(gl, &cmaps[..])?;
let gl = gl.clone();
Ok(Self {
cmaps,
cmaps_tex,
labels,
indices,
gl,
})
Ok(Self { cmaps, cmaps_tex, labels, indices, gl })
}
#[inline]
@@ -242,14 +213,12 @@ impl Colormaps {
#[inline]
pub fn get(&self, label: &str) -> &Colormap {
if let Some(&id) = self.get_id(label) {
if let Some(id) = self.get_id(label).map(|id| *id) {
&self.cmaps[id as usize]
} else {
crate::log::console_warn(format!(
"{label:?} is not a valid colormap, replaced with 'grayscale'.",
));
let id_greys = self.get_id("grayscale").unwrap_abort();
&self.cmaps[*id_greys as usize]
crate::log::console_warn(&format!("{:?} is not a valid colormap, replaced with 'grayscale'.", label));
let id_greys = self.get_id("grayscale").map(|id| *id).unwrap_abort();
&self.cmaps[id_greys as usize]
}
}
@@ -259,13 +228,13 @@ impl Colormaps {
}
pub fn add_cmap(&mut self, label: Label, cmap: Colormap) -> Result<(), JsValue> {
if let Some(&id) = self.get_id(&label) {
if let Some(id) = self.get_id(&label).map(|id| *id) {
let colormap = &mut self.cmaps[id as usize];
*colormap = cmap;
} else {
let num_cmaps = self.labels.len();
self.labels.push(label.clone());
self.indices.insert(label, num_cmaps as i32);
self.cmaps.push(cmap);
}
@@ -277,7 +246,7 @@ impl Colormaps {
}
}
use crate::shader::{SendUniforms, ShaderBound};
use crate::shader::{ShaderBound, SendUniforms};
impl SendUniforms for Colormaps {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
shader
@@ -289,11 +258,7 @@ impl SendUniforms for Colormaps {
}
impl SendUniformsWithParams<Colormaps> for Colormap {
fn attach_uniforms_with_params<'a>(
&self,
shader: &'a ShaderBound<'a>,
cmaps: &Colormaps,
) -> &'a ShaderBound<'a> {
fn attach_uniforms_with_params<'a>(&self, shader: &'a ShaderBound<'a>, cmaps: &Colormaps) -> &'a ShaderBound<'a> {
let cmap_id = cmaps.get_id(&self.label).unwrap_abort();
shader.attach_uniform("colormap_id", &(*cmap_id as f32));
shader

View File

@@ -6,11 +6,11 @@ pub struct Bitmap<F> {
format: std::marker::PhantomData<F>,
}
use crate::image::format::ImageFormat;
use crate::image::Image;
use crate::texture::format::TextureFormat;
impl<F> Bitmap<F>
where
F: TextureFormat + Clone,
F: ImageFormat + Clone,
{
pub fn new(image: web_sys::ImageBitmap) -> Self {
Self {
@@ -23,7 +23,7 @@ use crate::texture::Tex3D;
use wasm_bindgen::JsValue;
impl<F> Image for Bitmap<F>
where
F: TextureFormat + Clone,
F: ImageFormat + Clone,
{
fn insert_into_3d_texture<T: Tex3D>(
&self,
@@ -34,8 +34,4 @@ where
Ok(())
}
fn get_size(&self) -> (u32, u32) {
(self.image.width(), self.image.height())
}
}

View File

@@ -7,7 +7,7 @@ pub struct Canvas<F> {
impl<F> Canvas<F>
where
F: TextureFormat + Clone,
F: ImageFormat + Clone,
{
pub fn new(canvas: web_sys::HtmlCanvasElement) -> Self {
Self {
@@ -17,14 +17,14 @@ where
}
}
use crate::image::format::ImageFormat;
use crate::image::Image;
use crate::texture::format::TextureFormat;
use crate::texture::Tex3D;
use cgmath::Vector3;
use wasm_bindgen::JsValue;
impl<F> Image for Canvas<F>
where
F: TextureFormat,
F: ImageFormat,
{
fn insert_into_3d_texture<T: Tex3D>(
&self,
@@ -42,8 +42,4 @@ where
Ok(())
}
fn get_size(&self) -> (u32, u32) {
(self.canvas.width(), self.canvas.height())
}
}

View File

@@ -1,113 +1,123 @@
use crate::texture::format::TextureFormat;
use crate::texture::format::R8U;
use cgmath::Vector3;
use fitsrs::card::Value;
use fitsrs::hdu::header::extension::image::Image as XImage;
use fitsrs::hdu::header::Bitpix;
use fitsrs::hdu::header::Header;
use fitsrs::WCS;
use fitsrs::{Fits, HDU};
use std::fmt::Debug;
use std::io::Cursor;
use wasm_bindgen::JsValue;
use cgmath::{Vector2, Vector3};
#[derive(Debug)]
pub struct FitsImage<'a> {
// get a reference to the header
pub header: Header<XImage>,
// image size
pub width: u32,
pub height: u32,
pub depth: u32,
// bitpix
pub bitpix: Bitpix,
// 1.0 by default
pub bscale: f32,
// 0.0 by default
pub bzero: f32,
// blank
pub blank: Option<f32>,
// optional wcs
pub wcs: Option<WCS>,
// raw bytes of the data image (in Big-Endian)
pub raw_bytes: &'a [u8],
pub struct Fits<'a> {
// Tile size
size: Vector2<i32>,
pub data: Data<'a>,
}
impl<'a> FitsImage<'a> {
/// Get all the hdu images from a fits file
pub fn from_raw_bytes(bytes: &'a [u8]) -> Result<Vec<Self>, JsValue> {
let mut fits = Fits::from_reader(Cursor::new(bytes));
let mut images = vec![];
use std::borrow::Cow;
use std::fmt::Debug;
#[derive(Debug)]
pub enum Data<'a> {
U8(Cow<'a, [u8]>),
I16(Cow<'a, [i16]>),
I32(Cow<'a, [i32]>),
F32(Cow<'a, [f32]>),
}
use fitsrs::{fits::Fits as FitsData, hdu::data::InMemData};
use std::io::Cursor;
while let Some(Ok(hdu)) = fits.next() {
match hdu {
HDU::XImage(hdu) | HDU::Primary(hdu) => {
// Prefer getting the dimension directly from NAXIS1/NAXIS2 instead of from the WCS
// because it may not exist in all HDU images
let width = hdu.get_header().get_xtension().get_naxisn(1);
let height = hdu.get_header().get_xtension().get_naxisn(2);
impl<'a> Fits<'a> {
pub fn from_byte_slice(bytes_reader: &'a mut Cursor<&[u8]>) -> Result<Self, JsValue> {
let FitsData { hdu } = FitsData::from_reader(bytes_reader)
.map_err(|_| JsValue::from_str(&"Parsing fits error"))?;
if let (Some(&width), Some(&height)) = (width, height) {
let depth =
*hdu.get_header().get_xtension().get_naxisn(3).unwrap_or(&1) as u32;
let header = hdu.get_header();
let xtension = header.get_xtension();
let width = xtension
.get_naxisn(1)
.ok_or_else(|| JsValue::from_str("NAXIS1 not found in the fits"))?;
let header = hdu.get_header();
let height = xtension
.get_naxisn(2)
.ok_or_else(|| JsValue::from_str("NAXIS2 not found in the fits"))?;
let bscale = match header.get("BSCALE") {
Some(Value::Integer { value, .. }) => *value as f32,
Some(Value::Float { value, .. }) => *value as f32,
_ => 1.0,
};
let bzero = match header.get("BZERO") {
Some(Value::Integer { value, .. }) => *value as f32,
Some(Value::Float { value, .. }) => *value as f32,
_ => 0.0,
};
let blank = match header.get("BLANK") {
Some(Value::Integer { value, .. }) => Some(*value as f32),
Some(Value::Float { value, .. }) => Some(*value as f32),
_ => None,
};
let off = hdu.get_data_unit_byte_offset() as usize;
let len = hdu.get_data_unit_byte_size() as usize;
let raw_bytes = &bytes[off..(off + len)];
let bitpix = hdu.get_header().get_xtension().get_bitpix();
let wcs = hdu.wcs().ok();
images.push(Self {
header: hdu.get_header().clone(),
width: width as u32,
height: height as u32,
depth,
bitpix,
bscale,
wcs,
bzero,
blank,
raw_bytes,
});
}
}
_ => (),
let data = hdu.get_data();
let data = match *data {
InMemData::U8(slice) => Data::U8(Cow::Borrowed(slice)),
InMemData::I16(slice) => Data::I16(Cow::Borrowed(slice)),
InMemData::I32(slice) => Data::I32(Cow::Borrowed(slice)),
InMemData::I64(slice) => {
let data = slice.iter().map(|v| *v as i32).collect();
Data::I32(Cow::Owned(data))
}
}
InMemData::F32(slice) => Data::F32(Cow::Borrowed(slice)),
InMemData::F64(slice) => {
let data = slice.iter().map(|v| *v as f32).collect();
Data::F32(Cow::Owned(data))
}
};
if !images.is_empty() {
Ok(images)
} else {
Err(JsValue::from_str("Image HDU not found in the FITS"))
}
Ok(Self {
// Tile size
size: Vector2::new(*width as i32, *height as i32),
// Allocation info of the layout
data,
})
}
pub fn get_size(&self) -> &Vector2<i32> {
&self.size
}
}
/*impl Fits<'static> {
pub async fn from_async_reader(reader: IntoAsyncRead<'static>) -> Result<Self, JsValue> {
let fitsrs::fits::AsyncFits { hdu: AsyncHDU { data, header } } = fitsrs::fits::AsyncFits::from_reader(futures::io::BufReader::new(reader))
.await
.map_err(|err| {
JsValue::from_str(&format!("Parsing fits error: {}", err))
})?;
let width = header.get_axis_size(1)
.ok_or_else(|| JsValue::from_str("NAXIS1 not found in the fits"))?;
let height = header.get_axis_size(2)
.ok_or_else(|| JsValue::from_str("NAXIS2 not found in the fits"))?;
let data = match data {
fitsrs::hdu::data_async::DataOwned::U8(stream) => {
let data = stream.collect().await;
Data::U8(Cow::Owned(data))
},
fitsrs::hdu::data_async::DataOwned::I16(stream) => {
let data = stream.collect().await;
Data::I16(Cow::Owned(data))
},
fitsrs::hdu::data_async::DataOwned::I32(stream) => {
let data = stream.collect().await;
Data::I32(Cow::Owned(data))
},
fitsrs::hdu::data_async::DataOwned::I64(stream) => {
let data = stream.map(|v| v as i32).collect().await;
Data::I32(Cow::Owned(data))
},
fitsrs::hdu::data_async::DataOwned::F32(stream) => {
let data = stream.collect().await;
Data::F32(Cow::Owned(data))
},
fitsrs::hdu::data_async::DataOwned::F64(stream) => {
let data = stream.map(|v| v as f32).collect().await;
Data::F32(Cow::Owned(data))
}
};
Ok(Self {
// Tile size
size: Vector2::new(*width as i32, *height as i32),
// Allocation info of the layout
data
})
}
}*/
use crate::{image::Image, texture::Tex3D};
impl Image for FitsImage<'_> {
impl Image for Fits<'_> {
fn insert_into_3d_texture<T: Tex3D>(
&self,
// The texture array
@@ -115,21 +125,89 @@ impl Image for FitsImage<'_> {
// An offset to write the image in the texture array
offset: &Vector3<i32>,
) -> Result<(), JsValue> {
let view = unsafe { R8U::view(self.raw_bytes) };
textures.tex_sub_image_3d_with_opt_array_buffer_view(
offset.x,
offset.y,
offset.z,
self.width as i32,
self.height as i32,
self.depth as i32,
Some(view.as_ref()),
);
match &self.data {
Data::U8(data) => {
let view = unsafe { R8UI::view(&data) };
textures.tex_sub_image_3d_with_opt_array_buffer_view(
offset.x,
offset.y,
offset.z,
self.size.x,
self.size.y,
1,
Some(view.as_ref()),
);
}
Data::I16(data) => {
let view = unsafe { R16I::view(&data) };
textures.tex_sub_image_3d_with_opt_array_buffer_view(
offset.x,
offset.y,
offset.z,
self.size.x,
self.size.y,
1,
Some(view.as_ref()),
);
}
Data::I32(data) => {
let view = unsafe { R32I::view(&data) };
textures.tex_sub_image_3d_with_opt_array_buffer_view(
offset.x,
offset.y,
offset.z,
self.size.x,
self.size.y,
1,
Some(view.as_ref()),
);
}
Data::F32(data) => {
let view = unsafe { R8UI::view(&std::slice::from_raw_parts(data.as_ptr() as *const u8, data.len() * 4)) };
textures.tex_sub_image_3d_with_opt_array_buffer_view(
offset.x,
offset.y,
offset.z,
self.size.x,
self.size.y,
1,
Some(view.as_ref()),
);
}
}
Ok(())
}
fn get_size(&self) -> (u32, u32) {
(self.width, self.height)
}
}
use crate::image::format::ImageFormat;
use wasm_bindgen::JsValue;
pub trait FitsImageFormat: ImageFormat {
const BITPIX: i8;
}
use crate::image::R32F;
impl FitsImageFormat for R32F {
const BITPIX: i8 = -32;
}
#[cfg(feature = "webgl2")]
use crate::image::{R16I, R32I, R64F, R8UI};
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R64F {
const BITPIX: i8 = -64;
}
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R32I {
const BITPIX: i8 = 32;
}
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R16I {
const BITPIX: i8 = 16;
}
#[cfg(feature = "webgl2")]
impl FitsImageFormat for R8UI {
const BITPIX: i8 = 8;
}

View File

@@ -1,9 +1,314 @@
use crate::texture::format::PixelType;
use crate::texture::pixel::Pixel;
use al_api::hips::ImageExt;
pub enum Bytes<'a> {
Borrowed(&'a [u8]),
Owned(Vec<u8>),
}
pub trait ImageFormat {
type P: Pixel;
type ArrayBufferView: AsRef<js_sys::Object>;
const NUM_CHANNELS: usize;
const FORMAT: u32;
const INTERNAL_FORMAT: i32;
const TYPE: u32;
const CHANNEL_TYPE: ChannelType;
/// Creates a JS typed array which is a view into wasm's linear memory at the slice specified.
/// This function returns a new typed array which is a view into wasm's memory. This view does not copy the underlying data.
///
/// # Safety
///
/// Views into WebAssembly memory are only valid so long as the backing buffer isn't resized in JS. Once this function is called any future calls to Box::new (or malloc of any form) may cause the returned value here to be invalidated. Use with caution!
///
/// Additionally the returned object can be safely mutated but the input slice isn't guaranteed to be mutable.
///
/// Finally, the returned object is disconnected from the input slice's lifetime, so there's no guarantee that the data is read at the right time.
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str>;
}
use crate::webgl_ctx::WebGlRenderingCtx;
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct RGB8U;
impl ImageFormat for RGB8U {
type P = [u8; 3];
const NUM_CHANNELS: usize = 3;
const FORMAT: u32 = WebGlRenderingCtx::RGB as u32;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGB8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const CHANNEL_TYPE: ChannelType = ChannelType::RGB8U;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
let mut decoder = jpeg::Decoder::new(raw_bytes);
let bytes = decoder
.decode()
.map_err(|_| "Cannot decoder jpeg. This image may not be compressed.")?;
Ok(Bytes::Owned(bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct RGBA8U;
#[cfg(feature = "webgl2")]
impl ImageFormat for RGBA8U {
type P = [u8; 4];
const NUM_CHANNELS: usize = 4;
const FORMAT: u32 = WebGlRenderingCtx::RGBA as u32;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const CHANNEL_TYPE: ChannelType = ChannelType::RGBA8U;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
let mut decoder = jpeg::Decoder::new(raw_bytes);
let bytes = decoder
.decode()
.map_err(|_| "Cannot decoder png. This image may not be compressed.")?;
Ok(Bytes::Owned(bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct RGBA32F;
impl ImageFormat for RGBA32F {
type P = [f32; 4];
const NUM_CHANNELS: usize = 4;
const FORMAT: u32 = WebGlRenderingCtx::RGBA as u32;
#[cfg(feature = "webgl2")]
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA32F as i32;
#[cfg(feature = "webgl1")]
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA as i32;
const CHANNEL_TYPE: ChannelType = ChannelType::RGBA32F;
const TYPE: u32 = WebGlRenderingCtx::FLOAT;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Float32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct RGB32F;
impl ImageFormat for RGB32F {
type P = [f32; 3];
const NUM_CHANNELS: usize = 3;
const FORMAT: u32 = WebGlRenderingCtx::RGB as u32;
#[cfg(feature = "webgl2")]
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGB32F as i32;
#[cfg(feature = "webgl1")]
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGB as i32;
const CHANNEL_TYPE: ChannelType = ChannelType::RGB32F;
const TYPE: u32 = WebGlRenderingCtx::FLOAT;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Float32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R32F;
impl ImageFormat for R32F {
type P = [u8; 4];
const NUM_CHANNELS: usize = 4;
const FORMAT: u32 = WebGlRenderingCtx::RGBA as u32;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const CHANNEL_TYPE: ChannelType = ChannelType::R32F;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R64F;
impl ImageFormat for R64F {
type P = [u8; 4];
const NUM_CHANNELS: usize = 4;
const FORMAT: u32 = WebGlRenderingCtx::RGBA as u32;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const CHANNEL_TYPE: ChannelType = ChannelType::R32F;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R8UI;
#[cfg(feature = "webgl2")]
impl ImageFormat for R8UI {
type P = [u8; 1];
const NUM_CHANNELS: usize = 1;
const FORMAT: u32 = WebGlRenderingCtx::RED_INTEGER as u32;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::R8UI as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const CHANNEL_TYPE: ChannelType = ChannelType::R8UI;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R16I;
#[cfg(feature = "webgl2")]
impl ImageFormat for R16I {
type P = [i16; 1];
const NUM_CHANNELS: usize = 1;
const FORMAT: u32 = WebGlRenderingCtx::RED_INTEGER as u32;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::R16I as i32;
const TYPE: u32 = WebGlRenderingCtx::SHORT;
const CHANNEL_TYPE: ChannelType = ChannelType::R16I;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Int16Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[cfg(feature = "webgl2")]
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R32I;
#[cfg(feature = "webgl2")]
impl ImageFormat for R32I {
type P = [i32; 1];
const NUM_CHANNELS: usize = 1;
const FORMAT: u32 = WebGlRenderingCtx::RED_INTEGER as u32;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::R32I as i32;
const TYPE: u32 = WebGlRenderingCtx::INT;
const CHANNEL_TYPE: ChannelType = ChannelType::R32I;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Int32Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub enum ChannelType {
RGBA32F,
RGB32F,
RGBA8U,
RGB8U,
R32F,
#[cfg(feature = "webgl2")]
R64F,
#[cfg(feature = "webgl2")]
R8UI,
#[cfg(feature = "webgl2")]
R16I,
#[cfg(feature = "webgl2")]
R32I,
}
impl ChannelType {
pub fn is_colored(&self) -> bool {
match self {
ChannelType::RGBA32F
| ChannelType::RGB32F
| ChannelType::RGBA8U
| ChannelType::RGB8U => true,
_ => false,
}
}
}
pub const NUM_CHANNELS: usize = 9;
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub struct ImageFormatType {
pub ext: ImageExt,
pub fmt: PixelType,
pub channel: ChannelType,
}
impl ImageFormatType {
@@ -11,11 +316,11 @@ impl ImageFormatType {
&self.ext
}
pub fn get_pixel_format(&self) -> PixelType {
self.fmt
pub fn get_channel(&self) -> ChannelType {
self.channel
}
pub fn is_colored(&self) -> bool {
!matches!(self.ext, ImageExt::Fits)
self.channel.is_colored()
}
}

View File

@@ -7,7 +7,7 @@ pub struct HTMLImage<F> {
impl<F> HTMLImage<F>
where
F: TextureFormat + Clone,
F: ImageFormat + Clone,
{
pub fn new(image: web_sys::HtmlImageElement) -> Self {
Self {
@@ -17,14 +17,14 @@ where
}
}
use crate::image::format::ImageFormat;
use crate::image::Image;
use crate::texture::format::TextureFormat;
use crate::texture::Tex3D;
use cgmath::Vector3;
use wasm_bindgen::JsValue;
impl<F> Image for HTMLImage<F>
where
F: TextureFormat,
F: ImageFormat,
{
fn insert_into_3d_texture<T: Tex3D>(
&self,
@@ -42,8 +42,4 @@ where
Ok(())
}
fn get_size(&self) -> (u32, u32) {
(self.image.width(), self.image.height())
}
}

View File

@@ -5,10 +5,6 @@ pub mod format;
pub mod html;
pub mod raw;
use crate::image::bitmap::Bitmap;
use crate::image::raw::ImageBuffer;
use crate::texture::format::RGB8U;
use crate::texture::format::RGBA8U;
pub trait ArrayBuffer: AsRef<js_sys::Object> + std::fmt::Debug {
type Item: std::cmp::PartialOrd + Clone + Copy + std::fmt::Debug + cgmath::Zero;
@@ -37,7 +33,8 @@ impl ArrayBuffer for ArrayU8 {
fn empty(size: u32, blank_value: Self::Item) -> Self {
let uint8_arr = js_sys::Uint8Array::new_with_length(size).fill(blank_value, 0, size);
ArrayU8(uint8_arr)
let array = ArrayU8(uint8_arr);
array
}
fn to_vec(&self) -> Vec<Self::Item> {
@@ -68,7 +65,8 @@ impl ArrayBuffer for ArrayI16 {
fn empty(size: u32, blank_value: Self::Item) -> Self {
let int16_arr = js_sys::Int16Array::new_with_length(size).fill(blank_value, 0, size);
ArrayI16(int16_arr)
let array = ArrayI16(int16_arr);
array
}
fn to_vec(&self) -> Vec<Self::Item> {
@@ -99,7 +97,8 @@ impl ArrayBuffer for ArrayI32 {
fn empty(size: u32, blank_value: Self::Item) -> Self {
let int32_arr = js_sys::Int32Array::new_with_length(size).fill(blank_value, 0, size);
ArrayI32(int32_arr)
let array = ArrayI32(int32_arr);
array
}
fn to_vec(&self) -> Vec<Self::Item> {
@@ -130,7 +129,8 @@ impl ArrayBuffer for ArrayF32 {
}
fn empty(size: u32, blank_value: Self::Item) -> Self {
let f32_arr = js_sys::Float32Array::new_with_length(size).fill(blank_value, 0, size);
ArrayF32(f32_arr)
let array = ArrayF32(f32_arr);
array
}
fn to_vec(&self) -> Vec<Self::Item> {
@@ -162,7 +162,8 @@ impl ArrayBuffer for ArrayF64 {
}
fn empty(size: u32, blank_value: Self::Item) -> Self {
let f64_arr = js_sys::Float64Array::new_with_length(size).fill(blank_value, 0, size);
ArrayF64(f64_arr)
let array = ArrayF64(f64_arr);
array
}
fn to_vec(&self) -> Vec<Self::Item> {
@@ -179,7 +180,6 @@ impl ArrayBuffer for ArrayF64 {
}
use self::canvas::Canvas;
use self::fits::FitsImage;
use self::html::HTMLImage;
use wasm_bindgen::JsValue;
pub trait Image {
@@ -190,11 +190,9 @@ pub trait Image {
// An offset to write the image in the texture array
offset: &Vector3<i32>,
) -> Result<(), JsValue>;
fn get_size(&self) -> (u32, u32);
}
impl<I> Image for &I
impl<'a, I> Image for &'a I
where
I: Image,
{
@@ -210,15 +208,9 @@ where
Ok(())
}
#[inline]
fn get_size(&self) -> (u32, u32) {
let image = &**self;
image.get_size()
}
}
use std::rc::Rc;
use std::{io::Cursor, rc::Rc};
impl<I> Image for Rc<I>
where
I: Image,
@@ -235,56 +227,65 @@ where
Ok(())
}
#[inline]
fn get_size(&self) -> (u32, u32) {
let image = &**self;
image.get_size()
}
}
use crate::texture::format::{R16I, R32F, R32I, R8U};
use crate::texture::Tex3D;
/*impl<I> Image for Arc<Mutex<Option<I>>>
where
I: Image,
{
fn tex_sub_image_3d(
&self,
// The texture array
textures: &Texture2DArray,
// An offset to write the image in the texture array
offset: &Vector3<i32>,
) -> Result<(), JsValue> {
if let Some(image) = &*self.lock().unwrap_abort() {
image.tex_sub_image_3d(textures, offset)?;
}
Ok(())
}
}*/
#[cfg(feature = "webgl2")]
use crate::image::format::{R16I, R32I, R64F, R8UI};
use crate::{
image::format::{R32F, RGB8U, RGBA8U},
texture::Tex3D,
};
use bitmap::Bitmap;
use fits::Fits;
use raw::ImageBuffer;
#[derive(Debug)]
#[cfg(feature = "webgl2")]
pub enum ImageType {
FitsRawBytes {
raw_bytes: js_sys::Uint8Array,
size: (u32, u32),
},
Canvas {
canvas: Canvas<RGBA8U>,
},
ImageRgba8u {
image: Bitmap<RGBA8U>,
},
ImageRgb8u {
image: Bitmap<RGB8U>,
},
HTMLImageRgba8u {
image: HTMLImage<RGBA8U>,
},
HTMLImageRgb8u {
image: HTMLImage<RGB8U>,
},
RawRgb8u {
image: ImageBuffer<RGB8U>,
},
RawRgba8u {
image: ImageBuffer<RGBA8U>,
},
RawR32f {
image: ImageBuffer<R32F>,
},
RawR32i {
image: ImageBuffer<R32I>,
},
RawR16i {
image: ImageBuffer<R16I>,
},
RawR8ui {
image: ImageBuffer<R8U>,
},
FitsImage { raw_bytes: js_sys::Uint8Array },
Canvas { canvas: Canvas<RGBA8U> },
ImageRgba8u { image: Bitmap<RGBA8U> },
ImageRgb8u { image: Bitmap<RGB8U> },
HTMLImageRgba8u { image: HTMLImage<RGBA8U> },
HTMLImageRgb8u { image: HTMLImage<RGB8U> },
RawRgb8u { image: ImageBuffer<RGB8U> },
RawRgba8u { image: ImageBuffer<RGBA8U> },
RawR32f { image: ImageBuffer<R32F> },
RawR32i { image: ImageBuffer<R32I> },
RawR16i { image: ImageBuffer<R16I> },
RawR8ui { image: ImageBuffer<R8UI> },
}
#[cfg(feature = "webgl1")]
pub enum ImageType {
FitsImage { raw_bytes: js_sys::Uint8Array },
Canvas { canvas: Canvas<RGBA8U> },
PngHTMLImageRgba8u { image: HTMLImage<RGBA8U> },
JpgHTMLImageRgb8u { image: HTMLImage<RGB8U> },
PngImageRgba8u { image: Bitmap<RGBA8U> },
JpgImageRgb8u { image: Bitmap<RGB8U> },
RawRgb8u { image: ImageBuffer<RGB8U> },
RawRgba8u { image: ImageBuffer<RGBA8U> },
RawR32f { image: ImageBuffer<R32F> },
}
use cgmath::Vector3;
@@ -297,16 +298,16 @@ impl Image for ImageType {
offset: &Vector3<i32>,
) -> Result<(), JsValue> {
match self {
ImageType::FitsRawBytes {
ImageType::FitsImage {
raw_bytes: raw_bytes_buf,
..
} => {
let raw_bytes = raw_bytes_buf.to_vec();
let num_bytes = raw_bytes_buf.length() as usize;
let mut raw_bytes = vec![0; num_bytes];
raw_bytes_buf.copy_to(&mut raw_bytes[..]);
let images = FitsImage::from_raw_bytes(&raw_bytes)?;
for image in images {
image.insert_into_3d_texture(textures, offset)?
}
let mut bytes_reader = Cursor::new(raw_bytes.as_slice());
let fits_img = Fits::from_byte_slice(&mut bytes_reader)?;
fits_img.insert_into_3d_texture(textures, offset)?
}
ImageType::Canvas { canvas } => canvas.insert_into_3d_texture(textures, offset)?,
ImageType::ImageRgba8u { image } => image.insert_into_3d_texture(textures, offset)?,
@@ -327,21 +328,4 @@ impl Image for ImageType {
Ok(())
}
fn get_size(&self) -> (u32, u32) {
match self {
ImageType::FitsRawBytes { size, .. } => *size,
ImageType::Canvas { canvas } => canvas.get_size(),
ImageType::ImageRgba8u { image } => image.get_size(),
ImageType::ImageRgb8u { image } => image.get_size(),
ImageType::HTMLImageRgba8u { image } => image.get_size(),
ImageType::HTMLImageRgb8u { image } => image.get_size(),
ImageType::RawRgb8u { image } => image.get_size(),
ImageType::RawRgba8u { image } => image.get_size(),
ImageType::RawR32f { image } => image.get_size(),
ImageType::RawR32i { image } => image.get_size(),
ImageType::RawR16i { image } => image.get_size(),
ImageType::RawR8ui { image } => image.get_size(),
}
}
}

View File

@@ -1,18 +1,17 @@
use crate::texture::format::TextureFormat;
use crate::image::format::ImageFormat;
use crate::texture::pixel::Pixel;
use crate::texture::Tex3D;
#[derive(Debug)]
#[allow(dead_code)]
pub struct ImageBuffer<T>
where
T: TextureFormat,
T: ImageFormat,
{
pub data: Vec<<<T as TextureFormat>::P as Pixel>::Item>,
pub data: Vec<<<T as ImageFormat>::P as Pixel>::Item>,
pub size: Vector2<i32>,
}
use crate::texture::format::Bytes;
use crate::image::format::Bytes;
pub struct ImageBufferView {
pub x: i32,
@@ -23,13 +22,9 @@ pub struct ImageBufferView {
use wasm_bindgen::JsValue;
impl<T> ImageBuffer<T>
where
T: TextureFormat,
T: ImageFormat,
{
pub fn new(
data: Vec<<<T as TextureFormat>::P as Pixel>::Item>,
width: i32,
height: i32,
) -> Self {
pub fn new(data: Vec<<<T as ImageFormat>::P as Pixel>::Item>, width: i32, height: i32) -> Self {
let size_buf = width * height * (T::NUM_CHANNELS as i32);
debug_assert!(size_buf == data.len() as i32);
//let buf = <<T as ImageFormat>::P as Pixel>::Container::new(buf);
@@ -42,19 +37,16 @@ where
width: i32,
height: i32,
) -> Result<Self, JsValue> {
let mut decoded_bytes = match T::decode(raw_bytes).map_err(JsValue::from_str)? {
let mut decoded_bytes = match T::decode(raw_bytes).map_err(|e| JsValue::from_str(e))? {
Bytes::Borrowed(bytes) => bytes.to_vec(),
Bytes::Owned(bytes) => bytes,
};
let decoded_pixels = unsafe {
decoded_bytes.set_len(
decoded_bytes.len()
/ std::mem::size_of::<<<T as TextureFormat>::P as Pixel>::Item>(),
decoded_bytes.len() / std::mem::size_of::<<<T as ImageFormat>::P as Pixel>::Item>(),
);
std::mem::transmute::<Vec<u8>, Vec<<<T as TextureFormat>::P as Pixel>::Item>>(
decoded_bytes,
)
std::mem::transmute(decoded_bytes)
};
Ok(Self::new(decoded_pixels, width, height))
@@ -65,8 +57,10 @@ where
debug_assert!(size_buf == raw_bytes.len() as i32);
let decoded_pixels = unsafe {
raw_bytes.set_len(raw_bytes.len() / std::mem::size_of::<<T::P as Pixel>::Item>());
std::mem::transmute::<Vec<u8>, Vec<<T::P as Pixel>::Item>>(raw_bytes)
raw_bytes.set_len(
raw_bytes.len() / std::mem::size_of::<<<T as ImageFormat>::P as Pixel>::Item>(),
);
std::mem::transmute(raw_bytes)
};
Self::new(decoded_pixels, width, height)
@@ -77,7 +71,7 @@ where
Self { data: vec![], size }
}
pub fn allocate(pixel_fill: &T::P, width: i32, height: i32) -> ImageBuffer<T> {
pub fn allocate(pixel_fill: &<T as ImageFormat>::P, width: i32, height: i32) -> ImageBuffer<T> {
let size_buf = ((width * height) as usize) * (T::NUM_CHANNELS);
let data = pixel_fill
@@ -116,11 +110,11 @@ where
}
}
pub fn iter(&self) -> impl Iterator<Item = &<T::P as Pixel>::Item> {
pub fn iter(&self) -> impl Iterator<Item = &<<T as ImageFormat>::P as Pixel>::Item> {
self.data.iter()
}
pub fn get_data(&self) -> &[<T::P as Pixel>::Item] {
pub fn get_data(&self) -> &[<<T as ImageFormat>::P as Pixel>::Item] {
&self.data
}
@@ -133,12 +127,12 @@ where
}
}
use crate::texture::format::{R16I, R32F, R32I, R8U, RGB8U, RGBA8U};
use crate::image::format::{R16I, R32F, R32I, R8UI, RGB8U, RGBA8U};
pub enum ImageBufferType {
JPG(ImageBuffer<RGB8U>),
PNG(ImageBuffer<RGBA8U>),
R32F(ImageBuffer<R32F>),
R8UI(ImageBuffer<R8U>),
R8UI(ImageBuffer<R8UI>),
R16I(ImageBuffer<R16I>),
R32I(ImageBuffer<R32I>),
}
@@ -147,7 +141,7 @@ use crate::image::{ArrayBuffer, Image};
use cgmath::{Vector2, Vector3};
impl<I> Image for ImageBuffer<I>
where
I: TextureFormat,
I: ImageFormat,
{
fn insert_into_3d_texture<T: Tex3D>(
&self,
@@ -156,7 +150,8 @@ where
// An offset to write the image in the texture array
offset: &Vector3<i32>,
) -> Result<(), JsValue> {
let js_array = <<I::P as Pixel>::Container as ArrayBuffer>::new(&self.data);
let js_array =
<<<I as ImageFormat>::P as Pixel>::Container as ArrayBuffer>::new(&self.data);
textures.tex_sub_image_3d_with_opt_array_buffer_view(
offset.x,
offset.y,
@@ -171,7 +166,7 @@ where
}
// The size of the image
fn get_size(&self) -> (u32, u32) {
(self.size.x as u32, self.size.y as u32)
}
/*fn get_size(&self) -> &Vector2<i32> {
&self.size
}*/
}

View File

@@ -6,11 +6,6 @@ extern "C" {
pub fn log(s: &str);
}
#[macro_export]
macro_rules! al_print {
($($arg:tt)*) => { al_core::log(&format!("{:?}", $($arg),*)) };
}
// ----------------------------------------------------------------------------
// Helpers to hide some of the verbosity of web_sys

View File

@@ -409,9 +409,9 @@ impl ArrayBuffer {
}
}
pub fn set_vertex_attrib_pointer_by_name<T: VertexAttribPointerType>(
pub fn set_vertex_attrib_pointer_by_name<'a, T: VertexAttribPointerType>(
&self,
shader: &ShaderBound<'_>,
shader: &ShaderBound<'a>,
location: &str,
) {
let loc = shader.get_attrib_location(&self.gl, location);
@@ -434,7 +434,11 @@ impl ArrayBuffer {
.vertex_attrib_divisor_angle(loc as u32, 0);
}
pub fn disable_vertex_attrib_pointer_by_name(&self, shader: &ShaderBound<'_>, location: &str) {
pub fn disable_vertex_attrib_pointer_by_name<'a>(
&self,
shader: &ShaderBound<'a>,
location: &str,
) {
let loc = shader.get_attrib_location(&self.gl, location);
self.gl.disable_vertex_attrib_array(loc as u32);
}

View File

@@ -49,7 +49,7 @@ impl ArrayBufferInstanced {
// Total length
let num_f32_in_buf = data.len() as i32;
let num_instances = num_f32_in_buf / num_f32_per_instance;
let num_instances = num_f32_in_buf / (num_f32_per_instance as i32);
let len = data.len();
let buffer = gl
@@ -98,9 +98,9 @@ impl ArrayBufferInstanced {
}
}
pub fn set_vertex_attrib_pointer_by_name<T: VertexAttribPointerType>(
pub fn set_vertex_attrib_pointer_by_name<'a, T: VertexAttribPointerType>(
&self,
shader: &ShaderBound<'_>,
shader: &ShaderBound<'a>,
location: &str,
) {
let loc = shader.get_attrib_location(&self.gl, location);
@@ -124,7 +124,11 @@ impl ArrayBufferInstanced {
.vertex_attrib_divisor_angle(loc as u32, 1);
}
pub fn disable_vertex_attrib_pointer_by_name(&self, shader: &ShaderBound<'_>, location: &str) {
pub fn disable_vertex_attrib_pointer_by_name<'a>(
&self,
shader: &ShaderBound<'a>,
location: &str,
) {
let loc = shader.get_attrib_location(&self.gl, location);
self.gl.disable_vertex_attrib_array(loc as u32);

View File

@@ -37,10 +37,7 @@ impl ElementArrayBuffer {
usage: u32,
data: B,
) -> ElementArrayBuffer {
let buffer = gl
.create_buffer()
.ok_or("failed to create buffer")
.unwrap_abort();
let buffer = gl.create_buffer().ok_or("failed to create buffer").unwrap_abort();
// Bind the buffer
gl.bind_buffer(
WebGlRenderingCtx::ELEMENT_ARRAY_BUFFER,

View File

@@ -2,7 +2,7 @@ use {wasm_bindgen::prelude::*, web_sys::WebGlFramebuffer};
use crate::webgl_ctx::WebGlRenderingCtx;
// Internal format used for the framebuffer final texture
use crate::texture::format::RGBA8U;
use crate::image::format::RGBA8U;
pub struct FrameBufferObject {
gl: WebGlContext,

View File

@@ -117,7 +117,7 @@ pub mod vao {
}
use web_sys::WebGl2RenderingContext;
impl<'a> ShaderVertexArrayObjectBound<'a, '_> {
impl<'a, 'b> ShaderVertexArrayObjectBound<'a, 'b> {
pub fn update_array<T: VertexAttribPointerType, B: BufferDataStorage<'a, T>>(
&mut self,
attr: &'static str,
@@ -162,7 +162,7 @@ pub mod vao {
}
}
impl Drop for ShaderVertexArrayObjectBound<'_, '_> {
impl<'a, 'b> Drop for ShaderVertexArrayObjectBound<'a, 'b> {
fn drop(&mut self) {
self.unbind();
}
@@ -173,7 +173,7 @@ pub mod vao {
_shader: &'b ShaderBound<'b>,
}
impl ShaderVertexArrayObjectBoundRef<'_, '_> {
impl<'a, 'b> ShaderVertexArrayObjectBoundRef<'a, 'b> {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) {
self.vao.gl.draw_arrays(mode, byte_offset, size);
}
@@ -211,7 +211,7 @@ pub mod vao {
}
}
impl Drop for ShaderVertexArrayObjectBoundRef<'_, '_> {
impl<'a, 'b> Drop for ShaderVertexArrayObjectBoundRef<'a, 'b> {
fn drop(&mut self) {
self.unbind();
}
@@ -362,7 +362,373 @@ pub mod vao {
}
}
impl Drop for VertexArrayObjectBound<'_> {
impl<'a> Drop for VertexArrayObjectBound<'a> {
fn drop(&mut self) {
self.unbind();
}
}
}
#[cfg(feature = "webgl1")]
pub mod vao {
use crate::object::array_buffer::ArrayBuffer;
use crate::object::array_buffer_instanced::ArrayBufferInstanced;
use crate::object::buffer_data::BufferDataStorage;
use crate::object::element_array_buffer::ElementArrayBuffer;
use crate::webgl_ctx::WebGlContext;
use crate::Abort;
use std::collections::HashMap;
pub struct VertexArrayObject {
array_buffer: HashMap<&'static str, ArrayBuffer>,
array_buffer_instanced: HashMap<&'static str, ArrayBufferInstanced>,
element_array_buffer: Option<ElementArrayBuffer>,
idx: u32, // Number of vertex attributes
gl: WebGlContext,
}
impl VertexArrayObject {
pub fn new(gl: &WebGlContext) -> VertexArrayObject {
let array_buffer = HashMap::new();
let array_buffer_instanced = HashMap::new();
let element_array_buffer = None;
let idx = 0;
let gl = gl.clone();
VertexArrayObject {
array_buffer,
array_buffer_instanced,
element_array_buffer,
idx,
gl,
}
}
// Shader has to be already bound before calling this
// This returns a ShaderVertexArrayObjectBound for which it is possible
        // to add some buffers and/or draw the buffers
pub fn bind<'a, 'b>(
&'a mut self,
_shader: &'b ShaderBound<'b>,
) -> ShaderVertexArrayObjectBound<'a, 'b> {
//self.gl.bind_vertex_array(Some(self.vao.as_ref()));
ShaderVertexArrayObjectBound { vao: self, _shader }
}
// Shader has to be already bound before calling this
// This returns a ShaderVertexArrayObjectBound for which it is possible
        // to add some buffers and/or draw the buffers
pub fn bind_ref<'a, 'b>(
&'a self,
shader: &'b ShaderBound<'b>,
) -> ShaderVertexArrayObjectBoundRef<'a, 'b> {
//self.gl.bind_vertex_array(Some(self.vao.as_ref()));
ShaderVertexArrayObjectBoundRef { vao: self, shader }
}
// No need to bind a shader here
// This returns a VertexArrayObjectBound for which it is only possible to
// update the buffers
pub fn bind_for_update<'a>(&'a mut self) -> VertexArrayObjectBound<'a> {
//self.gl.bind_vertex_array(Some(self.vao.as_ref()));
VertexArrayObjectBound { vao: self }
}
/*pub fn bind_ref(&self) {
self.gl.bind_vertex_array(Some(self.vao.as_ref()));
}*/
pub fn num_elements(&self) -> usize {
self.element_array_buffer
.as_ref()
.unwrap_abort()
.num_elements()
}
pub fn num_instances(&self) -> i32 {
self.array_buffer_instanced
.values()
.next()
.unwrap_abort()
.num_instances()
}
}
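A hedged usage sketch of the three binding paths documented above; the attribute name, the literal data, and the assumption that plain slices implement BufferDataStorage / VertexAttribPointerType are illustrative and not taken from the patch:

    fn build_and_draw(vao: &mut VertexArrayObject, shader: &ShaderBound<'_>) {
        // Setup/update path: no shader needed, only buffer creation.
        vao.bind_for_update()
            .add_array_buffer(2, "ndc_pos", WebGlRenderingCtx::STATIC_DRAW, &[-1.0_f32, -1.0, 1.0, -1.0, 0.0, 1.0][..])
            .add_element_buffer(WebGlRenderingCtx::STATIC_DRAW, &[0_u16, 1, 2][..]);

        // Draw path: the shader must already be bound when bind_ref is called.
        vao.bind_ref(shader)
            .draw_elements_with_i32(WebGlRenderingCtx::TRIANGLES, None, WebGlRenderingCtx::UNSIGNED_SHORT, 0);
    }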
impl Drop for VertexArrayObject {
fn drop(&mut self) {
//self.unbind();
//self.gl.delete_vertex_array(Some(self.vao.as_ref()));
}
}
use crate::shader::ShaderBound;
pub struct ShaderVertexArrayObjectBound<'a, 'b> {
vao: &'a mut VertexArrayObject,
_shader: &'b ShaderBound<'b>,
}
use crate::VertexAttribPointerType;
impl<'a, 'b> ShaderVertexArrayObjectBound<'a, 'b> {
pub fn update_array<T: VertexAttribPointerType, B: BufferDataStorage<'a, T>>(
&mut self,
attr: &'static str,
usage: u32,
array_data: B,
) -> &mut Self {
self.vao
.array_buffer
.get_mut(attr)
.unwrap_abort()
.update(usage, array_data);
self
}
pub fn update_element_array<T: VertexAttribPointerType, B: BufferDataStorage<'a, T>>(
&mut self,
usage: u32,
element_data: B,
) -> &mut Self {
if let Some(ref mut element_array_buffer) = self.vao.element_array_buffer {
element_array_buffer.update(usage, element_data);
}
self
}
pub fn update_instanced_array<B: BufferDataStorage<'a, f32>>(
&mut self,
attr: &'static str,
array_data: B,
) -> &mut Self {
self.vao
.array_buffer_instanced
.get_mut(attr)
.unwrap_abort()
.update(array_data);
self
}
pub fn unbind(&self) {
//self.vao.gl.bind_vertex_array(None);
}
}
impl<'a, 'b> Drop for ShaderVertexArrayObjectBound<'a, 'b> {
fn drop(&mut self) {
self.unbind();
}
}
use crate::webgl_ctx::WebGlRenderingCtx;
pub struct ShaderVertexArrayObjectBoundRef<'a, 'b> {
vao: &'a VertexArrayObject,
shader: &'b ShaderBound<'b>,
}
use crate::object::array_buffer::VertexBufferObject;
impl<'a, 'b> ShaderVertexArrayObjectBoundRef<'a, 'b> {
pub fn draw_arrays(&self, mode: u32, byte_offset: i32, size: i32) {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
}
self.vao.gl.draw_arrays(mode, byte_offset, size);
}
pub fn draw_elements_with_i32(
&self,
mode: u32,
num_elements: Option<i32>,
type_: u32,
byte_offset: i32,
) {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
}
let e = self.vao.element_array_buffer.as_ref().unwrap_abort();
e.bind();
let num_elements = num_elements.unwrap_or(self.vao.num_elements() as i32);
self.vao
.gl
.draw_elements_with_i32(mode, num_elements, type_, byte_offset);
}
pub fn draw_elements_instanced_with_i32(
&self,
mode: u32,
offset_element_idx: i32,
num_instances: i32,
) {
for (attr, buf) in self.vao.array_buffer.iter() {
buf.bind();
buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
}
for (attr, inst_buf) in self.vao.array_buffer_instanced.iter() {
inst_buf.bind();
inst_buf.set_vertex_attrib_pointer_by_name::<f32>(self.shader, attr);
}
let e = self.vao.element_array_buffer.as_ref().unwrap_abort();
e.bind();
self.vao
.gl
.ext
.angles
.draw_elements_instanced_angle_with_i32(
mode,
self.vao.num_elements() as i32,
WebGlRenderingCtx::UNSIGNED_SHORT,
offset_element_idx,
num_instances,
);
}
pub fn unbind(&self) {
//self.vao.gl.bind_vertex_array(None);
}
}
impl<'a, 'b> Drop for ShaderVertexArrayObjectBoundRef<'a, 'b> {
fn drop(&mut self) {
self.unbind();
}
}
// Struct defined when only the Vertex Array Object is
// defined
pub struct VertexArrayObjectBound<'a> {
vao: &'a mut VertexArrayObject,
}
impl<'a> VertexArrayObjectBound<'a> {
/// Precondition: self must be bound
pub fn add_array_buffer<T: VertexAttribPointerType, B: BufferDataStorage<'a, T>>(
&mut self,
size: usize,
attr: &'static str,
usage: u32,
data: B,
) -> &mut Self {
let array_buffer =
ArrayBuffer::new(&self.vao.gl, self.vao.idx, 0, &[size], &[0], usage, data);
// Update the number of vertex attrib
self.vao.idx += 1;
self.vao.array_buffer.insert(attr, array_buffer);
self
}
/// Precondition: self must be bound
pub fn add_instanced_array_buffer<B: BufferDataStorage<'a, f32>>(
&mut self,
size: usize,
attr: &'static str,
usage: u32,
data: B,
) -> &mut Self {
let array_buffer = ArrayBufferInstanced::new(
&self.vao.gl,
self.vao.idx,
0,
&[size],
&[0],
usage,
data,
);
// Update the number of vertex attrib
self.vao.idx += 1;
self.vao.array_buffer_instanced.insert(attr, array_buffer);
self
}
/// Precondition: self must be bound
pub fn add_element_buffer<T: VertexAttribPointerType, B: BufferDataStorage<'a, T>>(
&mut self,
usage: u32,
data: B,
) -> &mut Self {
let element_buffer = ElementArrayBuffer::new(&self.vao.gl, usage, data);
self.vao.element_array_buffer = Some(element_buffer);
self
}
pub fn update_array<T: VertexAttribPointerType, B: BufferDataStorage<'a, T>>(
&mut self,
attr: &'static str,
usage: u32,
array_data: B,
) -> &mut Self {
self.vao
.array_buffer
.get_mut(attr)
.expect("cannot get attribute from the array buffer")
.update(usage, array_data);
self
}
pub fn update_element_array<T: VertexAttribPointerType, B: BufferDataStorage<'a, T>>(
&mut self,
usage: u32,
element_data: B,
) -> &mut Self {
if let Some(ref mut element_array_buffer) = self.vao.element_array_buffer {
element_array_buffer.update(usage, element_data);
}
self
}
pub fn update_instanced_array<B: BufferDataStorage<'a, f32>>(
&mut self,
attr: &'static str,
usage: u32,
array_data: B,
) -> &mut Self {
self.vao
.array_buffer_instanced
.get_mut(attr)
.expect("cannot get attribute from the array buffer")
.update(usage, array_data);
self
}
/*pub fn append_to_instanced_array<B: BufferDataStorage<'a, f32>>(
&mut self,
idx: usize,
buffer: B,
) -> &mut Self {
self.vao.array_buffer_instanced[idx].append(buffer);
self
}*/
pub fn unbind(&self) {
//self.vao.gl.bind_vertex_array(None);
}
}
impl<'a> Drop for VertexArrayObjectBound<'a> {
fn drop(&mut self) {
self.unbind();
}

View File

@@ -112,7 +112,7 @@ impl Shader {
pub trait UniformType {
fn uniform(gl: &WebGlContext, location: Option<&WebGlUniformLocation>, value: &Self);
fn attach_uniform(name: &str, value: &Self, shader: &ShaderBound<'_>) {
fn attach_uniform<'a>(name: &str, value: &Self, shader: &ShaderBound<'a>) {
let location = shader.get_uniform_location(name);
Self::uniform(&shader.gl, location, value);
}
@@ -256,7 +256,7 @@ impl UniformType for ColorRGB {
gl.uniform3f(location, value.r, value.g, value.b);
}
}
impl UniformType for &ColorRGB {
impl<'a> UniformType for &'a ColorRGB {
fn uniform(gl: &WebGlContext, location: Option<&WebGlUniformLocation>, value: &Self) {
gl.uniform3f(location, value.r, value.g, value.b);
}
@@ -268,7 +268,7 @@ impl UniformType for ColorRGBA {
gl.uniform4f(location, value.r, value.g, value.b, value.a);
}
}
impl UniformType for &ColorRGBA {
impl<'a> UniformType for &'a ColorRGBA {
fn uniform(gl: &WebGlContext, location: Option<&WebGlUniformLocation>, value: &Self) {
gl.uniform4f(location, value.r, value.g, value.b, value.a);
}
@@ -328,9 +328,8 @@ impl SendUniformsWithParams<Colormaps> for HiPSColor {
) -> &'a ShaderBound<'a> {
let reversed = self.reversed as u8 as f32;
let cmap = cmaps.get(self.cmap_name.as_ref());
let cmap = cmaps.get(&self.cmap_name.as_ref());
shader
.attach_uniforms_from(cmaps)
.attach_uniforms_with_params_from(cmap, cmaps)
.attach_uniform("H", &self.stretch)
.attach_uniform("min_value", &self.min_cut.unwrap_or(0.0))
@@ -408,7 +407,7 @@ impl<'a> ShaderBound<'a> {
}
}
impl Drop for ShaderBound<'_> {
impl<'a> Drop for ShaderBound<'a> {
fn drop(&mut self) {
self.unbind(&self.gl);
}

View File

@@ -1,4 +1,4 @@
use crate::texture::format::TextureFormat;
use crate::image::format::ImageFormat;
use web_sys::HtmlCanvasElement;
use web_sys::WebGlTexture;
@@ -23,7 +23,7 @@ pub struct Texture3D {
}
impl Texture3D {
pub fn create_empty<F: TextureFormat>(
pub fn create_empty<F: ImageFormat>(
gl: &WebGlContext,
// The weight of the individual textures
width: i32,
@@ -54,9 +54,9 @@ impl Texture3D {
let metadata = Some(Rc::new(RefCell::new(Texture2DMeta {
width: width as u32,
height: height as u32,
internal_format: F::INTERNAL_FORMAT,
format: F::FORMAT,
ty: F::TYPE,
pixel_type: F::PIXEL_TYPE,
type_: F::TYPE,
})));
Ok(Texture3D {
@@ -71,7 +71,7 @@ impl Texture3D {
self.gl.generate_mipmap(WebGlRenderingCtx::TEXTURE_3D);
}
pub fn bind(&self) -> Texture3DBound<'_> {
pub fn bind(&self) -> Texture3DBound {
self.gl
.bind_texture(WebGlRenderingCtx::TEXTURE_3D, self.texture.as_ref());
@@ -113,7 +113,7 @@ pub struct Texture3DBound<'a> {
tex: &'a Texture3D,
}
impl Texture3DBound<'_> {
impl<'a> Texture3DBound<'a> {
pub fn tex_sub_image_3d_with_html_image_element(
&self,
dx: i32,
@@ -135,7 +135,7 @@ impl Texture3DBound<'_> {
image.height() as i32,
1,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 3d");
@@ -162,7 +162,7 @@ impl Texture3DBound<'_> {
canvas.height() as i32,
1,
metadata.format,
metadata.ty,
metadata.type_,
canvas,
)
.expect("Sub texture 2d");
@@ -189,13 +189,12 @@ impl Texture3DBound<'_> {
image.height() as i32,
1,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
}
#[allow(clippy::too_many_arguments)]
pub fn tex_sub_image_3d_with_opt_array_buffer_view(
&self,
dx: i32,
@@ -220,14 +219,13 @@ impl Texture3DBound<'_> {
h,
d,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
}
#[allow(dead_code)]
#[allow(clippy::too_many_arguments)]
pub fn tex_sub_image_3d_with_opt_u8_array(
&self,
idx: i32,
@@ -251,7 +249,7 @@ impl Texture3DBound<'_> {
h,
d,
metadata.format,
metadata.ty,
metadata.type_,
pixels,
)
.expect("Sub texture 2d");

View File

@@ -1,5 +1,4 @@
use crate::texture::format::PixelType;
use crate::texture::format::TextureFormat;
use crate::image::format::ImageFormat;
use web_sys::HtmlCanvasElement;
use web_sys::WebGlTexture;
@@ -22,7 +21,7 @@ pub struct Texture2DArray {
}
impl Texture2DArray {
pub fn create_empty<F: TextureFormat>(
pub fn create_empty<F: ImageFormat>(
gl: &WebGlContext,
// The weight of the individual textures
width: i32,
@@ -53,9 +52,9 @@ impl Texture2DArray {
let metadata = Some(Rc::new(RefCell::new(Texture2DMeta {
width: width as u32,
height: height as u32,
pixel_type: F::PIXEL_TYPE,
ty: F::TYPE,
internal_format: F::INTERNAL_FORMAT,
format: F::FORMAT,
type_: F::TYPE,
})));
Ok(Texture2DArray {
@@ -70,7 +69,7 @@ impl Texture2DArray {
self.gl.generate_mipmap(WebGlRenderingCtx::TEXTURE_2D_ARRAY);
}
pub fn bind(&self) -> Texture2DArrayBound<'_> {
pub fn bind(&self) -> Texture2DArrayBound {
self.gl
.bind_texture(WebGlRenderingCtx::TEXTURE_2D_ARRAY, self.texture.as_ref());
@@ -115,31 +114,35 @@ impl Texture2DArray {
self.gl
.viewport(0, 0, metadata.width as i32, metadata.height as i32);
let value = match metadata.pixel_type {
PixelType::R8U => {
#[cfg(feature = "webgl2")]
let value = match (metadata.format, metadata.type_) {
(WebGlRenderingCtx::RED_INTEGER, WebGlRenderingCtx::UNSIGNED_BYTE) => {
let p = <[u8; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::R16I => {
(WebGlRenderingCtx::RED_INTEGER, WebGlRenderingCtx::SHORT) => {
let p = <[i16; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::R32I => {
(WebGlRenderingCtx::RED_INTEGER, WebGlRenderingCtx::INT) => {
let p = <[i32; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::R32F => {
(WebGlRenderingCtx::RED, WebGlRenderingCtx::FLOAT) => {
let p = <[f32; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::RGB8U => {
(WebGlRenderingCtx::RGB, WebGlRenderingCtx::UNSIGNED_BYTE) => {
let p = <[u8; 3]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p)?)
}
PixelType::RGBA8U => {
(WebGlRenderingCtx::RGBA, WebGlRenderingCtx::UNSIGNED_BYTE) => {
let p = <[u8; 4]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p)?)
}
_ => Err(JsValue::from_str(
"Pixel retrieval not implemented for that texture format.",
)),
};
// Unbind the framebuffer
@@ -191,7 +194,7 @@ pub struct Texture2DArrayBound<'a> {
tex: &'a Texture2DArray,
}
impl Texture2DArrayBound<'_> {
impl<'a> Texture2DArrayBound<'a> {
pub fn tex_sub_image_3d_with_html_image_element(
&self,
dx: i32,
@@ -213,7 +216,7 @@ impl Texture2DArrayBound<'_> {
image.height() as i32,
1,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 3d");
@@ -240,7 +243,7 @@ impl Texture2DArrayBound<'_> {
canvas.height() as i32,
1,
metadata.format,
metadata.ty,
metadata.type_,
canvas,
)
.expect("Sub texture 2d");
@@ -267,7 +270,7 @@ impl Texture2DArrayBound<'_> {
image.height() as i32,
1,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
@@ -296,7 +299,7 @@ impl Texture2DArrayBound<'_> {
h,
1,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
@@ -325,7 +328,7 @@ impl Texture2DArrayBound<'_> {
h,
1,
metadata.format,
metadata.ty,
metadata.type_,
pixels,
)
.expect("Sub texture 2d");

View File

@@ -1,204 +0,0 @@
use crate::texture::pixel::Pixel;
pub type Bytes<'a> = std::borrow::Cow<'a, [u8]>;
pub trait TextureFormat {
type P: Pixel;
type ArrayBufferView: AsRef<js_sys::Object>;
const NUM_CHANNELS: usize;
const FORMAT: u32;
const INTERNAL_FORMAT: i32;
const TYPE: u32;
const PIXEL_TYPE: PixelType;
/// Creates a JS typed array which is a view into wasm's linear memory at the slice specified.
/// This function returns a new typed array which is a view into wasm's memory. This view does not copy the underlying data.
///
/// # Safety
///
/// Views into WebAssembly memory are only valid so long as the backing buffer isn't resized in JS. Once this function is called any future calls to Box::new (or malloc of any form) may cause the returned value here to be invalidated. Use with caution!
///
/// Additionally the returned object can be safely mutated but the input slice isn't guaranteed to be mutable.
///
/// Finally, the returned object is disconnected from the input slice's lifetime, so there's no guarantee that the data is read at the right time.
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str>;
}
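A minimal sketch of the contract described in the doc comment above; the 16x16 RGBA dimensions and the upload_rgba name are illustrative assumptions, not code from the patch:

    use wasm_bindgen::JsValue;
    use web_sys::WebGl2RenderingContext;

    // The typed array returned by view() aliases wasm linear memory, so it must be
    // consumed right away: any allocation between creating the view and handing it
    // to WebGL may move the backing buffer and invalidate the view.
    fn upload_rgba(gl: &WebGl2RenderingContext, pixels: &[u8]) -> Result<(), JsValue> {
        // Assumes pixels.len() == 16 * 16 * 4 and that a 16x16 RGBA8 texture is bound.
        let view = unsafe { js_sys::Uint8Array::view(pixels) };
        gl.tex_sub_image_2d_with_i32_and_i32_and_u32_and_type_and_opt_array_buffer_view(
            WebGl2RenderingContext::TEXTURE_2D,
            0,  // mipmap level
            0,  // x offset
            0,  // y offset
            16, // width
            16, // height
            WebGl2RenderingContext::RGBA,
            WebGl2RenderingContext::UNSIGNED_BYTE,
            Some(&view),
        )
    }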
use crate::webgl_ctx::WebGlRenderingCtx;
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct RGB8U;
impl TextureFormat for RGB8U {
type P = [u8; 3];
const NUM_CHANNELS: usize = 3;
const FORMAT: u32 = WebGlRenderingCtx::RGB;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGB8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const PIXEL_TYPE: PixelType = PixelType::RGB8U;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
let mut decoder = jpeg::Decoder::new(raw_bytes);
let bytes = decoder
.decode()
.map_err(|_| "Cannot decoder jpeg. This image may not be compressed.")?;
Ok(Bytes::Owned(bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct RGBA8U;
impl TextureFormat for RGBA8U {
type P = [u8; 4];
const NUM_CHANNELS: usize = 4;
const FORMAT: u32 = WebGlRenderingCtx::RGBA;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const PIXEL_TYPE: PixelType = PixelType::RGBA8U;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
let mut decoder = jpeg::Decoder::new(raw_bytes);
let bytes = decoder
.decode()
.map_err(|_| "Cannot decoder png. This image may not be compressed.")?;
Ok(Bytes::Owned(bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R32F;
impl TextureFormat for R32F {
type P = [u8; 4];
const NUM_CHANNELS: usize = 4;
const FORMAT: u32 = WebGlRenderingCtx::RGBA;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const PIXEL_TYPE: PixelType = PixelType::R32F;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R8U;
impl TextureFormat for R8U {
type P = [u8; 1];
const FORMAT: u32 = WebGlRenderingCtx::RED;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::R8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const NUM_CHANNELS: usize = 1;
const PIXEL_TYPE: PixelType = PixelType::R8U;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R16I;
impl TextureFormat for R16I {
type P = [u8; 2];
const NUM_CHANNELS: usize = 2;
const FORMAT: u32 = WebGlRenderingCtx::RG;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RG8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const PIXEL_TYPE: PixelType = PixelType::R16I;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub struct R32I;
impl TextureFormat for R32I {
type P = [u8; 4];
const FORMAT: u32 = WebGlRenderingCtx::RGBA;
const INTERNAL_FORMAT: i32 = WebGlRenderingCtx::RGBA8 as i32;
const TYPE: u32 = WebGlRenderingCtx::UNSIGNED_BYTE;
const NUM_CHANNELS: usize = 4;
const PIXEL_TYPE: PixelType = PixelType::R32I;
fn decode(raw_bytes: &[u8]) -> Result<Bytes<'_>, &'static str> {
Ok(Bytes::Borrowed(raw_bytes))
}
type ArrayBufferView = js_sys::Uint8Array;
unsafe fn view(s: &[<Self::P as Pixel>::Item]) -> Self::ArrayBufferView {
Self::ArrayBufferView::view(s)
}
}
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub enum PixelType {
R8U,
R16I,
R32I,
R32F,
RGB8U,
RGBA8U,
}
impl PixelType {
pub const fn num_channels(&self) -> usize {
match self {
Self::RGB8U => 3,
Self::RGBA8U => 4,
_ => 1,
}
}
}
pub const NUM_CHANNELS: usize = 6;

View File

@@ -1,7 +1,6 @@
pub mod array;
pub use array::Texture2DArray;
pub mod format;
pub mod pixel;
pub use pixel::*;
@@ -12,7 +11,6 @@ pub use mod_3d::Texture3D;
use web_sys::HtmlCanvasElement;
use web_sys::WebGlTexture;
use crate::texture::format::PixelType;
use crate::webgl_ctx::WebGlContext;
use crate::webgl_ctx::WebGlRenderingCtx;
use wasm_bindgen::prelude::*;
@@ -25,8 +23,8 @@ pub static mut CUR_IDX_TEX_UNIT: u8 = 0;
#[allow(dead_code)]
pub struct Texture2DMeta {
pub format: u32,
pub ty: u32,
pub pixel_type: PixelType,
pub internal_format: i32,
pub type_: u32,
pub width: u32,
pub height: u32,
@@ -47,13 +45,13 @@ pub enum SamplerType {
Unsigned,
}
use crate::texture::format::TextureFormat;
use crate::image::format::ImageFormat;
//use super::pixel::PixelType;
use std::cell::RefCell;
use std::path::Path;
use std::rc::Rc;
impl Texture2D {
pub fn create_from_path<P: AsRef<Path>, F: TextureFormat>(
pub fn create_from_path<P: AsRef<Path>, F: ImageFormat>(
gl: &WebGlContext,
name: &'static str,
src: &P,
@@ -61,11 +59,12 @@ impl Texture2D {
) -> Result<Texture2D, JsValue> {
let image = HtmlImageElement::new().unwrap_abort();
#[cfg(feature = "webgl2")]
let texture = gl.create_texture();
let onerror = {
Closure::wrap(Box::new(move || {
println!("Cannot load texture located at: {name:?}");
println!("Cannot load texture located at: {:?}", name);
}) as Box<dyn Fn()>)
};
@@ -73,13 +72,14 @@ impl Texture2D {
let height = image.height();
let metadata = Rc::new(RefCell::new(Texture2DMeta {
width,
height,
width: width,
height: height,
internal_format: F::INTERNAL_FORMAT,
format: F::FORMAT,
ty: F::TYPE,
pixel_type: F::PIXEL_TYPE,
type_: F::TYPE,
}));
#[cfg(feature = "webgl2")]
let onload = {
let image = image.clone();
let gl = gl.clone();
@@ -129,6 +129,7 @@ impl Texture2D {
let gl = gl.clone();
Ok(Texture2D {
#[cfg(feature = "webgl2")]
texture,
gl,
@@ -137,7 +138,7 @@ impl Texture2D {
})
}
pub fn create_from_raw_pixels<F: TextureFormat>(
pub fn create_from_raw_pixels<F: ImageFormat>(
gl: &WebGlContext,
width: i32,
height: i32,
@@ -162,12 +163,12 @@ impl Texture2D {
Ok(texture)
}
pub fn create_from_raw_bytes<F: TextureFormat>(
pub fn create_from_raw_bytes<F: ImageFormat>(
gl: &WebGlContext,
width: i32,
height: i32,
tex_params: &'static [(u32, u32)],
bytes: &[u8],
bytes: Option<&[u8]>,
) -> Result<Texture2D, JsValue> {
let texture = gl.create_texture();
@@ -184,14 +185,7 @@ impl Texture2D {
width,
height,
);
let view = unsafe {
let len = bytes.len() / (std::mem::size_of::<<F::P as Pixel>::Item>());
let pixels =
std::slice::from_raw_parts(bytes.as_ptr() as *const <F::P as Pixel>::Item, len);
F::view(pixels)
};
gl.tex_sub_image_2d_with_i32_and_i32_and_u32_and_type_and_opt_array_buffer_view(
gl.tex_sub_image_2d_with_i32_and_i32_and_u32_and_type_and_opt_u8_array(
WebGlRenderingCtx::TEXTURE_2D,
0,
0,
@@ -200,7 +194,7 @@ impl Texture2D {
height,
F::FORMAT,
F::TYPE,
Some(view.as_ref()),
bytes,
)
.expect("Texture 2D");
@@ -208,9 +202,9 @@ impl Texture2D {
let metadata = Some(Rc::new(RefCell::new(Texture2DMeta {
width: width as u32,
height: height as u32,
internal_format: F::INTERNAL_FORMAT,
format: F::FORMAT,
ty: F::TYPE,
pixel_type: F::PIXEL_TYPE,
type_: F::TYPE,
})));
Ok(Texture2D {
@@ -222,7 +216,7 @@ impl Texture2D {
})
}
pub fn create_empty_with_format<F: TextureFormat>(
pub fn create_empty_with_format<F: ImageFormat>(
gl: &WebGlContext,
width: i32,
height: i32,
@@ -248,14 +242,15 @@ impl Texture2D {
let metadata = Some(Rc::new(RefCell::new(Texture2DMeta {
width: width as u32,
height: height as u32,
internal_format: F::INTERNAL_FORMAT,
format: F::FORMAT,
ty: F::TYPE,
pixel_type: F::PIXEL_TYPE,
type_: F::TYPE,
})));
Ok(Texture2D {
texture,
gl,
metadata,
})
}
@@ -295,7 +290,7 @@ impl Texture2D {
self
}
pub fn bind(&self) -> Texture2DBound<'_> {
pub fn bind(&self) -> Texture2DBound {
self.gl
.bind_texture(WebGlRenderingCtx::TEXTURE_2D, self.texture.as_ref());
@@ -335,31 +330,35 @@ impl Texture2D {
self.gl
.viewport(0, 0, metadata.width as i32, metadata.height as i32);
let value = match metadata.pixel_type {
PixelType::R8U => {
#[cfg(feature = "webgl2")]
let value = match (metadata.format, metadata.type_) {
(WebGlRenderingCtx::RED_INTEGER, WebGlRenderingCtx::UNSIGNED_BYTE) => {
let p = <[u8; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::R16I => {
(WebGlRenderingCtx::RED_INTEGER, WebGlRenderingCtx::SHORT) => {
let p = <[i16; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::R32I => {
(WebGlRenderingCtx::RED_INTEGER, WebGlRenderingCtx::INT) => {
let p = <[i32; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::R32F => {
(WebGlRenderingCtx::RED, WebGlRenderingCtx::FLOAT) => {
let p = <[f32; 1]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p[0])?)
}
PixelType::RGB8U => {
(WebGlRenderingCtx::RGB, WebGlRenderingCtx::UNSIGNED_BYTE) => {
let p = <[u8; 3]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p)?)
}
PixelType::RGBA8U => {
(WebGlRenderingCtx::RGBA, WebGlRenderingCtx::UNSIGNED_BYTE) => {
let p = <[u8; 4]>::read_pixel(&self.gl, x, y)?;
Ok(serde_wasm_bindgen::to_value(&p)?)
}
_ => Err(JsValue::from_str(
"Pixel retrieval not implemented for that texture format.",
)),
};
// Unbind the framebuffer
@@ -400,7 +399,7 @@ pub struct Texture2DBound<'a> {
texture_2d: &'a Texture2D,
}
impl Texture2DBound<'_> {
impl<'a> Texture2DBound<'a> {
pub fn tex_sub_image_2d_with_u32_and_u32_and_html_image_element(
&self,
dx: i32,
@@ -418,10 +417,24 @@ impl Texture2DBound<'_> {
dx,
dy,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
#[cfg(feature = "webgl1")]
self.texture_2d
.gl
.tex_sub_image_2d_with_u32_and_u32_and_image(
WebGlRenderingCtx::TEXTURE_2D,
0,
dx,
dy,
metadata.format,
metadata.type_,
image,
)
.expect("Sub texture 2d");
//self.texture_2d.gl.flush();
}
pub fn tex_sub_image_2d_with_u32_and_u32_and_html_canvas_element(
@@ -441,10 +454,24 @@ impl Texture2DBound<'_> {
dx,
dy,
metadata.format,
metadata.ty,
metadata.type_,
canvas,
)
.expect("Sub texture 2d");
#[cfg(feature = "webgl1")]
self.texture_2d
.gl
.tex_sub_image_2d_with_u32_and_u32_and_canvas(
WebGlRenderingCtx::TEXTURE_2D,
0,
dx,
dy,
metadata.format,
metadata.type_,
canvas,
)
.expect("Sub texture 2d");
//self.texture_2d.gl.flush();
}
pub fn tex_sub_image_2d_with_u32_and_u32_and_image_bitmap(
@@ -464,7 +491,7 @@ impl Texture2DBound<'_> {
dx,
dy,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
@@ -477,7 +504,7 @@ impl Texture2DBound<'_> {
dx,
dy,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
@@ -503,7 +530,7 @@ impl Texture2DBound<'_> {
width,
height,
metadata.format,
metadata.ty,
metadata.type_,
image,
)
.expect("Sub texture 2d");
@@ -529,7 +556,7 @@ impl Texture2DBound<'_> {
width,
height,
metadata.format,
metadata.ty,
metadata.type_,
pixels,
)
.expect("Sub texture 2d");
@@ -561,7 +588,6 @@ pub trait Tex3D {
image: &web_sys::ImageBitmap,
);
#[allow(clippy::too_many_arguments)]
fn tex_sub_image_3d_with_opt_array_buffer_view(
&self,
dx: i32,
@@ -573,7 +599,6 @@ pub trait Tex3D {
view: Option<&js_sys::Object>,
);
#[allow(clippy::too_many_arguments)]
fn tex_sub_image_3d_with_opt_u8_array(
&self,
dx: i32,

View File

@@ -21,32 +21,133 @@ pub trait Pixel:
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue>;
}
impl Pixel for [f32; 1] {
impl Pixel for [f32; 4] {
type Item = f32;
type Container = ArrayF32;
const BLACK: Self = [f32::NAN];
const BLACK: Self = [std::f32::NAN; 4];
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue> {
let p = js_sys::Uint8Array::new_with_length(4);
let pixels = js_sys::Float32Array::new_with_length(4);
#[cfg(feature = "webgl2")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RGBA32F,
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
#[cfg(feature = "webgl1")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RGBA,
WebGlRenderingCtx::UNSIGNED_BYTE,
Some(&p),
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
Ok([f32::from_le_bytes([
p.at(0).unwrap(),
p.at(1).unwrap(),
p.at(2).unwrap(),
p.at(3).unwrap(),
])])
let pixels = pixels.to_vec();
Ok([pixels[0], pixels[1], pixels[2], pixels[3]])
}
}
impl Pixel for [f32; 3] {
type Item = f32;
type Container = ArrayF32;
const BLACK: Self = [std::f32::NAN; 3];
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue> {
let pixels = js_sys::Float32Array::new_with_length(3);
#[cfg(feature = "webgl2")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RGB32F,
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
#[cfg(feature = "webgl1")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RGB,
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
let pixels = pixels.to_vec();
Ok([pixels[0], pixels[1], pixels[2]])
}
}
impl Pixel for [f32; 1] {
type Item = f32;
type Container = ArrayF32;
const BLACK: Self = [std::f32::NAN];
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue> {
let pixels = js_sys::Float32Array::new_with_length(1);
#[cfg(feature = "webgl2")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RED,
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
#[cfg(feature = "webgl1")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::LUMINANCE_ALPHA,
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
Ok([pixels.to_vec()[0]])
}
}
/*use crate::image::ArrayF64;
impl Pixel for [f64; 1] {
type Item = f64;
type Container = ArrayF64;
const BLACK: Self = [std::f64::NAN];
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue> {
let pixels = js_sys::Float32Array::new_with_length(1);
#[cfg(feature = "webgl2")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RED,
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
#[cfg(feature = "webgl1")]
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::LUMINANCE_ALPHA,
WebGlRenderingCtx::FLOAT,
Some(&pixels),
)?;
Ok([pixels.to_vec()[0] as f64])
}
}*/
impl Pixel for [u8; 4] {
type Item = u8;
type Container = ArrayU8;
@@ -88,27 +189,7 @@ impl Pixel for [u8; 3] {
Ok([pixels[0], pixels[1], pixels[2]])
}
}
impl Pixel for [u8; 2] {
type Item = u8;
type Container = ArrayU8;
const BLACK: Self = [0, 0];
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue> {
let pixels = js_sys::Uint8Array::new_with_length(2);
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RG,
WebGlRenderingCtx::UNSIGNED_BYTE,
Some(&pixels),
)?;
let pixels = pixels.to_vec();
Ok([pixels[0], pixels[1]])
}
}
#[cfg(feature = "webgl2")]
impl Pixel for [u8; 1] {
type Item = u8;
type Container = ArrayU8;
@@ -129,50 +210,45 @@ impl Pixel for [u8; 1] {
Ok([pixels.to_vec()[0]])
}
}
#[cfg(feature = "webgl2")]
impl Pixel for [i16; 1] {
type Item = i16;
type Container = ArrayI16;
const BLACK: Self = [i16::MIN];
const BLACK: Self = [std::i16::MIN];
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue> {
let p = js_sys::Uint8Array::new_with_length(2);
let pixels = js_sys::Int16Array::new_with_length(1);
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RG,
WebGlRenderingCtx::UNSIGNED_BYTE,
Some(&p),
WebGlRenderingCtx::RED_INTEGER,
WebGlRenderingCtx::SHORT,
Some(&pixels),
)?;
Ok([i16::from_le_bytes([p.at(0).unwrap(), p.at(1).unwrap()])])
Ok([pixels.to_vec()[0]])
}
}
#[cfg(feature = "webgl2")]
impl Pixel for [i32; 1] {
type Item = i32;
type Container = ArrayI32;
const BLACK: Self = [i32::MIN];
const BLACK: Self = [std::i32::MIN];
fn read_pixel(gl: &WebGlContext, x: i32, y: i32) -> Result<Self, JsValue> {
let p = js_sys::Uint8Array::new_with_length(4);
let pixels = js_sys::Int32Array::new_with_length(1);
gl.read_pixels_with_opt_array_buffer_view(
x,
y,
1,
1,
WebGlRenderingCtx::RGBA,
WebGlRenderingCtx::UNSIGNED_BYTE,
Some(&p),
WebGlRenderingCtx::RED_INTEGER,
WebGlRenderingCtx::INT,
Some(&pixels),
)?;
Ok([i32::from_le_bytes([
p.at(0).unwrap(),
p.at(1).unwrap(),
p.at(2).unwrap(),
p.at(3).unwrap(),
])])
Ok([pixels.to_vec()[0]])
}
}

View File

@@ -4,11 +4,17 @@ use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
use web_sys::HtmlElement;
#[cfg(feature = "webgl2")]
pub type WebGlRenderingCtx = web_sys::WebGl2RenderingContext;
#[cfg(feature = "webgl1")]
pub type WebGlRenderingCtx = web_sys::WebGlRenderingContext;
#[derive(Clone)]
pub struct WebGlContext {
inner: Rc<WebGlRenderingCtx>,
#[cfg(feature = "webgl1")]
pub ext: WebGlExt,
}
#[derive(Clone)]
@@ -49,12 +55,45 @@ impl WebGlContext {
#[cfg(feature = "webgl2")]
{
if let Ok(r) =
get_extension::<web_sys::ExtColorBufferFloat>(&gl, "EXT_color_buffer_float")
{
let _ = r;
}
let ctx = WebGlContext { inner: gl };
Ok(ctx)
}
#[cfg(feature = "webgl1")]
{
let angles_ext =
get_extension::<web_sys::AngleInstancedArrays>(&gl, "ANGLE_instanced_arrays")?;
let _ = get_extension::<web_sys::OesTextureFloat>(&gl, "OES_texture_float")?;
let _ = get_extension::<web_sys::ExtSRgb>(&gl, "EXT_sRGB")?;
Ok(WebGlContext {
inner: gl,
ext: WebGlExt { angles: angles_ext },
})
}
}
}
fn get_extension<T>(context: &WebGlRenderingCtx, name: &str) -> Result<T, JsValue>
where
T: wasm_bindgen::JsCast,
{
// `unchecked_into` is used here because WebGL extensions aren't actually JS classes
// these objects are duck-type representations of the actual Rust classes
// https://github.com/rustwasm/wasm-bindgen/pull/1449
context
.get_extension(name)
.ok()
.and_then(|maybe_ext| maybe_ext.map(|ext| ext.unchecked_into::<T>()))
.ok_or_else(|| JsValue::from_str("Failed to load ext"))
}
use std::ops::Deref;
impl Deref for WebGlContext {
type Target = WebGlRenderingCtx;

View File

@@ -23,7 +23,7 @@ fn generate_shaders() -> std::result::Result<(), Box<dyn Error>> {
.unwrap()
//.with_extension("")
.to_string_lossy()
.into_owned()
.to_owned()
.replace("/", "_")
.replace("\\", "_");
//let out_name = format!("{}/{}", OUT_PATH, out_file_name);
@@ -31,7 +31,8 @@ fn generate_shaders() -> std::result::Result<(), Box<dyn Error>> {
let src = read_shader(path)?;
shaders.insert(out_file_name, src);
println!("cargo:rerun-if-changed=src/shaders/{file_name}");
//fs::write(&out_name, result)?;
println!("cargo:rerun-if-changed=src/shaders/{}", file_name);
}
}
}
@@ -48,15 +49,13 @@ fn read_shader<P: AsRef<std::path::Path>>(path: P) -> std::io::Result<String> {
let shader_src = std::io::BufReader::new(file)
.lines()
.map_while(Result::ok)
.flatten()
.map(|l| {
if l.starts_with("#include") {
let incl_file_names: Vec<_> = l.split_terminator(&[';', ' '][..]).collect();
let incl_file_name_rel = incl_file_names[1];
let incl_file_name = path.parent().unwrap().join(incl_file_name_rel);
println!("{}", incl_file_name.to_string_lossy());
read_shader(incl_file_name.to_str().unwrap()).unwrap()
} else {
l
@@ -77,6 +76,7 @@ pub fn write(path: PathBuf, entries: HashMap<String, String>) -> Result<(), Box<
let mut all_the_files = File::create(&path)?;
writeln!(&mut all_the_files, r#"use std::collections::HashMap;"#,)?;
writeln!(&mut all_the_files, r#""#,)?;
writeln!(&mut all_the_files, r#"#[allow(dead_code)]"#,)?;
writeln!(
&mut all_the_files,
@@ -87,10 +87,7 @@ pub fn write(path: PathBuf, entries: HashMap<String, String>) -> Result<(), Box<
for (name, content) in entries {
writeln!(
&mut all_the_files,
r##" out.insert(
r"{name}",
r#"{content}"#,
);"##,
r##" out.insert(r"{name}", r#"{content}"#);"##,
)?;
}

View File

@@ -1,14 +1,10 @@
use crate::renderable::image::Image;
use crate::renderable::ImageLayer;
use crate::tile_fetcher::HiPSLocalFiles;
use al_core::image::fits::FitsImage;
use al_core::image::ImageType;
use fitsrs::WCS;
use std::io::Cursor;
use crate::math::angle::ToAngle;
use crate::renderable::hips::HiPS;
use crate::{
//async_task::{BuildCatalogIndex, ParseTableTask, TaskExecutor, TaskResult, TaskType},
camera::CameraViewPort,
downloader::Downloader,
healpix::coverage::HEALPixCoverage,
@@ -26,6 +22,7 @@ use crate::{
time::DeltaTime,
};
use al_api::moc::MOCOptions;
use wcs::WCS;
use wasm_bindgen::prelude::*;
@@ -38,6 +35,7 @@ use al_api::{
grid::GridCfg,
hips::{HiPSCfg, ImageMetadata},
};
use fitsrs::{fits::AsyncFits, hdu::extension::AsyncXtensionHDU};
use web_sys::{HtmlElement, WebGl2RenderingContext};
@@ -100,15 +98,17 @@ pub struct App {
pub projection: ProjectionType,
// Async data receivers
//img_send: async_channel::Sender<ImageLayer>,
img_send: async_channel::Sender<ImageLayer>,
img_recv: async_channel::Receiver<ImageLayer>,
ack_img_send: async_channel::Sender<ImageParams>,
//ack_img_recv: async_channel::Receiver<ImageParams>,
ack_img_recv: async_channel::Receiver<ImageParams>,
// callbacks
//callback_position_changed: js_sys::Function,
}
use cgmath::{Vector2, Vector3};
use futures::io::BufReader; // for `next`
use crate::math::projection::*;
pub const BLENDING_ANIM_DURATION: DeltaTime = DeltaTime::from_millis(200.0); // in ms
@@ -133,6 +133,7 @@ impl App {
//let exec = Rc::new(RefCell::new(TaskExecutor::new()));
let projection = ProjectionType::Sin(mapproj::zenithal::sin::Sin);
gl.enable(WebGl2RenderingContext::BLEND);
        // TODO: https://caniuse.com/?search=scissor is not supported on Safari <= 14.1
        // Once it is supported nearly everywhere, we will need to uncomment this line to
@@ -189,8 +190,8 @@ impl App {
let moc = MOCRenderer::new(&gl)?;
gl.clear_color(0.1, 0.1, 0.1, 1.0);
let (_, img_recv) = async_channel::unbounded::<ImageLayer>();
let (ack_img_send, _) = async_channel::unbounded::<ImageParams>();
let (img_send, img_recv) = async_channel::unbounded::<ImageLayer>();
let (ack_img_send, ack_img_recv) = async_channel::unbounded::<ImageParams>();
//let line_renderer = RasterizedLineRenderer::new(&gl)?;
@@ -247,10 +248,10 @@ impl App {
colormaps,
projection,
//img_send,
img_send,
img_recv,
ack_img_send,
//ack_img_recv,
ack_img_recv,
})
}
@@ -259,7 +260,7 @@ impl App {
self.tile_fetcher.clear();
// Loop over the hipss
for hips in self.layers.get_mut_hipses() {
if self.camera.get_tile_depth() == 0 {
if self.camera.get_texture_depth() == 0 {
let allsky_query = match hips {
HiPS::D2(h) => query::Allsky::new(h.get_config(), None),
HiPS::D3(h) => query::Allsky::new(h.get_config(), Some(h.get_slice() as u32)),
@@ -272,21 +273,21 @@ impl App {
let cfg = hips.get_config();
let min_tile_depth = cfg.get_min_depth_tile();
let min_tile_depth = cfg.delta_depth().max(cfg.get_min_depth_tile());
let mut ancestors = HashSet::new();
if let Some(tiles) = hips.look_for_new_tiles(&self.camera) {
if let Some(tiles) = hips.look_for_new_tiles(&mut self.camera, &self.projection) {
for tile_cell in tiles {
self.tile_fetcher.append(hips.get_tile_query(&tile_cell));
                        // Check whether we are starting Aladin Lite or not.
                        // If so, we want to retrieve only the tiles in the view and access them
                        // directly, i.e. without blending them with less precise tiles
if self.tile_fetcher.get_num_tile_fetched() > 0
&& tile_cell.depth() >= min_tile_depth + 3
{
let ancestor_tile_cell = tile_cell.ancestor(3);
ancestors.insert(ancestor_tile_cell);
if self.tile_fetcher.get_num_tile_fetched() > 0 {
if tile_cell.depth() >= min_tile_depth + 3 {
let ancestor_tile_cell = tile_cell.ancestor(3);
ancestors.insert(ancestor_tile_cell);
}
}
}
}
@@ -430,9 +431,11 @@ impl App {
);
// 3. project on screen
self.projection
.model_to_clip_space(&xyz, &self.camera)
.map(|p| [p.x, p.y])
if let Some(p) = self.projection.model_to_clip_space(&xyz, &self.camera) {
Some([p.x, p.y])
} else {
None
}
};
if let (Some(c1), Some(c2), Some(c3), Some(c4)) = (
@@ -514,7 +517,7 @@ impl App {
}*/
pub(crate) fn is_inerting(&self) -> bool {
self.inertia.is_some()
return self.inertia.is_some();
}
pub(crate) fn update(&mut self, dt: DeltaTime) -> Result<bool, JsValue> {
@@ -575,14 +578,16 @@ impl App {
}
// Tiles are fetched if:
let fetch_tiles =
let fetch_tiles = self.inertia.is_none() &&
// * the user is not zooming
!self.camera.has_zoomed() &&
// * no inertia action is in progress
(
// * the user is not panning the view
!self.dragging ||
// * or the user is but did not move for at least 100ms
(Time::now() - self.camera.get_time_of_last_move() >= DeltaTime(100.0) || !self.dragging) &&
// * no inertia action is in progress
self.inertia.is_none() &&
// * the user is not zooming
!self.camera.has_zoomed();
(self.dragging && Time::now() - self.camera.get_time_of_last_move() >= DeltaTime(100.0))
);
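The throttling rule above, restated as a standalone predicate (an illustrative sketch, not code from the patch):

    // Tiles are requested only when no inertia animation is running, the user is not
    // zooming, and either no drag is in progress or the pointer has been still for
    // at least 100 ms.
    fn should_fetch_tiles(inertia: bool, zooming: bool, dragging: bool, ms_since_last_move: f64) -> bool {
        !inertia && !zooming && (!dragging || ms_since_last_move >= 100.0)
    }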
if fetch_tiles {
self.tile_fetcher.notify(self.downloader.clone(), None);
@@ -591,66 +596,126 @@ impl App {
let rscs_received = self.downloader.borrow_mut().get_received_resources();
//let mut tile_copied = false;
let mut tile_copied = false;
for rsc in rscs_received {
match rsc {
Resource::Tile(tile) => {
//if !_has_camera_zoomed {
if let Some(hips) = self.layers.get_mut_hips_from_cdid(tile.get_hips_cdid()) {
if let Some(hips) = self.layers.get_mut_hips_from_cdid(&tile.get_hips_cdid()) {
let cfg = hips.get_config_mut();
if cfg.get_format() == tile.format {
let delta_depth = cfg.delta_depth();
let fov_coverage = self.camera.get_cov(cfg.get_frame());
let included_in_coverage = fov_coverage.intersects_cell(tile.cell());
let included_or_near_coverage = tile
.cell()
.get_texture_cell(delta_depth)
.get_tile_cells(delta_depth)
.any(|neighbor_tile_cell| {
fov_coverage.intersects_cell(&neighbor_tile_cell)
});
//let is_tile_root = tile.cell().depth() == delta_depth;
//let _depth = tile.cell().depth();
// do not perform tex_sub costly GPU calls while the camera is zooming
if tile.cell().is_root() || included_in_coverage {
//if let Some(image) = image.as_ref() {
if let Some(ImageType::FitsRawBytes {
raw_bytes: raw_bytes_buf,
..
}) = &*tile.image.borrow()
{
// check if the metadata has not been set
if hips.get_fits_params().is_none() {
let raw_bytes = raw_bytes_buf.to_vec();
if tile.cell().is_root() || included_or_near_coverage {
//let is_missing = tile.missing();
/*self.tile_fetcher.notify_tile(
&tile,
true,
false,
&mut self.downloader,
);*/
let FitsImage {
bscale,
bzero,
blank,
..
} = FitsImage::from_raw_bytes(raw_bytes.as_slice())?[0];
hips.set_fits_params(bscale, bzero, blank);
/*let image = if is_missing {
                                // Otherwise we push nothing; this is probably the case where:
                                // - a request error occurred on a valid tile
                                // - the tile is not present, e.g. Chandra HiPS does not provide the order 0, 1 and 2 tiles
None
} else {
Some(image)
};*/
use al_core::image::ImageType;
use fitsrs::fits::Fits;
use std::io::Cursor;
//if let Some(image) = image.as_ref() {
match &*tile.image.borrow() {
Some(ImageType::FitsImage {
raw_bytes: raw_bytes_buf,
}) => {
// check if the metadata has not been set
if !cfg.fits_metadata {
let num_bytes = raw_bytes_buf.length() as usize;
let mut raw_bytes = vec![0; num_bytes];
raw_bytes_buf.copy_to(&mut raw_bytes[..]);
let mut bytes_reader =
Cursor::new(raw_bytes.as_slice());
let Fits { hdu } = Fits::from_reader(&mut bytes_reader)
.map_err(|_| {
JsValue::from_str("Parsing fits error")
})?;
let header = hdu.get_header();
let bscale =
if let Some(fitsrs::card::Value::Float(bscale)) =
header.get(b"BSCALE ")
{
*bscale as f32
} else {
1.0
};
let bzero =
if let Some(fitsrs::card::Value::Float(bzero)) =
header.get(b"BZERO ")
{
*bzero as f32
} else {
0.0
};
let blank =
if let Some(fitsrs::card::Value::Float(blank)) =
header.get(b"BLANK ")
{
*blank as f32
} else {
std::f32::NAN
};
cfg.set_fits_metadata(bscale, bzero, blank);
}
}
_ => (),
};
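The three keywords read above follow the standard FITS scaling convention; a self-contained sketch of how a stored sample maps to a physical value (illustrative, not part of the patch):

    // physical = BZERO + BSCALE * stored, with BLANK marking undefined samples.
    fn physical_value(stored: f32, bscale: f32, bzero: f32, blank: f32) -> f32 {
        if stored == blank {
            f32::NAN
        } else {
            bzero + bscale * stored
        }
    }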
//}
let image = tile.image.clone();
if let Some(img) = &*image.borrow() {
/*if tile_copied {
self.downloader
.borrow_mut()
.delay(Resource::Tile(tile));
continue;
}*/
self.request_redraw = true;
//tile_copied = true;
match hips {
HiPS::D2(hips) => {
hips.add_tile(&tile.cell, img, tile.time_req)?
match &*image.borrow() {
Some(img) => {
if tile_copied {
self.downloader
.borrow_mut()
.delay(Resource::Tile(tile));
continue;
}
HiPS::D3(hips) => hips.add_tile(
&tile.cell,
img,
tile.time_req,
tile.channel.unwrap() as u16,
)?,
self.request_redraw = true;
tile_copied = true;
match hips {
HiPS::D2(hips) => {
hips.add_tile(&tile.cell, img, tile.time_req)?
}
HiPS::D3(hips) => hips.add_tile(
&tile.cell,
img,
tile.time_req,
tile.channel.unwrap() as u16,
)?,
}
self.time_start_blending = Time::now();
}
self.time_start_blending = Time::now();
_ => (),
};
}
}
@@ -664,9 +729,12 @@ impl App {
if is_missing {
                            // The allsky image is missing, so we download all the tiles contained in
                            // the depth-0 cells
for base_hpx_cell in crate::healpix::cell::ALLSKY_HPX_CELLS_D0 {
let query = hips.get_tile_query(base_hpx_cell);
self.tile_fetcher.append_base_tile(query);
let cfg = hips.get_config();
for texture_cell in crate::healpix::cell::ALLSKY_HPX_CELLS_D0 {
for cell in texture_cell.get_tile_cells(cfg.delta_depth()) {
let query = hips.get_tile_query(&cell);
self.tile_fetcher.append_base_tile(query);
}
}
} else {
// tell the hips to not download tiles which order is <= 3 because the allsky
@@ -677,6 +745,17 @@ impl App {
}
}
}
Resource::PixelMetadata(metadata) => {
if let Some(hips) = self.layers.get_mut_hips_from_cdid(&metadata.hips_cdid) {
let cfg = hips.get_config_mut();
if let Some(metadata) = &*metadata.value.borrow() {
cfg.blank = metadata.blank;
cfg.offset = metadata.offset;
cfg.scale = metadata.scale;
}
}
}
Resource::Moc(moc) => {
let moc_hips_cdid = moc.get_hips_cdid();
//let url = &moc_url[..moc_url.find("/Moc.fits").unwrap_abort()];
@@ -710,32 +789,20 @@ impl App {
Ok(has_camera_moved)
}
pub(crate) fn read_pixel(&self, x: f64, y: f64, layer: &str) -> Result<JsValue, JsValue> {
if let Some(hips) = self.layers.get_hips_from_layer(layer) {
hips.read_pixel(x, y, &self.camera, &self.projection)
} else if let Some(_image) = self.layers.get_image_from_layer(layer) {
// FIXME handle the case of an image
Ok(JsValue::null())
pub(crate) fn read_pixel(&self, pos: &Vector2<f64>, layer: &str) -> Result<JsValue, JsValue> {
if let Some(lonlat) = self.screen_to_world(pos) {
if let Some(hips) = self.layers.get_hips_from_layer(layer) {
hips.read_pixel(&lonlat, &self.camera)
} else if let Some(_image) = self.layers.get_image_from_layer(layer) {
Err(JsValue::from_str("TODO: read pixel value"))
} else {
Err(JsValue::from_str("Survey not found"))
}
} else {
Err(JsValue::from_str("Survey not found"))
Err(JsValue::from_str(&"position is out of projection"))
}
}
pub(crate) fn read_line_of_pixels(
&self,
x1: f64,
y1: f64,
x2: f64,
y2: f64,
layer: &str,
) -> Result<Vec<JsValue>, JsValue> {
let pixels = crate::math::utils::bresenham(x1, y1, x2, y2)
.map(|(x, y)| self.read_pixel(x, y, layer))
.collect::<Result<Vec<_>, _>>()?;
Ok(pixels)
}
pub(crate) fn draw_grid_labels(&mut self) -> Result<(), JsValue> {
self.grid.draw_labels()
}
@@ -828,7 +895,6 @@ impl App {
// Render the scene
// Clear all the screen first (only the region set by the scissor)
gl.clear(WebGl2RenderingContext::COLOR_BUFFER_BIT);
// set the blending options
layers.draw(camera, shaders, colormaps, projection)?;
@@ -844,13 +910,14 @@ impl App {
);*/
moc.draw(camera, projection, shaders)?;
/*gl.blend_func_separate(
gl.blend_func_separate(
WebGl2RenderingContext::SRC_ALPHA,
WebGl2RenderingContext::ONE,
WebGl2RenderingContext::ONE,
WebGl2RenderingContext::ONE,
);*/
);
grid.draw(camera, projection, shaders)?;
// Ok(())
// },
// None,
@@ -876,7 +943,7 @@ impl App {
}
pub(crate) fn rename_layer(&mut self, layer: &str, new_layer: &str) -> Result<(), JsValue> {
self.layers.rename_layer(layer, new_layer)
self.layers.rename_layer(&layer, &new_layer)
}
pub(crate) fn swap_layers(
@@ -920,122 +987,263 @@ impl App {
Ok(())
}
pub(crate) fn add_rgba_image(
pub(crate) fn add_image_from_blob_and_wcs(
&mut self,
layer: String,
bytes: &[u8],
stream: web_sys::ReadableStream,
wcs: WCS,
cfg: ImageMetadata,
) -> Result<js_sys::Promise, JsValue> {
let gl = self.gl.clone();
let img_sender = self.img_send.clone();
let ack_img_recv = self.ack_img_recv.clone();
// Stop the current inertia
self.inertia = None;
// And disable it while the fits has not been loaded
let disable_inertia = self.disable_inertia.clone();
*(disable_inertia.borrow_mut()) = true;
let camera_coo_sys = self.camera.get_coo_system();
match Image::from_rgba_bytes(&gl, bytes, wcs, camera_coo_sys) {
Ok(image) => {
let layer = ImageLayer {
images: vec![image],
id: layer.clone(),
layer,
meta: cfg,
};
let fut = async move {
use crate::renderable::image::Image;
use futures::future::Either;
use futures::TryStreamExt;
use js_sys::Uint8Array;
use wasm_streams::ReadableStream;
let params = layer.get_params();
let body = ReadableStream::from_raw(stream.dyn_into()?);
self.layers.add_image(
layer,
&mut self.camera,
&self.projection,
&mut self.tile_fetcher,
)?;
// Convert the JS ReadableStream to a Rust stream
let bytes_reader = match body.try_into_async_read() {
Ok(async_read) => Either::Left(async_read),
Err((_err, body)) => Either::Right(
body.into_stream()
.map_ok(|js_value| {
js_value.dyn_into::<Uint8Array>().unwrap_throw().to_vec()
})
.map_err(|_js_error| {
std::io::Error::new(std::io::ErrorKind::Other, "failed to read")
})
.into_async_read(),
),
};
use al_core::image::format::RGBA8U;
match Image::from_reader_and_wcs::<_, RGBA8U>(
&gl,
bytes_reader,
wcs,
None,
None,
None,
camera_coo_sys,
)
.await
{
Ok(image) => {
let img = ImageLayer {
images: vec![image],
id: layer.clone(),
layer,
meta: cfg,
};
self.request_redraw = true;
img_sender.send(img).await.unwrap();
let promise = js_sys::Promise::resolve(&serde_wasm_bindgen::to_value(&params)?);
Ok(promise)
// Wait for the ack here
let image_params = ack_img_recv
.recv()
.await
.map_err(|_| JsValue::from_str("Problem receiving fits"))?;
serde_wasm_bindgen::to_value(&image_params).map_err(|e| e.into())
}
Err(error) => Err(error),
}
Err(error) => Err(error),
}
};
let reenable_inertia = Closure::new(move || {
            // re-enable inertia
*(disable_inertia.borrow_mut()) = false;
});
let promise = wasm_bindgen_futures::future_to_promise(fut)
            // Re-enable inertia independently of whether the
            // fits has been correctly parsed or not
.finally(&reenable_inertia);
        // forget the closure; this is not ideal, as it
        // will never be deallocated
reenable_inertia.forget();
Ok(promise)
}
pub(crate) fn add_fits_image(
pub(crate) fn add_image_fits(
&mut self,
bytes: &[u8],
stream: web_sys::ReadableStream,
meta: ImageMetadata,
layer: String,
) -> Result<js_sys::Promise, JsValue> {
let gl = self.gl.clone();
let fits_sender = self.img_send.clone();
let ack_fits_recv = self.ack_img_recv.clone();
// Stop the current inertia
self.inertia = None;
// And disable it while the fits has not been loaded
let disable_inertia = self.disable_inertia.clone();
*(disable_inertia.borrow_mut()) = true;
let camera_coo_sys = self.camera.get_coo_system();
// FIXME: this is done to prevent the view inerting after being unblocked
let fut = async move {
use crate::renderable::image::Image;
use futures::future::Either;
use futures::TryStreamExt;
use js_sys::Uint8Array;
use wasm_streams::ReadableStream;
let gz = fitsrs::gz::GzReader::new(Cursor::new(bytes))
.map_err(|_| JsValue::from_str("Error creating gz wrapper"))?;
// Get the response's body as a JS ReadableStream
let body = ReadableStream::from_raw(stream.dyn_into()?);
let parse_fits_images_from_bytes = |raw_bytes: &[u8]| -> Result<Vec<Image>, JsValue> {
Ok(FitsImage::from_raw_bytes(raw_bytes)?
.into_iter()
.filter_map(
|FitsImage {
bitpix,
bscale,
bzero,
blank,
wcs,
raw_bytes,
..
}| {
if let Some(wcs) = wcs {
let image = Image::from_fits_hdu(
&gl,
wcs,
bitpix,
raw_bytes,
bscale,
bzero,
blank,
camera_coo_sys,
)
.ok()?;
Some(image)
} else {
None
}
},
)
.collect::<Vec<_>>())
};
let images = match gz {
fitsrs::gz::GzReader::GzReader(bytes) => parse_fits_images_from_bytes(bytes.get_ref())?,
fitsrs::gz::GzReader::Reader(bytes) => parse_fits_images_from_bytes(bytes.get_ref())?,
};
if images.is_empty() {
Err(JsValue::from_str("no images have been parsed"))
} else {
let layer = ImageLayer {
images,
id: layer.clone(),
layer,
meta,
// Convert the JS ReadableStream to a Rust stream
let bytes_reader = match body.try_into_async_read() {
Ok(async_read) => Either::Left(async_read),
Err((_err, body)) => Either::Right(
body.into_stream()
.map_ok(|js_value| {
js_value.dyn_into::<Uint8Array>().unwrap_throw().to_vec()
})
.map_err(|_js_error| {
std::io::Error::new(std::io::ErrorKind::Other, "failed to read")
})
.into_async_read(),
),
};
let params = layer.get_params();
self.layers.add_image(
layer,
&mut self.camera,
&self.projection,
&mut self.tile_fetcher,
)?;
self.request_redraw = true;
let mut reader = BufReader::new(bytes_reader);
let promise = js_sys::Promise::resolve(&serde_wasm_bindgen::to_value(&params)?);
Ok(promise)
}
let AsyncFits { mut hdu } = AsyncFits::from_reader(&mut reader)
.await
.map_err(|e| JsValue::from_str(&format!("Fits file parsing: reason: {}", e)))?;
let mut hdu_ext_idx = 0;
let mut images = vec![];
match Image::from_fits_hdu_async(&gl, &mut hdu.0, camera_coo_sys).await {
Ok(image) => {
images.push(image);
let mut hdu_ext = hdu.next().await;
// Continue parsing the file extensions here
while let Ok(Some(mut xhdu)) = hdu_ext {
match &mut xhdu {
AsyncXtensionHDU::Image(xhdu_img) => {
match Image::from_fits_hdu_async(&gl, xhdu_img, camera_coo_sys)
.await
{
Ok(image) => {
images.push(image);
}
Err(error) => {
al_core::log::console_warn(&
format!("The extension {hdu_ext_idx} has not been parsed, reason:")
);
al_core::log::console_warn(error);
}
}
}
_ => {
al_core::log::console_warn(&
format!("The extension {hdu_ext_idx} is a BinTable/AsciiTable and is thus discarded")
);
}
}
hdu_ext_idx += 1;
hdu_ext = xhdu.next().await;
}
}
Err(error) => {
al_core::log::console_warn(error);
let mut hdu_ext = hdu.next().await;
while let Ok(Some(mut xhdu)) = hdu_ext {
match &mut xhdu {
AsyncXtensionHDU::Image(xhdu_img) => {
match Image::from_fits_hdu_async(&gl, xhdu_img, camera_coo_sys)
.await
{
Ok(image) => {
images.push(image);
}
Err(error) => {
al_core::log::console_warn(&
format!("The extension {hdu_ext_idx} has not been parsed, reason:")
);
al_core::log::console_warn(error);
}
}
}
_ => {
al_core::log::console_warn(&
format!("The extension {hdu_ext_idx} is a BinTable/AsciiTable and is thus discarded")
);
}
}
hdu_ext_idx += 1;
hdu_ext = xhdu.next().await;
}
}
}
if images.is_empty() {
Err(JsValue::from_str("no images have been parsed"))
} else {
let fits = ImageLayer {
images,
id: layer.clone(),
layer,
meta,
};
fits_sender.send(fits).await.unwrap();
// Wait for the ack here
let image_params = ack_fits_recv
.recv()
.await
.map_err(|_| JsValue::from_str("Problem receiving fits"))?;
serde_wasm_bindgen::to_value(&image_params).map_err(|e| e.into())
}
};
let reenable_inertia = Closure::new(move || {
            // re-enable inertia
*(disable_inertia.borrow_mut()) = false;
});
let promise = wasm_bindgen_futures::future_to_promise(fut)
            // Re-enable inertia independently of whether the
            // fits has been correctly parsed or not
.finally(&reenable_inertia);
        // forget the closure; this is not ideal, as it
        // will never be deallocated
reenable_inertia.forget();
Ok(promise)
}
pub(crate) fn get_layer_cfg(&self, layer: &str) -> Result<ImageMetadata, JsValue> {
@@ -1045,7 +1253,7 @@ impl App {
pub(crate) fn set_hips_slice_number(&mut self, layer: &str, slice: u32) -> Result<(), JsValue> {
let hips = self
.layers
.get_mut_hips_from_layer(layer)
.get_mut_hips_from_layer(&layer)
.ok_or_else(|| JsValue::from_str("Layer not found"))?;
self.request_for_new_tiles = true;
@@ -1116,8 +1324,7 @@ impl App {
}
pub(crate) fn set_longitude_reversed(&mut self, longitude_reversed: bool) {
self.camera
.set_longitude_reversed(longitude_reversed, &self.projection);
self.camera.set_longitude_reversed(longitude_reversed, &self.projection);
}
pub(crate) fn add_catalog(&mut self, _name: String, table: JsValue, _colormap: String) {
@@ -1318,6 +1525,7 @@ impl App {
return;
}
let now = Time::now();
let dragging_duration = (now - self.time_start_dragging).as_secs();
let dragging_vel = self.dist_dragging / dragging_duration;
@@ -1338,7 +1546,7 @@ impl App {
let axis = self.prev_cam_position.cross(*center).normalize();
//let delta_time = ((now - time_of_last_move).0 as f64).max(1.0);
let delta_angle = math::vector::angle3(&self.prev_cam_position, center).to_radians();
let delta_angle = math::vector::angle3(&self.prev_cam_position, &center).to_radians();
let ampl = delta_angle * (dragging_vel as f64) * 5e-3;
//let ampl = (dragging_vel * 0.01) as f64;
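The inertia amplitude computed above is the product of the last rotation angle and the average dragging velocity (dragged distance over drag duration), scaled by an empirical 5e-3 factor. A tiny hedged sketch of that formula; the 1e-3 floor on the duration is an added guard, not in the original:

```rust
/// Initial inertia amplitude: last rotation angle (rad) times the average
/// drag velocity (px/s), scaled by an empirical factor.
fn inertia_amplitude(delta_angle_rad: f64, dist_dragged_px: f64, drag_duration_s: f64) -> f64 {
    let dragging_vel = dist_dragged_px / drag_duration_s.max(1e-3); // avoid division by zero
    delta_angle_rad * dragging_vel * 5e-3
}

fn main() {
    // 0.02 rad moved, 300 px dragged over 0.25 s -> amplitude 0.12
    println!("{}", inertia_amplitude(0.02, 300.0, 0.25));
}
```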
@@ -1391,7 +1599,7 @@ impl App {
let prev_pos = w1;
let cur_pos = w2;
if prev_pos != cur_pos {
let prev_cam_position = *self.camera.get_center();
let prev_cam_position = self.camera.get_center().clone();
if self.north_up {
let lonlat1 = prev_pos.lonlat();
@@ -1399,24 +1607,15 @@ impl App {
let dlon = lonlat2.lon() - lonlat1.lon();
let dlat = lonlat2.lat() - lonlat1.lat();
self.camera
.apply_lonlat_rotation(dlon, dlat, &self.projection);
self.camera.apply_lonlat_rotation(dlon, dlat, &self.projection);
// Detect if a pole has been crossed
let north_pole = Vector3::new(0.0, 1.0, 0.0);
let south_pole = Vector3::new(0.0, -1.0, 0.0);
let cross_north_pole = crate::math::lonlat::is_in(
&prev_cam_position,
self.camera.get_center(),
&north_pole,
);
let cross_south_pole = crate::math::lonlat::is_in(
&prev_cam_position,
self.camera.get_center(),
&south_pole,
);
let cross_north_pole = crate::math::lonlat::is_in(&prev_cam_position, &self.camera.get_center(), &north_pole);
let cross_south_pole = crate::math::lonlat::is_in(&prev_cam_position, &self.camera.get_center(), &south_pole);
let cross_pole = cross_north_pole | cross_south_pole;
@@ -1428,7 +1627,7 @@ impl App {
};
let fov = self.camera.get_aperture();
let pole = if center.y >= 0.0 {
north_pole
} else {
@@ -1441,12 +1640,10 @@ impl App {
// too near to the pole
let axis = center.cross(pole).normalize();
use crate::math::rotation::Rotation;
let new_center = Rotation::from_axis_angle(&axis, (-5e-3 * fov).to_angle())
.rotate(&pole);
let new_center = Rotation::from_axis_angle(&axis, (-5e-3 * fov).to_angle()).rotate(&pole);
self.camera.set_center_xyz(&new_center, &self.projection);
self.camera
.set_position_angle(0.0.to_angle(), &self.projection);
self.camera.set_position_angle(0.0.to_angle(), &self.projection);
}
} else {
/* 1. Rotate by computing the angle between the last and current position */
@@ -1454,8 +1651,7 @@ impl App {
let d = math::vector::angle3(&prev_pos, &cur_pos);
let axis = prev_pos.cross(cur_pos).normalize();
self.camera
.apply_axis_rotation(&(-axis), d, &self.projection);
self.camera.apply_axis_rotation(&(-axis), d, &self.projection);
}
self.prev_cam_position = prev_cam_position;
@@ -1480,7 +1676,7 @@ impl App {
}
pub(crate) fn get_norder(&self) -> i32 {
self.camera.get_tile_depth() as i32
self.camera.get_texture_depth() as i32
}
pub(crate) fn get_zoom_factor(&self) -> f64 {

View File

@@ -79,14 +79,16 @@ impl FieldOfView {
) -> Self {
let mut x_ndc = linspace(-1., 1., NUM_VERTICES_WIDTH + 2);
x_ndc.extend(iter::repeat_n(1.0, NUM_VERTICES_HEIGHT));
x_ndc.extend(iter::repeat(1.0).take(NUM_VERTICES_HEIGHT));
x_ndc.extend(linspace(1., -1., NUM_VERTICES_WIDTH + 2));
x_ndc.extend(iter::repeat_n(-1.0, NUM_VERTICES_HEIGHT));
x_ndc.extend(iter::repeat(-1.0).take(NUM_VERTICES_HEIGHT));
let mut y_ndc = iter::repeat_n(-1.0, NUM_VERTICES_WIDTH + 1).collect::<Vec<_>>();
let mut y_ndc = iter::repeat(-1.0)
.take(NUM_VERTICES_WIDTH + 1)
.collect::<Vec<_>>();
y_ndc.extend(linspace(-1., 1., NUM_VERTICES_HEIGHT + 2));
y_ndc.extend(iter::repeat_n(1.0, NUM_VERTICES_WIDTH));
y_ndc.extend(iter::repeat(1.0).take(NUM_VERTICES_WIDTH));
y_ndc.extend(linspace(1., -1., NUM_VERTICES_HEIGHT + 2));
y_ndc.pop();
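Both variants of this hunk build the same thing: a list of NDC coordinates that walks once around the border of the viewport (bottom, right, top, left edges), later unprojected to obtain the field-of-view contour. A self-contained sketch of that perimeter sampling, with hypothetical small vertex counts in place of NUM_VERTICES_WIDTH and NUM_VERTICES_HEIGHT:

```rust
// Hypothetical sizes; the real code uses NUM_VERTICES_WIDTH / NUM_VERTICES_HEIGHT.
const W: usize = 4;
const H: usize = 4;

fn linspace(a: f64, b: f64, n: usize) -> Vec<f64> {
    (0..n).map(|i| a + (b - a) * (i as f64) / ((n - 1) as f64)).collect()
}

/// Return the (x, y) NDC vertices describing the viewport border.
fn viewport_border_ndc() -> Vec<(f64, f64)> {
    // x: sweep one edge, hold +1 along the next, sweep back, hold -1.
    let mut x = linspace(-1.0, 1.0, W + 2);
    x.extend(std::iter::repeat(1.0).take(H));
    x.extend(linspace(1.0, -1.0, W + 2));
    x.extend(std::iter::repeat(-1.0).take(H));

    // y follows the same walk, shifted by one edge so the pairs trace the rectangle.
    let mut y: Vec<f64> = std::iter::repeat(-1.0).take(W + 1).collect();
    y.extend(linspace(-1.0, 1.0, H + 2));
    y.extend(std::iter::repeat(1.0).take(W));
    y.extend(linspace(1.0, -1.0, H + 2));
    y.pop();

    x.into_iter().zip(y).collect()
}

fn main() {
    for (x, y) in viewport_border_ndc() {
        println!("({x:+.2}, {y:+.2})");
    }
}
```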

View File

@@ -50,8 +50,9 @@ pub fn build_fov_coverage(
let inside_vertex = crate::coosys::apply_coo_system(camera_frame, frame, camera_center);
// Prefer to query from_polygon with depth >= 2
let moc = HEALPixCoverage::from_3d_coos(depth, vertices_iter, &inside_vertex);
HEALPixCoverage::from_3d_coos(depth, vertices_iter, &inside_vertex)
moc
}
} else {
let center_xyz = crate::coosys::apply_coo_system(camera_frame, frame, camera_center);

View File

@@ -8,14 +8,18 @@ pub enum UserAction {
use web_sys::WebGl2RenderingContext;
// Longitude reversed identity matrix
const ID_R: &Matrix3<f64> = &Matrix3::new(-1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0);
const ID_R: &Matrix3<f64> = &Matrix3::new(
-1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
);
use cgmath::{Vector3, InnerSpace};
use super::{fov::FieldOfView, view_hpx_cells::ViewHpxCells};
use crate::healpix::cell::HEALPixCell;
use crate::healpix::coverage::HEALPixCoverage;
use crate::math::angle::ToAngle;
use crate::math::{projection::coo_space::XYZModel, projection::domain::sdf::ProjDef};
use cgmath::{InnerSpace, Vector3};
use cgmath::{Matrix3, Vector2};
const APERTURE_LOWER_LIMIT_RAD: f64 = (1.0_f64 / 36000.0).to_radians();
@@ -281,8 +285,8 @@ impl CameraViewPort {
}
pub fn set_screen_size(&mut self, width: f32, height: f32, projection: &ProjectionType) {
self.width = width * self.dpi;
self.height = height * self.dpi;
self.width = (width as f32) * self.dpi;
self.height = (height as f32) * self.dpi;
self.aspect = width / height;
// Compute the new clip zoom factor
@@ -374,10 +378,10 @@ impl CameraViewPort {
}
}
let can_unzoom_more = !matches!(
proj,
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_)
);
let can_unzoom_more = match proj {
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_) => false,
_ => true,
};
if !can_unzoom_more && self.zoom_factor >= 1.0 {
return true;
@@ -410,10 +414,10 @@ impl CameraViewPort {
self.last_user_action
};
let can_unzoom_more = !matches!(
proj,
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_)
);
let can_unzoom_more = match proj {
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_) => false,
_ => true,
};
let aperture_start: f64 = proj.aperture_start().to_radians();
@@ -487,10 +491,10 @@ impl CameraViewPort {
self.last_user_action
};
let can_unzoom_more = !matches!(
proj,
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_)
);
let can_unzoom_more = match proj {
ProjectionType::Tan(_) | ProjectionType::Mer(_) | ProjectionType::Stg(_) => false,
_ => true,
};
// Set the zoom factor
self.zoom_factor = zoom_factor;
@@ -594,7 +598,7 @@ impl CameraViewPort {
let w_screen_px = self.width as f64;
let smallest_cell_size_px = self.dpi as f64;
let mut depth_pixel = 29_usize;
let mut depth_pixel = 29 as usize;
let hpx_cell_size_rad = (smallest_cell_size_px / w_screen_px) * self.get_aperture();
@@ -603,7 +607,7 @@ impl CameraViewPort {
break;
}
depth_pixel -= 1;
depth_pixel = depth_pixel - 1;
}
depth_pixel += 1;
const DEPTH_OFFSET_TEXTURE: usize = 9;
@@ -614,7 +618,7 @@ impl CameraViewPort {
};
}
pub fn get_tile_depth(&self) -> u8 {
pub fn get_texture_depth(&self) -> u8 {
self.texture_depth
}
@@ -635,13 +639,10 @@ impl CameraViewPort {
&mut self,
dlon: Angle<f64>,
dlat: Angle<f64>,
proj: &ProjectionType,
proj: &ProjectionType
) {
let center = self.get_center();
let rot =
Rotation::from_axis_angle(&Vector3::new(center.z, 0.0, -center.x).normalize(), dlat)
* Rotation::from_axis_angle(&Vector3::unit_y(), -dlon)
* Rotation::from_sky_position(center);
let rot = Rotation::from_axis_angle(&Vector3::new(center.z, 0.0, -center.x).normalize(), dlat) * Rotation::from_axis_angle(&Vector3::unit_y(), -dlon) * Rotation::from_sky_position(&center);
self.set_rotation(&rot, proj);
}
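The depth selection touched above picks the deepest HEALPix order whose cells still cover at least one device pixel at the current aperture. A hedged sketch of that selection, approximating the cell side as sqrt(4π / (12 · 4^depth)); the exact break condition and the DEPTH_OFFSET_TEXTURE adjustment of the real code are not reproduced:

```rust
/// Pick the deepest HEALPix order whose cells are not smaller than one
/// device pixel, given the aperture (rad), the screen width (px) and the DPI.
fn pixel_depth(aperture_rad: f64, screen_width_px: f64, dpi: f64) -> u8 {
    // Angular size spanned by one device pixel at this aperture.
    let pixel_size_rad = (dpi / screen_width_px) * aperture_rad;

    let mut depth = 29_i32;
    while depth > 0 {
        // Approximate side of a HEALPix cell at this depth (12 * 4^depth cells over 4π sr).
        let n_cells = 12.0 * 4f64.powi(depth);
        let cell_side_rad = (4.0 * std::f64::consts::PI / n_cells).sqrt();
        if cell_side_rad >= pixel_size_rad {
            break;
        }
        depth -= 1;
    }
    depth as u8
}

fn main() {
    // Example: a 1 degree aperture on a 1024 px wide canvas at dpi = 1.
    println!("pixel depth = {}", pixel_depth(1f64.to_radians(), 1024.0, 1.0));
}
```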

View File

@@ -1,5 +1,5 @@
use al_api::coo_system::CooSystem;
use cgmath::Vector3;
use al_api::coo_system::CooSystem;
/// This is conversion method returning a transformation
/// matrix when the system requested by the user is not

View File

@@ -18,12 +18,6 @@ use crate::fifo_cache::Cache;
use query::Query;
use request::{RequestType, Resource};
impl Default for Downloader {
fn default() -> Self {
Self::new()
}
}
impl Downloader {
pub fn new() -> Downloader {
let requests = Vec::with_capacity(32);

View File

@@ -11,7 +11,7 @@ pub type QueryId = String;
use al_core::image::format::ImageFormatType;
#[derive(Eq, PartialEq, Clone)]
#[derive(Eq, Hash, PartialEq, Clone)]
pub struct Tile {
pub cell: HEALPixCell,
pub format: ImageFormatType,
@@ -19,9 +19,6 @@ pub struct Tile {
pub hips_cdid: CreatorDid,
// The total url of the query
pub url: Url,
pub size: u32, // size of the tile requested
pub credentials: RequestCredentials,
pub mode: RequestMode,
pub id: QueryId,
pub channel: Option<u32>,
}
@@ -29,14 +26,11 @@ pub struct Tile {
use crate::healpix::cell::HEALPixCell;
use crate::renderable::hips::config::HiPSConfig;
use crate::renderable::CreatorDid;
use web_sys::{RequestCredentials, RequestMode};
impl Tile {
pub fn new(cell: &HEALPixCell, channel: Option<u32>, cfg: &HiPSConfig) -> Self {
let hips_cdid = cfg.get_creator_did();
let hips_url = cfg.get_root_url();
let format = cfg.get_format();
let credentials = cfg.get_request_credentials();
let mode = cfg.get_request_mode();
let ext = format.get_ext_file();
@@ -44,17 +38,17 @@ impl Tile {
let dir_idx = (idx / 10000) * 10000;
let mut url = format!("{hips_url}/Norder{depth}/Dir{dir_idx}/Npix{idx}");
let mut url = format!("{}/Norder{}/Dir{}/Npix{}", hips_url, depth, dir_idx, idx);
// handle cube case
if let Some(channel) = channel {
if channel > 0 {
url.push_str(&format!("_{channel:?}"));
url.push_str(&format!("_{:?}", channel));
}
}
// add the tile format
url.push_str(&format!(".{ext}"));
url.push_str(&format!(".{}", ext));
let id = format!(
"{}{}{}{}{}",
@@ -65,17 +59,13 @@ impl Tile {
ext
);
let size = cfg.get_tile_size();
Tile {
hips_cdid: hips_cdid.to_string(),
url,
cell: *cell,
format,
credentials,
mode,
id,
channel,
size: size as u32,
}
}
}
@@ -93,40 +83,34 @@ impl Query for Tile {
pub struct Allsky {
pub format: ImageFormatType,
pub tile_size: i32,
pub allsky_tile_size: i32,
pub texture_size: i32,
pub channel: Option<u32>,
// The root url of the HiPS
pub hips_cdid: CreatorDid,
// The total url of the query
pub url: Url,
pub credentials: RequestCredentials,
pub mode: RequestMode,
pub id: QueryId,
}
impl Allsky {
pub fn new(cfg: &HiPSConfig, channel: Option<u32>) -> Self {
let hips_cdid = cfg.get_creator_did().to_string();
let allsky_tile_size = cfg.allsky_tile_size();
let tile_size = cfg.get_tile_size();
let texture_size = cfg.get_texture_size();
let format = cfg.get_format();
let ext = format.get_ext_file();
let credentials = cfg.get_request_credentials();
let mode = cfg.get_request_mode();
let mut url = format!("{}/Norder3/Allsky", cfg.get_root_url());
// handle cube case
if let Some(channel) = channel {
if channel > 0 {
url.push_str(&format!("_{channel:?}"));
url.push_str(&format!("_{:?}", channel));
}
}
// add the tile format
url.push_str(&format!(".{ext}"));
url.push_str(&format!(".{}", ext));
let id = format!(
"{}Allsky{}{}",
@@ -137,13 +121,11 @@ impl Allsky {
Allsky {
tile_size,
allsky_tile_size,
texture_size,
hips_cdid,
url,
format,
id,
credentials,
mode,
channel,
}
}
@@ -159,30 +141,55 @@ impl Query for Allsky {
}
/* ---------------------------------- */
use al_api::moc::MOCOptions;
pub struct PixelMetadata {
pub format: ImageFormatType,
// The root url of the HiPS
pub hips_cdid: CreatorDid,
// The total url of the query
pub url: Url,
pub id: QueryId,
}
impl PixelMetadata {
pub fn new(cfg: &HiPSConfig) -> Self {
let hips_cdid = cfg.get_creator_did().to_string();
let format = cfg.get_format();
let ext = format.get_ext_file();
let url = format!("{}/Norder3/Allsky.{}", cfg.get_root_url(), ext);
let id = format!("{}Allsky{}", hips_cdid, ext);
PixelMetadata {
hips_cdid,
url,
format,
id,
}
}
}
use super::request::blank::PixelMetadataRequest;
impl Query for PixelMetadata {
type Request = PixelMetadataRequest;
fn id(&self) -> &QueryId {
&self.id
}
}
use al_api::moc::MOCOptions;
/* ---------------------------------- */
pub struct Moc {
// The total url of the query
pub url: Url,
pub mode: RequestMode,
pub credentials: RequestCredentials,
pub params: MOCOptions,
pub hips_cdid: CreatorDid,
}
impl Moc {
pub fn new(
url: String,
mode: RequestMode,
credentials: RequestCredentials,
hips_cdid: CreatorDid,
params: MOCOptions,
) -> Self {
pub fn new(url: String, hips_cdid: CreatorDid, params: MOCOptions) -> Self {
Moc {
url,
params,
hips_cdid,
mode,
credentials,
}
}
}
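For context, the tile query above follows the standard HiPS layout: tiles of a given order live under Norder{depth}, grouped into directories of 10,000 (Dir{(idx/10000)*10000}), the file is Npix{idx}, cube slices append _{channel} for channels above 0, and the extension comes from the tile format. A small sketch of that URL construction (parameter names are illustrative):

```rust
/// Build a HiPS tile URL following the Norder/Dir/Npix convention.
fn hips_tile_url(root_url: &str, depth: u8, idx: u64, channel: Option<u32>, ext: &str) -> String {
    // Tiles are bucketed in directories of 10,000.
    let dir_idx = (idx / 10_000) * 10_000;
    let mut url = format!("{root_url}/Norder{depth}/Dir{dir_idx}/Npix{idx}");

    // HiPS cubes append the slice index for every channel but the first one.
    if let Some(channel) = channel {
        if channel > 0 {
            url.push_str(&format!("_{channel}"));
        }
    }

    url.push_str(&format!(".{ext}"));
    url
}

fn main() {
    let url = hips_tile_url("https://example.org/P/Survey", 7, 123_456, None, "jpg");
    assert_eq!(url, "https://example.org/P/Survey/Norder7/Dir120000/Npix123456.jpg");
    println!("{url}");
}
```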

View File

@@ -1,16 +1,18 @@
use std::io::Cursor;
use crate::downloader::query;
use crate::renderable::CreatorDid;
use al_core::image::fits::FitsImage;
use al_core::image::format::ChannelType;
use al_core::image::ImageType;
use al_core::texture::format::PixelType;
use fitsrs::hdu::header::Bitpix;
use fitsrs::{fits::Fits, hdu::data::InMemData};
use super::{Request, RequestType};
use crate::downloader::QueryId;
pub struct AllskyRequest {
pub hips_cdid: CreatorDid,
pub url: Url,
//pub depth_tile: u8,
pub depth_tile: u8,
pub id: QueryId,
pub channel: Option<u32>,
@@ -26,17 +28,26 @@ impl From<AllskyRequest> for RequestType {
use super::Url;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestCredentials, RequestInit, Response};
use web_sys::{RequestInit, RequestMode, Response};
use al_core::{image::raw::ImageBuffer, texture::pixel::Pixel};
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
async fn query_allsky(
url: &str,
credentials: RequestCredentials,
) -> Result<ImageBuffer<RGBA8U>, JsValue> {
let image = super::query_html_image(url, credentials).await?;
async fn query_image(url: &str) -> Result<ImageBuffer<RGBA8U>, JsValue> {
let image = web_sys::HtmlImageElement::new().unwrap_abort();
let image_cloned = image.clone();
let html_img_elt_promise = js_sys::Promise::new(
&mut (Box::new(move |resolve, reject| {
image_cloned.set_cross_origin(Some(""));
image_cloned.set_onload(Some(&resolve));
image_cloned.set_onerror(Some(&reject));
image_cloned.set_src(&url);
}) as Box<dyn FnMut(js_sys::Function, js_sys::Function)>),
);
let _ = JsFuture::from(html_img_elt_promise).await?;
// The image has been received here
let document = web_sys::window().unwrap_abort().document().unwrap_abort();
@@ -68,54 +79,60 @@ impl From<query::Allsky> for AllskyRequest {
tile_size,
url,
hips_cdid,
allsky_tile_size,
texture_size,
id,
credentials,
mode,
channel: slice,
} = query;
//let depth_tile = crate::math::utils::log_2_unchecked(texture_size / tile_size) as u8;
let channel = format.get_pixel_format();
let depth_tile = crate::math::utils::log_2_unchecked(texture_size / tile_size) as u8;
let channel = format.get_channel();
let url_clone = url.clone();
let request = Request::new(async move {
match channel {
PixelType::RGB8U => {
let allsky = query_allsky(&url_clone, credentials).await?;
ChannelType::RGB8U => {
let allsky_tile_size = std::cmp::min(tile_size, 64);
let allsky = query_image(&url_clone).await?;
let allsky_tiles =
handle_allsky_file::<RGBA8U>(allsky, allsky_tile_size, tile_size)?
.map(|image| {
let ImageBuffer { data, size } = image;
let data = data
.into_iter()
.enumerate()
.filter(|&(i, _)| i % 4 != 3)
.map(|(_, v)| v)
.collect();
let image = ImageBuffer::new(data, size.x, size.y);
ImageType::RawRgb8u { image }
})
let allsky_tiles = handle_allsky_file::<RGBA8U>(
allsky,
allsky_tile_size,
texture_size,
tile_size,
)?
.into_iter()
.map(|image| {
let ImageBuffer { data, size } = image;
let data = data
.into_iter()
.enumerate()
.filter(|&(i, _)| i % 4 != 3)
.map(|(_, v)| v)
.collect();
let image = ImageBuffer::new(data, size.x, size.y);
ImageType::RawRgb8u { image }
})
.collect();
Ok(allsky_tiles)
}
PixelType::RGBA8U => {
let allsky = query_allsky(&url_clone, credentials).await?;
ChannelType::RGBA8U => {
let allsky_tile_size = std::cmp::min(tile_size, 64);
let allsky = query_image(&url_clone).await?;
let allsky_tiles = handle_allsky_file(allsky, allsky_tile_size, tile_size)?
.map(|image| ImageType::RawRgba8u { image })
.collect();
let allsky_tiles =
handle_allsky_file(allsky, allsky_tile_size, texture_size, tile_size)?
.into_iter()
.map(|image| ImageType::RawRgba8u { image })
.collect();
Ok(allsky_tiles)
}
_ => {
let mut opts = RequestInit::new();
opts.method("GET");
opts.mode(mode);
opts.credentials(credentials);
opts.mode(RequestMode::Cors);
let window = web_sys::window().unwrap_abort();
let request = web_sys::Request::new_with_str_and_init(&url_clone, &opts)?;
@@ -130,66 +147,64 @@ impl From<query::Allsky> for AllskyRequest {
// Convert the JS ReadableStream to a Rust stream
let mut reader = body.try_into_async_read().map_err(|_| JsValue::from_str("readable stream locked"))?;*/
let buf = JsFuture::from(resp.array_buffer()?).await?;
let raw_bytes = js_sys::Uint8Array::new(&buf).to_vec();
let array_buffer = JsFuture::from(resp.array_buffer()?).await?;
let bytes_buffer = js_sys::Uint8Array::new(&array_buffer);
let FitsImage {
raw_bytes, bitpix, ..
} = FitsImage::from_raw_bytes(raw_bytes.as_slice())?[0];
match bitpix {
Bitpix::U8 => {
Ok(handle_allsky_fits(raw_bytes, tile_size, allsky_tile_size)?
let num_bytes = bytes_buffer.length() as usize;
let mut raw_bytes = Vec::with_capacity(num_bytes);
unsafe {
raw_bytes.set_len(num_bytes);
}
bytes_buffer.copy_to(&mut raw_bytes[..]);
let mut reader = Cursor::new(&raw_bytes[..]);
let Fits { hdu } = Fits::from_reader(&mut reader)
.map_err(|_| JsValue::from_str("Parsing fits error of allsky"))?;
let data = hdu.get_data();
match data {
InMemData::U8(data) => {
Ok(handle_allsky_fits(&data, tile_size, texture_size)?
.map(|image| ImageType::RawR8ui { image })
.collect())
}
Bitpix::I16 => {
Ok(handle_allsky_fits(raw_bytes, tile_size, allsky_tile_size)?
InMemData::I16(data) => {
Ok(handle_allsky_fits(&data, tile_size, texture_size)?
.map(|image| ImageType::RawR16i { image })
.collect())
}
Bitpix::I32 => {
Ok(handle_allsky_fits(raw_bytes, tile_size, allsky_tile_size)?
InMemData::I32(data) => {
Ok(handle_allsky_fits(&data, tile_size, texture_size)?
.map(|image| ImageType::RawR32i { image })
.collect())
}
Bitpix::I64 => {
let data = unsafe {
std::slice::from_raw_parts(
raw_bytes.as_ptr() as *const i64,
raw_bytes.len() / 8,
)
};
InMemData::I64(data) => {
let data = data.iter().map(|v| *v as i32).collect::<Vec<_>>();
let raw_bytes = unsafe {
Ok(handle_allsky_fits(&data, tile_size, texture_size)?
.map(|image| ImageType::RawR32i { image })
.collect())
}
InMemData::F32(data) => {
let data = unsafe {
std::slice::from_raw_parts(
data.as_ptr() as *const u8,
data.len() * 4,
)
};
Ok(handle_allsky_fits(raw_bytes, tile_size, allsky_tile_size)?
.map(|image| ImageType::RawR32i { image })
.collect())
}
Bitpix::F32 => {
Ok(handle_allsky_fits(raw_bytes, tile_size, allsky_tile_size)?
Ok(handle_allsky_fits(&data, tile_size, texture_size)?
.map(|image| ImageType::RawRgba8u { image })
.collect())
}
Bitpix::F64 => {
let data = unsafe {
std::slice::from_raw_parts(
raw_bytes.as_ptr() as *const f64,
raw_bytes.len() / 8,
)
};
InMemData::F64(data) => {
let data = data.iter().map(|v| *v as f32).collect::<Vec<_>>();
let raw_bytes = unsafe {
let data = unsafe {
std::slice::from_raw_parts(
data.as_ptr() as *const u8,
data.len() * 4,
)
};
Ok(handle_allsky_fits(raw_bytes, tile_size, allsky_tile_size)?
Ok(handle_allsky_fits(&data, tile_size, texture_size)?
.map(|image| ImageType::RawRgba8u { image })
.collect())
}
@@ -201,7 +216,7 @@ impl From<query::Allsky> for AllskyRequest {
Self {
id,
hips_cdid,
//depth_tile,
depth_tile,
url,
request,
channel: slice,
@@ -209,40 +224,44 @@ impl From<query::Allsky> for AllskyRequest {
}
}
use al_core::image::format::ImageFormat;
use al_core::image::raw::ImageBufferView;
use al_core::texture::format::TextureFormat;
fn handle_allsky_file<F: TextureFormat>(
image: ImageBuffer<F>,
fn handle_allsky_file<F: ImageFormat>(
allsky: ImageBuffer<F>,
allsky_tile_size: i32,
texture_size: i32,
tile_size: i32,
) -> Result<impl Iterator<Item = ImageBuffer<F>>, JsValue> {
let d3_tile_allsky_size = std::cmp::min(tile_size, 64);
let num_tiles_per_texture = (texture_size / tile_size) * (texture_size / tile_size);
let num_tiles = num_tiles_per_texture * 12;
let num_allsky_tiles_per_tile = (tile_size / allsky_tile_size) * (tile_size / allsky_tile_size);
let mut src_idx = 0;
let tiles = (0..12).map(move |_| {
let tiles = (0..num_tiles).map(move |_| {
let mut base_tile =
ImageBuffer::<F>::allocate(&F::P::BLACK, allsky_tile_size, allsky_tile_size);
for idx_tile in 0..64 {
ImageBuffer::<F>::allocate(&<F as ImageFormat>::P::BLACK, tile_size, tile_size);
for idx_tile in 0..num_allsky_tiles_per_tile {
let (x, y) = crate::utils::unmortonize(idx_tile as u64);
let dx = x * (d3_tile_allsky_size as u32);
let dy = y * (d3_tile_allsky_size as u32);
let dx = x * (allsky_tile_size as u32);
let dy = y * (allsky_tile_size as u32);
let sx = (src_idx % 27) * d3_tile_allsky_size;
let sy = (src_idx / 27) * d3_tile_allsky_size;
let sx = (src_idx % 27) * allsky_tile_size;
let sy = (src_idx / 27) * allsky_tile_size;
let s = ImageBufferView {
x: sx,
y: sy,
w: d3_tile_allsky_size,
h: d3_tile_allsky_size,
x: sx as i32,
y: sy as i32,
w: allsky_tile_size as i32,
h: allsky_tile_size as i32,
};
let d = ImageBufferView {
x: dx as i32,
y: dy as i32,
w: d3_tile_allsky_size,
h: d3_tile_allsky_size,
w: allsky_tile_size as i32,
h: allsky_tile_size as i32,
};
base_tile.tex_sub(&image, &s, &d);
base_tile.tex_sub(&allsky, &s, &d);
src_idx += 1;
}
@@ -253,46 +272,48 @@ fn handle_allsky_file<F: TextureFormat>(
Ok(tiles)
}
fn handle_allsky_fits<F: TextureFormat>(
image: &[<F::P as Pixel>::Item],
fn handle_allsky_fits<F: ImageFormat>(
allsky_data: &[<<F as ImageFormat>::P as Pixel>::Item],
tile_size: i32,
allsky_tile_size: i32,
texture_size: i32,
) -> Result<impl Iterator<Item = ImageBuffer<F>>, JsValue> {
let d3_tile_allsky_size = std::cmp::min(tile_size, 64);
let width_allsky_px = 27 * d3_tile_allsky_size;
let height_allsky_px = 29 * d3_tile_allsky_size;
let allsky_tile_size = std::cmp::min(tile_size, 64);
let width_allsky_px = 27 * allsky_tile_size;
let height_allsky_px = 29 * allsky_tile_size;
// The fits image layout stores rows in reverse
let reversed_rows_data = image
let reversed_rows_data = allsky_data
.chunks(width_allsky_px as usize * F::NUM_CHANNELS)
.rev()
.flatten()
.copied()
.collect::<Vec<_>>();
let image = ImageBuffer::<F>::new(reversed_rows_data, width_allsky_px, height_allsky_px);
let allsky = ImageBuffer::<F>::new(reversed_rows_data, width_allsky_px, height_allsky_px);
let allsky_tiles_iter =
handle_allsky_file::<F>(image, allsky_tile_size, tile_size)?.map(move |image| {
// The GPU does a specific transformation on the UV for FITS tiles
// We must revert this to be compatible with this GPU transformation
let new_image_data = image
.get_data()
.chunks((allsky_tile_size * allsky_tile_size) as usize * F::NUM_CHANNELS)
.flat_map(|c| {
c.chunks(allsky_tile_size as usize * F::NUM_CHANNELS)
.rev()
.flatten()
})
.cloned()
.collect();
handle_allsky_file::<F>(allsky, allsky_tile_size, texture_size, tile_size)?.map(
move |image| {
// The GPU does a specific transformation on the UV for FITS tiles
// We must revert this to be compatible with this GPU transformation
let new_image_data = image
.get_data()
.chunks((tile_size * tile_size) as usize * F::NUM_CHANNELS)
.flat_map(|c| {
c.chunks(tile_size as usize * F::NUM_CHANNELS)
.rev()
.flatten()
})
.cloned()
.collect();
ImageBuffer::<F>::new(new_image_data, allsky_tile_size, allsky_tile_size)
});
ImageBuffer::<F>::new(new_image_data, tile_size, tile_size)
},
);
Ok(allsky_tiles_iter)
}
use al_core::texture::format::RGBA8U;
use al_core::image::format::RGBA8U;
use crate::time::Time;
use std::cell::RefCell;
@@ -300,7 +321,8 @@ use std::rc::Rc;
pub struct Allsky {
pub image: Rc<RefCell<Option<Vec<ImageType>>>>,
pub time_req: Time,
//pub depth_tile: u8,
pub depth_tile: u8,
pub hips_cdid: CreatorDid,
url: Url,
pub channel: Option<u32>,
@@ -327,7 +349,7 @@ impl<'a> From<&'a AllskyRequest> for Option<Allsky> {
let AllskyRequest {
request,
hips_cdid,
//depth_tile,
depth_tile,
url,
channel,
..
@@ -342,7 +364,7 @@ impl<'a> From<&'a AllskyRequest> for Option<Allsky> {
image: data.clone(),
hips_cdid: hips_cdid.clone(),
url: url.clone(),
//depth_tile: *depth_tile,
depth_tile: *depth_tile,
channel: *channel,
})
} else {
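As the hunks above show, the order-3 Allsky preview is a single image arranged as a 27-column mosaic of sub-tiles of side min(tile_size, 64) px; destination tiles are reassembled from groups of these sub-tiles, with each sub-tile's position inside its tile given by the Morton (Z-order) curve through unmortonize. A hedged sketch of just the index arithmetic (the real code copies pixel blocks with tex_sub):

```rust
/// Decode a Morton (Z-order) index into (x, y) by de-interleaving its bits.
fn unmortonize(z: u64) -> (u32, u32) {
    let (mut x, mut y) = (0u32, 0u32);
    for i in 0..32 {
        x |= (((z >> (2 * i)) & 1) as u32) << i;
        y |= (((z >> (2 * i + 1)) & 1) as u32) << i;
    }
    (x, y)
}

fn main() {
    let allsky_tile_size = 64; // min(tile_size, 64) in the real code
    // Where does the 10th sub-tile of the Allsky mosaic land inside its tile,
    // and where is it read from in the 27-column Allsky image?
    let src_idx = 9u64;
    let (x, y) = unmortonize(src_idx % 64); // position inside the destination tile
    let (dx, dy) = (x * allsky_tile_size, y * allsky_tile_size);
    let (sx, sy) = (
        (src_idx as u32 % 27) * allsky_tile_size,
        (src_idx as u32 / 27) * allsky_tile_size,
    );
    println!("copy {allsky_tile_size}x{allsky_tile_size} block from ({sx}, {sy}) to ({dx}, {dy})");
}
```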

View File

@@ -0,0 +1,164 @@
use al_core::image::format::ChannelType;
use std::io::Cursor;
use crate::downloader::query;
use crate::renderable::CreatorDid;
use fitsrs::fits::Fits;
#[derive(Debug, Clone, Copy)]
pub struct Metadata {
pub blank: f32,
pub scale: f32,
pub offset: f32,
}
impl Default for Metadata {
fn default() -> Self {
Metadata {
blank: -1.0,
scale: 1.0,
offset: 0.0,
}
}
}
use super::{Request, RequestType};
use crate::downloader::QueryId;
pub struct PixelMetadataRequest {
pub id: QueryId,
pub url: Url,
pub hips_cdid: CreatorDid,
request: Request<Metadata>,
}
impl From<PixelMetadataRequest> for RequestType {
fn from(request: PixelMetadataRequest) -> Self {
RequestType::PixelMetadata(request)
}
}
use super::Url;
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, RequestMode, Response};
impl From<query::PixelMetadata> for PixelMetadataRequest {
// Create a tile request associated to a HiPS
fn from(query: query::PixelMetadata) -> Self {
let query::PixelMetadata {
format,
url,
hips_cdid,
id,
} = query;
let url_clone = url.clone();
let channel = format.get_channel();
let window = web_sys::window().unwrap_abort();
let request = match channel {
ChannelType::R32F | ChannelType::R32I | ChannelType::R16I | ChannelType::R8UI => {
Request::new(async move {
let mut opts = RequestInit::new();
opts.method("GET");
opts.mode(RequestMode::Cors);
let request =
web_sys::Request::new_with_str_and_init(&url_clone, &opts).unwrap_abort();
let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;
// `resp_value` is a `Response` object.
debug_assert!(resp_value.is_instance_of::<Response>());
let resp: Response = resp_value.dyn_into()?;
// See https://github.com/MattiasBuelens/wasm-streams/blob/f6dacf58a8826dc67923ab4a3bae87635690ca64/examples/fetch_as_stream.rs#L25-L33
/*let raw_body = resp.body().ok_or(JsValue::from_str("Cannot extract readable stream"))?;
let body = ReadableStream::from_raw(raw_body.dyn_into()?);
// Convert the JS ReadableStream to a Rust stream
let mut reader = body.try_into_async_read().map_err(|_| JsValue::from_str("readable stream locked"))?;
let image = Fits::new(reader).await?;*/
let array_buffer = JsFuture::from(resp.array_buffer()?).await?;
let bytes_buffer = js_sys::Uint8Array::new(&array_buffer);
let num_bytes = bytes_buffer.length() as usize;
let mut raw_bytes = Vec::with_capacity(num_bytes);
unsafe {
raw_bytes.set_len(num_bytes);
}
bytes_buffer.copy_to(&mut raw_bytes[..]);
let mut reader = Cursor::new(&raw_bytes[..]);
let Fits { hdu } = Fits::from_reader(&mut reader)
.map_err(|_| JsValue::from_str("Parsing fits error"))?;
let header = hdu.get_header();
let scale =
if let Some(fitsrs::card::Value::Float(bscale)) = header.get(b"BSCALE ") {
*bscale as f32
} else {
1.0
};
let offset =
if let Some(fitsrs::card::Value::Float(bzero)) = header.get(b"BZERO ") {
*bzero as f32
} else {
0.0
};
let blank =
if let Some(fitsrs::card::Value::Float(blank)) = header.get(b"BLANK ") {
*blank as f32
} else {
std::f32::NAN
};
Ok(Metadata {
blank,
scale,
offset,
})
})
}
_ => Request::new(async move { Ok(Metadata::default()) }),
};
Self {
id,
url,
hips_cdid,
request,
}
}
}
use std::cell::RefCell;
use std::rc::Rc;
#[derive(Debug)]
pub struct PixelMetadata {
pub value: Rc<RefCell<Option<Metadata>>>,
pub hips_cdid: CreatorDid,
pub url: String,
}
use crate::Abort;
impl<'a> From<&'a PixelMetadataRequest> for Option<PixelMetadata> {
fn from(request: &'a PixelMetadataRequest) -> Self {
let PixelMetadataRequest {
request,
hips_cdid,
url,
..
} = request;
if request.is_resolved() {
let Request::<Metadata> { data, .. } = request;
// It will always be resolved and found as we will request a well-known tile (Norder0/Tile0)
Some(PixelMetadata {
hips_cdid: hips_cdid.clone(),
url: url.to_string(),
value: data.clone(),
})
} else {
None
}
}
}
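The new PixelMetadataRequest only extracts three FITS keywords, which is enough because FITS integer data are stored scaled: the physical value is BZERO + BSCALE * raw, and BLANK is the stored value that marks undefined pixels. A minimal hedged sketch of applying such metadata to one raw sample (the struct mirrors the Metadata above; physical_value is illustrative):

```rust
#[derive(Debug, Clone, Copy)]
struct Metadata {
    blank: f32,  // BLANK: stored value flagging undefined pixels
    scale: f32,  // BSCALE, defaults to 1.0
    offset: f32, // BZERO, defaults to 0.0
}

/// Convert a raw (stored) FITS sample into a physical value,
/// mapping BLANK-ed samples to NaN so they can be discarded downstream.
fn physical_value(raw: f32, meta: &Metadata) -> f32 {
    if raw == meta.blank {
        f32::NAN
    } else {
        meta.offset + meta.scale * raw
    }
}

fn main() {
    let meta = Metadata { blank: -32768.0, scale: 1.0, offset: 32768.0 };
    assert_eq!(physical_value(500.0, &meta), 33268.0);
    assert!(physical_value(-32768.0, &meta).is_nan());
    println!("ok");
}
```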

View File

@@ -24,7 +24,7 @@ use super::Url;
use moclib::deser::fits;
use wasm_bindgen::JsCast;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, Response};
use web_sys::{RequestInit, RequestMode, Response};
use moclib::moc::range::op::convert::convert_to_u64;
@@ -55,8 +55,6 @@ impl From<query::Moc> for MOCRequest {
url,
params,
hips_cdid,
credentials,
mode,
} = query;
let url_clone = url.clone();
@@ -65,8 +63,7 @@ impl From<query::Moc> for MOCRequest {
let request = Request::new(async move {
let mut opts = RequestInit::new();
opts.method("GET");
opts.mode(mode);
opts.credentials(credentials);
opts.mode(RequestMode::Cors);
let request = web_sys::Request::new_with_str_and_init(&url_clone, &opts).unwrap_abort();
let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;
@@ -77,7 +74,10 @@ impl From<query::Moc> for MOCRequest {
let bytes_buf = js_sys::Uint8Array::new(&array_buffer);
let num_bytes = bytes_buf.length() as usize;
let mut bytes = vec![0; num_bytes];
let mut bytes = Vec::with_capacity(num_bytes);
unsafe {
bytes.set_len(num_bytes);
}
bytes_buf.copy_to(&mut bytes[..]);
// Coosys is permissive because we load a moc

View File

@@ -1,10 +1,11 @@
// A request image should not be used outside this module
// but contained inside a more specific type of query (e.g. for a tile or allsky)
pub mod allsky;
pub mod blank;
pub mod moc;
pub mod tile;
use wasm_bindgen_futures::JsFuture;
/* ------------------------------------- */
use crate::time::Time;
use std::cell::{Cell, RefCell};
@@ -78,11 +79,13 @@ where
}
use allsky::AllskyRequest;
use blank::PixelMetadataRequest;
use moc::MOCRequest;
use tile::TileRequest;
pub enum RequestType {
Tile(TileRequest),
Allsky(AllskyRequest),
PixelMetadata(PixelMetadataRequest),
Moc(MOCRequest), //..
}
@@ -92,6 +95,7 @@ impl RequestType {
match self {
RequestType::Tile(request) => &request.id,
RequestType::Allsky(request) => &request.id,
RequestType::PixelMetadata(request) => &request.id,
RequestType::Moc(request) => &request.hips_cdid,
}
}
@@ -102,47 +106,33 @@ impl<'a> From<&'a RequestType> for Option<Resource> {
match request {
RequestType::Tile(request) => Option::<Tile>::from(request).map(Resource::Tile),
RequestType::Allsky(request) => Option::<Allsky>::from(request).map(Resource::Allsky),
RequestType::PixelMetadata(request) => {
Option::<PixelMetadata>::from(request).map(Resource::PixelMetadata)
}
RequestType::Moc(request) => Option::<Moc>::from(request).map(Resource::Moc),
}
}
}
use crate::Abort;
use allsky::Allsky;
use blank::PixelMetadata;
use moc::Moc;
use tile::Tile;
pub enum Resource {
Tile(Tile),
Allsky(Allsky),
PixelMetadata(PixelMetadata),
Moc(Moc),
}
use web_sys::RequestCredentials;
async fn query_html_image(
url: &str,
credentials: RequestCredentials,
) -> Result<web_sys::HtmlImageElement, JsValue> {
let image = web_sys::HtmlImageElement::new().unwrap_abort();
let image_cloned = image.clone();
// Set the CORS and credentials options for the image
let cors_value = match credentials {
RequestCredentials::Include => Some("use-credentials"),
RequestCredentials::SameOrigin => Some("anonymous"),
_ => Some(""),
};
let promise = js_sys::Promise::new(
&mut (Box::new(move |resolve, reject| {
// Ask for CORS permissions
image_cloned.set_cross_origin(cors_value);
image_cloned.set_onload(Some(&resolve));
image_cloned.set_onerror(Some(&reject));
image_cloned.set_src(url);
}) as Box<dyn FnMut(js_sys::Function, js_sys::Function)>),
);
let _ = JsFuture::from(promise).await?;
Ok(image)
}
/*
impl Resource {
pub fn id(&self) -> &String {
match self {
Resource::Tile(tile) => &format!("{:?}:{:?}", tile.cell.depth(), tile.cell.idx()),
Resource::Allsky(allsky) => allsky.get_hips_cdid(),
Resource::PixelMetadata(PixelMetadata { hips_cdid, .. }) => hips_cdid,
Resource::Moc(moc) => moc.get_hips_cdid(),
}
}
}*/

View File

@@ -1,15 +1,14 @@
use crate::healpix::cell::HEALPixCell;
use crate::renderable::CreatorDid;
use al_core::image::format::ImageFormatType;
use al_core::texture::format::{PixelType, RGB8U, RGBA8U};
use al_core::image::format::{ChannelType, ImageFormatType, RGB8U, RGBA8U};
use crate::downloader::query;
use al_core::image::ImageType;
use super::Url;
use super::{Request, RequestType};
use crate::downloader::request::query_html_image;
use crate::downloader::QueryId;
pub struct TileRequest {
request: Request<ImageType>,
pub id: QueryId,
@@ -27,11 +26,30 @@ impl From<TileRequest> for RequestType {
}
}
async fn query_html_image(url: &str) -> Result<web_sys::HtmlImageElement, JsValue> {
let image = web_sys::HtmlImageElement::new().unwrap_abort();
let image_cloned = image.clone();
let promise = js_sys::Promise::new(
&mut (Box::new(move |resolve, reject| {
// Ask for CORS permissions
image_cloned.set_cross_origin(Some(""));
image_cloned.set_onload(Some(&resolve));
image_cloned.set_onerror(Some(&reject));
image_cloned.set_src(&url);
}) as Box<dyn FnMut(js_sys::Function, js_sys::Function)>),
);
let _ = JsFuture::from(promise).await?;
Ok(image)
}
use al_core::image::html::HTMLImage;
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
use wasm_bindgen_futures::JsFuture;
use web_sys::{RequestInit, Response};
use web_sys::{RequestInit, RequestMode, Response};
impl From<query::Tile> for TileRequest {
// Create a tile request associated to a HiPS
fn from(query: query::Tile) -> Self {
@@ -40,70 +58,120 @@ impl From<query::Tile> for TileRequest {
cell,
url,
hips_cdid,
credentials,
mode,
id,
channel: slice,
size,
} = query;
let url_clone = url.clone();
let channel = format.get_pixel_format();
let channel = format.get_channel();
let window = web_sys::window().unwrap_abort();
let request = match channel {
PixelType::RGB8U => Request::new(async move {
ChannelType::RGB8U => Request::new(async move {
/*let mut opts = RequestInit::new();
opts.method("GET");
opts.mode(RequestMode::Cors);
let request = web_sys::Request::new_with_str_and_init(&url_clone, &opts).unwrap_abort();
let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;
// `resp_value` is a `Response` object.
debug_assert!(resp_value.is_instance_of::<Response>());
let resp: Response = resp_value.dyn_into()?;*/
/*/// Bitmap version
let blob = JsFuture::from(resp.blob()?).await?.into();
let image = JsFuture::from(window.create_image_bitmap_with_blob(&blob)?)
.await?
.into();
let image = Bitmap::new(image);
Ok(ImageType::JpgImageRgb8u { image })*/
/*
/// Raw image decoding
let buf = JsFuture::from(resp.array_buffer()?).await?;
let raw_bytes = js_sys::Uint8Array::new(&buf).to_vec();
let image = ImageBuffer::<RGB8U>::from_raw_bytes(&raw_bytes[..], 512, 512)?;
Ok(ImageType::RawRgb8u { image })
*/
// HTMLImageElement
let image = query_html_image(&url_clone, credentials).await?;
let image = query_html_image(&url_clone).await?;
// The image has been resolved
Ok(ImageType::HTMLImageRgb8u {
image: HTMLImage::<RGB8U>::new(image),
})
}),
PixelType::RGBA8U => Request::new(async move {
ChannelType::RGBA8U => Request::new(async move {
/*let mut opts = RequestInit::new();
opts.method("GET");
opts.mode(RequestMode::Cors);
let request = web_sys::Request::new_with_str_and_init(&url_clone, &opts).unwrap_abort();
let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;
// `resp_value` is a `Response` object.
debug_assert!(resp_value.is_instance_of::<Response>());
let resp: Response = resp_value.dyn_into()?;*/
/*/// Bitmap version
let blob = JsFuture::from(resp.blob()?).await?.into();
let image = JsFuture::from(window.create_image_bitmap_with_blob(&blob)?)
.await?
.into();
let image = Bitmap::new(image);
Ok(ImageType::PngImageRgba8u { image })*/
/*
/// Raw image decoding
let buf = JsFuture::from(resp.array_buffer()?).await?;
let raw_bytes = js_sys::Uint8Array::new(&buf).to_vec();
let image = ImageBuffer::<RGBA8U>::from_raw_bytes(&raw_bytes[..], 512, 512)?;
Ok(ImageType::RawRgba8u { image })
*/
// HTMLImageElement
let image = query_html_image(&url_clone, credentials).await?;
let image = query_html_image(&url_clone).await?;
// The image has been resolved
Ok(ImageType::HTMLImageRgba8u {
image: HTMLImage::<RGBA8U>::new(image),
})
}),
PixelType::R32F | PixelType::R32I | PixelType::R16I | PixelType::R8U => {
Request::new(async move {
let mut opts = RequestInit::new();
opts.method("GET");
opts.mode(mode);
opts.credentials(credentials);
ChannelType::R32F
| ChannelType::R64F
| ChannelType::R32I
| ChannelType::R16I
| ChannelType::R8UI => Request::new(async move {
let mut opts = RequestInit::new();
opts.method("GET");
opts.mode(RequestMode::Cors);
let request =
web_sys::Request::new_with_str_and_init(&url_clone, &opts).unwrap_abort();
let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;
// `resp_value` is a `Response` object.
debug_assert!(resp_value.is_instance_of::<Response>());
let resp: Response = resp_value.dyn_into()?;
// See https://github.com/MattiasBuelens/wasm-streams/blob/f6dacf58a8826dc67923ab4a3bae87635690ca64/examples/fetch_as_stream.rs#L25-L33
/*let raw_body = resp.body().ok_or(JsValue::from_str("Cannot extract readable stream"))?;
let body = ReadableStream::from_raw(raw_body.dyn_into()?);
let request =
web_sys::Request::new_with_str_and_init(&url_clone, &opts).unwrap_abort();
let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;
// `resp_value` is a `Response` object.
debug_assert!(resp_value.is_instance_of::<Response>());
let resp: Response = resp_value.dyn_into()?;
// See https://github.com/MattiasBuelens/wasm-streams/blob/f6dacf58a8826dc67923ab4a3bae87635690ca64/examples/fetch_as_stream.rs#L25-L33
/*let raw_body = resp.body().ok_or(JsValue::from_str("Cannot extract readable stream"))?;
let body = ReadableStream::from_raw(raw_body.dyn_into()?);
// Convert the JS ReadableStream to a Rust stream
let mut reader = body.try_into_async_read().map_err(|_| JsValue::from_str("readable stream locked"))?;
let image = Fits::new(reader).await?;
*/
if resp.ok() {
let array_buffer = JsFuture::from(resp.array_buffer()?).await?;
let raw_bytes = js_sys::Uint8Array::new(&array_buffer);
// Convert the JS ReadableStream to a Rust stream
let mut reader = body.try_into_async_read().map_err(|_| JsValue::from_str("readable stream locked"))?;
let image = Fits::new(reader).await?;
*/
if resp.ok() {
let array_buffer = JsFuture::from(resp.array_buffer()?).await?;
let raw_bytes = js_sys::Uint8Array::new(&array_buffer);
Ok(ImageType::FitsRawBytes {
raw_bytes,
size: (size, size),
})
} else {
Err(JsValue::from_str(
"Response status code not between 200-299.",
))
}
})
}
Ok(ImageType::FitsImage { raw_bytes })
} else {
Err(JsValue::from_str(
"Response status code not between 200-299.",
))
}
}),
_ => todo!(),
};
Self {

View File

@@ -16,7 +16,7 @@ use healpix::compass_point::Ordinal;
use healpix::compass_point::OrdinalMap;
use crate::utils;
use crate::Abort;
impl HEALPixCell {
// Build the parent cell
#[inline(always)]
@@ -141,7 +141,7 @@ impl HEALPixCell {
let mut smallest_ancestor = c1.smallest_common_ancestor(c2);
while let (Some(ancestor), Some(cell)) = (smallest_ancestor, cells.next()) {
smallest_ancestor = ancestor.smallest_common_ancestor(cell);
smallest_ancestor = ancestor.smallest_common_ancestor(&cell);
}
smallest_ancestor
@@ -225,6 +225,21 @@ impl HEALPixCell {
}
}
#[inline(always)]
pub(crate) fn has_7_neigh(&self) -> bool {
let base_cell = self.ancestor(self.depth());
let nside_minus_one = (self.nside() - 1) as u32;
let (x, y) = self.offset_in_parent(&base_cell);
match base_cell.idx() {
0..=3 => (x == 0 && y == nside_minus_one) || (y == 0 && x == nside_minus_one),
4..=7 => (x == 0 && y == 0) || (x == nside_minus_one && y == nside_minus_one),
8..=11 => (x == 0 && y == nside_minus_one) || (y == 0 && x == nside_minus_one),
_ => unreachable!()
}
}
#[inline(always)]
pub(crate) fn is_on_base_cell_edges(&self) -> bool {
let base_cell = self.ancestor(self.depth());
@@ -474,12 +489,12 @@ impl Iterator for HEALPixTilesIter {
// Follow the z-order curve
impl PartialOrd for HEALPixCell {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
self.z_29().partial_cmp(&other.z_29())
}
}
impl Ord for HEALPixCell {
fn cmp(&self, other: &Self) -> Ordering {
self.z_29().cmp(&other.z_29())
self.partial_cmp(other).unwrap_abort()
}
}
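Both versions of the ordering above compare cells along the Z-order curve by projecting every cell to the deepest order (29), so cells of different depths share one comparable key and a parent collates with its first descendant. A self-contained hedged sketch with a toy cell type and an assumed z_29 of idx << 2*(29 - depth):

```rust
/// A toy HEALPix-like cell: (depth, index inside that depth).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Cell {
    depth: u8,
    idx: u64,
}

impl Cell {
    /// Project the cell onto the deepest order (29) of the Z-order curve,
    /// so cells of different depths share one comparable key.
    fn z_29(&self) -> u64 {
        self.idx << (2 * (29 - self.depth as u64))
    }
}

impl Ord for Cell {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.z_29().cmp(&other.z_29())
    }
}

impl PartialOrd for Cell {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    // A parent compares equal to its first child and before its next sibling.
    let parent = Cell { depth: 2, idx: 5 };
    let first_child = Cell { depth: 3, idx: 20 }; // 5 * 4
    let next_sibling = Cell { depth: 2, idx: 6 };
    assert_eq!(parent.cmp(&first_child), std::cmp::Ordering::Equal);
    assert!(parent < next_sibling);
    println!("z-order comparison ok");
}
```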

View File

@@ -87,8 +87,7 @@ impl HEALPixCoverage {
}
pub fn contains_lonlat(&self, lonlat: &LonLatT<f64>) -> bool {
self.0
.is_in(lonlat.lon().to_radians(), lonlat.lat().to_radians())
self.0.is_in(lonlat.lon().to_radians(), lonlat.lat().to_radians())
}
// O(log2(N))

View File

@@ -70,7 +70,7 @@ impl IdxVec {
let bbox1 = a1.get_containing_hpx_cell();
let bbox2 = a2.get_containing_hpx_cell();
bbox1.cmp(bbox2)
bbox1.cmp(&bbox2)
});
// At this point the arcs are sorted by the z-order curve of their

View File

@@ -1,4 +1,4 @@
pub mod cell;
pub mod coverage;
pub mod index_vector;
pub mod utils;
pub mod index_vector;

View File

@@ -1,11 +1,12 @@
use crate::healpix::cell::HEALPixCell;
use crate::math::lonlat::LonLatT;
use crate::math::angle::ToAngle;
/// A simple wrapper around some core methods
/// of cdshealpix
///
/// cdshealpix is developed by F-X. Pineau.
/// Please check its github repo: https://github.com/cds-astro/cds-healpix-rust
use crate::healpix::cell::HEALPixCell;
use crate::math::angle::ToAngle;
use crate::math::lonlat::LonLatT;
/// Get the vertices of an HEALPix cell
use cgmath::BaseFloat;
#[allow(dead_code)]

View File

@@ -23,7 +23,7 @@ impl Inertia {
ampl,
speed: ampl,
axis,
north_up,
north_up
}
}

View File

@@ -88,11 +88,9 @@ use math::projection::*;
use moclib::moc::RangeMOCIntoIterator;
//use votable::votable::VOTableWrapper;
use crate::tile_fetcher::HiPSLocalFiles;
use al_api::moc::MOCOptions;
use wasm_bindgen::prelude::*;
use web_sys::HtmlElement;
use fitsrs::{WCSParams, WCS};
use al_api::moc::MOCOptions;
use crate::math::angle::ToAngle;
@@ -189,7 +187,7 @@ impl WebClient {
#[wasm_bindgen(js_name = isInerting)]
pub fn is_inerting(&self) -> bool {
self.app.is_inerting()
return self.app.is_inerting();
}
/// Update the view
@@ -352,31 +350,33 @@ impl WebClient {
Ok(())
}
#[wasm_bindgen(js_name = addFITSImage)]
pub fn add_fits_image(
#[wasm_bindgen(js_name = addImageFITS)]
pub fn add_image_fits(
&mut self,
bytes: &[u8],
stream: web_sys::ReadableStream,
cfg: JsValue,
layer: String,
) -> Result<js_sys::Promise, JsValue> {
let cfg: ImageMetadata = serde_wasm_bindgen::from_value(cfg)?;
self.app.add_fits_image(bytes, cfg, layer)
self.app.add_image_fits(stream, cfg, layer)
}
#[wasm_bindgen(js_name = addRGBAImage)]
pub fn add_rgba_image(
#[wasm_bindgen(js_name = addImageWithWCS)]
pub fn add_image_with_wcs(
&mut self,
bytes: &[u8],
stream: web_sys::ReadableStream,
wcs: JsValue,
cfg: JsValue,
layer: String,
) -> Result<js_sys::Promise, JsValue> {
use wcs::{WCSParams, WCS};
let cfg: ImageMetadata = serde_wasm_bindgen::from_value(cfg)?;
let wcs_params: WCSParams = serde_wasm_bindgen::from_value(wcs)?;
let wcs = WCS::new(&wcs_params).map_err(|e| JsValue::from_str(&format!("{:?}", e)))?;
let wcs = WCS::new(&wcs_params).map_err(|e| JsValue::from_str(&format!("{e:?}")))?;
self.app.add_rgba_image(layer, bytes, wcs, cfg)
self.app
.add_image_from_blob_and_wcs(layer, stream, wcs, cfg)
}
#[wasm_bindgen(js_name = removeLayer)]
@@ -595,8 +595,7 @@ impl WebClient {
/// Set the zoom factor of the view
#[wasm_bindgen(js_name = setZoomFactor)]
pub fn set_zoom_factor(&mut self, zoom_factor: f64) -> Result<(), JsValue> {
self.app.set_zoom_factor(zoom_factor);
Ok(())
Ok(self.app.set_zoom_factor(zoom_factor))
}
/// Set the center of the view in ICRS coosys
@@ -726,12 +725,16 @@ impl WebClient {
let vertices = lon
.iter()
.zip(lat.iter())
.flat_map(|(&lon, &lat)| {
self.app
.map(|(&lon, &lat)| {
let xy = self
.app
.world_to_screen(lon, lat)
.map(|v| [v.x, v.y])
.unwrap_or([0.0, 0.0])
.unwrap_or([0.0, 0.0]);
xy
})
.flatten()
.collect::<Vec<_>>();
vertices.into_boxed_slice()
@@ -897,7 +900,7 @@ impl WebClient {
Ok(())
}
/// Project a great circle arc on the screen
/// Project a line to the screen
///
/// # Returns
///
@@ -912,33 +915,23 @@ impl WebClient {
/// * `lat1` - The latitude in degrees of the starting line point
/// * `lon2` - The longitude in degrees of the ending line point
/// * `lat2` - The latitude in degrees of the ending line point
#[wasm_bindgen(js_name = projectGreatCircleArc)]
pub fn project_great_circle_arc(
/*#[wasm_bindgen(js_name = projectLine)]
pub fn project_line(
&self,
lon1: f64,
lat1: f64,
lon2: f64,
lat2: f64,
) -> Result<Box<[f64]>, JsValue> {
let vertices = crate::renderable::line::great_circle_arc::project(
lon1.to_radians(),
lat1.to_radians(),
lon2.to_radians(),
lat2.to_radians(),
&self.app.camera,
&self.app.projection,
);
let vertices = self.app.project_line(lon1, lat1, lon2, lat2);
let vertices = vertices
.into_iter()
.flat_map(|ndc| {
let sxy = crate::math::projection::ndc_to_screen_space(&ndc, &self.app.camera);
[sxy.x, sxy.y]
})
.flat_map(|v| vec![v.x, v.y])
.collect::<Vec<_>>();
Ok(vertices.into_boxed_slice())
}
}*/
/// Get the list of colormap supported
///
@@ -982,7 +975,7 @@ impl WebClient {
let grad = colorgrad::CustomGradient::new()
.colors(&rgba_colors?)
.build()
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?;
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?;
let cmap = Colormap::new(&label, grad);
self.app.add_cmap(label, cmap)?;
@@ -1012,21 +1005,10 @@ impl WebClient {
/// * `x` - The x screen coordinate in pixels
/// * `y` - The y screen coordinate in pixels
/// * `base_url` - The base url of the hips identifying it
#[wasm_bindgen(js_name = probePixel)]
pub fn probe_pixel(&self, x: f64, y: f64, layer: String) -> Result<JsValue, JsValue> {
self.app.read_pixel(x, y, layer.as_str())
}
#[wasm_bindgen(js_name = probeLineOfPixels)]
pub fn probe_line_of_pixels(
&self,
x1: f64,
y1: f64,
x2: f64,
y2: f64,
layer: String,
) -> Result<Vec<JsValue>, JsValue> {
self.app.read_line_of_pixels(x1, y1, x2, y2, layer.as_str())
#[wasm_bindgen(js_name = readPixel)]
pub fn read_pixel(&self, x: f64, y: f64, layer: String) -> Result<JsValue, JsValue> {
let pixel = self.app.read_pixel(&Vector2::new(x, y), layer.as_str())?;
Ok(pixel)
}
#[wasm_bindgen(js_name = getVisibleCells)]
@@ -1059,7 +1041,11 @@ impl WebClient {
}
#[wasm_bindgen(js_name = addJSONMoc)]
pub fn add_json_moc(&mut self, options: MOCOptions, data: &JsValue) -> Result<(), JsValue> {
pub fn add_json_moc(
&mut self,
options: MOCOptions,
data: &JsValue,
) -> Result<(), JsValue> {
let str: String = js_sys::JSON::stringify(data)?.into();
let moc = moclib::deser::json::from_json_aladin::<u64, Hpx<u64>>(&str)
@@ -1076,7 +1062,7 @@ impl WebClient {
#[wasm_bindgen(js_name = addFITSMOC)]
pub fn add_fits_moc(&mut self, options: MOCOptions, data: &[u8]) -> Result<(), JsValue> {
//let bytes = js_sys::Uint8Array::new(array_buffer).to_vec();
let moc = match fits::from_fits_ivoa_custom(Cursor::new(data), false)
let moc = match fits::from_fits_ivoa_custom(Cursor::new(&data[..]), false)
.map_err(|e| JsValue::from_str(&e.to_string()))?
{
MocIdxType::U16(MocQtyType::<u16, _>::Hpx(moc)) => {
@@ -1136,7 +1122,7 @@ impl WebClient {
let v_in = &Vector3::new(1.0, 0.0, 0.0);
let mut moc = HEALPixCoverage::from_3d_coos(pixel_d as u8 - 1, vertex_it, v_in);
let mut moc = HEALPixCoverage::from_3d_coos(pixel_d as u8 - 1, vertex_it, &v_in);
if moc.sky_fraction() > 0.5 {
moc = moc.not();
}
@@ -1161,7 +1147,12 @@ impl WebClient {
}
#[wasm_bindgen(js_name = mocContains)]
pub fn moc_contains(&mut self, moc_uuid: String, lon: f64, lat: f64) -> Result<bool, JsValue> {
pub fn moc_contains(
&mut self,
moc_uuid: String,
lon: f64,
lat: f64,
) -> Result<bool, JsValue> {
let moc = self
.app
.get_moc(&moc_uuid)
@@ -1188,9 +1179,9 @@ impl WebClient {
.cells()
.to_json_aladin(None, &mut buf)
.map(|()| unsafe { String::from_utf8_unchecked(buf) })
.map_err(|err| JsValue::from_str(&format!("{err:?}")))?;
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))?;
serde_wasm_bindgen::to_value(&json).map_err(|err| JsValue::from_str(&format!("{err:?}")))
serde_wasm_bindgen::to_value(&json).map_err(|err| JsValue::from_str(&format!("{:?}", err)))
}
#[wasm_bindgen(js_name = getMOCSkyFraction)]
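projectGreatCircleArc (kept in one branch of this hunk, commented out in the other) needs samples along the arc between its two endpoints before projecting each one to the screen. A hedged sketch of only the sampling step, using spherical linear interpolation between the two unit directions; the y-up lon/lat convention below is an assumption for the example, not necessarily the library's:

```rust
use cgmath::{InnerSpace, Vector3};

/// Convert (lon, lat) in radians to a unit vector on the sphere (y-up convention).
fn radec_to_xyz(lon: f64, lat: f64) -> Vector3<f64> {
    Vector3::new(lat.cos() * lon.cos(), lat.sin(), lat.cos() * lon.sin())
}

/// Sample `n + 1` points along the great-circle arc between two directions (slerp).
fn sample_arc(a: Vector3<f64>, b: Vector3<f64>, n: usize) -> Vec<Vector3<f64>> {
    let omega = a.dot(b).clamp(-1.0, 1.0).acos();
    (0..=n)
        .map(|i| {
            let t = i as f64 / n as f64;
            if omega.abs() < 1e-9 {
                return a; // endpoints coincide: nothing to interpolate
            }
            ((a * ((1.0 - t) * omega).sin()) + (b * (t * omega).sin())) / omega.sin()
        })
        .map(|v| v.normalize())
        .collect()
}

fn main() {
    let a = radec_to_xyz(0.0, 0.0);
    let b = radec_to_xyz(90f64.to_radians(), 0.0);
    for v in sample_arc(a, b, 4) {
        println!("({:+.3}, {:+.3}, {:+.3})", v.x, v.y, v.z);
    }
}
```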

View File

@@ -1,5 +1,5 @@
use crate::Abort;
use cgmath::BaseFloat;
use crate::Abort;
// ArcDeg wrapper structure
#[derive(Clone, Copy)]
pub struct ArcDeg<T: BaseFloat>(pub T);
@@ -42,12 +42,12 @@ where
}
}
impl<T> Display for ArcDeg<T>
impl<T> ToString for ArcDeg<T>
where
T: BaseFloat + Display,
T: BaseFloat + ToString,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
fn to_string(&self) -> String {
self.0.to_string()
}
}
@@ -55,6 +55,7 @@ where
#[derive(Clone, Copy)]
pub struct ArcHour<T: BaseFloat>(pub T);
impl<T> From<Rad<T>> for ArcHour<T>
where
T: BaseFloat,
@@ -80,12 +81,12 @@ where
}
}
impl<T> Display for ArcHour<T>
impl<T> ToString for ArcHour<T>
where
T: BaseFloat + Display,
T: BaseFloat + ToString,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
fn to_string(&self) -> String {
self.0.to_string()
}
}
@@ -131,12 +132,12 @@ where
}
}
impl<T> Display for ArcMin<T>
impl<T> ToString for ArcMin<T>
where
T: BaseFloat + Display,
T: BaseFloat + ToString,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
fn to_string(&self) -> String {
self.0.to_string()
}
}
@@ -170,12 +171,12 @@ where
}
}
impl<T> Display for ArcSec<T>
impl<T> ToString for ArcSec<T>
where
T: BaseFloat + Display,
T: BaseFloat + ToString,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
fn to_string(&self) -> String {
self.0.to_string()
}
}
@@ -191,6 +192,130 @@ where
}
use al_api::angle::Format;
/*
pub enum SerializeFmt {
DMS,
HMS,
DMM,
DD,
}
use al_api::angle_fmt::AngleSerializeFmt;
impl From<AngleSerializeFmt> for SerializeFmt {
fn from(value: AngleSerializeFmt) -> Self {
match value {
AngleSerializeFmt::DMS => SerializeFmt::DMS,
AngleSerializeFmt::HMS => SerializeFmt::HMS,
AngleSerializeFmt::DMM => SerializeFmt::DMM,
AngleSerializeFmt::DD => SerializeFmt::DD,
}
}
}
impl SerializeFmt {
pub fn to_string<S: BaseFloat + ToString>(&self, angle: Angle<S>) -> String {
match &self {
Self::DMS => DMS::to_string(angle),
Self::HMS => HMS::to_string(angle),
Self::DMM => DMM::to_string(angle),
Self::DD => DD::to_string(angle),
}
}
}*/
/*pub trait SerializeToString {
fn to_string(&self) -> String;
}
impl<S> SerializeToString for Angle<S>
where
S: BaseFloat + ToString,
{
fn to_string<F: FormatType>(&self) -> String {
F::to_string(*self)
}
}*/
/*
pub struct DMS;
pub struct HMS;
pub struct DMM;
pub struct DD;
pub trait FormatType {
fn to_string<S: BaseFloat + ToString>(angle: Angle<S>) -> String;
}
impl FormatType for DD {
fn to_string<S: BaseFloat + ToString>(angle: Angle<S>) -> String {
let angle = Rad(angle.0);
let degrees: ArcDeg<S> = angle.into();
degrees.to_string()
}
}
impl FormatType for DMM {
fn to_string<S: BaseFloat + ToString>(angle: Angle<S>) -> String {
let angle = Rad(angle.0);
let mut degrees: ArcDeg<S> = angle.into();
let minutes = degrees.get_frac_minutes();
degrees.truncate();
let mut result = degrees.to_string() + " ";
result += &minutes.to_string();
result
}
}
impl FormatType for DMS {
fn to_string<S: BaseFloat + ToString>(angle: Angle<S>) -> String {
let angle = Rad(angle.0);
let degrees: ArcDeg<S> = angle.into();
let minutes = degrees.get_frac_minutes();
let seconds = minutes.get_frac_seconds();
let num_sec_per_minutes = S::from(60).unwrap_abort();
let degrees = degrees.trunc();
let minutes = minutes.trunc() % num_sec_per_minutes;
let seconds = seconds.trunc() % num_sec_per_minutes;
let mut result = degrees.to_string() + "°";
result += &minutes.to_string();
result += "\'";
result += &seconds.to_string();
result += "\'\'";
result
}
}
impl FormatType for HMS {
fn to_string<S: BaseFloat + ToString>(angle: Angle<S>) -> String {
let angle = Rad(angle.0);
let hours: ArcHour<S> = angle.into();
let minutes = hours.get_frac_minutes();
let seconds = minutes.get_frac_seconds();
let num_sec_per_minutes = S::from(60).unwrap_abort();
let hours = hours.trunc();
let minutes = minutes.trunc() % num_sec_per_minutes;
let seconds = seconds.trunc() % num_sec_per_minutes;
let mut result = hours.to_string() + "h";
result += &minutes.to_string();
result += "\'";
result += &seconds.to_string();
result += "\'\'";
result
}
}*/
#[derive(Clone, Copy, Debug, Eq, Hash, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -205,10 +330,7 @@ where
{
pub fn new<T: Into<Rad<S>>>(angle: T) -> Angle<S> {
let radians: Rad<S> = angle.into();
Angle {
rad: radians.0,
fmt: AngleFormatter::default(),
}
Angle { rad: radians.0, fmt: AngleFormatter::default() }
}
pub fn cos(&self) -> S {
@@ -307,10 +429,7 @@ where
S: BaseFloat,
{
fn to_angle(self) -> Angle<S> {
Angle {
rad: self,
fmt: Default::default(),
}
Angle { rad: self, fmt: Default::default() }
}
}
@@ -566,7 +685,7 @@ pub enum AngleFormatter {
Decimal {
/// Number of digit of precision
prec: u8,
},
}
}
impl Default for AngleFormatter {
@@ -592,7 +711,7 @@ impl Display for Angle<f64> {
// Format the unit value to sexagesimal.
// The precision 8 corresponds to the formatting: deg/hour min sec.ddd
write!(f, "{}", Format::toSexagesimal(unit, 8, plus))
}
},
AngleFormatter::Decimal { prec } => {
write!(f, "{:.1$}°", self.to_degrees(), prec as usize)
}
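The commented-out DMS/HMS helpers and the new sexagesimal formatter all rest on the same arithmetic: split a decimal angle into whole degrees (or hours), whole minutes, and residual seconds, with 60 units per step. A compact hedged sketch of the degrees to D°M'S'' direction (to_dms is illustrative and ignores the precision handling of Format::toSexagesimal):

```rust
/// Format a decimal angle in degrees as D°M'S.sss'', keeping the sign on the degrees.
fn to_dms(deg: f64) -> String {
    let sign = if deg < 0.0 { "-" } else { "" };
    let total_seconds = deg.abs() * 3600.0;
    let d = (total_seconds / 3600.0).trunc() as u32;
    let m = ((total_seconds / 60.0) % 60.0).trunc() as u32;
    let s = total_seconds % 60.0;
    format!("{sign}{d}°{m:02}'{s:06.3}''")
}

fn main() {
    assert_eq!(to_dms(10.5), "10°30'00.000''");
    println!("{}", to_dms(-0.2625)); // approximately -0°15'45.000''
}
```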

View File

@@ -27,7 +27,7 @@ where
/// * ``lat`` - Latitude
pub fn new(mut lon: Angle<S>, lat: Angle<S>) -> LonLatT<S> {
if lon.to_radians() < S::zero() {
lon += S::from(TWICE_PI).unwrap_abort();
lon = lon + S::from(TWICE_PI).unwrap_abort();
}
LonLatT(lon, lat)
@@ -49,8 +49,8 @@ where
}
use crate::math::angle::ToAngle;
impl From<fitsrs::wcs::LonLat> for LonLatT<f64> {
fn from(lonlat: fitsrs::wcs::LonLat) -> Self {
impl From<wcs::LonLat> for LonLatT<f64> {
fn from(lonlat: wcs::LonLat) -> Self {
Self(lonlat.lon().to_angle(), lonlat.lat().to_angle())
}
}
@@ -121,8 +121,7 @@ pub fn ang_between_lonlat<S: BaseFloat>(lonlat1: LonLatT<S>, lonlat2: LonLatT<S>
let abs_diff_lon = (lonlat1.lon() - lonlat2.lon()).abs();
(lonlat1.lat().sin() * lonlat2.lat().sin()
+ lonlat1.lat().cos() * lonlat2.lat().cos() * abs_diff_lon.cos())
.acos()
.to_angle()
.acos().to_angle()
}
#[inline]
@@ -181,7 +180,7 @@ pub fn unproj(
camera: &CameraViewPort,
) -> Option<LonLatT<f64>> {
projection
.normalized_device_to_model_space(ndc_xy, camera)
.normalized_device_to_model_space(&ndc_xy, camera)
.map(|model_pos| model_pos.lonlat())
}
@@ -202,14 +201,13 @@ pub fn unproj_from_screen(
camera: &CameraViewPort,
) -> Option<LonLatT<f64>> {
projection
.screen_to_model_space(xy, camera)
.screen_to_model_space(&xy, camera)
.map(|model_pos| model_pos.lonlat())
}
#[inline]
pub fn is_in(v1: &Vector3<f64>, v2: &Vector3<f64>, v: &Vector3<f64>) -> bool {
let theta = crate::math::vector::angle3(v1, v2).abs();
let theta = crate::math::vector::angle3(&v1, &v2).abs();
crate::math::vector::angle3(v1, v).abs() < theta
&& crate::math::vector::angle3(v, v2).abs() < theta
crate::math::vector::angle3(&v1, &v).abs() < theta && crate::math::vector::angle3(&v, &v2).abs() < theta
}
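is_in above answers a narrow question used by the north-up drag code: does a third direction lie between v1 and v2, in the sense of being within their mutual angular separation of both? A self-contained sketch of the same test with cgmath, applied to pole-crossing detection as in the earlier app.rs hunk:

```rust
use cgmath::{InnerSpace, Vector3};

/// Angle between two 3D directions, in radians.
fn angle3(a: &Vector3<f64>, b: &Vector3<f64>) -> f64 {
    a.normalize().dot(b.normalize()).clamp(-1.0, 1.0).acos()
}

/// Rough test: is `v` between `v1` and `v2` along their great-circle arc?
fn is_in(v1: &Vector3<f64>, v2: &Vector3<f64>, v: &Vector3<f64>) -> bool {
    let theta = angle3(v1, v2).abs();
    angle3(v1, v).abs() < theta && angle3(v, v2).abs() < theta
}

fn main() {
    let north_pole = Vector3::new(0.0, 1.0, 0.0);
    // Camera centers just before and just after passing over the pole (y is "up" here).
    let before = Vector3::new(0.1, 0.9, 0.0).normalize();
    let after = Vector3::new(-0.1, 0.9, 0.0).normalize();
    println!("crossed north pole: {}", is_in(&before, &after, &north_pole));
}
```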

View File

@@ -5,7 +5,7 @@ pub const HALF_PI: f64 = std::f64::consts::PI * 0.5;
pub const MINUS_HALF_PI: f64 = -std::f64::consts::PI * 0.5;
pub const TWO_SQRT_TWO: f64 = 2.82842712475;
pub const SQRT_TWO: f64 = std::f64::consts::SQRT_2;
pub const SQRT_TWO: f64 = 1.41421356237;
pub const ZERO: f64 = 0.0;

View File

@@ -16,7 +16,7 @@ impl ProjDef for Ellipse {
let mut p = Vector2::new(xy.x.abs(), xy.y.abs());
let mut ab = Vector2::new(self.a, self.b);
if p.x == 0.0 {
let sdf = if p.x == 0.0 {
-(self.b - p.y)
} else if p.y == 0.0 {
-(self.a - p.x)
@@ -61,6 +61,8 @@ impl ProjDef for Ellipse {
let q = Vector2::new(ab.x * co, ab.y * si);
(q - p).magnitude() * (p.y - q.y).signum()
}
};
sdf
}
}

View File

@@ -2,4 +2,4 @@ pub mod disk;
pub mod ellipse;
pub mod parabola;
pub mod rect;
pub mod triangle;
pub mod triangle;

View File

@@ -22,12 +22,6 @@ pub struct Cod {
pub x_max: f64,
}
impl Default for Cod {
fn default() -> Self {
Self::new()
}
}
impl Cod {
pub const fn new() -> Self {
Self {
@@ -63,7 +57,7 @@ impl ProjDef for Cod {
let e = b / a;
let ext_ellipse = Translate {
off: center_ellipse,
def: Ellipse { a, b },
def: Ellipse { a: a, b: b },
};
// Small ellipse where projection is not defined

View File

@@ -5,5 +5,5 @@ pub mod par;
pub mod basic;
pub mod op;
pub mod sdf;
pub mod op;

View File

@@ -36,7 +36,7 @@ where
let d = valid_reg.sdf(&v);
// Perform the ray marching advancement
v += dir * d;
v = v + dir * d;
is_in = valid_reg.is_in(&v);
i += 1;
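
The loop touched here is a standard SDF ray march (sphere tracing): the point advances along the direction by exactly the distance the signed distance function reports, so it can never overshoot the region boundary. A minimal 1-D sketch under that assumption, with hypothetical names:

```rust
// Sketch only: march along `dir` towards a region described by a 1-D SDF
// (negative inside, positive outside), stepping by the reported distance.
fn march(mut x: f64, dir: f64, sdf: impl Fn(f64) -> f64, max_iter: usize) -> Option<f64> {
    for _ in 0..max_iter {
        let d = sdf(x);
        if d < 1e-6 {
            return Some(x); // reached (or entered) the region
        }
        x += dir * d; // advance by the safe distance
    }
    None
}

fn main() {
    // Region = [2.0, +inf): outside it, the distance to the region is 2.0 - x.
    let hit = march(0.0, 1.0, |x| 2.0 - x, 64);
    assert!((hit.unwrap() - 2.0).abs() < 1e-5);
}
```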

View File

@@ -163,7 +163,7 @@ impl ProjectionType {
}
/// Screen to model space deprojection
///
/// Perform a screen to the world space deprojection
///
/// # Arguments
@@ -184,6 +184,8 @@ impl ProjectionType {
self.clip_to_world_space(&pos_clip_space)
}
/// Screen to model space deprojection
/// Perform a screen to the world space deprojection
///
/// # Arguments
@@ -260,6 +262,8 @@ impl ProjectionType {
self.world_to_clip_space(&pos_world_space)
}
/// World to screen space projection
/// World to screen space transformation
///
/// # Arguments
@@ -678,7 +682,7 @@ use self::coo_space::XYNDC;
use super::lonlat::LonLatT;
impl<P> Projection for &P
impl<'a, P> Projection for &'a P
where
P: CanonicalProjection,
{
@@ -771,8 +775,8 @@ mod tests {
for y in 0..(h as u32) {
let xy = Vector2::new(x, y);
let clip_xy = Vector2::new(
2.0 * ((xy.x as f64) / w) - 1.0,
2.0 * ((xy.y as f64) / h) - 1.0,
2.0 * ((xy.x as f64) / (w as f64)) - 1.0,
2.0 * ((xy.y as f64) / (h as f64)) - 1.0,
);
let rgb = if let Some(pos) = projection.clip_to_world_space(&clip_xy) {
let pos = pos.normalize();
@@ -785,7 +789,7 @@ mod tests {
Rgb([255, 255, 255])
};
img.put_pixel(x, y, rgb);
img.put_pixel(x as u32, y as u32, rgb);
}
}
img.save(filename).unwrap_abort();
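
The test hunk above maps pixel coordinates to clip/NDC space with the usual linear transform `2 * (x / w) - 1`. A minimal sketch of that mapping as a standalone helper, not the crate's API:

```rust
// Sketch only: pixel (x, y) in a w×h viewport maps linearly onto [-1, 1]².
fn screen_to_ndc(x: f64, y: f64, w: f64, h: f64) -> (f64, f64) {
    (2.0 * (x / w) - 1.0, 2.0 * (y / h) - 1.0)
}

fn main() {
    assert_eq!(screen_to_ndc(0.0, 0.0, 800.0, 600.0), (-1.0, -1.0));
    assert_eq!(screen_to_ndc(400.0, 300.0, 800.0, 600.0), (0.0, 0.0));
}
```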

View File

@@ -1,15 +1,15 @@
use crate::math;
use crate::math::angle::ToAngle;
use cgmath::Vector3;
use cgmath::{BaseFloat, InnerSpace};
use cgmath::{Euler, Quaternion};
use cgmath::Vector3;
#[derive(Clone, Copy, Debug)]
// Internal structure of a rotation, a quaternion
// All operations are done on it
pub struct Rotation<S: BaseFloat>(pub Quaternion<S>);
use cgmath::Matrix3;
use cgmath::{Matrix3};
impl<S> From<&Matrix3<S>> for Rotation<S>
where
S: BaseFloat,
@@ -117,7 +117,7 @@ where
// Define a rotation from a normalized vector
pub fn from_sky_position(pos: &Vector3<S>) -> Rotation<S> {
let (lon, lat) = math::lonlat::xyz_to_radec(pos);
let (lon, lat) = math::lonlat::xyz_to_radec(&pos);
let rot_y = Matrix3::from_angle_y(lon);
let rot_x = Matrix3::from_angle_x(-lat);

View File

@@ -38,18 +38,22 @@ impl BoundingBox {
PoleContained::None => {
// The polygon does not contain any pole
// Meridian 0deg is not crossing the polygon
let (min_lat, max_lat) = lat.iter().fold((f64::MAX, f64::MIN), |(min, max), &b| {
(min.min(b), max.max(b))
});
let (min_lat, max_lat) = lat
.iter()
.fold((std::f64::MAX, std::f64::MIN), |(min, max), &b| {
(min.min(b), max.max(b))
});
let (min_lon, max_lon) = lon.iter().fold((f64::MAX, f64::MIN), |(min, max), &b| {
(min.min(b), max.max(b))
});
let (min_lon, max_lon) = lon
.iter()
.fold((std::f64::MAX, std::f64::MIN), |(min, max), &b| {
(min.min(b), max.max(b))
});
(min_lon..max_lon, min_lat..max_lat)
}
PoleContained::South => {
let max_lat = lat.iter().fold(f64::MIN, |a, b| a.max(*b));
let max_lat = lat.iter().fold(std::f64::MIN, |a, b| a.max(*b));
(
if intersect_zero_meridian {
-PI..PI
@@ -60,7 +64,7 @@ impl BoundingBox {
)
}
PoleContained::North => {
let min_lat = lat.iter().fold(f64::MAX, |a, b| a.min(*b));
let min_lat = lat.iter().fold(std::f64::MAX, |a, b| a.min(*b));
(
if intersect_zero_meridian {
-PI..PI

View File

@@ -13,18 +13,18 @@ pub enum HEALPixBBox {
impl PartialOrd for HEALPixBBox {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
match (self, other) {
(HEALPixBBox::AllSky, HEALPixBBox::AllSky) => Some(Ordering::Equal),
(HEALPixBBox::AllSky, HEALPixBBox::Cell(_)) => Some(Ordering::Greater),
(HEALPixBBox::Cell(_), HEALPixBBox::AllSky) => Some(Ordering::Less),
(HEALPixBBox::Cell(c1), HEALPixBBox::Cell(c2)) => c1.partial_cmp(c2),
}
}
}
impl Ord for HEALPixBBox {
fn cmp(&self, other: &Self) -> Ordering {
match (self, other) {
(HEALPixBBox::AllSky, HEALPixBBox::AllSky) => Ordering::Equal,
(HEALPixBBox::AllSky, HEALPixBBox::Cell(_)) => Ordering::Greater,
(HEALPixBBox::Cell(_), HEALPixBBox::AllSky) => Ordering::Less,
(HEALPixBBox::Cell(c1), HEALPixBBox::Cell(c2)) => c1.cmp(c2),
}
self.partial_cmp(other).unwrap_abort()
}
}
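
The two sides of this hunk differ only in where the comparison logic lives. For a type with a total order, the common pattern is to implement it once in `Ord::cmp` and let `PartialOrd` delegate to it, as sketched below on a simplified stand-in enum (names are illustrative, not the crate's types):

```rust
use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
enum Coverage {
    AllSky,
    Cell(u64), // e.g. a HEALPix cell index
}

impl Ord for Coverage {
    fn cmp(&self, other: &Self) -> Ordering {
        match (self, other) {
            (Coverage::AllSky, Coverage::AllSky) => Ordering::Equal,
            (Coverage::AllSky, Coverage::Cell(_)) => Ordering::Greater,
            (Coverage::Cell(_), Coverage::AllSky) => Ordering::Less,
            (Coverage::Cell(a), Coverage::Cell(b)) => a.cmp(b),
        }
    }
}

impl PartialOrd for Coverage {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    assert!(Coverage::AllSky > Coverage::Cell(42));
}
```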

View File

@@ -1,26 +1,26 @@
pub mod bbox;
pub mod great_circle_arc;
pub mod region;
pub mod great_circle_arc;
use super::{PI, TWICE_PI};
#[inline]
pub fn is_in_lon_range(lon0: f64, lon1: f64, lon2: f64) -> bool {
// First version of the code:
// First version of the code:
// ((v2.lon() - v1.lon()).abs() > PI) != ((v2.lon() > coo.lon()) != (v1.lon() > coo.lon()))
//
// Lets note
//
// Lets note
// - lonA = v1.lon()
// - lonB = v2.lon()
// - lon0 = coo.lon()
// When (lonB - lonA).abs() <= PI
// When (lonB - lonA).abs() <= PI
// => lonB > lon0 != lonA > lon0 like in PNPOLY
// A B lonA <= lon0 && lon0 < lonB
// --[++++[--
// B A lonB <= lon0 && lon0 < lonA
//
// But when (lonB - lonA).abs() > PI, then the test should be
// => lonA >= lon0 == lonB >= lon0
// But when (lonB - lonA).abs() > PI, then the test should be
// => lonA >= lon0 == lonB >= lon0
// <=> !(lonA >= lon0 != lonB >= lon0)
// A | B (lon0 < lonB) || (lonA <= lon0)
// --[++|++[--
@@ -32,17 +32,17 @@ pub fn is_in_lon_range(lon0: f64, lon1: f64, lon2: f64) -> bool {
// --]++|++]--
// B | A (lon0 <= lonA) || (lonB < lon0)
//
// So the previous code was bugged in this very specific case:
// So the previous code was bugged in this very specific case:
// - `lon0` has the same value as a vertex being part of:
// - one segment that do not cross RA=0
// - plus one segment crossing RA=0.
// - the point have an odd number of intersections with the polygon
// - the point have an odd number of intersections with the polygon
// (since it will be counted 0 or 2 times instead of 1).
let dlon = lon2 - lon1;
if dlon < 0.0 {
(dlon >= -PI) == (lon2 <= lon0 && lon0 < lon1)
} else {
(dlon <= PI) == (lon1 <= lon0 && lon0 < lon2)
(dlon <= PI) == (lon1 <= lon0 && lon0 < lon2)
}
}
@@ -58,3 +58,4 @@ pub fn distance_from_two_lon(lon1: f64, lon2: f64) -> f64 {
lon2 - lon1
}
}
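
The edge case described in the comments is the arc crossing RA = 0. A standalone copy of the test above, exercised on that wrap-around case (assuming longitudes already reduced to [0, 2π)):

```rust
use std::f64::consts::PI;

// Same logic as the diff above, copied verbatim for a self-contained example.
fn is_in_lon_range(lon0: f64, lon1: f64, lon2: f64) -> bool {
    let dlon = lon2 - lon1;
    if dlon < 0.0 {
        (dlon >= -PI) == (lon2 <= lon0 && lon0 < lon1)
    } else {
        (dlon <= PI) == (lon1 <= lon0 && lon0 < lon2)
    }
}

fn main() {
    // Arc from 350° to 10° crosses RA = 0: 5° is inside it, 180° is not.
    let (a, b) = (350_f64.to_radians(), 10_f64.to_radians());
    assert!(is_in_lon_range(5_f64.to_radians(), a, b));
    assert!(!is_in_lon_range(180_f64.to_radians(), a, b));
}
```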

View File

@@ -248,7 +248,7 @@ impl Region {
Region::AllSky => true,
Region::Polygon { polygon, bbox, .. } => {
// Fast checking with the bbox
if !bbox.contains_lonlat(lonlat) {
if !bbox.contains_lonlat(&lonlat) {
return false;
}

View File

@@ -114,77 +114,3 @@ pub fn ccw_tri<S: BaseFloat>(a: &[S; 2], b: &[S; 2], c: &[S; 2]) -> bool {
a[0] * b[1] + a[1] * c[0] + b[0] * c[1] - c[0] * b[1] - c[1] * a[0] - b[0] * a[1] >= S::zero()
}
struct PixelBresenhamIter {
x: i32,
y: i32,
xx: i32,
yy: i32,
dx: i32,
sx: i32,
dy: i32,
sy: i32,
err: i32,
end: bool,
}
impl PixelBresenhamIter {
fn new(sx: f64, sy: f64, ex: f64, ey: f64) -> Self {
let x = sx.floor() as i32;
let y = sy.floor() as i32;
let xx = ex.floor() as i32;
let yy = ey.floor() as i32;
let dx = (xx - x).abs();
let sx = if x < xx { 1 } else { -1 };
let dy = -(yy - y).abs();
let sy = if y < yy { 1 } else { -1 };
let err = dx + dy;
let end = false;
Self {
x,
y,
xx,
yy,
dx,
sx,
dy,
sy,
err,
end,
}
}
}
impl Iterator for PixelBresenhamIter {
type Item = (f64, f64);
fn next(&mut self) -> Option<Self::Item> {
if self.end {
None
} else {
let item = (self.x as f64, self.y as f64);
if self.x == self.xx && self.y == self.yy {
self.end = true;
} else {
let e2 = 2 * self.err;
if e2 >= self.dy {
self.err += self.dy;
self.x += self.sx;
}
if e2 <= self.dx {
self.err += self.dx;
self.y += self.sy;
}
}
Some(item)
}
}
}
pub fn bresenham(sx: f64, sy: f64, ex: f64, ey: f64) -> impl Iterator<Item = (f64, f64)> {
PixelBresenhamIter::new(sx, sy, ex, ey)
}

View File

@@ -1,6 +1,6 @@
use crate::math::angle::Angle;
use crate::math::angle::ToAngle;
use cgmath::{BaseFloat, InnerSpace, Vector2, Vector3};
use crate::math::angle::ToAngle;
#[inline]
pub fn angle2<S: BaseFloat>(ab: &Vector2<S>, bc: &Vector2<S>) -> Angle<S> {
@@ -60,12 +60,13 @@ impl NormedVector2 {
Self(normed_v)
}
pub const fn new_unsafe(x: f64, y: f64) -> Self {
pub const unsafe fn new_unsafe(x: f64, y: f64) -> Self {
let v = Vector2::new(x, y);
Self(v)
}
}
use std::ops::Deref;
impl Deref for NormedVector2 {
type Target = Vector2<f64>;
@@ -76,7 +77,7 @@ impl Deref for NormedVector2 {
}
use std::ops::Mul;
impl Mul<f64> for &NormedVector2 {
impl<'a> Mul<f64> for &'a NormedVector2 {
// The multiplication of rational numbers is a closed operation.
type Output = Vector2<f64>;

View File

@@ -189,7 +189,7 @@ impl Manager {
) {
// Create the HashMap storing the source indices with respect to the
// HEALPix cell at depth 7 in which they are contained
let catalog = Catalog::new(&self.gl, colormap, sources);
let catalog = Catalog::new::<P>(&self.gl, colormap, sources);
// Update the number of sources loaded
//self.num_sources += num_instances_in_catalog as usize;
@@ -223,7 +223,7 @@ impl Manager {
pub fn get_mut_catalog(&mut self, name: &str) -> Result<&mut Catalog, Error> {
self.catalogs.get_mut(name).ok_or(Error::CatalogNotPresent {
message: format!("{name} catalog is not present!"),
message: format!("{} catalog is not present!", name),
})
}
@@ -239,7 +239,7 @@ impl Manager {
catalog.update(cells);
}
} else {
let depth = camera.get_tile_depth().min(7);
let depth = camera.get_texture_depth().min(7);
let cells = camera.get_hpx_cells(depth, CooSystem::ICRS);
for catalog in self.catalogs.values_mut() {
@@ -286,7 +286,11 @@ const MAX_SOURCES_PER_CATALOG: f32 = 50000.0;
use crate::Abort;
impl Catalog {
fn new(gl: &WebGlContext, colormap: Colormap, mut lonlat: Box<[LonLatT<f32>]>) -> Catalog {
fn new<P: Projection>(
gl: &WebGlContext,
colormap: Colormap,
mut lonlat: Box<[LonLatT<f32>]>,
) -> Catalog {
let alpha = 1_f32;
let strength = 1_f32;
let index_vec = IdxVec::from_coo(&mut lonlat);
@@ -402,7 +406,7 @@ impl Catalog {
for cell in cells {
let sources_idx = self.index_vec.get_item_indices_inside_hpx_cell(cell);
total_sources += sources_idx.end - sources_idx.start;
total_sources += (sources_idx.end - sources_idx.start) as usize;
}
total_sources
@@ -420,7 +424,8 @@ impl Catalog {
for c in cell.get_children_cells(delta_depth as u8) {
// Define the total number of sources being in this kernel depth tile
let sources_in_cell = self.index_vec.get_item_indices_inside_hpx_cell(&c);
let num_sources_in_kernel_cell = sources_in_cell.end - sources_in_cell.start;
let num_sources_in_kernel_cell =
(sources_in_cell.end - sources_in_cell.start) as usize;
if num_sources_in_kernel_cell > 0 {
let num_sources = (((num_sources_in_kernel_cell as f32) / num_sources_in_fov)
* MAX_SOURCES_PER_CATALOG) as usize;
@@ -454,7 +459,6 @@ impl Catalog {
);
}
#[allow(clippy::too_many_arguments)]
fn draw(
&self,
gl: &WebGlContext,
@@ -511,7 +515,7 @@ impl Catalog {
.draw_elements_instanced_with_i32(
WebGl2RenderingContext::TRIANGLES,
0,
self.num_instances,
self.num_instances as i32,
);
Ok(())
},
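
The hunk above distributes a global cap of `MAX_SOURCES_PER_CATALOG` drawn sources across HEALPix cells in proportion to each cell's population inside the field of view. A minimal sketch of that budgeting, with illustrative names:

```rust
// Sketch only: per-cell share of a global source cap, proportional to how
// many sources the cell contributes to the field of view.
const MAX_SOURCES_PER_CATALOG: f32 = 50_000.0;

fn budget_for_cell(sources_in_cell: usize, sources_in_fov: usize) -> usize {
    if sources_in_fov == 0 {
        return 0;
    }
    ((sources_in_cell as f32 / sources_in_fov as f32) * MAX_SOURCES_PER_CATALOG) as usize
}

fn main() {
    // A cell holding 10% of the sources in view gets 10% of the cap.
    assert_eq!(budget_for_cell(1_000, 10_000), 5_000);
}
```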

View File

@@ -10,8 +10,8 @@ use crate::math::lonlat::LonLat;
use crate::math::projection::coo_space::XYScreen;
use crate::math::TWICE_PI;
use crate::math::angle::AngleFormatter;
use crate::math::angle::ToAngle;
use crate::math::angle::AngleFormatter;
use al_api::angle::Formatter;
use cgmath::Vector2;
use core::ops::Range;
@@ -40,7 +40,7 @@ impl Label {
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: Formatter,
grid_decimal_prec: u8,
grid_decimal_prec: u8
) -> Option<Self> {
let fov = camera.get_field_of_view();
let d = if fov.contains_north_pole() {
@@ -79,16 +79,12 @@ impl Label {
let mut angle = lon.to_angle();
let fmt = match fmt {
Formatter::Decimal => AngleFormatter::Decimal {
prec: grid_decimal_prec,
Formatter::Decimal => {
AngleFormatter::Decimal { prec: grid_decimal_prec }
},
Formatter::Sexagesimal => {
// Sexagesimal formatting for longitudes is HMS
AngleFormatter::Sexagesimal {
prec: grid_decimal_prec,
plus: false,
hours: true,
}
AngleFormatter::Sexagesimal { prec: grid_decimal_prec, plus: false, hours: true }
}
};
angle.set_format(fmt);
@@ -117,7 +113,7 @@ impl Label {
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: Formatter,
grid_decimal_prec: u8,
grid_decimal_prec: u8
) -> Option<Self> {
let lonlat = match options {
LabelOptions::Centered => {
@@ -149,16 +145,12 @@ impl Label {
let mut angle = lat.to_angle();
let fmt = match fmt {
Formatter::Decimal => AngleFormatter::Decimal {
prec: grid_decimal_prec,
Formatter::Decimal => {
AngleFormatter::Decimal { prec: grid_decimal_prec }
},
Formatter::Sexagesimal => {
// Sexagesimal formatting for latitudes is DMS with an optional '+' character
AngleFormatter::Sexagesimal {
prec: grid_decimal_prec,
plus: true,
hours: false,
}
AngleFormatter::Sexagesimal { prec: grid_decimal_prec, plus: true, hours: false }
}
};
angle.set_format(fmt);

View File

@@ -16,7 +16,7 @@ pub fn get_intersecting_meridian(
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: Formatter,
grid_decimal_prec: u8,
grid_decimal_prec: u8
) -> Option<Meridian> {
let fov = camera.get_field_of_view();
if fov.contains_both_poles() {
@@ -27,7 +27,7 @@ pub fn get_intersecting_meridian(
camera,
projection,
fmt,
grid_decimal_prec,
grid_decimal_prec
);
Some(meridian)
} else {
@@ -42,7 +42,7 @@ pub fn get_intersecting_meridian(
camera,
projection,
fmt,
grid_decimal_prec,
grid_decimal_prec
);
Some(meridian)
}
@@ -60,15 +60,7 @@ pub fn get_intersecting_meridian(
lat1..MINUS_HALF_PI
};
Meridian::new(
lon,
&lat,
LabelOptions::OnSide,
camera,
projection,
fmt,
grid_decimal_prec,
)
Meridian::new(lon, &lat, LabelOptions::OnSide, camera, projection, fmt, grid_decimal_prec)
}
2 => {
// full intersection
@@ -85,7 +77,7 @@ pub fn get_intersecting_meridian(
camera,
projection,
fmt,
grid_decimal_prec,
grid_decimal_prec
)
}
_ => Meridian::new(
@@ -95,8 +87,8 @@ pub fn get_intersecting_meridian(
camera,
projection,
fmt,
grid_decimal_prec,
),
grid_decimal_prec
)
};
Some(meridian)
@@ -121,17 +113,9 @@ impl Meridian {
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: Formatter,
grid_decimal_prec: u8,
grid_decimal_prec: u8
) -> Self {
let label = Label::from_meridian(
lon,
lat,
label_options,
camera,
projection,
fmt,
grid_decimal_prec,
);
let label = Label::from_meridian(lon, lat, label_options, camera, projection, fmt, grid_decimal_prec);
// Draw the full parallel
let vertices = crate::renderable::line::great_circle_arc::project(

View File

@@ -36,12 +36,12 @@ pub struct ProjetedGrid {
vao: VertexArrayObject,
gl: WebGlContext,
}
use self::meridian::Meridian;
use crate::renderable::text::TextRenderManager;
use crate::renderable::Renderer;
use al_api::angle::Formatter;
use wasm_bindgen::JsValue;
use web_sys::HtmlElement;
use al_api::angle::Formatter;
use self::meridian::Meridian;
impl ProjetedGrid {
pub fn new(gl: WebGlContext, aladin_div: &HtmlElement) -> Result<ProjetedGrid, JsValue> {
let text_renderer = TextRenderManager::new(aladin_div)?;
@@ -181,7 +181,7 @@ impl ProjetedGrid {
{
let position = position.cast::<f32>().unwrap_abort();
self.text_renderer
.add_label(content, &position, cgmath::Rad(*rot as f32))?;
.add_label(&content, &position, cgmath::Rad(*rot as f32))?;
}
self.text_renderer.end();
}
@@ -198,15 +198,14 @@ impl ProjetedGrid {
if self.enabled {
let fov = camera.get_field_of_view();
let bbox = fov.get_bounding_box();
//let max_dim_px = camera.get_width().max(camera.get_height()) as f64;
//let step_line_px = max_dim_px * 0.15;
let aspect = camera.get_aspect() as f64;
let max_dim_px = camera.get_width().max(camera.get_height()) as f64;
let step_line_px = max_dim_px * 0.2;
// update meridians
self.meridians = {
// Select the good step with a binary search
let step_lon_precised = bbox.get_lon_size() * 0.15;
let step_lon_precised =
(bbox.get_lon_size() as f64) * step_line_px / (camera.get_width() as f64);
let step_lon = select_fixed_step(step_lon_precised);
let decimal_lon_prec = step_lon.to_degrees().log10().abs().ceil() as u8;
@@ -221,13 +220,9 @@ impl ProjetedGrid {
let mut meridians = vec![];
let mut lon = start_lon;
while lon < stop_lon {
if let Some(p) = meridian::get_intersecting_meridian(
lon,
camera,
projection,
self.fmt,
decimal_lon_prec,
) {
if let Some(p) =
meridian::get_intersecting_meridian(lon, camera, projection, self.fmt, decimal_lon_prec)
{
meridians.push(p);
}
lon += step_lon;
@@ -236,7 +231,8 @@ impl ProjetedGrid {
};
self.parallels = {
let step_lat_precised = aspect * bbox.get_lat_size() * 0.15;
let step_lat_precised =
(bbox.get_lat_size() as f64) * step_line_px / (camera.get_height() as f64);
let step_lat = select_fixed_step(step_lat_precised);
let decimal_lat_prec = step_lat.to_degrees().log10().abs().ceil() as u8;
@@ -250,13 +246,7 @@ impl ProjetedGrid {
let mut parallels = vec![];
while lat < stop_lat {
if let Some(p) = parallel::get_intersecting_parallel(
lat,
camera,
projection,
self.fmt,
decimal_lat_prec,
) {
if let Some(p) = parallel::get_intersecting_parallel(lat, camera, projection, self.fmt, decimal_lat_prec) {
parallels.push(p);
}
lat += step_lat;
@@ -279,10 +269,12 @@ impl ProjetedGrid {
let mut buf: Vec<f32> = vec![];
for vertices in paths {
let vertices = vertices.as_ref();
let path_vertices_buf_iter = vertices
.iter()
.zip(vertices.iter().skip(1))
.flat_map(|(a, b)| [a[0], a[1], b[0], b[1]]);
.map(|(a, b)| [a[0], a[1], b[0], b[1]])
.flatten();
buf.extend(path_vertices_buf_iter);
}
@@ -349,7 +341,7 @@ const GRID_STEPS: &[f64] = &[
0.08726647,
0.17453293,
0.34906585,
std::f64::consts::FRAC_PI_6,
std::f64::consts::FRAC_PI_4,
];
fn select_fixed_step(fov: f64) -> f64 {
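
Earlier in this file's diff, the label precision is derived from the selected grid step as `step.to_degrees().log10().abs().ceil()`. A standalone sketch of that derivation (it is not the body of `select_fixed_step` above):

```rust
// Sketch only: number of decimals needed to label a grid with the given step.
fn decimal_prec_for_step(step_deg: f64) -> u8 {
    step_deg.log10().abs().ceil() as u8
}

fn main() {
    // A 0.02° step needs 2 decimals, a 0.5° step needs 1.
    assert_eq!(decimal_prec_for_step(0.02), 2);
    assert_eq!(decimal_prec_for_step(0.5), 1);
}
```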

View File

@@ -12,12 +12,13 @@ use crate::renderable::line;
use core::ops::Range;
pub fn get_intersecting_parallel(
lat: f64,
camera: &CameraViewPort,
projection: &ProjectionType,
fmt: Formatter,
grid_decimal_prec: u8,
grid_decimal_prec: u8
) -> Option<Parallel> {
let fov = camera.get_field_of_view();
if fov.get_bounding_box().get_lon_size() > PI {
@@ -32,7 +33,7 @@ pub fn get_intersecting_parallel(
LabelOptions::Centered,
projection,
fmt,
grid_decimal_prec,
grid_decimal_prec
))
} else {
// Longitude fov < PI
@@ -49,7 +50,7 @@ pub fn get_intersecting_parallel(
LabelOptions::Centered,
projection,
fmt,
grid_decimal_prec,
grid_decimal_prec
))
}
Intersection::Intersect { vertices } => {
@@ -73,7 +74,7 @@ pub fn get_intersecting_parallel(
LabelOptions::OnSide,
projection,
fmt,
grid_decimal_prec,
grid_decimal_prec
))
}
Intersection::Empty => None,
@@ -99,17 +100,9 @@ impl Parallel {
label_options: LabelOptions,
projection: &ProjectionType,
fmt: Formatter,
grid_decimal_prec: u8,
grid_decimal_prec: u8
) -> Self {
let label = Label::from_parallel(
lat,
lon,
label_options,
camera,
projection,
fmt,
grid_decimal_prec,
);
let label = Label::from_parallel(lat, lon, label_options, camera, projection, fmt, grid_decimal_prec);
// Draw the full parallel
let vertices = if lon.end - lon.start > PI {

View File

@@ -1,9 +1,6 @@
use al_api::hips::ImageExt;
use al_core::image::format::ImageFormatType;
use al_core::texture::format::PixelType;
use web_sys::{RequestCredentials, RequestMode};
use al_core::image::format::{ChannelType, ImageFormatType};
#[derive(Debug)]
pub struct HiPSConfig {
pub root_url: String,
@@ -11,28 +8,46 @@ pub struct HiPSConfig {
// TODO: Make that independant of the HiPS but of the ImageFormat
// The size of the texture images
pub texture_size: i32,
tile_size: i32,
// Delta depth i.e. log2(texture_size / tile_size)
delta_depth: u8,
min_depth_tile: u8,
min_depth_texture: u8,
// the number of slices for cubes
cube_depth: Option<u32>,
// Num tiles per texture
num_tiles_per_texture: usize,
// Max depth of the current HiPS tiles
max_depth_texture: u8,
max_depth_tile: u8,
pub is_allsky: bool,
// TODO: store this values in the ImageSurvey
// These are proper to the survey (FITS one) and not
// to a specific survey color
pub fits_metadata: bool,
pub scale: f32,
pub offset: f32,
pub blank: f32,
pub tex_storing_integers: bool,
pub tex_storing_fits: bool,
pub tex_storing_unsigned_int: bool,
pub size_tile_uv: f32,
pub frame: CooSystem,
pub bitpix: Option<i32>,
format: ImageFormatType,
//dataproduct_subtype: Option<Vec<String>>,
//colored: bool,
pub creator_did: String,
pub request_credentials: RequestCredentials,
pub request_mode: RequestMode,
}
use crate::math;
use crate::HiPSProperties;
use al_api::coo_system::CooSystem;
use wasm_bindgen::JsValue;
@@ -57,7 +72,12 @@ impl HiPSConfig {
// Determine the size of the texture to copy
// it cannot be > to 512x512px
let _fmt = properties.get_formats();
let bitpix = properties.get_bitpix();
let mut tex_storing_unsigned_int = false;
let mut tex_storing_integers = false;
let mut tex_storing_fits = false;
if !properties.get_formats().contains(&img_ext) {
return Err(js_sys::Error::new("HiPS format not available").into());
@@ -67,18 +87,45 @@ impl HiPSConfig {
ImageExt::Fits => {
// Check the bitpix to determine the internal format of the tiles
if let Some(bitpix) = bitpix {
let fmt = (match bitpix {
8 => Ok(PixelType::R8U),
16 => Ok(PixelType::R16I),
32 => Ok(PixelType::R32I),
-32 => Ok(PixelType::R32F),
-64 => Ok(PixelType::R32F),
let channel = (match bitpix {
#[cfg(feature = "webgl2")]
8 => {
tex_storing_fits = true;
tex_storing_unsigned_int = true;
Ok(ChannelType::R8UI)
}
#[cfg(feature = "webgl2")]
16 => {
tex_storing_fits = true;
tex_storing_integers = true;
Ok(ChannelType::R16I)
}
#[cfg(feature = "webgl2")]
32 => {
tex_storing_fits = true;
tex_storing_integers = true;
Ok(ChannelType::R32I)
}
-32 => {
tex_storing_fits = true;
tex_storing_integers = false;
Ok(ChannelType::R32F)
}
-64 => {
tex_storing_fits = true;
tex_storing_integers = false;
//Err(JsValue::from_str("f64 FITS files not supported"))
Ok(ChannelType::R64F)
}
_ => Err(JsValue::from_str(
"Fits tiles exists but the BITPIX is not correct in the property file",
)),
})?;
Ok(ImageFormatType { ext: img_ext, fmt })
Ok(ImageFormatType {
ext: img_ext,
channel,
})
} else {
Err(JsValue::from_str(
"Fits tiles exists but the BITPIX is not found",
@@ -87,53 +134,82 @@ impl HiPSConfig {
}
ImageExt::Png | ImageExt::Webp => Ok(ImageFormatType {
ext: img_ext,
fmt: PixelType::RGBA8U,
channel: ChannelType::RGBA8U,
}),
ImageExt::Jpeg => Ok(ImageFormatType {
ext: img_ext,
fmt: PixelType::RGB8U,
channel: ChannelType::RGB8U,
}),
}?;
/*let dataproduct_subtype = properties.get_dataproduct_subtype().clone();
let colored = if tex_storing_fits {
false
} else {
if let Some(subtypes) = &dataproduct_subtype {
subtypes.iter().any(|subtype| subtype == "color")
} else {
false
}
};*/
let texture_size = std::cmp::min(512, tile_size << max_depth_tile);
//let texture_size = tile_size;
let num_tile_per_side_texture = (texture_size / tile_size) as usize;
let delta_depth = math::utils::log_2_unchecked(num_tile_per_side_texture) as u8;
let num_tiles_per_texture = num_tile_per_side_texture * num_tile_per_side_texture;
let max_depth_texture = max_depth_tile - delta_depth;
let size_tile_uv = 1_f32 / ((1 << delta_depth) as f32);
let frame = properties.get_frame();
let sky_fraction = properties.get_sky_fraction().unwrap_or(1.0);
let is_allsky = sky_fraction >= 1.0;
let min_depth_tile = properties.get_min_order().unwrap_or(0);
let request_credentials = match properties.get_request_credentials() {
"include" => RequestCredentials::Include,
"same-origin" => RequestCredentials::SameOrigin,
"omit" => RequestCredentials::Omit,
_ => RequestCredentials::Omit,
let min_depth_texture = if min_depth_tile >= delta_depth {
min_depth_tile - delta_depth
} else {
0
};
let request_mode = match properties.get_request_mode() {
"cors" => RequestMode::Cors,
"no-cors" => RequestMode::NoCors,
"same-origin" => RequestMode::SameOrigin,
_ => RequestMode::Cors,
};
let hips_config = HiPSConfig {
creator_did,
// HiPS name
root_url: root_url.to_string(),
// Texture config
// The size of the texture images
texture_size,
// Delta depth i.e. log2(texture_size / tile_size)
delta_depth,
// Num tiles per texture
num_tiles_per_texture,
// Max depth of the current HiPS tiles
max_depth_texture,
max_depth_tile,
min_depth_texture,
min_depth_tile,
is_allsky,
fits_metadata: false,
scale: 1.0,
offset: 0.0,
blank: -1.0, // by default, set it to -1
tex_storing_fits,
tex_storing_integers,
tex_storing_unsigned_int,
// the number of slices in a cube
cube_depth,
size_tile_uv,
frame,
bitpix,
format,
tile_size,
request_credentials,
request_mode,
};
Ok(hips_config)
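
The configuration restored above derives the texture geometry from the tile size: the texture side is capped at 512 px, `delta_depth` is the log2 ratio between texture and tile sizes, and a texture therefore packs `4^delta_depth` tiles. A minimal arithmetic sketch, standalone and not the crate's `HiPSConfig`:

```rust
// Sketch only: (texture_size, delta_depth, num_tiles_per_texture) from the
// tile size and the maximum tile depth, following the hunk above.
fn texture_geometry(tile_size: i32, max_depth_tile: u8) -> (i32, u8, usize) {
    let texture_size = std::cmp::min(512, tile_size << max_depth_tile);
    let num_tile_per_side = (texture_size / tile_size) as usize;
    // log2 of a power of two
    let delta_depth = num_tile_per_side.trailing_zeros() as u8;
    let num_tiles_per_texture = num_tile_per_side * num_tile_per_side;
    (texture_size, delta_depth, num_tiles_per_texture)
}

fn main() {
    // Typical HiPS with 512 px tiles: the texture is a single tile.
    assert_eq!(texture_geometry(512, 11), (512, 0, 1));
    // 128 px tiles: a 512 px texture holds 4×4 = 16 tiles, so delta_depth = 2.
    assert_eq!(texture_geometry(128, 11), (512, 2, 16));
}
```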
@@ -144,32 +220,66 @@ impl HiPSConfig {
ImageExt::Fits => {
// Check the bitpix to determine the internal format of the tiles
if let Some(bitpix) = self.bitpix {
let fmt = (match bitpix {
8 => Ok(PixelType::R8U),
16 => Ok(PixelType::R16I),
32 => Ok(PixelType::R32I),
-32 => Ok(PixelType::R32F),
-64 => Ok(PixelType::R32F),
let channel = (match bitpix {
#[cfg(feature = "webgl2")]
8 => {
self.tex_storing_fits = true;
self.tex_storing_unsigned_int = true;
Ok(ChannelType::R8UI)
}
#[cfg(feature = "webgl2")]
16 => {
self.tex_storing_fits = true;
self.tex_storing_integers = true;
Ok(ChannelType::R16I)
}
#[cfg(feature = "webgl2")]
32 => {
self.tex_storing_fits = true;
self.tex_storing_integers = true;
Ok(ChannelType::R32I)
}
-32 => {
self.tex_storing_fits = true;
self.tex_storing_integers = false;
Ok(ChannelType::R32F)
}
-64 => {
self.tex_storing_fits = true;
self.tex_storing_integers = false;
//Err(JsValue::from_str("f64 FITS files not supported"))
Ok(ChannelType::R64F)
}
_ => Err(JsValue::from_str(
"Fits tiles exists but the BITPIX is not correct in the property file",
)),
})?;
Ok(ImageFormatType { ext, fmt })
Ok(ImageFormatType { ext, channel })
} else {
Err(JsValue::from_str(
"Fits tiles exists but the BITPIX is not found",
))
}
}
ImageExt::Png | ImageExt::Webp => Ok(ImageFormatType {
ext,
fmt: PixelType::RGBA8U,
}),
ImageExt::Jpeg => Ok(ImageFormatType {
ext,
fmt: PixelType::RGB8U,
}),
ImageExt::Png | ImageExt::Webp => {
self.tex_storing_fits = false;
self.tex_storing_unsigned_int = false;
self.tex_storing_integers = false;
Ok(ImageFormatType {
ext,
channel: ChannelType::RGBA8U,
})
}
ImageExt::Jpeg => {
self.tex_storing_fits = false;
self.tex_storing_unsigned_int = false;
self.tex_storing_integers = false;
Ok(ImageFormatType {
ext,
channel: ChannelType::RGB8U,
})
}
}?;
self.format = format;
@@ -192,8 +302,26 @@ impl HiPSConfig {
}
#[inline(always)]
pub fn allsky_tile_size(&self) -> i32 {
(self.get_tile_size() << 3).min(512)
pub fn set_fits_metadata(&mut self, bscale: f32, bzero: f32, blank: f32) {
self.scale = bscale;
self.offset = bzero;
self.blank = blank;
self.fits_metadata = true;
}
#[inline(always)]
pub fn delta_depth(&self) -> u8 {
self.delta_depth
}
#[inline(always)]
pub fn num_tiles_per_texture(&self) -> usize {
self.num_tiles_per_texture
}
#[inline(always)]
pub fn get_texture_size(&self) -> i32 {
self.texture_size
}
#[inline(always)]
@@ -201,6 +329,11 @@ impl HiPSConfig {
self.min_depth_tile
}
#[inline(always)]
pub fn get_min_depth_texture(&self) -> u8 {
self.min_depth_texture
}
#[inline(always)]
pub fn get_creator_did(&self) -> &str {
&self.creator_did
@@ -211,6 +344,11 @@ impl HiPSConfig {
self.tile_size
}
#[inline(always)]
pub fn get_max_depth_texture(&self) -> u8 {
self.max_depth_texture
}
#[inline(always)]
pub fn get_max_depth_tile(&self) -> u8 {
self.max_depth_tile
@@ -230,16 +368,6 @@ impl HiPSConfig {
pub fn is_colored(&self) -> bool {
self.format.is_colored()
}
#[inline(always)]
pub fn get_request_credentials(&self) -> RequestCredentials {
self.request_credentials
}
#[inline(always)]
pub fn get_request_mode(&self) -> RequestMode {
self.request_mode
}
}
use al_core::shader::{SendUniforms, ShaderBound};
@@ -247,7 +375,13 @@ use al_core::shader::{SendUniforms, ShaderBound};
impl SendUniforms for HiPSConfig {
fn attach_uniforms<'a>(&self, shader: &'a ShaderBound<'a>) -> &'a ShaderBound<'a> {
// Send max depth
shader.attach_uniform("max_depth", &(self.max_depth_tile as i32));
shader
.attach_uniform("max_depth", &(self.max_depth_texture as i32))
.attach_uniform("size_tile_uv", &self.size_tile_uv)
.attach_uniform("tex_storing_fits", &self.tex_storing_fits)
.attach_uniform("scale", &self.scale)
.attach_uniform("offset", &self.offset)
.attach_uniform("blank", &self.blank);
shader
}
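
Both `new` and the format-switching hunk above map the FITS `BITPIX` keyword to a texture channel type: positive values are integer pixels (8-bit unsigned, 16/32-bit signed), negative values are floats. A minimal sketch of that mapping with an illustrative stand-in enum:

```rust
// Sketch only: the ChannelType names mirror the diff, but this standalone
// enum is not the crate's type.
#[derive(Debug, PartialEq)]
enum ChannelType {
    R8UI,
    R16I,
    R32I,
    R32F,
    R64F,
}

fn channel_from_bitpix(bitpix: i32) -> Result<ChannelType, String> {
    match bitpix {
        8 => Ok(ChannelType::R8UI),   // unsigned 8-bit integers
        16 => Ok(ChannelType::R16I),  // signed 16-bit integers
        32 => Ok(ChannelType::R32I),  // signed 32-bit integers
        -32 => Ok(ChannelType::R32F), // 32-bit floats
        -64 => Ok(ChannelType::R64F), // 64-bit floats
        _ => Err(format!("unsupported BITPIX {bitpix}")),
    }
}

fn main() {
    assert_eq!(channel_from_bitpix(-32), Ok(ChannelType::R32F));
    assert!(channel_from_bitpix(64).is_err());
}
```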

Some files were not shown because too many files have changed in this diff.