/** @module geotiffimage */
import { getFloat16 } from '@petamoriken/float16';
import getAttribute from 'xml-utils/get-attribute.js';
import findTagsByName from 'xml-utils/find-tags-by-name.js';

import { photometricInterpretations, ExtraSamplesValues } from './globals.js';
import { fromWhiteIsZero, fromBlackIsZero, fromPalette, fromCMYK, fromYCbCr, fromCIELab } from './rgb.js';
import { getDecoder } from './compression/index.js';
import { resample, resampleInterleaved } from './resample.js';

/**
 * @typedef {Object} ReadRasterOptions
 * @property {Array<number>} [window=whole window] the subset to read data from in pixels.
 * @property {Array<number>} [bbox=whole image] the subset to read data from in
 *                                              geographical coordinates.
 * @property {Array<number>} [samples=all samples] the selection of samples to read from.
 * @property {boolean} [interleave=false] whether the data shall be read
 *                                        in one single array or separate
 *                                        arrays.
 * @property {Pool} [pool=null] The optional decoder pool to use.
 * @property {number} [width] The desired width of the output. When the width is not the
 *                            same as the image's, resampling will be performed.
 * @property {number} [height] The desired height of the output. When the height is not the
 *                             same as the image's, resampling will be performed.
 * @property {string} [resampleMethod='nearest'] The desired resampling method.
 * @property {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
 *                                  to be aborted.
 * @property {number|number[]} [fillValue] The value to use for parts of the image
 *                                         outside of the image's extent. When multiple
 *                                         samples are requested, an array of fill values
 *                                         can be passed.
 */

/** @typedef {import("./geotiff.js").TypedArray} TypedArray */

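// Illustrative sketch (not part of the original source): a typical ReadRasterOptions
// object as it might be passed to GeoTIFFImage#readRasters. The concrete values are
// assumptions chosen only to show the shape of the options.
//
//   const options = {
//     window: [0, 0, 256, 256],   // left, top, right, bottom in pixel coordinates
//     samples: [0, 1, 2],         // read the first three samples (e.g. R, G, B)
//     interleave: true,           // one interleaved array instead of one array per sample
//     fillValue: 0,               // used for pixels outside of the image's extent
//     resampleMethod: 'nearest',  // only relevant when width/height differ from the window
//   };
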
function sum(array, start, end) {
  let s = 0;
  for (let i = start; i < end; ++i) {
    s += array[i];
  }
  return s;
}

function arrayForType(format, bitsPerSample, size) {
  switch (format) {
    case 1: // unsigned integer data
      if (bitsPerSample <= 8) {
        return new Uint8Array(size);
      } else if (bitsPerSample <= 16) {
        return new Uint16Array(size);
      } else if (bitsPerSample <= 32) {
        return new Uint32Array(size);
      }
      break;
    case 2: // twos complement signed integer data
      if (bitsPerSample === 8) {
        return new Int8Array(size);
      } else if (bitsPerSample === 16) {
        return new Int16Array(size);
      } else if (bitsPerSample === 32) {
        return new Int32Array(size);
      }
      break;
    case 3: // floating point data
      switch (bitsPerSample) {
        case 16:
        case 32:
          return new Float32Array(size);
        case 64:
          return new Float64Array(size);
        default:
          break;
      }
      break;
    default:
      break;
  }
  throw Error('Unsupported data format/bitsPerSample');
}

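// Illustrative examples (assumed values, not part of the original source) of how
// SampleFormat / BitsPerSample pairs map to typed arrays via arrayForType():
//
//   arrayForType(1, 8, n)  -> Uint8Array(n)    (unsigned integer data)
//   arrayForType(2, 16, n) -> Int16Array(n)    (two's complement signed integer data)
//   arrayForType(3, 16, n) -> Float32Array(n)  (16-bit floats are widened to 32 bit)
//   arrayForType(3, 64, n) -> Float64Array(n)
//
// Any other combination throws 'Unsupported data format/bitsPerSample'.
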
function needsNormalization(format, bitsPerSample) {
  if ((format === 1 || format === 2) && bitsPerSample <= 32 && bitsPerSample % 8 === 0) {
    return false;
  } else if (format === 3 && (bitsPerSample === 16 || bitsPerSample === 32 || bitsPerSample === 64)) {
    return false;
  }
  return true;
}

function normalizeArray(inBuffer, format, planarConfiguration, samplesPerPixel, bitsPerSample, tileWidth, tileHeight) {
  // const inByteArray = new Uint8Array(inBuffer);
  const view = new DataView(inBuffer);
  const outSize = planarConfiguration === 2
    ? tileHeight * tileWidth
    : tileHeight * tileWidth * samplesPerPixel;
  const samplesToTransfer = planarConfiguration === 2
    ? 1 : samplesPerPixel;
  const outArray = arrayForType(format, bitsPerSample, outSize);
  // let pixel = 0;

  const bitMask = parseInt('1'.repeat(bitsPerSample), 2);

  if (format === 1) { // unsigned integer
    // translation of https://github.com/OSGeo/gdal/blob/master/gdal/frmts/gtiff/geotiff.cpp#L7337
    let pixelBitSkip;
    // let sampleBitOffset = 0;
    if (planarConfiguration === 1) {
      pixelBitSkip = samplesPerPixel * bitsPerSample;
      // sampleBitOffset = (samplesPerPixel - 1) * bitsPerSample;
    } else {
      pixelBitSkip = bitsPerSample;
    }

    // Bits per line rounds up to next byte boundary.
    let bitsPerLine = tileWidth * pixelBitSkip;
    if ((bitsPerLine & 7) !== 0) {
      bitsPerLine = (bitsPerLine + 7) & (~7);
    }

    for (let y = 0; y < tileHeight; ++y) {
      const lineBitOffset = y * bitsPerLine;
      for (let x = 0; x < tileWidth; ++x) {
        const pixelBitOffset = lineBitOffset + (x * samplesToTransfer * bitsPerSample);
        for (let i = 0; i < samplesToTransfer; ++i) {
          const bitOffset = pixelBitOffset + (i * bitsPerSample);
          const outIndex = (((y * tileWidth) + x) * samplesToTransfer) + i;

          const byteOffset = Math.floor(bitOffset / 8);
          const innerBitOffset = bitOffset % 8;
          if (innerBitOffset + bitsPerSample <= 8) {
            outArray[outIndex] = (view.getUint8(byteOffset) >> (8 - bitsPerSample) - innerBitOffset) & bitMask;
          } else if (innerBitOffset + bitsPerSample <= 16) {
            outArray[outIndex] = (view.getUint16(byteOffset) >> (16 - bitsPerSample) - innerBitOffset) & bitMask;
          } else if (innerBitOffset + bitsPerSample <= 24) {
            const raw = (view.getUint16(byteOffset) << 8) | (view.getUint8(byteOffset + 2));
            outArray[outIndex] = (raw >> (24 - bitsPerSample) - innerBitOffset) & bitMask;
          } else {
            outArray[outIndex] = (view.getUint32(byteOffset) >> (32 - bitsPerSample) - innerBitOffset) & bitMask;
          }

          // let outWord = 0;
          // for (let bit = 0; bit < bitsPerSample; ++bit) {
          //   if (inByteArray[bitOffset >> 3]
          //     & (0x80 >> (bitOffset & 7))) {
          //     outWord |= (1 << (bitsPerSample - 1 - bit));
          //   }
          //   ++bitOffset;
          // }

          // outArray[outIndex] = outWord;
          // outArray[pixel] = outWord;
          // pixel += 1;
        }
        // bitOffset = bitOffset + pixelBitSkip - bitsPerSample;
      }
    }
  } else if (format === 3) { // floating point
    // Float16 is handled elsewhere
    // normalize 16/24 bit floats to 32 bit floats in the array
    // console.time();
    // if (bitsPerSample === 16) {
    //   for (let byte = 0, outIndex = 0; byte < inBuffer.byteLength; byte += 2, ++outIndex) {
    //     outArray[outIndex] = getFloat16(view, byte);
    //   }
    // }
    // console.timeEnd()
  }

  return outArray.buffer;
}

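// Worked example (assumed values, for illustration only): for 4-bit unsigned samples
// (format = 1, bitsPerSample = 4) with planarConfiguration = 1 and samplesPerPixel = 1,
// normalizeArray() unpacks two samples per byte. For a 4x1 tile whose single row is the
// two bytes 0xAB 0xCD, the resulting Uint8Array (returned as its underlying ArrayBuffer)
// is [0xA, 0xB, 0xC, 0xD]:
//
//   bitMask = parseInt('1111', 2) = 0x0F
//   sample 0: byteOffset 0, innerBitOffset 0 -> (0xAB >> 4) & 0x0F = 0xA
//   sample 1: byteOffset 0, innerBitOffset 4 -> (0xAB >> 0) & 0x0F = 0xB
//   ... and likewise 0xC and 0xD from the second byte.
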
/**
 * GeoTIFF sub-file image.
 */
class GeoTIFFImage {
  /**
   * @constructor
   * @param {Object} fileDirectory The parsed file directory
   * @param {Object} geoKeys The parsed geo-keys
   * @param {DataView} dataView The DataView for the underlying file.
   * @param {Boolean} littleEndian Whether the file is encoded in little or big endian
   * @param {Boolean} cache Whether or not decoded tiles shall be cached
   * @param {Source} source The datasource to read from
   */
  constructor(fileDirectory, geoKeys, dataView, littleEndian, cache, source) {
    this.fileDirectory = fileDirectory;
    this.geoKeys = geoKeys;
    this.dataView = dataView;
    this.littleEndian = littleEndian;
    this.tiles = cache ? {} : null;
    this.isTiled = !fileDirectory.StripOffsets;
    const planarConfiguration = fileDirectory.PlanarConfiguration;
    this.planarConfiguration = (typeof planarConfiguration === 'undefined') ? 1 : planarConfiguration;
    if (this.planarConfiguration !== 1 && this.planarConfiguration !== 2) {
      throw new Error('Invalid planar configuration.');
    }

    this.source = source;
  }

  /**
   * Returns the associated parsed file directory.
   * @returns {Object} the parsed file directory
   */
  getFileDirectory() {
    return this.fileDirectory;
  }

  /**
   * Returns the associated parsed geo keys.
   * @returns {Object} the parsed geo keys
   */
  getGeoKeys() {
    return this.geoKeys;
  }

  /**
   * Returns the width of the image.
   * @returns {Number} the width of the image
   */
  getWidth() {
    return this.fileDirectory.ImageWidth;
  }

  /**
   * Returns the height of the image.
   * @returns {Number} the height of the image
   */
  getHeight() {
    return this.fileDirectory.ImageLength;
  }

  /**
   * Returns the number of samples per pixel.
   * @returns {Number} the number of samples per pixel
   */
  getSamplesPerPixel() {
    return typeof this.fileDirectory.SamplesPerPixel !== 'undefined'
      ? this.fileDirectory.SamplesPerPixel : 1;
  }

  /**
   * Returns the width of each tile.
   * @returns {Number} the width of each tile
   */
  getTileWidth() {
    return this.isTiled ? this.fileDirectory.TileWidth : this.getWidth();
  }

  /**
   * Returns the height of each tile.
   * @returns {Number} the height of each tile
   */
  getTileHeight() {
    if (this.isTiled) {
      return this.fileDirectory.TileLength;
    }
    if (typeof this.fileDirectory.RowsPerStrip !== 'undefined') {
      return Math.min(this.fileDirectory.RowsPerStrip, this.getHeight());
    }
    return this.getHeight();
  }

  getBlockWidth() {
    return this.getTileWidth();
  }

  getBlockHeight(y) {
    if (this.isTiled || (y + 1) * this.getTileHeight() <= this.getHeight()) {
      return this.getTileHeight();
    } else {
      return this.getHeight() - (y * this.getTileHeight());
    }
  }

  /**
   * Calculates the number of bytes for each pixel across all samples. Only full
   * bytes are supported; an exception is thrown when this is not the case.
   * @returns {Number} the bytes per pixel
   */
  getBytesPerPixel() {
    let bytes = 0;
    for (let i = 0; i < this.fileDirectory.BitsPerSample.length; ++i) {
      bytes += this.getSampleByteSize(i);
    }
    return bytes;
  }

  getSampleByteSize(i) {
    if (i >= this.fileDirectory.BitsPerSample.length) {
      throw new RangeError(`Sample index ${i} is out of range.`);
    }
    return Math.ceil(this.fileDirectory.BitsPerSample[i] / 8);
  }

  getReaderForSample(sampleIndex) {
    const format = this.fileDirectory.SampleFormat
      ? this.fileDirectory.SampleFormat[sampleIndex] : 1;
    const bitsPerSample = this.fileDirectory.BitsPerSample[sampleIndex];
    switch (format) {
      case 1: // unsigned integer data
        if (bitsPerSample <= 8) {
          return DataView.prototype.getUint8;
        } else if (bitsPerSample <= 16) {
          return DataView.prototype.getUint16;
        } else if (bitsPerSample <= 32) {
          return DataView.prototype.getUint32;
        }
        break;
      case 2: // twos complement signed integer data
        if (bitsPerSample <= 8) {
          return DataView.prototype.getInt8;
        } else if (bitsPerSample <= 16) {
          return DataView.prototype.getInt16;
        } else if (bitsPerSample <= 32) {
          return DataView.prototype.getInt32;
        }
        break;
      case 3:
        switch (bitsPerSample) {
          case 16:
            return function (offset, littleEndian) {
              return getFloat16(this, offset, littleEndian);
            };
          case 32:
            return DataView.prototype.getFloat32;
          case 64:
            return DataView.prototype.getFloat64;
          default:
            break;
        }
        break;
      default:
        break;
    }
    throw Error('Unsupported data format/bitsPerSample');
  }

  getSampleFormat(sampleIndex = 0) {
    return this.fileDirectory.SampleFormat
      ? this.fileDirectory.SampleFormat[sampleIndex] : 1;
  }

  getBitsPerSample(sampleIndex = 0) {
    return this.fileDirectory.BitsPerSample[sampleIndex];
  }

  getArrayForSample(sampleIndex, size) {
    const format = this.getSampleFormat(sampleIndex);
    const bitsPerSample = this.getBitsPerSample(sampleIndex);
    return arrayForType(format, bitsPerSample, size);
  }

  /**
   * Returns the decoded strip or tile.
   * @param {Number} x the tile x-offset (0 for stripped images)
   * @param {Number} y the strip or tile y-offset
   * @param {Number} sample the sample to get for separated samples
   * @param {import("./geotiff").Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool
   * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
   *                               to be aborted
   * @returns {Promise.<ArrayBuffer>}
   */
  async getTileOrStrip(x, y, sample, poolOrDecoder, signal) {
    const numTilesPerRow = Math.ceil(this.getWidth() / this.getTileWidth());
    const numTilesPerCol = Math.ceil(this.getHeight() / this.getTileHeight());
    let index;
    const { tiles } = this;
    if (this.planarConfiguration === 1) {
      index = (y * numTilesPerRow) + x;
    } else if (this.planarConfiguration === 2) {
      index = (sample * numTilesPerRow * numTilesPerCol) + (y * numTilesPerRow) + x;
    }

    let offset;
    let byteCount;
    if (this.isTiled) {
      offset = this.fileDirectory.TileOffsets[index];
      byteCount = this.fileDirectory.TileByteCounts[index];
    } else {
      offset = this.fileDirectory.StripOffsets[index];
      byteCount = this.fileDirectory.StripByteCounts[index];
    }
    const slice = (await this.source.fetch([{ offset, length: byteCount }], signal))[0];

    let request;
    if (tiles === null || !tiles[index]) {
      // resolve each request by potentially applying array normalization
      request = (async () => {
        let data = await poolOrDecoder.decode(this.fileDirectory, slice);
        const sampleFormat = this.getSampleFormat();
        const bitsPerSample = this.getBitsPerSample();
        if (needsNormalization(sampleFormat, bitsPerSample)) {
          data = normalizeArray(
            data,
            sampleFormat,
            this.planarConfiguration,
            this.getSamplesPerPixel(),
            bitsPerSample,
            this.getTileWidth(),
            this.getBlockHeight(y),
          );
        }
        return data;
      })();

      // set the cache
      if (tiles !== null) {
        tiles[index] = request;
      }
    } else {
      // get from the cache
      request = tiles[index];
    }

    // await the (possibly cached) request and return the data with its coordinates
    return { x, y, sample, data: await request };
  }

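  // Illustrative example (assumed numbers, not part of the original source): for a
  // 1000x1000 pixel image with 256x256 tiles, numTilesPerRow = numTilesPerCol = 4.
  // With planarConfiguration = 1, the tile at x = 2, y = 3 has
  // index = (3 * 4) + 2 = 14, so its data is located via TileOffsets[14] and
  // TileByteCounts[14]. With planarConfiguration = 2, each sample repeats the whole
  // 4x4 tile grid, so the same tile for sample 1 has index = (1 * 16) + 14 = 30.
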
  /**
   * Internal read function.
   * @private
   * @param {Array} imageWindow The image window in pixel coordinates
   * @param {Array} samples The selected samples (0-based indices)
   * @param {TypedArray[]|TypedArray} valueArrays The array(s) to write into
   * @param {Boolean} interleave Whether or not to write in an interleaved manner
   * @param {import("./geotiff").Pool|AbstractDecoder} poolOrDecoder the decoder or decoder pool
   * @param {number} width the width of the window to be read into
   * @param {number} height the height of the window to be read into
   * @param {string} resampleMethod the resampling method to be used when interpolating
   * @param {AbortSignal} [signal] An AbortSignal that may be signalled if the request is
   *                               to be aborted
   * @returns {Promise<TypedArray[]>|Promise<TypedArray>}
   */
  async _readRaster(imageWindow, samples, valueArrays, interleave, poolOrDecoder, width,
    height, resampleMethod, signal) {
    const tileWidth = this.getTileWidth();
    const tileHeight = this.getTileHeight();
    const imageWidth = this.getWidth();
    const imageHeight = this.getHeight();

    const minXTile = Math.max(Math.floor(imageWindow[0] / tileWidth), 0);
    const maxXTile = Math.min(
      Math.ceil(imageWindow[2] / tileWidth),
      Math.ceil(imageWidth / tileWidth),
    );
    const minYTile = Math.max(Math.floor(imageWindow[1] / tileHeight), 0);
    const maxYTile = Math.min(
      Math.ceil(imageWindow[3] / tileHeight),
      Math.ceil(imageHeight / tileHeight),
    );
    const windowWidth = imageWindow[2] - imageWindow[0];

    let bytesPerPixel = this.getBytesPerPixel();

    const srcSampleOffsets = [];
    const sampleReaders = [];
    for (let i = 0; i < samples.length; ++i) {
      if (this.planarConfiguration === 1) {
        srcSampleOffsets.push(sum(this.fileDirectory.BitsPerSample, 0, samples[i]) / 8);
      } else {
        srcSampleOffsets.push(0);
      }
      sampleReaders.push(this.getReaderForSample(samples[i]));
    }

    const promises = [];
    const { littleEndian } = this;

    for (let yTile = minYTile; yTile < maxYTile; ++yTile) {
      for (let xTile = minXTile; xTile < maxXTile; ++xTile) {
        for (let sampleIndex = 0; sampleIndex < samples.length; ++sampleIndex) {
          const si = sampleIndex;
          const sample = samples[sampleIndex];
          if (this.planarConfiguration === 2) {
            bytesPerPixel = this.getSampleByteSize(sampleIndex);
          }
          const promise = this.getTileOrStrip(xTile, yTile, sample, poolOrDecoder, signal);
          promises.push(promise);
          promise.then((tile) => {
            const buffer = tile.data;
            const dataView = new DataView(buffer);
            const blockHeight = this.getBlockHeight(tile.y);
            const firstLine = tile.y * tileHeight;
            const firstCol = tile.x * tileWidth;
            const lastLine = firstLine + blockHeight;
            const lastCol = (tile.x + 1) * tileWidth;
            const reader = sampleReaders[si];

            const ymax = Math.min(blockHeight, blockHeight - (lastLine - imageWindow[3]), imageHeight - firstLine);
            const xmax = Math.min(tileWidth, tileWidth - (lastCol - imageWindow[2]), imageWidth - firstCol);

            for (let y = Math.max(0, imageWindow[1] - firstLine); y < ymax; ++y) {
              for (let x = Math.max(0, imageWindow[0] - firstCol); x < xmax; ++x) {
                const pixelOffset = ((y * tileWidth) + x) * bytesPerPixel;
                const value = reader.call(
                  dataView, pixelOffset + srcSampleOffsets[si], littleEndian,
                );
                let windowCoordinate;
                if (interleave) {
                  windowCoordinate = ((y + firstLine - imageWindow[1]) * windowWidth * samples.length)
                    + ((x + firstCol - imageWindow[0]) * samples.length)
                    + si;
                  valueArrays[windowCoordinate] = value;
                } else {
                  windowCoordinate = (
                    (y + firstLine - imageWindow[1]) * windowWidth
                  ) + x + firstCol - imageWindow[0];
                  valueArrays[si][windowCoordinate] = value;
                }
              }
            }
          });
        }
      }
    }
    await Promise.all(promises);

    if ((width && (imageWindow[2] - imageWindow[0]) !== width)
      || (height && (imageWindow[3] - imageWindow[1]) !== height)) {
      let resampled;
      if (interleave) {
        resampled = resampleInterleaved(
          valueArrays,
          imageWindow[2] - imageWindow[0],
          imageWindow[3] - imageWindow[1],
          width, height,
          samples.length,
          resampleMethod,
        );
      } else {
        resampled = resample(
          valueArrays,
          imageWindow[2] - imageWindow[0],
          imageWindow[3] - imageWindow[1],
          width, height,
          resampleMethod,
        );
      }
      resampled.width = width;
      resampled.height = height;
      return resampled;
    }

    valueArrays.width = width || imageWindow[2] - imageWindow[0];
    valueArrays.height = height || imageWindow[3] - imageWindow[1];

    return valueArrays;
  }

  /**
   * Reads raster data from the image. This function reads all selected samples
   * into separate arrays of the correct type for that sample or into a single
   * combined array when `interleave` is set. When provided, only a subset
   * of the raster is read for each sample.
   *
   * @param {ReadRasterOptions} [options={}] optional parameters
   * @returns {Promise.<(TypedArray|TypedArray[])>} the decoded arrays as a promise
   */
  async readRasters({
    window: wnd, samples = [], interleave, pool = null,
    width, height, resampleMethod, fillValue, signal,
  } = {}) {
    const imageWindow = wnd || [0, 0, this.getWidth(), this.getHeight()];

    // check parameters
    if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {
      throw new Error('Invalid subsets');
    }

    const imageWindowWidth = imageWindow[2] - imageWindow[0];
    const imageWindowHeight = imageWindow[3] - imageWindow[1];
    const numPixels = imageWindowWidth * imageWindowHeight;
    const samplesPerPixel = this.getSamplesPerPixel();

    if (!samples || !samples.length) {
      for (let i = 0; i < samplesPerPixel; ++i) {
        samples.push(i);
      }
    } else {
      for (let i = 0; i < samples.length; ++i) {
        if (samples[i] >= samplesPerPixel) {
          return Promise.reject(new RangeError(`Invalid sample index '${samples[i]}'.`));
        }
      }
    }
    let valueArrays;
    if (interleave) {
      const format = this.fileDirectory.SampleFormat
        ? Math.max.apply(null, this.fileDirectory.SampleFormat) : 1;
      const bitsPerSample = Math.max.apply(null, this.fileDirectory.BitsPerSample);
      valueArrays = arrayForType(format, bitsPerSample, numPixels * samples.length);
      if (fillValue) {
        valueArrays.fill(fillValue);
      }
    } else {
      valueArrays = [];
      for (let i = 0; i < samples.length; ++i) {
        const valueArray = this.getArrayForSample(samples[i], numPixels);
        if (Array.isArray(fillValue) && i < fillValue.length) {
          valueArray.fill(fillValue[i]);
        } else if (fillValue && !Array.isArray(fillValue)) {
          valueArray.fill(fillValue);
        }
        valueArrays.push(valueArray);
      }
    }

    const poolOrDecoder = pool || await getDecoder(this.fileDirectory);

    const result = await this._readRaster(
      imageWindow, samples, valueArrays, interleave, poolOrDecoder, width, height, resampleMethod, signal,
    );
    return result;
  }

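  // Usage sketch (not part of the original source; assumes a GeoTIFF instance `tiff`
  // obtained elsewhere, e.g. via fromUrl() or fromArrayBuffer() from geotiff.js):
  //
  //   const image = await tiff.getImage();
  //   // read the first band of a 256x256 pixel window as a single typed array
  //   const [band] = await image.readRasters({
  //     window: [0, 0, 256, 256],
  //     samples: [0],
  //   });
  //   // or read all bands interleaved and resampled to 128x128
  //   const data = await image.readRasters({ interleave: true, width: 128, height: 128 });
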
  /**
   * Reads raster data from the image as RGB. By default, the result is an
   * interleaved typed array.
   * Colorspaces other than RGB will be transformed to RGB, color maps expanded.
   * When no other method is applicable, the first sample is used to produce a
   * greyscale image.
   * When provided, only a subset of the raster is read for each sample.
   *
   * @param {Object} [options] optional parameters
   * @param {Array<number>} [options.window] the subset to read data from in pixels.
   * @param {boolean} [options.interleave=true] whether the data shall be read
   *                                            in one single array or separate
   *                                            arrays.
   * @param {import("./geotiff").Pool} [options.pool=null] The optional decoder pool to use.
   * @param {number} [options.width] The desired width of the output. When the width is not the
   *                                 same as the image's, resampling will be performed.
   * @param {number} [options.height] The desired height of the output. When the height is not the
   *                                  same as the image's, resampling will be performed.
   * @param {string} [options.resampleMethod='nearest'] The desired resampling method.
   * @param {boolean} [options.enableAlpha=false] Enable reading alpha channel if present.
   * @param {AbortSignal} [options.signal] An AbortSignal that may be signalled if the request is
   *                                       to be aborted
   * @returns {Promise<TypedArray|TypedArray[]>} the RGB array as a Promise
   */
  async readRGB({ window, interleave = true, pool = null, width, height,
    resampleMethod, enableAlpha = false, signal } = {}) {
    const imageWindow = window || [0, 0, this.getWidth(), this.getHeight()];

    // check parameters
    if (imageWindow[0] > imageWindow[2] || imageWindow[1] > imageWindow[3]) {
      throw new Error('Invalid subsets');
    }

    const pi = this.fileDirectory.PhotometricInterpretation;

    if (pi === photometricInterpretations.RGB) {
      let s = [0, 1, 2];
      if ((!(this.fileDirectory.ExtraSamples === ExtraSamplesValues.Unspecified)) && enableAlpha) {
        s = [];
        for (let i = 0; i < this.fileDirectory.BitsPerSample.length; i += 1) {
          s.push(i);
        }
      }
      return this.readRasters({
        window,
        interleave,
        samples: s,
        pool,
        width,
        height,
        resampleMethod,
        signal,
      });
    }

    let samples;
    switch (pi) {
      case photometricInterpretations.WhiteIsZero:
      case photometricInterpretations.BlackIsZero:
      case photometricInterpretations.Palette:
        samples = [0];
        break;
      case photometricInterpretations.CMYK:
        samples = [0, 1, 2, 3];
        break;
      case photometricInterpretations.YCbCr:
      case photometricInterpretations.CIELab:
        samples = [0, 1, 2];
        break;
      default:
        throw new Error('Invalid or unsupported photometric interpretation.');
    }

    const subOptions = {
      window: imageWindow,
      interleave: true,
      samples,
      pool,
      width,
      height,
      resampleMethod,
      signal,
    };
    const { fileDirectory } = this;
    const raster = await this.readRasters(subOptions);

    const max = 2 ** this.fileDirectory.BitsPerSample[0];
    let data;
    switch (pi) {
      case photometricInterpretations.WhiteIsZero:
        data = fromWhiteIsZero(raster, max);
        break;
      case photometricInterpretations.BlackIsZero:
        data = fromBlackIsZero(raster, max);
        break;
      case photometricInterpretations.Palette:
        data = fromPalette(raster, fileDirectory.ColorMap);
        break;
      case photometricInterpretations.CMYK:
        data = fromCMYK(raster);
        break;
      case photometricInterpretations.YCbCr:
        data = fromYCbCr(raster);
        break;
      case photometricInterpretations.CIELab:
        data = fromCIELab(raster);
        break;
      default:
        throw new Error('Unsupported photometric interpretation.');
    }

    // if non-interleaved data is requested, we must split the channels
    // into their respective arrays
    if (!interleave) {
      const red = new Uint8Array(data.length / 3);
      const green = new Uint8Array(data.length / 3);
      const blue = new Uint8Array(data.length / 3);
      for (let i = 0, j = 0; i < data.length; i += 3, ++j) {
        red[j] = data[i];
        green[j] = data[i + 1];
        blue[j] = data[i + 2];
      }
      data = [red, green, blue];
    }

    data.width = raster.width;
    data.height = raster.height;
    return data;
  }

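  // Usage sketch (not part of the original source; `image` is assumed to come from
  // GeoTIFF#getImage() as in the readRasters example above):
  //
  //   const rgb = await image.readRGB({ window: [0, 0, 256, 256] });
  //   // rgb is an interleaved array: [r0, g0, b0, r1, g1, b1, ...],
  //   // with rgb.width and rgb.height describing the output dimensions.
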
  /**
   * Returns an array of tiepoints.
   * @returns {Object[]}
   */
  getTiePoints() {
    if (!this.fileDirectory.ModelTiepoint) {
      return [];
    }

    const tiePoints = [];
    for (let i = 0; i < this.fileDirectory.ModelTiepoint.length; i += 6) {
      tiePoints.push({
        i: this.fileDirectory.ModelTiepoint[i],
        j: this.fileDirectory.ModelTiepoint[i + 1],
        k: this.fileDirectory.ModelTiepoint[i + 2],
        x: this.fileDirectory.ModelTiepoint[i + 3],
        y: this.fileDirectory.ModelTiepoint[i + 4],
        z: this.fileDirectory.ModelTiepoint[i + 5],
      });
    }
    return tiePoints;
  }

  /**
   * Returns the parsed GDAL metadata items.
   *
   * If sample is passed as null, dataset-level metadata will be returned.
   * Otherwise only metadata specific to the provided sample will be returned.
   *
   * @param {number} [sample=null] The sample index.
   * @returns {Object}
   */
  getGDALMetadata(sample = null) {
    const metadata = {};
    if (!this.fileDirectory.GDAL_METADATA) {
      return null;
    }
    const string = this.fileDirectory.GDAL_METADATA;

    let items = findTagsByName(string, 'Item');

    if (sample === null) {
      items = items.filter((item) => getAttribute(item, 'sample') === undefined);
    } else {
      items = items.filter((item) => Number(getAttribute(item, 'sample')) === sample);
    }

    for (let i = 0; i < items.length; ++i) {
      const item = items[i];
      metadata[getAttribute(item, 'name')] = item.inner;
    }
    return metadata;
  }

  /**
   * Returns the GDAL nodata value
   * @returns {number|null}
   */
  getGDALNoData() {
    if (!this.fileDirectory.GDAL_NODATA) {
      return null;
    }
    const string = this.fileDirectory.GDAL_NODATA;
    return Number(string.substring(0, string.length - 1));
  }

  /**
   * Returns the image origin as an XYZ-vector. When the image has no affine
   * transformation, then an exception is thrown.
   * @returns {Array<number>} The origin as a vector
   */
  getOrigin() {
    const tiePoints = this.fileDirectory.ModelTiepoint;
    const modelTransformation = this.fileDirectory.ModelTransformation;
    if (tiePoints && tiePoints.length === 6) {
      return [
        tiePoints[3],
        tiePoints[4],
        tiePoints[5],
      ];
    }
    if (modelTransformation) {
      return [
        modelTransformation[3],
        modelTransformation[7],
        modelTransformation[11],
      ];
    }
    throw new Error('The image does not have an affine transformation.');
  }

  /**
   * Returns the image resolution as an XYZ-vector. When the image has no affine
   * transformation, then an exception is thrown.
   * @param {GeoTIFFImage} [referenceImage=null] A reference image to calculate the resolution from
   *                                             in cases when the current image does not have the
   *                                             required tags on its own.
   * @returns {Array<number>} The resolution as a vector
   */
  getResolution(referenceImage = null) {
    const modelPixelScale = this.fileDirectory.ModelPixelScale;
    const modelTransformation = this.fileDirectory.ModelTransformation;

    if (modelPixelScale) {
      return [
        modelPixelScale[0],
        -modelPixelScale[1],
        modelPixelScale[2],
      ];
    }
    if (modelTransformation) {
      return [
        modelTransformation[0],
        modelTransformation[5],
        modelTransformation[10],
      ];
    }

    if (referenceImage) {
      const [refResX, refResY, refResZ] = referenceImage.getResolution();
      return [
        refResX * referenceImage.getWidth() / this.getWidth(),
        refResY * referenceImage.getHeight() / this.getHeight(),
        refResZ * referenceImage.getWidth() / this.getWidth(),
      ];
    }

    throw new Error('The image does not have an affine transformation.');
  }

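  // Illustrative sketch (assumed variable names, not part of the original source):
  // converting a pixel coordinate to a geographical coordinate using the origin and
  // resolution, which is the same arithmetic getBoundingBox() below uses.
  //
  //   const [originX, originY] = image.getOrigin();
  //   const [resX, resY] = image.getResolution(); // resY is typically negative
  //   const geoX = originX + (col * resX);
  //   const geoY = originY + (row * resY);
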
  /**
   * Returns whether or not the pixels of the image depict an area (or point).
   * @returns {Boolean} Whether the pixels are an area
   */
  pixelIsArea() {
    return this.geoKeys.GTRasterTypeGeoKey === 1;
  }

  /**
   * Returns the image bounding box as an array of 4 values: min-x, min-y,
   * max-x and max-y. When the image has no affine transformation, then an
   * exception is thrown.
   * @returns {Array<number>} The bounding box
   */
  getBoundingBox() {
    const origin = this.getOrigin();
    const resolution = this.getResolution();

    const x1 = origin[0];
    const y1 = origin[1];

    const x2 = x1 + (resolution[0] * this.getWidth());
    const y2 = y1 + (resolution[1] * this.getHeight());

    return [
      Math.min(x1, x2),
      Math.min(y1, y2),
      Math.max(x1, x2),
      Math.max(y1, y2),
    ];
  }
}

export default GeoTIFFImage;