Skip to content

Commit

Permalink
feat(tile-converter): support attributes data from textures (#2511)
Browse files Browse the repository at this point in the history
Co-authored-by: Victor Belomestnov <[email protected]>
  • Loading branch information
mspivak-actionengine and belom88 authored Jun 26, 2023
1 parent 5784aa0 commit e176dfd
Show file tree
Hide file tree
Showing 15 changed files with 801 additions and 114 deletions.
11 changes: 6 additions & 5 deletions modules/gltf/src/lib/api/gltf-scenegraph.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,12 +50,13 @@ export class GLTFScenegraph {
byteLength: number;

// TODO - why is this not GLTFWithBuffers - what happens to images?
constructor(gltf?: {json: GLTF; buffers?: any[]}) {
constructor(gltf?: {json: GLTF; buffers?: any[]; images?: any[]}) {
// Declare locally so

this.gltf = {
json: gltf?.json || makeDefaultGLTFJson(),
buffers: gltf?.buffers || []
buffers: gltf?.buffers || [],
images: gltf?.images || []
};
this.sourceBuffers = [];
this.byteLength = 0;
Expand Down Expand Up @@ -461,7 +462,7 @@ export class GLTFScenegraph {
* Add one untyped source buffer, create a matching glTF `bufferView`, and return its index
* @param buffer
*/
addBufferView(buffer: any): number {
addBufferView(buffer: any, bufferIndex = 0, byteOffset = this.byteLength): number {
const byteLength = buffer.byteLength;
assert(Number.isFinite(byteLength));

Expand All @@ -470,9 +471,9 @@ export class GLTFScenegraph {
this.sourceBuffers.push(buffer);

const glTFBufferView = {
buffer: 0,
buffer: bufferIndex,
// Write offset from the start of the binary body
byteOffset: this.byteLength,
byteOffset,
byteLength
};

Expand Down
276 changes: 258 additions & 18 deletions modules/gltf/src/lib/extensions/deprecated/EXT_feature_metadata.ts
Original file line number Diff line number Diff line change
@@ -1,14 +1,18 @@
/* eslint-disable camelcase */
import type {GLTF} from '../../types/gltf-json-schema';

import {GLTFScenegraph} from '../../api/gltf-scenegraph';
import {getImageData} from '@loaders.gl/images';
import {
ClassProperty,
EXT_feature_metadata_class_object,
EXT_feature_metadata_feature_table,
FeatureTableProperty,
GLTF_EXT_feature_metadata
GLTF_EXT_feature_metadata,
EXT_feature_metadata_feature_texture,
FeatureTextureProperty,
GLTFMeshPrimitive
} from '../../types/gltf-json-schema';
import {getComponentTypeFromArray} from '../../gltf-utils/gltf-utils';

/** Extension name */
const EXT_FEATURE_METADATA = 'EXT_feature_metadata';
Expand All @@ -26,19 +30,11 @@ export async function decode(gltfData: {json: GLTF}): Promise<void> {
*/
function decodeExtFeatureMetadata(scenegraph: GLTFScenegraph): void {
const extension: GLTF_EXT_feature_metadata | null = scenegraph.getExtension(EXT_FEATURE_METADATA);
const schemaClasses = extension?.schema?.classes;
const featureTables = extension?.featureTables;
const featureTextures = extension?.featureTextures;

if (featureTextures) {
/*
* TODO add support for featureTextures
* Spec - https://github.com/CesiumGS/glTF/tree/3d-tiles-next/extensions/2.0/Vendor/EXT_feature_metadata#feature-textures
*/
// eslint-disable-next-line no-console
console.warn('featureTextures is not yet supported in the "EXT_feature_metadata" extension.');
}
if (!extension) return;

const schemaClasses = extension.schema?.classes;

const {featureTables} = extension;
if (schemaClasses && featureTables) {
for (const schemaName in schemaClasses) {
const schemaClass = schemaClasses[schemaName];
Expand All @@ -49,6 +45,18 @@ function decodeExtFeatureMetadata(scenegraph: GLTFScenegraph): void {
}
}
}

const {featureTextures} = extension;
if (schemaClasses && featureTextures) {
for (const schemaName in schemaClasses) {
const schemaClass = schemaClasses[schemaName];
const featureTexture = findFeatureTextureByName(featureTextures, schemaName);

if (featureTexture) {
handleFeatureTextureProperties(scenegraph, featureTexture, schemaClass);
}
}
}
}

/**
Expand Down Expand Up @@ -79,6 +87,30 @@ function handleFeatureTableProperties(
}
}

/**
 * Iterates over all properties of a feature texture and decodes their data.
 * The decoded values are attached as `featureTexture.properties[propertyName].data`.
 * @param scenegraph - Instance of the class for structured access to GLTF data.
 * @param featureTexture - Feature texture object from the extension.
 * @param schemaClass - Schema class describing the texture's properties.
 */
function handleFeatureTextureProperties(
  scenegraph: GLTFScenegraph,
  featureTexture: EXT_feature_metadata_feature_texture,
  schemaClass: EXT_feature_metadata_class_object
): void {
  // The class name doubles as the attribute name written onto mesh primitives.
  const attributeName = featureTexture.class;

  for (const propertyName in schemaClass.properties) {
    const property = featureTexture?.properties?.[propertyName];
    if (!property) {
      continue;
    }
    property.data = getPropertyDataFromTexture(scenegraph, property, attributeName);
  }
}

/**
 * Decode properties from binary source based on property type.
* @param scenegraph
Expand All @@ -94,20 +126,213 @@ function getPropertyDataFromBinarySource(
): Uint8Array | string[] {
const bufferView = featureTableProperty.bufferView;
// TODO think maybe we shouldn't get data only in Uint8Array format.
let data: Uint8Array | string[] = scenegraph.getTypedArrayForBufferView(bufferView);
const dataArray: Uint8Array = scenegraph.getTypedArrayForBufferView(bufferView);

switch (schemaProperty.type) {
case 'STRING': {
// stringOffsetBufferView should be available for string type.
const stringOffsetBufferView = featureTableProperty.stringOffsetBufferView!;
const offsetsData = scenegraph.getTypedArrayForBufferView(stringOffsetBufferView);
data = getStringAttributes(data, offsetsData, numberOfFeatures);
break;
return getStringAttributes(dataArray, offsetsData, numberOfFeatures);
}
default:
}

return data;
return dataArray;
}

/**
 * Collects property values from the texture for every mesh primitive in the glTF.
 * @param scenegraph - Instance of the class for structured access to GLTF data.
 * @param featureTextureProperty - Extension property that references the source texture.
 * @param attributeName - Name of the attribute to create on each primitive.
 * @returns Feature texture data (table of unique values gathered across all primitives).
 */
function getPropertyDataFromTexture(
  scenegraph: GLTFScenegraph,
  featureTextureProperty: FeatureTextureProperty,
  attributeName: string
): number[] {
  const {json} = scenegraph.gltf;
  // The table is shared across all primitives so indices stay consistent.
  const featureTextureTable: number[] = [];
  if (!json.meshes) {
    return featureTextureTable;
  }

  for (const mesh of json.meshes) {
    for (const primitive of mesh.primitives) {
      processPrimitiveTextures(
        scenegraph,
        attributeName,
        featureTextureProperty,
        featureTextureTable,
        primitive
      );
    }
  }

  return featureTextureTable;
}

/**
 * Processes data encoded in the texture associated with the primitive. This data will be accessible through the attributes.
 * Values read from the texture are deduplicated into `featureTextureTable`, and a new
 * accessor of per-vertex indices into that table is attached to the primitive under `attributeName`.
 * @param scenegraph - Instance of the class for structured access to GLTF data.
 * @param attributeName - Name of the attribute created on the primitive.
 * @param featureTextureProperty - Extension property that references the source texture.
 * @param featureTextureTable - Shared table of unique feature values; mutated in place.
 * @param primitive - Mesh primitive being processed.
 */
// eslint-disable-next-line max-statements
function processPrimitiveTextures(
  scenegraph: GLTFScenegraph,
  attributeName: string,
  featureTextureProperty: FeatureTextureProperty,
  featureTextureTable: number[],
  primitive: GLTFMeshPrimitive
): void {
  /*
    texture.index is an index for the "textures" array.
    The texture object referenced by this index looks like this:
    {
    "sampler": 0,
    "source": 0
    }
    "sampler" is an index for the "samplers" array
    "source" is an index for the "images" array that contains data. These data are stored in rgba channels of the image.

    texture.texCoord is a number-suffix (like 1) for an attribute like "TEXCOORD_1" in meshes.primitives
    The value of "TEXCOORD_1" is an accessor that is used to get coordinates. These coordinates are used to get data from the image.
  */
  const json = scenegraph.gltf.json;
  const textureData: number[] = [];
  const texCoordAccessorKey = `TEXCOORD_${featureTextureProperty.texture.texCoord}`;
  const texCoordAccessorIndex = primitive.attributes[texCoordAccessorKey];
  const texCoordBufferView = scenegraph.getBufferView(texCoordAccessorIndex);
  const texCoordArray: Uint8Array = scenegraph.getTypedArrayForBufferView(texCoordBufferView);

  // Reinterpret the raw bytes as float32 UV pairs; length / 4 converts a byte count to a float count.
  const textureCoordinates: Float32Array = new Float32Array(
    texCoordArray.buffer,
    texCoordArray.byteOffset,
    texCoordArray.length / 4
  );
  // textureCoordinates contains UV coordinates of the actual data stored in the texture
  // accessor.count is a number of UV pairs (they are stored as VEC2)

  const textureIndex = featureTextureProperty.texture.index;
  const texture = json.textures?.[textureIndex];
  const imageIndex = texture?.source;
  if (typeof imageIndex !== 'undefined') {
    const image = json.images?.[imageIndex];
    const mimeType = image?.mimeType;
    const parsedImage = scenegraph.gltf.images?.[imageIndex];
    if (parsedImage) {
      for (let index = 0; index < textureCoordinates.length; index += 2) {
        const value = getImageValueByCoordinates(
          parsedImage,
          mimeType,
          textureCoordinates,
          index,
          featureTextureProperty.channels
        );
        textureData.push(value);
      }
    }
  }
  /*
    featureTextureTable will contain unique values, e.g.
    textureData = [24, 35, 28, 24]
    featureTextureTable = [24, 35, 28]
    featureIndices will contain indices that refer to featureTextureTable, e.g.
    featureIndices = [0, 1, 2, 0]
  */
  // Map each known value to its table index for O(1) lookups; the previous
  // findIndex scan was O(n) per texel, i.e. O(n^2) over the whole texture.
  const valueToIndex = new Map<number, number>();
  featureTextureTable.forEach((value, tableIndex) => {
    if (!valueToIndex.has(value)) {
      valueToIndex.set(value, tableIndex);
    }
  });
  const featureIndices: number[] = [];
  for (const texelData of textureData) {
    let index = valueToIndex.get(texelData);
    if (index === undefined) {
      index = featureTextureTable.push(texelData) - 1;
      valueToIndex.set(texelData, index);
    }
    featureIndices.push(index);
  }
  // Store the indices as a new buffer + bufferView + accessor and expose them
  // on the primitive under `attributeName`.
  const typedArray = new Uint32Array(featureIndices);
  const bufferIndex =
    scenegraph.gltf.buffers.push({
      arrayBuffer: typedArray.buffer,
      byteOffset: 0,
      byteLength: typedArray.byteLength
    }) - 1;
  const bufferViewIndex = scenegraph.addBufferView(typedArray, bufferIndex, 0);
  const accessorIndex = scenegraph.addAccessor(bufferViewIndex, {
    size: 1,
    componentType: getComponentTypeFromArray(typedArray),
    count: typedArray.length
  });
  primitive.attributes[attributeName] = accessorIndex;
}

/**
 * Reads a feature value from the image at the texel addressed by the given UV pair,
 * combining the requested color channels into a single number.
 * @param parsedImage - Decoded image object (provides pixel data via getImageData).
 * @param mimeType - Mime type of the source image, if known.
 * @param textureCoordinates - Flat array of UV pairs.
 * @param index - Index of the U component of the pair to read (V is at index + 1).
 * @param channels - Channel selector string, e.g. "r" or "rgba".
 * @returns Unsigned 32-bit value assembled from the selected channels.
 */
function getImageValueByCoordinates(
  parsedImage: any,
  mimeType: string | undefined,
  textureCoordinates: Float32Array,
  index: number,
  channels: string
) {
  // Byte offset and bit shift for each color channel within an RGBA texel.
  const CHANNELS_MAP: {[channel: string]: {offset: number; shift: number}} = {
    r: {offset: 0, shift: 0},
    g: {offset: 1, shift: 8},
    b: {offset: 2, shift: 16},
    a: {offset: 3, shift: 24}
  };

  const u = textureCoordinates[index];
  const v = textureCoordinates[index + 1];

  // Decoded JPEG/PNG images are expanded to 4 components (RGBA); otherwise
  // fall back to 1 component unless the image declares its own count.
  let components = 1;
  if (mimeType && (mimeType.indexOf('image/jpeg') !== -1 || mimeType.indexOf('image/png') !== -1))
    components = 4;
  const offset = coordinatesToOffset(u, v, parsedImage, components);
  let value = 0;
  for (const c of channels) {
    const map = CHANNELS_MAP[c];
    const val = getVal(parsedImage, offset + map.offset);
    value |= val << map.shift;
  }
  // Fix: `val << 24` (alpha channel) can set the sign bit and make `value`
  // negative; reinterpret as an unsigned 32-bit integer before returning.
  return value >>> 0;
}

function getVal(parsedImage: any, offset: number): number {
const imageData = getImageData(parsedImage);
if (imageData.data.length <= offset) {
throw new Error(`${imageData.data.length} <= ${offset}`);
}
return imageData.data[offset];
}

/**
 * Converts normalized UV texture coordinates to a byte offset in the image data.
 * @param u - Horizontal texture coordinate.
 * @param v - Vertical texture coordinate.
 * @param parsedImage - Decoded image (provides width/height and, optionally, components).
 * @param componentsCount - Fallback channel count when the image does not declare one.
 * @returns Byte offset of the texel addressed by (u, v).
 */
function coordinatesToOffset(
  u: number,
  v: number,
  parsedImage: any,
  componentsCount: number = 1
): number {
  const w = parsedImage.width;
  const h = parsedImage.height;
  // Wrap out-of-range UVs into [0, 1), then map onto integer pixel indices.
  const indX = Math.round(emod(u) * (w - 1));
  const indY = Math.round(emod(v) * (h - 1));
  // components is a number of channels in the image; prefer the image's own value.
  const components = parsedImage.components ? parsedImage.components : componentsCount;
  return (indY * w + indX) * components;
}

// The following is taken from tile-converter/src/i3s-converter/helpers/batch-ids-extensions.ts
/**
 * Handle UVs if they are out of range [0,1]: wraps any coordinate into [0, 1).
 * @param n - Coordinate value, possibly outside [0, 1].
 * @returns Equivalent value wrapped into [0, 1).
 */
function emod(n: number): number {
  // The double modulo keeps the result non-negative for negative inputs.
  return ((n % 1) + 1) % 1;
}

/**
Expand All @@ -130,6 +355,21 @@ function findFeatureTableByName(
return null;
}

/**
 * Finds the feature texture whose class matches the given schema class name.
 * @param featureTextures - Feature textures keyed by name.
 * @param schemaClassName - Schema class name to look up.
 * @returns The matching feature texture, or null when none matches.
 */
function findFeatureTextureByName(
  featureTextures: {[key: string]: EXT_feature_metadata_feature_texture},
  schemaClassName: string
): EXT_feature_metadata_feature_texture | null {
  for (const name of Object.keys(featureTextures)) {
    const featureTexture = featureTextures[name];
    if (featureTexture.class === schemaClassName) {
      return featureTexture;
    }
  }
  return null;
}

/**
* Getting string attributes from binary data.
* Spec - https://github.com/CesiumGS/3d-tiles/tree/main/specification/Metadata#strings
Expand Down
13 changes: 4 additions & 9 deletions modules/gltf/src/lib/parsers/parse-gltf.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,23 +47,18 @@ export async function parseGLTF(

preprocessExtensions(gltf, options, context);

const promises: Promise<any>[] = [];

// Load linked buffers asynchronously and decodes base64 buffers in parallel
if (options?.gltf?.loadBuffers && gltf.json.buffers) {
await loadBuffers(gltf, options, context);
}

// loadImages and decodeExtensions should not be running in parallel, because
// decodeExtensions uses data from images taken during the loadImages call.
if (options?.gltf?.loadImages) {
const promise = loadImages(gltf, options, context);
promises.push(promise);
await loadImages(gltf, options, context);
}

const promise = decodeExtensions(gltf, options, context);
promises.push(promise);

// Parallelize image loading and buffer loading/extension decoding
await Promise.all(promises);
await decodeExtensions(gltf, options, context);

return gltf;
}
Expand Down
Loading

0 comments on commit e176dfd

Please sign in to comment.