mirror of
https://github.com/galacean/engine.git
synced 2026-05-06 22:23:05 +08:00
chore: remove unused code
This commit is contained in:
@@ -1,34 +0,0 @@
|
||||
## Installation
|
||||
|
||||
To install, use:
|
||||
|
||||
```sh
|
||||
npm install @galacean/engine-physics-physx
|
||||
```
|
||||
|
||||
This will allow you to import engine entirely using:
|
||||
|
||||
```javascript
|
||||
import * as PHYSICS_PHYSX from "@galacean/engine-physics-physx";
|
||||
```
|
||||
|
||||
or individual classes using:
|
||||
|
||||
```javascript
|
||||
import { PhysXPhysics } from "@galacean/engine-physics-physx";
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
// Create engine by passing in the HTMLCanvasElement id and adjust canvas size
|
||||
const engine = await WebGLEngine.create({ canvas: "canvas-id" });
|
||||
|
||||
// Initialize physics manager with PhysXPhysics.
|
||||
engine.physicsManager.initialize(PhysXPhysics);
|
||||
|
||||
......
|
||||
|
||||
// Run engine.
|
||||
engine.run();
|
||||
```
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"name": "@galacean/engine-physics-physx",
|
||||
"version": "0.0.0-experimental-backup.0",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://registry.npmjs.org"
|
||||
},
|
||||
"repository": {
|
||||
"url": "https://github.com/galacean/engine.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"main": "dist/main.js",
|
||||
"module": "dist/module.js",
|
||||
"debug": "src/index.ts",
|
||||
"browser": "dist/browser.js",
|
||||
"types": "types/index.d.ts",
|
||||
"scripts": {
|
||||
"b:types": "tsc"
|
||||
},
|
||||
"umd": {
|
||||
"name": "Galacean.PhysicsPhysX",
|
||||
"globals": {
|
||||
"@galacean/engine": "Galacean"
|
||||
}
|
||||
},
|
||||
"files": [
|
||||
"dist/**/*",
|
||||
"libs/**/*",
|
||||
"types/**/*"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@galacean/engine-design": "workspace:*",
|
||||
"@galacean/engine": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@galacean/engine": "workspace:*"
|
||||
}
|
||||
}
|
||||
@@ -1,192 +0,0 @@
|
||||
import { ICharacterController } from "@galacean/engine-design";
|
||||
import { Vector3 } from "@galacean/engine";
|
||||
import { PhysXPhysics } from "./PhysXPhysics";
|
||||
import { PhysXPhysicsScene } from "./PhysXPhysicsScene";
|
||||
import { PhysXBoxColliderShape } from "./shape/PhysXBoxColliderShape";
|
||||
import { ColliderShapeUpAxis, PhysXCapsuleColliderShape } from "./shape/PhysXCapsuleColliderShape";
|
||||
import { PhysXColliderShape } from "./shape/PhysXColliderShape";
|
||||
|
||||
/**
|
||||
* Base class for character controllers.
|
||||
*/
|
||||
export class PhysXCharacterController implements ICharacterController {
|
||||
private static _tempVec = new Vector3();
|
||||
|
||||
/** @internal */
|
||||
_scene: PhysXPhysicsScene = null;
|
||||
/** @internal */
|
||||
_id: number;
|
||||
/** @internal */
|
||||
_pxController: any;
|
||||
/** @internal */
|
||||
_pxManager: PhysXPhysicsScene;
|
||||
/** @internal */
|
||||
_shape: PhysXColliderShape;
|
||||
private _shapeScaledPosition = new Vector3();
|
||||
private _worldPosition: Vector3 = null;
|
||||
|
||||
private _physXPhysics: PhysXPhysics;
|
||||
|
||||
constructor(physXPhysics: PhysXPhysics) {
|
||||
this._physXPhysics = physXPhysics;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.move }
|
||||
*/
|
||||
move(disp: Vector3, minDist: number, elapsedTime: number): number {
|
||||
return this._pxController?.move(disp, minDist, elapsedTime) ?? 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.setWorldPosition }
|
||||
*/
|
||||
setWorldPosition(position: Vector3): void {
|
||||
this._worldPosition = position;
|
||||
this._updateNativePosition();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.getWorldPosition }
|
||||
*/
|
||||
getWorldPosition(position: Vector3): void {
|
||||
if (this._pxController) {
|
||||
position.copyFrom(this._pxController.getPosition());
|
||||
position.subtract(this._shapeScaledPosition);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.setStepOffset }
|
||||
*/
|
||||
setStepOffset(offset: number): void {
|
||||
this._pxController?.setStepOffset(offset);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.setNonWalkableMode }
|
||||
*/
|
||||
setNonWalkableMode(flag: number): void {
|
||||
this._pxController?.setNonWalkableMode(flag);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.setUpDirection }
|
||||
*/
|
||||
setUpDirection(up: Vector3): void {
|
||||
this._pxController?.setUpDirection(up);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.setSlopeLimit }
|
||||
*/
|
||||
setSlopeLimit(slopeLimit: number): void {
|
||||
this._pxController?.setSlopeLimit(Math.cos((slopeLimit * Math.PI) / 180));
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.addShape }
|
||||
*/
|
||||
addShape(shape: PhysXColliderShape): void {
|
||||
// Add shape should sync latest position and world scale to pxController
|
||||
this._updateShapePosition(shape._position, shape._worldScale);
|
||||
// When CharacterController is disabled, set shape property need check pxController whether exist because of this._pxManager is null and won't create pxController
|
||||
this._pxManager && this._createPXController(this._pxManager, shape);
|
||||
this._shape = shape;
|
||||
shape._controllers.add(this);
|
||||
this._pxController?.setContactOffset(shape._contractOffset);
|
||||
this._scene?._addColliderShape(shape._id);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.removeShape }
|
||||
*/
|
||||
removeShape(shape: PhysXColliderShape): void {
|
||||
this._destroyPXController();
|
||||
this._shape = null;
|
||||
shape._controllers.delete(this);
|
||||
this._scene?._removeColliderShape(shape._id);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICollider.setCollisionLayer }
|
||||
*/
|
||||
setCollisionLayer(layer: number): void {
|
||||
const actor = this._pxController?.getActor();
|
||||
|
||||
if (actor) {
|
||||
this._physXPhysics._physX.setGroup(actor, layer);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICharacterController.destroy }
|
||||
*/
|
||||
destroy(): void {
|
||||
this._destroyPXController();
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
_createPXController(pxManager: PhysXPhysicsScene, shape: PhysXColliderShape): void {
|
||||
let desc: any;
|
||||
if (shape instanceof PhysXBoxColliderShape) {
|
||||
desc = new this._physXPhysics._physX.PxBoxControllerDesc();
|
||||
desc.halfHeight = shape._halfSize.y;
|
||||
desc.halfSideExtent = shape._halfSize.x;
|
||||
desc.halfForwardExtent = shape._halfSize.z;
|
||||
if (shape._rotation.lengthSquared() > 0) {
|
||||
console.warn("Box character controller `rotation` is not supported in PhysX and will be ignored");
|
||||
}
|
||||
} else if (shape instanceof PhysXCapsuleColliderShape) {
|
||||
desc = new this._physXPhysics._physX.PxCapsuleControllerDesc();
|
||||
desc.radius = shape._radius;
|
||||
desc.height = shape._halfHeight * 2;
|
||||
desc.climbingMode = 1; // constraint mode
|
||||
|
||||
if (shape._rotation.lengthSquared() > 0) {
|
||||
console.warn("Capsule character controller `rotation` is not supported in PhysX and will be ignored");
|
||||
}
|
||||
if (shape._upAxis !== ColliderShapeUpAxis.Y) {
|
||||
console.warn("Capsule character controller `upAxis` is not supported in PhysX and will be ignored");
|
||||
}
|
||||
} else {
|
||||
throw "unsupported shape type";
|
||||
}
|
||||
|
||||
desc.setMaterial(shape._pxMaterial);
|
||||
this._pxController = pxManager._getControllerManager().createController(desc);
|
||||
desc.delete();
|
||||
|
||||
this._pxController.setUUID(shape._id);
|
||||
|
||||
this._updateNativePosition();
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
_destroyPXController(): void {
|
||||
if (this._pxController) {
|
||||
this._pxController.release();
|
||||
this._pxController = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
_updateShapePosition(shapePosition: Vector3, worldScale: Vector3): void {
|
||||
Vector3.multiply(shapePosition, worldScale, this._shapeScaledPosition);
|
||||
this._updateNativePosition();
|
||||
}
|
||||
|
||||
private _updateNativePosition(): void {
|
||||
const worldPosition = this._worldPosition;
|
||||
if (this._pxController && worldPosition) {
|
||||
Vector3.add(worldPosition, this._shapeScaledPosition, PhysXCharacterController._tempVec);
|
||||
this._pxController.setPosition(PhysXCharacterController._tempVec);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,87 +0,0 @@
|
||||
import { ICollider } from "@galacean/engine-design";
|
||||
import { Quaternion, Vector3 } from "@galacean/engine";
|
||||
import { PhysXPhysics } from "./PhysXPhysics";
|
||||
import { PhysXColliderShape } from "./shape/PhysXColliderShape";
|
||||
import { PhysXPhysicsScene } from "./PhysXPhysicsScene";
|
||||
|
||||
/**
|
||||
* Abstract class of physical collider.
|
||||
*/
|
||||
export abstract class PhysXCollider implements ICollider {
|
||||
private static _tempTransform: {
|
||||
translation: Vector3;
|
||||
rotation: Quaternion;
|
||||
} = { translation: null, rotation: null };
|
||||
|
||||
/** @internal */
|
||||
_scene: PhysXPhysicsScene = null;
|
||||
/** @internal */
|
||||
_pxActor: any;
|
||||
/** @internal */
|
||||
_shapes = new Array<PhysXColliderShape>();
|
||||
|
||||
protected _physXPhysics: PhysXPhysics;
|
||||
|
||||
constructor(physXPhysics: PhysXPhysics) {
|
||||
this._physXPhysics = physXPhysics;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICollider.addShape }
|
||||
*/
|
||||
addShape(shape: PhysXColliderShape): void {
|
||||
this._pxActor.attachShape(shape._pxShape);
|
||||
this._shapes.push(shape);
|
||||
this._scene?._addColliderShape(shape._id);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICollider.removeShape }
|
||||
*/
|
||||
removeShape(shape: PhysXColliderShape): void {
|
||||
this._pxActor.detachShape(shape._pxShape, true);
|
||||
const shapes = this._shapes;
|
||||
shapes.splice(shapes.indexOf(shape), 1);
|
||||
this._scene?._removeColliderShape(shape._id);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICollider.setWorldTransform }
|
||||
*/
|
||||
setWorldTransform(position: Vector3, rotation: Quaternion): void {
|
||||
this._pxActor.setGlobalPose(this._transform(position, rotation), true);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICollider.getWorldTransform }
|
||||
*/
|
||||
getWorldTransform(outPosition: Vector3, outRotation: Quaternion): void {
|
||||
const transform = this._pxActor.getGlobalPose();
|
||||
outPosition.set(transform.translation.x, transform.translation.y, transform.translation.z);
|
||||
outRotation.set(transform.rotation.x, transform.rotation.y, transform.rotation.z, transform.rotation.w);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICollider.setCollisionLayer }
|
||||
*/
|
||||
setCollisionLayer(layer: number): void {
|
||||
this._physXPhysics._physX.setGroup(this._pxActor, layer);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICollider.destroy }
|
||||
*/
|
||||
destroy(): void {
|
||||
this._pxActor.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
_transform(pos: Vector3, rot: Quaternion): { translation: Vector3; rotation: Quaternion } {
|
||||
const transform = PhysXCollider._tempTransform;
|
||||
transform.translation = pos;
|
||||
transform.rotation = rot.normalize();
|
||||
return transform;
|
||||
}
|
||||
}
|
||||
@@ -1,263 +0,0 @@
|
||||
import { IDynamicCollider } from "@galacean/engine-design";
|
||||
import { MathUtil, Quaternion, Vector3 } from "@galacean/engine";
|
||||
import { PhysXCollider } from "./PhysXCollider";
|
||||
import { PhysXPhysics } from "./PhysXPhysics";
|
||||
|
||||
/**
|
||||
* The collision detection mode constants used for PhysXDynamicCollider.collisionDetectionMode.
|
||||
* */
|
||||
export enum CollisionDetectionMode {
|
||||
/** Continuous collision detection is off for this dynamic collider. */
|
||||
Discrete,
|
||||
/** Continuous collision detection is on for colliding with static mesh geometry. */
|
||||
Continuous,
|
||||
/** Continuous collision detection is on for colliding with static and dynamic geometry. */
|
||||
ContinuousDynamic,
|
||||
/** Speculative continuous collision detection is on for static and dynamic geometries */
|
||||
ContinuousSpeculative
|
||||
}
|
||||
|
||||
/**
|
||||
* A dynamic collider can act with self-defined movement or physical force
|
||||
*/
|
||||
export class PhysXDynamicCollider extends PhysXCollider implements IDynamicCollider {
|
||||
private static _tempTranslation = new Vector3();
|
||||
private static _tempRotation = new Quaternion();
|
||||
|
||||
constructor(physXPhysics: PhysXPhysics, position: Vector3, rotation: Quaternion) {
|
||||
super(physXPhysics);
|
||||
const transform = this._transform(position, rotation);
|
||||
this._pxActor = physXPhysics._pxPhysics.createRigidDynamic(transform);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setLinearDamping }
|
||||
*/
|
||||
setLinearDamping(value: number): void {
|
||||
this._pxActor.setLinearDamping(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setAngularDamping }
|
||||
*/
|
||||
setAngularDamping(value: number): void {
|
||||
this._pxActor.setAngularDamping(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.getLinearVelocity }
|
||||
*/
|
||||
getLinearVelocity(out: Vector3): Vector3 {
|
||||
const velocity = this._pxActor.getLinearVelocity();
|
||||
return out.set(velocity.x, velocity.y, velocity.z);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setLinearVelocity }
|
||||
*/
|
||||
setLinearVelocity(value: Vector3): void {
|
||||
this._pxActor.setLinearVelocity(value, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.getAngularVelocity }
|
||||
*/
|
||||
getAngularVelocity(out: Vector3): Vector3 {
|
||||
const velocity = this._pxActor.getAngularVelocity();
|
||||
return out.set(
|
||||
MathUtil.radianToDegree(velocity.x),
|
||||
MathUtil.radianToDegree(velocity.y),
|
||||
MathUtil.radianToDegree(velocity.z)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setAngularVelocity }
|
||||
*/
|
||||
setAngularVelocity(value: Vector3): void {
|
||||
PhysXDynamicCollider._tempTranslation.set(
|
||||
MathUtil.degreeToRadian(value.x),
|
||||
MathUtil.degreeToRadian(value.y),
|
||||
MathUtil.degreeToRadian(value.z)
|
||||
);
|
||||
this._pxActor.setAngularVelocity(PhysXDynamicCollider._tempTranslation, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setMass }
|
||||
*/
|
||||
setMass(value: number): void {
|
||||
this._pxActor.setMass(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.getCenterOfMass }
|
||||
*/
|
||||
getCenterOfMass(out: Vector3): Vector3 {
|
||||
const { translation } = this._pxActor.getCMassLocalPose();
|
||||
return out.set(translation.x, translation.y, translation.z);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setCenterOfMass }
|
||||
*/
|
||||
setCenterOfMass(position: Vector3): void {
|
||||
this._pxActor.setCMassLocalPose(position);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setInertiaTensor }
|
||||
*/
|
||||
setInertiaTensor(value: Vector3): void {
|
||||
this._pxActor.setMassSpaceInertiaTensor(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.getInertiaTensor }
|
||||
*/
|
||||
getInertiaTensor(out: Vector3): Vector3 {
|
||||
const inertia = this._pxActor.getMassSpaceInertiaTensor();
|
||||
return out.set(inertia.x, inertia.y, inertia.z);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setMassAndUpdateInertia }
|
||||
*/
|
||||
setMassAndUpdateInertia(mass: number): void {
|
||||
this._pxActor.setMassAndUpdateInertia(mass);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setMaxAngularVelocity }
|
||||
*/
|
||||
setMaxAngularVelocity(value: number): void {
|
||||
this._pxActor.setMaxAngularVelocity(MathUtil.degreeToRadian(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setMaxDepenetrationVelocity }
|
||||
*/
|
||||
setMaxDepenetrationVelocity(value: number): void {
|
||||
this._pxActor.setMaxDepenetrationVelocity(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setSleepThreshold }
|
||||
* @default 1e-5f * PxTolerancesScale::speed * PxTolerancesScale::speed
|
||||
*/
|
||||
setSleepThreshold(value: number): void {
|
||||
this._pxActor.setSleepThreshold(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setSolverIterations }
|
||||
*/
|
||||
setSolverIterations(value: number): void {
|
||||
this._pxActor.setSolverIterationCounts(value, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setCollisionDetectionMode }
|
||||
*/
|
||||
setCollisionDetectionMode(value: number): void {
|
||||
const physX = this._physXPhysics._physX;
|
||||
|
||||
switch (value) {
|
||||
case CollisionDetectionMode.Continuous:
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_CCD, true);
|
||||
break;
|
||||
case CollisionDetectionMode.ContinuousDynamic:
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_CCD, false);
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_CCD_FRICTION, true);
|
||||
break;
|
||||
case CollisionDetectionMode.ContinuousSpeculative:
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_CCD, false);
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_SPECULATIVE_CCD, true);
|
||||
break;
|
||||
case CollisionDetectionMode.Discrete:
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_CCD, false);
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_CCD_FRICTION, false);
|
||||
this._pxActor.setRigidBodyFlag(physX.PxRigidBodyFlag.eENABLE_SPECULATIVE_CCD, false);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setUseGravity }
|
||||
*/
|
||||
setUseGravity(value: boolean): void {
|
||||
this._pxActor.setActorFlag(this._physXPhysics._physX.PxActorFlag.eDISABLE_GRAVITY, !value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setIsKinematic }
|
||||
*/
|
||||
setIsKinematic(value: boolean): void {
|
||||
if (value) {
|
||||
this._pxActor.setRigidBodyFlag(this._physXPhysics._physX.PxRigidBodyFlag.eKINEMATIC, true);
|
||||
} else {
|
||||
this._pxActor.setRigidBodyFlag(this._physXPhysics._physX.PxRigidBodyFlag.eKINEMATIC, false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.setConstraints }
|
||||
*/
|
||||
setConstraints(flags: number): void {
|
||||
this._pxActor.setRigidDynamicLockFlags(flags);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.addForce }
|
||||
*/
|
||||
addForce(force: Vector3) {
|
||||
this._pxActor.addForce({ x: force.x, y: force.y, z: force.z });
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.addTorque }
|
||||
*/
|
||||
addTorque(torque: Vector3) {
|
||||
this._pxActor.addTorque({ x: torque.x, y: torque.y, z: torque.z });
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.move }
|
||||
*/
|
||||
move(positionOrRotation: Vector3 | Quaternion, rotation?: Quaternion): void {
|
||||
if (rotation) {
|
||||
this._pxActor.setKinematicTarget(positionOrRotation, rotation);
|
||||
return;
|
||||
}
|
||||
|
||||
const tempTranslation = PhysXDynamicCollider._tempTranslation;
|
||||
const tempRotation = PhysXDynamicCollider._tempRotation;
|
||||
this.getWorldTransform(tempTranslation, tempRotation);
|
||||
if (positionOrRotation instanceof Vector3) {
|
||||
this._pxActor.setKinematicTarget(positionOrRotation, tempRotation);
|
||||
} else {
|
||||
this._pxActor.setKinematicTarget(tempTranslation, positionOrRotation);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.sleep }
|
||||
*/
|
||||
sleep(): void {
|
||||
return this._pxActor.putToSleep();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.isSleeping }
|
||||
*/
|
||||
isSleeping(): boolean {
|
||||
return this._pxActor.isSleeping();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IDynamicCollider.wakeUp }
|
||||
*/
|
||||
wakeUp(): void {
|
||||
return this._pxActor.wakeUp();
|
||||
}
|
||||
}
|
||||
@@ -1,310 +0,0 @@
|
||||
import { Quaternion, Vector3 } from "@galacean/engine";
|
||||
import {
|
||||
IBoxColliderShape,
|
||||
ICapsuleColliderShape,
|
||||
ICharacterController,
|
||||
ICollision,
|
||||
IDynamicCollider,
|
||||
IFixedJoint,
|
||||
IHingeJoint,
|
||||
IPhysics,
|
||||
IPhysicsManager,
|
||||
IPhysicsMaterial,
|
||||
IPhysicsScene,
|
||||
IPlaneColliderShape,
|
||||
ISphereColliderShape,
|
||||
ISpringJoint,
|
||||
IStaticCollider
|
||||
} from "@galacean/engine-design";
|
||||
import { PhysXCharacterController } from "./PhysXCharacterController";
|
||||
import { PhysXCollider } from "./PhysXCollider";
|
||||
import { PhysXDynamicCollider } from "./PhysXDynamicCollider";
|
||||
import { PhysXPhysicsManager } from "./PhysXPhysicsManager";
|
||||
import { PhysXPhysicsMaterial } from "./PhysXPhysicsMaterial";
|
||||
import { PhysXPhysicsScene } from "./PhysXPhysicsScene";
|
||||
import { PhysXStaticCollider } from "./PhysXStaticCollider";
|
||||
import { PhysXRuntimeMode } from "./enum/PhysXRuntimeMode";
|
||||
import { PhysXFixedJoint } from "./joint/PhysXFixedJoint";
|
||||
import { PhysXHingeJoint } from "./joint/PhysXHingeJoint";
|
||||
import { PhysXSpringJoint } from "./joint/PhysXSpringJoint";
|
||||
import { PhysXBoxColliderShape } from "./shape/PhysXBoxColliderShape";
|
||||
import { PhysXCapsuleColliderShape } from "./shape/PhysXCapsuleColliderShape";
|
||||
import { PhysXPlaneColliderShape } from "./shape/PhysXPlaneColliderShape";
|
||||
import { PhysXSphereColliderShape } from "./shape/PhysXSphereColliderShape";
|
||||
|
||||
/**
|
||||
* PhysX object creation.
|
||||
*/
|
||||
|
||||
export class PhysXPhysics implements IPhysics {
|
||||
/** @internal PhysX wasm object */
|
||||
_physX: any;
|
||||
/** @internal PhysX Foundation SDK singleton class */
|
||||
_pxFoundation: any;
|
||||
/** @internal PhysX physics object */
|
||||
_pxPhysics: any;
|
||||
|
||||
private _runTimeMode: PhysXRuntimeMode;
|
||||
private _initializeState: InitializeState = InitializeState.Uninitialized;
|
||||
private _initializePromise: Promise<void>;
|
||||
private _defaultErrorCallback: any;
|
||||
private _allocator: any;
|
||||
private _tolerancesScale: any;
|
||||
private _wasmModeUrl: string;
|
||||
private _downgradeModeUrl: string;
|
||||
|
||||
/**
|
||||
* Create a PhysXPhysics instance.
|
||||
* @param runtimeMode - Runtime use WebAssembly mode or downgrade JavaScript mode, `Auto` prefers webAssembly mode if supported @see {@link PhysXRuntimeMode}
|
||||
* @param runtimeUrls - Manually specify the `PhysXRuntimeMode.WebAssembly` mode and `PhysXRuntimeMode.JavaScript` mode URL
|
||||
*/
|
||||
constructor(runtimeMode: PhysXRuntimeMode = PhysXRuntimeMode.Auto, runtimeUrls?: PhysXRuntimeUrls) {
|
||||
this._runTimeMode = runtimeMode;
|
||||
this._wasmModeUrl =
|
||||
runtimeUrls?.wasmModeUrl ??
|
||||
"https://mdn.alipayobjects.com/rms/afts/file/A*m04iQojeKRgAAAAASWAAAAgAehQnAQ/physx.release.js";
|
||||
this._downgradeModeUrl =
|
||||
runtimeUrls?.javaScriptModeUrl ??
|
||||
"https://mdn.alipayobjects.com/rms/afts/file/A*13gEToqpJWcAAAAAgEAAAAgAehQnAQ/physx.release.downgrade.js";
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize PhysXPhysics.
|
||||
* @param runtimeMode - Runtime mode
|
||||
* @returns Promise object
|
||||
*/
|
||||
initialize(): Promise<void> {
|
||||
if (this._initializeState === InitializeState.Initialized) {
|
||||
return Promise.resolve();
|
||||
} else if (this._initializeState === InitializeState.Initializing) {
|
||||
return this._initializePromise;
|
||||
}
|
||||
|
||||
let runtimeMode = this._runTimeMode;
|
||||
const scriptPromise = new Promise((resolve, reject) => {
|
||||
const script = document.createElement("script");
|
||||
document.body.appendChild(script);
|
||||
script.async = true;
|
||||
script.onload = resolve;
|
||||
script.onerror = reject;
|
||||
if (runtimeMode == PhysXRuntimeMode.Auto) {
|
||||
const supported = (() => {
|
||||
try {
|
||||
if (typeof WebAssembly === "object" && typeof WebAssembly.instantiate === "function") {
|
||||
const wasmModule = new WebAssembly.Module(Uint8Array.of(0x0, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00));
|
||||
if (wasmModule instanceof WebAssembly.Module)
|
||||
return new WebAssembly.Instance(wasmModule) instanceof WebAssembly.Instance;
|
||||
}
|
||||
} catch (e) {}
|
||||
return false;
|
||||
})();
|
||||
if (supported) {
|
||||
runtimeMode = PhysXRuntimeMode.WebAssembly;
|
||||
} else {
|
||||
runtimeMode = PhysXRuntimeMode.JavaScript;
|
||||
}
|
||||
}
|
||||
|
||||
if (runtimeMode == PhysXRuntimeMode.JavaScript) {
|
||||
script.src = this._downgradeModeUrl;
|
||||
} else if (runtimeMode == PhysXRuntimeMode.WebAssembly) {
|
||||
script.src = this._wasmModeUrl;
|
||||
}
|
||||
});
|
||||
|
||||
const initializePromise = new Promise<void>((resolve, reject) => {
|
||||
scriptPromise
|
||||
.then(
|
||||
() =>
|
||||
(<any>window).PHYSX().then((PHYSX) => {
|
||||
this._init(PHYSX);
|
||||
this._initializeState = InitializeState.Initialized;
|
||||
this._initializePromise = null;
|
||||
console.log("PhysX loaded.");
|
||||
resolve();
|
||||
}, reject),
|
||||
reject
|
||||
)
|
||||
.catch(reject);
|
||||
});
|
||||
|
||||
this._initializePromise = initializePromise;
|
||||
return initializePromise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Destroy PhysXPhysics.
|
||||
*/
|
||||
destroy(): void {
|
||||
this._physX.PxCloseExtensions();
|
||||
this._pxPhysics.release();
|
||||
this._pxFoundation.release();
|
||||
this._defaultErrorCallback.delete();
|
||||
this._allocator.delete();
|
||||
this._tolerancesScale.delete();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createPhysicsManager }
|
||||
*/
|
||||
createPhysicsManager(): IPhysicsManager {
|
||||
return new PhysXPhysicsManager();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createPhysicsScene }
|
||||
*/
|
||||
createPhysicsScene(
|
||||
physicsManager: PhysXPhysicsManager,
|
||||
onContactBegin?: (collision: ICollision) => void,
|
||||
onContactEnd?: (collision: ICollision) => void,
|
||||
onContactStay?: (collision: ICollision) => void,
|
||||
onTriggerBegin?: (obj1: number, obj2: number) => void,
|
||||
onTriggerEnd?: (obj1: number, obj2: number) => void,
|
||||
onTriggerStay?: (obj1: number, obj2: number) => void
|
||||
): IPhysicsScene {
|
||||
const scene = new PhysXPhysicsScene(
|
||||
this,
|
||||
physicsManager,
|
||||
onContactBegin,
|
||||
onContactEnd,
|
||||
onContactStay,
|
||||
onTriggerBegin,
|
||||
onTriggerEnd,
|
||||
onTriggerStay
|
||||
);
|
||||
return scene;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createStaticCollider }
|
||||
*/
|
||||
createStaticCollider(position: Vector3, rotation: Quaternion): IStaticCollider {
|
||||
return new PhysXStaticCollider(this, position, rotation);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createDynamicCollider }
|
||||
*/
|
||||
createDynamicCollider(position: Vector3, rotation: Quaternion): IDynamicCollider {
|
||||
return new PhysXDynamicCollider(this, position, rotation);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createCharacterController }
|
||||
*/
|
||||
createCharacterController(): ICharacterController {
|
||||
return new PhysXCharacterController(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createPhysicsMaterial }
|
||||
*/
|
||||
createPhysicsMaterial(
|
||||
staticFriction: number,
|
||||
dynamicFriction: number,
|
||||
bounciness: number,
|
||||
frictionCombine: number,
|
||||
bounceCombine: number
|
||||
): IPhysicsMaterial {
|
||||
return new PhysXPhysicsMaterial(this, staticFriction, dynamicFriction, bounciness, frictionCombine, bounceCombine);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createBoxColliderShape }
|
||||
*/
|
||||
createBoxColliderShape(uniqueID: number, size: Vector3, material: PhysXPhysicsMaterial): IBoxColliderShape {
|
||||
return new PhysXBoxColliderShape(this, uniqueID, size, material);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createSphereColliderShape }
|
||||
*/
|
||||
createSphereColliderShape(uniqueID: number, radius: number, material: PhysXPhysicsMaterial): ISphereColliderShape {
|
||||
return new PhysXSphereColliderShape(this, uniqueID, radius, material);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createPlaneColliderShape }
|
||||
*/
|
||||
createPlaneColliderShape(uniqueID: number, material: PhysXPhysicsMaterial): IPlaneColliderShape {
|
||||
return new PhysXPlaneColliderShape(this, uniqueID, material);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createCapsuleColliderShape }
|
||||
*/
|
||||
createCapsuleColliderShape(
|
||||
uniqueID: number,
|
||||
radius: number,
|
||||
height: number,
|
||||
material: PhysXPhysicsMaterial
|
||||
): ICapsuleColliderShape {
|
||||
return new PhysXCapsuleColliderShape(this, uniqueID, radius, height, material);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createFixedJoint }
|
||||
*/
|
||||
createFixedJoint(collider: PhysXCollider): IFixedJoint {
|
||||
return new PhysXFixedJoint(this, collider);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createHingeJoint }
|
||||
*/
|
||||
createHingeJoint(collider: PhysXCollider): IHingeJoint {
|
||||
return new PhysXHingeJoint(this, collider);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.createSpringJoint }
|
||||
*/
|
||||
createSpringJoint(collider: PhysXCollider): ISpringJoint {
|
||||
return new PhysXSpringJoint(this, collider);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.getColliderLayerCollision }
|
||||
*/
|
||||
getColliderLayerCollision(layer1: number, layer2: number): boolean {
|
||||
return this._physX.getGroupCollisionFlag(layer1, layer2);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysics.setColliderLayerCollision }
|
||||
*/
|
||||
setColliderLayerCollision(layer1: number, layer2: number, isCollide: boolean): void {
|
||||
this._physX.setGroupCollisionFlag(layer1, layer2, isCollide);
|
||||
}
|
||||
|
||||
private _init(physX: any): void {
|
||||
const version = physX.PX_PHYSICS_VERSION;
|
||||
const defaultErrorCallback = new physX.PxDefaultErrorCallback();
|
||||
const allocator = new physX.PxDefaultAllocator();
|
||||
const pxFoundation = physX.PxCreateFoundation(version, allocator, defaultErrorCallback);
|
||||
const tolerancesScale = new physX.PxTolerancesScale();
|
||||
const pxPhysics = physX.PxCreatePhysics(version, pxFoundation, tolerancesScale, false, null);
|
||||
|
||||
physX.PxInitExtensions(pxPhysics, null);
|
||||
this._physX = physX;
|
||||
this._pxFoundation = pxFoundation;
|
||||
this._pxPhysics = pxPhysics;
|
||||
this._defaultErrorCallback = defaultErrorCallback;
|
||||
this._allocator = allocator;
|
||||
this._tolerancesScale = tolerancesScale;
|
||||
}
|
||||
}
|
||||
|
||||
enum InitializeState {
|
||||
Uninitialized,
|
||||
Initializing,
|
||||
Initialized
|
||||
}
|
||||
|
||||
/** Optional override URLs for the PhysX runtime scripts loaded by `PhysXPhysics.initialize`. */
interface PhysXRuntimeUrls {
|
||||
/** The URL of `PhysXRuntimeMode.WebAssembly` mode. */
|
||||
wasmModeUrl?: string;
|
||||
/** The URL of `PhysXRuntimeMode.JavaScript` mode. */
|
||||
javaScriptModeUrl?: string;
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
import { IPhysicsManager } from "@galacean/engine-design";
|
||||
import { TriggerEvent } from "./PhysXPhysicsScene";
|
||||
|
||||
/**
 * Physics manager implementation for PhysX; holds the trigger-event bookkeeping
 * shared with `PhysXPhysicsScene`.
 */
export class PhysXPhysicsManager implements IPhysicsManager {
  /**
   * @internal
   * Trigger events keyed by shape id pair as `_eventMap[smallerId][largerId]`;
   * entries are created and cleared by the scene as shapes are added/removed.
   */
  _eventMap: Record<number, Record<number, TriggerEvent>> = {};
}
|
||||
@@ -1,83 +0,0 @@
|
||||
import { IPhysicsMaterial } from "@galacean/engine-design";
|
||||
import { PhysXPhysics } from "./PhysXPhysics";
|
||||
|
||||
/**
|
||||
* Physics material describes how to handle colliding objects (friction, bounciness).
|
||||
*/
|
||||
export class PhysXPhysicsMaterial implements IPhysicsMaterial {
|
||||
/** @internal */
|
||||
_pxMaterial: any;
|
||||
|
||||
protected _physXPhysics: PhysXPhysics;
|
||||
|
||||
constructor(
|
||||
physXPhysics: PhysXPhysics,
|
||||
staticFriction: number,
|
||||
dynamicFriction: number,
|
||||
bounciness: number,
|
||||
frictionCombine: CombineMode,
|
||||
bounceCombine: CombineMode
|
||||
) {
|
||||
this._physXPhysics = physXPhysics;
|
||||
const pxMaterial = physXPhysics._pxPhysics.createMaterial(staticFriction, dynamicFriction, bounciness);
|
||||
pxMaterial.setFrictionCombineMode(frictionCombine);
|
||||
pxMaterial.setRestitutionCombineMode(bounceCombine);
|
||||
this._pxMaterial = pxMaterial;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysicsMaterial.setBounciness }
|
||||
*/
|
||||
setBounciness(value: number) {
|
||||
this._pxMaterial.setRestitution(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysicsMaterial.setDynamicFriction }
|
||||
*/
|
||||
setDynamicFriction(value: number) {
|
||||
this._pxMaterial.setDynamicFriction(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysicsMaterial.setStaticFriction }
|
||||
*/
|
||||
setStaticFriction(value: number) {
|
||||
this._pxMaterial.setStaticFriction(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysicsMaterial.setBounceCombine }
|
||||
*/
|
||||
setBounceCombine(value: CombineMode) {
|
||||
this._pxMaterial.setRestitutionCombineMode(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysicsMaterial.setFrictionCombine }
|
||||
*/
|
||||
setFrictionCombine(value: CombineMode) {
|
||||
this._pxMaterial.setFrictionCombineMode(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IPhysicsMaterial.destroy }
|
||||
*/
|
||||
destroy(): void {
|
||||
this._pxMaterial.release();
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Describes how physics materials of the colliding objects are combined.
 * NOTE(review): the implicit numeric values (Average=0, Minimum=1, Multiply=2, Maximum=3)
 * are passed straight to the native setFrictionCombineMode/setRestitutionCombineMode calls,
 * so they presumably mirror PhysX's PxCombineMode — confirm before reordering members.
 */
enum CombineMode {
  /** Averages the friction/bounce of the two colliding materials. */
  Average,
  /** Uses the smaller friction/bounce of the two colliding materials. */
  Minimum,
  /** Multiplies the friction/bounce of the two colliding materials. */
  Multiply,
  /** Uses the larger friction/bounce of the two colliding materials. */
  Maximum
}
|
||||
@@ -1,570 +0,0 @@
|
||||
import { Ray, Vector3, DisorderedArray, Quaternion } from "@galacean/engine";
|
||||
import { ICollision, IPhysicsScene } from "@galacean/engine-design";
|
||||
import { PhysXCharacterController } from "./PhysXCharacterController";
|
||||
import { PhysXCollider } from "./PhysXCollider";
|
||||
import { PhysXPhysics } from "./PhysXPhysics";
|
||||
import { PhysXPhysicsManager } from "./PhysXPhysicsManager";
|
||||
|
||||
/**
 * A manager is a collection of colliders and constraints which can interact.
 */
export class PhysXPhysicsScene implements IPhysicsScene {
  /** @internal Lazily created native controller manager (see `_getControllerManager`). */
  _pxControllerManager: any = null;

  // Reusable scratch objects to avoid per-query allocations.
  private static _tempPosition: Vector3 = new Vector3();
  private static _tempQuaternion: Quaternion = new Quaternion();
  private static _tempNormal: Vector3 = new Vector3();
  private static _tempPose: { translation: Vector3; rotation: Quaternion } = {
    translation: new Vector3(),
    rotation: new Quaternion()
  };
  // Shared result buffer for overlap queries — valid only until the next overlap call.
  private static _tempShapeIDs: number[] = [];

  // Cached geometry objects for reuse
  private _boxGeometry: any = null;
  private _sphereGeometry: any = null;
  private _capsuleGeometry: any = null;

  private _physXPhysics: PhysXPhysics;
  private _physXManager: PhysXPhysicsManager;
  private _pxRaycastHit: any;
  private _pxFilterData: any;

  private _pxScene: any;
  private _physXSimulationCallbackInstance: any;

  // Host callbacks invoked from the native simulation-event callback.
  private readonly _onContactEnter?: (collision: ICollision) => void;
  private readonly _onContactExit?: (collision: ICollision) => void;
  private readonly _onContactStay?: (collision: ICollision) => void;
  private readonly _onTriggerEnter?: (index1: number, index2: number) => void;
  private readonly _onTriggerExit?: (index1: number, index2: number) => void;
  private readonly _onTriggerStay?: (index1: number, index2: number) => void;

  // Trigger events active this frame; dispatched and pruned in `_fireEvent`.
  private _currentEvents: DisorderedArray<TriggerEvent> = new DisorderedArray<TriggerEvent>();

  // Recycled TriggerEvent instances to avoid garbage per trigger pair.
  private _eventPool: TriggerEvent[] = [];

  /**
   * @param physXPhysics - Owning physics backend (provides the native module and PxPhysics)
   * @param physicsManager - Manager holding the trigger-event map shared with this scene
   * @param onContactEnter - Invoked when two shapes start touching
   * @param onContactExit - Invoked when two shapes stop touching
   * @param onContactStay - Invoked while two shapes keep touching
   * @param onTriggerEnter - Invoked when a shape enters a trigger
   * @param onTriggerExit - Invoked when a shape exits a trigger
   * @param onTriggerStay - Invoked while a shape stays inside a trigger
   */
  constructor(
    physXPhysics: PhysXPhysics,
    physicsManager: PhysXPhysicsManager,
    onContactEnter?: (collision: ICollision) => void,
    onContactExit?: (collision: ICollision) => void,
    onContactStay?: (collision: ICollision) => void,
    onTriggerEnter?: (obj1: number, obj2: number) => void,
    onTriggerExit?: (obj1: number, obj2: number) => void,
    onTriggerStay?: (obj1: number, obj2: number) => void
  ) {
    this._physXPhysics = physXPhysics;
    this._physXManager = physicsManager;

    const physX = physXPhysics._physX;

    this._pxRaycastHit = new physX.PxRaycastHit();
    this._pxFilterData = new physX.PxQueryFilterData();
    // All scene queries consider static and dynamic actors and run the pre-filter callback.
    this._pxFilterData.flags = new physX.PxQueryFlags(QueryFlag.STATIC | QueryFlag.DYNAMIC | QueryFlag.PRE_FILTER);

    this._onContactEnter = onContactEnter;
    this._onContactExit = onContactExit;
    this._onContactStay = onContactStay;
    this._onTriggerEnter = onTriggerEnter;
    this._onTriggerExit = onTriggerExit;
    this._onTriggerStay = onTriggerStay;

    const triggerCallback = {
      onContactBegin: (collision) => {
        this._onContactEnter(collision);
      },
      onContactEnd: (collision) => {
        this._onContactExit(collision);
      },
      onContactPersist: (collision) => {
        this._onContactStay(collision);
      },
      onTriggerBegin: (index1, index2) => {
        // Trigger pairs are always keyed with the smaller shape id first.
        const event = index1 < index2 ? this._getTrigger(index1, index2) : this._getTrigger(index2, index1);
        event.state = TriggerEventState.Enter;
        this._currentEvents.add(event);
      },
      onTriggerEnd: (index1, index2) => {
        // Remove the pair from the event map (smaller id is the outer key); the
        // event object itself is recycled by `_fireEvent` after Exit is dispatched.
        let event: TriggerEvent;
        if (index1 < index2) {
          const subMap = this._physXManager._eventMap[index1];
          event = subMap[index2];
          subMap[index2] = undefined;
        } else {
          const subMap = this._physXManager._eventMap[index2];
          event = subMap[index1];
          subMap[index1] = undefined;
        }
        event.state = TriggerEventState.Exit;
      }
    };

    const pxPhysics = physXPhysics._pxPhysics;
    this._physXSimulationCallbackInstance = physX.PxSimulationEventCallback.implement(triggerCallback);
    const sceneDesc = physX.getDefaultSceneDesc(
      pxPhysics.getTolerancesScale(),
      0,
      this._physXSimulationCallbackInstance
    );
    this._pxScene = pxPhysics.createScene(sceneDesc);
    // The descriptor is only needed during creation; free its native memory immediately.
    sceneDesc.delete();
  }

  /**
   * {@inheritDoc IPhysicsScene.setGravity }
   */
  setGravity(value: Vector3) {
    this._pxScene.setGravity(value);
  }

  /**
   * {@inheritDoc IPhysicsScene.addCollider }
   */
  addCollider(collider: PhysXCollider): void {
    collider._scene = this;
    this._pxScene.addActor(collider._pxActor, null);
    const shapes = collider._shapes;
    // Register every shape so trigger events can be tracked against its id.
    for (let i = 0, n = shapes.length; i < n; i++) {
      this._addColliderShape(shapes[i]._id);
    }
  }

  /**
   * {@inheritDoc IPhysicsScene.removeCollider }
   */
  removeCollider(collider: PhysXCollider): void {
    collider._scene = null;
    this._pxScene.removeActor(collider._pxActor, true);
    const shapes = collider._shapes;
    for (let i = 0, n = shapes.length; i < n; i++) {
      this._removeColliderShape(shapes[i]._id);
    }
  }

  /**
   * {@inheritDoc IPhysicsScene.addCharacterController }
   */
  addCharacterController(characterController: PhysXCharacterController): void {
    characterController._scene = this;

    // Physx have no API to remove/readd cct into scene.
    if (!characterController._pxController) {
      const shape = characterController._shape;
      if (shape) {
        const lastPXManager = characterController._pxManager;
        // Recreate the native controller when it previously belonged to another scene.
        if (lastPXManager !== this) {
          lastPXManager && characterController._destroyPXController();
          characterController._createPXController(this, shape);
        }
        this._addColliderShape(shape._id);
      }
    }
    characterController._pxManager = this;
  }

  /**
   * {@inheritDoc IPhysicsScene.removeCharacterController }
   */
  removeCharacterController(characterController: PhysXCharacterController): void {
    characterController._scene = null;
    characterController._pxManager = null;
    characterController._destroyPXController();
    const shape = characterController._shape;
    shape && this._removeColliderShape(shape._id);
  }

  /**
   * {@inheritDoc IPhysicsScene.update }
   */
  update(elapsedTime: number): void {
    // Simulate, block until results are ready, then dispatch buffered trigger callbacks.
    this._simulate(elapsedTime);
    this._fetchResults();
    this._fireEvent();
  }

  /**
   * {@inheritDoc IPhysicsScene.raycast }
   */
  raycast(
    ray: Ray,
    distance: number,
    onRaycast: (obj: number) => boolean,
    hit?: (shapeUniqueID: number, distance: number, position: Vector3, normal: Vector3) => void
  ): boolean {
    const { _pxRaycastHit: pxHitResult } = this;
    distance = Math.min(distance, 3.4e38); // float32 max value limit in physX raycast.

    const raycastCallback = {
      // `onRaycast` decides per shape whether the hit blocks (eBLOCK) or is ignored (eNONE).
      preFilter: (filterData, index, actor) => {
        if (onRaycast(index)) {
          return 2; // eBLOCK
        } else {
          return 0; // eNONE
        }
      }
    };

    const pxRaycastCallback = this._physXPhysics._physX.PxQueryFilterCallback.implement(raycastCallback);
    const result = this._pxScene.raycastSingle(
      ray.origin,
      ray.direction,
      distance,
      pxHitResult,
      this._pxFilterData,
      pxRaycastCallback
    );

    pxRaycastCallback.delete();

    if (result && hit != undefined) {
      // Copy native hit data into reusable vectors before invoking the callback;
      // callers must not retain these vectors across calls.
      const { _tempPosition: position, _tempNormal: normal } = PhysXPhysicsScene;
      const { position: pxPosition, normal: pxNormal } = pxHitResult;
      position.set(pxPosition.x, pxPosition.y, pxPosition.z);
      normal.set(pxNormal.x, pxNormal.y, pxNormal.z);

      hit(pxHitResult.getShape().getUUID(), pxHitResult.distance, position, normal);
    }
    return result;
  }

  /**
   * {@inheritDoc IPhysicsScene.boxCast }
   */
  boxCast(
    center: Vector3,
    orientation: Quaternion,
    halfExtents: Vector3,
    direction: Vector3,
    distance: number,
    onSweep: (obj: number) => boolean,
    outHitResult?: (shapeUniqueID: number, distance: number, position: Vector3, normal: Vector3) => void
  ): boolean {
    // Lazily create the cached native geometry; on reuse only its size is updated.
    if (!this._boxGeometry) {
      this._boxGeometry = new this._physXPhysics._physX.PxBoxGeometry(halfExtents.x, halfExtents.y, halfExtents.z);
    } else {
      this._boxGeometry.halfExtents = halfExtents;
    }

    const pose = PhysXPhysicsScene._tempPose;
    pose.translation.copyFrom(center);
    pose.rotation.copyFrom(orientation);
    return this._sweepSingle(this._boxGeometry, pose, direction, distance, onSweep, outHitResult);
  }

  /**
   * {@inheritDoc IPhysicsScene.sphereCast }
   */
  sphereCast(
    center: Vector3,
    radius: number,
    direction: Vector3,
    distance: number,
    onSweep: (obj: number) => boolean,
    outHitResult?: (shapeUniqueID: number, distance: number, position: Vector3, normal: Vector3) => void
  ): boolean {
    // Lazily create the cached native geometry; on reuse only its radius is updated.
    if (!this._sphereGeometry) {
      this._sphereGeometry = new this._physXPhysics._physX.PxSphereGeometry(radius);
    } else {
      this._sphereGeometry.radius = radius;
    }

    // A sphere is rotation-invariant, so an identity orientation is used.
    const tempQuat = PhysXPhysicsScene._tempQuaternion;
    tempQuat.set(0, 0, 0, 1); // Identity quaternion
    const pose = { translation: center, rotation: tempQuat };
    return this._sweepSingle(this._sphereGeometry, pose, direction, distance, onSweep, outHitResult);
  }

  /**
   * {@inheritDoc IPhysicsScene.capsuleCast }
   */
  capsuleCast(
    center: Vector3,
    radius: number,
    height: number,
    orientation: Quaternion,
    direction: Vector3,
    distance: number,
    onSweep: (obj: number) => boolean,
    outHitResult?: (shapeUniqueID: number, distance: number, position: Vector3, normal: Vector3) => void
  ): boolean {
    // Lazily create the cached native geometry; PhysX capsules take a half height.
    if (!this._capsuleGeometry) {
      this._capsuleGeometry = new this._physXPhysics._physX.PxCapsuleGeometry(radius, height * 0.5);
    } else {
      this._capsuleGeometry.radius = radius;
      this._capsuleGeometry.halfHeight = height * 0.5;
    }

    const pose = PhysXPhysicsScene._tempPose;
    pose.translation.copyFrom(center);
    pose.rotation.copyFrom(orientation);
    return this._sweepSingle(this._capsuleGeometry, pose, direction, distance, onSweep, outHitResult);
  }

  /**
   * {@inheritDoc IPhysicsScene.overlapBoxAll }
   */
  overlapBoxAll(
    center: Vector3,
    orientation: Quaternion,
    halfExtents: Vector3,
    onOverlap: (obj: number) => boolean
  ): number[] {
    if (!this._boxGeometry) {
      this._boxGeometry = new this._physXPhysics._physX.PxBoxGeometry(halfExtents.x, halfExtents.y, halfExtents.z);
    } else {
      this._boxGeometry.halfExtents = halfExtents;
    }

    const pose = PhysXPhysicsScene._tempPose;
    pose.translation.copyFrom(center);
    pose.rotation.copyFrom(orientation);
    return this._overlapMultiple(this._boxGeometry, pose, onOverlap);
  }

  /**
   * {@inheritDoc IPhysicsScene.overlapSphereAll }
   */
  overlapSphereAll(center: Vector3, radius: number, onOverlap: (obj: number) => boolean): number[] {
    if (!this._sphereGeometry) {
      this._sphereGeometry = new this._physXPhysics._physX.PxSphereGeometry(radius);
    } else {
      this._sphereGeometry.radius = radius;
    }

    const tempQuat = PhysXPhysicsScene._tempQuaternion;
    tempQuat.set(0, 0, 0, 1);
    const pose = { translation: center, rotation: tempQuat };
    return this._overlapMultiple(this._sphereGeometry, pose, onOverlap);
  }

  /**
   * {@inheritDoc IPhysicsScene.overlapCapsuleAll }
   */
  overlapCapsuleAll(
    center: Vector3,
    radius: number,
    height: number,
    orientation: Quaternion,
    onOverlap: (obj: number) => boolean
  ): number[] {
    if (!this._capsuleGeometry) {
      this._capsuleGeometry = new this._physXPhysics._physX.PxCapsuleGeometry(radius, height * 0.5);
    } else {
      this._capsuleGeometry.radius = radius;
      this._capsuleGeometry.halfHeight = height * 0.5;
    }

    const pose = PhysXPhysicsScene._tempPose;
    pose.translation.copyFrom(center);
    pose.rotation.copyFrom(orientation);
    return this._overlapMultiple(this._capsuleGeometry, pose, onOverlap);
  }

  /**
   * {@inheritDoc IPhysicsScene.destroy }
   */
  destroy(): void {
    // Release cached native query geometries (created lazily, may be null).
    this._boxGeometry?.delete();
    this._sphereGeometry?.delete();
    this._capsuleGeometry?.delete();

    this._physXSimulationCallbackInstance.delete();
    this._pxRaycastHit.delete();
    this._pxFilterData.flags.delete();
    this._pxFilterData.delete();
    // Need to release the controller manager before release the scene.
    this._pxControllerManager?.release();
    this._pxScene.release();
  }

  /**
   * @internal
   * Returns the native controller manager, creating it on first use.
   */
  _getControllerManager(): any {
    let pxControllerManager = this._pxControllerManager;
    if (pxControllerManager === null) {
      this._pxControllerManager = pxControllerManager = this._pxScene.createControllerManager();
    }
    return pxControllerManager;
  }

  /**
   * @internal
   * Registers a shape id in the trigger-event map with an empty (prototype-free) sub map.
   */
  _addColliderShape(id: number) {
    this._physXManager._eventMap[id] = Object.create(null);
  }

  /**
   * @internal
   * Unregisters a shape id: pending events referencing it are recycled and its
   * entries are cleared from the event map.
   */
  _removeColliderShape(id: number) {
    const { _eventPool: eventPool, _currentEvents: currentEvents } = this;
    const { _eventMap: eventMap } = this._physXManager;
    // NOTE(review): entries are deleted while iterating — assumes DisorderedArray
    // tolerates deleteByIndex during forEach; confirm against its implementation.
    currentEvents.forEach((event, i) => {
      if (event.index1 == id) {
        currentEvents.deleteByIndex(i);
        eventPool.push(event);
      } else if (event.index2 == id) {
        currentEvents.deleteByIndex(i);
        eventPool.push(event);
        // If the shape is big index, should clear from the small index shape subMap
        eventMap[event.index1][id] = undefined;
      }
    });
    delete eventMap[id];
  }

  // Shared implementation for box/sphere/capsule casts: sweeps `geometry` from `pose`
  // along `direction` and reports the single blocking hit (if any) via `outHitResult`.
  private _sweepSingle(
    geometry: any,
    pose: { translation: Vector3; rotation: Quaternion },
    direction: Vector3,
    distance: number,
    onSweep: (obj: number) => boolean,
    outHitResult?: (shapeUniqueID: number, distance: number, position: Vector3, normal: Vector3) => void
  ): boolean {
    distance = Math.min(distance, 3.4e38); // float32 max value limit in physx sweep

    const sweepCallback = {
      preFilter: (filterData, index, actor) => {
        if (onSweep(index)) {
          return 2; // eBLOCK
        } else {
          return 0; // eNONE
        }
      }
    };

    const pxSweepCallback = this._physXPhysics._physX.PxQueryFilterCallback.implement(sweepCallback);
    const pxSweepHit = new this._physXPhysics._physX.PxSweepHit();
    const result = this._pxScene.sweepSingle(
      geometry,
      pose,
      direction,
      distance,
      pxSweepHit,
      this._pxFilterData,
      pxSweepCallback
    );

    if (result && outHitResult != undefined) {
      const { _tempPosition: position, _tempNormal: normal } = PhysXPhysicsScene;
      const { position: pxPosition, normal: pxNormal } = pxSweepHit;
      position.set(pxPosition.x, pxPosition.y, pxPosition.z);
      normal.set(pxNormal.x, pxNormal.y, pxNormal.z);
      outHitResult(pxSweepHit.getShape().getUUID(), pxSweepHit.distance, position, normal);
    }

    // Always free the native temporaries, hit or not.
    pxSweepCallback.delete();
    pxSweepHit.delete();

    return result;
  }

  // Shared implementation for the overlap*All queries. Returns the ids of all shapes
  // overlapping `geometry` at `pose`, capped at `maxHits` results; the returned array
  // is a shared static buffer only valid until the next overlap call.
  private _overlapMultiple(
    geometry: any,
    pose: { translation: Vector3; rotation: Quaternion },
    onOverlap: (obj: number) => boolean
  ): number[] {
    const overlapCallback = {
      preFilter: (filterData, index, actor) => (onOverlap(index) ? 2 : 0)
    };

    const pxOverlapCallback = this._physXPhysics._physX.PxQueryFilterCallback.implement(overlapCallback);
    const maxHits = 256;
    const hits: any = (this._pxScene as any).overlapMultiple(
      geometry,
      pose,
      maxHits,
      this._pxFilterData,
      pxOverlapCallback
    );

    const result = PhysXPhysicsScene._tempShapeIDs;
    result.length = 0;
    if (hits) {
      // PhysX overlapMultiple returns a collection with size() method
      for (let i = 0, n = hits.size(); i < n; i++) {
        result.push(hits.get(i).getShape().getUUID());
      }
    }

    pxOverlapCallback.delete();
    hits?.delete();
    return result;
  }

  // Advances the native simulation by `elapsedTime` (results are collected in _fetchResults).
  private _simulate(elapsedTime: number): void {
    this._pxScene.simulate(elapsedTime, true);
  }

  // Collects simulation results; `block` waits for the simulation step to finish.
  private _fetchResults(block: boolean = true): void {
    this._pxScene.fetchResults(block);
  }

  // Fetches (or pools) a TriggerEvent for the pair and records it in the event map.
  // Callers must pass index1 < index2.
  private _getTrigger(index1: number, index2: number): TriggerEvent {
    let event: TriggerEvent;
    if (this._eventPool.length) {
      event = this._eventPool.pop();
      event.index1 = index1;
      event.index2 = index2;
    } else {
      event = new TriggerEvent(index1, index2);
    }
    this._physXManager._eventMap[index1][index2] = event;
    return event;
  }

  // Dispatches buffered trigger callbacks: Enter transitions to Stay for the next
  // frame, Stay repeats, Exit removes the event and returns it to the pool.
  private _fireEvent(): void {
    const { _eventPool: eventPool, _currentEvents: currentEvents } = this;
    currentEvents.forEach((event, i) => {
      if (event.state == TriggerEventState.Enter) {
        this._onTriggerEnter(event.index1, event.index2);
        event.state = TriggerEventState.Stay;
      } else if (event.state == TriggerEventState.Stay) {
        this._onTriggerStay(event.index1, event.index2);
      } else if (event.state == TriggerEventState.Exit) {
        currentEvents.deleteByIndex(i);
        this._onTriggerExit(event.index1, event.index2);
        eventPool.push(event);
      }
    });
  }
}
|
||||
|
||||
/**
 * Filtering flags for scene queries.
 * NOTE(review): these bit values are fed into the native `PxQueryFlags`, so they
 * presumably mirror PhysX's PxQueryFlag bits — confirm before changing any value.
 */
enum QueryFlag {
  /** Consider static actors. */
  STATIC = 1 << 0,
  /** Consider dynamic actors. */
  DYNAMIC = 1 << 1,
  /** Run the pre-intersection filter callback. */
  PRE_FILTER = 1 << 2,
  /** Run the post-intersection filter callback. */
  POST_FILTER = 1 << 3,
  /** Stop at the first hit. */
  ANY_HIT = 1 << 4,
  /** Report hits as touching rather than blocking. */
  NO_BLOCK = 1 << 5
}
|
||||
|
||||
/**
 * Lifecycle state of a trigger event pair.
 */
enum TriggerEventState {
  /** The pair began overlapping this frame; fires the enter callback once. */
  Enter,
  /** The pair is still overlapping; fires the stay callback each frame. */
  Stay,
  /** The pair stopped overlapping; fires the exit callback and recycles the event. */
  Exit
}
|
||||
|
||||
/**
|
||||
* Trigger event to store interactive object ids and state.
|
||||
*/
|
||||
export class TriggerEvent {
|
||||
state: TriggerEventState;
|
||||
index1: number;
|
||||
index2: number;
|
||||
|
||||
constructor(index1: number, index2: number) {
|
||||
this.index1 = index1;
|
||||
this.index2 = index2;
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import { IStaticCollider } from "@galacean/engine-design";
|
||||
import { Quaternion, Vector3 } from "@galacean/engine";
|
||||
import { PhysXCollider } from "./PhysXCollider";
|
||||
import { PhysXPhysics } from "./PhysXPhysics";
|
||||
|
||||
/**
 * A static collider component that will not move.
 * @remarks Mostly used for object which always stays at the same place and never moves around.
 */
export class PhysXStaticCollider extends PhysXCollider implements IStaticCollider {
  /**
   * @param physXPhysics - The PhysX physics backend used to create the native actor
   * @param position - Initial world position of the collider
   * @param rotation - Initial world rotation of the collider
   */
  constructor(physXPhysics: PhysXPhysics, position: Vector3, rotation: Quaternion) {
    super(physXPhysics);
    // `_transform` (from PhysXCollider) presumably packs position/rotation into a
    // native transform for createRigidStatic — verify against the base class.
    this._pxActor = physXPhysics._pxPhysics.createRigidStatic(this._transform(position, rotation));
  }
}
|
||||
@@ -1,11 +0,0 @@
|
||||
/**
 * PhysX runtime mode.
 */
export enum PhysXRuntimeMode {
  /** Use WebAssembly mode first; if WebAssembly is not supported, fall back to JavaScript mode. */
  Auto,
  /** WebAssembly mode. */
  WebAssembly,
  /** JavaScript mode. */
  JavaScript
}
|
||||
@@ -1,7 +0,0 @@
|
||||
export { PhysXPhysics } from "./PhysXPhysics";
export { PhysXRuntimeMode } from "./enum/PhysXRuntimeMode";

// NOTE(review): `__buildVersion` is a placeholder — presumably substituted with the
// real version string by the build pipeline; verify against the bundler config.
//@ts-ignore
export const version = `__buildVersion`;

// Logged once at module load time.
console.log(`Galacean Engine Physics PhysX Version: ${version}`);
|
||||
@@ -1,22 +0,0 @@
|
||||
import { IFixedJoint } from "@galacean/engine-design";
|
||||
import { PhysXCollider } from "../PhysXCollider";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXJoint } from "./PhysXJoint";
|
||||
|
||||
/**
 * A fixed joint permits no relative movement between two colliders. ie the bodies are glued together.
 */
export class PhysXFixedJoint extends PhysXJoint implements IFixedJoint {
  /**
   * @param physXPhysics - The PhysX physics backend used to create the native joint
   * @param collider - The collider the joint is attached to (first actor slot)
   */
  constructor(physXPhysics: PhysXPhysics, collider: PhysXCollider) {
    super(physXPhysics);
    this._collider = collider;
    // The second actor is null until setConnectedCollider is called; both local
    // frames start at identity (shared default vector/quaternion).
    this._pxJoint = physXPhysics._pxPhysics.createFixedJoint(
      collider._pxActor,
      PhysXJoint._defaultVec,
      PhysXJoint._defaultQuat,
      null,
      PhysXJoint._defaultVec,
      PhysXJoint._defaultQuat
    );
  }
}
|
||||
@@ -1,126 +0,0 @@
|
||||
import { IHingeJoint } from "@galacean/engine-design";
|
||||
import { MathUtil, Quaternion, Vector3 } from "@galacean/engine";
|
||||
import { PhysXCollider } from "../PhysXCollider";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXJoint } from "./PhysXJoint";
|
||||
|
||||
/**
 * A joint which behaves in a similar way to a hinge or axle.
 */
export class PhysXHingeJoint extends PhysXJoint implements IHingeJoint {
  // Scratch vector, reset to (1,0,0) on each setAxis call.
  protected static _xAxis = new Vector3(1, 0, 0);

  private _axis: Vector3;
  private _axisRotationQuaternion = new Quaternion();
  private _connectedAxisRotationQuaternion = new Quaternion();

  /**
   * @param physXPhysics - The PhysX physics backend used to create the native joint
   * @param collider - The collider the joint is attached to (first actor slot)
   */
  constructor(physXPhysics: PhysXPhysics, collider: PhysXCollider) {
    super(physXPhysics);
    this._collider = collider;
    this._pxJoint = physXPhysics._pxPhysics.createRevoluteJoint(
      collider._pxActor,
      PhysXJoint._defaultVec,
      PhysXJoint._defaultQuat,
      null,
      PhysXJoint._defaultVec,
      PhysXJoint._defaultQuat
    );
  }

  /**
   * {@inheritDoc IJoint.setRotation }
   * Re-applies the axis so the connected frame picks up the new rotation.
   */
  override setRotation(value: Quaternion): void {
    const axis = this._axis;
    this._rotation.copyFrom(value);
    axis && this.setAxis(axis);
  }

  /**
   * {@inheritDoc IHingeJoint.setAxis }
   */
  setAxis(value: Vector3): void {
    this._axis = value;
    // Compute the rotation that maps the local X axis onto `value` and apply it
    // to both joint frames (the revolute axis is X in PhysX).
    // NOTE(review): assumes `value` is normalized; an unnormalized axis would make
    // the dot product leave [-1, 1] and acos return NaN — confirm caller contract.
    const xAxis = PhysXHingeJoint._xAxis;
    const axisRotationQuaternion = this._axisRotationQuaternion;
    xAxis.set(1, 0, 0);
    const angle = Math.acos(Vector3.dot(xAxis, value));
    Vector3.cross(xAxis, value, xAxis);
    Quaternion.rotationAxisAngle(xAxis, angle, axisRotationQuaternion);
    this._setLocalPose(0, this._anchor, axisRotationQuaternion);
    const connectedAxisRotationQuaternion = this._connectedAxisRotationQuaternion;
    Quaternion.multiply(this._rotation, axisRotationQuaternion, connectedAxisRotationQuaternion);
    this._setLocalPose(1, this._connectedAnchor, connectedAxisRotationQuaternion);
  }

  /**
   * {@inheritDoc IJoint.setAnchor }
   * Preserves the axis rotation computed by setAxis.
   */
  override setAnchor(value: Vector3): void {
    this._setLocalPose(0, value, this._axisRotationQuaternion);
    this._anchor = value;
  }

  /**
   * {@inheritDoc IJoint.setConnectedAnchor }
   */
  override setConnectedAnchor(value: Vector3): void {
    this._setLocalPose(1, value, this._connectedAxisRotationQuaternion);
    this._connectedAnchor = value;
  }

  /**
   * {@inheritDoc IHingeJoint.getAngle }
   */
  getAngle(): number {
    // Native angle is in radians; the public API reports degrees.
    return MathUtil.radianToDegree(this._pxJoint.getAngle());
  }

  /**
   * {@inheritDoc IHingeJoint.getVelocity }
   */
  getVelocity(): Readonly<number> {
    return this._pxJoint.getVelocity();
  }

  /**
   * {@inheritDoc IHingeJoint.setHardLimit }
   */
  setHardLimit(lowerLimit: number, upperLimit: number, contactDist: number): void {
    this._pxJoint.setHardLimit(MathUtil.degreeToRadian(lowerLimit), MathUtil.degreeToRadian(upperLimit), contactDist);
  }

  /**
   * {@inheritDoc IHingeJoint.setSoftLimit }
   */
  setSoftLimit(lowerLimit: number, upperLimit: number, stiffness: number, damping: number): void {
    this._pxJoint.setSoftLimit(
      MathUtil.degreeToRadian(lowerLimit),
      MathUtil.degreeToRadian(upperLimit),
      stiffness,
      damping
    );
  }

  /**
   * {@inheritDoc IHingeJoint.setDriveVelocity }
   */
  setDriveVelocity(velocity: number, autowake: boolean = true): void {
    this._pxJoint.setDriveVelocity(velocity, autowake);
  }

  /**
   * {@inheritDoc IHingeJoint.setDriveForceLimit }
   */
  setDriveForceLimit(limit: number): void {
    this._pxJoint.setDriveForceLimit(limit);
  }

  /**
   * {@inheritDoc IHingeJoint.setDriveGearRatio }
   */
  setDriveGearRatio(ratio: number): void {
    this._pxJoint.setDriveGearRatio(ratio);
  }

  /**
   * {@inheritDoc IHingeJoint.setHingeJointFlag }
   */
  setHingeJointFlag(flag: number, value: boolean): void {
    this._pxJoint.setRevoluteJointFlag(flag, value);
  }
}
|
||||
@@ -1,116 +0,0 @@
|
||||
import { IJoint } from "@galacean/engine-design";
|
||||
import { Quaternion, Vector3 } from "@galacean/engine";
|
||||
import { PhysXCollider } from "../PhysXCollider";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
|
||||
/**
 * a base interface providing common functionality for PhysX joints
 */
export class PhysXJoint implements IJoint {
  // Shared identity local-frame defaults used when creating native joints.
  protected static _defaultVec = new Vector3();
  protected static _defaultQuat = new Quaternion();

  // Native joint handle, created by the concrete subclass constructor.
  protected _pxJoint: any;
  protected _anchor: Vector3;
  protected _connectedAnchor: Vector3;
  protected _rotation: Quaternion = new Quaternion();
  protected _collider: PhysXCollider;
  // Break thresholds default to "unbreakable"; both are re-applied together
  // because the native API sets force and torque in a single call.
  private _breakForce: number = Number.MAX_VALUE;
  private _breakTorque: number = Number.MAX_VALUE;

  protected _physXPhysics: PhysXPhysics;

  constructor(physXPhysics: PhysXPhysics) {
    this._physXPhysics = physXPhysics;
  }

  /**
   * {@inheritDoc IJoint.setConnectedCollider }
   */
  setConnectedCollider(value: PhysXCollider): void {
    this._pxJoint.setActors(this._collider?._pxActor || null, value?._pxActor || null);
  }

  /**
   * {@inheritDoc IJoint.setAnchor }
   */
  setAnchor(value: Vector3): void {
    this._setLocalPose(0, value, PhysXJoint._defaultQuat);
    this._anchor = value;
  }

  /**
   * {@inheritDoc IJoint.setConnectedAnchor }
   */
  setConnectedAnchor(value: Vector3): void {
    this._setLocalPose(1, value, this._rotation);
    this._connectedAnchor = value;
  }

  /**
   * Sets the rotation of the connected local frame and caches it for later
   * setConnectedAnchor calls.
   */
  setRotation(value: Quaternion): void {
    this._setLocalPose(1, this._connectedAnchor, value);
    this._rotation.copyFrom(value);
  }

  /**
   * {@inheritDoc IJoint.setMassScale }
   * NOTE(review): the native API takes an inverse scale, hence 1 / value;
   * a zero value would produce Infinity — confirm caller contract.
   */
  setMassScale(value: number): void {
    this._pxJoint.setInvMassScale0(1 / value);
  }

  /**
   * {@inheritDoc IJoint.setConnectedMassScale }
   */
  setConnectedMassScale(value: number): void {
    this._pxJoint.setInvMassScale1(1 / value);
  }

  /**
   * {@inheritDoc IJoint.setInertiaScale }
   */
  setInertiaScale(value: number): void {
    this._pxJoint.setInvInertiaScale0(value);
  }

  /**
   * {@inheritDoc IJoint.setConnectedInertiaScale }
   */
  setConnectedInertiaScale(value: number): void {
    this._pxJoint.setInvInertiaScale1(value);
  }

  /**
   * {@inheritDoc IJoint.setBreakForce }
   */
  setBreakForce(value: number): void {
    this._breakForce = value;
    this._pxJoint.setBreakForce(this._breakForce, this._breakTorque);
  }

  /**
   * {@inheritDoc IJoint.setBreakTorque }
   */
  setBreakTorque(value: number): void {
    this._breakTorque = value;
    this._pxJoint.setBreakForce(this._breakForce, this._breakTorque);
  }

  /**
   * {@inheritDoc IJoint.destroy }
   */
  destroy(): void {
    // Safe to call before the subclass created the native joint, or twice.
    if (!this._pxJoint) return;
    this._pxJoint.release();
    this._collider = null;
  }
  /**
   * Set the joint local pose for an actor.
   * @param actor 0 for the first actor, 1 for the second actor.
   * @param position the local position for the actor this joint
   * @param rotation the local rotation for the actor this joint
   */
  protected _setLocalPose(actor: number, position: Vector3, rotation: Quaternion): void {
    this._pxJoint.setLocalPose(actor, position, rotation);
  }
}
|
||||
@@ -1,61 +0,0 @@
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXJoint } from "./PhysXJoint";
|
||||
import { ISpringJoint } from "@galacean/engine-design";
|
||||
import { PhysXCollider } from "../PhysXCollider";
|
||||
import { Vector3 } from "@galacean/engine";
|
||||
|
||||
/**
|
||||
* a joint that maintains an upper or lower bound (or both) on the distance between two points on different objects
|
||||
*/
|
||||
export class PhysXSpringJoint extends PhysXJoint implements ISpringJoint {
|
||||
constructor(physXPhysics: PhysXPhysics, collider: PhysXCollider) {
|
||||
super(physXPhysics);
|
||||
this._collider = collider;
|
||||
this._pxJoint = physXPhysics._pxPhysics.createDistanceJoint(
|
||||
null,
|
||||
PhysXJoint._defaultVec,
|
||||
PhysXJoint._defaultQuat,
|
||||
collider._pxActor,
|
||||
PhysXJoint._defaultVec,
|
||||
PhysXJoint._defaultQuat
|
||||
);
|
||||
this._pxJoint.setDistanceJointFlag(2, true); // enable max distance;
|
||||
this._pxJoint.setDistanceJointFlag(4, true); // enable min distance;
|
||||
this._pxJoint.setDistanceJointFlag(8, true); // enable spring;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ISpringJoint.setMinDistance }
|
||||
*/
|
||||
setMinDistance(distance: number): void {
|
||||
this._pxJoint.setMinDistance(distance);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ISpringJoint.setMaxDistance }
|
||||
*/
|
||||
setMaxDistance(distance: number): void {
|
||||
this._pxJoint.setMaxDistance(distance);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ISpringJoint.setTolerance }
|
||||
*/
|
||||
setTolerance(tolerance: number): void {
|
||||
this._pxJoint.setTolerance(tolerance);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ISpringJoint.setStiffness }
|
||||
*/
|
||||
setStiffness(stiffness: number): void {
|
||||
this._pxJoint.setStiffness(stiffness);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ISpringJoint.setDamping }
|
||||
*/
|
||||
setDamping(damping: number): void {
|
||||
this._pxJoint.setDamping(damping);
|
||||
}
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
import { Vector3 } from "@galacean/engine";
|
||||
import { IBoxColliderShape } from "@galacean/engine-design";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXPhysicsMaterial } from "../PhysXPhysicsMaterial";
|
||||
import { PhysXColliderShape } from "./PhysXColliderShape";
|
||||
|
||||
/**
|
||||
* Box collider shape in PhysX.
|
||||
*/
|
||||
export class PhysXBoxColliderShape extends PhysXColliderShape implements IBoxColliderShape {
|
||||
private static _tempHalfExtents = new Vector3();
|
||||
/** @internal */
|
||||
_halfSize: Vector3 = new Vector3();
|
||||
|
||||
constructor(physXPhysics: PhysXPhysics, uniqueID: number, size: Vector3, material: PhysXPhysicsMaterial) {
|
||||
super(physXPhysics);
|
||||
const halfSize = this._halfSize;
|
||||
halfSize.set(size.x * 0.5, size.y * 0.5, size.z * 0.5);
|
||||
this._pxGeometry = new physXPhysics._physX.PxBoxGeometry(halfSize.x, halfSize.y, halfSize.z);
|
||||
this._initialize(material, uniqueID);
|
||||
this._setLocalPose();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IBoxColliderShape.setSize }
|
||||
*/
|
||||
setSize(value: Vector3): void {
|
||||
const halfSize = this._halfSize;
|
||||
const tempExtents = PhysXBoxColliderShape._tempHalfExtents;
|
||||
halfSize.set(value.x * 0.5, value.y * 0.5, value.z * 0.5);
|
||||
Vector3.multiply(halfSize, this._worldScale, tempExtents);
|
||||
this._pxGeometry.halfExtents = tempExtents;
|
||||
this._pxShape.setGeometry(this._pxGeometry);
|
||||
|
||||
this._updateController(tempExtents);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IColliderShape.setRotation }
|
||||
*/
|
||||
override setRotation(value: Vector3): void {
|
||||
super.setRotation(value);
|
||||
if (this._controllers.length > 0) {
|
||||
console.warn("Box character controller `rotation` is not supported in PhysX and will be ignored");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IColliderShape.setWorldScale }
|
||||
*/
|
||||
override setWorldScale(scale: Vector3): void {
|
||||
super.setWorldScale(scale);
|
||||
const tempExtents = PhysXBoxColliderShape._tempHalfExtents;
|
||||
Vector3.multiply(this._halfSize, this._worldScale, tempExtents);
|
||||
this._pxGeometry.halfExtents = tempExtents;
|
||||
this._pxShape.setGeometry(this._pxGeometry);
|
||||
|
||||
this._updateController(tempExtents);
|
||||
}
|
||||
|
||||
private _updateController(extents: Vector3) {
|
||||
const controllers = this._controllers;
|
||||
for (let i = 0, n = controllers.length; i < n; i++) {
|
||||
const pxController = controllers.get(i)._pxController;
|
||||
|
||||
if (pxController) {
|
||||
pxController.setHalfHeight(extents.y);
|
||||
pxController.setHalfSideExtent(extents.x);
|
||||
pxController.setHalfForwardExtent(extents.z);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,175 +0,0 @@
|
||||
import { ICapsuleColliderShape } from "@galacean/engine-design";
|
||||
import { Quaternion, Vector3 } from "@galacean/engine";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXPhysicsMaterial } from "../PhysXPhysicsMaterial";
|
||||
import { PhysXColliderShape } from "./PhysXColliderShape";
|
||||
/**
|
||||
* Capsule collider shape in PhysX.
|
||||
*/
|
||||
export class PhysXCapsuleColliderShape extends PhysXColliderShape implements ICapsuleColliderShape {
|
||||
/** @internal */
|
||||
_radius: number;
|
||||
/** @internal */
|
||||
_halfHeight: number;
|
||||
/** @internal */
|
||||
_upAxis: ColliderShapeUpAxis = ColliderShapeUpAxis.Y;
|
||||
|
||||
constructor(
|
||||
physXPhysics: PhysXPhysics,
|
||||
uniqueID: number,
|
||||
radius: number,
|
||||
height: number,
|
||||
material: PhysXPhysicsMaterial
|
||||
) {
|
||||
super(physXPhysics);
|
||||
|
||||
this._radius = radius;
|
||||
this._halfHeight = height * 0.5;
|
||||
this._axis = new Quaternion(0, 0, PhysXColliderShape.halfSqrt, PhysXColliderShape.halfSqrt);
|
||||
this._physXRotation.copyFrom(this._axis);
|
||||
|
||||
this._pxGeometry = new physXPhysics._physX.PxCapsuleGeometry(radius, this._halfHeight);
|
||||
this._initialize(material, uniqueID);
|
||||
this._setLocalPose();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICapsuleColliderShape.setRadius }
|
||||
*/
|
||||
setRadius(value: number): void {
|
||||
this._radius = value;
|
||||
const sizeScale = this._worldScale;
|
||||
switch (this._upAxis) {
|
||||
case ColliderShapeUpAxis.X:
|
||||
this._pxGeometry.radius = this._radius * Math.max(sizeScale.y, sizeScale.z);
|
||||
break;
|
||||
case ColliderShapeUpAxis.Y:
|
||||
this._pxGeometry.radius = this._radius * Math.max(sizeScale.x, sizeScale.z);
|
||||
break;
|
||||
case ColliderShapeUpAxis.Z:
|
||||
this._pxGeometry.radius = this._radius * Math.max(sizeScale.x, sizeScale.y);
|
||||
break;
|
||||
}
|
||||
this._pxShape.setGeometry(this._pxGeometry);
|
||||
|
||||
const radius = this._pxGeometry.radius;
|
||||
const controllers = this._controllers;
|
||||
for (let i = 0, n = controllers.length; i < n; i++) {
|
||||
controllers.get(i)._pxController?.setRadius(radius);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICapsuleColliderShape.setHeight }
|
||||
*/
|
||||
setHeight(value: number): void {
|
||||
this._halfHeight = value * 0.5;
|
||||
const sizeScale = this._worldScale;
|
||||
switch (this._upAxis) {
|
||||
case ColliderShapeUpAxis.X:
|
||||
this._pxGeometry.halfHeight = this._halfHeight * sizeScale.x;
|
||||
break;
|
||||
case ColliderShapeUpAxis.Y:
|
||||
this._pxGeometry.halfHeight = this._halfHeight * sizeScale.y;
|
||||
break;
|
||||
case ColliderShapeUpAxis.Z:
|
||||
this._pxGeometry.halfHeight = this._halfHeight * sizeScale.z;
|
||||
break;
|
||||
}
|
||||
this._pxShape.setGeometry(this._pxGeometry);
|
||||
|
||||
const height = this._pxGeometry.halfHeight * 2;
|
||||
const controllers = this._controllers;
|
||||
for (let i = 0, n = controllers.length; i < n; i++) {
|
||||
controllers.get(i)._pxController?.setHeight(height);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICapsuleColliderShape.setRotation }
|
||||
*/
|
||||
override setRotation(value: Vector3): void {
|
||||
super.setRotation(value);
|
||||
if (this._controllers.length > 0) {
|
||||
console.warn("Capsule character controller `rotation` is not supported in PhysX and will be ignored");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ICapsuleColliderShape.setUpAxis }
|
||||
*/
|
||||
setUpAxis(upAxis: ColliderShapeUpAxis): void {
|
||||
const { _rotation: rotation, _axis: axis, _physXRotation: physXRotation } = this;
|
||||
|
||||
this._upAxis = upAxis;
|
||||
switch (this._upAxis) {
|
||||
case ColliderShapeUpAxis.X:
|
||||
axis.set(0, 0, 0, 1);
|
||||
break;
|
||||
case ColliderShapeUpAxis.Y:
|
||||
axis.set(0, 0, PhysXColliderShape.halfSqrt, PhysXColliderShape.halfSqrt);
|
||||
break;
|
||||
case ColliderShapeUpAxis.Z:
|
||||
axis.set(0, PhysXColliderShape.halfSqrt, 0, PhysXColliderShape.halfSqrt);
|
||||
break;
|
||||
}
|
||||
if (rotation) {
|
||||
Quaternion.rotationYawPitchRoll(rotation.y, rotation.x, rotation.z, physXRotation);
|
||||
Quaternion.multiply(physXRotation, axis, physXRotation);
|
||||
} else {
|
||||
physXRotation.copyFrom(axis);
|
||||
}
|
||||
this._setLocalPose();
|
||||
|
||||
if (this._controllers.length > 0) {
|
||||
console.warn("Capsule character controller `upAxis` is not supported in PhysX and will be ignored");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IColliderShape.setWorldScale }
|
||||
*/
|
||||
override setWorldScale(scale: Vector3): void {
|
||||
super.setWorldScale(scale);
|
||||
const sizeScale = this._worldScale;
|
||||
const geometry = this._pxGeometry;
|
||||
switch (this._upAxis) {
|
||||
case ColliderShapeUpAxis.X:
|
||||
geometry.radius = this._radius * Math.max(sizeScale.y, sizeScale.z);
|
||||
geometry.halfHeight = this._halfHeight * sizeScale.x;
|
||||
break;
|
||||
case ColliderShapeUpAxis.Y:
|
||||
geometry.radius = this._radius * Math.max(sizeScale.x, sizeScale.z);
|
||||
geometry.halfHeight = this._halfHeight * sizeScale.y;
|
||||
break;
|
||||
case ColliderShapeUpAxis.Z:
|
||||
geometry.radius = this._radius * Math.max(sizeScale.x, sizeScale.y);
|
||||
geometry.halfHeight = this._halfHeight * sizeScale.z;
|
||||
break;
|
||||
}
|
||||
this._pxShape.setGeometry(geometry);
|
||||
|
||||
const radius = geometry.radius;
|
||||
const height = geometry.halfHeight * 2;
|
||||
const controllers = this._controllers;
|
||||
for (let i = 0, n = controllers.length; i < n; i++) {
|
||||
const pxController = controllers.get(i)._pxController;
|
||||
if (pxController) {
|
||||
pxController.setRadius(radius);
|
||||
pxController.setHeight(height);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * The up axis of the collider shape.
 * Used by PhysXCapsuleColliderShape to orient the capsule and to pick which
 * scale components apply to its radius and height.
 */
export enum ColliderShapeUpAxis {
  /** Up axis is X. */
  X,
  /** Up axis is Y. */
  Y,
  /** Up axis is Z. */
  Z
}
|
||||
@@ -1,185 +0,0 @@
|
||||
import { Quaternion, Vector3, DisorderedArray, Vector4, MathUtil } from "@galacean/engine";
|
||||
import { IColliderShape } from "@galacean/engine-design";
|
||||
import { PhysXCharacterController } from "../PhysXCharacterController";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXPhysicsMaterial } from "../PhysXPhysicsMaterial";
|
||||
|
||||
/**
 * Flags which affect the behavior of Shapes.
 * Bit values are passed straight into the PhysX `PxShapeFlags` constructor
 * (see `_setShapeFlags`), so they must stay in sync with PxShapeFlag.
 */
export enum ShapeFlag {
  /** The shape will partake in collision in the physical simulation. */
  SIMULATION_SHAPE = 1 << 0,
  /** The shape will partake in scene queries (ray casts, overlap tests, sweeps, ...). */
  SCENE_QUERY_SHAPE = 1 << 1,
  /** The shape is a trigger which can send reports whenever other shapes enter/leave its volume. */
  TRIGGER_SHAPE = 1 << 2
}
|
||||
|
||||
/**
 * Abstract class for collider shapes.
 * Owns a PhysX shape + geometry pair, keeps them in sync with the engine-side
 * position/rotation/scale, and forwards relevant changes to attached character controllers.
 */
export abstract class PhysXColliderShape implements IColliderShape {
  // ≈ sqrt(2) / 2 — quaternion component for a 90° rotation about a single axis.
  static readonly halfSqrt: number = 0.70710678118655;
  // Shared scratch transform reused by _setLocalPose to avoid per-call allocation.
  static transform = {
    translation: new Vector3(),
    rotation: null
  };

  protected static _tempVector4 = new Vector4();

  /** @internal Character controllers currently driven by this shape. */
  _controllers: DisorderedArray<PhysXCharacterController> = new DisorderedArray<PhysXCharacterController>();
  /** @internal Contact offset. NOTE(review): field name looks like a typo of "contactOffset"; renaming would touch external users, so it is only flagged here. */
  _contractOffset: number = 0.02;

  /** @internal World scale applied on top of the local position/geometry. */
  _worldScale: Vector3 = new Vector3(1, 1, 1);
  /** @internal Local position of the shape relative to its collider. */
  _position: Vector3 = new Vector3();
  /** @internal Native PhysX material handle. */
  _pxMaterial: any;
  /** @internal Native PhysX shape handle. */
  _pxShape: any;
  /** @internal Unique shape id, registered with PhysX via setUUID in _initialize. */
  _id: number;
  /** @internal User rotation in radians (converted from degrees in setRotation). */
  _rotation: Vector3 = new Vector3();

  protected _physXPhysics: PhysXPhysics;
  protected _pxGeometry: any;
  // Intrinsic axis alignment applied after the user rotation (set by capsule/plane subclasses).
  protected _axis: Quaternion = null;
  // Final rotation sent to PhysX: user rotation composed with _axis.
  protected _physXRotation: Quaternion = new Quaternion();

  private _shapeFlags: ShapeFlag = ShapeFlag.SCENE_QUERY_SHAPE | ShapeFlag.SIMULATION_SHAPE;

  constructor(physXPhysics: PhysXPhysics) {
    this._physXPhysics = physXPhysics;
  }

  /**
   * {@inheritDoc IColliderShape.setRotation }
   */
  setRotation(value: Vector3): void {
    // Engine rotations arrive in degrees; PhysX needs radians.
    const rotation = this._rotation.set(
      MathUtil.degreeToRadian(value.x),
      MathUtil.degreeToRadian(value.y),
      MathUtil.degreeToRadian(value.z)
    );
    Quaternion.rotationYawPitchRoll(rotation.y, rotation.x, rotation.z, this._physXRotation);
    // Compose with the intrinsic axis alignment when the subclass defines one.
    this._axis && Quaternion.multiply(this._physXRotation, this._axis, this._physXRotation);
    this._physXRotation.normalize();
    this._setLocalPose();
  }

  /**
   * {@inheritDoc IColliderShape.setPosition }
   */
  setPosition(value: Vector3): void {
    // Guard against self-copy when the caller passes the stored vector back in.
    if (value !== this._position) {
      this._position.copyFrom(value);
    }
    const controllers = this._controllers;
    for (let i = 0, n = controllers.length; i < n; i++) {
      controllers.get(i)._updateShapePosition(this._position, this._worldScale);
    }

    this._setLocalPose();
  }

  /**
   * {@inheritDoc IColliderShape.setWorldScale }
   */
  setWorldScale(scale: Vector3): void {
    // Negative scales are not representable in PhysX geometry; use absolute values.
    this._worldScale.set(Math.abs(scale.x), Math.abs(scale.y), Math.abs(scale.z));
    this._setLocalPose();

    const controllers = this._controllers;
    for (let i = 0, n = controllers.length; i < n; i++) {
      controllers.get(i)._updateShapePosition(this._position, this._worldScale);
    }
  }

  /**
   * {@inheritDoc IColliderShape.setContactOffset }
   * @default 0.02f * PxTolerancesScale::length
   */
  setContactOffset(offset: number): void {
    this._contractOffset = offset;
    const controllers = this._controllers;
    // When driven by character controllers the offset lives on the controller,
    // otherwise it is set on the shape itself.
    if (controllers.length) {
      for (let i = 0, n = controllers.length; i < n; i++) {
        controllers.get(i)._pxController?.setContactOffset(offset);
      }
    } else {
      this._pxShape.setContactOffset(offset);
    }
  }

  /**
   * {@inheritDoc IColliderShape.setMaterial }
   */
  setMaterial(value: PhysXPhysicsMaterial): void {
    this._pxMaterial = value._pxMaterial;
    this._pxShape.setMaterial(this._pxMaterial);
  }

  /**
   * {@inheritDoc IColliderShape.setIsTrigger }
   */
  setIsTrigger(value: boolean): void {
    // Simulation and trigger flags are mutually exclusive in PhysX.
    this._modifyFlag(ShapeFlag.SIMULATION_SHAPE, !value);
    this._modifyFlag(ShapeFlag.TRIGGER_SHAPE, value);
    this._setShapeFlags(this._shapeFlags);
  }

  /**
   * {@inheritDoc IColliderShape.pointDistance }
   * Returns (closestPoint.xyz, distance) packed in a Vector4.
   * The returned vector is a shared scratch — copy it before caching.
   */
  pointDistance(point: Vector3): Vector4 {
    const info = this._pxGeometry.pointDistance(this._pxShape.getGlobalPose(), point);
    const closestPoint = info.closestPoint;
    const res = PhysXColliderShape._tempVector4;
    res.set(closestPoint.x, closestPoint.y, closestPoint.z, info.distance);
    return res;
  }

  /**
   * {@inheritDoc IColliderShape.destroy }
   */
  destroy(): void {
    this._pxShape.release();
    this._pxGeometry.delete();
  }

  /**
   * @internal
   * Replace the shape's flag set; the temporary PxShapeFlags wrapper is freed immediately.
   */
  _setShapeFlags(flags: ShapeFlag) {
    this._shapeFlags = flags;
    const shapeFlags = new this._physXPhysics._physX.PxShapeFlags(this._shapeFlags);
    this._pxShape.setFlags(shapeFlags);
    shapeFlags.delete();
  }

  // Push the (scaled) position and composed rotation to PhysX via the shared scratch transform.
  protected _setLocalPose(): void {
    const transform = PhysXColliderShape.transform;
    Vector3.multiply(this._position, this._worldScale, transform.translation);
    transform.rotation = this._physXRotation;
    this._pxShape.setLocalPose(transform);
  }

  // Create the native shape from _pxGeometry (which subclasses must set first) and register its id.
  protected _initialize(material: PhysXPhysicsMaterial, id: number): void {
    this._id = id;
    this._pxMaterial = material._pxMaterial;
    const shapeFlags = new this._physXPhysics._physX.PxShapeFlags(this._shapeFlags);
    this._pxShape = this._physXPhysics._pxPhysics.createShape(this._pxGeometry, material._pxMaterial, true, shapeFlags);
    shapeFlags.delete();
    this._pxShape.setUUID(id);
  }

  // Set or clear a single flag bit in the cached flag set (does not touch the native shape).
  private _modifyFlag(flag: ShapeFlag, value: boolean): void {
    this._shapeFlags = value ? this._shapeFlags | flag : this._shapeFlags & ~flag;
  }
}
|
||||
@@ -1,20 +0,0 @@
|
||||
import { IPlaneColliderShape } from "@galacean/engine-design";
|
||||
import { Quaternion } from "@galacean/engine";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXPhysicsMaterial } from "../PhysXPhysicsMaterial";
|
||||
import { PhysXColliderShape } from "./PhysXColliderShape";
|
||||
|
||||
/**
 * Plane collider shape in PhysX.
 * Applies the same axis quaternion as the capsule shape — presumably to rotate
 * the PhysX default plane orientation into the engine's convention (TODO confirm).
 */
export class PhysXPlaneColliderShape extends PhysXColliderShape implements IPlaneColliderShape {
  /**
   * @param physXPhysics - PhysX runtime wrapper
   * @param uniqueID - Unique shape id registered with the engine
   * @param material - Physics material applied to the shape
   */
  constructor(physXPhysics: PhysXPhysics, uniqueID: number, material: PhysXPhysicsMaterial) {
    super(physXPhysics);
    this._axis = new Quaternion(0, 0, PhysXColliderShape.halfSqrt, PhysXColliderShape.halfSqrt);
    this._physXRotation.copyFrom(this._axis);

    this._pxGeometry = new physXPhysics._physX.PxPlaneGeometry();
    this._initialize(material, uniqueID);
    this._setLocalPose();
  }
}
|
||||
@@ -1,42 +0,0 @@
|
||||
import { Vector3 } from "@galacean/engine";
|
||||
import { ISphereColliderShape } from "@galacean/engine-design";
|
||||
import { PhysXPhysics } from "../PhysXPhysics";
|
||||
import { PhysXPhysicsMaterial } from "../PhysXPhysicsMaterial";
|
||||
import { PhysXColliderShape } from "./PhysXColliderShape";
|
||||
|
||||
/**
|
||||
* Sphere collider shape in PhysX.
|
||||
*/
|
||||
export class PhysXSphereColliderShape extends PhysXColliderShape implements ISphereColliderShape {
|
||||
private _radius: number;
|
||||
private _maxScale: number = 1;
|
||||
|
||||
constructor(physXPhysics: PhysXPhysics, uniqueID: number, radius: number, material: PhysXPhysicsMaterial) {
|
||||
super(physXPhysics);
|
||||
|
||||
this._radius = radius;
|
||||
this._pxGeometry = new physXPhysics._physX.PxSphereGeometry(radius * this._maxScale);
|
||||
this._initialize(material, uniqueID);
|
||||
this._setLocalPose();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc ISphereColliderShape.setRadius }
|
||||
*/
|
||||
setRadius(value: number): void {
|
||||
this._radius = value;
|
||||
this._pxGeometry.radius = value * this._maxScale;
|
||||
this._pxShape.setGeometry(this._pxGeometry);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc IColliderShape.setWorldScale }
|
||||
*/
|
||||
override setWorldScale(scale: Vector3): void {
|
||||
super.setWorldScale(scale);
|
||||
|
||||
this._maxScale = Math.max(Math.abs(scale.x), Math.abs(scale.y), Math.abs(scale.z));
|
||||
this._pxGeometry.radius = this._radius * this._maxScale;
|
||||
this._pxShape.setGeometry(this._pxGeometry);
|
||||
}
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"declaration": true,
|
||||
"moduleResolution": "node",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"experimentalDecorators": true,
|
||||
"declarationDir": "types",
|
||||
"emitDeclarationOnly": true,
|
||||
"noImplicitOverride": true,
|
||||
"sourceMap": true,
|
||||
"incremental": false,
|
||||
"skipLibCheck": true,
|
||||
"stripInternal": true
|
||||
},
|
||||
"include": ["src/**/*"]
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @galacean/engine-shaderlab
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import { ShaderLab } from "@galacean/engine-shaderlab";
|
||||
|
||||
// Create ShaderLab
|
||||
const shaderLab = new ShaderLab();
|
||||
|
||||
// Create engine with shaderLab
|
||||
const engine = await WebGLEngine.create({ canvas: "canvas", shaderLab });
|
||||
|
||||
......
|
||||
|
||||
// Create shader by galacean shader code directly
|
||||
const shader = Shader.create(galaceanShaderCode);
|
||||
|
||||
.......
|
||||
|
||||
// Run engine
|
||||
engine.run()
|
||||
```
|
||||
|
||||
There are two versions of ShaderLab: `Release` and `Verbose`. The `Verbose` version offers more user-friendly diagnostic information for debugging ShaderLab compilation errors, while the `Release` version provides superior performance.
|
||||
|
||||
You can use the `Verbose` version by importing it as follows:
|
||||
|
||||
```ts
|
||||
import { ShaderLab } from "@galacean/engine-shaderlab/verbose";
|
||||
```
|
||||
|
||||
## CFG Grammar conflict detection
|
||||
|
||||
The Galacean ShaderLab syntax is defined using Context-Free Grammar (CFG) and is documented within the `*.y` file. When modifications to the ShaderLab syntax are required, it is recommended to make changes to the existing CFG syntax file and employ [Bison](https://www.gnu.org/software/bison/manual/bison.html) to detect any potential grammar conflicts.
|
||||
|
||||
```sh
|
||||
bison ./Parser.y -r all
|
||||
```
|
||||
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"name": "@galacean/engine-shaderlab",
|
||||
"version": "0.0.0-experimental-backup.0",
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
"registry": "https://registry.npmjs.org"
|
||||
},
|
||||
"repository": {
|
||||
"url": "https://github.com/galacean/engine.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"main": "dist/main.js",
|
||||
"module": "dist/module.js",
|
||||
"browser": "dist/browser.min.js",
|
||||
"debug": "src/index.ts",
|
||||
"types": "types/index.d.ts",
|
||||
"scripts": {
|
||||
"b:types": "tsc"
|
||||
},
|
||||
"umd": {
|
||||
"name": "Galacean.ShaderLab",
|
||||
"globals": {
|
||||
"@galacean/engine": "Galacean"
|
||||
}
|
||||
},
|
||||
"files": [
|
||||
"dist/**/*",
|
||||
"types/**/*",
|
||||
"verbose/package.json"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@galacean/engine-design": "workspace:*",
|
||||
"@galacean/engine": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@galacean/engine": "workspace:*"
|
||||
}
|
||||
}
|
||||
@@ -1,70 +0,0 @@
|
||||
import { ShaderPosition } from "./common/ShaderPosition";
|
||||
import { ShaderRange } from "./common/ShaderRange";
|
||||
|
||||
export class GSError extends Error {
|
||||
static wrappingLineCount = 2;
|
||||
|
||||
constructor(
|
||||
name: GSErrorName,
|
||||
message: string,
|
||||
public readonly location: ShaderRange | ShaderPosition,
|
||||
public readonly source: string,
|
||||
public readonly file?: string
|
||||
) {
|
||||
super(message);
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
override toString(): string {
|
||||
let start: ShaderPosition, end: ShaderPosition;
|
||||
const { message, location, source } = this;
|
||||
if (!source) {
|
||||
return message;
|
||||
}
|
||||
|
||||
if (location instanceof ShaderPosition) {
|
||||
start = end = location;
|
||||
} else {
|
||||
start = location.start;
|
||||
end = location.end;
|
||||
}
|
||||
const lines = source.split("\n");
|
||||
|
||||
let diagnosticMessage = `${this.name}: ${message}\n\n`;
|
||||
|
||||
// #if _VERBOSE
|
||||
const lineSplit = "|···";
|
||||
|
||||
const wrappingLineCount = GSError.wrappingLineCount;
|
||||
for (let i = start.line - wrappingLineCount, n = end.line + wrappingLineCount; i <= n; i++) {
|
||||
const line = lines[i];
|
||||
diagnosticMessage += lineSplit + `${line}\n`;
|
||||
|
||||
if (i < start.line || i > end.line) continue;
|
||||
|
||||
let remarkStart = 0;
|
||||
let remarkEnd = line.length;
|
||||
let paddingLength = lineSplit.length;
|
||||
if (i === start.line) {
|
||||
remarkStart = start.column;
|
||||
paddingLength += start.column;
|
||||
}
|
||||
if (i === end.line) {
|
||||
remarkEnd = end.column;
|
||||
}
|
||||
const remarkLength = Math.max(remarkEnd - remarkStart, 1);
|
||||
|
||||
diagnosticMessage += " ".repeat(paddingLength) + "^".repeat(remarkLength) + "\n";
|
||||
}
|
||||
// #endif
|
||||
|
||||
return diagnosticMessage;
|
||||
}
|
||||
}
|
||||
|
||||
/** Diagnostic categories reported by the ShaderLab compiler. */
export enum GSErrorName {
  PreprocessorError = "PreprocessorError",
  CompilationError = "CompilationError",
  ScannerError = "ScannerError",
  // NOTE: the enum key says "Warn" but the runtime string is "CompilationWarning".
  CompilationWarn = "CompilationWarning"
}
|
||||
@@ -1,66 +0,0 @@
|
||||
import { ETokenType, GalaceanDataType, TypeAny } from "./common";
|
||||
import { BaseToken as Token } from "./common/BaseToken";
|
||||
import { TreeNode } from "./parser/AST";
|
||||
import { GrammarSymbol, NoneTerminal } from "./parser/GrammarSymbol";
|
||||
// #if _VERBOSE
|
||||
import { Keyword } from "./common/enums/Keyword";
|
||||
import State from "./lalr/State";
|
||||
// #endif
|
||||
|
||||
export class ParserUtils {
  /**
   * Walk down the first-child chain of `node` and return the first descendant
   * whose non-terminal kind equals `type`, or undefined if a token is reached first.
   */
  static unwrapNodeByType<T = TreeNode>(node: TreeNode, type: NoneTerminal): T | undefined {
    const child = node.children[0];
    if (child instanceof Token) return;
    if (child.nt === type) return child as T;
    return ParserUtils.unwrapNodeByType(child, type);
  }

  // #if _VERBOSE
  /**
   * Check if type `tb` is compatible with type `ta`.
   * Undefined or `TypeAny` is compatible with everything; UINT is accepted where INT is expected.
   */
  static typeCompatible(ta: GalaceanDataType, tb: GalaceanDataType | undefined) {
    if (tb == undefined || tb === TypeAny) return true;
    if (ta === Keyword.INT) {
      return ta === tb || tb === Keyword.UINT;
    }
    return ta === tb;
  }

  /** Render a grammar symbol as its token/keyword/non-terminal name for diagnostics. */
  static toString(sm: GrammarSymbol) {
    if (this.isTerminal(sm)) {
      return ETokenType[sm] ?? Keyword[sm];
    }
    return NoneTerminal[sm];
  }
  // #endif

  // Terminals occupy the numeric range below NoneTerminal.START.
  static isTerminal(sm: GrammarSymbol) {
    return sm < NoneTerminal.START;
  }

  /**
   * @internal
   * Dump the LALR state pool to the console for debugging.
   * NOTE(review): the `logPath` parameter is never used — output goes to console only.
   */
  // #if _VERBOSE
  static printStatePool(logPath: string) {
    let output = "";

    console.log("========== Parser Pool ==========");

    let count = 0;
    for (const state of State.pool.values()) {
      count++;
      let tmp = "";
      tmp += `${state.id}: \n`.padEnd(4);
      for (const psItem of state.items) {
        tmp += "  " + psItem.toString() + "\n";
      }
      output += tmp;
    }

    console.log("state count:", count);
    console.log(output);
  }
  // #endif
}
|
||||
@@ -1,189 +0,0 @@
|
||||
import { Logger, ShaderPass } from "@galacean/engine";
|
||||
/** @ts-ignore */
|
||||
import { ShaderLib } from "@galacean/engine";
|
||||
|
||||
/** Classification of a macro's replacement value, used when resolving references. */
export enum MacroValueType {
  Number, // 1, 1.1
  Symbol, // variable name
  FunctionCall, // function call, e.g. clamp(a, 0.0, 1.0)
  Other // shaderLab does not check this
}
|
||||
|
||||
/** A single parsed `#define` occurrence. */
export interface MacroDefineInfo {
  /** True when the macro declares a parameter list, i.e. `#define F(a, b) ...`. */
  isFunction: boolean;
  /** Macro name. */
  name: string;
  /** Trimmed replacement text. */
  value: string;
  /** Classification of `value` — see MacroValueType. */
  valueType: MacroValueType;
  /** Parameter names for function-like macros; empty otherwise. */
  params: string[];
  /** Callee name when `valueType` is FunctionCall; empty string otherwise. */
  functionCallName: string;
}
|
||||
|
||||
/** Map from macro name to every distinct definition seen for that name. */
export interface MacroDefineList {
  [macroName: string]: MacroDefineInfo[];
}
|
||||
|
||||
export class Preprocessor {
  // Matches `#include "path"` directives at line start.
  private static readonly _includeReg = /^[ \t]*#include +"([\w\d./]+)"/gm;
  // Matches `#define NAME(params?) value`, allowing an optional trailing comment.
  private static readonly _macroRegex =
    /^\s*#define\s+(\w+)[ ]*(\(([^)]*)\))?[ ]+(\(?\w+\)?.*?)(?:\/\/.*|\/\*.*?\*\/)?\s*$/gm;
  // A bare identifier.
  private static readonly _symbolReg = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
  // `name(args)` function-call form.
  private static readonly _funcCallReg = /^([a-zA-Z_][a-zA-Z0-9_]*)\s*\((.*)\)$/;
  // Cache of macro defines parsed per include path, so each chunk is scanned once.
  private static readonly _macroDefineIncludeMap = new Map<string, MacroDefineList>();

  /**
   * @internal
   * Include paths seen so far; used to warn when a chunk is included multiple times.
   */
  static _repeatIncludeSet = new Set<string>();
|
||||
|
||||
/**
 * Expand `#include` directives in `source` and collect `#define` information.
 * @param source - Shader source text
 * @param basePathForIncludeKey - Base URL used to resolve relative include paths
 * @param outMacroDefineList - Receives macro definitions found in `source` and its includes
 * @param parseMacro - Whether to scan `source` itself for `#define`s (included chunks are always scanned)
 * @returns Source text with every include directive replaced by its chunk content
 */
static parse(
  source: string,
  basePathForIncludeKey: string,
  outMacroDefineList: MacroDefineList,
  parseMacro = true
): string {
  if (parseMacro) {
    this._parseMacroDefines(source, outMacroDefineList);
  }
  return source.replace(this._includeReg, (_, includeName) =>
    this._replace(includeName, basePathForIncludeKey, outMacroDefineList)
  );
}
|
||||
|
||||
/**
 * Collect the (deduplicated) symbol names referenced by the definitions of `macroName`.
 * `out` is cleared first; names that are the macro's own parameters are skipped.
 */
static getReferenceSymbolNames(macroDefineList: MacroDefineList, macroName: string, out: string[]): void {
  out.length = 0;
  const infos = macroDefineList[macroName];
  if (!infos) return;

  for (let i = 0; i < infos.length; i++) {
    const info = infos[i];
    const valueType = info.valueType;
    if (valueType === MacroValueType.FunctionCall || valueType === MacroValueType.Symbol) {
      // A function-call value references its callee; a symbol value references itself.
      const referencedName = valueType === MacroValueType.FunctionCall ? info.functionCallName : info.value;
      if (info.params.indexOf(referencedName) !== -1) continue;
      if (out.indexOf(referencedName) === -1) out.push(referencedName);
    } else if (valueType === MacroValueType.Other) {
      // #if _VERBOSE
      Logger.warn(
        `Warning: Macro "${info.name}" has an unrecognized value "${info.value}". ShaderLab does not validate this type.`
      );
      // #endif
    }
  }
}
|
||||
|
||||
/** True when `str` parses as a numeric literal (e.g. "1", "1.1"). */
private static _isNumber(str: string): boolean {
  // Number() already coerced the operand, so Number.isNaN is equivalent here.
  return !Number.isNaN(Number(str));
}
|
||||
|
||||
/** True when `list` already contains a define structurally identical to `item`. */
private static _isExist(list: MacroDefineInfo[], item: MacroDefineInfo): boolean {
  for (const existing of list) {
    if (
      existing.valueType !== item.valueType ||
      existing.value !== item.value ||
      existing.isFunction !== item.isFunction ||
      existing.functionCallName !== item.functionCallName ||
      existing.params.length !== item.params.length
    ) {
      continue;
    }
    let paramsMatch = true;
    for (let i = 0; i < existing.params.length; i++) {
      if (existing.params[i] !== item.params[i]) {
        paramsMatch = false;
        break;
      }
    }
    if (paramsMatch) return true;
  }
  return false;
}
|
||||
|
||||
/**
 * Scan `source` for `#define` directives and append a MacroDefineInfo for each
 * distinct definition into `outMacroList` (keyed by macro name).
 */
private static _parseMacroDefines(source: string, outMacroList: MacroDefineList): void {
  let match: RegExpExecArray | null;
  // _macroRegex is a shared `g`-flagged regex; reset its cursor before reuse.
  this._macroRegex.lastIndex = 0;

  while ((match = this._macroRegex.exec(source)) !== null) {
    const [, name, paramsGroup, paramsStr, valueRaw] = match;
    // Function-like only when a parameter list AND a value are both present.
    const isFunction = !!paramsGroup && !!valueRaw;
    const params =
      isFunction && paramsStr
        ? paramsStr
            .split(",")
            .map((p) => p.trim())
            .filter(Boolean)
        : [];
    const value = valueRaw ? valueRaw.trim() : "";

    // Classify the replacement text: number, bare symbol, function call, or other.
    let valueType = MacroValueType.Other;
    let functionCallName = "";

    if (this._isNumber(value)) {
      valueType = MacroValueType.Number;
    } else if (this._symbolReg.test(value)) {
      valueType = MacroValueType.Symbol;
    } else {
      const callMatch = this._funcCallReg.exec(value);
      if (callMatch) {
        valueType = MacroValueType.FunctionCall;
        functionCallName = callMatch[1];
      }
    }

    const info: MacroDefineInfo = {
      isFunction,
      name,
      value,
      valueType,
      params,
      functionCallName
    };

    // Deduplicate structurally-identical definitions of the same macro name.
    const arr = outMacroList[name];
    if (arr) {
      if (!this._isExist(arr, info)) arr.push(info);
    } else {
      outMacroList[name] = [info];
    }
  }
}
|
||||
|
||||
private static _mergeMacroDefineLists(from: MacroDefineList, to: MacroDefineList): void {
|
||||
for (const macroName in from) {
|
||||
if (to[macroName]) {
|
||||
const target = to[macroName];
|
||||
const src = from[macroName];
|
||||
for (let i = 0; i < src.length; i++) {
|
||||
const info = src[i];
|
||||
if (!this._isExist(target, info)) target.push(info);
|
||||
}
|
||||
} else {
|
||||
to[macroName] = from[macroName];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static _replace(
|
||||
includeName: string,
|
||||
basePathForIncludeKey: string,
|
||||
outMacroDefineList: MacroDefineList
|
||||
): string {
|
||||
let path: string;
|
||||
if (includeName[0] === ".") {
|
||||
// @ts-ignore
|
||||
path = new URL(includeName, basePathForIncludeKey).href.substring(ShaderPass._shaderRootPath.length);
|
||||
} else {
|
||||
path = includeName;
|
||||
}
|
||||
|
||||
const chunk = (ShaderLib as any)[path];
|
||||
if (!chunk) {
|
||||
Logger.error(`Shader slice "${path}" not founded.`);
|
||||
return "";
|
||||
}
|
||||
|
||||
if (this._repeatIncludeSet.has(path)) {
|
||||
Logger.warn(`Shader slice "${path}" is included multiple times.`);
|
||||
}
|
||||
this._repeatIncludeSet.add(path);
|
||||
|
||||
if (this._macroDefineIncludeMap.has(path)) {
|
||||
this._mergeMacroDefineLists(this._macroDefineIncludeMap.get(path)!, outMacroDefineList);
|
||||
} else {
|
||||
const chunkMacroDefineList: MacroDefineList = {};
|
||||
this._parseMacroDefines(chunk, chunkMacroDefineList);
|
||||
this._macroDefineIncludeMap.set(path, chunkMacroDefineList);
|
||||
this._mergeMacroDefineLists(chunkMacroDefineList, outMacroDefineList);
|
||||
}
|
||||
|
||||
return this.parse(chunk, basePathForIncludeKey, outMacroDefineList, false);
|
||||
}
|
||||
}
|
||||
@@ -1,120 +0,0 @@
|
||||
import { Logger, ShaderMacro, ShaderLanguage } from "@galacean/engine";
|
||||
import { IShaderLab, IShaderSource } from "@galacean/engine-design";
|
||||
import { IShaderProgramSource } from "@galacean/engine-design/types/shader-lab/IShaderProgramSource";
|
||||
import { GLES100Visitor, GLES300Visitor } from "./codeGen";
|
||||
import { ShaderPosition, ShaderRange } from "./common";
|
||||
import { Lexer } from "./lexer";
|
||||
import { MacroParser } from "./macroProcessor/MacroParser";
|
||||
import { ShaderTargetParser } from "./parser";
|
||||
import { Preprocessor } from "./Preprocessor";
|
||||
import { ShaderLabUtils } from "./ShaderLabUtils";
|
||||
import { ShaderSourceParser } from "./sourceParser/ShaderSourceParser";
|
||||
|
||||
export class ShaderLab implements IShaderLab {
  // Shared parser instance; reused across _parseShaderPass calls.
  private static _parser = ShaderTargetParser.create();
  // Pools for position/range objects, cleared via ShaderLabUtils between parses.
  private static _shaderPositionPool = ShaderLabUtils.createObjectPool(ShaderPosition);
  private static _shaderRangePool = ShaderLabUtils.createObjectPool(ShaderRange);

  // #if _VERBOSE
  // Source text of the pass currently being compiled; used by error reporting.
  static _processingPassText?: string;
  // #endif

  /** Get a pooled ShaderPosition set to `index` (line/column only kept in verbose builds). */
  static createPosition(index: number, line?: number, column?: number): ShaderPosition {
    const position = this._shaderPositionPool.get();
    position.set(
      index,
      // #if _VERBOSE
      line,
      column
      // #endif
    );
    return position;
  }

  /** Get a pooled ShaderRange spanning `start`..`end`. */
  static createRange(start: ShaderPosition, end: ShaderPosition): ShaderRange {
    const range = this._shaderRangePool.get();
    range.set(start, end);
    return range;
  }

  /**
   * Parse a full ShaderLab source into its structural description.
   * Clears all ShaderLab object pools first, so previously created
   * positions/ranges become invalid.
   */
  _parseShaderSource(sourceCode: string): IShaderSource {
    ShaderLabUtils.clearAllShaderLabObjectPool();
    const shaderSource = ShaderSourceParser.parse(sourceCode);

    // #if _VERBOSE
    this._logErrors(ShaderSourceParser.errors);
    // #endif

    return shaderSource;
  }

  /**
   * Compile one shader pass: preprocess includes/macros, tokenize, parse, and
   * generate GLSL for the requested backend.
   * @returns The generated program source, or undefined when parsing fails.
   */
  _parseShaderPass(
    source: string,
    vertexEntry: string,
    fragmentEntry: string,
    backend: ShaderLanguage,
    basePathForIncludeKey: string
  ): IShaderProgramSource | undefined {
    const totalStartTime = performance.now();
    const macroDefineList = {};
    // Reset per-pass include bookkeeping before expanding #include directives.
    Preprocessor._repeatIncludeSet.clear();
    const noIncludeContent = Preprocessor.parse(source, basePathForIncludeKey, macroDefineList);
    Logger.info(`[Task - Pre processor] cost time ${performance.now() - totalStartTime}ms`);

    const lexer = new Lexer(noIncludeContent, macroDefineList);

    const tokens = lexer.tokenize();
    const { _parser: parser } = ShaderLab;

    // Expose the pass text while parsing/codegen runs, for error messages.
    ShaderLab._processingPassText = noIncludeContent;

    const program = parser.parse(tokens, macroDefineList);

    // #if _VERBOSE
    this._logErrors(parser.errors);
    // #endif

    if (!program) {
      // NOTE(review): _processingPassText is not reset on this early-exit
      // path; it is only cleared after successful codegen below — confirm
      // whether that is intentional.
      return undefined;
    }

    // GLSL ES 1.00 and 3.00 use distinct singleton code generators.
    const codeGen = backend === ShaderLanguage.GLSLES100 ? GLES100Visitor.getVisitor() : GLES300Visitor.getVisitor();

    const codeGenStartTime = performance.now();
    const ret = codeGen.visitShaderProgram(program, vertexEntry, fragmentEntry);
    Logger.info(`[Task - CodeGen] cost time: ${performance.now() - codeGenStartTime}ms`);
    Logger.info(`[Task - Total compilation] cost time: ${performance.now() - totalStartTime}ms`);
    ShaderLab._processingPassText = undefined;

    // #if _VERBOSE
    this._logErrors(codeGen.errors);
    // #endif

    return ret;
  }

  /**
   * Expand macro conditionals/definitions in `content` given the currently
   * enabled `macros`.
   */
  _parseMacros(content: string, macros: ShaderMacro[]): string {
    const startTime = performance.now();
    const parsedContent = MacroParser.parse(content, macros);
    Logger.info(`[Task - parse macros] cost time: ${performance.now() - startTime}ms`);

    // #if _VERBOSE
    this._logErrors(MacroParser._errors);
    // #endif

    return parsedContent;
  }

  // #if _VERBOSE
  /**
   * @internal
   * Log each accumulated error; no-op when there are none or logging is off.
   */
  _logErrors(errors: Error[]) {
    if (errors.length === 0 || !Logger.isEnabled) return;
    Logger.error(`${errors.length} errors occur!`);
    for (const err of errors) {
      Logger.error(err.toString());
    }
  }
  // #endif
}
|
||||
@@ -1,37 +0,0 @@
|
||||
import { ClearableObjectPool, IPoolElement, Logger } from "@galacean/engine";
|
||||
import { GSErrorName } from "./GSError";
|
||||
import { ShaderRange } from "./common/ShaderRange";
|
||||
import { ShaderPosition } from "./common/ShaderPosition";
|
||||
// #if _VERBOSE
|
||||
import { GSError } from "./GSError";
|
||||
// #endif
|
||||
|
||||
export class ShaderLabUtils {
  // Registry of every pool created through createObjectPool, so they can all
  // be cleared together between compilations.
  private static _shaderLabObjectPoolSet: ClearableObjectPool<IPoolElement>[] = [];

  /**
   * Create a clearable object pool for `type` and register it so
   * clearAllShaderLabObjectPool can reset it later.
   */
  static createObjectPool<T extends IPoolElement>(type: new () => T) {
    const pool = new ClearableObjectPool<T>(type);
    ShaderLabUtils._shaderLabObjectPoolSet.push(pool);
    return pool;
  }

  /** Clear every registered pool; objects handed out earlier become invalid. */
  static clearAllShaderLabObjectPool() {
    for (let i = 0, n = ShaderLabUtils._shaderLabObjectPoolSet.length; i < n; i++) {
      ShaderLabUtils._shaderLabObjectPoolSet[i].clear();
    }
  }

  /**
   * Build a GSError in verbose builds; in release builds the error is only
   * logged and the function implicitly returns undefined (hence the
   * `Error | undefined` return type — the two branches are selected by the
   * `#if _VERBOSE` conditional-compile step, not at runtime).
   */
  static createGSError(
    message: string,
    errorName: GSErrorName,
    source: string,
    location: ShaderRange | ShaderPosition,
    file?: string
  ): Error | undefined {
    // #if _VERBOSE
    return new GSError(errorName, message, location, source, file);
    // #else
    Logger.error(message);
    // #endif
  }
}
|
||||
@@ -1,9 +0,0 @@
|
||||
import { IPoolElement } from "@galacean/engine";
|
||||
|
||||
export class TempArray<T> implements IPoolElement {
|
||||
array: Array<T> = new Array();
|
||||
|
||||
dispose(): void {
|
||||
this.array.length = 0;
|
||||
}
|
||||
}
|
||||
@@ -1,369 +0,0 @@
|
||||
import { ShaderPosition, ShaderRange } from "../common";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { GSErrorName } from "../GSError";
|
||||
import { ASTNode, TreeNode } from "../parser/AST";
|
||||
import { NoneTerminal } from "../parser/GrammarSymbol";
|
||||
import { ESymbolType, FnSymbol } from "../parser/symbolTable";
|
||||
import { NodeChild, StructProp } from "../parser/types";
|
||||
import { ParserUtils } from "../ParserUtils";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { VisitorContext } from "./VisitorContext";
|
||||
// #if _VERBOSE
|
||||
import { GSError } from "../GSError";
|
||||
// #endif
|
||||
import { Logger, ReturnableObjectPool } from "@galacean/engine";
|
||||
import { Keyword } from "../common/enums/Keyword";
|
||||
import { TempArray } from "../TempArray";
|
||||
import { ICodeSegment } from "./types";
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* The code generator
|
||||
*/
|
||||
/**
 * @internal
 * The code generator: walks the parsed shader AST and emits GLSL text.
 * Subclasses supply the backend-specific attribute/varying/MRT declarations.
 */
export abstract class CodeGenVisitor {
  // #if _VERBOSE
  readonly errors: Error[] = [];
  // #endif

  /** Emit the declaration line for one attribute-struct property. */
  abstract getAttributeProp(prop: StructProp): string;
  /** Emit the declaration line for one varying-struct property. */
  abstract getVaryingProp(prop: StructProp): string;
  /** Emit the declaration line for one MRT-struct property. */
  abstract getMRTProp(prop: StructProp): string;

  // Pool of temporary string arrays used by defaultCodeGen.
  protected static _tmpArrayPool = new ReturnableObjectPool(TempArray<string>, 10);

  /**
   * Fallback generation: emit each child (token lexeme or recursive codeGen)
   * joined with single spaces.
   */
  defaultCodeGen(children: NodeChild[]) {
    const pool = CodeGenVisitor._tmpArrayPool;
    let ret = pool.get();
    ret.dispose();
    for (const child of children) {
      if (child instanceof BaseToken) {
        ret.array.push(child.lexeme);
      } else {
        ret.array.push(child.codeGen(this));
      }
    }
    // NOTE(review): the array is returned to the pool before join() reads it;
    // this works only while no re-entrant get() happens in between — confirm.
    pool.return(ret);
    return ret.array.join(" ");
  }

  /**
   * Generate postfix expressions. Member access on attribute/varying/MRT
   * structs collapses to the bare property name (the struct itself is
   * flattened away); indexing of gl_FragData is rejected in favor of MRT
   * structs.
   */
  visitPostfixExpression(node: ASTNode.PostfixExpression): string {
    const children = node.children;
    const derivationLength = children.length;
    const context = VisitorContext.context;

    if (derivationLength === 3) {
      // Shape: <postfix> "." <prop>
      const postExpr = children[0] as ASTNode.PostfixExpression;
      const prop = children[2];

      if (prop instanceof BaseToken) {
        if (context.isAttributeStruct(<string>postExpr.type)) {
          const error = context.referenceAttribute(prop);
          // #if _VERBOSE
          if (error) {
            this.errors.push(<GSError>error);
          }
          // #endif
          return prop.lexeme;
        } else if (context.isVaryingStruct(<string>postExpr.type)) {
          const error = context.referenceVarying(prop);
          // #if _VERBOSE
          if (error) {
            this.errors.push(<GSError>error);
          }
          // #endif
          return prop.lexeme;
        } else if (context.isMRTStruct(<string>postExpr.type)) {
          const error = context.referenceMRTProp(prop);
          // #if _VERBOSE
          if (error) {
            this.errors.push(<GSError>error);
          }
          // #endif
          return prop.lexeme;
        }

        // Ordinary member access on a non-special struct.
        return `${postExpr.codeGen(this)}.${prop.lexeme}`;
      } else {
        return `${postExpr.codeGen(this)}.${prop.codeGen(this)}`;
      }
    } else if (derivationLength === 4) {
      // Shape: <postfix> "[" <expr> "]"
      const identNode = children[0] as ASTNode.PostfixExpression;
      const indexNode = children[2] as ASTNode.Expression;
      const identLexeme = identNode.codeGen(this);
      const indexLexeme = indexNode.codeGen(this);
      if (identLexeme === "gl_FragData") {
        this._reportError(identNode.location, "Please use MRT struct instead of gl_FragData.");
      }
      return `${identLexeme}[${indexLexeme}]`;
    }

    return this.defaultCodeGen(node.children);
  }

  /** Emit a variable reference, recording each referenced global symbol. */
  visitVariableIdentifier(node: ASTNode.VariableIdentifier): string {
    for (let name of node.referenceGlobalSymbolNames) {
      VisitorContext.context.referenceGlobal(name, ESymbolType.Any);
    }

    return node.getLexeme(this);
  }

  /**
   * Emit a call to a user-defined function, dropping arguments whose declared
   * type is an attribute/varying/MRT struct (those structs are flattened out
   * of the generated code). Falls back to defaultCodeGen for builtins.
   */
  visitFunctionCall(node: ASTNode.FunctionCall): string {
    const call = node.children[0] as ASTNode.FunctionCallGeneric;
    if (call.fnSymbol instanceof FnSymbol) {
      VisitorContext.context.referenceGlobal(call.fnSymbol.ident, ESymbolType.FN);

      const paramList = call.children[2];
      if (paramList instanceof ASTNode.FunctionCallParameterList) {
        const astNodes = paramList.paramNodes;
        const paramInfoList = call.fnSymbol.astNode.protoType.parameterList;

        // Keep only arguments not declared as special (flattened) structs.
        const params = astNodes.filter((_, i) => {
          const typeInfo = paramInfoList?.[i]?.typeInfo;
          return (
            !typeInfo ||
            (!VisitorContext.context.isAttributeStruct(typeInfo.typeLexeme) &&
              !VisitorContext.context.isVaryingStruct(typeInfo.typeLexeme) &&
              !VisitorContext.context.isMRTStruct(typeInfo.typeLexeme))
          );
        });

        let paramsCode = "";

        for (let i = 0, length = params.length; i < length; i++) {
          const astNode = params[i];
          const code = astNode.codeGen(this);
          // Macro argument blocks supply their own separators, so no comma.
          if (astNode instanceof ASTNode.MacroCallArgBlock || i === 0) {
            paramsCode += code;
          } else {
            paramsCode += `, ${code}`;
          }
        }

        return `${call.fnSymbol.ident}(${paramsCode})`;
      }
    }

    return this.defaultCodeGen(node.children);
  }

  /**
   * Emit a function-like macro invocation, filtering out arguments that are
   * identifiers of attribute/varying/MRT struct type (mirrors
   * visitFunctionCall's argument filtering).
   */
  visitMacroCallFunction(node: ASTNode.MacroCallFunction): string {
    const children = node.children;
    const paramList = children[2];
    if (paramList instanceof ASTNode.FunctionCallParameterList) {
      const astNodes = paramList.paramNodes;

      const params = astNodes.filter((node) => {
        if (node instanceof ASTNode.AssignmentExpression) {
          const variableParam = ParserUtils.unwrapNodeByType<ASTNode.VariableIdentifier>(
            node,
            NoneTerminal.variable_identifier
          );
          if (
            variableParam &&
            typeof variableParam.typeInfo === "string" &&
            (VisitorContext.context.isAttributeStruct(variableParam.typeInfo) ||
              VisitorContext.context.isVaryingStruct(variableParam.typeInfo) ||
              VisitorContext.context.isMRTStruct(variableParam.typeInfo))
          ) {
            return false;
          }
        }

        return true;
      });

      let paramsCode = "";
      for (let i = 0, length = params.length; i < length; i++) {
        const node = params[i];
        const code = node.codeGen(this);

        // Macro argument blocks supply their own separators, so no comma.
        if (node instanceof ASTNode.MacroCallArgBlock || i === 0) {
          paramsCode += code;
        } else {
          paramsCode += `, ${code}`;
        }
      }

      return `${node.macroName}(${paramsCode})`;
    } else {
      return this.defaultCodeGen(node.children);
    }
  }

  /** Emit statements, joining recursive statement lists with newlines. */
  visitStatementList(node: ASTNode.StatementList): string {
    const children = node.children as TreeNode[];
    if (children.length === 1) {
      return children[0].codeGen(this);
    } else {
      return `${children[0].codeGen(this)}\n${children[1].codeGen(this)}`;
    }
  }

  /** Emit a declaration, recording a referenced struct type if present. */
  visitSingleDeclaration(node: ASTNode.SingleDeclaration): string {
    const type = node.typeSpecifier.type;
    if (typeof type === "string") {
      VisitorContext.context.referenceGlobal(type, ESymbolType.STRUCT);
    }
    return this.defaultCodeGen(node.children);
  }

  /** Emit a global variable declaration, prefixed with `uniform`. */
  visitGlobalVariableDeclaration(node: ASTNode.VariableDeclaration): string {
    const children = node.children;
    const fullType = children[0];
    if (fullType instanceof ASTNode.FullySpecifiedType && fullType.typeSpecifier.isCustom) {
      VisitorContext.context.referenceGlobal(<string>fullType.type, ESymbolType.STRUCT);
    }
    return `uniform ${this.defaultCodeGen(children)}`;
  }

  /**
   * Emit a declaration statement; declarations of varying/MRT struct
   * variables are dropped entirely (those structs are flattened away).
   */
  visitDeclaration(node: ASTNode.Declaration): string {
    const { context } = VisitorContext;
    const children = node.children;
    const child = children[0];

    if (child instanceof ASTNode.InitDeclaratorList) {
      const typeLexeme = child.typeInfo.typeLexeme;
      if (context.isVaryingStruct(typeLexeme) || context.isMRTStruct(typeLexeme)) return "";
    }
    return this.defaultCodeGen(children);
  }

  /**
   * Emit a function parameter list, skipping parameters of
   * attribute/varying/MRT struct type (they are passed implicitly via
   * globals in the generated code).
   */
  visitFunctionParameterList(node: ASTNode.FunctionParameterList): string {
    const params = node.parameterInfoList.filter(
      (item) =>
        !item.typeInfo ||
        (!VisitorContext.context.isAttributeStruct(item.typeInfo.typeLexeme) &&
          !VisitorContext.context.isVaryingStruct(item.typeInfo.typeLexeme) &&
          !VisitorContext.context.isMRTStruct(item.typeInfo.typeLexeme))
    );

    let out = "";
    for (let i = 0, length = params.length; i < length; i++) {
      const item = params[i];
      const astNode = item.astNode;
      const code = astNode.codeGen(this);
      // Macro parameter blocks supply their own separators, so no comma.
      if (astNode instanceof ASTNode.MacroParamBlock || i === 0) {
        out += code;
      } else {
        out += `, ${code}`;
      }
    }

    return out;
  }

  /**
   * Emit a function header; functions returning a varying struct are rewritten
   * to return void (the struct's fields become varyings instead).
   */
  visitFunctionHeader(node: ASTNode.FunctionHeader): string {
    const returnType = node.returnType.typeSpecifier.lexeme;
    if (VisitorContext.context.isVaryingStruct(returnType)) {
      return `void ${node.ident.lexeme}(`;
    }
    return this.defaultCodeGen(node.children);
  }

  /**
   * Emit a jump statement. `return <varying struct var>` is dropped, and
   * `return fn(...)` returning a varying struct keeps only the call.
   */
  visitJumpStatement(node: ASTNode.JumpStatement): string {
    const children = node.children;
    const cmd = children[0] as BaseToken;
    if (cmd.type === Keyword.RETURN) {
      const expr = children[1];
      if (expr instanceof ASTNode.Expression) {
        const returnVar = ParserUtils.unwrapNodeByType<ASTNode.VariableIdentifier>(
          expr,
          NoneTerminal.variable_identifier
        );
        if (VisitorContext.context.isVaryingStruct(<string>returnVar?.typeInfo)) {
          return "";
        }
        const returnFnCall = ParserUtils.unwrapNodeByType<ASTNode.FunctionCall>(expr, NoneTerminal.function_call);
        if (VisitorContext.context.isVaryingStruct(<string>returnFnCall?.type)) {
          return `${expr.codeGen(this)};`;
        }
      }
    }
    return this.defaultCodeGen(children);
  }

  /** Emit a function identifier (overridden per backend for renames). */
  visitFunctionIdentifier(node: ASTNode.FunctionIdentifier): string {
    return this.defaultCodeGen(node.children);
  }

  /**
   * Emit a struct specifier. Attribute/varying/MRT structs are replaced by
   * backend-specific declarations for just the referenced properties (plus
   * their macro expressions), ordered by source position; a struct may play
   * only one of the three roles.
   */
  visitStructSpecifier(node: ASTNode.StructSpecifier): string {
    const context = VisitorContext.context;
    const { varyingStructs, attributeStructs, mrtStructs } = context;
    const isVaryingStruct = varyingStructs.indexOf(node) !== -1;
    const isAttributeStruct = attributeStructs.indexOf(node) !== -1;
    const isMRTStruct = mrtStructs.indexOf(node) !== -1;

    if (isVaryingStruct && isAttributeStruct) {
      this._reportError(node.location, "cannot use same struct as Varying and Attribute");
    }

    if (isVaryingStruct && isMRTStruct) {
      this._reportError(node.location, "cannot use same struct as Varying and MRT");
    }

    if (isAttributeStruct && isMRTStruct) {
      this._reportError(node.location, "cannot use same struct as Attribute and MRT");
    }

    if (isVaryingStruct || isAttributeStruct || isMRTStruct) {
      let result: ICodeSegment[] = [];

      // Preserve macro conditionals that wrapped the struct members.
      result.push(
        ...node.macroExpressions.map((item) => ({
          text: item instanceof BaseToken ? item.lexeme : item.codeGen(this),
          index: item.location.start.index
        }))
      );

      // Only properties actually referenced by the shader are declared.
      for (const prop of node.propList) {
        const name = prop.ident.lexeme;
        if (isVaryingStruct && context._referencedVaryingList[name]?.indexOf(prop) >= 0) {
          result.push({
            text: `${this.getVaryingProp(prop)}\n`,
            index: prop.ident.location.start.index
          });
        } else if (isAttributeStruct && context._referencedAttributeList[name]?.indexOf(prop) >= 0) {
          result.push({
            text: `${this.getAttributeProp(prop)}\n`,
            index: prop.ident.location.start.index
          });
        } else if (isMRTStruct && context._referencedMRTList[name]?.indexOf(prop) >= 0) {
          result.push({
            text: `${this.getMRTProp(prop)}\n`,
            index: prop.ident.location.start.index
          });
        }
      }

      // Re-order segments by original source position.
      const text = result
        .sort((a, b) => a.index - b.index)
        .map((item) => item.text)
        .join("");

      return text;
    } else {
      return this.defaultCodeGen(node.children);
    }
  }

  /** Emit a function definition; the stage entry point becomes `void main()`. */
  visitFunctionDefinition(fnNode: ASTNode.FunctionDefinition): string {
    const fnName = fnNode.protoType.ident.lexeme;
    const context = VisitorContext.context;

    if (fnName == context.stageEntry) {
      const statements = fnNode.statements.codeGen(this);
      return `void main() ${statements}`;
    } else {
      return this.defaultCodeGen(fnNode.children);
    }
  }

  /**
   * Record a compilation error: collected as a GSError in verbose builds,
   * logged directly otherwise (branch chosen by conditional compilation).
   */
  protected _reportError(loc: ShaderRange | ShaderPosition, message: string): void {
    // #if _VERBOSE
    this.errors.push(new GSError(GSErrorName.CompilationError, message, loc, ShaderLab._processingPassText));
    // #else
    Logger.error(message);
    // #endif
  }
}
|
||||
@@ -1,54 +0,0 @@
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { ASTNode } from "../parser/AST";
|
||||
import { StructProp } from "../parser/types";
|
||||
import { GLESVisitor } from "./GLESVisitor";
|
||||
import { VisitorContext } from "./VisitorContext";
|
||||
|
||||
/** GLSL ES 1.00 code generator: attribute/varying qualifiers, gl_FragData/gl_FragColor outputs. */
export class GLES100Visitor extends GLESVisitor {
  // Lazily-created singleton; visitors are stateless across getVisitor calls
  // only insofar as reset() is invoked by visitShaderProgram.
  private static _singleton: GLES100Visitor;
  static getVisitor(): GLES100Visitor {
    if (!this._singleton) {
      this._singleton = new GLES100Visitor();
    }
    return this._singleton;
  }

  override getAttributeProp(prop: StructProp): string {
    return `attribute ${prop.typeInfo.typeLexeme} ${prop.ident.lexeme};`;
  }

  override getVaryingProp(prop: StructProp): string {
    return `varying ${prop.typeInfo.typeLexeme} ${prop.ident.lexeme};`;
  }

  // GLES 1.00 has no MRT output declarations (MRT access is rewritten to
  // gl_FragData in visitPostfixExpression below).
  // NOTE(review): returns null despite the `string` return type — callers
  // that interpolate the result would emit "null"; confirm this path is
  // unreachable for this backend.
  override getMRTProp(): string {
    return null;
  }

  /** Rewrite MRT-struct member access to the matching gl_FragData[index]. */
  override visitPostfixExpression(node: ASTNode.PostfixExpression): string {
    const { children } = node;
    const postExpr = children[0];
    const { context } = VisitorContext;
    if (postExpr instanceof ASTNode.PostfixExpression && context.isMRTStruct(<string>postExpr.type)) {
      const propReferenced = children[2] as BaseToken;
      const prop = context.mrtList.find((item) => item.ident.lexeme === propReferenced.lexeme);
      if (!prop) {
        this._reportError(propReferenced.location, `not found mrt property: ${propReferenced.lexeme}`);
        return "";
      }
      return `gl_FragData[${prop.mrtIndex!}]`;
    }
    return super.visitPostfixExpression(node);
  }

  /**
   * Rewrite the fragment entry's return into a gl_FragColor assignment;
   * with MRT structs in play the return is dropped entirely.
   */
  override visitJumpStatement(node: ASTNode.JumpStatement): string {
    if (node.isFragReturnStatement) {
      if (VisitorContext.context.mrtStructs.length) {
        return "";
      }
      const expression = node.children[1] as ASTNode.Expression;
      return `gl_FragColor = ${expression.codeGen(this)}`;
    }
    return super.visitJumpStatement(node);
  }
}
|
||||
@@ -1,121 +0,0 @@
|
||||
import { EShaderStage } from "../common/enums/ShaderStage";
|
||||
import { ASTNode } from "../parser/AST";
|
||||
import { ShaderData } from "../parser/ShaderInfo";
|
||||
import { StructProp } from "../parser/types";
|
||||
import { GLESVisitor } from "./GLESVisitor";
|
||||
import { ICodeSegment } from "./types";
|
||||
import { VisitorContext } from "./VisitorContext";
|
||||
|
||||
// Name of the synthesized fragment output that stands in for gl_FragColor
// (removed in GLSL ES 3.00).
const V3_GL_FragColor = "GS_glFragColor";

/** GLSL ES 3.00 code generator: in/out qualifiers, layout-located MRT outputs, builtin renames. */
export class GLES300Visitor extends GLESVisitor {
  // Lazily-created singleton; per-pass state is cleared via reset().
  private static _singleton: GLES300Visitor;
  static getVisitor(): GLES300Visitor {
    if (!this._singleton) {
      this._singleton = new GLES300Visitor();
    }
    return this._singleton;
  }

  // Extra global declarations accumulated during this pass (currently only
  // the synthesized frag-color output).
  private _otherCodeArray: ICodeSegment[] = [];
  private _fragColorVariableRegistered = false;

  /** Clear per-pass state in addition to the base visitor's. */
  override reset(): void {
    super.reset();

    this._otherCodeArray.length = 0;
    this._fragColorVariableRegistered = false;
  }

  /** Append this backend's extra globals after the base visitor's. */
  override getOtherGlobal(data: ShaderData, out: ICodeSegment[]): void {
    super.getOtherGlobal(data, out);

    for (let i = 0, n = this._otherCodeArray.length; i < n; i++) {
      out.push(this._otherCodeArray[i]);
    }
  }

  override getAttributeProp(prop: StructProp): string {
    return `in ${prop.typeInfo.typeLexeme} ${prop.ident.lexeme};`;
  }

  override getVaryingProp(prop: StructProp): string {
    // Varyings are inputs to the fragment stage, outputs from the vertex stage.
    const qualifier = VisitorContext.context.stage === EShaderStage.FRAGMENT ? "in" : "out";
    return `${qualifier} ${prop.typeInfo.typeLexeme} ${prop.ident.lexeme};`;
  }

  override getMRTProp(prop: StructProp): string {
    return `layout(location = ${prop.mrtIndex}) out vec4 ${prop.ident.lexeme};`;
  }

  /**
   * Rename GLES 1.00 builtins (texture2D, textureCube, *EXT variants,
   * gl_FragDepthEXT) to their GLSL ES 3.00 equivalents.
   */
  override visitFunctionIdentifier(node: ASTNode.FunctionIdentifier): string {
    const children = node.children;
    const typeSpecifier = children[0] as ASTNode.TypeSpecifier;
    if (typeSpecifier.children.length !== 1) {
      return this.defaultCodeGen(children);
    }
    let ident = node.lexeme;
    switch (node.ident) {
      case "texture2D":
      case "textureCube":
        ident = "texture";
        break;
      case "texture2DProj":
        ident = "textureProj";
        break;
      case "texture2DLodEXT":
      case "textureCubeLodEXT":
        ident = "textureLod";
        break;
      case "texture2DGradEXT":
      case "textureCubeGradEXT":
        ident = "textureGrad";
        break;
      case "texture2DProjLodEXT":
        ident = "textureProjLod";
        break;
      case "texture2DProjGradEXT":
        ident = "textureProjGrad";
        break;
      case "gl_FragDepthEXT":
        ident = "gl_FragDepth";
        break;
    }
    return ident;
  }

  /**
   * Replace gl_FragColor references with the synthesized output variable;
   * using gl_FragColor together with MRT structs is an error.
   */
  override visitVariableIdentifier(node: ASTNode.VariableIdentifier): string {
    const { context } = VisitorContext;
    if (context.stage === EShaderStage.FRAGMENT && node.getLexeme(this) === "gl_FragColor") {
      if (context.mrtStructs.length) {
        this._reportError(node.location, "gl_FragColor cannot be used with MRT (Multiple Render Targets).");
        // NOTE(review): bare `return;` in a string-returning method yields
        // undefined on this error path — confirm callers tolerate it.
        return;
      }
      this._registerFragColorVariable();
      return V3_GL_FragColor;
    }
    return super.visitVariableIdentifier(node);
  }

  /**
   * Rewrite the fragment entry's return into an assignment to the
   * synthesized output; with MRT structs the return is dropped entirely.
   */
  override visitJumpStatement(node: ASTNode.JumpStatement): string {
    if (node.isFragReturnStatement) {
      if (VisitorContext.context.mrtStructs.length) {
        return "";
      }
      this._registerFragColorVariable();

      const expression = node.children[1] as ASTNode.Expression;
      return `${V3_GL_FragColor} = ${expression.codeGen(this)};`;
    }
    return super.visitJumpStatement(node);
  }

  // Register the `out vec4` declaration for the synthesized frag-color
  // variable exactly once per pass.
  private _registerFragColorVariable() {
    if (this._fragColorVariableRegistered) return;
    this._otherCodeArray.push({
      text: `out vec4 ${V3_GL_FragColor};`,
      index: 0
    });
    this._fragColorVariableRegistered = true;
  }
}
|
||||
@@ -1,326 +0,0 @@
|
||||
import { IShaderInfo } from "@galacean/engine-design";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { EShaderStage } from "../common/enums/ShaderStage";
|
||||
import { Keyword } from "../common/enums/Keyword";
|
||||
import { ASTNode, TreeNode } from "../parser/AST";
|
||||
import { ShaderData } from "../parser/ShaderInfo";
|
||||
import { ESymbolType, FnSymbol, StructSymbol, SymbolInfo } from "../parser/symbolTable";
|
||||
import { CodeGenVisitor } from "./CodeGenVisitor";
|
||||
import { ICodeSegment } from "./types";
|
||||
import { VisitorContext } from "./VisitorContext";
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
export abstract class GLESVisitor extends CodeGenVisitor {
|
||||
private _globalCodeArray: ICodeSegment[] = [];
|
||||
private static _lookupSymbol: SymbolInfo = new SymbolInfo("", null);
|
||||
private static _serializedGlobalKey = new Set();
|
||||
|
||||
reset(): void {
|
||||
const { _globalCodeArray: globalCodeArray } = this;
|
||||
globalCodeArray.length = 0;
|
||||
GLESVisitor._serializedGlobalKey.clear();
|
||||
}
|
||||
|
||||
getOtherGlobal(data: ShaderData, out: ICodeSegment[]): void {
|
||||
for (const precision of data.globalPrecisions) {
|
||||
out.push({ text: precision.codeGen(this), index: precision.location.start.index });
|
||||
}
|
||||
}
|
||||
|
||||
visitShaderProgram(node: ASTNode.GLShaderProgram, vertexEntry: string, fragmentEntry: string): IShaderInfo {
|
||||
// #if _VERBOSE
|
||||
this.errors.length = 0;
|
||||
// #endif
|
||||
VisitorContext.reset();
|
||||
this.reset();
|
||||
|
||||
const shaderData = node.shaderData;
|
||||
VisitorContext.context._passSymbolTable = shaderData.symbolTable;
|
||||
|
||||
const outerGlobalMacroDeclarations = shaderData.getOuterGlobalMacroDeclarations();
|
||||
|
||||
return {
|
||||
vertex: this._vertexMain(vertexEntry, shaderData, outerGlobalMacroDeclarations),
|
||||
fragment: this._fragmentMain(fragmentEntry, shaderData, outerGlobalMacroDeclarations)
|
||||
};
|
||||
}
|
||||
|
||||
private _vertexMain(
|
||||
entry: string,
|
||||
data: ShaderData,
|
||||
outerGlobalMacroDeclarations: ASTNode.GlobalDeclaration[]
|
||||
): string {
|
||||
const context = VisitorContext.context;
|
||||
context.stage = EShaderStage.VERTEX;
|
||||
context.stageEntry = entry;
|
||||
|
||||
const lookupSymbol = GLESVisitor._lookupSymbol;
|
||||
const symbolTable = data.symbolTable;
|
||||
lookupSymbol.set(entry, ESymbolType.FN);
|
||||
const fnSymbols = <FnSymbol[]>symbolTable.getSymbols(lookupSymbol, true, []);
|
||||
if (!fnSymbols.length) throw `no entry function found: ${entry}`;
|
||||
|
||||
const { attributeStructs, attributeList, varyingStructs, varyingList } = context;
|
||||
fnSymbols.forEach((fnSymbol) => {
|
||||
const fnNode = fnSymbol.astNode;
|
||||
const returnType = fnNode.protoType.returnType;
|
||||
|
||||
if (typeof returnType.type === "string") {
|
||||
lookupSymbol.set(returnType.type, ESymbolType.STRUCT);
|
||||
const varyingSymbols = <StructSymbol[]>symbolTable.getSymbols(lookupSymbol, true, []);
|
||||
if (!varyingSymbols.length) {
|
||||
this._reportError(returnType.location, `invalid varying struct: "${returnType.type}".`);
|
||||
} else {
|
||||
for (let i = 0; i < varyingSymbols.length; i++) {
|
||||
const varyingSymbol = varyingSymbols[i];
|
||||
const astNode = varyingSymbol.astNode;
|
||||
varyingStructs.push(astNode);
|
||||
for (const prop of astNode.propList) {
|
||||
varyingList.push(prop);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (returnType.type !== Keyword.VOID) {
|
||||
this._reportError(returnType.location, "vertex main entry can only return struct or void.");
|
||||
}
|
||||
|
||||
const paramList = fnNode.protoType.parameterList;
|
||||
const attributeParam = paramList?.[0];
|
||||
if (attributeParam) {
|
||||
const attributeType = attributeParam.typeInfo.type;
|
||||
if (typeof attributeType === "string") {
|
||||
lookupSymbol.set(attributeType, ESymbolType.STRUCT);
|
||||
const attributeSymbols = <StructSymbol[]>symbolTable.getSymbols(lookupSymbol, true, []);
|
||||
if (!attributeSymbols.length) {
|
||||
this._reportError(attributeParam.astNode.location, `invalid attribute struct: "${attributeType}".`);
|
||||
} else {
|
||||
for (let i = 0; i < attributeSymbols.length; i++) {
|
||||
const attributeSymbol = attributeSymbols[i];
|
||||
const astNode = attributeSymbol.astNode;
|
||||
attributeStructs.push(astNode);
|
||||
for (const prop of astNode.propList) {
|
||||
attributeList.push(prop);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const globalCodeArray = this._globalCodeArray;
|
||||
VisitorContext.context.referenceGlobal(entry, ESymbolType.FN);
|
||||
|
||||
this._getGlobalSymbol(globalCodeArray);
|
||||
this._getCustomStruct(context.attributeStructs, globalCodeArray);
|
||||
this._getCustomStruct(context.varyingStructs, globalCodeArray);
|
||||
this._getGlobalMacroDeclarations(outerGlobalMacroDeclarations, globalCodeArray);
|
||||
this.getOtherGlobal(data, globalCodeArray);
|
||||
|
||||
const globalCode = globalCodeArray
|
||||
.sort((a, b) => a.index - b.index)
|
||||
.map((item) => item.text)
|
||||
.join("\n");
|
||||
|
||||
VisitorContext.context.reset(false);
|
||||
this.reset();
|
||||
|
||||
return globalCode;
|
||||
}
|
||||
|
||||
private _fragmentMain(
|
||||
entry: string,
|
||||
data: ShaderData,
|
||||
outerGlobalMacroStatements: ASTNode.GlobalDeclaration[]
|
||||
): string {
|
||||
const context = VisitorContext.context;
|
||||
context.stage = EShaderStage.FRAGMENT;
|
||||
context.stageEntry = entry;
|
||||
|
||||
const lookupSymbol = GLESVisitor._lookupSymbol;
|
||||
const { symbolTable } = data;
|
||||
lookupSymbol.set(entry, ESymbolType.FN);
|
||||
const fnSymbols = <FnSymbol[]>symbolTable.getSymbols(lookupSymbol, true, []);
|
||||
if (!fnSymbols?.length) throw `no entry function found: ${entry}`;
|
||||
|
||||
fnSymbols.forEach((fnSymbol) => {
|
||||
const fnNode = fnSymbol.astNode;
|
||||
const { returnStatement } = fnNode;
|
||||
|
||||
if (returnStatement) {
|
||||
returnStatement.isFragReturnStatement = true;
|
||||
}
|
||||
|
||||
const { type: returnDataType, location: returnLocation } = fnNode.protoType.returnType;
|
||||
if (typeof returnDataType === "string") {
|
||||
lookupSymbol.set(returnDataType, ESymbolType.STRUCT);
|
||||
const mrtSymbols = <StructSymbol[]>symbolTable.getSymbols(lookupSymbol, true, []);
|
||||
if (!mrtSymbols.length) {
|
||||
this._reportError(returnLocation, `invalid mrt struct: ${returnDataType}`);
|
||||
} else {
|
||||
for (let i = 0; i < mrtSymbols.length; i++) {
|
||||
const mrtSymbol = mrtSymbols[i];
|
||||
const astNode = mrtSymbol.astNode;
|
||||
context.mrtStructs.push(astNode);
|
||||
for (const prop of astNode.propList) {
|
||||
context.mrtList.push(prop);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (returnDataType !== Keyword.VOID && returnDataType !== Keyword.VEC4) {
|
||||
this._reportError(returnLocation, "fragment main entry can only return struct or vec4.");
|
||||
}
|
||||
});
|
||||
|
||||
const globalCodeArray = this._globalCodeArray;
|
||||
VisitorContext.context.referenceGlobal(entry, ESymbolType.FN);
|
||||
|
||||
this._getGlobalSymbol(globalCodeArray);
|
||||
this._getCustomStruct(context.varyingStructs, globalCodeArray);
|
||||
this._getCustomStruct(context.mrtStructs, globalCodeArray);
|
||||
this._getGlobalMacroDeclarations(outerGlobalMacroStatements, globalCodeArray);
|
||||
this.getOtherGlobal(data, globalCodeArray);
|
||||
|
||||
const globalCode = globalCodeArray
|
||||
.sort((a, b) => a.index - b.index)
|
||||
.map((item) => item.text)
|
||||
.join("\n");
|
||||
|
||||
context.reset();
|
||||
this.reset();
|
||||
|
||||
return globalCode;
|
||||
}
|
||||
|
||||
private _getGlobalSymbol(out: ICodeSegment[]): void {
|
||||
const { _referencedGlobals } = VisitorContext.context;
|
||||
|
||||
const lastLength = Object.keys(_referencedGlobals).length;
|
||||
if (lastLength === 0) return;
|
||||
|
||||
for (const ident in _referencedGlobals) {
|
||||
if (GLESVisitor._serializedGlobalKey.has(ident)) continue;
|
||||
GLESVisitor._serializedGlobalKey.add(ident);
|
||||
|
||||
const symbols = _referencedGlobals[ident];
|
||||
for (let i = 0; i < symbols.length; i++) {
|
||||
const sm = symbols[i];
|
||||
const text = sm.astNode.codeGen(this) + (sm.type === ESymbolType.VAR ? ";" : "");
|
||||
if (!sm.isInMacroBranch) {
|
||||
out.push({
|
||||
text,
|
||||
index: sm.astNode.location.start.index
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(_referencedGlobals).length !== lastLength) {
|
||||
this._getGlobalSymbol(out);
|
||||
}
|
||||
}
|
||||
|
||||
private _getCustomStruct(structNodes: ASTNode.StructSpecifier[], out: ICodeSegment[]): void {
|
||||
for (const node of structNodes) {
|
||||
const text = node.codeGen(this);
|
||||
|
||||
if (!node.isInMacroBranch) {
|
||||
out.push({ text, index: node.location.start.index });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private _getGlobalMacroDeclarations(macros: ASTNode.GlobalDeclaration[], out: ICodeSegment[]): void {
|
||||
const context = VisitorContext.context;
|
||||
const referencedGlobals = context._referencedGlobals;
|
||||
const referencedGlobalMacroASTs = context._referencedGlobalMacroASTs;
|
||||
referencedGlobalMacroASTs.length = 0;
|
||||
|
||||
for (const symbols of Object.values(referencedGlobals)) {
|
||||
for (const symbol of symbols) {
|
||||
if (symbol.isInMacroBranch) {
|
||||
referencedGlobalMacroASTs.push(symbol.astNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const macro of macros) {
|
||||
let text: string;
|
||||
const child = macro.children[0];
|
||||
|
||||
if (child instanceof ASTNode.GlobalMacroIfStatement) {
|
||||
let result: ICodeSegment[] = [];
|
||||
result.push(
|
||||
...macro.macroExpressions.map((item) => ({
|
||||
text: item instanceof BaseToken ? item.lexeme : item.codeGen(this),
|
||||
index: item.location.start.index
|
||||
}))
|
||||
);
|
||||
|
||||
this._visitGlobalMacroIfStatement(child, result);
|
||||
|
||||
text = result
|
||||
.sort((a, b) => a.index - b.index)
|
||||
.map((item) => item.text)
|
||||
.join("\n");
|
||||
} else {
|
||||
text = macro.codeGen(this);
|
||||
}
|
||||
|
||||
out.push({
|
||||
text,
|
||||
index: macro.location.start.index
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private _visitGlobalMacroIfStatement(node: TreeNode, out: ICodeSegment[]): void {
|
||||
const children = node.children;
|
||||
for (let i = 0; i < children.length; i++) {
|
||||
const child = children[i];
|
||||
if (child instanceof ASTNode.PrecisionSpecifier) {
|
||||
out.push({
|
||||
text: child.codeGen(this),
|
||||
index: child.location.start.index
|
||||
});
|
||||
} else if (child instanceof ASTNode.FunctionDefinition) {
|
||||
if (VisitorContext.context._referencedGlobalMacroASTs.indexOf(child) !== -1) {
|
||||
out.push({
|
||||
text: child.getCache(), // code has generated in `_getGlobalSymbol`
|
||||
index: child.location.start.index
|
||||
});
|
||||
}
|
||||
} else if (child instanceof ASTNode.StructSpecifier) {
|
||||
const context = VisitorContext.context;
|
||||
const stage = context.stage;
|
||||
if (
|
||||
VisitorContext.context._referencedGlobalMacroASTs.indexOf(child) !== -1 ||
|
||||
(stage === EShaderStage.VERTEX
|
||||
? context.isAttributeStruct(child.ident?.lexeme) || context.isVaryingStruct(child.ident?.lexeme)
|
||||
: context.isVaryingStruct(child.ident?.lexeme) || context.isMRTStruct(child.ident?.lexeme))
|
||||
) {
|
||||
out.push({
|
||||
text: child.getCache(), // code has generated in `_getGlobalSymbol` or `_getCustomStruct`
|
||||
index: child.location.start.index
|
||||
});
|
||||
}
|
||||
} else if (child instanceof ASTNode.VariableDeclarationList) {
|
||||
const variableDeclarations = child.variableDeclarations;
|
||||
for (let i = 0; i < variableDeclarations.length; i++) {
|
||||
const variableDeclaration = variableDeclarations[i];
|
||||
if (VisitorContext.context._referencedGlobalMacroASTs.indexOf(variableDeclaration) !== -1) {
|
||||
out.push({
|
||||
text: variableDeclaration.getCache() + ";", // code has generated in `_getGlobalSymbol`
|
||||
index: variableDeclaration.location.start.index
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (child instanceof TreeNode) {
|
||||
this._visitGlobalMacroIfStatement(child, out);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,130 +0,0 @@
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { EShaderStage } from "../common/enums/ShaderStage";
|
||||
import { SymbolTable } from "../common/SymbolTable";
|
||||
import { GSErrorName } from "../GSError";
|
||||
import { ASTNode, TreeNode } from "../parser/AST";
|
||||
import { ESymbolType, SymbolInfo } from "../parser/symbolTable";
|
||||
import { StructProp } from "../parser/types";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
|
||||
/** @internal */
|
||||
export class VisitorContext {
|
||||
private static _lookupSymbol: SymbolInfo = new SymbolInfo("", null);
|
||||
private static _singleton: VisitorContext;
|
||||
static get context() {
|
||||
return this._singleton;
|
||||
}
|
||||
|
||||
static reset() {
|
||||
if (!this._singleton) {
|
||||
this._singleton = new VisitorContext();
|
||||
}
|
||||
this._singleton.reset();
|
||||
}
|
||||
|
||||
attributeStructs: ASTNode.StructSpecifier[] = [];
|
||||
attributeList: StructProp[] = [];
|
||||
varyingStructs: ASTNode.StructSpecifier[] = [];
|
||||
varyingList: StructProp[] = [];
|
||||
mrtStructs: ASTNode.StructSpecifier[] = [];
|
||||
mrtList: StructProp[] = [];
|
||||
|
||||
stage: EShaderStage;
|
||||
stageEntry: string;
|
||||
|
||||
_referencedAttributeList: Record<string, StructProp[]>;
|
||||
_referencedVaryingList: Record<string, StructProp[]>;
|
||||
_referencedMRTList: Record<string, StructProp[]>;
|
||||
_referencedGlobals: Record<string, SymbolInfo[]>;
|
||||
_referencedGlobalMacroASTs: TreeNode[] = [];
|
||||
|
||||
_passSymbolTable: SymbolTable<SymbolInfo>;
|
||||
|
||||
reset(resetAll = true) {
|
||||
if (resetAll) {
|
||||
this.attributeStructs.length = 0;
|
||||
this.attributeList.length = 0;
|
||||
this.varyingStructs.length = 0;
|
||||
this.varyingList.length = 0;
|
||||
this.mrtStructs.length = 0;
|
||||
this.mrtList.length = 0;
|
||||
}
|
||||
|
||||
this._referencedAttributeList = Object.create(null);
|
||||
this._referencedVaryingList = Object.create(null);
|
||||
this._referencedMRTList = Object.create(null);
|
||||
this._referencedGlobals = Object.create(null);
|
||||
this._referencedGlobalMacroASTs.length = 0;
|
||||
}
|
||||
|
||||
isAttributeStruct(type: string) {
|
||||
return this.attributeStructs.findIndex((item) => item.ident!.lexeme === type) !== -1;
|
||||
}
|
||||
|
||||
isVaryingStruct(type: string) {
|
||||
return this.varyingStructs.findIndex((item) => item.ident!.lexeme === type) !== -1;
|
||||
}
|
||||
|
||||
isMRTStruct(type: string) {
|
||||
return this.mrtStructs.findIndex((item) => item.ident!.lexeme === type) !== -1;
|
||||
}
|
||||
|
||||
referenceAttribute(ident: BaseToken): Error | void {
|
||||
const lexeme = ident.lexeme;
|
||||
if (this._referencedAttributeList[lexeme]) return;
|
||||
|
||||
const props = this.attributeList.filter((item) => item.ident.lexeme === lexeme);
|
||||
if (!props.length) {
|
||||
return ShaderLabUtils.createGSError(
|
||||
`referenced attribute not found: ${lexeme}`,
|
||||
GSErrorName.CompilationError,
|
||||
ShaderLab._processingPassText,
|
||||
ident.location
|
||||
);
|
||||
}
|
||||
this._referencedAttributeList[lexeme] = props;
|
||||
}
|
||||
|
||||
referenceVarying(ident: BaseToken): Error | void {
|
||||
const lexeme = ident.lexeme;
|
||||
if (this._referencedVaryingList[lexeme]) return;
|
||||
|
||||
const props = this.varyingList.filter((item) => item.ident.lexeme === lexeme);
|
||||
if (!props.length) {
|
||||
return ShaderLabUtils.createGSError(
|
||||
`referenced varying not found: ${lexeme}`,
|
||||
GSErrorName.CompilationError,
|
||||
ShaderLab._processingPassText,
|
||||
ident.location
|
||||
);
|
||||
}
|
||||
this._referencedVaryingList[lexeme] = props;
|
||||
}
|
||||
|
||||
referenceMRTProp(ident: BaseToken): Error | void {
|
||||
const lexeme = ident.lexeme;
|
||||
if (this._referencedMRTList[lexeme]) return;
|
||||
|
||||
const props = this.mrtList.filter((item) => item.ident.lexeme === lexeme);
|
||||
if (!props.length) {
|
||||
return ShaderLabUtils.createGSError(
|
||||
`referenced mrt not found: ${lexeme}`,
|
||||
GSErrorName.CompilationError,
|
||||
ShaderLab._processingPassText,
|
||||
ident.location
|
||||
);
|
||||
}
|
||||
this._referencedMRTList[lexeme] = props;
|
||||
}
|
||||
|
||||
referenceGlobal(ident: string, type: ESymbolType): void {
|
||||
if (this._referencedGlobals[ident]) return;
|
||||
|
||||
this._referencedGlobals[ident] = [];
|
||||
|
||||
const lookupSymbol = VisitorContext._lookupSymbol;
|
||||
lookupSymbol.set(ident, type);
|
||||
this._passSymbolTable.getSymbols(lookupSymbol, true, this._referencedGlobals[ident]);
|
||||
}
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
export { GLES100Visitor } from "./GLES100";
|
||||
export { GLES300Visitor } from "./GLES300";
|
||||
export { CodeGenVisitor } from "./CodeGenVisitor";
|
||||
@@ -1,12 +0,0 @@
|
||||
import { IShaderSource } from "@galacean/engine-design";
|
||||
|
||||
export type IRenderState = [
|
||||
/** Constant RenderState. */
|
||||
Record<number, boolean | string | number | any>,
|
||||
/** Variable RenderState. */
|
||||
Record<number, string>
|
||||
];
|
||||
|
||||
export type ITag = IShaderSource["subShaders"][number]["tags"];
|
||||
|
||||
export type ICodeSegment = { text: string; index: number };
|
||||
@@ -1,241 +0,0 @@
|
||||
import { Logger } from "@galacean/engine";
|
||||
import { ShaderPosition, ShaderRange } from ".";
|
||||
import { GSErrorName } from "../GSError";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
import { BaseToken } from "./BaseToken";
|
||||
|
||||
export type OnToken = (token: BaseToken, scanner: BaseLexer) => void;
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
export abstract class BaseLexer {
|
||||
static isDigit(charCode: number): boolean {
|
||||
return charCode >= 48 && charCode <= 57; // 0-9
|
||||
}
|
||||
|
||||
// Check if character is alphabetic or underscore (valid word start)
|
||||
static isAlpha(charCode: number): boolean {
|
||||
return (
|
||||
charCode === 95 || // _
|
||||
(charCode >= 65 && charCode <= 90) || // A-Z
|
||||
(charCode >= 97 && charCode <= 122) // a-z
|
||||
);
|
||||
}
|
||||
|
||||
// Check if character is alphanumeric (alpha + digit)
|
||||
static isAlnum(charCode: number): boolean {
|
||||
return BaseLexer.isAlpha(charCode) || BaseLexer.isDigit(charCode);
|
||||
}
|
||||
|
||||
static isPreprocessorStartChar(charCode: number): boolean {
|
||||
return charCode === 35; // #
|
||||
}
|
||||
|
||||
static isWhiteSpaceChar(charCode: number, includeBreak: boolean): boolean {
|
||||
// Space || Tab
|
||||
if (charCode === 32 || charCode === 9) {
|
||||
return true;
|
||||
}
|
||||
return includeBreak && (charCode === 10 || charCode === 13); // \n || \r
|
||||
}
|
||||
|
||||
protected _currentIndex = 0;
|
||||
protected _source: string;
|
||||
|
||||
// #if _VERBOSE
|
||||
protected _column = 0;
|
||||
protected _line = 0;
|
||||
// #endif
|
||||
|
||||
get currentIndex(): number {
|
||||
return this._currentIndex;
|
||||
}
|
||||
|
||||
get source(): string {
|
||||
return this._source;
|
||||
}
|
||||
|
||||
// #if _VERBOSE
|
||||
get line() {
|
||||
return this._line;
|
||||
}
|
||||
|
||||
get column() {
|
||||
return this._column;
|
||||
}
|
||||
// #endif
|
||||
|
||||
constructor(source?: string) {
|
||||
this._source = source;
|
||||
}
|
||||
|
||||
setSource(source: string): void {
|
||||
this._source = source;
|
||||
this._currentIndex = 0;
|
||||
// #if _VERBOSE
|
||||
this._line = this._column = 0;
|
||||
// #endif
|
||||
}
|
||||
|
||||
getShaderPosition(backOffset = 0): ShaderPosition {
|
||||
return ShaderLab.createPosition(
|
||||
this._currentIndex - backOffset,
|
||||
// #if _VERBOSE
|
||||
this._line,
|
||||
this._column - backOffset
|
||||
// #endif
|
||||
);
|
||||
}
|
||||
|
||||
isEnd(): boolean {
|
||||
return this._currentIndex >= this._source.length;
|
||||
}
|
||||
|
||||
getCurChar(): string {
|
||||
return this._source[this._currentIndex];
|
||||
}
|
||||
|
||||
getCurCharCode(): number {
|
||||
return this._source.charCodeAt(this._currentIndex);
|
||||
}
|
||||
|
||||
advance(count: number): void {
|
||||
// #if _VERBOSE
|
||||
const source = this._source;
|
||||
const startIndex = this._currentIndex;
|
||||
for (let i = 0; i < count; i++) {
|
||||
if (source[startIndex + i] === "\n") {
|
||||
this._line += 1;
|
||||
this._column = 0;
|
||||
} else {
|
||||
this._column += 1;
|
||||
}
|
||||
}
|
||||
// #endif
|
||||
this._currentIndex += count;
|
||||
}
|
||||
|
||||
skipSpace(includeLineBreak: boolean): void {
|
||||
while (BaseLexer.isWhiteSpaceChar(this.getCurCharCode(), includeLineBreak)) {
|
||||
this.advance(1);
|
||||
}
|
||||
}
|
||||
|
||||
skipCommentsAndSpace(): void {
|
||||
const source = this._source;
|
||||
const length = source.length;
|
||||
let index = this._currentIndex;
|
||||
|
||||
while (index < length) {
|
||||
// Skip whitespace
|
||||
while (index < length && BaseLexer.isWhiteSpaceChar(source.charCodeAt(index), true)) {
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for comments: 47 is '/'
|
||||
if (index + 1 >= length || source.charCodeAt(index) !== 47) break;
|
||||
|
||||
const nextChar = source.charCodeAt(index + 1);
|
||||
if (nextChar === 47) {
|
||||
// Single line comment: 10 is '\n', 13 is '\r'
|
||||
index += 2;
|
||||
while (index < length) {
|
||||
const charCode = source.charCodeAt(index);
|
||||
if (charCode === 10 || charCode === 13) break;
|
||||
index++;
|
||||
}
|
||||
} else if (nextChar === 42) {
|
||||
// Multi-line comment: 42 is '*'
|
||||
index += 2;
|
||||
while (index + 1 < length && !(source.charCodeAt(index) === 42 && source.charCodeAt(index + 1) === 47)) {
|
||||
index++;
|
||||
}
|
||||
index += 2; // Skip '*/'
|
||||
} else {
|
||||
break; // Not a comment, stop
|
||||
}
|
||||
}
|
||||
|
||||
this.advance(index - this._currentIndex);
|
||||
}
|
||||
|
||||
peek(to: number): string {
|
||||
const offset = this._currentIndex;
|
||||
return this._source.substring(offset, offset + to);
|
||||
}
|
||||
|
||||
scanLexeme(lexeme: string): void {
|
||||
this.skipCommentsAndSpace();
|
||||
const length = lexeme.length;
|
||||
const peek = this.peek(length);
|
||||
if (peek !== lexeme) {
|
||||
this.throwError(this.getShaderPosition(0), `Expect lexeme "${lexeme}", but got "${peek}"`);
|
||||
}
|
||||
this.advance(length);
|
||||
}
|
||||
|
||||
scanTwoExpectedLexemes(lexeme1: string, lexeme2: string): string | null {
|
||||
this.skipCommentsAndSpace();
|
||||
|
||||
// Check first lexeme
|
||||
if (this.peek(lexeme1.length) === lexeme1) {
|
||||
this.advance(lexeme1.length);
|
||||
return lexeme1;
|
||||
}
|
||||
|
||||
// Check second lexeme
|
||||
if (this.peek(lexeme2.length) === lexeme2) {
|
||||
this.advance(lexeme2.length);
|
||||
return lexeme2;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
throwError(pos: ShaderPosition | ShaderRange, ...msgs: any[]) {
|
||||
const error = ShaderLabUtils.createGSError(msgs.join(" "), GSErrorName.ScannerError, this._source, pos);
|
||||
// #if _VERBOSE
|
||||
Logger.error(error!.toString());
|
||||
// #endif
|
||||
throw error;
|
||||
}
|
||||
|
||||
scanPairedChar(left: string, right: string, balanced: boolean, skipLeading: boolean): string {
|
||||
if (!skipLeading) {
|
||||
this.scanLexeme(left);
|
||||
}
|
||||
|
||||
const start = this._currentIndex;
|
||||
const source = this._source;
|
||||
const sourceLength = source.length;
|
||||
|
||||
let currentIndex = this._currentIndex;
|
||||
if (balanced) {
|
||||
let level = 1;
|
||||
while (currentIndex < sourceLength) {
|
||||
const currentChar = source[currentIndex];
|
||||
if (currentChar === right && --level === 0) {
|
||||
break;
|
||||
} else if (currentChar === left) {
|
||||
level++;
|
||||
}
|
||||
currentIndex++;
|
||||
}
|
||||
} else {
|
||||
while (currentIndex < sourceLength) {
|
||||
if (source[currentIndex] === right) {
|
||||
break;
|
||||
}
|
||||
currentIndex++;
|
||||
}
|
||||
}
|
||||
|
||||
this.advance(currentIndex + 1 - this._currentIndex);
|
||||
|
||||
return source.substring(start, currentIndex);
|
||||
}
|
||||
|
||||
abstract scanToken(onToken?: OnToken): void;
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
import { ETokenType } from "./types";
|
||||
import { ShaderRange, ShaderPosition } from ".";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { IPoolElement } from "@galacean/engine";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
|
||||
export class BaseToken<T extends number = number> implements IPoolElement {
|
||||
static pool = ShaderLabUtils.createObjectPool(BaseToken);
|
||||
|
||||
type: T;
|
||||
lexeme: string;
|
||||
location: ShaderRange;
|
||||
|
||||
set(type: T, lexeme: string, start?: ShaderPosition);
|
||||
set(type: T, lexeme: string, location?: ShaderRange);
|
||||
set(type: T, lexeme: string, arg?: ShaderRange | ShaderPosition) {
|
||||
this.type = type;
|
||||
this.lexeme = lexeme;
|
||||
if (arg) {
|
||||
if (arg instanceof ShaderRange) {
|
||||
this.location = arg as ShaderRange;
|
||||
} else {
|
||||
const end = ShaderLab.createPosition(
|
||||
arg.index + lexeme.length,
|
||||
// #if _VERBOSE
|
||||
arg.line,
|
||||
arg.column + lexeme.length
|
||||
// #endif
|
||||
);
|
||||
this.location = ShaderLab.createRange(arg, end);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dispose(): void {}
|
||||
}
|
||||
|
||||
export const EOF = new BaseToken();
|
||||
EOF.set(ETokenType.EOF, "/EOF");
|
||||
@@ -1,7 +0,0 @@
|
||||
export interface IBaseSymbol {
|
||||
isInMacroBranch: boolean;
|
||||
|
||||
readonly ident: string;
|
||||
|
||||
equal(other: IBaseSymbol): boolean;
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
import { IPoolElement } from "@galacean/engine";
|
||||
|
||||
export class ShaderPosition implements IPoolElement {
|
||||
index: number;
|
||||
// #if _VERBOSE
|
||||
line: number;
|
||||
column: number;
|
||||
// #endif
|
||||
|
||||
set(
|
||||
index: number,
|
||||
// #if _VERBOSE
|
||||
line: number,
|
||||
column: number
|
||||
// #endif
|
||||
) {
|
||||
this.index = index;
|
||||
// #if _VERBOSE
|
||||
this.line = line;
|
||||
this.column = column;
|
||||
// #endif
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
this.index = 0;
|
||||
// #if _VERBOSE
|
||||
this.line = 0;
|
||||
this.column = 0;
|
||||
// #endif
|
||||
}
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
import { IPoolElement } from "@galacean/engine";
|
||||
import { ShaderPosition } from "./ShaderPosition";
|
||||
|
||||
export class ShaderRange implements IPoolElement {
|
||||
public start: ShaderPosition;
|
||||
public end: ShaderPosition;
|
||||
|
||||
set(start: ShaderPosition, end: ShaderPosition) {
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
this.start.dispose();
|
||||
this.end.dispose();
|
||||
}
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
import { Logger } from "@galacean/engine";
|
||||
import { IBaseSymbol } from "./IBaseSymbol";
|
||||
|
||||
export class SymbolTable<T extends IBaseSymbol> {
|
||||
private _table: Map<string, T[]> = new Map();
|
||||
|
||||
insert(symbol: T, isInMacroBranch = false): void {
|
||||
symbol.isInMacroBranch = isInMacroBranch;
|
||||
|
||||
const entry = this._table.get(symbol.ident) ?? [];
|
||||
for (let i = 0, n = entry.length; i < n; i++) {
|
||||
if (entry[i].isInMacroBranch) continue;
|
||||
if (entry[i].equal(symbol)) {
|
||||
Logger.warn("Replace symbol:", symbol.ident);
|
||||
entry[i] = symbol;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
entry.push(symbol);
|
||||
this._table.set(symbol.ident, entry);
|
||||
}
|
||||
|
||||
getSymbol(symbol: T, includeMacro = false): T | undefined {
|
||||
const entry = this._table.get(symbol.ident);
|
||||
if (entry) {
|
||||
for (let i = entry.length - 1; i >= 0; i--) {
|
||||
const item = entry[i];
|
||||
if (!includeMacro && item.isInMacroBranch) continue;
|
||||
if (item.equal(symbol)) return item;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
getSymbols(symbol: T, includeMacro = false, out: T[]): T[] {
|
||||
out.length = 0;
|
||||
this._getSymbols(symbol, includeMacro, out);
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
_getSymbols(symbol: T, includeMacro = false, out: T[]): T[] {
|
||||
const entry = this._table.get(symbol.ident);
|
||||
|
||||
if (entry) {
|
||||
for (let i = entry.length - 1; i >= 0; i--) {
|
||||
const item = entry[i];
|
||||
if (!includeMacro && item.isInMacroBranch) continue;
|
||||
if (item.equal(symbol)) out.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
import { IBaseSymbol } from "./IBaseSymbol";
|
||||
import { SymbolTable } from "./SymbolTable";
|
||||
|
||||
export class SymbolTableStack<S extends IBaseSymbol, T extends SymbolTable<S>> {
|
||||
stack: T[] = [];
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
_macroLevel = 0;
|
||||
|
||||
get scope(): T {
|
||||
return this.stack[this.stack.length - 1];
|
||||
}
|
||||
|
||||
get isInMacroBranch(): boolean {
|
||||
return this._macroLevel > 0;
|
||||
}
|
||||
|
||||
pushScope(scope: T): void {
|
||||
this.stack.push(scope);
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.stack.length = 0;
|
||||
}
|
||||
|
||||
popScope(): T | undefined {
|
||||
return this.stack.pop();
|
||||
}
|
||||
|
||||
insert(symbol: S): void {
|
||||
this.scope.insert(symbol, this.isInMacroBranch);
|
||||
}
|
||||
|
||||
lookup(symbol: S, includeMacro = false): S | undefined {
|
||||
for (let i = this.stack.length - 1; i >= 0; i--) {
|
||||
const symbolTable = this.stack[i];
|
||||
const result = symbolTable.getSymbol(symbol, includeMacro);
|
||||
if (result) return result;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
lookupAll(symbol: S, includeMacro = false, out: S[]): S[] {
|
||||
out.length = 0;
|
||||
for (let i = this.stack.length - 1; i >= 0; i--) {
|
||||
const symbolTable = this.stack[i];
|
||||
symbolTable._getSymbols(symbol, includeMacro, out);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
}
|
||||
@@ -1,115 +0,0 @@
|
||||
export enum Keyword {
|
||||
CONST = 0,
|
||||
BOOL,
|
||||
FLOAT,
|
||||
DOUBLE,
|
||||
INT,
|
||||
UINT,
|
||||
BREAK,
|
||||
CONTINUE,
|
||||
DO,
|
||||
ELSE,
|
||||
FOR,
|
||||
IF,
|
||||
WHILE,
|
||||
DISCARD,
|
||||
RETURN,
|
||||
BVEC2,
|
||||
BVEC3,
|
||||
BVEC4,
|
||||
IVEC2,
|
||||
IVEC3,
|
||||
IVEC4,
|
||||
UVEC2,
|
||||
UVEC3,
|
||||
UVEC4,
|
||||
VEC2,
|
||||
VEC3,
|
||||
VEC4,
|
||||
VEC4_ARRAY,
|
||||
MAT2,
|
||||
MAT3,
|
||||
MAT4,
|
||||
MAT2X3,
|
||||
MAT2X4,
|
||||
MAT3X2,
|
||||
MAT3X4,
|
||||
MAT4X2,
|
||||
MAT4X3,
|
||||
IN,
|
||||
OUT,
|
||||
INOUT,
|
||||
CENTROID,
|
||||
SAMPLER2D,
|
||||
SAMPLER3D,
|
||||
SAMPLER_CUBE,
|
||||
SAMPLER2D_SHADOW,
|
||||
SAMPLER_CUBE_SHADOW,
|
||||
SAMPLER2D_ARRAY,
|
||||
SAMPLER2D_ARRAY_SHADOW,
|
||||
I_SAMPLER2D,
|
||||
I_SAMPLER3D,
|
||||
I_SAMPLER_CUBE,
|
||||
I_SAMPLER2D_ARRAY,
|
||||
U_SAMPLER2D,
|
||||
U_SAMPLER3D,
|
||||
U_SAMPLER_CUBE,
|
||||
U_SAMPLER2D_ARRAY,
|
||||
STRUCT,
|
||||
LAYOUT,
|
||||
LOCATION,
|
||||
VOID,
|
||||
|
||||
PRECISION,
|
||||
PRECISE,
|
||||
HIGHP,
|
||||
MEDIUMP,
|
||||
LOWP,
|
||||
INVARIANT,
|
||||
SMOOTH,
|
||||
FLAT,
|
||||
NOPERSPECTIVE,
|
||||
|
||||
// Common Tokens
|
||||
True,
|
||||
False,
|
||||
LeftBrace,
|
||||
RightBrace,
|
||||
Equal,
|
||||
|
||||
// Source Parser Tokens
|
||||
GSRenderQueueType,
|
||||
GSBlendState,
|
||||
GSDepthState,
|
||||
GSStencilState,
|
||||
GSRasterState,
|
||||
GSEditorProperties,
|
||||
GSEditorMacros,
|
||||
GSEditor,
|
||||
GSTags,
|
||||
GSVertexShader,
|
||||
GSFragmentShader,
|
||||
GSSubShader,
|
||||
GSPass,
|
||||
GSBlendFactor,
|
||||
GSBlendOperation,
|
||||
GSBool,
|
||||
GSNumber,
|
||||
GSColor,
|
||||
GSCompareFunction,
|
||||
GSStencilOperation,
|
||||
GSCullMode,
|
||||
GSUsePass,
|
||||
|
||||
// Macros
|
||||
MACRO_IF,
|
||||
MACRO_IFDEF,
|
||||
MACRO_IFNDEF,
|
||||
MACRO_ELSE,
|
||||
MACRO_ELIF,
|
||||
MACRO_ENDIF,
|
||||
MACRO_UNDEF,
|
||||
MACRO_DEFINE_EXPRESSION,
|
||||
MACRO_CONDITIONAL_EXPRESSION,
|
||||
MACRO_CALL
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
export enum EShaderStage {
|
||||
VERTEX,
|
||||
FRAGMENT,
|
||||
ALL
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
import { ShaderRange } from "./ShaderRange";
|
||||
import { ShaderPosition } from "./ShaderPosition";
|
||||
|
||||
export { ShaderRange, ShaderPosition };
|
||||
export * from "./types";
|
||||
export * from "./enums/Keyword";
|
||||
@@ -1,151 +0,0 @@
|
||||
import { Keyword } from "./enums/Keyword";
|
||||
|
||||
export enum ETokenType {
|
||||
ID = 1000,
|
||||
FLOAT_CONSTANT,
|
||||
INT_CONSTANT,
|
||||
STRING_CONST,
|
||||
/** << */
|
||||
LEFT_OP,
|
||||
/** \>> */
|
||||
RIGHT_OP,
|
||||
/** ++ */
|
||||
INC_OP,
|
||||
/** -- */
|
||||
DEC_OP,
|
||||
/** <= */
|
||||
LE_OP,
|
||||
/** \>= */
|
||||
GE_OP,
|
||||
/** == */
|
||||
EQ_OP,
|
||||
/** != */
|
||||
NE_OP,
|
||||
/** && */
|
||||
AND_OP,
|
||||
/** || */
|
||||
OR_OP,
|
||||
/** ^^ */
|
||||
XOR_OP,
|
||||
/** *= */
|
||||
MUL_ASSIGN,
|
||||
/** /= */
|
||||
DIV_ASSIGN,
|
||||
/** += */
|
||||
ADD_ASSIGN,
|
||||
/** -= */
|
||||
SUB_ASSIGN,
|
||||
/** %= */
|
||||
MOD_ASSIGN,
|
||||
/** <<= */
|
||||
LEFT_ASSIGN,
|
||||
/** >>= */
|
||||
RIGHT_ASSIGN,
|
||||
/** &= */
|
||||
AND_ASSIGN,
|
||||
/** ^= */
|
||||
XOR_ASSIGN,
|
||||
/** |= */
|
||||
OR_ASSIGN,
|
||||
/** ( */
|
||||
LEFT_PAREN,
|
||||
/** ) */
|
||||
RIGHT_PAREN,
|
||||
/** [ */
|
||||
LEFT_BRACKET,
|
||||
/** ] */
|
||||
RIGHT_BRACKET,
|
||||
/** { */
|
||||
LEFT_BRACE,
|
||||
/** } */
|
||||
RIGHT_BRACE,
|
||||
/** . */
|
||||
DOT,
|
||||
/** , */
|
||||
COMMA,
|
||||
COLON,
|
||||
/** = */
|
||||
EQUAL,
|
||||
/** ; */
|
||||
SEMICOLON,
|
||||
/** ! */
|
||||
BANG,
|
||||
/** \- */
|
||||
DASH,
|
||||
/** ~ */
|
||||
TILDE,
|
||||
PLUS,
|
||||
/** \* */
|
||||
STAR,
|
||||
/** / */
|
||||
SLASH,
|
||||
/** % */
|
||||
PERCENT,
|
||||
/** < */
|
||||
LEFT_ANGLE,
|
||||
/** \> */
|
||||
RIGHT_ANGLE,
|
||||
VERTICAL_BAR,
|
||||
/** ^ */
|
||||
CARET,
|
||||
/** & */
|
||||
AMPERSAND,
|
||||
/** ? */
|
||||
QUESTION,
|
||||
|
||||
NotWord,
|
||||
|
||||
/** ε */
|
||||
EPSILON = 1998,
|
||||
EOF = 1999
|
||||
}
|
||||
|
||||
export const TypeAny = 3000;
|
||||
|
||||
export type GalaceanDataType =
|
||||
| Keyword.VOID
|
||||
| Keyword.FLOAT
|
||||
| Keyword.BOOL
|
||||
| Keyword.INT
|
||||
| Keyword.UINT
|
||||
| Keyword.VEC2
|
||||
| Keyword.VEC3
|
||||
| Keyword.VEC4
|
||||
| Keyword.BVEC2
|
||||
| Keyword.BVEC3
|
||||
| Keyword.BVEC4
|
||||
| Keyword.IVEC2
|
||||
| Keyword.IVEC3
|
||||
| Keyword.IVEC4
|
||||
| Keyword.UVEC2
|
||||
| Keyword.UVEC3
|
||||
| Keyword.UVEC4
|
||||
| Keyword.MAT2
|
||||
| Keyword.MAT3
|
||||
| Keyword.MAT4
|
||||
| Keyword.MAT2X3
|
||||
| Keyword.MAT2X4
|
||||
| Keyword.MAT3X2
|
||||
| Keyword.MAT3X4
|
||||
| Keyword.MAT4X2
|
||||
| Keyword.MAT4X3
|
||||
| Keyword.SAMPLER2D
|
||||
| Keyword.SAMPLER3D
|
||||
| Keyword.SAMPLER_CUBE
|
||||
| Keyword.SAMPLER2D_SHADOW
|
||||
| Keyword.SAMPLER_CUBE_SHADOW
|
||||
| Keyword.SAMPLER2D_ARRAY
|
||||
| Keyword.SAMPLER2D_ARRAY_SHADOW
|
||||
| Keyword.I_SAMPLER2D
|
||||
| Keyword.I_SAMPLER3D
|
||||
| Keyword.I_SAMPLER_CUBE
|
||||
| Keyword.I_SAMPLER2D_ARRAY
|
||||
| Keyword.U_SAMPLER2D
|
||||
| Keyword.U_SAMPLER3D
|
||||
| Keyword.U_SAMPLER_CUBE
|
||||
| Keyword.U_SAMPLER2D_ARRAY
|
||||
| Keyword.VEC4_ARRAY
|
||||
| typeof TypeAny
|
||||
| string;
|
||||
|
||||
export type TokenType = ETokenType | Keyword;
|
||||
@@ -1,13 +0,0 @@
|
||||
// Public entry point of the ShaderLab package.
export { ShaderLab } from "./ShaderLab";

export * from "./GSError";

// `__buildVersion` is substituted with the real version string by the build
// pipeline; until then it is an opaque template literal.
//@ts-ignore
export const version = `__buildVersion`;

// `// #if _VERBOSE` / `// #endif` are build-time conditional-compilation
// directives processed by the bundler: verbose builds report themselves
// as "Verbose" in the startup banner below.
let mode = "Release";
// #if _VERBOSE
mode = "Verbose";
// #endif

console.log(`Galacean Engine ShaderLab Version: ${version} | Mode: ${mode}`);
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,226 +0,0 @@
|
||||
import { Logger } from "@galacean/engine";
|
||||
import { ETokenType } from "../common";
|
||||
import { Keyword } from "../common/enums/Keyword";
|
||||
import { Grammar } from "../parser/Grammar";
|
||||
import { GrammarSymbol, NoneTerminal, Terminal } from "../parser/GrammarSymbol";
|
||||
import State from "./State";
|
||||
import StateItem from "./StateItem";
|
||||
import { default as GrammarUtils, default as Utils } from "./Utils";
|
||||
import { ActionInfo, ActionTable, EAction, GotoTable, StateActionTable, StateGotoTable } from "./types";
|
||||
|
||||
/**
 * The [LALR1](https://web.stanford.edu/class/archive/cs/cs143/cs143.1128/handouts/140%20LALR%20Parsing.pdf) Parser generator
 *
 * Builds the ACTION and GOTO tables for the grammar passed to the
 * constructor: FIRST sets are computed first, then states are expanded
 * from the start item via closure/goto with lookahead merging (LALR-style
 * state reuse through `State.create`'s core-keyed cache).
 */
export class LALR1 {
  // FIRST set per nonterminal, filled by `computeFirstSet`.
  readonly firstSetMap: Map<NoneTerminal, Set<Terminal>> = new Map();
  // NOTE(review): never written to in this class — looks unused here; confirm before relying on it.
  readonly followSetMap: Map<NoneTerminal, Set<Terminal>> = new Map();

  // state id -> (terminal -> shift/reduce/accept action)
  readonly actionTable: StateActionTable = new Map();
  // state id -> (nonterminal -> successor state id)
  readonly gotoTable: StateGotoTable = new Map();
  private grammar: Grammar;

  /** For circle detect */
  private _firstSetNTStack: NoneTerminal[] = [];

  constructor(grammar: Grammar) {
    this.grammar = grammar;
  }

  /** Build the parsing tables: FIRST sets first, then the LALR state machine. */
  generate() {
    this.computeFirstSet();
    this.buildStateTable();
  }

  /** Seed the automaton with `START :=> .S, {EOF}` and expand transitively. */
  private buildStateTable() {
    const startStateItemCore = [new StateItem(this.grammar.productions[0], 0, [ETokenType.EOF])];
    const startState = State.create(startStateItemCore);
    this._extendState(startState);
  }

  /**
   * Close a state and recursively expand its successors.
   * `needReInfer` short-circuits states whose lookaheads did not change,
   * which is what keeps the recursion finite.
   */
  private _extendState(state: State) {
    if (!state.needReInfer) return;
    this._closure(state);
    const newStates = this._inferNextState(state);
    for (const ns of newStates) {
      this._extendState(ns);
    }
  }

  /** Standard LR closure: expand every non-reducible core item in place. */
  private _closure(state: State) {
    for (const core of state.cores) {
      if (!core.canReduce()) {
        this._extendStateItem(state, core);
      }
    }
    state.closured = true;
    return state;
  }

  /**
   * Add closure items for the nonterminal after the dot of `item`,
   * propagating lookaheads. For `A :=> a.BC, {la}` the new items for B get
   * FIRST(C la) as lookahead; if C can derive ε, `la` is included too.
   */
  private _extendStateItem(state: State, item: StateItem) {
    // Dot before a terminal: nothing to close over.
    if (GrammarUtils.isTerminal(item.curSymbol)) return;

    const productionList = this.grammar.getProductionList(<NoneTerminal>item.curSymbol);

    if (item.nextSymbol) {
      let newLookaheadSet = new Set<Terminal>();
      let lastFirstSet: Set<Terminal> | undefined;
      let terminalExist = false;
      // when A :=> a.BC, a; ==》 B :=> .xy, First(Ca)
      // newLookAhead = First(Ca)
      for (let i = 1, nextSymbol = item.symbolByOffset(1); !!nextSymbol; nextSymbol = item.symbolByOffset(++i)) {
        if (GrammarUtils.isTerminal(nextSymbol)) {
          // A terminal terminates the FIRST computation.
          newLookaheadSet.add(<Terminal>nextSymbol);
          terminalExist = true;
          break;
        }
        lastFirstSet = this.firstSetMap.get(<NoneTerminal>nextSymbol)!;
        for (const t of lastFirstSet) {
          newLookaheadSet.add(t);
        }
        // Only continue past a nonterminal that can derive ε.
        if (!lastFirstSet.has(ETokenType.EPSILON)) break;
      }
      // Whole suffix after B is nullable: the item's own lookahead flows through.
      if (!terminalExist && lastFirstSet?.has(ETokenType.EPSILON)) {
        for (const t of item.lookaheadSet) {
          newLookaheadSet.add(t);
        }
      }

      for (const production of productionList) {
        const newItem = state.createStateItem(production, 0);
        // Recurse only when the item is new or its lookahead set actually grows.
        if (!state.items.has(newItem) || !Utils.isSubSet(newLookaheadSet, newItem.lookaheadSet)) {
          state.items.add(newItem);
          newItem.addLookahead(newLookaheadSet);
          this._extendStateItem(state, newItem);
        }
      }
    } else {
      // Dot at the end of the derivation: lookahead passes through unchanged.
      for (const production of productionList) {
        const newItem = state.createStateItem(production, 0);
        if (!state.items.has(newItem) || !Utils.isSubSet(item.lookaheadSet, newItem.lookaheadSet)) {
          state.items.add(newItem);
          newItem.addLookahead(item.lookaheadSet);
          this._extendStateItem(state, newItem);
        }
      }
    }
  }

  /**
   * Fill this state's ACTION/GOTO rows and build successor states by
   * grouping advanced items by the symbol consumed (the LR "goto" step).
   * Returns the successor states so the caller can expand them.
   */
  private _inferNextState(state: State): Set<State> {
    const coreMap: Map<GrammarSymbol, Set<StateItem>> = new Map();
    const stateActionTable: ActionTable = this.actionTable.get(state.id) ?? new Map();
    const stateGotoTable: GotoTable = this.gotoTable.get(state.id) ?? new Map();

    this.actionTable.set(state.id, stateActionTable);
    this.gotoTable.set(state.id, stateGotoTable);

    for (const stateItem of state.items) {
      if (stateItem.canReduce()) {
        // Reduce for every lookahead; reducing the start production accepts.
        let action: ActionInfo;
        if (stateItem.production.goal !== NoneTerminal.START) {
          action = {
            action: EAction.Reduce,
            target: stateItem.production.id
          };
        } else {
          action = { action: EAction.Accept };
        }

        for (const t of stateItem.lookaheadSet) {
          this._addAction(stateActionTable, t, action);
        }
      } else {
        // Group the dot-advanced item under the symbol being consumed.
        const nextItem = stateItem.advance();
        Utils.addMapSetItem(coreMap, stateItem.curSymbol, nextItem);
      }

      stateItem.needReInfer = false;
    }

    const newStates = new Set<State>();
    for (const [gs, cores] of coreMap.entries()) {
      // `State.create` dedupes by core, merging lookaheads into existing states.
      const newState = State.create(Array.from(cores));
      if (GrammarUtils.isTerminal(gs)) {
        this._addAction(stateActionTable, <Terminal>gs, {
          action: EAction.Shift,
          target: newState.id
        });
      } else {
        stateGotoTable.set(<NoneTerminal>gs, newState.id);
      }

      newStates.add(newState);
    }

    return newStates;
  }

  /** Resolve shift-reduce/reduce-reduce conflict detect */
  private _addAction(table: ActionTable, terminal: Terminal, action: ActionInfo) {
    const exist = table.get(terminal);
    if (exist && !Utils.isActionEqual(exist, action)) {
      // Resolve dangling else ambiguity
      // (prefer the shift, binding `else` to the nearest `if`).
      if (terminal === Keyword.ELSE && exist.action === EAction.Shift && action.action === EAction.Reduce) {
        return;
      } else {
        // Any other conflict: warn in verbose builds, last writer wins below.
        // #if _VERBOSE
        Logger.warn(
          `conflict detect: <Terminal ${GrammarUtils.toString(terminal)}> \n`,
          Utils.printAction(exist),
          "\n",
          Utils.printAction(action)
        );
        // #endif
      }
    }
    table.set(terminal, action);
  }

  // https://people.cs.pitt.edu/~jmisurda/teaching/cs1622/handouts/cs1622-first_and_follow.pdf
  private computeFirstSet() {
    // Skip productions[0] (the augmented start production).
    for (const production of this.grammar.productions.slice(1)) {
      this._computeFirstSetForNT(production.goal);
    }
  }

  /**
   * FIRST(NT), memoized in `firstSetMap`. Left-recursion cycles are detected
   * via `_firstSetNTStack`: every nonterminal on the cycle shares the same
   * (still-growing) set object.
   */
  private _computeFirstSetForNT(NT: NoneTerminal) {
    // circle detect
    const idx = this._firstSetNTStack.findIndex((item) => item === NT);
    if (idx !== -1) {
      const computingFS = this.firstSetMap.get(NT)!;
      const len = this._firstSetNTStack.length;
      // Alias every nonterminal on the cycle to the same set instance.
      for (let i = len - 1; i > idx; i--) {
        const curNT = this._firstSetNTStack[i];
        this.firstSetMap.set(curNT, computingFS);
      }
      return computingFS;
    }
    this._firstSetNTStack.push(NT);

    const productionList = this.grammar.getProductionList(NT);
    const firstSet = new Set<Terminal>();
    this.firstSetMap.set(NT, firstSet);
    if (this.grammar.isNullableNT(NT)) firstSet.add(ETokenType.EPSILON);

    for (const production of productionList) {
      let i = 0;
      for (; i < production.derivation.length; i++) {
        const gs = production.derivation[i];
        if (GrammarUtils.isTerminal(gs)) {
          firstSet.add(<Terminal>gs);
          break;
        }

        const succeedFirstSet = this._computeFirstSetForNT(<NoneTerminal>gs);

        // FIRST of a nonterminal prefix contributes everything but ε.
        for (const item of succeedFirstSet) {
          if (item !== ETokenType.EPSILON) firstSet.add(item);
        }
        // Continue only while the prefix stays nullable.
        if (!this.grammar.isNullableNT(<NoneTerminal>gs)) break;
      }
      // Entire derivation nullable → ε ∈ FIRST(NT).
      if (i === production.derivation.length) firstSet.add(ETokenType.EPSILON);
    }

    this._firstSetNTStack.pop();
    return firstSet;
  }
}
|
||||
@@ -1,17 +0,0 @@
|
||||
import { NoneTerminal, GrammarSymbol } from "../parser/GrammarSymbol";
|
||||
|
||||
export default class Production {
|
||||
private static _id = 0;
|
||||
static pool: Map<number, Production> = new Map();
|
||||
|
||||
readonly goal: NoneTerminal;
|
||||
readonly derivation: GrammarSymbol[];
|
||||
readonly id: number;
|
||||
|
||||
constructor(goal: NoneTerminal, derivation: GrammarSymbol[]) {
|
||||
this.goal = goal;
|
||||
this.derivation = derivation;
|
||||
this.id = Production._id++;
|
||||
Production.pool.set(this.id, this);
|
||||
}
|
||||
}
|
||||
@@ -1,79 +0,0 @@
|
||||
import { Terminal } from "../parser/GrammarSymbol";
|
||||
import Production from "./Production";
|
||||
import StateItem from "./StateItem";
|
||||
|
||||
/**
 * An LR automaton state: a set of items grown from a set of core items.
 * States are interned by their cores (`closureMap`), which is what turns
 * the LR(1) construction into LALR — same-core states are merged and
 * their lookaheads unioned.
 */
export default class State {
  // Core-key -> interned state; see `getMapKey`.
  static closureMap: Map<string /** state mapKey */, State> = new Map();
  // Every state ever created, keyed by id.
  static pool: Map<number, State> = new Map();
  static _id = 0;

  readonly id: number;
  readonly cores: Set<StateItem>;
  private _items: Set<StateItem>;
  // Closure items (cores included); grown in place by the generator.
  get items() {
    return this._items;
  }

  closured = false;
  // A state must be re-processed while any core's lookahead set is still growing.
  get needReInfer() {
    for (const core of this.cores) {
      if (core.needReInfer) return true;
    }
    return false;
  }

  // (production id, dot position) -> item, so items are unique per state.
  private _stateItemPool: Map<string /** Map ID */, StateItem> = new Map();

  /**
   * Intern a state by its cores. When a state with the same cores already
   * exists, the incoming lookaheads are merged into it (LALR merge) and the
   * existing instance is returned.
   */
  static create(cores: StateItem[]) {
    const cacheKey = this.getMapKey(cores);
    const state = this.closureMap.get(cacheKey);
    if (state) {
      for (const core of cores) {
        // merge lookahead
        state.createStateItem(core.production, core.position, core.lookaheadSet);
      }
      return state;
    }

    const newState = new State(cores);
    this.closureMap.set(cacheKey, newState);
    return newState;
  }

  // TODO: any optimization?
  // Cache key built from (production id, position) pairs only — lookaheads
  // are deliberately excluded so same-core states collide (LALR merging).
  static getMapKey(cores: StateItem[]) {
    return cores.map((item) => `${item.production.id},${item.position}`).join(";");
  }

  // Private: all construction goes through `State.create` for interning.
  private constructor(cores: Iterable<StateItem>) {
    this.id = State._id++;
    this._items = new Set([...cores]);
    this.cores = new Set([...cores]);
    for (const it of cores) {
      const itemMapKey = this.getStateItemMapKey(it.production, it.position);
      this._stateItemPool.set(itemMapKey, it);
    }
    State.pool.set(this.id, this);
  }

  // TODO: any optimization?
  getStateItemMapKey(production: Production, position: number) {
    return `${production.id},${position}`;
  }

  /**
   * Get this state's unique item for (production, position), creating it if
   * absent. Lookaheads are merged into an existing item; any newly added
   * lookahead marks the item dirty (`needReInfer`).
   */
  createStateItem(production: Production, position: number, lookaheadSet: Iterable<Terminal> = new Set()) {
    const mapId = this.getStateItemMapKey(production, position);
    const item = this._stateItemPool.get(mapId);
    if (item) {
      for (const la of lookaheadSet) {
        if (item.lookaheadSet.has(la)) continue;
        item.lookaheadSet.add(la);
        item.needReInfer = true;
      }
      return item;
    }
    const newItem = new StateItem(production, position, lookaheadSet);
    this._stateItemPool.set(mapId, newItem);
    return newItem;
  }
}
|
||||
@@ -1,78 +0,0 @@
|
||||
import { ETokenType } from "../common";
|
||||
import { NoneTerminal, Terminal } from "../parser/GrammarSymbol";
|
||||
import Production from "./Production";
|
||||
import GrammarUtils from "./Utils";
|
||||
|
||||
export default class StateItem {
|
||||
static _id = 0;
|
||||
|
||||
readonly production: Production;
|
||||
readonly position: number;
|
||||
readonly lookaheadSet: Set<Terminal>;
|
||||
readonly id: number;
|
||||
|
||||
_needReInfer = true;
|
||||
get needReInfer() {
|
||||
return this._needReInfer;
|
||||
}
|
||||
set needReInfer(v: boolean) {
|
||||
this._needReInfer = v;
|
||||
}
|
||||
|
||||
get curSymbol() {
|
||||
return this.production.derivation[this.position];
|
||||
}
|
||||
get nextSymbol() {
|
||||
return this.production.derivation[this.position + 1];
|
||||
}
|
||||
|
||||
constructor(production: Production, position: number, lookahead: Iterable<Terminal>) {
|
||||
this.production = production;
|
||||
this.position = position;
|
||||
this.lookaheadSet = new Set();
|
||||
for (const la of lookahead) {
|
||||
this.lookaheadSet.add(la);
|
||||
}
|
||||
this.id = StateItem._id++;
|
||||
}
|
||||
|
||||
addLookahead(ts: Iterable<Terminal>) {
|
||||
for (const t of ts) {
|
||||
if (this.lookaheadSet.has(t)) continue;
|
||||
this.lookaheadSet.add(t);
|
||||
this.needReInfer = true;
|
||||
}
|
||||
}
|
||||
|
||||
symbolByOffset(offset: number) {
|
||||
return this.production.derivation[this.position + offset];
|
||||
}
|
||||
|
||||
canReduce() {
|
||||
if (this.position > this.production.derivation.length - 1) return true;
|
||||
else {
|
||||
for (let i = this.position; i < this.production.derivation.length; i++) {
|
||||
if (this.production.derivation[i] !== ETokenType.EPSILON) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
advance() {
|
||||
// #if _VERBOSE
|
||||
if (this.canReduce()) throw `Error: advance reduce-able parsing state item`;
|
||||
// #endif
|
||||
return new StateItem(this.production, this.position + 1, this.lookaheadSet);
|
||||
}
|
||||
|
||||
// #if _VERBOSE
|
||||
toString() {
|
||||
const coreItem = this.production.derivation.map((item) => GrammarUtils.toString(item));
|
||||
coreItem[this.position] = "." + (coreItem[this.position] ?? "");
|
||||
|
||||
return `${NoneTerminal[this.production.goal]} :=> ${coreItem.join("|")} ;${Array.from(this.lookaheadSet)
|
||||
.map((item) => GrammarUtils.toString(item))
|
||||
.join("/")}`;
|
||||
}
|
||||
// #endif
|
||||
}
|
||||
@@ -1,76 +0,0 @@
|
||||
import { ETokenType, ShaderRange } from "../common";
|
||||
import { ASTNode, TreeNode } from "../parser/AST";
|
||||
import { TranslationRule } from "../parser/SemanticAnalyzer";
|
||||
import { NoneTerminal, GrammarSymbol } from "../parser/GrammarSymbol";
|
||||
import Production from "./Production";
|
||||
import { ActionInfo, EAction } from "./types";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { ClearableObjectPool, IPoolElement } from "@galacean/engine";
|
||||
import { NodeChild } from "../parser/types";
|
||||
import { Keyword } from "../common/enums/Keyword";
|
||||
|
||||
export default class GrammarUtils {
|
||||
static isTerminal(sm: GrammarSymbol) {
|
||||
return sm < NoneTerminal.START;
|
||||
}
|
||||
|
||||
static toString(sm: GrammarSymbol) {
|
||||
if (this.isTerminal(sm)) {
|
||||
return ETokenType[sm] ?? Keyword[sm];
|
||||
}
|
||||
return NoneTerminal[sm];
|
||||
}
|
||||
|
||||
static createProductionWithOptions(
|
||||
goal: NoneTerminal,
|
||||
options: GrammarSymbol[][],
|
||||
/** the ast node */
|
||||
astTypePool?: ClearableObjectPool<
|
||||
{ set: (loc: ShaderRange, children: NodeChild[]) => void } & IPoolElement & TreeNode
|
||||
>
|
||||
) {
|
||||
const ret: [GrammarSymbol[], TranslationRule | undefined][] = [];
|
||||
for (const opt of options) {
|
||||
ret.push([
|
||||
[goal, ...opt],
|
||||
function (sa, ...children) {
|
||||
if (!children[0]) return;
|
||||
const start = children[0].location.start;
|
||||
const end = children[children.length - 1].location.end;
|
||||
const location = ShaderLab.createRange(start, end);
|
||||
ASTNode.get(astTypePool ?? ASTNode.TrivialNode.pool, sa, location, children);
|
||||
}
|
||||
]);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static addMapSetItem<K, T>(map: Map<K, Set<T>>, k: K, v: T) {
|
||||
const set = map.get(k) ?? new Set();
|
||||
set.add(v);
|
||||
map.set(k, set);
|
||||
}
|
||||
|
||||
static isSubSet<T>(sa: Set<T>, sb: Set<T>) {
|
||||
for (const item of sa) {
|
||||
if (!sb.has(item)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static isActionEqual(a: ActionInfo, b: ActionInfo) {
|
||||
return a.action === b.action && a.target === b.target;
|
||||
}
|
||||
|
||||
// #if _VERBOSE
|
||||
static printAction(actionInfo: ActionInfo) {
|
||||
const production = Production.pool.get(actionInfo.target!);
|
||||
return `<Action: ${EAction[actionInfo.action]} -> ${this.printProduction(production)}>`;
|
||||
}
|
||||
|
||||
static printProduction(production: Production) {
|
||||
const deriv = production.derivation.map((gs) => GrammarUtils.toString(gs)).join("|");
|
||||
return `${NoneTerminal[production.goal]} :=> ${deriv}`;
|
||||
}
|
||||
// #endif
|
||||
}
|
||||
@@ -1,2 +0,0 @@
|
||||
export { LALR1 } from "./LALR1";
|
||||
export * from "./CFG";
|
||||
@@ -1,17 +0,0 @@
|
||||
import { NoneTerminal, Terminal } from "../parser/GrammarSymbol";
|
||||
|
||||
// Per-state ACTION table: state id -> terminal -> action.
export type StateActionTable = Map<number /** state ID */, ActionTable>;
// Terminal -> shift/reduce/accept decision within one state.
export type ActionTable = Map<Terminal, ActionInfo>;
// Per-state GOTO table: state id -> nonterminal -> successor state id.
export type StateGotoTable = Map<number /** state ID */, GotoTable>;
export type GotoTable = Map<NoneTerminal, number /** state ID */>;

/** The three LR parser actions. */
export enum EAction {
  Shift = 0,
  Reduce,
  Accept
}

export interface ActionInfo {
  action: EAction;
  // Shift: target state id; Reduce: production id; Accept: unused.
  target?: number;
}
|
||||
@@ -1,534 +0,0 @@
|
||||
import { ETokenType } from "../common";
|
||||
import { BaseLexer } from "../common/BaseLexer";
|
||||
import { BaseToken, EOF } from "../common/BaseToken";
|
||||
import { Keyword } from "../common/enums/Keyword";
|
||||
import { MacroDefineList } from "../Preprocessor";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
|
||||
/**
|
||||
* The Lexer of ShaderLab Compiler
|
||||
*/
|
||||
export class Lexer extends BaseLexer {
|
||||
private static _lexemeTable = <Record<string, Keyword>>{
|
||||
const: Keyword.CONST,
|
||||
bool: Keyword.BOOL,
|
||||
float: Keyword.FLOAT,
|
||||
double: Keyword.DOUBLE,
|
||||
int: Keyword.INT,
|
||||
uint: Keyword.UINT,
|
||||
break: Keyword.BREAK,
|
||||
continue: Keyword.CONTINUE,
|
||||
do: Keyword.DO,
|
||||
else: Keyword.ELSE,
|
||||
for: Keyword.FOR,
|
||||
if: Keyword.IF,
|
||||
while: Keyword.WHILE,
|
||||
discard: Keyword.DISCARD,
|
||||
return: Keyword.RETURN,
|
||||
bvec2: Keyword.BVEC2,
|
||||
bvec3: Keyword.BVEC3,
|
||||
bvec4: Keyword.BVEC4,
|
||||
ivec2: Keyword.IVEC2,
|
||||
ivec3: Keyword.IVEC3,
|
||||
ivec4: Keyword.IVEC4,
|
||||
uvec2: Keyword.UVEC2,
|
||||
uvec3: Keyword.UVEC3,
|
||||
uvec4: Keyword.UVEC4,
|
||||
vec2: Keyword.VEC2,
|
||||
vec3: Keyword.VEC3,
|
||||
vec4: Keyword.VEC4,
|
||||
mat2: Keyword.MAT2,
|
||||
mat3: Keyword.MAT3,
|
||||
mat4: Keyword.MAT4,
|
||||
in: Keyword.IN,
|
||||
out: Keyword.OUT,
|
||||
inout: Keyword.INOUT,
|
||||
sampler2D: Keyword.SAMPLER2D,
|
||||
samplerCube: Keyword.SAMPLER_CUBE,
|
||||
sampler3D: Keyword.SAMPLER3D,
|
||||
sampler2DShadow: Keyword.SAMPLER2D_SHADOW,
|
||||
samplerCubeShadow: Keyword.SAMPLER_CUBE_SHADOW,
|
||||
sampler2DArray: Keyword.SAMPLER2D_ARRAY,
|
||||
sampler2DArrayShadow: Keyword.SAMPLER2D_ARRAY_SHADOW,
|
||||
isampler2D: Keyword.I_SAMPLER2D,
|
||||
isampler3D: Keyword.I_SAMPLER3D,
|
||||
isamplerCube: Keyword.I_SAMPLER_CUBE,
|
||||
isampler2DArray: Keyword.I_SAMPLER2D_ARRAY,
|
||||
usampler2D: Keyword.U_SAMPLER2D,
|
||||
usampler3D: Keyword.U_SAMPLER3D,
|
||||
usamplerCube: Keyword.U_SAMPLER_CUBE,
|
||||
usampler2DArray: Keyword.U_SAMPLER2D_ARRAY,
|
||||
struct: Keyword.STRUCT,
|
||||
void: Keyword.VOID,
|
||||
true: Keyword.True,
|
||||
false: Keyword.False,
|
||||
precision: Keyword.PRECISION,
|
||||
precise: Keyword.PRECISE,
|
||||
highp: Keyword.HIGHP,
|
||||
mediump: Keyword.MEDIUMP,
|
||||
lowp: Keyword.LOWP,
|
||||
invariant: Keyword.INVARIANT,
|
||||
flat: Keyword.FLAT,
|
||||
smooth: Keyword.SMOOTH,
|
||||
noperspective: Keyword.NOPERSPECTIVE,
|
||||
centroid: Keyword.CENTROID,
|
||||
layout: Keyword.LAYOUT,
|
||||
location: Keyword.LOCATION,
|
||||
|
||||
// Macros ...
|
||||
"#if": Keyword.MACRO_IF,
|
||||
"#ifdef": Keyword.MACRO_IFDEF,
|
||||
"#ifndef": Keyword.MACRO_IFNDEF,
|
||||
"#else": Keyword.MACRO_ELSE,
|
||||
"#elif": Keyword.MACRO_ELIF,
|
||||
"#endif": Keyword.MACRO_ENDIF,
|
||||
"#undef": Keyword.MACRO_UNDEF
|
||||
};
|
||||
|
||||
private _needScanMacroConditionExpression = false;
|
||||
|
||||
*tokenize() {
|
||||
while (!this.isEnd()) {
|
||||
yield this.scanToken();
|
||||
}
|
||||
return EOF;
|
||||
}
|
||||
|
||||
constructor(
|
||||
source: string,
|
||||
public macroDefineList: MacroDefineList
|
||||
) {
|
||||
super(source);
|
||||
}
|
||||
|
||||
override scanToken(): BaseToken {
|
||||
this.skipCommentsAndSpace();
|
||||
if (this.isEnd()) {
|
||||
return EOF;
|
||||
}
|
||||
|
||||
if (this._needScanMacroConditionExpression) {
|
||||
this._needScanMacroConditionExpression = false;
|
||||
return this._scanMacroConditionExpression();
|
||||
}
|
||||
|
||||
const curCharCode = this.getCurCharCode();
|
||||
if (BaseLexer.isPreprocessorStartChar(curCharCode)) {
|
||||
return this._scanDirectives();
|
||||
}
|
||||
if (BaseLexer.isAlpha(curCharCode)) {
|
||||
return this._scanWord();
|
||||
}
|
||||
if (BaseLexer.isDigit(curCharCode)) {
|
||||
return this._scanNum();
|
||||
}
|
||||
|
||||
const start = this.getShaderPosition();
|
||||
const token = BaseToken.pool.get();
|
||||
let curChar: string;
|
||||
|
||||
switch (this.getCurChar()) {
|
||||
case "<":
|
||||
this.advance(1);
|
||||
curChar = this.getCurChar();
|
||||
if (curChar === "<") {
|
||||
this.advance(1);
|
||||
if (this.getCurChar() === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.LEFT_ASSIGN, "<<=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.LEFT_OP, "<<", start);
|
||||
break;
|
||||
} else if (curChar === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.LE_OP, "<=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.LEFT_ANGLE, "<", start);
|
||||
break;
|
||||
|
||||
case ">":
|
||||
this.advance(1);
|
||||
curChar = this.getCurChar();
|
||||
if (curChar === ">") {
|
||||
this.advance(1);
|
||||
if (this.getCurChar() === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.RIGHT_ASSIGN, ">>=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.RIGHT_OP, ">>", start);
|
||||
break;
|
||||
} else if (curChar === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.GE_OP, ">=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.RIGHT_ANGLE, ">", start);
|
||||
break;
|
||||
|
||||
case "+":
|
||||
this.advance(1);
|
||||
curChar = this.getCurChar();
|
||||
if (curChar === "+") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.INC_OP, "++", start);
|
||||
break;
|
||||
} else if (curChar === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.ADD_ASSIGN, "+=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.PLUS, "+", start);
|
||||
break;
|
||||
|
||||
case "-":
|
||||
this.advance(1);
|
||||
curChar = this.getCurChar();
|
||||
if (curChar === "-") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.DEC_OP, "--", start);
|
||||
break;
|
||||
} else if (curChar === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.SUB_ASSIGN, "-=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.DASH, "-", start);
|
||||
break;
|
||||
|
||||
case "=":
|
||||
this.advance(1);
|
||||
if (this.getCurChar() === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.EQ_OP, "==", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.EQUAL, "=", start);
|
||||
break;
|
||||
|
||||
case "!":
|
||||
this.advance(1);
|
||||
if (this.getCurChar() === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.NE_OP, "!=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.BANG, "!", start);
|
||||
break;
|
||||
|
||||
case "&":
|
||||
this.advance(1);
|
||||
curChar = this.getCurChar();
|
||||
if (curChar === "&") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.AND_OP, "&&", start);
|
||||
break;
|
||||
} else if (curChar === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.ADD_ASSIGN, "&=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.AMPERSAND, "&", start);
|
||||
break;
|
||||
|
||||
case "|":
|
||||
this.advance(1);
|
||||
curChar = this.getCurChar();
|
||||
if (curChar === "|") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.OR_OP, "||", start);
|
||||
break;
|
||||
} else if (curChar === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.OR_ASSIGN, "|=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.VERTICAL_BAR, "|", start);
|
||||
break;
|
||||
|
||||
case "^":
|
||||
this.advance(1);
|
||||
curChar = this.getCurChar();
|
||||
if (curChar === "^") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.XOR_OP, "^^", start);
|
||||
break;
|
||||
} else if (curChar === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.XOR_ASSIGN, "^=", start);
|
||||
break;
|
||||
}
|
||||
token.set(ETokenType.CARET, "^", start);
|
||||
break;
|
||||
|
||||
case "*":
|
||||
this.advance(1);
|
||||
if (this.getCurChar() === "=") {
|
||||
this.advance(1);
|
||||
token.set(ETokenType.MUL_ASSIGN, "*=", start);
|
||||
break;
|
||||
}
|
||||
|
||||
token.set(ETokenType.STAR, "*", start);
|
||||
break;
|
||||
|
||||
case "/":
|
||||
this.advance(1);
|
||||
if (this.getCurChar() === "=") {
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.DIV_ASSIGN, "/=", start);
|
||||
break;
|
||||
}
|
||||
|
||||
token.set(ETokenType.SLASH, "/", start);
|
||||
break;
|
||||
|
||||
case "%":
|
||||
this.advance(1);
|
||||
if (this.getCurChar() === "=") {
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.MOD_ASSIGN, "%=", start);
|
||||
break;
|
||||
}
|
||||
|
||||
token.set(ETokenType.PERCENT, "%", start);
|
||||
break;
|
||||
|
||||
case "(":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.LEFT_PAREN, "(", start);
|
||||
break;
|
||||
case ")":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.RIGHT_PAREN, ")", start);
|
||||
break;
|
||||
case "{":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.LEFT_BRACE, "{", start);
|
||||
break;
|
||||
case "}":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.RIGHT_BRACE, "}", start);
|
||||
break;
|
||||
case "[":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.LEFT_BRACKET, "[", start);
|
||||
break;
|
||||
case "]":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.RIGHT_BRACKET, "]", start);
|
||||
break;
|
||||
case ".":
|
||||
this.advance(1);
|
||||
if (BaseLexer.isDigit(this.getCurCharCode())) {
|
||||
return this._scanNumAfterDot();
|
||||
}
|
||||
|
||||
token.set(ETokenType.DOT, ".", start);
|
||||
break;
|
||||
case ",":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.COMMA, ",", start);
|
||||
break;
|
||||
case ":":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.COLON, ":", start);
|
||||
return token;
|
||||
case ";":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.SEMICOLON, ";", start);
|
||||
break;
|
||||
case "~":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.TILDE, "~", start);
|
||||
break;
|
||||
case "?":
|
||||
this.advance(1);
|
||||
|
||||
token.set(ETokenType.QUESTION, "?", start);
|
||||
break;
|
||||
case '"':
|
||||
this.advance(1);
|
||||
return this._scanStringConst();
|
||||
|
||||
default:
|
||||
this.throwError(this.getShaderPosition(0), `Unexpected character ${this.getCurChar()}`);
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
private _scanStringConst(): BaseToken {
|
||||
const start = this.getShaderPosition();
|
||||
const buffer: string[] = [];
|
||||
while (this.getCurChar() !== '"') {
|
||||
buffer.push(this.getCurChar());
|
||||
this.advance(1);
|
||||
}
|
||||
this.advance(1);
|
||||
const range = ShaderLab.createRange(start, this.getShaderPosition());
|
||||
|
||||
const token = BaseToken.pool.get();
|
||||
token.set(ETokenType.STRING_CONST, buffer.join(""), range);
|
||||
return token;
|
||||
}
|
||||
|
||||
private _scanNumAfterDot(): BaseToken {
|
||||
const buffer = ["."];
|
||||
while (BaseLexer.isDigit(this.getCurCharCode())) {
|
||||
buffer.push(this.getCurChar());
|
||||
this.advance(1);
|
||||
}
|
||||
this._scanFloatSuffix(buffer);
|
||||
const token = BaseToken.pool.get();
|
||||
token.set(ETokenType.FLOAT_CONSTANT, buffer.join(""), this.getShaderPosition(buffer.length));
|
||||
return token;
|
||||
}
|
||||
|
||||
private _scanUtilBreakLine(outBuffer: string[]): void {
|
||||
while (this.getCurChar() !== "\n" && !this.isEnd()) {
|
||||
outBuffer.push(this.getCurChar());
|
||||
this.advance(1);
|
||||
}
|
||||
}
|
||||
|
||||
private _scanDirectives(): BaseToken {
|
||||
const buffer: string[] = [this.getCurChar()];
|
||||
const start = this.getShaderPosition();
|
||||
this.advance(1);
|
||||
while (BaseLexer.isAlpha(this.getCurCharCode())) {
|
||||
buffer.push(this.getCurChar());
|
||||
this.advance(1);
|
||||
}
|
||||
const token = BaseToken.pool.get();
|
||||
const word = buffer.join("");
|
||||
|
||||
// If it is a macro definition or conditional expression, we need to skip the rest of the line
|
||||
if (word === "#define") {
|
||||
this._scanUtilBreakLine(buffer);
|
||||
const word = buffer.join("") + "\n";
|
||||
token.set(Keyword.MACRO_DEFINE_EXPRESSION, word, start);
|
||||
} else {
|
||||
const kt = Lexer._lexemeTable[word];
|
||||
token.set(kt ?? ETokenType.ID, word, start);
|
||||
if (word === "#if" || word === "#elif") {
|
||||
this._needScanMacroConditionExpression = true;
|
||||
}
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
private _scanMacroConditionExpression(): BaseToken {
|
||||
const buffer = new Array<string>();
|
||||
const start = this.getShaderPosition();
|
||||
this._scanUtilBreakLine(buffer);
|
||||
const word = buffer.join("");
|
||||
const token = BaseToken.pool.get();
|
||||
token.set(Keyword.MACRO_CONDITIONAL_EXPRESSION, word, start);
|
||||
return token;
|
||||
}
|
||||
|
||||
private _scanWord(): BaseToken {
|
||||
const buffer: string[] = [this.getCurChar()];
|
||||
const start = this.getShaderPosition();
|
||||
this.advance(1);
|
||||
while (BaseLexer.isAlnum(this.getCurCharCode())) {
|
||||
buffer.push(this.getCurChar());
|
||||
this.advance(1);
|
||||
}
|
||||
const token = BaseToken.pool.get();
|
||||
const word = buffer.join("");
|
||||
const kt = Lexer._lexemeTable[word];
|
||||
|
||||
if (this.macroDefineList[word]) {
|
||||
token.set(Keyword.MACRO_CALL, word, start);
|
||||
} else {
|
||||
token.set(kt ?? ETokenType.ID, word, start);
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
private _scanNum(): BaseToken {
|
||||
const buffer: string[] = [];
|
||||
while (BaseLexer.isDigit(this.getCurCharCode())) {
|
||||
buffer.push(this.getCurChar());
|
||||
this.advance(1);
|
||||
}
|
||||
const curChar = this.getCurChar();
|
||||
if (curChar === ".") {
|
||||
buffer.push(curChar);
|
||||
this.advance(1);
|
||||
while (BaseLexer.isDigit(this.getCurCharCode())) {
|
||||
buffer.push(this.getCurChar());
|
||||
this.advance(1);
|
||||
}
|
||||
this._scanFloatSuffix(buffer);
|
||||
|
||||
const token = BaseToken.pool.get();
|
||||
token.set(ETokenType.FLOAT_CONSTANT, buffer.join(""), this.getShaderPosition(buffer.length));
|
||||
return token;
|
||||
} else {
|
||||
if (curChar === "e" || curChar === "E") {
|
||||
this._scanFloatSuffix(buffer);
|
||||
|
||||
const token = BaseToken.pool.get();
|
||||
token.set(ETokenType.FLOAT_CONSTANT, buffer.join(""), this.getShaderPosition(buffer.length));
|
||||
return token;
|
||||
} else {
|
||||
this._scanIntegerSuffix(buffer);
|
||||
|
||||
const token = BaseToken.pool.get();
|
||||
token.set(ETokenType.INT_CONSTANT, buffer.join(""), this.getShaderPosition(buffer.length));
|
||||
return token;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Consume an optional exponent ("e"/"E" with optional sign and mandatory
 * digits) and an optional `f`/`F` suffix, appending the consumed characters
 * to `buffer`. Reports a lexing error when the exponent has no digits.
 */
private _scanFloatSuffix(buffer: string[]): void {
  let curChar = this.getCurChar();
  if (curChar === "e" || curChar === "E") {
    buffer.push(curChar);
    this.advance(1);
    curChar = this.getCurChar();
    // Optional exponent sign.
    if (curChar === "+" || curChar === "-") {
      buffer.push(curChar);
      this.advance(1);
      curChar = this.getCurChar();
    }
    // Exponent must contain at least one digit.
    if (!BaseLexer.isDigit(this.getCurCharCode()))
      this.throwError(this.getShaderPosition(0), "lexing error, invalid exponent suffix.");

    // Consume the exponent digits; `curChar` is a verified digit on entry.
    do {
      buffer.push(curChar);
      this.advance(1);
      curChar = this.getCurChar();
    } while (BaseLexer.isDigit(this.getCurCharCode()));
  }
  // Optional float suffix.
  if (curChar === "f" || curChar === "F") {
    buffer.push(curChar);
    this.advance(1);
  }
}
|
||||
|
||||
private _scanIntegerSuffix(buffer: string[]): void {
|
||||
const curChar = this.getCurChar();
|
||||
if (curChar === "u" || curChar === "U") {
|
||||
buffer.push(curChar);
|
||||
this.advance(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
export * from "./Lexer";
|
||||
@@ -1,41 +0,0 @@
|
||||
import { ShaderRange } from "../common";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
// #if _VERBOSE
|
||||
import { GSErrorName } from "../GSError";
|
||||
// #endif
|
||||
|
||||
export class MacroDefine {
|
||||
private _replaceRegex?: RegExp;
|
||||
private readonly _argsLexemes?: string[];
|
||||
|
||||
get isFunction(): boolean {
|
||||
return !!this.args;
|
||||
}
|
||||
|
||||
constructor(
|
||||
public readonly macro: BaseToken,
|
||||
public readonly body?: BaseToken,
|
||||
public readonly location?: ShaderRange,
|
||||
public readonly args?: BaseToken[]
|
||||
) {
|
||||
if (args?.length > 0) {
|
||||
this._argsLexemes = this.args.map((item) => item.lexeme);
|
||||
this._replaceRegex = new RegExp(`\\b(${this._argsLexemes.join("|")})\\b`, "g");
|
||||
}
|
||||
}
|
||||
|
||||
expandFunctionBody(args: string[]): string {
|
||||
if (args.length !== this.args?.length) {
|
||||
throw ShaderLabUtils.createGSError("mismatched function macro", GSErrorName.PreprocessorError, "", this.location);
|
||||
}
|
||||
|
||||
if (args.length === 0) {
|
||||
return this.body.lexeme;
|
||||
}
|
||||
|
||||
return this.body.lexeme.replace(this._replaceRegex, (m) => {
|
||||
return args[this._argsLexemes.indexOf(m)];
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,669 +0,0 @@
|
||||
import { ShaderMacro } from "@galacean/engine";
|
||||
import { ShaderPosition, ShaderRange } from "../common";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { GSErrorName } from "../GSError";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
import { MacroParserConstant, MacroParserKeyword, MacroParserToken } from "./constants";
|
||||
import { MacroDefine } from "./MacroDefine";
|
||||
import MacroParserLexer from "./MacroParserLexer";
|
||||
import { PpUtils } from "./Utils";
|
||||
// #if _VERBOSE
|
||||
import PpSourceMap, { BlockInfo } from "./sourceMap";
|
||||
// #endif
|
||||
import { BaseLexer } from "../common/BaseLexer";
|
||||
|
||||
/** A pending text replacement to apply to a source block during expansion. */
export interface ExpandSegment {
  // #if _VERBOSE
  // Provenance of the replaced text, used for source mapping in verbose builds.
  block?: BlockInfo;
  // #endif
  // Range within the block that will be replaced.
  rangeInBlock: ShaderRange;
  // Replacement text (may be empty to delete the range).
  replace: string;
}
|
||||
|
||||
/** @internal */
|
||||
export class MacroParser {
  // Lexer of the most recent top-level `parse` call.
  static lexer: MacroParserLexer;

  // Macros currently in effect (predefined, caller-supplied, or #define'd).
  private static _definedMacros: Map<string, MacroDefine> = new Map();
  // Stack of replacement-segment lists; one level per nested chunk expansion.
  private static _expandSegmentsStack: ExpandSegment[][] = [[]];

  // Cycle guard for body expansion: macro name -> version id of last visit.
  private static _expandVisitedMacros: Record<string, number> = {};
  private static _expandVersionId: number = 1;

  // #if _VERBOSE
  static _errors: Error[] = [];
  // #endif

  /**
   * Preprocess `source` with the given external macros plus directives found
   * in the text. Returns the expanded source; in verbose builds returns null
   * when errors were collected.
   */
  static parse(source: string, macros: ShaderMacro[]): string | null {
    MacroParser._reset();

    for (const macro of macros) {
      MacroParser._addPredefinedMacro(macro.name, macro.value);
    }

    this.lexer = new MacroParserLexer(source);
    return MacroParser._parseDirectives(this.lexer);
  }

  /** Clear all per-parse state and re-register the built-in GL_ES macro. */
  private static _reset() {
    this._expandSegmentsStack.length = 0;
    this._expandSegmentsStack.push([]);

    this._definedMacros.clear();
    this._addPredefinedMacro("GL_ES");

    // #if _VERBOSE
    this._errors.length = 0;
    // #endif
  }

  /** Register a macro that did not originate from source text (no location or args). */
  private static _addPredefinedMacro(macro: string, value?: string) {
    const token = BaseToken.pool.get();
    token.set(MacroParserToken.id, macro);

    let macroBody: BaseToken | undefined;
    // `!= undefined` deliberately also filters out null.
    if (value != undefined) {
      macroBody = BaseToken.pool.get();
      macroBody.set(MacroParserToken.id, value);
    }

    this._definedMacros.set(macro, new MacroDefine(token, macroBody));
  }
|
||||
|
||||
  /**
   * Main directive loop: scan tokens, dispatch each directive kind, then
   * apply all queued replacement segments to produce the expanded text.
   * Returns null in verbose builds when errors were recorded.
   */
  private static _parseDirectives(lexer: MacroParserLexer): string | null {
    let directive: BaseToken | undefined;
    while ((directive = lexer.scanToken())) {
      switch (directive.type) {
        case MacroParserToken.id:
          // Plain identifier: expand it if it names a defined macro.
          this._parseMacro(lexer, directive);
          break;
        case MacroParserKeyword.define:
          this._parseDefine(lexer);
          break;
        case MacroParserKeyword.undef:
          this._parseUndef(lexer);
          break;
        case MacroParserKeyword.if:
          this._parseIfDirective(lexer, MacroParserKeyword.if);
          break;
        case MacroParserKeyword.ifndef:
          this._parseIfDirective(lexer, MacroParserKeyword.ifndef);
          break;
        case MacroParserKeyword.ifdef:
          this._parseIfDirective(lexer, MacroParserKeyword.ifdef);
          break;
      }
    }
    // #if _VERBOSE
    if (this._errors.length > 0) return null;
    // #endif

    return PpUtils.expand(this._getExpandSegments(), lexer.source, lexer.sourceMap);
  }

  /** Replacement segments of the innermost (current) expansion level. */
  private static _getExpandSegments(): ExpandSegment[] {
    return this._expandSegmentsStack[this._expandSegmentsStack.length - 1];
  }

  /** Record a preprocessor error; the list is only retained in verbose builds. */
  private static _reportError(loc: ShaderRange | ShaderPosition, message: string, source: string, file?: string) {
    const error = ShaderLabUtils.createGSError(message, GSErrorName.PreprocessorError, source, loc, file);
    // #if _VERBOSE
    this._errors.push(error);
    // #endif
  }
|
||||
|
||||
  /**
   * Handle `#if` / `#ifdef` / `#ifndef`: evaluate the condition, scan the
   * branch body, and queue either the expanded body or an empty replacement.
   * Conditions on `GL_*` macros are left untouched for the GL driver.
   */
  private static _parseIfDirective(lexer: MacroParserLexer, directiveType: MacroParserKeyword): void {
    const directiveLength =
      directiveType === MacroParserKeyword.if ? 3 : directiveType === MacroParserKeyword.ifdef ? 6 : 7; // #if = 3, #ifdef = 6, #ifndef = 7
    // Rewind to where the directive text itself starts.
    const start = lexer.currentIndex - directiveLength;
    let skipMacro = false;

    let shouldInclude: MacroParserConstant;
    if (directiveType === MacroParserKeyword.if) {
      shouldInclude = this._parseConstantExpression(lexer);
    } else {
      const macroToken = lexer.scanWord();
      const lexeme = macroToken.lexeme;
      if (lexeme.startsWith("GL_")) {
        // GL built-ins are resolved by the driver, not by this preprocessor.
        skipMacro = true;
      } else {
        const defined = this._definedMacros.get(lexeme);
        shouldInclude = directiveType === MacroParserKeyword.ifdef ? !!defined : !defined;
      }
    }

    lexer.skipSpace(true);
    const { body, nextDirective } = lexer.scanMacroBranchBody();

    if (skipMacro) return;

    if (shouldInclude) {
      // Taken branch: skip any remaining #elif/#else chain, then splice in the
      // (recursively expanded) body.
      const end =
        nextDirective.type === MacroParserKeyword.endif ? lexer.getShaderPosition(0) : lexer.scanRemainMacro();
      const expanded = this._expandMacroChunk(body.lexeme, body.location, lexer);
      this._addContentReplace(
        lexer.file,
        ShaderLab.createPosition(start),
        end,
        expanded.content,
        lexer.blockRange,
        expanded.sourceMap
      );
    } else {
      // Untaken branch: delete it and continue with the follow-up directive.
      this._addEmptyReplace(lexer, start);
      this._processConditionalDirective(nextDirective.type, lexer);
    }
  }

  /**
   * Continue a conditional chain after an untaken branch: `#endif` ends it,
   * `#else` always takes its body, `#elif` re-evaluates and either takes its
   * body or recurses onto the next directive.
   */
  private static _processConditionalDirective(
    directive: MacroParserKeyword.elif | MacroParserKeyword.else | MacroParserKeyword.endif,
    scanner: MacroParserLexer
  ) {
    if (directive === MacroParserKeyword.endif) {
      return;
    }

    const start = scanner.currentIndex;

    if (directive === MacroParserKeyword.else) {
      const { body } = scanner.scanMacroBranchBody();
      const expanded = this._expandMacroChunk(body.lexeme, body.location, scanner);
      this._addContentReplace(
        scanner.file,
        ShaderLab.createPosition(start),
        scanner.getShaderPosition(0),
        expanded.content,
        scanner.blockRange,
        expanded.sourceMap
      );
    } else if (directive === MacroParserKeyword.elif) {
      const constantExpr = this._parseConstantExpression(scanner);
      const { body, nextDirective } = scanner.scanMacroBranchBody();
      if (constantExpr) {
        // Taken #elif: skip the rest of the chain past its #endif.
        const end =
          nextDirective.type === MacroParserKeyword.endif ? scanner.currentIndex : scanner.scanRemainMacro().index;
        const expanded = this._expandMacroChunk(body.lexeme, body.location, scanner);
        this._addContentReplace(
          scanner.file,
          ShaderLab.createPosition(start),
          ShaderLab.createPosition(end),
          expanded.content,
          scanner.blockRange,
          expanded.sourceMap
        );
      } else {
        // Untaken #elif: delete it and keep walking the chain.
        this._addContentReplace(
          scanner.file,
          ShaderLab.createPosition(start),
          ShaderLab.createPosition(scanner.currentIndex),
          "",
          scanner.blockRange
        );
        this._processConditionalDirective(nextDirective.type, scanner);
      }
    }
  }
|
||||
|
||||
  /**
   * Entry point of the recursive-descent evaluator for `#if` / `#elif`
   * constant expressions. Precedence (low to high): || , && , ==/!= ,
   * relational, shift, additive, multiplicative, unary, parenthesis/constant.
   */
  private static _parseConstantExpression(scanner: MacroParserLexer): MacroParserConstant {
    scanner.skipSpace(true);
    return this._parseLogicalOrExpression(scanner);
  }

  private static _parseLogicalOrExpression(scanner: MacroParserLexer): MacroParserConstant {
    const operand1 = this._parseLogicalAndExpression(scanner);
    const operator = scanner.peek(2);
    if (operator && operator === "||") {
      scanner.advance(2);
      scanner.skipSpace(false);
      // NOTE(review): right operand recurses into the same level, so the
      // chain associates to the right; result is unaffected for || itself.
      const operand2 = this._parseLogicalOrExpression(scanner);
      return operand1 || operand2;
    }
    return operand1;
  }

  private static _parseLogicalAndExpression(scanner: MacroParserLexer): MacroParserConstant {
    const operand1 = this._parseEqualityExpression(scanner);
    const operator = scanner.peek(2);
    if (operator && operator === "&&") {
      scanner.advance(2);
      scanner.skipSpace(false);
      const operand2 = this._parseLogicalAndExpression(scanner);
      return operand1 && operand2;
    }
    return operand1;
  }

  private static _parseEqualityExpression(scanner: MacroParserLexer): MacroParserConstant {
    const operand1 = this._parseRelationalExpression(scanner);
    const operator = scanner.peek(2);
    if (operator && ["==", "!="].includes(operator)) {
      scanner.advance(2);
      scanner.skipSpace(false);
      const operand2 = this._parseEqualityExpression(scanner);
      switch (operator) {
        case "==":
          return operand1 === operand2;
        case "!=":
          return operand1 !== operand2;
      }
    }
    return operand1;
  }

  private static _parseRelationalExpression(scanner: MacroParserLexer): MacroParserConstant {
    const operand1 = this._parseShiftExpression(scanner) as number;
    // Peek two chars; trim to one when it is not a two-char operator (>=, <=).
    let operator = scanner.peek(2);
    if (operator[1] !== "=") operator = operator[0];
    if (operator && [">", "<", ">=", "<="].includes(operator)) {
      const opPos = scanner.getShaderPosition(0);
      scanner.advance(operator.length);
      scanner.skipSpace(false);
      const operand2 = this._parseRelationalExpression(scanner) as number;
      // Both operands must be numbers for a relational comparison.
      if (typeof operand1 !== typeof operand2 && typeof operand1 !== "number") {
        this._reportError(opPos, "invalid operator in relation expression.", scanner.source, scanner.file);
        return;
      }
      switch (operator) {
        case ">":
          return operand1 > operand2;
        case "<":
          return operand1 < operand2;
        case ">=":
          return operand1 >= operand2;
        case "<=":
          return operand1 <= operand2;
      }
    }
    return operand1;
  }

  private static _parseShiftExpression(scanner: MacroParserLexer): MacroParserConstant {
    const operand1 = this._parseAdditiveExpression(scanner) as number;
    const operator = scanner.peek(2);
    if (operator && [">>", "<<"].includes(operator)) {
      const opPos = scanner.getShaderPosition(0);
      scanner.advance(2);
      scanner.skipSpace(false);
      const operand2 = this._parseShiftExpression(scanner) as number;
      if (typeof operand1 !== typeof operand2 && typeof operand1 !== "number") {
        this._reportError(opPos, "invalid operator in shift expression.", scanner.source, scanner.file);
        return;
      }
      switch (operator) {
        case ">>":
          return operand1 >> operand2;
        case "<<":
          return operand1 << operand2;
      }
    }

    return operand1;
  }
|
||||
|
||||
private static _parseAdditiveExpression(scanner: MacroParserLexer): MacroParserConstant {
|
||||
const operand1 = this._parseMulticativeExpression(scanner) as number;
|
||||
if ([">", "<"].includes(scanner.getCurChar())) {
|
||||
const opPos = scanner.getShaderPosition(0);
|
||||
scanner.advance(1);
|
||||
|
||||
const operator = scanner.getCurChar();
|
||||
scanner.skipSpace(false);
|
||||
const operand2 = this._parseAdditiveExpression(scanner) as number;
|
||||
if (typeof operand1 !== typeof operand2 && typeof operand1 !== "number") {
|
||||
this._reportError(opPos, "invalid operator.", scanner.source, scanner.file);
|
||||
return false;
|
||||
}
|
||||
switch (operator) {
|
||||
case "+":
|
||||
return operand1 + operand2;
|
||||
case "-":
|
||||
return operand1 - operand2;
|
||||
}
|
||||
}
|
||||
return operand1;
|
||||
}
|
||||
|
||||
private static _parseMulticativeExpression(scanner: MacroParserLexer): MacroParserConstant {
|
||||
const operand1 = this._parseUnaryExpression(scanner) as number;
|
||||
scanner.skipSpace(false);
|
||||
if (["*", "/", "%"].includes(scanner.getCurChar())) {
|
||||
const opPos = scanner.getShaderPosition(0);
|
||||
const operator = scanner.getCurChar();
|
||||
scanner.skipSpace(false);
|
||||
const operand2 = this._parseMulticativeExpression(scanner) as number;
|
||||
if (typeof operand1 !== typeof operand2 && typeof operand1 !== "number") {
|
||||
this._reportError(opPos, "invalid operator.", scanner.source, scanner.file);
|
||||
return;
|
||||
}
|
||||
switch (operator) {
|
||||
case "*":
|
||||
return operand1 * operand2;
|
||||
case "/":
|
||||
return operand1 / operand2;
|
||||
case "%":
|
||||
return operand1 % operand2;
|
||||
}
|
||||
}
|
||||
return operand1;
|
||||
}
|
||||
|
||||
  /**
   * Parse an optional unary prefix (`+`, `-`, `!`) applied to a parenthesized
   * expression or constant. `!` requires a boolean operand; `+`/`-` require a
   * numeric one.
   */
  private static _parseUnaryExpression(scanner: MacroParserLexer) {
    const operator = scanner.getCurChar();
    if (["+", "-", "!"].includes(operator)) {
      const opPos = scanner.getShaderPosition(0);
      scanner.advance(1);
      scanner.skipSpace(false);
      const parenExpr = this._parseParenthesisExpression(scanner);
      if ((operator === "!" && typeof parenExpr !== "boolean") || (operator !== "!" && typeof parenExpr !== "number")) {
        this._reportError(opPos, "invalid operator.", scanner.source, scanner.file);
      }

      switch (operator) {
        case "+":
          return parenExpr;
        case "-":
          return -parenExpr;
        case "!":
          return !parenExpr;
      }
    }
    return this._parseParenthesisExpression(scanner);
  }

  /** Parse `( expr )` or fall through to a bare constant. */
  private static _parseParenthesisExpression(scanner: MacroParserLexer): MacroParserConstant {
    if (scanner.getCurChar() === "(") {
      scanner.advance(1);
      scanner.skipSpace(false);
      const ret = this._parseConstantExpression(scanner);
      // Skip anything up to and including the closing parenthesis.
      scanner.scanToChar(")");
      scanner.advance(1);
      return ret;
    }
    return this._parseConstant(scanner);
  }

  /**
   * Parse a terminal: `defined(MACRO)` / `defined MACRO`, a macro name
   * (resolved to its integer body, or a boolean for bodyless macros), or an
   * integer literal. Reports an error on anything else.
   */
  private static _parseConstant(scanner: MacroParserLexer): MacroParserConstant {
    if (BaseLexer.isAlpha(scanner.getCurCharCode())) {
      const id = scanner.scanWord();
      if (id.type === MacroParserKeyword.defined) {
        const withParen = scanner.peekNonSpace() === "(";
        const macro = scanner.scanWord();
        if (withParen) {
          scanner.scanToChar(")");
          scanner.advance(1);
        }
        return !!this._definedMacros.get(macro.lexeme);
      } else {
        const macro = this._definedMacros.get(id.lexeme);

        // Undefined macro name evaluates as false.
        if (!macro) {
          return false;
        }

        // Defined but bodyless macro evaluates as true.
        if (!macro.body) {
          return true;
        }

        // Function-like macros cannot appear as constants in #if expressions.
        if (macro.isFunction) {
          this._reportError(id.location, "invalid function macro usage", scanner.source, scanner.file);
        }
        const value = Number(macro.body.lexeme);
        if (!Number.isInteger(value)) {
          this._reportError(id.location, `invalid const macro: ${id.lexeme}`, scanner.source, scanner.file);
        }
        return value;
      }
    } else if (BaseLexer.isDigit(scanner.getCurCharCode())) {
      const integer = scanner.scanInteger();
      return Number(integer.lexeme);
    } else {
      this._reportError(
        scanner.getShaderPosition(0),
        `invalid token: ${scanner.getCurChar()}`,
        scanner.source,
        scanner.file
      );
    }
  }
|
||||
|
||||
  /**
   * Extract the argument list of a function-like macro call found in `source`
   * at `startIndex` (index of the macro name). Tracks parenthesis nesting so
   * commas inside nested parentheses do not split arguments.
   * @returns the trimmed argument strings and the index just past `)`.
   */
  private static _parseMacroFunctionArgs(
    source: string,
    startIndex: number,
    macroName: string
  ): { args: string[]; endIndex: number } {
    const length = source.length;
    let i = startIndex + macroName.length;

    // Find opening parenthesis
    while (i < length && source.charCodeAt(i) !== 40) i++;

    // Parse function arguments
    const args: string[] = [];
    let level = 1;
    let argStart = i + 1;
    let k = argStart;

    while (k < length && level > 0) {
      const charCode = source.charCodeAt(k);
      if (charCode === 40) {
        // "(": deeper nesting.
        level++;
      } else if (charCode === 41) {
        // ")": close one level; at level 0 the final argument ends.
        if (--level === 0) {
          const arg = source.substring(argStart, k).trim();
          if (arg.length > 0) args.push(arg);
          break;
        }
      } else if (charCode === 44 && level === 1) {
        // Top-level ",": argument separator.
        const arg = source.substring(argStart, k).trim();
        if (arg.length > 0) args.push(arg);
        argStart = k + 1;
      }
      k++;
    }

    return { args, endIndex: k + 1 };
  }

  /**
   * Iteratively expand every defined macro occurring in `body`, restarting the
   * scan after each substitution. A per-call version id in
   * `_expandVisitedMacros` stops each macro from being expanded twice, which
   * guards against circular definitions.
   */
  private static _expandMacroBody(body: string): string {
    const visitedMacros = this._expandVisitedMacros;
    const currentVersionId = ++this._expandVersionId;
    let expandedBody = body;
    let hasExpansion = true;

    while (hasExpansion) {
      hasExpansion = false;
      const length = expandedBody.length;
      let i = 0;

      while (i < length) {
        const charCode = expandedBody.charCodeAt(i);
        if (!BaseLexer.isAlpha(charCode)) {
          i++;
          continue;
        }

        // Scan one identifier-like word.
        const start = i;
        while (i < length && BaseLexer.isAlnum(expandedBody.charCodeAt(i))) {
          i++;
        }

        const macroName = expandedBody.substring(start, i);
        const macro = this._definedMacros.get(macroName);

        if (!macro || visitedMacros[macroName] === currentVersionId) {
          continue;
        }

        // Prevent circular references
        visitedMacros[macroName] = currentVersionId;

        let replacement: string;
        let endIndex: number;

        if (!macro.isFunction) {
          replacement = macro.body?.lexeme ?? "";
          endIndex = i;
        } else {
          const { args, endIndex: newEndIndex } = this._parseMacroFunctionArgs(expandedBody, start, macroName);
          replacement = macro.expandFunctionBody(args);
          endIndex = newEndIndex;
        }

        // Splice in the replacement, then restart scanning from the top.
        expandedBody = expandedBody.substring(0, start) + replacement + expandedBody.substring(endIndex);
        hasExpansion = true;
        break;
      }
    }

    return expandedBody;
  }
|
||||
|
||||
  /**
   * Recursively expand macro body and expansion.
   */
  private static _expandMacroChunk(
    chunk: string,
    loc: ShaderRange,
    parentScanner: MacroParserLexer
  ): {
    content: string;
    // #if _VERBOSE
    sourceMap: PpSourceMap;
    // #endif
  };
  private static _expandMacroChunk(
    chunk: string,
    loc: ShaderRange,
    file: string
  ): {
    content: string;
    // #if _VERBOSE
    sourceMap: PpSourceMap;
    // #endif
  };
  // Implementation: runs the full directive parser over `chunk` on a fresh
  // expand-segment level so nested replacements do not leak into the parent's.
  private static _expandMacroChunk(
    chunk: string,
    loc: ShaderRange,
    scannerOrFile: MacroParserLexer | string
  ): {
    content: string;
    // #if _VERBOSE
    sourceMap: PpSourceMap;
    // #endif
  } {
    this._expandSegmentsStack.push([]);
    let scanner: MacroParserLexer;
    if (typeof scannerOrFile === "string") {
      scanner = new MacroParserLexer(chunk, scannerOrFile);
    } else {
      scanner = new MacroParserLexer(chunk, scannerOrFile.file, loc);
    }

    const ret = this._parseDirectives(scanner);
    this._expandSegmentsStack.pop();
    return {
      content: ret,
      // #if _VERBOSE
      sourceMap: scanner.sourceMap
      // #endif
    };
  }
|
||||
|
||||
  /** Queue deletion of the text from `start` up to the lexer's current position. */
  private static _addEmptyReplace(lexer: MacroParserLexer, start: number) {
    this._addContentReplace(
      lexer.file,
      ShaderLab.createPosition(start),
      lexer.getShaderPosition(0),
      "",
      lexer.blockRange
    );
  }

  /** Queue a replacement segment on the current expansion level. */
  private static _addContentReplace(
    sourceFile: string,
    start: ShaderPosition,
    end: ShaderPosition,
    content: string,
    sourceRange?: ShaderRange,
    sourceMap?: PpSourceMap
  ): void {
    // #if _VERBOSE
    const block = new BlockInfo(sourceFile, sourceRange, sourceMap);
    // #endif

    const range = ShaderLab.createRange(start, end);
    this._getExpandSegments().push({
      // #if _VERBOSE
      block,
      // #endif
      rangeInBlock: range,
      replace: content
    });
  }

  /**
   * Handle `#define`: record the macro (with optional parameter list and
   * body) and queue deletion of the directive text from the output.
   */
  private static _parseDefine(lexer: MacroParserLexer): void {
    // Back up over the "#define" text (7 chars).
    const start = lexer.getShaderPosition(7);
    const macroName = lexer.scanWord();

    const { lexeme, location } = macroName;
    let { end } = location;
    // NOTE(review): the redefinition error only fires for GL_-prefixed names;
    // ordinary macros are silently overwritten — confirm this is intended.
    if (this._definedMacros.get(lexeme) && lexeme.startsWith("GL_")) {
      this._reportError(location, `Redefined macro: ${lexeme}`, lexer.source, lexer.file);
    }

    let macroArgs: BaseToken[] | undefined;
    if (lexer.getCurChar() === "(") {
      // Function-like macro: collect the formal parameter names.
      macroArgs = lexer.scanWordsUntilTerminator(")");
      end = lexer.getShaderPosition(0);
    }
    const macroBody = lexer.scanMacroBody();
    const range = ShaderLab.createRange(start, end);
    const macroDefine = new MacroDefine(macroName, macroBody, range, macroArgs);
    this._definedMacros.set(lexeme, macroDefine);

    this._addContentReplace(lexer.file, start, lexer.getShaderPosition(0), "", lexer.blockRange);
  }

  /** Handle `#undef`: drop the macro and delete the directive from the output. */
  private static _parseUndef(lexer: MacroParserLexer): void {
    // Back up over the "#undef" text (6 chars).
    const start = lexer.getShaderPosition(6);
    const macroName = lexer.scanWord();
    this._definedMacros.delete(macroName.lexeme);

    this._addContentReplace(lexer.file, start, lexer.getShaderPosition(0), "", lexer.blockRange);
  }
|
||||
|
||||
  /**
   * Expand an identifier token that names a defined macro: function-like
   * macros also consume their call-site argument list; object-like macros are
   * replaced in place. Identifiers that are not macros are left untouched.
   */
  private static _parseMacro(lexer: MacroParserLexer, token: BaseToken) {
    const macro = this._definedMacros.get(token.lexeme);
    if (macro) {
      const { location } = token;
      if (macro.isFunction) {
        const { args, endIndex } = this._parseMacroFunctionArgs(lexer.source, location.start.index, token.lexeme);
        const macroBodyExpanded = macro.expandFunctionBody(args);
        const expandedContent = this._expandMacroBody(macroBodyExpanded);

        // Advance the lexer over the argument list it has not consumed yet.
        const remainingLength = endIndex - location.end.index;
        lexer.advance(remainingLength);

        this._addContentReplace(
          lexer.file,
          location.start,
          lexer.getShaderPosition(0),
          expandedContent,
          lexer.blockRange
        );
      } else {
        const macroContent = macro.body?.lexeme ?? "";
        const expandedContent = this._expandMacroBody(macroContent);

        this._addContentReplace(lexer.file, location.start, location.end, expandedContent, lexer.blockRange);
      }
    }
  }

  // #if _VERBOSE
  /** Map an index in the expanded output back to the original source (verbose builds). */
  static convertSourceIndex(index: number) {
    return this.lexer.sourceMap.map(index);
  }
  // #endif
}
|
||||
@@ -1,309 +0,0 @@
|
||||
import { ShaderPosition, ShaderRange } from "../common";
|
||||
// #if _VERBOSE
|
||||
import PpSourceMap from "./sourceMap";
|
||||
// #endif
|
||||
import { BaseLexer } from "../common/BaseLexer";
|
||||
import { BaseToken, EOF } from "../common/BaseToken";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { MacroParserKeyword, MacroParserToken } from "./constants";
|
||||
|
||||
export type OnToken = (token: BaseToken, scanner: MacroParserLexer) => void;
|
||||
|
||||
export default class MacroParserLexer extends BaseLexer {
  /** True for characters that can appear in a directive word: '#', '_', A-Z, a-z, 0-9. */
  private static _isPpCharacters(charCode: number): boolean {
    return (
      charCode === 35 || // #
      BaseLexer.isAlnum(charCode) // _, A-Z, a-z, 0-9
    );
  }

  // Directive lexeme -> keyword token type; unlisted words lex as plain ids.
  private static _lexemeTable = <Record<string, MacroParserKeyword>>{
    "#define": MacroParserKeyword.define,
    "#undef": MacroParserKeyword.undef,
    "#if": MacroParserKeyword.if,
    "#ifdef": MacroParserKeyword.ifdef,
    "#ifndef": MacroParserKeyword.ifndef,
    "#else": MacroParserKeyword.else,
    "#elif": MacroParserKeyword.elif,
    "#endif": MacroParserKeyword.endif,
    defined: MacroParserKeyword.defined
  };

  // Current #if/#ifdef/#ifndef nesting depth, maintained by scanToken.
  private macroLvl = 0;

  // #if _VERBOSE
  readonly sourceMap = new PpSourceMap();
  readonly file: string;
  readonly blockRange?: ShaderRange;
  // #endif

  /**
   * @param source - text to scan
   * @param file - originating file label for diagnostics (verbose builds)
   * @param blockRange - range of this chunk within its parent (verbose builds)
   */
  constructor(
    source: string,
    // #if _VERBOSE
    file = "__main__",
    blockRange?: ShaderRange
    // #endif
  ) {
    super(source);
    // #if _VERBOSE
    this.file = file;
    this.blockRange = blockRange;
    // #endif
  }
|
||||
|
||||
  /**
   * Collect identifier tokens until `terminatorChar` is reached; the
   * terminator is consumed. Non-word, non-terminator characters are skipped.
   */
  scanWordsUntilTerminator(terminatorChar: string): BaseToken[] {
    const tokens: BaseToken[] = [];
    while (true) {
      this.skipSpace(true);
      if (BaseLexer.isAlpha(this.getCurCharCode())) {
        tokens.push(this.scanWord());
      } else if (this.getCurChar() === terminatorChar) {
        this.advance(1);
        return tokens;
      } else {
        this.advance(1);
      }
    }
  }

  /**
   * Scan the next identifier-like word, classifying directive keywords via
   * `_lexemeTable`. Returns EOF when the input ends before a word starts.
   */
  scanWord(): BaseToken {
    // Skip all non-alphabetic characters, primarily used for handling defined(MACRO) syntax
    while (!BaseLexer.isAlpha(this.getCurCharCode()) && !this.isEnd()) {
      this.advance(1);
    }

    if (this.isEnd()) {
      return EOF;
    }

    const start = this._currentIndex;
    while (BaseLexer.isAlnum(this.getCurCharCode()) && !this.isEnd()) {
      this.advance(1);
    }
    const end = this._currentIndex;
    const word = this._source.slice(start, end);
    if (end === start) {
      this.throwError(this.getShaderPosition(0), "no word found.");
    }

    const token = BaseToken.pool.get();
    const tokenType = MacroParserLexer._lexemeTable[word] ?? MacroParserToken.id;
    token.set(tokenType, word, this.getShaderPosition(word.length));
    return token;
  }
|
||||
|
||||
  /**
   * Scan the next preprocessor-relevant run of characters ('#', letters,
   * digits, '_'), skipping everything else. Updates `macroLvl` when the token
   * opens (#if/#ifdef/#ifndef) or closes (#endif) a conditional.
   * Returns undefined at end of input.
   */
  override scanToken(): BaseToken | undefined {
    this.skipCommentsAndSpace();
    if (this.isEnd()) {
      return;
    }
    const source = this._source;
    let start = this._currentIndex;
    let found = false;
    for (var n = source.length; this._currentIndex < n; ) {
      if (MacroParserLexer._isPpCharacters(source.charCodeAt(this._currentIndex))) {
        this.advance(1);
        found = true;
      } else {
        // A non-pp character ends a run that was in progress; otherwise keep
        // skipping and restart the candidate run here.
        if (found) {
          break;
        }
        this.advance(1);
        this.skipCommentsAndSpace();
        start = this._currentIndex;
      }
    }

    const lexeme = source.slice(start, this._currentIndex);
    const token = BaseToken.pool.get();
    const type = MacroParserLexer._lexemeTable[lexeme] ?? MacroParserToken.id;
    token.set(type, lexeme, this.getShaderPosition(this._currentIndex - start));

    // Track conditional nesting so branch scanners can match their #endif.
    if (type === MacroParserKeyword.if || type === MacroParserKeyword.ifdef || type === MacroParserKeyword.ifndef) {
      this.macroLvl++;
    } else if (type === MacroParserKeyword.endif) {
      this.macroLvl--;
    }

    return token;
  }
|
||||
|
||||
  /**
   * Scan a double-quoted string and return its contents (quotes excluded).
   * Throws when the opening or closing quote is missing. No escape handling
   * is performed — the lexeme runs to the next '"'.
   */
  scanQuotedString(): BaseToken<MacroParserToken.string_const> {
    this.skipSpace(true);
    const source = this._source;
    const sourceLength = source.length;
    const start = this.getShaderPosition(0);

    let index = this._currentIndex;

    // Check for opening quote
    if (source.charCodeAt(index) !== 34) {
      // 34 = '"'
      this.throwError(start, "Unexpected char, expected '\"'");
    }

    const contentStart = ++index; // Skip opening quote and record start

    // Fast scan to closing quote
    while (index < sourceLength && source.charCodeAt(index) !== 34) {
      index++;
    }

    if (index >= sourceLength) {
      this.throwError(this.getShaderPosition(0), "Unexpected char, expected '\"'");
    }

    const lexeme = source.slice(contentStart, index);
    this.advance(index + 1 - this._currentIndex); // Skip to after closing quote

    const token = BaseToken.pool.get();
    token.set(MacroParserToken.string_const, lexeme, start);
    return token;
  }

  /** Advance until the current character is `char` (or end of input); `char` is not consumed. */
  scanToChar(char: string) {
    const source = this._source;
    while (source[this._currentIndex] !== char && !this.isEnd()) {
      this.advance(1);
    }
  }
|
||||
|
||||
  /**
   * Scan the body of one conditional branch, stopping at the matching
   * `#endif` (one level up) or a same-level `#elif`/`#else`. Returns the body
   * text as a chunk token plus the directive that terminated it.
   */
  scanMacroBranchBody(): {
    body: BaseToken<MacroParserToken.chunk>;
    nextDirective: BaseToken;
  } {
    const shaderPosition = this.getShaderPosition(0);
    const startLevel = this.macroLvl;

    let nextDirective = this.scanToken()!;
    while (true) {
      const { type } = nextDirective;
      // scanToken already decremented macroLvl for #endif, hence `startLevel - 1`.
      if (type === MacroParserKeyword.endif && startLevel - 1 === this.macroLvl) {
        break;
      } else if (
        (type === MacroParserKeyword.elif || type === MacroParserKeyword.else) &&
        startLevel === this.macroLvl
      ) {
        break;
      }
      nextDirective = this.scanToken()!;
    }

    // Body text excludes the terminating directive itself.
    const lexeme = this._source.slice(shaderPosition.index, this._currentIndex - nextDirective.lexeme.length - 1);
    const body = BaseToken.pool.get();
    body.set(MacroParserToken.chunk, lexeme, shaderPosition);
    return { body, nextDirective };
  }

  /**
   * Skip past one balanced `lc`…`rc` block, consuming the closing delimiter.
   * NOTE(review): there is no isEnd() guard in the loop — presumably callers
   * only pass balanced input; confirm before reusing on untrusted text.
   */
  scanPairedBlock(lc: string, rc: string): void {
    this.scanToChar(lc);
    let level = 0;
    const source = this._source;

    do {
      const curChar = source[this._currentIndex];

      if (curChar === lc) {
        level++;
      } else if (curChar === rc) {
        level--;
      }
      this.advance(1);
    } while (level > 0);
  }
|
||||
|
||||
  /**
   * @returns end ShaderPosition
   */
  // Skip the untaken remainder of a conditional chain up to its matching #endif.
  scanRemainMacro(): ShaderPosition {
    const startLvl = this.macroLvl;
    let directive = this.scanToken()!;
    while (!this.isEnd() && (directive.type !== MacroParserKeyword.endif || startLvl - 1 !== this.macroLvl)) {
      directive = this.scanToken()!;
    }
    return this.getShaderPosition(0);
  }

  /** Peek the next non-whitespace character without consuming anything. */
  peekNonSpace() {
    let current = this._currentIndex;
    while (/\s/.test(this._source[current])) {
      current += 1;
    }
    return this._source[current];
  }

  /** Scan a run of decimal digits into an int_constant token; errors when none found. */
  scanInteger() {
    const start = this._currentIndex;
    while (BaseLexer.isDigit(this.getCurCharCode())) {
      this.advance(1);
    }
    if (this._currentIndex === start) {
      this.throwError(this.getShaderPosition(0), "no integer found");
    }
    const integer = this._source.slice(start, this._currentIndex);

    const token = BaseToken.pool.get();
    token.set(MacroParserToken.int_constant, integer, this.getShaderPosition(0));
    return token;
  }
|
||||
|
||||
scanMacroBody(): BaseToken<MacroParserToken.line_remain> {
|
||||
this.skipSpace(false);
|
||||
let lexeme = "";
|
||||
const source = this._source;
|
||||
const sourceLength = source.length;
|
||||
|
||||
const start = this.getShaderPosition(0);
|
||||
while (this._currentIndex < sourceLength) {
|
||||
const charCode = source.charCodeAt(this._currentIndex);
|
||||
|
||||
// Check for line break (terminates macro definition), break when encounter "\n"
|
||||
if (charCode === 10) {
|
||||
break;
|
||||
}
|
||||
|
||||
// Check for comments (both single-line and multi-line)
|
||||
if (charCode === 47) {
|
||||
const nextIndex = this._currentIndex + 1;
|
||||
if (nextIndex < sourceLength) {
|
||||
const nextCharCode = source.charCodeAt(nextIndex);
|
||||
|
||||
// Single-line comment (terminates macro definition), break when encounter "//"
|
||||
if (nextCharCode === 47) {
|
||||
break;
|
||||
}
|
||||
|
||||
// Multi-line comment (skip but don't terminate)
|
||||
if (nextCharCode === 42) {
|
||||
this.advance(2); // Skip "/*"
|
||||
|
||||
// Skip until end of multi-line comment
|
||||
while (this._currentIndex + 1 < sourceLength) {
|
||||
const currentIndex = this._currentIndex;
|
||||
if (source.charCodeAt(currentIndex) === 42 && source.charCodeAt(currentIndex + 1) === 47) {
|
||||
this.advance(2); // Skip "*/
|
||||
break;
|
||||
}
|
||||
this.advance(1);
|
||||
}
|
||||
|
||||
lexeme += " "; // Replace comment with space
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Accumulate useful character
|
||||
lexeme += source[this._currentIndex];
|
||||
this.advance(1);
|
||||
}
|
||||
|
||||
if (lexeme === "") {
|
||||
return null;
|
||||
}
|
||||
|
||||
const valueToken = BaseToken.pool.get();
|
||||
valueToken.set(MacroParserToken.line_remain, lexeme, ShaderLab.createRange(start, this.getShaderPosition(0)));
|
||||
return valueToken;
|
||||
}
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
import { ExpandSegment } from "./MacroParser";
|
||||
// #if _VERBOSE
|
||||
import PpSourceMap, { MapRange } from "./sourceMap";
|
||||
// #endif
|
||||
|
||||
export class PpUtils {
|
||||
static expand(
|
||||
segments: ExpandSegment[],
|
||||
source: string,
|
||||
// #if _VERBOSE
|
||||
sourceMap?: PpSourceMap
|
||||
//#endif
|
||||
) {
|
||||
const ret: string[] = [];
|
||||
let startIdx = 0;
|
||||
let generatedIdx = 0;
|
||||
|
||||
for (const seg of segments) {
|
||||
const originSlice = source.slice(startIdx, seg.rangeInBlock.start.index);
|
||||
ret.push(originSlice, seg.replace);
|
||||
|
||||
const generatedIdxEnd = generatedIdx + originSlice.length + seg.replace.length;
|
||||
|
||||
// #if _VERBOSE
|
||||
const mapRange = new MapRange(seg.block, seg.rangeInBlock, {
|
||||
start: generatedIdx + originSlice.length,
|
||||
end: generatedIdxEnd
|
||||
});
|
||||
sourceMap?.addMapRange(mapRange);
|
||||
// #endif
|
||||
|
||||
startIdx = seg.rangeInBlock.end.index;
|
||||
generatedIdx = generatedIdxEnd;
|
||||
}
|
||||
ret.push(source.slice(startIdx));
|
||||
const result = ret.join("");
|
||||
|
||||
// Replace multiple consecutive newlines with a single newline to clean up the output
|
||||
return result.replace(/\n\s*\n+/g, "\n");
|
||||
}
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
export enum MacroParserToken {
|
||||
id,
|
||||
line_remain,
|
||||
chunk,
|
||||
int_constant,
|
||||
string_const,
|
||||
/** \>> */
|
||||
right_op,
|
||||
/** << */
|
||||
left_op,
|
||||
left_paren,
|
||||
right_paren,
|
||||
/** \>= */
|
||||
ge,
|
||||
/** <= */
|
||||
le,
|
||||
/** == */
|
||||
eq,
|
||||
/** != */
|
||||
neq,
|
||||
/** && */
|
||||
and,
|
||||
/** || */
|
||||
or,
|
||||
/** < */
|
||||
left_angle,
|
||||
/** \> */
|
||||
right_angle,
|
||||
/** \* */
|
||||
star,
|
||||
/** + */
|
||||
plus,
|
||||
/** \- */
|
||||
dash,
|
||||
/** ! */
|
||||
bang,
|
||||
/** \/ */
|
||||
slash,
|
||||
/** % */
|
||||
percent,
|
||||
|
||||
EOF = 100
|
||||
}
|
||||
|
||||
export enum MacroParserKeyword {
|
||||
define = 101,
|
||||
undef,
|
||||
if,
|
||||
ifdef,
|
||||
ifndef,
|
||||
else,
|
||||
elif,
|
||||
endif,
|
||||
|
||||
defined
|
||||
}
|
||||
|
||||
export type MacroParserConstant = boolean | number;
|
||||
@@ -1 +0,0 @@
|
||||
export * from "./MacroParser";
|
||||
@@ -1,78 +0,0 @@
|
||||
import { ShaderRange } from "../../common/ShaderRange";
|
||||
|
||||
// #if _VERBOSE
|
||||
export class BlockInfo {
|
||||
readonly sourceFile: string;
|
||||
readonly rangeInFile?: ShaderRange;
|
||||
readonly sourceMap?: PpSourceMap;
|
||||
|
||||
constructor(sourceFile?: string, rangeInFile?: ShaderRange, sourceMap?: PpSourceMap) {
|
||||
this.sourceFile = sourceFile ?? "__main__";
|
||||
this.rangeInFile = rangeInFile;
|
||||
this.sourceMap = sourceMap;
|
||||
}
|
||||
}
|
||||
|
||||
export class MapRange {
|
||||
sourceLoc: { block: BlockInfo; rangeInBlock: ShaderRange };
|
||||
generatedLoc: { start: number; end: number };
|
||||
|
||||
constructor(sourceBlock: BlockInfo, rangeInBlock: ShaderRange, generatedLoc: { start: number; end: number }) {
|
||||
this.sourceLoc = { block: sourceBlock, rangeInBlock };
|
||||
this.generatedLoc = generatedLoc;
|
||||
}
|
||||
|
||||
getSourceIndex(generatedIdx: number) {
|
||||
const { block, rangeInBlock } = this.sourceLoc;
|
||||
if (block.sourceMap) {
|
||||
if (block.sourceFile !== "__main__") return block.sourceMap.map(generatedIdx - this.generatedLoc.start);
|
||||
else if (rangeInBlock) {
|
||||
return {
|
||||
sourceFile: block.sourceFile,
|
||||
index: (block.rangeInFile?.start.index ?? 0) + rangeInBlock.start.index
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
index: generatedIdx - this.generatedLoc.start + rangeInBlock.start.index + (block.rangeInFile?.start.index ?? 0),
|
||||
sourceFile: this.sourceLoc.block.sourceFile
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default class PpSourceMap {
|
||||
readonly mapRanges: MapRange[] = [];
|
||||
|
||||
static rangeContains(range: MapRange["generatedLoc"], index: number) {
|
||||
return range.start <= index && range.end > index;
|
||||
}
|
||||
|
||||
addMapRange(mapRange: MapRange) {
|
||||
this.mapRanges.push(mapRange);
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns index
|
||||
*/
|
||||
map(index: number): { sourceFile: string; index: number } {
|
||||
let curRange: MapRange | undefined;
|
||||
for (const range of this.mapRanges) {
|
||||
const { generatedLoc } = range;
|
||||
if (PpSourceMap.rangeContains(generatedLoc, index)) {
|
||||
return range.getSourceIndex(index);
|
||||
} else if (range.generatedLoc.start < index) {
|
||||
curRange = range;
|
||||
continue;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!curRange) return { sourceFile: "__main__", index };
|
||||
return {
|
||||
index: index - curRange.generatedLoc.end + curRange.sourceLoc.rangeInBlock.end.index,
|
||||
sourceFile: curRange.sourceLoc.block.sourceFile
|
||||
};
|
||||
}
|
||||
}
|
||||
// #endif
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,34 +0,0 @@
|
||||
import { ETokenType } from "../common";
|
||||
import { NoneTerminal, GrammarSymbol } from "./GrammarSymbol";
|
||||
import Production from "../lalr/Production";
|
||||
|
||||
export class Grammar {
|
||||
readonly productions: Production[];
|
||||
|
||||
readonly startSymbol: NoneTerminal;
|
||||
|
||||
static create(start: NoneTerminal, productions: GrammarSymbol[][]) {
|
||||
const _ps = productions.map((gsl) => {
|
||||
return new Production(<NoneTerminal>gsl[0], gsl.slice(1));
|
||||
});
|
||||
return new Grammar(start, _ps);
|
||||
}
|
||||
|
||||
constructor(start: NoneTerminal, productions: Production[]) {
|
||||
this.startSymbol = start;
|
||||
productions.unshift(new Production(NoneTerminal.START, [start]));
|
||||
this.productions = productions;
|
||||
}
|
||||
|
||||
getProductionList(nonTerminal: NoneTerminal) {
|
||||
return this.productions.filter((item) => item.goal === nonTerminal);
|
||||
}
|
||||
|
||||
isNullableNT(NT: NoneTerminal) {
|
||||
return this.productions.find((item) => item.goal === NT && item.derivation[0] === ETokenType.EPSILON);
|
||||
}
|
||||
|
||||
getProductionByID(pid: number) {
|
||||
return Production.pool.get(pid);
|
||||
}
|
||||
}
|
||||
@@ -1,139 +0,0 @@
|
||||
import { TokenType } from "../common";
|
||||
|
||||
export type Terminal = TokenType;
|
||||
|
||||
export enum NoneTerminal {
|
||||
START = 2000,
|
||||
// galacean
|
||||
gs_shader_program,
|
||||
|
||||
// glsl
|
||||
global_declaration,
|
||||
variable_declaration,
|
||||
variable_declaration_list,
|
||||
variable_declaration_statement,
|
||||
array_specifier_list,
|
||||
array_specifier,
|
||||
ext_builtin_type_specifier_nonarray,
|
||||
precision_specifier,
|
||||
variable_identifier,
|
||||
variable_identifier_node,
|
||||
primary_expression,
|
||||
postfix_expression,
|
||||
integer_expression,
|
||||
function_call,
|
||||
function_call_generic,
|
||||
function_call_header_no_parameters,
|
||||
function_call_header_with_parameters,
|
||||
function_call_parameter_list,
|
||||
function_call_header,
|
||||
function_identifier,
|
||||
constructor_identifier,
|
||||
unary_expression,
|
||||
unary_operator,
|
||||
multiplicative_expression,
|
||||
additive_expression,
|
||||
shift_expression,
|
||||
relational_expression,
|
||||
equality_expression,
|
||||
and_expression,
|
||||
inclusive_or_expression,
|
||||
exclusive_or_expression,
|
||||
logical_and_expression,
|
||||
logical_xor_expression,
|
||||
logical_or_expression,
|
||||
conditional_expression,
|
||||
assignment_expression,
|
||||
assignment_operator,
|
||||
expression,
|
||||
integer_constant_expression,
|
||||
integer_constant_expression_operator,
|
||||
declaration,
|
||||
function_prototype,
|
||||
function_declarator,
|
||||
function_header_with_parameters,
|
||||
function_header,
|
||||
function_parameter_list,
|
||||
parameter_declarator,
|
||||
parameter_declaration,
|
||||
parameter_qualifier,
|
||||
parameter_type_specifier,
|
||||
init_declarator_list,
|
||||
single_declaration,
|
||||
fully_specified_type,
|
||||
type_qualifier,
|
||||
single_type_qualifier,
|
||||
storage_qualifier,
|
||||
precision_qualifier,
|
||||
interpolation_qualifier,
|
||||
invariant_qualifier,
|
||||
type_specifier,
|
||||
type_specifier_nonarray,
|
||||
type_specifier_no_prec,
|
||||
basic_type,
|
||||
struct_specifier,
|
||||
struct_declaration_list,
|
||||
struct_declaration,
|
||||
layout_qualifier,
|
||||
struct_declarator_list,
|
||||
struct_declarator,
|
||||
identifier_list,
|
||||
decl_identifier,
|
||||
initializer,
|
||||
initializer_list,
|
||||
declaration_statement,
|
||||
simple_statement,
|
||||
compound_statement_no_scope,
|
||||
statement_with_scope,
|
||||
compound_statement,
|
||||
statement,
|
||||
statement_list,
|
||||
iteration_statement_no_new_scope,
|
||||
expression_statement,
|
||||
selection_statement,
|
||||
selection_rest_statement,
|
||||
condition,
|
||||
conditionopt,
|
||||
iteration_statement,
|
||||
for_init_statement,
|
||||
for_rest_statement,
|
||||
jump_statement,
|
||||
external_declaration,
|
||||
function_definition,
|
||||
field_selection,
|
||||
bool_constant,
|
||||
function_identifier_node,
|
||||
typename_identifier_node,
|
||||
scope_brace,
|
||||
scope_end_brace,
|
||||
|
||||
// Macro
|
||||
macro_undef,
|
||||
macro_push_context,
|
||||
macro_pop_context,
|
||||
macro_elif_expression,
|
||||
macro_else_expression,
|
||||
global_macro_if_statement,
|
||||
global_macro_declaration,
|
||||
global_macro_branch,
|
||||
macro_struct_declaration,
|
||||
macro_struct_branch,
|
||||
macro_if_statement,
|
||||
macro_branch,
|
||||
macro_param_case_list,
|
||||
macro_param_block,
|
||||
macro_parameter_branch,
|
||||
macro_call_arg_case_list,
|
||||
macro_call_arg_block,
|
||||
macro_call_arg_branch,
|
||||
|
||||
// Macro call
|
||||
macro_call_symbol,
|
||||
macro_call_function,
|
||||
|
||||
_ignore
|
||||
}
|
||||
|
||||
export type GrammarSymbol = Terminal | NoneTerminal;
|
||||
|
||||
export type Derivation = GrammarSymbol[];
|
||||
@@ -1,94 +0,0 @@
|
||||
import { ShaderRange } from "../common";
|
||||
import { SymbolTable } from "../common/SymbolTable";
|
||||
import { SymbolTableStack } from "../common/SymbolTableStack";
|
||||
import { GSError, GSErrorName } from "../GSError";
|
||||
import { SymbolInfo } from "../parser/symbolTable";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { ASTNode, TreeNode } from "./AST";
|
||||
import { ShaderData } from "./ShaderInfo";
|
||||
import { NodeChild } from "./types";
|
||||
|
||||
import { Logger } from "@galacean/engine";
|
||||
import { MacroDefineList } from "../Preprocessor";
|
||||
|
||||
export type TranslationRule<T = any> = (sa: SemanticAnalyzer, ...tokens: NodeChild[]) => T;
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* The semantic analyzer of `ShaderLab` compiler.
|
||||
* - Build symbol table
|
||||
* - Static analysis
|
||||
*/
|
||||
export default class SemanticAnalyzer {
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
static _lookupSymbol: SymbolInfo = new SymbolInfo("", null);
|
||||
|
||||
semanticStack: TreeNode[] = [];
|
||||
acceptRule?: TranslationRule = undefined;
|
||||
symbolTableStack: SymbolTableStack<SymbolInfo, SymbolTable<SymbolInfo>> = new SymbolTableStack();
|
||||
curFunctionInfo: {
|
||||
header?: ASTNode.FunctionDeclarator;
|
||||
returnStatement?: ASTNode.JumpStatement;
|
||||
} = {};
|
||||
private _shaderData = new ShaderData();
|
||||
private _translationRuleTable: Map<number /** production id */, TranslationRule> = new Map();
|
||||
|
||||
private _macroDefineList: MacroDefineList;
|
||||
|
||||
// #if _VERBOSE
|
||||
readonly errors: Error[] = [];
|
||||
// #endif
|
||||
|
||||
get shaderData() {
|
||||
return this._shaderData;
|
||||
}
|
||||
|
||||
get macroDefineList(): MacroDefineList {
|
||||
return this._macroDefineList;
|
||||
}
|
||||
|
||||
constructor() {
|
||||
this.pushScope();
|
||||
}
|
||||
|
||||
reset(macroDefineList: MacroDefineList) {
|
||||
this._macroDefineList = macroDefineList;
|
||||
this.semanticStack.length = 0;
|
||||
this._shaderData = new ShaderData();
|
||||
this.symbolTableStack.clear();
|
||||
this.pushScope();
|
||||
// #if _VERBOSE
|
||||
this.errors.length = 0;
|
||||
// #endif
|
||||
}
|
||||
|
||||
pushScope() {
|
||||
this.symbolTableStack.pushScope(new SymbolTable<SymbolInfo>());
|
||||
}
|
||||
|
||||
popScope() {
|
||||
return this.symbolTableStack.popScope();
|
||||
}
|
||||
|
||||
addTranslationRule(pid: number, rule: TranslationRule) {
|
||||
this._translationRuleTable.set(pid, rule);
|
||||
}
|
||||
|
||||
getTranslationRule(pid: number) {
|
||||
return this._translationRuleTable.get(pid);
|
||||
}
|
||||
|
||||
reportError(loc: ShaderRange, message: string): void {
|
||||
// #if _VERBOSE
|
||||
this.errors.push(new GSError(GSErrorName.CompilationError, message, loc, ShaderLab._processingPassText));
|
||||
// #else
|
||||
Logger.error(message);
|
||||
// #endif
|
||||
}
|
||||
|
||||
reportWarning(loc: ShaderRange, message: string): void {
|
||||
Logger.warn(new GSError(GSErrorName.CompilationWarn, message, loc, ShaderLab._processingPassText).toString());
|
||||
}
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
import { SymbolInfo, SymbolTable } from "../parser/symbolTable";
|
||||
import { ASTNode } from "./AST";
|
||||
|
||||
export class ShaderData {
|
||||
symbolTable: SymbolTable<SymbolInfo>;
|
||||
|
||||
vertexMain: ASTNode.FunctionDefinition;
|
||||
fragmentMain: ASTNode.FunctionDefinition;
|
||||
|
||||
globalPrecisions: ASTNode.PrecisionSpecifier[] = [];
|
||||
|
||||
globalMacroDeclarations: ASTNode.GlobalDeclaration[] = [];
|
||||
|
||||
getOuterGlobalMacroDeclarations(): ASTNode.GlobalDeclaration[] {
|
||||
return this.globalMacroDeclarations.filter((node) => node.parent instanceof ASTNode.GLShaderProgram);
|
||||
}
|
||||
}
|
||||
@@ -1,143 +0,0 @@
|
||||
import { Logger } from "@galacean/engine";
|
||||
import { ETokenType } from "../common";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { GSError, GSErrorName } from "../GSError";
|
||||
import { LALR1 } from "../lalr";
|
||||
import { addTranslationRule, createGrammar } from "../lalr/CFG";
|
||||
import { EAction, StateActionTable, StateGotoTable } from "../lalr/types";
|
||||
import { MacroDefineList } from "../Preprocessor";
|
||||
import { ParserUtils } from "../ParserUtils";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
import { ASTNode, TreeNode } from "./AST";
|
||||
import { Grammar } from "./Grammar";
|
||||
import { GrammarSymbol, NoneTerminal } from "./GrammarSymbol";
|
||||
import SematicAnalyzer from "./SemanticAnalyzer";
|
||||
import { TraceStackItem } from "./types";
|
||||
|
||||
/**
|
||||
* The syntax parser and sematic analyzer of `ShaderLab` compiler
|
||||
*/
|
||||
export class ShaderTargetParser {
|
||||
readonly actionTable: StateActionTable;
|
||||
readonly gotoTable: StateGotoTable;
|
||||
readonly grammar: Grammar;
|
||||
readonly sematicAnalyzer: SematicAnalyzer;
|
||||
private _traceBackStack: (TraceStackItem | number)[] = [];
|
||||
|
||||
private get curState() {
|
||||
return this._traceBackStack[this._traceBackStack.length - 1] as number;
|
||||
}
|
||||
private get stateActionTable() {
|
||||
return this.actionTable.get(this.curState)!;
|
||||
}
|
||||
private get stateGotoTable() {
|
||||
return this.gotoTable.get(this.curState);
|
||||
}
|
||||
|
||||
// #if _VERBOSE
|
||||
/** @internal */
|
||||
get errors() {
|
||||
return this.sematicAnalyzer.errors;
|
||||
}
|
||||
// #endif
|
||||
|
||||
static _singleton: ShaderTargetParser;
|
||||
|
||||
static create() {
|
||||
if (!this._singleton) {
|
||||
const grammar = createGrammar();
|
||||
const generator = new LALR1(grammar);
|
||||
generator.generate();
|
||||
this._singleton = new ShaderTargetParser(generator.actionTable, generator.gotoTable, grammar);
|
||||
addTranslationRule(this._singleton.sematicAnalyzer);
|
||||
}
|
||||
|
||||
return this._singleton;
|
||||
}
|
||||
|
||||
private constructor(actionTable: StateActionTable, gotoTable: StateGotoTable, grammar: Grammar) {
|
||||
this.actionTable = actionTable;
|
||||
this.gotoTable = gotoTable;
|
||||
this.grammar = grammar;
|
||||
this.sematicAnalyzer = new SematicAnalyzer();
|
||||
}
|
||||
|
||||
parse(tokens: Generator<BaseToken, BaseToken>, macroDefineList: MacroDefineList): ASTNode.GLShaderProgram | null {
|
||||
this.sematicAnalyzer.reset(macroDefineList);
|
||||
const start = performance.now();
|
||||
const { _traceBackStack: traceBackStack, sematicAnalyzer } = this;
|
||||
traceBackStack.push(0);
|
||||
|
||||
let nextToken = tokens.next();
|
||||
let loopCount = 0;
|
||||
while (true) {
|
||||
loopCount += 1;
|
||||
const token = nextToken.value;
|
||||
|
||||
const actionInfo = this.stateActionTable.get(token.type);
|
||||
if (actionInfo?.action === EAction.Shift) {
|
||||
traceBackStack.push(token, actionInfo.target!);
|
||||
nextToken = tokens.next();
|
||||
} else if (actionInfo?.action === EAction.Accept) {
|
||||
Logger.info(
|
||||
`[Task - AST compilation] Accept! State automata run ${loopCount} times! cost time ${
|
||||
performance.now() - start
|
||||
}ms`
|
||||
);
|
||||
sematicAnalyzer.acceptRule?.(sematicAnalyzer);
|
||||
return sematicAnalyzer.semanticStack.pop() as ASTNode.GLShaderProgram;
|
||||
} else if (actionInfo?.action === EAction.Reduce) {
|
||||
const target = actionInfo.target!;
|
||||
const reduceProduction = this.grammar.getProductionByID(target)!;
|
||||
const translationRule = sematicAnalyzer.getTranslationRule(reduceProduction.id);
|
||||
|
||||
const values: (TreeNode | BaseToken)[] = [];
|
||||
|
||||
for (let i = reduceProduction.derivation.length - 1; i >= 0; i--) {
|
||||
if (reduceProduction.derivation[i] === ETokenType.EPSILON) continue;
|
||||
traceBackStack.pop();
|
||||
const token = traceBackStack.pop();
|
||||
if (token instanceof BaseToken) {
|
||||
values.unshift(token);
|
||||
} else {
|
||||
const astNode = sematicAnalyzer.semanticStack.pop()!;
|
||||
values.unshift(astNode);
|
||||
}
|
||||
}
|
||||
translationRule?.(sematicAnalyzer, ...values);
|
||||
|
||||
const gotoTable = this.stateGotoTable;
|
||||
traceBackStack.push(reduceProduction.goal);
|
||||
|
||||
const nextState = gotoTable?.get(reduceProduction.goal)!;
|
||||
traceBackStack.push(nextState);
|
||||
continue;
|
||||
} else {
|
||||
const error = ShaderLabUtils.createGSError(
|
||||
`Unexpected token ${token.lexeme}`,
|
||||
GSErrorName.CompilationError,
|
||||
ShaderLab._processingPassText,
|
||||
token.location
|
||||
);
|
||||
// #if _VERBOSE
|
||||
this.sematicAnalyzer.errors.push(<GSError>error);
|
||||
// #endif
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// #if _VERBOSE
|
||||
private _printStack(nextToken: BaseToken) {
|
||||
let str = "";
|
||||
for (let i = 0; i < this._traceBackStack.length - 1; i++) {
|
||||
const state = <NoneTerminal>this._traceBackStack[i++];
|
||||
const token = this._traceBackStack[i];
|
||||
str += `State${state} - ${(<BaseToken>token).lexeme ?? ParserUtils.toString(token as GrammarSymbol)}; `;
|
||||
}
|
||||
str += `State${this._traceBackStack[this._traceBackStack.length - 1]} --- ${nextToken.lexeme}`;
|
||||
Logger.info(str);
|
||||
}
|
||||
// #endif
|
||||
}
|
||||
@@ -1,642 +0,0 @@
|
||||
// For cfg conflict test, used by bison
|
||||
|
||||
%token id
|
||||
%token INT_CONSTANT
|
||||
%token FLOAT_CONSTANT
|
||||
%token true
|
||||
%token false
|
||||
|
||||
%token void
|
||||
%token float
|
||||
%token int
|
||||
%token mat4
|
||||
%token struct
|
||||
%token highp
|
||||
%token mediemp
|
||||
%token lowp
|
||||
|
||||
%token const
|
||||
%token in
|
||||
%token out
|
||||
%token inout
|
||||
%token centroid
|
||||
|
||||
%token SMOOTH
|
||||
%token FLAT
|
||||
|
||||
%token PRECISE
|
||||
%token PRECISION
|
||||
|
||||
%token INVARIANT
|
||||
%token layout
|
||||
%token location
|
||||
|
||||
%token or
|
||||
%token xor
|
||||
%token and
|
||||
%token eq
|
||||
%token neq
|
||||
%token ge
|
||||
%token le
|
||||
%token left_op
|
||||
%token right_op
|
||||
%token INC_OP
|
||||
%token DEC_OP
|
||||
|
||||
%token MUL_ASSIGN
|
||||
%token DIV_ASSIGN
|
||||
%token MOD_ASSIGN
|
||||
%token ADD_ASSIGN
|
||||
%token SUB_ASSIGN
|
||||
%token LEFT_ASSIGN
|
||||
%token RIGHT_ASSIGN
|
||||
%token AND_ASSIGN
|
||||
%token XOR_ASSIGN
|
||||
%token OR_ASSIGN
|
||||
|
||||
%token IF ELSE WHILE FOR
|
||||
|
||||
%token CONTINUE BREAK RETURN DISCARD
|
||||
|
||||
%token MACRO_IF MACRO_IFDEF MACRO_IFNDEF MACRO_ELIF MACRO_ELSE MACRO_ENDIF
|
||||
%token MACRO_UNDEF
|
||||
%token MACRO_DEFINE_EXPRESSION MACRO_CONDITIONAL_EXPRESSION
|
||||
%token MACRO_CALL
|
||||
|
||||
|
||||
%%
|
||||
gs_shader_program:
|
||||
global_declaration
|
||||
| gs_shader_program global_declaration
|
||||
;
|
||||
|
||||
macro_call_symbol:
|
||||
MACRO_CALL
|
||||
;
|
||||
|
||||
macro_call_function:
|
||||
macro_call_symbol '(' ')'
|
||||
| macro_call_symbol '(' function_call_parameter_list ')'
|
||||
;
|
||||
|
||||
macro_undef:
|
||||
MACRO_UNDEF id
|
||||
| MACRO_UNDEF MACRO_CALL
|
||||
;
|
||||
|
||||
macro_push_context:
|
||||
MACRO_IF MACRO_CONDITIONAL_EXPRESSION
|
||||
| MACRO_IFDEF id
|
||||
| MACRO_IFNDEF id
|
||||
| MACRO_IFDEF macro_call_symbol
|
||||
| MACRO_IFNDEF macro_call_symbol
|
||||
;
|
||||
|
||||
macro_pop_context:
|
||||
MACRO_ENDIF
|
||||
;
|
||||
|
||||
macro_elif_expression:
|
||||
MACRO_ELIF MACRO_CONDITIONAL_EXPRESSION
|
||||
;
|
||||
|
||||
macro_else_expression:
|
||||
MACRO_ELSE
|
||||
;
|
||||
|
||||
global_declaration:
|
||||
precision_specifier
|
||||
| variable_declaration_statement
|
||||
| struct_specifier
|
||||
| function_definition
|
||||
| global_macro_if_statement
|
||||
| macro_undef
|
||||
| MACRO_DEFINE_EXPRESSION
|
||||
;
|
||||
|
||||
global_macro_declaration:
|
||||
global_declaration
|
||||
| global_macro_declaration global_declaration
|
||||
|
||||
|
||||
global_macro_if_statement:
|
||||
macro_push_context global_macro_declaration global_macro_branch
|
||||
| macro_push_context global_macro_branch
|
||||
;
|
||||
|
||||
global_macro_branch:
|
||||
macro_pop_context
|
||||
| macro_elif_expression global_macro_declaration global_macro_branch
|
||||
| macro_else_expression global_macro_declaration macro_pop_context
|
||||
| macro_elif_expression global_macro_branch
|
||||
| macro_else_expression macro_pop_context
|
||||
;
|
||||
|
||||
|
||||
variable_declaration:
|
||||
fully_specified_type id
|
||||
| fully_specified_type id array_specifier
|
||||
| fully_specified_type id '=' initializer
|
||||
;
|
||||
|
||||
variable_declaration_list:
|
||||
variable_declaration
|
||||
| variable_declaration_list ',' id
|
||||
| variable_declaration_list ',' id array_specifier
|
||||
;
|
||||
|
||||
variable_declaration_statement:
|
||||
variable_declaration_list ';'
|
||||
|
||||
variable_identifier:
|
||||
id
|
||||
| macro_call_symbol
|
||||
| macro_call_function
|
||||
;
|
||||
|
||||
precision_specifier:
|
||||
PRECISION precision_qualifier ext_builtin_type_specifier_nonarray ';'
|
||||
;
|
||||
|
||||
ext_builtin_type_specifier_nonarray:
|
||||
void
|
||||
| float
|
||||
| int
|
||||
| mat4
|
||||
;
|
||||
|
||||
type_specifier_nonarray:
|
||||
ext_builtin_type_specifier_nonarray
|
||||
| id
|
||||
;
|
||||
|
||||
struct_specifier:
|
||||
struct id '{' struct_declaration_list '}' ;
|
||||
| struct '{' struct_declaration_list '}' ;
|
||||
;
|
||||
|
||||
struct_declaration_list:
|
||||
struct_declaration
|
||||
| struct_declaration_list struct_declaration
|
||||
;
|
||||
|
||||
struct_declaration:
|
||||
type_specifier struct_declarator_list ';'
|
||||
| type_qualifier type_specifier struct_declarator_list ';'
|
||||
| layout_qualifier type_specifier struct_declarator ';'
|
||||
| macro_struct_declaration
|
||||
;
|
||||
|
||||
macro_struct_declaration:
|
||||
macro_push_context struct_declaration_list macro_struct_branch
|
||||
| macro_push_context macro_struct_branch
|
||||
;
|
||||
|
||||
macro_struct_branch:
|
||||
macro_pop_context
|
||||
| macro_elif_expression struct_declaration_list macro_struct_branch
|
||||
| macro_else_expression struct_declaration_list macro_pop_context
|
||||
| macro_elif_expression macro_struct_branch
|
||||
| macro_else_expression macro_pop_context
|
||||
;
|
||||
|
||||
layout_qualifier:
|
||||
layout '(' location '=' INT_CONSTANT ')'
|
||||
| layout '(' location '=' id ')'
|
||||
|
||||
|
||||
struct_declarator_list:
|
||||
struct_declarator
|
||||
| struct_declarator_list ',' struct_declarator
|
||||
;
|
||||
|
||||
struct_declarator:
|
||||
id
|
||||
| id array_specifier
|
||||
;
|
||||
|
||||
array_specifier:
|
||||
'[' ']'
|
||||
| '[' integer_constant_expression ']'
|
||||
;
|
||||
|
||||
type_specifier:
|
||||
type_specifier_nonarray
|
||||
| ext_builtin_type_specifier_nonarray array_specifier
|
||||
;
|
||||
|
||||
precision_qualifier:
|
||||
highp
|
||||
| mediemp
|
||||
| lowp
|
||||
;
|
||||
|
||||
type_qualifier:
|
||||
single_type_qualifier
|
||||
| type_qualifier single_type_qualifier
|
||||
;
|
||||
|
||||
single_type_qualifier:
|
||||
storage_qualifier
|
||||
| precision_qualifier
|
||||
| interpolation_qualifier
|
||||
| invariant_qualifier
|
||||
| PRECISE
|
||||
;
|
||||
|
||||
storage_qualifier:
|
||||
const
|
||||
| in
|
||||
| out
|
||||
| inout
|
||||
| centroid
|
||||
;
|
||||
|
||||
interpolation_qualifier:
|
||||
SMOOTH
|
||||
| FLAT
|
||||
;
|
||||
|
||||
invariant_qualifier:
|
||||
INVARIANT
|
||||
;
|
||||
|
||||
integer_constant_expression_operator:
|
||||
'+'
|
||||
| '-'
|
||||
| '*'
|
||||
| '/'
|
||||
| '%'
|
||||
;
|
||||
|
||||
integer_constant_expression:
|
||||
variable_identifier
|
||||
| INT_CONSTANT
|
||||
| integer_constant_expression integer_constant_expression_operator INT_CONSTANT
|
||||
| integer_constant_expression integer_constant_expression_operator variable_identifier
|
||||
;
|
||||
|
||||
conditional_expression:
|
||||
logical_or_expression
|
||||
| logical_or_expression '?' expression ':' assignment_expression
|
||||
;
|
||||
|
||||
logical_or_expression:
|
||||
logical_xor_expression
|
||||
| logical_or_expression or logical_xor_expression
|
||||
;
|
||||
|
||||
logical_xor_expression:
|
||||
logical_and_expression
|
||||
| logical_xor_expression xor logical_and_expression
|
||||
;
|
||||
|
||||
logical_and_expression:
|
||||
inclusive_or_expression
|
||||
| logical_and_expression and inclusive_or_expression
|
||||
;
|
||||
|
||||
inclusive_or_expression:
|
||||
exclusive_or_expression
|
||||
| inclusive_or_expression '|' exclusive_or_expression
|
||||
;
|
||||
|
||||
exclusive_or_expression:
|
||||
and_expression
|
||||
| exclusive_or_expression '^' and_expression
|
||||
;
|
||||
|
||||
and_expression:
|
||||
equality_expression
|
||||
| and_expression '&' equality_expression
|
||||
;
|
||||
|
||||
equality_expression:
|
||||
relational_expression
|
||||
| equality_expression eq relational_expression
|
||||
| equality_expression neq relational_expression
|
||||
;
|
||||
|
||||
relational_expression:
|
||||
shift_expression
|
||||
| relational_expression '<' shift_expression
|
||||
| relational_expression '>' shift_expression
|
||||
| relational_expression le shift_expression
|
||||
| relational_expression ge shift_expression
|
||||
;
|
||||
|
||||
shift_expression:
|
||||
additive_expression
|
||||
| shift_expression left_op additive_expression
|
||||
| shift_expression right_op additive_expression
|
||||
;
|
||||
|
||||
additive_expression:
|
||||
multiplicative_expression
|
||||
| additive_expression '+' multiplicative_expression
|
||||
| additive_expression '-' multiplicative_expression
|
||||
;
|
||||
|
||||
multiplicative_expression:
|
||||
unary_expression
|
||||
| multiplicative_expression '*' unary_expression
|
||||
| multiplicative_expression '/' unary_expression
|
||||
| multiplicative_expression '%' unary_expression
|
||||
;
|
||||
|
||||
unary_expression:
|
||||
postfix_expression
|
||||
| INC_OP unary_expression
|
||||
| DEC_OP unary_expression
|
||||
| unary_operator unary_expression
|
||||
|
||||
unary_operator:
|
||||
'+'
|
||||
| '-'
|
||||
| '!'
|
||||
| '~'
|
||||
;
|
||||
|
||||
postfix_expression:
|
||||
primary_expression
|
||||
| postfix_expression '[' expression ']'
|
||||
| function_call
|
||||
| postfix_expression '.' id
|
||||
| postfix_expression '.' function_call
|
||||
| postfix_expression INC_OP
|
||||
| postfix_expression DEC_OP
|
||||
;
|
||||
|
||||
primary_expression:
|
||||
variable_identifier
|
||||
| INT_CONSTANT
|
||||
| FLOAT_CONSTANT
|
||||
| true
|
||||
| false
|
||||
| '(' expression ')'
|
||||
;
|
||||
|
||||
expression:
|
||||
assignment_expression
|
||||
| expression ',' assignment_expression
|
||||
;
|
||||
|
||||
assignment_expression:
|
||||
conditional_expression
|
||||
| unary_expression assignment_operator assignment_expression
|
||||
;
|
||||
|
||||
assignment_operator:
|
||||
'='
|
||||
| MUL_ASSIGN
|
||||
| DIV_ASSIGN
|
||||
| MOD_ASSIGN
|
||||
| ADD_ASSIGN
|
||||
| SUB_ASSIGN
|
||||
| LEFT_ASSIGN
|
||||
| RIGHT_ASSIGN
|
||||
| AND_ASSIGN
|
||||
| XOR_ASSIGN
|
||||
| OR_ASSIGN
|
||||
;
|
||||
|
||||
function_call:
|
||||
function_call_generic
|
||||
;
|
||||
|
||||
function_call_generic:
|
||||
function_identifier '(' function_call_parameter_list ')'
|
||||
| function_identifier '(' ')'
|
||||
| function_identifier '(' void ')'
|
||||
;
|
||||
|
||||
function_call_parameter_list:
|
||||
assignment_expression
|
||||
| function_call_parameter_list ',' assignment_expression
|
||||
| macro_call_arg_block
|
||||
| function_call_parameter_list macro_call_arg_block
|
||||
;
|
||||
|
||||
|
||||
macro_call_arg_case_list:
|
||||
assignment_expression
|
||||
| ',' assignment_expression
|
||||
| macro_call_arg_block
|
||||
| macro_call_arg_case_list macro_call_arg_block
|
||||
| macro_call_arg_case_list ',' assignment_expression
|
||||
;
|
||||
|
||||
macro_call_arg_block:
|
||||
macro_push_context macro_call_arg_branch
|
||||
| macro_push_context macro_call_arg_case_list macro_call_arg_branch
|
||||
;
|
||||
|
||||
macro_call_arg_branch:
|
||||
macro_pop_context
|
||||
| macro_elif_expression macro_call_arg_case_list macro_call_arg_branch
|
||||
| macro_else_expression macro_call_arg_case_list macro_pop_context
|
||||
| macro_elif_expression macro_call_arg_branch
|
||||
| macro_else_expression macro_pop_context
|
||||
;
|
||||
|
||||
function_identifier:
|
||||
type_specifier
|
||||
;
|
||||
|
||||
function_definition:
|
||||
function_prototype compound_statement_no_scope
|
||||
;
|
||||
|
||||
function_prototype:
|
||||
function_declarator ')'
|
||||
;
|
||||
|
||||
function_declarator:
|
||||
function_header
|
||||
| function_header function_parameter_list
|
||||
;
|
||||
|
||||
function_header:
|
||||
fully_specified_type id '('
|
||||
;
|
||||
|
||||
fully_specified_type:
|
||||
type_specifier
|
||||
| type_qualifier type_specifier
|
||||
;
|
||||
|
||||
|
||||
function_parameter_list:
|
||||
parameter_declaration
|
||||
| function_parameter_list ',' parameter_declaration
|
||||
| macro_param_block
|
||||
| function_parameter_list macro_param_block
|
||||
;
|
||||
|
||||
macro_param_case_list:
|
||||
parameter_declaration
|
||||
| ',' parameter_declaration
|
||||
| macro_param_block
|
||||
| macro_param_case_list macro_param_block
|
||||
| macro_param_case_list ',' parameter_declaration
|
||||
;
|
||||
|
||||
macro_param_block:
|
||||
macro_push_context macro_parameter_branch
|
||||
| macro_push_context macro_param_case_list macro_parameter_branch
|
||||
;
|
||||
|
||||
macro_parameter_branch:
|
||||
macro_pop_context
|
||||
| macro_elif_expression macro_param_case_list macro_parameter_branch
|
||||
| macro_else_expression macro_param_case_list macro_pop_context
|
||||
| macro_elif_expression macro_parameter_branch
|
||||
| macro_else_expression macro_pop_context
|
||||
;
|
||||
|
||||
|
||||
parameter_declaration:
|
||||
type_qualifier parameter_declarator
|
||||
| parameter_declarator
|
||||
| macro_call_symbol
|
||||
| macro_call_function
|
||||
;
|
||||
|
||||
parameter_declarator:
|
||||
type_specifier id
|
||||
| type_specifier id array_specifier
|
||||
;
|
||||
|
||||
statement_list:
|
||||
statement
|
||||
| statement_list statement
|
||||
;
|
||||
|
||||
statement:
|
||||
compound_statement
|
||||
| simple_statement
|
||||
;
|
||||
|
||||
compound_statement_no_scope:
|
||||
'{' '}'
|
||||
| '{' statement_list '}'
|
||||
|
||||
compound_statement:
|
||||
'{' '}'
|
||||
| scope_brace statement_list scope_end_brace
|
||||
;
|
||||
|
||||
simple_statement:
|
||||
declaration
|
||||
| expression_statement
|
||||
| selection_statement
|
||||
| iteration_statement
|
||||
| jump_statement
|
||||
| macro_if_statement
|
||||
| macro_undef
|
||||
| MACRO_DEFINE_EXPRESSION
|
||||
;
|
||||
|
||||
declaration:
|
||||
function_prototype ';'
|
||||
| init_declarator_list ';'
|
||||
| type_qualifier id ';'
|
||||
| type_qualifier id identifier_list ';'
|
||||
| precision_specifier
|
||||
;
|
||||
|
||||
identifier_list:
|
||||
',' id
|
||||
| identifier_list ',' id
|
||||
;
|
||||
|
||||
init_declarator_list:
|
||||
single_declaration
|
||||
| init_declarator_list ',' id
|
||||
| init_declarator_list ',' id array_specifier
|
||||
| init_declarator_list ',' id array_specifier '=' initializer
|
||||
| init_declarator_list ',' id '=' initializer
|
||||
;
|
||||
|
||||
single_declaration:
|
||||
fully_specified_type id
|
||||
| fully_specified_type id array_specifier
|
||||
| fully_specified_type id '=' initializer
|
||||
| fully_specified_type id array_specifier '=' initializer
|
||||
;
|
||||
|
||||
initializer:
|
||||
assignment_expression
|
||||
| '{' initializer_list '}'
|
||||
;
|
||||
|
||||
initializer_list:
|
||||
initializer
|
||||
| initializer_list ',' initializer
|
||||
;
|
||||
|
||||
expression_statement:
|
||||
';'
|
||||
| expression ';'
|
||||
;
|
||||
|
||||
// Dangling else ambiguity
|
||||
selection_statement:
|
||||
IF '(' expression ')' statement
|
||||
| IF '(' expression ')' statement ELSE statement
|
||||
;
|
||||
|
||||
macro_if_statement:
|
||||
macro_push_context statement_list macro_branch
|
||||
| macro_push_context macro_branch
|
||||
;
|
||||
|
||||
macro_branch:
|
||||
macro_pop_context
|
||||
| macro_elif_expression statement_list macro_branch
|
||||
| macro_else_expression statement_list macro_pop_context
|
||||
| macro_elif_expression macro_branch
|
||||
| macro_else_expression macro_pop_context
|
||||
;
|
||||
|
||||
iteration_statement:
|
||||
WHILE '(' condition ')' statement
|
||||
| FOR '(' for_init_statement for_rest_statement ')' statement
|
||||
;
|
||||
|
||||
for_init_statement:
|
||||
expression_statement
|
||||
| declaration
|
||||
;
|
||||
|
||||
condition:
|
||||
expression
|
||||
| fully_specified_type id '=' initializer
|
||||
;
|
||||
|
||||
for_rest_statement:
|
||||
conditionopt ';'
|
||||
| conditionopt ';' expression
|
||||
;
|
||||
|
||||
conditionopt:
|
||||
/** empty */
|
||||
| condition
|
||||
;
|
||||
|
||||
jump_statement:
|
||||
CONTINUE ';'
|
||||
| BREAK ';'
|
||||
| RETURN ';'
|
||||
| RETURN expression ';'
|
||||
| DISCARD ';'
|
||||
;
|
||||
|
||||
scope_brace:
|
||||
'{'
|
||||
;
|
||||
|
||||
scope_end_brace:
|
||||
'}'
|
||||
;
|
||||
%%
|
||||
@@ -1,647 +0,0 @@
|
||||
import { GalaceanDataType, TypeAny } from "../../common";
|
||||
import { Keyword } from "../../common/enums/Keyword";
|
||||
import { EShaderStage } from "../../common/enums/ShaderStage";
|
||||
|
||||
export enum EGenType {
|
||||
GenType = 200,
|
||||
GenIntType,
|
||||
GenUintType,
|
||||
GenBoolType,
|
||||
Mat,
|
||||
BoolVec,
|
||||
IntVec,
|
||||
UintVec,
|
||||
Vec,
|
||||
GVec4,
|
||||
GSampler2D,
|
||||
GSampler3D,
|
||||
GSamplerCube,
|
||||
GSampler2DArray
|
||||
}
|
||||
|
||||
export type NonGenericGalaceanType = Exclude<GalaceanDataType, string>;
|
||||
type BuiltinType = NonGenericGalaceanType | EGenType;
|
||||
|
||||
function isGenericType(t: BuiltinType) {
|
||||
return t >= EGenType.GenType && t <= EGenType.GSampler2DArray;
|
||||
}
|
||||
|
||||
const BuiltinFunctionTable: Map<string, BuiltinFunction[]> = new Map();
|
||||
|
||||
export class BuiltinFunction {
|
||||
ident: string;
|
||||
readonly args: BuiltinType[];
|
||||
readonly scope: EShaderStage;
|
||||
|
||||
private _returnType: BuiltinType;
|
||||
private _realReturnType: NonGenericGalaceanType;
|
||||
|
||||
get realReturnType(): NonGenericGalaceanType {
|
||||
return this._realReturnType;
|
||||
}
|
||||
|
||||
private constructor(ident: string, returnType: BuiltinType, scope: EShaderStage, ...args: BuiltinType[]) {
|
||||
this.ident = ident;
|
||||
this._returnType = returnType;
|
||||
this.args = args;
|
||||
this.scope = scope;
|
||||
}
|
||||
|
||||
static getReturnType(fn: BuiltinFunction, genType?: NonGenericGalaceanType) {
|
||||
if (!isGenericType(fn._returnType)) return fn._returnType as NonGenericGalaceanType;
|
||||
return genType;
|
||||
}
|
||||
|
||||
static _create(ident: string, returnType: BuiltinType, ...args: BuiltinType[]) {
|
||||
const fn = new BuiltinFunction(ident, returnType, EShaderStage.ALL, ...args);
|
||||
const list = BuiltinFunctionTable.get(ident) ?? [];
|
||||
list.push(fn);
|
||||
BuiltinFunctionTable.set(ident, list);
|
||||
}
|
||||
|
||||
static _createWithScop(ident: string, returnType: BuiltinType, scope: EShaderStage, ...args: BuiltinType[]) {
|
||||
const fn = new BuiltinFunction(ident, returnType, scope, ...args);
|
||||
const list = BuiltinFunctionTable.get(ident) ?? [];
|
||||
list.push(fn);
|
||||
BuiltinFunctionTable.set(ident, list);
|
||||
}
|
||||
|
||||
// TODO: correct the type deduce, consider the following case:
|
||||
// It incorrectly inferred the type of the following expression as float, which should be vec3.
|
||||
// max(scatterAmt.xyz,0.0001)
|
||||
static getFn(ident: string, parameterTypes: NonGenericGalaceanType[]): BuiltinFunction | undefined {
|
||||
const list = BuiltinFunctionTable.get(ident);
|
||||
if (list) {
|
||||
for (let length = list.length, i = 0; i < length; i++) {
|
||||
const fn = list[i];
|
||||
const fnArgs = fn.args;
|
||||
const argLength = fnArgs.length;
|
||||
if (argLength !== parameterTypes.length) continue;
|
||||
// Try to match generic parameter type.
|
||||
let returnType = TypeAny;
|
||||
let found = true;
|
||||
for (let i = 0; i < argLength; i++) {
|
||||
const curFnArg = fnArgs[i];
|
||||
if (isGenericType(curFnArg)) {
|
||||
if (returnType === TypeAny) returnType = parameterTypes[i];
|
||||
} else {
|
||||
if (curFnArg !== parameterTypes[i] && parameterTypes[i] !== TypeAny) {
|
||||
found = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (found) {
|
||||
fn._realReturnType = returnType;
|
||||
return fn;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static isExist(ident: string) {
|
||||
return !!BuiltinFunctionTable.get(ident);
|
||||
}
|
||||
}
|
||||
|
||||
BuiltinFunction._create("radians", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("degrees", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("sin", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("cos", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("tan", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("asin", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("acos", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("atan", EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("atan", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("sinh", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("cosh", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("tanh", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("asinh", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("acosh", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("atanh", EGenType.GenType, EGenType.GenType);
|
||||
|
||||
BuiltinFunction._create("pow", EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("exp", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("log", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("exp2", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("log2", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("sqrt", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("inversesqrt", EGenType.GenType, EGenType.GenType);
|
||||
|
||||
BuiltinFunction._create("abs", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("abs", EGenType.GenIntType, EGenType.GenIntType);
|
||||
BuiltinFunction._create("sign", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("sign", EGenType.GenIntType, EGenType.GenIntType);
|
||||
BuiltinFunction._create("floor", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("trunc", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("round", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("roundEven", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("ceil", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("fract", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("mod", EGenType.GenType, EGenType.GenType, Keyword.FLOAT);
|
||||
BuiltinFunction._create("mod", EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("min", EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("min", EGenType.GenType, EGenType.GenType, Keyword.FLOAT);
|
||||
BuiltinFunction._create("min", EGenType.GenIntType, EGenType.GenIntType, EGenType.GenIntType);
|
||||
BuiltinFunction._create("min", EGenType.GenIntType, EGenType.GenIntType, Keyword.INT);
|
||||
BuiltinFunction._create("min", EGenType.GenUintType, EGenType.GenUintType, EGenType.GenUintType);
|
||||
BuiltinFunction._create("min", EGenType.GenUintType, EGenType.GenUintType, Keyword.UINT);
|
||||
BuiltinFunction._create("max", EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("max", EGenType.GenType, EGenType.GenType, Keyword.FLOAT);
|
||||
BuiltinFunction._create("max", EGenType.GenIntType, EGenType.GenIntType, EGenType.GenIntType);
|
||||
BuiltinFunction._create("max", EGenType.GenIntType, EGenType.GenIntType, Keyword.INT);
|
||||
BuiltinFunction._create("clamp", EGenType.GenType, EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("clamp", EGenType.GenType, EGenType.GenType, Keyword.FLOAT, Keyword.FLOAT);
|
||||
BuiltinFunction._create("clamp", EGenType.GenIntType, EGenType.GenIntType, EGenType.GenIntType, EGenType.GenIntType);
|
||||
BuiltinFunction._create("clamp", EGenType.GenIntType, EGenType.GenIntType, Keyword.INT, Keyword.INT);
|
||||
BuiltinFunction._create(
|
||||
"clamp",
|
||||
EGenType.GenUintType,
|
||||
EGenType.GenUintType,
|
||||
EGenType.GenUintType,
|
||||
EGenType.GenUintType
|
||||
);
|
||||
BuiltinFunction._create("clamp", EGenType.GenUintType, EGenType.GenUintType, Keyword.UINT, Keyword.UINT);
|
||||
BuiltinFunction._create("mix", EGenType.GenType, EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("mix", EGenType.GenType, EGenType.GenType, EGenType.GenType, Keyword.FLOAT);
|
||||
BuiltinFunction._create("mix", EGenType.GenType, EGenType.GenType, EGenType.GenType, EGenType.GenBoolType);
|
||||
BuiltinFunction._create("step", EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("step", EGenType.GenType, Keyword.FLOAT, EGenType.GenType);
|
||||
BuiltinFunction._create("smoothstep", EGenType.GenType, EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("smoothstep", EGenType.GenType, Keyword.FLOAT, Keyword.FLOAT, EGenType.GenType);
|
||||
BuiltinFunction._create("isnan", EGenType.GenBoolType, EGenType.GenType);
|
||||
BuiltinFunction._create("isinf", EGenType.GenBoolType, EGenType.GenType);
|
||||
BuiltinFunction._create("floatBitsToInt", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("floatBitsToUint", EGenType.GenUintType, EGenType.GenType);
|
||||
BuiltinFunction._create("intBitsToFloat", EGenType.GenType, EGenType.GenIntType);
|
||||
BuiltinFunction._create("uintBitsToFloat", EGenType.GenType, EGenType.GenUintType);
|
||||
|
||||
BuiltinFunction._create("packSnorm2x16", Keyword.UINT, Keyword.VEC2);
|
||||
BuiltinFunction._create("unpackSnorm2x16", Keyword.VEC2, Keyword.UINT);
|
||||
BuiltinFunction._create("packUnorm2x16", Keyword.UINT, Keyword.VEC2);
|
||||
BuiltinFunction._create("unpackUnorm2x16", Keyword.VEC2, Keyword.UINT);
|
||||
BuiltinFunction._create("packHalf2x16", Keyword.UINT, Keyword.VEC2);
|
||||
BuiltinFunction._create("unpackHalf2x16", Keyword.VEC2, Keyword.UINT);
|
||||
|
||||
BuiltinFunction._create("length", Keyword.FLOAT, EGenType.GenType);
|
||||
BuiltinFunction._create("distance", Keyword.FLOAT, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("dot", Keyword.FLOAT, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("cross", Keyword.VEC3, Keyword.VEC3, Keyword.VEC3);
|
||||
BuiltinFunction._create("normalize", EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("faceforward", EGenType.GenType, EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("reflect", EGenType.GenType, EGenType.GenType, EGenType.GenType);
|
||||
BuiltinFunction._create("refract", EGenType.GenType, EGenType.GenType, EGenType.GenType, Keyword.FLOAT);
|
||||
BuiltinFunction._create("matrixCompMult", EGenType.Mat, EGenType.Mat, EGenType.Mat);
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT2, Keyword.VEC2, Keyword.VEC2);
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT3, Keyword.VEC3, Keyword.VEC3);
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT4, Keyword.VEC4, Keyword.VEC4);
|
||||
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT2X3, Keyword.VEC3, Keyword.VEC2);
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT3X2, Keyword.VEC2, Keyword.VEC3);
|
||||
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT2X4, Keyword.VEC4, Keyword.VEC2);
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT4X2, Keyword.VEC2, Keyword.VEC4);
|
||||
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT3X4, Keyword.VEC4, Keyword.VEC3);
|
||||
BuiltinFunction._create("outerProduct", Keyword.MAT4X3, Keyword.VEC3, Keyword.VEC4);
|
||||
|
||||
BuiltinFunction._create("transpose", Keyword.MAT2, Keyword.MAT2);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT3, Keyword.MAT3);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT4, Keyword.MAT4);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT2X3, Keyword.MAT3X2);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT3X2, Keyword.MAT2X3);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT4X2, Keyword.MAT2X4);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT2X4, Keyword.MAT4X2);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT3X4, Keyword.MAT4X3);
|
||||
BuiltinFunction._create("transpose", Keyword.MAT4X3, Keyword.MAT3X4);
|
||||
|
||||
BuiltinFunction._create("determinant", Keyword.FLOAT, Keyword.MAT2);
|
||||
BuiltinFunction._create("determinant", Keyword.FLOAT, Keyword.MAT3);
|
||||
BuiltinFunction._create("determinant", Keyword.FLOAT, Keyword.MAT4);
|
||||
|
||||
BuiltinFunction._create("inverse", Keyword.MAT2, Keyword.MAT2);
|
||||
BuiltinFunction._create("inverse", Keyword.MAT3, Keyword.MAT3);
|
||||
BuiltinFunction._create("inverse", Keyword.MAT4, Keyword.MAT4);
|
||||
|
||||
BuiltinFunction._create("lessThan", EGenType.BoolVec, EGenType.Vec, EGenType.Vec);
|
||||
BuiltinFunction._create("lessThan", EGenType.BoolVec, EGenType.IntVec, EGenType.IntVec);
|
||||
BuiltinFunction._create("lessThan", EGenType.BoolVec, EGenType.UintVec, EGenType.UintVec);
|
||||
|
||||
BuiltinFunction._create("lessThanEqual", EGenType.BoolVec, EGenType.Vec, EGenType.Vec);
|
||||
BuiltinFunction._create("lessThanEqual", EGenType.BoolVec, EGenType.IntVec, EGenType.IntVec);
|
||||
BuiltinFunction._create("lessThanEqual", EGenType.BoolVec, EGenType.UintVec, EGenType.UintVec);
|
||||
|
||||
BuiltinFunction._create("greaterThan", EGenType.BoolVec, EGenType.Vec, EGenType.Vec);
|
||||
BuiltinFunction._create("greaterThan", EGenType.BoolVec, EGenType.IntVec, EGenType.IntVec);
|
||||
BuiltinFunction._create("greaterThan", EGenType.BoolVec, EGenType.UintVec, EGenType.UintVec);
|
||||
|
||||
BuiltinFunction._create("greaterThanEqual", EGenType.BoolVec, EGenType.Vec, EGenType.Vec);
|
||||
BuiltinFunction._create("greaterThanEqual", EGenType.BoolVec, EGenType.IntVec, EGenType.IntVec);
|
||||
BuiltinFunction._create("greaterThanEqual", EGenType.BoolVec, EGenType.UintVec, EGenType.UintVec);
|
||||
|
||||
BuiltinFunction._create("equal", EGenType.BoolVec, EGenType.Vec, EGenType.Vec);
|
||||
BuiltinFunction._create("equal", EGenType.BoolVec, EGenType.IntVec, EGenType.IntVec);
|
||||
BuiltinFunction._create("equal", EGenType.BoolVec, EGenType.UintVec, EGenType.UintVec);
|
||||
BuiltinFunction._create("equal", EGenType.BoolVec, EGenType.BoolVec, EGenType.BoolVec);
|
||||
|
||||
BuiltinFunction._create("notEqual", EGenType.BoolVec, EGenType.Vec, EGenType.Vec);
|
||||
BuiltinFunction._create("notEqual", EGenType.BoolVec, EGenType.IntVec, EGenType.IntVec);
|
||||
BuiltinFunction._create("notEqual", EGenType.BoolVec, EGenType.UintVec, EGenType.UintVec);
|
||||
BuiltinFunction._create("notEqual", EGenType.BoolVec, EGenType.BoolVec, EGenType.BoolVec);
|
||||
|
||||
BuiltinFunction._create("any", Keyword.BOOL, EGenType.BoolVec);
|
||||
BuiltinFunction._create("all", Keyword.BOOL, EGenType.BoolVec);
|
||||
BuiltinFunction._create("not", EGenType.BoolVec, EGenType.BoolVec);
|
||||
|
||||
BuiltinFunction._create("textureSize", Keyword.IVEC2, EGenType.GSampler2D, Keyword.INT);
|
||||
BuiltinFunction._create("textureSize", Keyword.IVEC3, EGenType.GSampler3D, Keyword.INT);
|
||||
BuiltinFunction._create("textureSize", Keyword.IVEC2, EGenType.GSamplerCube, Keyword.INT);
|
||||
|
||||
BuiltinFunction._create("textureSize", Keyword.IVEC2, Keyword.SAMPLER2D_SHADOW, Keyword.INT);
|
||||
BuiltinFunction._create("textureSize", Keyword.IVEC2, Keyword.SAMPLER_CUBE_SHADOW, Keyword.INT);
|
||||
|
||||
BuiltinFunction._create("textureSize", Keyword.IVEC3, EGenType.GSampler2DArray, Keyword.INT);
|
||||
BuiltinFunction._create("textureSize", Keyword.IVEC3, Keyword.SAMPLER2D_ARRAY_SHADOW, Keyword.INT);
|
||||
|
||||
BuiltinFunction._create("texture2D", Keyword.VEC4, Keyword.SAMPLER2D, Keyword.VEC2);
|
||||
BuiltinFunction._create("texture2D", Keyword.VEC4, Keyword.SAMPLER2D, Keyword.VEC2, Keyword.FLOAT);
|
||||
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC2, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC2);
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC3);
|
||||
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, EGenType.GSamplerCube, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, EGenType.GSamplerCube, Keyword.VEC3);
|
||||
|
||||
BuiltinFunction._create("texture", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC3);
|
||||
|
||||
BuiltinFunction._create("texture", Keyword.FLOAT, Keyword.SAMPLER_CUBE_SHADOW, Keyword.VEC4, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture", Keyword.FLOAT, Keyword.SAMPLER_CUBE_SHADOW, Keyword.VEC4);
|
||||
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, Keyword.SAMPLER2D_ARRAY, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture", EGenType.GVec4, Keyword.SAMPLER2D_ARRAY, Keyword.VEC3);
|
||||
|
||||
BuiltinFunction._create("texture", Keyword.FLOAT, Keyword.SAMPLER2D_ARRAY_SHADOW, Keyword.VEC4);
|
||||
|
||||
BuiltinFunction._create("textureProj", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureProj", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC3);
|
||||
BuiltinFunction._create("textureProj", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC4, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureProj", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC4);
|
||||
BuiltinFunction._create("textureProj", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC4, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureProj", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC4);
|
||||
|
||||
BuiltinFunction._create("textureProj", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC4, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureProj", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC4);
|
||||
|
||||
BuiltinFunction._create("textureLod", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC2, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureLod", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureLod", EGenType.GVec4, EGenType.GSamplerCube, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureLod", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureLod", EGenType.GVec4, EGenType.GSampler2DArray, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture2DLodEXT", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC2, Keyword.FLOAT);
|
||||
BuiltinFunction._create("texture2DLodEXT", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC3, Keyword.FLOAT);
|
||||
|
||||
BuiltinFunction._create("textureCube", Keyword.SAMPLER_CUBE, Keyword.VEC3);
|
||||
BuiltinFunction._create("textureCube", Keyword.SAMPLER_CUBE, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureCube", EGenType.GVec4, EGenType.GSamplerCube, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureCubeLod", Keyword.SAMPLER_CUBE, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureCubeLodEXT", EGenType.GVec4, EGenType.GSamplerCube, Keyword.VEC3, Keyword.FLOAT);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureOffset", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC2, Keyword.IVEC2);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.VEC3,
|
||||
Keyword.IVEC3,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureOffset", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC3, Keyword.IVEC3);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureOffset",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC3,
|
||||
Keyword.IVEC2,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureOffset", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC3, Keyword.IVEC2);
|
||||
BuiltinFunction._create(
|
||||
"textureOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2DArray,
|
||||
Keyword.VEC3,
|
||||
Keyword.IVEC2,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureOffset", EGenType.GVec4, EGenType.GSampler2DArray, Keyword.VEC3, Keyword.IVEC2);
|
||||
|
||||
BuiltinFunction._create("texelFetch", EGenType.GVec4, EGenType.GSampler2D, Keyword.IVEC2, Keyword.INT);
|
||||
BuiltinFunction._create("texelFetch", EGenType.GVec4, EGenType.GSampler3D, Keyword.IVEC3, Keyword.INT);
|
||||
BuiltinFunction._create("texelFetch", EGenType.GVec4, EGenType.GSampler2DArray, Keyword.IVEC3, Keyword.INT);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"texelFetchOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.IVEC2,
|
||||
Keyword.INT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"texelFetchOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.IVEC3,
|
||||
Keyword.INT,
|
||||
Keyword.IVEC3
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"texelFetchOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2DArray,
|
||||
Keyword.IVEC3,
|
||||
Keyword.INT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureProjOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC3,
|
||||
Keyword.IVEC2,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureProjOffset", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC3, Keyword.IVEC2);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureProjOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC4,
|
||||
Keyword.IVEC2,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureProjOffset", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC4, Keyword.IVEC2);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureProjOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.VEC4,
|
||||
Keyword.IVEC3,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureProjOffset", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC4, Keyword.IVEC3);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureProjOffset",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC4,
|
||||
Keyword.IVEC2,
|
||||
Keyword.FLOAT
|
||||
);
|
||||
BuiltinFunction._create("textureProjOffset", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC4, Keyword.IVEC2);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureLodOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC2,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureLodOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.VEC3,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC3
|
||||
);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureLodOffset",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC3,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureLodOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2DArray,
|
||||
Keyword.VEC3,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
|
||||
BuiltinFunction._create("textureProjLod", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC3, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureProjLod", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC4, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureProjLod", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC4, Keyword.FLOAT);
|
||||
BuiltinFunction._create("textureProjLod", Keyword.FLOAT, Keyword.SAMPLER2D_SHADOW, Keyword.VEC4, Keyword.FLOAT);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureProjLodOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC3,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjLodOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC4,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjLodOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.VEC4,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC3
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjLodOffset",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC4,
|
||||
Keyword.FLOAT,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
|
||||
BuiltinFunction._create("textureGrad", EGenType.GVec4, EGenType.GSampler2D, Keyword.VEC2, Keyword.VEC2, Keyword.VEC2);
|
||||
BuiltinFunction._create("textureGrad", EGenType.GVec4, EGenType.GSampler3D, Keyword.VEC3, Keyword.VEC3, Keyword.VEC3);
|
||||
BuiltinFunction._create("textureGrad", EGenType.GVec4, EGenType.GSamplerCube, Keyword.VEC3, Keyword.VEC3, Keyword.VEC3);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureGrad",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureGrad",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER_CUBE_SHADOW,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC3
|
||||
);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureGrad",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2DArray,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureGrad",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_ARRAY_SHADOW,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2
|
||||
);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureGradOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureGradOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC3,
|
||||
Keyword.IVEC3
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureGradOffset",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureGradOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2DArray,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureGradOffset",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_ARRAY_SHADOW,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureProjGrad",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjGrad",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjGrad",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC3
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjGrad",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2
|
||||
);
|
||||
|
||||
BuiltinFunction._create(
|
||||
"textureProjGradOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjGradOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler2D,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjGradOffset",
|
||||
EGenType.GVec4,
|
||||
EGenType.GSampler3D,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC3,
|
||||
Keyword.VEC3,
|
||||
Keyword.IVEC3
|
||||
);
|
||||
BuiltinFunction._create(
|
||||
"textureProjGradOffset",
|
||||
Keyword.FLOAT,
|
||||
Keyword.SAMPLER2D_SHADOW,
|
||||
Keyword.VEC4,
|
||||
Keyword.VEC2,
|
||||
Keyword.VEC2,
|
||||
Keyword.IVEC2
|
||||
);
|
||||
BuiltinFunction._createWithScop("dFdx", EGenType.GenType, EShaderStage.FRAGMENT, EGenType.GenType);
|
||||
BuiltinFunction._createWithScop("dFdy", EGenType.GenType, EShaderStage.FRAGMENT, EGenType.GenType);
|
||||
BuiltinFunction._createWithScop("fwidth", EGenType.GenType, EShaderStage.FRAGMENT, EGenType.GenType);
|
||||
@@ -1,2 +0,0 @@
|
||||
export * from "./functions";
|
||||
export * from "./variables";
|
||||
@@ -1,50 +0,0 @@
|
||||
import { GalaceanDataType } from "../../common";
|
||||
import { Keyword } from "../../common/enums/Keyword";
|
||||
import { EShaderStage } from "../../common/enums/ShaderStage";
|
||||
|
||||
export const BuiltinVariableTable: Map<string, BuiltinVariable> = new Map();
|
||||
|
||||
export class BuiltinVariable {
|
||||
type: GalaceanDataType;
|
||||
lexeme: string;
|
||||
scope: EShaderStage;
|
||||
|
||||
private constructor(type: GalaceanDataType, lexeme: string, scope: EShaderStage) {
|
||||
this.type = type;
|
||||
this.lexeme = lexeme;
|
||||
this.scope = scope;
|
||||
}
|
||||
|
||||
static createVariable(lexeme: string, type: GalaceanDataType, scope = EShaderStage.ALL) {
|
||||
const item = new BuiltinVariable(type, lexeme, scope);
|
||||
BuiltinVariableTable.set(lexeme, item);
|
||||
}
|
||||
|
||||
static getVar(ident: string): BuiltinVariable {
|
||||
return BuiltinVariableTable.get(ident);
|
||||
}
|
||||
}
|
||||
|
||||
BuiltinVariable.createVariable("gl_VertexID", Keyword.INT, EShaderStage.VERTEX);
|
||||
BuiltinVariable.createVariable("gl_InstanceID", Keyword.INT, EShaderStage.VERTEX);
|
||||
BuiltinVariable.createVariable("gl_Position", Keyword.VEC4, EShaderStage.VERTEX);
|
||||
BuiltinVariable.createVariable("gl_PointSize", Keyword.FLOAT, EShaderStage.VERTEX);
|
||||
|
||||
BuiltinVariable.createVariable("gl_FragCoord", Keyword.VEC4, EShaderStage.FRAGMENT);
|
||||
BuiltinVariable.createVariable("gl_FrontFacing", Keyword.BOOL, EShaderStage.FRAGMENT);
|
||||
BuiltinVariable.createVariable("gl_FragDepth", Keyword.FLOAT, EShaderStage.FRAGMENT);
|
||||
BuiltinVariable.createVariable("gl_PointCoord", Keyword.VEC2, EShaderStage.FRAGMENT);
|
||||
BuiltinVariable.createVariable("gl_FragColor", Keyword.VEC4, EShaderStage.FRAGMENT);
|
||||
BuiltinVariable.createVariable("gl_FragData", Keyword.VEC4_ARRAY, EShaderStage.FRAGMENT);
|
||||
|
||||
BuiltinVariable.createVariable("gl_MaxVertexAttribs", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxVertexUniformVectors", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxVertexOutputVectors", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxFragmentInputVectors", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxVertexTextureImageUnits", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxCombinedTextureImageUnits", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxTextureImageUnits", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxFragmentUniformVectors", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxDrawBuffers", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MinProgramTexelOffset", Keyword.INT);
|
||||
BuiltinVariable.createVariable("gl_MaxProgramTexelOffset", Keyword.INT);
|
||||
@@ -1,2 +0,0 @@
|
||||
export { ShaderTargetParser } from "./ShaderTargetParser";
|
||||
export { Grammar } from "./Grammar";
|
||||
@@ -1,15 +0,0 @@
|
||||
import { ASTNode } from "../AST";
|
||||
import { SymbolDataType } from "./SymbolDataType";
|
||||
import { ESymbolType, SymbolInfo } from "./SymbolInfo";
|
||||
|
||||
export class FnSymbol extends SymbolInfo {
|
||||
declare astNode: ASTNode.FunctionDefinition;
|
||||
|
||||
constructor(lexeme: string, astNode: ASTNode.FunctionDefinition) {
|
||||
const type = new SymbolDataType(
|
||||
astNode.protoType.returnType.type,
|
||||
astNode.protoType.returnType.typeSpecifier.lexeme
|
||||
);
|
||||
super(lexeme, ESymbolType.FN, astNode, type);
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
import { ASTNode } from "../AST";
|
||||
import { ESymbolType, SymbolInfo } from "./SymbolInfo";
|
||||
|
||||
export class StructSymbol extends SymbolInfo {
|
||||
declare astNode: ASTNode.StructSpecifier;
|
||||
|
||||
constructor(lexeme: string, astNode: ASTNode.StructSpecifier) {
|
||||
super(lexeme, ESymbolType.STRUCT, astNode);
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
import { GalaceanDataType } from "../../common";
|
||||
import { ASTNode } from "../AST";
|
||||
|
||||
export class SymbolDataType {
|
||||
constructor(
|
||||
public type: GalaceanDataType,
|
||||
public typeLexeme: string,
|
||||
public arraySpecifier?: ASTNode.ArraySpecifier
|
||||
) {}
|
||||
}
|
||||
@@ -1,64 +0,0 @@
|
||||
import { IBaseSymbol } from "../../common/IBaseSymbol";
|
||||
import { GalaceanDataType, TypeAny } from "../../common/types";
|
||||
import { ASTNode } from "../AST";
|
||||
import { SymbolDataType } from "./SymbolDataType";
|
||||
|
||||
export enum ESymbolType {
|
||||
VAR,
|
||||
FN,
|
||||
STRUCT,
|
||||
Any
|
||||
}
|
||||
|
||||
export type SymbolAstNode =
|
||||
| ASTNode.Initializer
|
||||
| ASTNode.StructSpecifier
|
||||
| ASTNode.FunctionDefinition
|
||||
| ASTNode.ParameterDeclarator
|
||||
| ASTNode.InitDeclaratorList
|
||||
| ASTNode.VariableDeclaration;
|
||||
|
||||
export class SymbolInfo implements IBaseSymbol {
|
||||
constructor(
|
||||
public ident: string,
|
||||
public type: ESymbolType,
|
||||
public astNode?: SymbolAstNode,
|
||||
public dataType?: SymbolDataType,
|
||||
public paramSignature?: GalaceanDataType[],
|
||||
public isInMacroBranch = false
|
||||
) {}
|
||||
|
||||
set(
|
||||
ident: string,
|
||||
symbolType: ESymbolType,
|
||||
astNode?: SymbolAstNode,
|
||||
dataType?: SymbolDataType,
|
||||
paramSignature?: GalaceanDataType[]
|
||||
) {
|
||||
this.ident = ident;
|
||||
this.type = symbolType;
|
||||
this.astNode = astNode;
|
||||
this.dataType = dataType;
|
||||
this.paramSignature = paramSignature;
|
||||
}
|
||||
|
||||
equal(symbol: SymbolInfo): boolean {
|
||||
if (symbol.type !== ESymbolType.Any && this.type !== symbol.type) return false;
|
||||
if (this.type === ESymbolType.FN) {
|
||||
if (!symbol.astNode && !symbol.paramSignature) return true;
|
||||
|
||||
const params = (<ASTNode.FunctionDefinition>this.astNode).protoType.paramSig;
|
||||
const comparedParams = symbol.paramSignature ?? (<ASTNode.FunctionDefinition>symbol.astNode).protoType.paramSig;
|
||||
const length = params?.length;
|
||||
if (length !== comparedParams?.length) return false;
|
||||
for (let i = 0; i < length; i++) {
|
||||
const t1 = params[i],
|
||||
t2 = comparedParams[i];
|
||||
if (t1 === TypeAny || t2 === TypeAny) continue;
|
||||
if (t1 !== t2) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
import { ASTNode } from "../AST";
|
||||
import { SymbolDataType } from "./SymbolDataType";
|
||||
import { ESymbolType, SymbolInfo } from "./SymbolInfo";
|
||||
|
||||
export class VarSymbol extends SymbolInfo {
|
||||
declare astNode:
|
||||
| ASTNode.Initializer
|
||||
| ASTNode.ParameterDeclarator
|
||||
| ASTNode.InitDeclaratorList
|
||||
| ASTNode.VariableDeclaration;
|
||||
|
||||
readonly isGlobalVariable: boolean;
|
||||
|
||||
constructor(
|
||||
ident: string,
|
||||
dataType: SymbolDataType,
|
||||
isGlobalVariable: boolean,
|
||||
initAst:
|
||||
| ASTNode.Initializer
|
||||
| ASTNode.ParameterDeclarator
|
||||
| ASTNode.InitDeclaratorList
|
||||
| ASTNode.VariableDeclaration
|
||||
) {
|
||||
super(ident, ESymbolType.VAR, initAst, dataType);
|
||||
this.isGlobalVariable = isGlobalVariable;
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
export * from "././../../common/SymbolTable";
|
||||
export * from "./FnSymbol";
|
||||
export * from "./StructSymbol";
|
||||
export * from "./SymbolDataType";
|
||||
export * from "./SymbolInfo";
|
||||
export * from "./VarSymbol";
|
||||
@@ -1,31 +0,0 @@
|
||||
import { GalaceanDataType } from "../common";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { ASTNode, TreeNode } from "./AST";
|
||||
import { NoneTerminal } from "./GrammarSymbol";
|
||||
|
||||
export type TraceStackItem = NoneTerminal | BaseToken;
|
||||
|
||||
export class SymbolType {
|
||||
constructor(
|
||||
public type: GalaceanDataType,
|
||||
public typeLexeme: string,
|
||||
public arraySpecifier?: ASTNode.ArraySpecifier
|
||||
) {}
|
||||
}
|
||||
|
||||
export class StructProp implements IParamInfo {
|
||||
constructor(
|
||||
public typeInfo: SymbolType,
|
||||
public ident: BaseToken,
|
||||
public mrtIndex?: number,
|
||||
public isInMacroBranch = false
|
||||
) {}
|
||||
}
|
||||
|
||||
export type NodeChild = TreeNode | BaseToken;
|
||||
|
||||
export type IParamInfo = {
|
||||
ident?: BaseToken;
|
||||
typeInfo?: SymbolType;
|
||||
astNode?: ASTNode.ParameterDeclaration | ASTNode.MacroParamBlock;
|
||||
};
|
||||
@@ -1,48 +0,0 @@
|
||||
import { IRenderStates, IShaderPassSource, IShaderSource, ISubShaderSource } from "@galacean/engine-design";
|
||||
|
||||
export class ShaderSourceFactory {
|
||||
static createRenderStates(): IRenderStates {
|
||||
return {
|
||||
constantMap: {},
|
||||
variableMap: {}
|
||||
};
|
||||
}
|
||||
|
||||
static createShaderSource(name: string): IShaderSource {
|
||||
return {
|
||||
name,
|
||||
subShaders: [],
|
||||
pendingContents: [],
|
||||
renderStates: this.createRenderStates()
|
||||
};
|
||||
}
|
||||
|
||||
static createSubShaderSource(name: string): ISubShaderSource {
|
||||
return {
|
||||
name,
|
||||
passes: [],
|
||||
pendingContents: [],
|
||||
tags: {},
|
||||
renderStates: this.createRenderStates()
|
||||
};
|
||||
}
|
||||
|
||||
static createShaderPassSource(name: string): IShaderPassSource {
|
||||
return {
|
||||
name,
|
||||
pendingContents: [],
|
||||
tags: {},
|
||||
renderStates: this.createRenderStates()
|
||||
} as IShaderPassSource;
|
||||
}
|
||||
|
||||
static createUsePass(name: string): IShaderPassSource {
|
||||
return {
|
||||
name,
|
||||
pendingContents: [],
|
||||
isUsePass: true,
|
||||
tags: {},
|
||||
renderStates: this.createRenderStates()
|
||||
} as IShaderPassSource;
|
||||
}
|
||||
}
|
||||
@@ -1,541 +0,0 @@
|
||||
import {
|
||||
BlendFactor,
|
||||
BlendOperation,
|
||||
Color,
|
||||
CompareFunction,
|
||||
CullMode,
|
||||
Logger,
|
||||
RenderQueueType,
|
||||
RenderStateElementKey,
|
||||
StencilOperation
|
||||
} from "@galacean/engine";
|
||||
import { IRenderStates, IShaderPassSource, IShaderSource, IStatement, ISubShaderSource } from "@galacean/engine-design";
|
||||
import { ETokenType, ShaderPosition, ShaderRange } from "../common";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { SymbolTableStack } from "../common/SymbolTableStack";
|
||||
import { GSErrorName } from "../GSError";
|
||||
// #if _VERBOSE
|
||||
import { GSError } from "../GSError";
|
||||
// #endif
|
||||
import { BaseLexer } from "../common/BaseLexer";
|
||||
import { Keyword } from "../common/enums/Keyword";
|
||||
import { SymbolTable } from "../common/SymbolTable";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
import { ShaderSourceFactory } from "./ShaderSourceFactory";
|
||||
import { ShaderSourceSymbol } from "./ShaderSourceSymbol";
|
||||
import SourceLexer from "./SourceLexer";
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
export class ShaderSourceParser {
|
||||
static readonly errors = new Array<GSError>();
|
||||
|
||||
private static _renderStateConstMap = <Record<string, Record<string, number | string | boolean>>>{
|
||||
RenderQueueType,
|
||||
CompareFunction,
|
||||
StencilOperation,
|
||||
BlendOperation,
|
||||
BlendFactor,
|
||||
CullMode
|
||||
};
|
||||
private static _symbolTableStack = new SymbolTableStack<ShaderSourceSymbol, SymbolTable<ShaderSourceSymbol>>();
|
||||
private static _lexer = new SourceLexer();
|
||||
private static _lookupSymbol = new ShaderSourceSymbol("", null);
|
||||
|
||||
static parse(sourceCode: string): IShaderSource {
|
||||
const startTime = performance.now();
|
||||
|
||||
// Clear previous data
|
||||
this.errors.length = 0;
|
||||
this._symbolTableStack.clear();
|
||||
this._pushScope();
|
||||
|
||||
const lexer = this._lexer;
|
||||
lexer.setSource(sourceCode);
|
||||
|
||||
const shaderSource = this._parseShader(lexer);
|
||||
|
||||
const shaderPendingContents = shaderSource.pendingContents;
|
||||
const shaderRenderStates = shaderSource.renderStates;
|
||||
for (let i = 0, n = shaderSource.subShaders.length; i < n; i++) {
|
||||
const subShader = shaderSource.subShaders[i];
|
||||
const curSubShaderGlobalStatements = shaderPendingContents.concat(subShader.pendingContents);
|
||||
const globalSubShaderStates = {
|
||||
constantMap: { ...shaderRenderStates.constantMap },
|
||||
variableMap: { ...shaderRenderStates.variableMap }
|
||||
};
|
||||
this._mergeRenderStates(globalSubShaderStates, subShader.renderStates);
|
||||
|
||||
for (let j = 0, m = subShader.passes.length; j < m; j++) {
|
||||
const pass = subShader.passes[j];
|
||||
const globalPassRenderStates = {
|
||||
constantMap: { ...globalSubShaderStates.constantMap },
|
||||
variableMap: { ...globalSubShaderStates.variableMap }
|
||||
};
|
||||
this._mergeRenderStates(globalPassRenderStates, pass.renderStates);
|
||||
pass.renderStates = globalPassRenderStates;
|
||||
|
||||
if (pass.isUsePass) continue;
|
||||
const passGlobalStatements = curSubShaderGlobalStatements.concat(pass.pendingContents);
|
||||
pass.contents = passGlobalStatements.map((item) => item.content).join("\n");
|
||||
}
|
||||
}
|
||||
|
||||
Logger.info(`[Task - Source compilation] cost time ${performance.now() - startTime}ms`);
|
||||
return shaderSource;
|
||||
}
|
||||
|
||||
private static _parseShader(lexer: SourceLexer): IShaderSource {
|
||||
// Parse shader header
|
||||
lexer.scanLexeme("Shader");
|
||||
const name = lexer.scanPairedChar('"', '"', false, false);
|
||||
const shaderSource = ShaderSourceFactory.createShaderSource(name);
|
||||
lexer.scanLexeme("{");
|
||||
|
||||
let braceLevel = 1;
|
||||
lexer.skipCommentsAndSpace();
|
||||
let start = lexer.getShaderPosition(0);
|
||||
|
||||
const { pendingContents } = shaderSource;
|
||||
while (true) {
|
||||
const token = lexer.scanToken();
|
||||
switch (token.type) {
|
||||
case Keyword.GSSubShader:
|
||||
this._addPendingContents(start, token.lexeme.length, pendingContents);
|
||||
const subShader = this._parseSubShader();
|
||||
shaderSource.subShaders.push(subShader);
|
||||
start = lexer.getShaderPosition(0);
|
||||
break;
|
||||
case Keyword.GSEditorProperties:
|
||||
case Keyword.GSEditorMacros:
|
||||
case Keyword.GSEditor:
|
||||
this._addPendingContents(start, token.lexeme.length, pendingContents);
|
||||
lexer.scanPairedChar("{", "}", true, false);
|
||||
start = lexer.getShaderPosition(0);
|
||||
break;
|
||||
case Keyword.LeftBrace:
|
||||
++braceLevel;
|
||||
break;
|
||||
case Keyword.RightBrace:
|
||||
if (--braceLevel === 0) {
|
||||
this._addPendingContents(start, token.lexeme.length, pendingContents);
|
||||
this._popScope();
|
||||
return shaderSource;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
start = this._parseRenderState(token, start, pendingContents, shaderSource.renderStates);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static _parseRenderStateDeclarationOrAssignment(outRenderStates: IRenderStates, stateToken: BaseToken): void {
|
||||
const lexer = this._lexer;
|
||||
const token = lexer.scanToken();
|
||||
if (token.type === ETokenType.ID) {
|
||||
// Declaration
|
||||
lexer.scanLexeme("{");
|
||||
const renderState = this._parseRenderStateProperties(stateToken.lexeme);
|
||||
const symbol = new ShaderSourceSymbol(token.lexeme, stateToken.type, renderState);
|
||||
this._symbolTableStack.insert(symbol);
|
||||
} else if (token.lexeme === "=") {
|
||||
// Check if it's direct assignment syntax sugar or variable assignment
|
||||
const nextToken = lexer.scanToken();
|
||||
|
||||
let renderState: IRenderStates;
|
||||
if (nextToken.lexeme === "{") {
|
||||
// Syntax: DepthState = { ... }
|
||||
renderState = this._parseRenderStateProperties(stateToken.lexeme);
|
||||
} else {
|
||||
// Syntax: DepthState = customDepthState;
|
||||
lexer.scanLexeme(";");
|
||||
const lookupSymbol = this._lookupSymbol;
|
||||
lookupSymbol.set(nextToken.lexeme, stateToken.type);
|
||||
const sm = this._symbolTableStack.lookup(lookupSymbol);
|
||||
if (!sm?.value) {
|
||||
this._createCompileError(`Invalid "${stateToken.lexeme}" variable: ${nextToken.lexeme}`, nextToken.location);
|
||||
// #if _VERBOSE
|
||||
return;
|
||||
// #endif
|
||||
}
|
||||
renderState = sm.value as IRenderStates;
|
||||
}
|
||||
this._mergeRenderStates(outRenderStates, renderState);
|
||||
}
|
||||
}
|
||||
|
||||
private static _mergeRenderStates(outTarget: IRenderStates, source: IRenderStates): void {
|
||||
// For each key in the source, remove it from the opposite map in target to ensure proper override
|
||||
const { constantMap: targetConstantMap, variableMap: targetVariableMap } = outTarget;
|
||||
const { constantMap: sourceConstantMap, variableMap: sourceVariableMap } = source;
|
||||
|
||||
for (const key in sourceConstantMap) {
|
||||
delete targetVariableMap[key];
|
||||
targetConstantMap[key] = sourceConstantMap[key];
|
||||
}
|
||||
|
||||
for (const key in sourceVariableMap) {
|
||||
delete targetConstantMap[key];
|
||||
targetVariableMap[key] = sourceVariableMap[key];
|
||||
}
|
||||
}
|
||||
|
||||
private static _parseVariableDeclaration(): void {
|
||||
const lexer = this._lexer;
|
||||
const token = lexer.scanToken();
|
||||
lexer.scanLexeme(";");
|
||||
const symbol = new ShaderSourceSymbol(token.lexeme, token.type);
|
||||
this._symbolTableStack.insert(symbol);
|
||||
}
|
||||
|
||||
private static _pushScope(): void {
|
||||
const symbolTable = new SymbolTable<ShaderSourceSymbol>();
|
||||
this._symbolTableStack.pushScope(symbolTable);
|
||||
}
|
||||
|
||||
private static _popScope(): void {
|
||||
this._symbolTableStack.popScope();
|
||||
}
|
||||
|
||||
private static _parseRenderStateProperties(state: string): IRenderStates {
|
||||
const lexer = this._lexer;
|
||||
const renderStates = ShaderSourceFactory.createRenderStates();
|
||||
while (lexer.getCurChar() !== "}") {
|
||||
this._parseRenderStateProperty(state, renderStates);
|
||||
lexer.skipCommentsAndSpace();
|
||||
}
|
||||
lexer.advance(1);
|
||||
return renderStates;
|
||||
}
|
||||
|
||||
private static _createCompileError(message: string, location?: ShaderPosition | ShaderRange): void {
|
||||
const error = this._lexer.createCompileError(message, location);
|
||||
// #if _VERBOSE
|
||||
this.errors.push(<GSError>error);
|
||||
// #endif
|
||||
}
|
||||
|
||||
private static _parseRenderStateProperty(stateLexeme: string, out: IRenderStates): void {
|
||||
const lexer = this._lexer;
|
||||
const propertyToken = lexer.scanToken();
|
||||
const propertyLexeme = propertyToken.lexeme;
|
||||
let stateElementKey = propertyLexeme;
|
||||
if (stateLexeme === "BlendState" && propertyLexeme !== "BlendColor" && propertyLexeme !== "AlphaToCoverage") {
|
||||
let keyIndex = 0;
|
||||
const scannedLexeme = lexer.scanTwoExpectedLexemes("[", "=");
|
||||
if (scannedLexeme === "[") {
|
||||
keyIndex = lexer.scanNumber();
|
||||
lexer.scanLexeme("]");
|
||||
lexer.scanLexeme("=");
|
||||
} else if (scannedLexeme !== "=") {
|
||||
this._createCompileError(`Invalid syntax, expect '[' or '=', but got unexpected token`);
|
||||
// #if _VERBOSE
|
||||
lexer.scanToCharacter(";");
|
||||
return;
|
||||
// #endif
|
||||
}
|
||||
stateElementKey += keyIndex;
|
||||
} else {
|
||||
lexer.scanLexeme("=");
|
||||
}
|
||||
|
||||
const renderStateElementKey = RenderStateElementKey[stateLexeme + stateElementKey];
|
||||
if (renderStateElementKey === undefined) {
|
||||
this._createCompileError(`Invalid render state property ${propertyLexeme}`);
|
||||
// #if _VERBOSE
|
||||
lexer.scanToCharacter(";");
|
||||
return;
|
||||
// #endif
|
||||
}
|
||||
|
||||
lexer.skipCommentsAndSpace();
|
||||
let propertyValue: number | string | boolean | Color;
|
||||
|
||||
const curCharCode = lexer.getCurCharCode();
|
||||
if (BaseLexer.isDigit(curCharCode) || curCharCode === 46) {
|
||||
// Digit or '.'
|
||||
propertyValue = lexer.scanNumber();
|
||||
} else {
|
||||
const valueToken = lexer.scanToken();
|
||||
const valueTokenType = valueToken.type;
|
||||
|
||||
if (valueTokenType === Keyword.True) {
|
||||
propertyValue = true;
|
||||
} else if (valueTokenType === Keyword.False) {
|
||||
propertyValue = false;
|
||||
} else if (valueTokenType === Keyword.GSColor) {
|
||||
propertyValue = lexer.scanColor();
|
||||
} else if (lexer.getCurChar() === ".") {
|
||||
lexer.advance(1);
|
||||
const constValueToken = lexer.scanToken();
|
||||
propertyValue = this._renderStateConstMap[valueToken.lexeme]?.[constValueToken.lexeme];
|
||||
if (propertyValue == undefined) {
|
||||
this._createCompileError(
|
||||
`Invalid engine constant: ${valueToken.lexeme}.${constValueToken.lexeme}`,
|
||||
constValueToken.location
|
||||
);
|
||||
// #if _VERBOSE
|
||||
lexer.scanToCharacter(";");
|
||||
return;
|
||||
// #endif
|
||||
}
|
||||
} else {
|
||||
propertyValue = valueToken.lexeme;
|
||||
const lookupSymbol = this._lookupSymbol;
|
||||
lookupSymbol.set(valueToken.lexeme, ETokenType.ID);
|
||||
if (!this._symbolTableStack.lookup(lookupSymbol)) {
|
||||
this._createCompileError(`Invalid ${stateLexeme} variable: ${valueToken.lexeme}`, valueToken.location);
|
||||
// #if _VERBOSE
|
||||
lexer.scanToCharacter(";");
|
||||
return;
|
||||
// #endif
|
||||
}
|
||||
}
|
||||
}
|
||||
lexer.scanLexeme(";");
|
||||
if (typeof propertyValue === "string") {
|
||||
out.variableMap[renderStateElementKey] = propertyValue;
|
||||
} else {
|
||||
out.constantMap[renderStateElementKey] = propertyValue;
|
||||
}
|
||||
}
|
||||
|
||||
private static _parseRenderQueueDeclarationOrAssignment(renderStates: IRenderStates): void {
|
||||
const lexer = this._lexer;
|
||||
const token = lexer.scanToken();
|
||||
if (token.type === ETokenType.ID) {
|
||||
// Declaration
|
||||
lexer.scanLexeme(";");
|
||||
const symbol = new ShaderSourceSymbol(token.lexeme, Keyword.GSRenderQueueType);
|
||||
this._symbolTableStack.insert(symbol);
|
||||
return;
|
||||
}
|
||||
|
||||
if (token.lexeme !== "=") {
|
||||
this._createCompileError(`Invalid syntax, expect character '=', but got ${token.lexeme}`, token.location);
|
||||
// #if _VERBOSE
|
||||
return;
|
||||
// #endif
|
||||
}
|
||||
const word = lexer.scanToken();
|
||||
lexer.scanLexeme(";");
|
||||
const value = this._renderStateConstMap.RenderQueueType[word.lexeme];
|
||||
const key = RenderStateElementKey.RenderQueueType;
|
||||
if (value == undefined) {
|
||||
renderStates.variableMap[key] = word.lexeme;
|
||||
const lookupSymbol = this._lookupSymbol;
|
||||
lookupSymbol.set(word.lexeme, Keyword.GSRenderQueueType);
|
||||
const sm = this._symbolTableStack.lookup(lookupSymbol);
|
||||
if (!sm) {
|
||||
this._createCompileError(`Invalid RenderQueueType variable: ${word.lexeme}`, word.location);
|
||||
// #if _VERBOSE
|
||||
return;
|
||||
// #endif
|
||||
}
|
||||
} else {
|
||||
renderStates.constantMap[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
private static _addPendingContents(
|
||||
start: ShaderPosition,
|
||||
backOffset: number,
|
||||
outPendingContents: IStatement[]
|
||||
): void {
|
||||
const lexer = this._lexer;
|
||||
if (lexer.hasPendingContent) {
|
||||
const endIndex = lexer.currentIndex - backOffset;
|
||||
outPendingContents.push({
|
||||
range: { start, end: { ...lexer.getShaderPosition(0), index: endIndex - 1 } },
|
||||
content: lexer.source.substring(start.index, endIndex - 1)
|
||||
});
|
||||
lexer.hasPendingContent = false;
|
||||
}
|
||||
}
|
||||
|
||||
private static _parseSubShader(): ISubShaderSource {
|
||||
const lexer = this._lexer;
|
||||
this._pushScope();
|
||||
|
||||
let braceLevel = 1;
|
||||
const name = lexer.scanPairedChar('"', '"', false, false);
|
||||
const subShaderSource = ShaderSourceFactory.createSubShaderSource(name);
|
||||
lexer.scanLexeme("{");
|
||||
|
||||
lexer.skipCommentsAndSpace();
|
||||
let start = lexer.getShaderPosition(0);
|
||||
|
||||
while (true) {
|
||||
const token = lexer.scanToken();
|
||||
switch (token.type) {
|
||||
case Keyword.GSPass:
|
||||
this._addPendingContents(start, token.lexeme.length, subShaderSource.pendingContents);
|
||||
const pass = this._parsePass();
|
||||
subShaderSource.passes.push(pass);
|
||||
start = lexer.getShaderPosition(0);
|
||||
break;
|
||||
case Keyword.GSUsePass:
|
||||
this._addPendingContents(start, token.lexeme.length, subShaderSource.pendingContents);
|
||||
const name = lexer.scanPairedChar('"', '"', false, false);
|
||||
subShaderSource.passes.push(ShaderSourceFactory.createUsePass(name));
|
||||
start = lexer.getShaderPosition(0);
|
||||
break;
|
||||
case Keyword.LeftBrace:
|
||||
++braceLevel;
|
||||
break;
|
||||
case Keyword.RightBrace:
|
||||
if (--braceLevel === 0) {
|
||||
this._addPendingContents(start, token.lexeme.length, subShaderSource.pendingContents);
|
||||
this._popScope();
|
||||
return subShaderSource;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
start = this._parseRenderStateAndTags(
|
||||
token,
|
||||
start,
|
||||
subShaderSource.pendingContents,
|
||||
subShaderSource.renderStates,
|
||||
subShaderSource.tags
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static _parseTags(tags: Record<string, number | string | boolean>): void {
|
||||
const lexer = this._lexer;
|
||||
lexer.scanLexeme("{");
|
||||
while (true) {
|
||||
const ident = lexer.scanToken();
|
||||
lexer.scanLexeme("=");
|
||||
const value = lexer.scanPairedChar('"', '"', false, false);
|
||||
lexer.skipCommentsAndSpace();
|
||||
|
||||
tags[ident.lexeme] = value;
|
||||
|
||||
if (lexer.peek(1) === "}") {
|
||||
lexer.advance(1);
|
||||
return;
|
||||
}
|
||||
lexer.scanLexeme(",");
|
||||
}
|
||||
}
|
||||
|
||||
private static _parsePass(): IShaderPassSource {
|
||||
this._pushScope();
|
||||
const lexer = this._lexer;
|
||||
|
||||
const name = lexer.scanPairedChar('"', '"', false, false);
|
||||
const passSource = ShaderSourceFactory.createShaderPassSource(name);
|
||||
lexer.scanLexeme("{");
|
||||
let braceLevel = 1;
|
||||
|
||||
lexer.skipCommentsAndSpace();
|
||||
let start = lexer.getShaderPosition(0);
|
||||
|
||||
while (true) {
|
||||
const token = lexer.scanToken();
|
||||
switch (token.type) {
|
||||
case Keyword.GSVertexShader:
|
||||
case Keyword.GSFragmentShader:
|
||||
this._addPendingContents(start, token.lexeme.length, passSource.pendingContents);
|
||||
lexer.scanLexeme("=");
|
||||
const entry = lexer.scanToken();
|
||||
if (passSource[token.lexeme]) {
|
||||
const error = ShaderLabUtils.createGSError(
|
||||
"Reassign main entry",
|
||||
GSErrorName.CompilationError,
|
||||
lexer.source,
|
||||
lexer.getShaderPosition(0)
|
||||
);
|
||||
// #if _VERBOSE
|
||||
Logger.error(error.toString());
|
||||
throw error;
|
||||
// #endif
|
||||
}
|
||||
const key = token.type === Keyword.GSVertexShader ? "vertexEntry" : "fragmentEntry";
|
||||
passSource[key] = entry.lexeme;
|
||||
lexer.scanLexeme(";");
|
||||
start = lexer.getShaderPosition(0);
|
||||
break;
|
||||
case Keyword.LeftBrace:
|
||||
++braceLevel;
|
||||
break;
|
||||
case Keyword.RightBrace:
|
||||
if (--braceLevel === 0) {
|
||||
this._addPendingContents(start, token.lexeme.length, passSource.pendingContents);
|
||||
this._popScope();
|
||||
return passSource;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
start = this._parseRenderStateAndTags(
|
||||
token,
|
||||
start,
|
||||
passSource.pendingContents,
|
||||
passSource.renderStates,
|
||||
passSource.tags
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static _parseRenderStateAndTags(
|
||||
token: BaseToken<number>,
|
||||
start: ShaderPosition,
|
||||
outGlobalContents: IStatement[],
|
||||
outRenderStates: IRenderStates,
|
||||
outTags: Record<string, number | string | boolean>
|
||||
): ShaderPosition {
|
||||
switch (token.type) {
|
||||
case Keyword.GSTags:
|
||||
this._addPendingContents(start, token.lexeme.length, outGlobalContents);
|
||||
this._parseTags(outTags);
|
||||
start = this._lexer.getShaderPosition(0);
|
||||
break;
|
||||
default:
|
||||
start = this._parseRenderState(token, start, outGlobalContents, outRenderStates);
|
||||
}
|
||||
return start;
|
||||
}
|
||||
|
||||
private static _parseRenderState(
|
||||
token: BaseToken<number>,
|
||||
start: ShaderPosition,
|
||||
outGlobalContents: IStatement[],
|
||||
outRenderStates: IRenderStates
|
||||
): ShaderPosition {
|
||||
switch (token.type) {
|
||||
case Keyword.GSBlendState:
|
||||
case Keyword.GSDepthState:
|
||||
case Keyword.GSRasterState:
|
||||
case Keyword.GSStencilState:
|
||||
this._addPendingContents(start, token.lexeme.length, outGlobalContents);
|
||||
this._parseRenderStateDeclarationOrAssignment(outRenderStates, token);
|
||||
start = this._lexer.getShaderPosition(0);
|
||||
break;
|
||||
case Keyword.GSBlendFactor:
|
||||
case Keyword.GSBlendOperation:
|
||||
case Keyword.GSBool:
|
||||
case Keyword.GSNumber:
|
||||
case Keyword.GSColor:
|
||||
case Keyword.GSCompareFunction:
|
||||
case Keyword.GSStencilOperation:
|
||||
case Keyword.GSCullMode:
|
||||
this._addPendingContents(start, token.lexeme.length, outGlobalContents);
|
||||
this._parseVariableDeclaration();
|
||||
start = this._lexer.getShaderPosition(0);
|
||||
break;
|
||||
case Keyword.GSRenderQueueType:
|
||||
this._addPendingContents(start, token.lexeme.length, outGlobalContents);
|
||||
this._parseRenderQueueDeclarationOrAssignment(outRenderStates);
|
||||
start = this._lexer.getShaderPosition(0);
|
||||
break;
|
||||
default:
|
||||
// Unrecognized tokens are defined as pending content
|
||||
this._lexer.hasPendingContent = true;
|
||||
}
|
||||
return start;
|
||||
}
|
||||
}
|
||||
@@ -1,152 +0,0 @@
|
||||
// Standalone grammar used to check the ShaderLab surface syntax for
// shift/reduce and reduce/reduce conflicts with bison; it is not compiled
// into the runtime parser.
// NOTE(review): the token is spelled `VertextShader` (extra `t`) while the
// runtime lexer keyword is `VertexShader` — presumably a typo; confirm
// before renaming, since this file is only fed to bison.

%token shader
%token subshader
%token pass
%token string_const
%token id
%token render_queue_type
%token blend_state
%token depth_state
%token stencil_state
%token raster_state
%token tags
%token INT_CONSTANT
%token FLOAT_CONSTANT
%token true
%token false
%token engine_type
%token render_state_prop_type
%token UsePass
%token Color_init
%token VertextShader
%token FragmentShader

%token plain_statements

%%
shader_program:
    shader string_const '{' shader_statements '}'
  ;

shader_statements:
    shader_statement
  | shader_statement shader_statements
  ;

shader_statement:
    plain_statements
  | global_declaration_in_shader
  | subshader string_const '{' subshader_statements '}'
  ;

subshader_statements:
    subshader_statement
  | subshader_statement subshader_statements
  ;

subshader_statement:
    global_declaration
  | UsePass string_const
  | pass string_const '{' pass_statements '}'
  | plain_statements
  ;

pass_statements:
    global_declaration
  | plain_statements
  | main_shader_assignment
  ;

main_shader_assignment:
    VertextShader '=' id ';'
  // Fixed: the '|' below was missing, which fused both assignments into one
  // production requiring vertex AND fragment assignment in sequence.
  | FragmentShader '=' id ';'
  ;

global_declaration_in_shader:
    // Engine type
    variable_declaration
  | render_state_assignment
  | render_state_declaration
  ;

global_declaration:
    // Engine type
    variable_declaration
  | render_queue_assignment
  | render_state_assignment
  | render_state_declaration
  | tag_specifier
  ;

tag_specifier:
    tags '{' tag_assignment_list '}'
  ;

tag_assignment_list:
    /** empty */
  | tag_assignment
  | tag_assignment_list ',' tag_assignment
  ;

tag_assignment:
    id '=' tag_value
  ;

tag_value:
    string_const
  | INT_CONSTANT
  | true
  | false
  ;

render_queue_assignment:
    render_queue_type '=' id ';'
  ;

variable_type:
    engine_type
  | render_state_prop_type
  | render_queue_type
  ;

render_state_assignment:
    render_state_declarator '=' id ';'
  ;

render_state_declaration:
    render_state_declarator id '{' render_state_prop_list '}'
  ;

variable_declaration:
    variable_type id ';'
  ;

render_state_declarator:
    blend_state
  | depth_state
  | stencil_state
  | raster_state
  ;

render_state_prop_list:
    render_state_prop_assignment
  | render_state_prop_assignment render_state_prop_list
  ;

render_state_prop_assignment:
    // Fixed: the alternatives below had no '|' separators, which fused all
    // seven forms into one meaningless sequential production; the Color_init
    // alternative also used bison's rule terminator `;` instead of the quoted
    // terminal `';'` used by every sibling.
    render_state_prop '=' id ';'
  | render_state_prop '=' true ';'
  | render_state_prop '=' false ';'
  | render_state_prop '=' INT_CONSTANT ';'
  | render_state_prop '=' FLOAT_CONSTANT ';'
  | render_state_prop '=' id '.' id ';'
  | render_state_prop '=' Color_init ';'
  ;

render_state_prop:
    render_state_prop_type '[' INT_CONSTANT ']'
  | render_state_prop_type
  ;
%%
|
||||
@@ -1,21 +0,0 @@
|
||||
import { IBaseSymbol } from "../common/IBaseSymbol";
|
||||
|
||||
export class ShaderSourceSymbol implements IBaseSymbol {
|
||||
public isInMacroBranch: boolean = false;
|
||||
|
||||
constructor(
|
||||
public ident: string,
|
||||
public type: number,
|
||||
public value?: any
|
||||
) {}
|
||||
|
||||
set(ident: string, type: number, value?: any): void {
|
||||
this.ident = ident;
|
||||
this.type = type;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
equal(other: ShaderSourceSymbol): boolean {
|
||||
return this.type === other.type;
|
||||
}
|
||||
}
|
||||
@@ -1,215 +0,0 @@
|
||||
import { Color } from "@galacean/engine";
|
||||
import { ETokenType, ShaderPosition, ShaderRange } from "../common";
|
||||
import { BaseLexer } from "../common/BaseLexer";
|
||||
import { BaseToken } from "../common/BaseToken";
|
||||
import { Keyword } from "../common/enums/Keyword";
|
||||
import { GSErrorName } from "../GSError";
|
||||
import { ShaderLab } from "../ShaderLab";
|
||||
import { ShaderLabUtils } from "../ShaderLabUtils";
|
||||
|
||||
/**
 * Lexer for the ShaderLab source format. Produces keyword, identifier, and
 * symbol tokens, and offers ad-hoc scanners for numbers and `Color(...)`
 * initializers used by render-state property values.
 */
export default class SourceLexer extends BaseLexer {
  // Set to true whenever the lexer skips over content it could not classify
  // as a valid token; consumers can inspect this to detect unparsed input.
  hasPendingContent = false;

  // Reserved lexemes mapped to their keyword token ids; any word not found
  // here is emitted as a plain identifier (ETokenType.ID) by _scanWord.
  private static _keywordLexemeTable = <Record<string, Keyword>>{
    RenderQueueType: Keyword.GSRenderQueueType,
    BlendState: Keyword.GSBlendState,
    DepthState: Keyword.GSDepthState,
    StencilState: Keyword.GSStencilState,
    RasterState: Keyword.GSRasterState,
    EditorProperties: Keyword.GSEditorProperties,
    EditorMacros: Keyword.GSEditorMacros,
    Editor: Keyword.GSEditor,
    Tags: Keyword.GSTags,
    VertexShader: Keyword.GSVertexShader,
    FragmentShader: Keyword.GSFragmentShader,
    SubShader: Keyword.GSSubShader,
    Pass: Keyword.GSPass,
    BlendFactor: Keyword.GSBlendFactor,
    BlendOperation: Keyword.GSBlendOperation,
    Bool: Keyword.GSBool,
    Number: Keyword.GSNumber,
    Color: Keyword.GSColor,
    CompareFunction: Keyword.GSCompareFunction,
    StencilOperation: Keyword.GSStencilOperation,
    CullMode: Keyword.GSCullMode,
    UsePass: Keyword.GSUsePass,

    // Boolean literals are tokenized as keywords, not identifiers.
    true: Keyword.True,
    false: Keyword.False
  };

  // Single-character punctuation recognized by scanToken.
  private static _symbolLexemeTable = <Record<string, Keyword>>{
    "{": Keyword.LeftBrace,
    "}": Keyword.RightBrace,
    "=": Keyword.Equal
  };

  // Characters (besides whitespace) that may legally border a word; used by
  // _validateWordBoundaries to reject words glued to other tokens.
  private static _isWordSeparatorChar(charCode: number): boolean {
    return (
      charCode === 123 || // {
      charCode === 125 || // }
      charCode === 61 || // =
      charCode === 59 || // ;
      charCode === 46 || // . CullMode.Back
      charCode === 91 || // [ Enabled[0],
      charCode === 40 // ( Color(1.0, 1.0, 1.0, 1.0);
    );
  }

  // Returns the index of the first non-digit character at or after
  // startIndex (equals startIndex when no digits are present).
  private static _scanDigits(source: string, startIndex: number): number {
    let currentIndex = startIndex;
    while (currentIndex < source.length) {
      const charCode = source.charCodeAt(currentIndex);
      if (BaseLexer.isDigit(charCode)) {
        currentIndex++;
      } else {
        break;
      }
    }
    return currentIndex;
  }

  /**
   * Scan an unsigned decimal number (`123` or `123.45`) at the current
   * position, advancing past it.
   *
   * NOTE(review): no sign or exponent handling is visible here — negative or
   * scientific-notation literals are not supported; returns NaN when no
   * digits are present (Number("") is 0, but an empty substring only occurs
   * when neither integer nor fraction digits matched — verify callers).
   */
  scanNumber(): number {
    this.skipCommentsAndSpace();
    const start = this._currentIndex;
    const source = this._source;
    let index = this._currentIndex;

    // Scan integer part
    index = SourceLexer._scanDigits(source, index);

    // Scan decimal part if present
    if (source[index] === ".") {
      index = SourceLexer._scanDigits(source, index + 1);
    }

    this.advance(index - this._currentIndex);
    return Number(source.substring(start, index));
  }

  /**
   * Scan a `( r, g, b, a )` color initializer (the `Color` word itself has
   * already been consumed). Omitted trailing components default to
   * g = 0, b = 0, a = 1.
   */
  scanColor(): Color {
    this.scanLexeme("(");

    let r = 0;
    let g = 0;
    let b = 0;
    let a = 1;

    // Each component is optional after the first: stop as soon as the next
    // significant character is the closing parenthesis.
    r = this.scanNumber();
    this.skipCommentsAndSpace();
    if (this.peek(1) !== ")") {
      this.scanLexeme(",");
      g = this.scanNumber();
      this.skipCommentsAndSpace();
      if (this.peek(1) !== ")") {
        this.scanLexeme(",");
        b = this.scanNumber();
        this.skipCommentsAndSpace();
        if (this.peek(1) !== ")") {
          this.scanLexeme(",");
          a = this.scanNumber();
          this.skipCommentsAndSpace();
        }
      }
    }

    this.scanLexeme(")");
    return new Color(r, g, b, a);
  }

  /**
   * Scan the next token, skipping comments, whitespace, and any content that
   * cannot be classified (such skips set `hasPendingContent`).
   *
   * NOTE(review): at end of input this executes a bare `return;`, yielding
   * `undefined` despite the declared `BaseToken` return type — callers must
   * null-check; consider annotating as `BaseToken | undefined` upstream.
   */
  override scanToken(): BaseToken {
    while (true) {
      this.skipCommentsAndSpace();

      if (this.isEnd()) {
        return;
      }

      const start = this.getShaderPosition(0);
      if (BaseLexer.isAlpha(this.getCurCharCode())) {
        const wordToken = this._scanWord(start);
        if (wordToken !== null) {
          return wordToken;
        }
        this.hasPendingContent = true;
        continue; // Continue loop to scan next token if word was invalid
      }

      const currentChar = this.getCurChar();
      const symbolKeyword = SourceLexer._symbolLexemeTable[currentChar];
      if (symbolKeyword !== undefined) {
        this.advance(1);
        const token = BaseToken.pool.get();
        token.set(symbolKeyword, currentChar, start);
        return token;
      }

      // Skip unrecognized character and continue
      this.advance(1);
    }
  }

  // #if _VERBOSE
  // Advance until `char` is found (or input ends), then consume one more
  // character so the target itself is skipped. NOTE(review): when `char` is
  // never found, the trailing advance(1) still runs past the end — presumably
  // benign; confirm against BaseLexer.advance.
  scanToCharacter(char: string): void {
    while (this.getCurChar() !== char && !this.isEnd()) {
      this.advance(1);
    }
    this.advance(1);
  }
  // #endif

  // Build a compilation GSError at the given location, defaulting to the
  // lexer's current position.
  createCompileError(message: string, location?: ShaderPosition | ShaderRange) {
    return ShaderLabUtils.createGSError(
      message,
      GSErrorName.CompilationError,
      this.source,
      location ?? this.getShaderPosition(0)
    );
  }

  // Scan one alphanumeric word starting at `start`. Returns a keyword or ID
  // token, or null when the word is glued to an invalid neighboring character
  // (the caller then flags pending content and keeps scanning).
  private _scanWord(start: ShaderPosition): BaseToken | null {
    // Scan the complete word first
    while (BaseLexer.isAlnum(this.getCurCharCode()) && !this.isEnd()) {
      this.advance(1);
    }
    const end = this.getShaderPosition(0);

    // Validate both boundaries in one optimized call
    if (!this._validateWordBoundaries(start.index, end.index)) {
      return null; // Invalid word due to boundary violation
    }

    const lexeme = this._source.substring(start.index, end.index);
    const tokenType = SourceLexer._keywordLexemeTable[lexeme] ?? ETokenType.ID;
    const range = ShaderLab.createRange(start, end);
    const token = BaseToken.pool.get();
    token.set(tokenType, lexeme, range);
    return token;
  }

  // A word is valid only when the characters immediately before and after it
  // are whitespace or recognized separators (start/end of input also counts).
  private _validateWordBoundaries(startIndex: number, endIndex: number): boolean {
    const source = this._source;

    // Check previous boundary
    if (startIndex > 0) {
      const prevCharCode = source.charCodeAt(startIndex - 1);
      if (!this._isValidWordBoundary(prevCharCode)) {
        return false;
      }
    }

    // Check next boundary
    if (endIndex < source.length) {
      const nextCharCode = source.charCodeAt(endIndex);
      if (!this._isValidWordBoundary(nextCharCode)) {
        return false;
      }
    }

    return true;
  }

  // Whitespace (including newlines) or a separator char is a legal boundary.
  private _isValidWordBoundary(charCode: number): boolean {
    return BaseLexer.isWhiteSpaceChar(charCode, true) || SourceLexer._isWordSeparatorChar(charCode);
  }
}
|
||||
@@ -1 +0,0 @@
|
||||
export { ShaderSourceParser } from "./ShaderSourceParser";
|
||||
@@ -1,25 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"declaration": true,
|
||||
"moduleResolution": "node",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"experimentalDecorators": true,
|
||||
"declarationDir": "types",
|
||||
"emitDeclarationOnly": true,
|
||||
"noImplicitOverride": true,
|
||||
"sourceMap": true,
|
||||
"incremental": false,
|
||||
"skipLibCheck": true,
|
||||
"stripInternal": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"ts-node": {
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"esModuleInterop": true
|
||||
},
|
||||
"files": true
|
||||
}
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"license": "MIT",
|
||||
"main": "../dist/main.verbose.js",
|
||||
"module": "../dist/module.verbose.js",
|
||||
"browser": "../dist/browser.verbose.min.js",
|
||||
"debug": "../src/index.ts",
|
||||
"types": "../types/index.d.ts",
|
||||
"umd": {
|
||||
"name": "Galacean.ShaderLab",
|
||||
"globals": {
|
||||
"@galacean/engine": "Galacean"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -24,8 +24,8 @@ const pkgs = fs
|
||||
};
|
||||
});
|
||||
|
||||
const shaderLabPkg = pkgs.find((item) => item.pkgJson.name === "@galacean/engine-shaderlab");
|
||||
pkgs.push({ ...shaderLabPkg, verboseMode: true });
|
||||
// const shaderLabPkg = pkgs.find((item) => item.pkgJson.name === "@galacean/engine-shaderlab");
|
||||
// pkgs.push({ ...shaderLabPkg, verboseMode: true });
|
||||
|
||||
// toGlobalName
|
||||
const extensions = [".js", ".jsx", ".ts", ".tsx"];
|
||||
@@ -170,6 +170,7 @@ switch (BUILD_TYPE) {
|
||||
|
||||
function getUMD() {
|
||||
const configs = pkgs.filter((pkg) => pkg.pkgJson.umd);
|
||||
console.log(configs);
|
||||
return configs
|
||||
.map((config) => makeRollupConfig({ ...config, type: "umd" }))
|
||||
.concat(
|
||||
|
||||
Reference in New Issue
Block a user