Best JavaScript code snippet using wpt
render_pass_descriptor.spec.ts
Source: render_pass_descriptor.spec.ts
...`;
import { TestGroup } from '../../../framework/index.js';
import { ValidationTest } from './validation_test.js';

class F extends ValidationTest {
  createTexture(
    options: {
      format?: GPUTextureFormat;
      width?: number;
      height?: number;
      arrayLayerCount?: number;
      mipLevelCount?: number;
      sampleCount?: number;
      usage?: GPUTextureUsageFlags;
    } = {}
  ): GPUTexture {
    const {
      format = 'rgba8unorm',
      width = 16,
      height = 16,
      arrayLayerCount = 1,
      mipLevelCount = 1,
      sampleCount = 1,
      usage = GPUTextureUsage.OUTPUT_ATTACHMENT,
    } = options;
    return this.device.createTexture({
      size: { width, height, depth: 1 },
      format,
      arrayLayerCount,
      mipLevelCount,
      sampleCount,
      usage,
    });
  }

  getColorAttachment(
    texture: GPUTexture,
    textureViewDescriptor?: GPUTextureViewDescriptor
  ): GPURenderPassColorAttachmentDescriptor {
    const attachment = texture.createView(textureViewDescriptor);
    return {
      attachment,
      loadValue: { r: 1.0, g: 0.0, b: 0.0, a: 1.0 },
    };
  }

  getDepthStencilAttachment(
    texture: GPUTexture,
    textureViewDescriptor?: GPUTextureViewDescriptor
  ): GPURenderPassDepthStencilAttachmentDescriptor {
    const attachment = texture.createView(textureViewDescriptor);
    return {
      attachment,
      depthLoadValue: 1.0,
      depthStoreOp: 'store',
      stencilLoadValue: 0,
      stencilStoreOp: 'store',
    };
  }

  async tryRenderPass(success: boolean, descriptor: GPURenderPassDescriptor): Promise<void> {
    const commandEncoder = this.device.createCommandEncoder();
    const renderPass = commandEncoder.beginRenderPass(descriptor);
    renderPass.endPass();
    this.expectValidationError(() => {
      commandEncoder.finish();
    }, !success);
  }
}

export const g = new TestGroup(F);

g.test('a render pass with only one color is ok', t => {
  const colorTexture = t.createTexture({ format: 'rgba8unorm' });
  const descriptor = {
    colorAttachments: [t.getColorAttachment(colorTexture)],
  };
  t.tryRenderPass(true, descriptor);
});

g.test('a render pass with only one depth attachment is ok', t => {
  const depthStencilTexture = t.createTexture({ format: 'depth24plus-stencil8' });
  const descriptor = {
    colorAttachments: [],
    depthStencilAttachment: t.getDepthStencilAttachment(depthStencilTexture),
  };
  t.tryRenderPass(true, descriptor);
});

g.test('OOB color attachment indices are handled', async t => {
  const { colorAttachmentsCount, _success } = t.params;
  const colorAttachments = [];
  for (let i = 0; i < colorAttachmentsCount; i++) {
    const colorTexture = t.createTexture();
    colorAttachments.push(t.getColorAttachment(colorTexture));
  }
  await t.tryRenderPass(_success, { colorAttachments });
}).params([
  { colorAttachmentsCount: 4, _success: true }, // Control case
  { colorAttachmentsCount: 5, _success: false }, // Out of bounds
]);

g.test('attachments must have the same size', async t => {
  const colorTexture1x1A = t.createTexture({ width: 1, height: 1, format: 'rgba8unorm' });
  const colorTexture1x1B = t.createTexture({ width: 1, height: 1, format: 'rgba8unorm' });
  const colorTexture2x2 = t.createTexture({ width: 2, height: 2, format: 'rgba8unorm' });
  const depthStencilTexture1x1 = t.createTexture({
    width: 1,
    height: 1,
    format: 'depth24plus-stencil8',
  });
  const depthStencilTexture2x2 = t.createTexture({
    width: 2,
    height: 2,
    format: 'depth24plus-stencil8',
  });
  {
    // Control case: all the same size (1x1)
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [
        t.getColorAttachment(colorTexture1x1A),
        t.getColorAttachment(colorTexture1x1B),
      ],
      depthStencilAttachment: t.getDepthStencilAttachment(depthStencilTexture1x1),
    };
    t.tryRenderPass(true, descriptor);
  }
  {
    // One of the color attachments has a different size
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [
        t.getColorAttachment(colorTexture1x1A),
        t.getColorAttachment(colorTexture2x2),
      ],
    };
    await t.tryRenderPass(false, descriptor);
  }
  {
    // The depth stencil attachment has a different size
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [
        t.getColorAttachment(colorTexture1x1A),
        t.getColorAttachment(colorTexture1x1B),
      ],
      depthStencilAttachment: t.getDepthStencilAttachment(depthStencilTexture2x2),
    };
    await t.tryRenderPass(false, descriptor);
  }
});

g.test('attachments must match whether they are used for color or depth stencil', async t => {
  const colorTexture = t.createTexture({ format: 'rgba8unorm' });
  const depthStencilTexture = t.createTexture({ format: 'depth24plus-stencil8' });
  {
    // Using depth-stencil for color
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [t.getColorAttachment(depthStencilTexture)],
    };
    await t.tryRenderPass(false, descriptor);
  }
  {
    // Using color for depth-stencil
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [],
      depthStencilAttachment: t.getDepthStencilAttachment(colorTexture),
    };
    await t.tryRenderPass(false, descriptor);
  }
});

g.test('check layer count for color or depth stencil', async t => {
  const { arrayLayerCount, baseArrayLayer, _success } = t.params;
  const ARRAY_LAYER_COUNT = 10;
  const MIP_LEVEL_COUNT = 1;
  const COLOR_FORMAT = 'rgba8unorm';
  const DEPTH_STENCIL_FORMAT = 'depth24plus-stencil8';
  const colorTexture = t.createTexture({
    format: COLOR_FORMAT,
    width: 32,
    height: 32,
    mipLevelCount: MIP_LEVEL_COUNT,
    arrayLayerCount: ARRAY_LAYER_COUNT,
  });
  const depthStencilTexture = t.createTexture({
    format: DEPTH_STENCIL_FORMAT,
    width: 32,
    height: 32,
    mipLevelCount: MIP_LEVEL_COUNT,
    arrayLayerCount: ARRAY_LAYER_COUNT,
  });
  const baseTextureViewDescriptor: GPUTextureViewDescriptor = {
    dimension: '2d-array',
    baseArrayLayer,
    arrayLayerCount,
    baseMipLevel: 0,
    mipLevelCount: MIP_LEVEL_COUNT,
  };
  {
    // Check 2D array texture view for color
    const textureViewDescriptor: GPUTextureViewDescriptor = {
      ...baseTextureViewDescriptor,
      format: COLOR_FORMAT,
    };
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [t.getColorAttachment(colorTexture, textureViewDescriptor)],
    };
    await t.tryRenderPass(_success, descriptor);
  }
  {
    // Check 2D array texture view for depth stencil
    const textureViewDescriptor: GPUTextureViewDescriptor = {
      ...baseTextureViewDescriptor,
      format: DEPTH_STENCIL_FORMAT,
    };
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [],
      depthStencilAttachment: t.getDepthStencilAttachment(
        depthStencilTexture,
        textureViewDescriptor
      ),
    };
    await t.tryRenderPass(_success, descriptor);
  }
}).params([
  { arrayLayerCount: 5, baseArrayLayer: 0, _success: false }, // using 2D array texture view with arrayLayerCount > 1 is not allowed
  { arrayLayerCount: 1, baseArrayLayer: 0, _success: true }, // using 2D array texture view that covers the first layer of the texture is OK
  { arrayLayerCount: 1, baseArrayLayer: 9, _success: true }, // using 2D array texture view that covers the last layer is OK for depth stencil
]);

g.test('check mip level count for color or depth stencil', async t => {
  const { mipLevelCount, baseMipLevel, _success } = t.params;
  const ARRAY_LAYER_COUNT = 1;
  const MIP_LEVEL_COUNT = 4;
  const COLOR_FORMAT = 'rgba8unorm';
  const DEPTH_STENCIL_FORMAT = 'depth24plus-stencil8';
  const colorTexture = t.createTexture({
    format: COLOR_FORMAT,
    width: 32,
    height: 32,
    mipLevelCount: MIP_LEVEL_COUNT,
    arrayLayerCount: ARRAY_LAYER_COUNT,
  });
  const depthStencilTexture = t.createTexture({
    format: DEPTH_STENCIL_FORMAT,
    width: 32,
    height: 32,
    mipLevelCount: MIP_LEVEL_COUNT,
    arrayLayerCount: ARRAY_LAYER_COUNT,
  });
  const baseTextureViewDescriptor: GPUTextureViewDescriptor = {
    dimension: '2d',
    baseArrayLayer: 0,
    arrayLayerCount: ARRAY_LAYER_COUNT,
    baseMipLevel,
    mipLevelCount,
  };
  {
    // Check 2D texture view for color
    const textureViewDescriptor: GPUTextureViewDescriptor = {
      ...baseTextureViewDescriptor,
      format: COLOR_FORMAT,
    };
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [t.getColorAttachment(colorTexture, textureViewDescriptor)],
    };
    await t.tryRenderPass(_success, descriptor);
  }
  {
    // Check 2D texture view for depth stencil
    const textureViewDescriptor: GPUTextureViewDescriptor = {
      ...baseTextureViewDescriptor,
      format: DEPTH_STENCIL_FORMAT,
    };
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [],
      depthStencilAttachment: t.getDepthStencilAttachment(
        depthStencilTexture,
        textureViewDescriptor
      ),
    };
    await t.tryRenderPass(_success, descriptor);
  }
}).params([
  { mipLevelCount: 2, baseMipLevel: 0, _success: false }, // using 2D texture view with mipLevelCount > 1 is not allowed
  { mipLevelCount: 1, baseMipLevel: 0, _success: true }, // using 2D texture view that covers the first level of the texture is OK
  { mipLevelCount: 1, baseMipLevel: 3, _success: true }, // using 2D texture view that covers the last level of the texture is OK
]);

g.test('it is invalid to set resolve target if color attachment is non multisampled', async t => {
  const colorTexture = t.createTexture({ sampleCount: 1 });
  const resolveTargetTexture = t.createTexture({ sampleCount: 1 });
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [
      {
        attachment: colorTexture.createView(),
        resolveTarget: resolveTargetTexture.createView(),
        loadValue: { r: 1.0, g: 0.0, b: 0.0, a: 1.0 },
      },
    ],
  };
  await t.tryRenderPass(false, descriptor);
});

g.test('check the use of multisampled textures as color attachments', async t => {
  const colorTexture = t.createTexture({ sampleCount: 1 });
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  {
    // It is allowed to use a multisampled color attachment without setting resolve target
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [t.getColorAttachment(multisampledColorTexture)],
    };
    t.tryRenderPass(true, descriptor);
  }
  {
    // It is not allowed to use multiple color attachments with different sample counts
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [
        t.getColorAttachment(colorTexture),
        t.getColorAttachment(multisampledColorTexture),
      ],
    };
    await t.tryRenderPass(false, descriptor);
  }
});

g.test('it is invalid to use a multisampled resolve target', async t => {
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  const multisampledResolveTargetTexture = t.createTexture({ sampleCount: 4 });
  const colorAttachment = t.getColorAttachment(multisampledColorTexture);
  colorAttachment.resolveTarget = multisampledResolveTargetTexture.createView();
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [colorAttachment],
  };
  await t.tryRenderPass(false, descriptor);
});

g.test('it is invalid to use a resolve target with array layer count greater than 1', async t => {
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  const resolveTargetTexture = t.createTexture({ arrayLayerCount: 2 });
  const colorAttachment = t.getColorAttachment(multisampledColorTexture);
  colorAttachment.resolveTarget = resolveTargetTexture.createView();
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [colorAttachment],
  };
  await t.tryRenderPass(false, descriptor);
});

g.test('it is invalid to use a resolve target with mipmap level count greater than 1', async t => {
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  const resolveTargetTexture = t.createTexture({ mipLevelCount: 2 });
  const colorAttachment = t.getColorAttachment(multisampledColorTexture);
  colorAttachment.resolveTarget = resolveTargetTexture.createView();
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [colorAttachment],
  };
  await t.tryRenderPass(false, descriptor);
});

g.test('it is invalid to use a resolve target whose usage is not output attachment', async t => {
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  const resolveTargetTexture = t.createTexture({
    usage: GPUTextureUsage.COPY_SRC | GPUTextureUsage.COPY_DST,
  });
  const colorAttachment = t.getColorAttachment(multisampledColorTexture);
  colorAttachment.resolveTarget = resolveTargetTexture.createView();
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [colorAttachment],
  };
  await t.tryRenderPass(false, descriptor);
});

g.test('it is invalid to use a resolve target in error state', async t => {
  const ARRAY_LAYER_COUNT = 1;
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  const resolveTargetTexture = t.createTexture({ arrayLayerCount: ARRAY_LAYER_COUNT });
  const colorAttachment = t.getColorAttachment(multisampledColorTexture);
  t.expectValidationError(() => {
    colorAttachment.resolveTarget = resolveTargetTexture.createView({
      dimension: '2d',
      format: 'rgba8unorm',
      baseArrayLayer: ARRAY_LAYER_COUNT + 1,
    });
  });
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [colorAttachment],
  };
  await t.tryRenderPass(false, descriptor);
});

g.test('use of multisampled attachment and non multisampled resolve target is allowed', async t => {
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  const resolveTargetTexture = t.createTexture({ sampleCount: 1 });
  const colorAttachment = t.getColorAttachment(multisampledColorTexture);
  colorAttachment.resolveTarget = resolveTargetTexture.createView();
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [colorAttachment],
  };
  t.tryRenderPass(true, descriptor);
});

g.test('use a resolve target in a format different than the attachment is not allowed', async t => {
  const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
  const resolveTargetTexture = t.createTexture({ format: 'bgra8unorm' });
  const colorAttachment = t.getColorAttachment(multisampledColorTexture);
  colorAttachment.resolveTarget = resolveTargetTexture.createView();
  const descriptor: GPURenderPassDescriptor = {
    colorAttachments: [colorAttachment],
  };
  await t.tryRenderPass(false, descriptor);
});

g.test('size of the resolve target must be the same as the color attachment', async t => {
  const size = 16;
  const multisampledColorTexture = t.createTexture({ width: size, height: size, sampleCount: 4 });
  const resolveTargetTexture = t.createTexture({
    width: size * 2,
    height: size * 2,
    mipLevelCount: 2,
  });
  {
    const resolveTargetTextureView = resolveTargetTexture.createView({
      baseMipLevel: 0,
      mipLevelCount: 1,
    });
    const colorAttachment = t.getColorAttachment(multisampledColorTexture);
    colorAttachment.resolveTarget = resolveTargetTextureView;
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [colorAttachment],
    };
    await t.tryRenderPass(false, descriptor);
  }
  {
    const resolveTargetTextureView = resolveTargetTexture.createView({ baseMipLevel: 1 });
    const colorAttachment = t.getColorAttachment(multisampledColorTexture);
    colorAttachment.resolveTarget = resolveTargetTextureView;
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [colorAttachment],
    };
    t.tryRenderPass(true, descriptor);
  }
});

g.test('check depth stencil attachment sample counts mismatch', async t => {
  const multisampledDepthStencilTexture = t.createTexture({
    sampleCount: 4,
    format: 'depth24plus-stencil8',
  });
  {
    // It is not allowed to use a depth stencil attachment whose sample count is different from the
    // one of the color attachment
    const depthStencilTexture = t.createTexture({
      sampleCount: 1,
      format: 'depth24plus-stencil8',
    });
    const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [t.getColorAttachment(multisampledColorTexture)],
      depthStencilAttachment: t.getDepthStencilAttachment(depthStencilTexture),
    };
    await t.tryRenderPass(false, descriptor);
  }
  {
    const colorTexture = t.createTexture({ sampleCount: 1 });
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [t.getColorAttachment(colorTexture)],
      depthStencilAttachment: t.getDepthStencilAttachment(multisampledDepthStencilTexture),
    };
    await t.tryRenderPass(false, descriptor);
  }
  {
    // It is allowed to use a multisampled depth stencil attachment whose sample count is equal to
    // the one of the color attachment.
    const multisampledColorTexture = t.createTexture({ sampleCount: 4 });
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [t.getColorAttachment(multisampledColorTexture)],
      depthStencilAttachment: t.getDepthStencilAttachment(multisampledDepthStencilTexture),
    };
    t.tryRenderPass(true, descriptor);
  }
  {
    // It is allowed to use a multisampled depth stencil attachment with no color attachment
    const descriptor: GPURenderPassDescriptor = {
      colorAttachments: [],
      depthStencilAttachment: t.getDepthStencilAttachment(multisampledDepthStencilTexture),
    };
    t.tryRenderPass(true, descriptor);
  }
...
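The tryRenderPass helper above delegates the actual check to the framework's expectValidationError. As a rough illustration only, here is a minimal sketch of the same kind of check written directly against WebGPU error scopes, using the same endPass()-era API names as the snippet; `device` and `descriptor` are assumed to exist, and the function name is made up for this example.

// Sketch: verify that encoding a render pass with `descriptor` is (in)valid.
// Mirrors the snippet's API names; not the test framework's actual helper.
async function expectRenderPassValidity(device, descriptor, shouldSucceed) {
  device.pushErrorScope('validation');
  const commandEncoder = device.createCommandEncoder();
  const renderPass = commandEncoder.beginRenderPass(descriptor);
  renderPass.endPass();
  commandEncoder.finish();
  const error = await device.popErrorScope(); // null if no validation error occurred
  if (shouldSucceed === (error !== null)) {
    throw new Error('validation result did not match expectation: ' + (error && error.message));
  }
}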
init.js
Source: init.js
...
const u_viewInverseLoc = gl.getUniformLocation(program, "u_viewInverse");
const positionLoc = gl.getAttribLocation(program, "a_position");
const normalLoc = gl.getAttribLocation(program, "a_normal");
const texcoordLoc = gl.getAttribLocation(program, "a_texcoord");
const red = twgl.createTexture(gl, { src: 'assets/c_red.png' });
const green = twgl.createTexture(gl, { src: 'assets/c_green.png' });
const blue = twgl.createTexture(gl, { src: 'assets/c_blue.png' });
const white = twgl.createTexture(gl, { src: 'assets/c_white.png' });
const black = twgl.createTexture(gl, { src: 'assets/c_black.png' });
const gray = twgl.createTexture(gl, { src: 'assets/c_gray.png' });
const aqua = twgl.createTexture(gl, { src: 'assets/c_aqua.png' });
const yellow = twgl.createTexture(gl, { src: 'assets/c_yellow.png' });
const purple = twgl.createTexture(gl, { src: 'assets/c_purple.png' });
const sand = twgl.createTexture(gl, { src: 'assets/c_sand.png' });
const cinder = twgl.createTexture(gl, { src: 'assets/c_cinder.png' });
const sky = twgl.createTexture(gl, { src: 'assets/c_sky.png' });
const diceWhiteTex = twgl.createTexture(gl, { src: 'assets/diceWhite.jpg' });
const diceBlueTex = twgl.createTexture(gl, { src: 'assets/diceBlue.png' });
const check16Tex = twgl.createTexture(gl, { src: 'assets/check16.png' });
const check32Tex = twgl.createTexture(gl, { src: 'assets/check32.png' });
const check64Tex = twgl.createTexture(gl, { src: 'assets/check64.png' });
const earthTex = twgl.createTexture(gl, { src: 'assets/objects/earth/earth.png' });
const trainTex = twgl.createTexture(gl, { src: 'assets/objects/train/electrictrain.png', flipY: 1 });
const stationTex = twgl.createTexture(gl, { src: 'assets/objects/railway_station/railway_station.png', flipY: 1 });
const rocketTex = twgl.createTexture(gl, { src: 'assets/objects/rocket/rocket.jpg', flipY: 1 });
const barn_backTex = twgl.createTexture(gl, { src: 'assets/objects/barn/back.png' });
const planksTex = twgl.createTexture(gl, { src: 'assets/planks.jpg' });
const woodTex = twgl.createTexture(gl, { src: 'assets/wood.jpg' });
const barnTex = twgl.createTexture(gl, { src: 'assets/barn.jpg' });
const brickTex = twgl.createTexture(gl, { src: 'assets/brick.jpg' });
const clockTex = twgl.createTexture(gl, { src: 'assets/clock.jpg', flipY: 1, minLod: 1, maxLod: 1 });
const railTex = twgl.createTexture(gl, { src: 'assets/rail.png' });
const trainRoofTex = twgl.createTexture(gl, { src: 'assets/trainroof.png' });
const grassTex = twgl.createTexture(gl, { src: 'assets/grass.jpg' });
const rustTex = twgl.createTexture(gl, { src: 'assets/rust.jpg' });
const mirrorTex = twgl.createTexture(gl, { src: 'assets/mirror.png' });
const concreteTex = twgl.createTexture(gl, { src: 'assets/concrete.jpg' });
const shinglesTex = twgl.createTexture(gl, { src: 'assets/shingles.jpg' });
const roofing_metalTex = twgl.createTexture(gl, { src: 'assets/roofing_metal.jpg' });
const twin_diagramTex = twgl.createTexture(gl, { src: 'assets/twinDiagram.png', minLod: 1, maxLod: 1 });
const controlsTex = twgl.createTexture(gl, { src: 'assets/controls.png', minLod: 1, maxLod: 1 });
const gate_bronzeRoofTex = twgl.createTexture(gl, { src: 'assets/objects/gate/BronzeRoof1.jpg', flipY: 1 });
const gate_bronzeStatuesTex = twgl.createTexture(gl, { src: 'assets/objects/gate/BronzeStatues1.jpg', flipY: 1 });
const gate_columnsTex = twgl.createTexture(gl, { src: 'assets/objects/gate/Columns1.jpg', flipY: 1 });
const gate_darkerWallTex = twgl.createTexture(gl, { src: 'assets/objects/gate/DarkerWall1.jpg', flipY: 1 });
const gate_fregioTex = twgl.createTexture(gl, { src: 'assets/objects/gate/Fregio1.jpg', flipY: 1 });
const gate_marblecColumnsTex = twgl.createTexture(gl, { src: 'assets/objects/gate/MarbleColumns2.jpg', flipY: 1 });
const gate_sideGliphTex = twgl.createTexture(gl, { src: 'assets/objects/gate/SideGliph1.jpg', flipY: 1 });
const gate_templeFrontTex = twgl.createTexture(gl, { src: 'assets/objects/gate/TempleFront1.jpg', flipY: 1 });
const gate_wallTex = twgl.createTexture(gl, { src: 'assets/objects/gate/WhiteWall1.jpg', flipY: 1 });
const gate_sideBuildingTex = twgl.createTexture(gl, { src: 'assets/objects/gate/WindowsBuilding1.jpg', flipY: 1 });
let pitch = 0;
let yaw = 0;
document.querySelector("#c").addEventListener("mousemove", e => {
  const mouseSensitivity = 0.003;
  yaw += e.movementX * mouseSensitivity;
  pitch -= e.movementY * mouseSensitivity;
});
document.addEventListener('pointerlockchange', (e) => {
  if (document.pointerLockElement == null) {
    showMenu();
  }
}, false);
let etherTime = 0;
let prevTime = 0;
...
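Each of the twgl.createTexture calls above returns immediately with a placeholder texture and loads the image asynchronously. A small sketch, assuming twgl's optional load callback as a third argument, shows one way to know when a texture is actually ready; the asset path is reused from the listing above.

// Sketch only: assumes twgl.createTexture(gl, options, callback) invokes the
// callback once the image has loaded (or failed).
const redWhenLoaded = twgl.createTexture(gl, { src: 'assets/c_red.png' }, (err, tex, img) => {
  if (err) {
    console.error('texture failed to load', err);
    return;
  }
  // The placeholder pixel has now been replaced by the real image data.
  console.log('loaded c_red.png:', img.width, 'x', img.height);
});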
factory.js
Source: factory.js
/****************************************************************************
 Copyright (c) 2020 Xiamen Yaji Software Co., Ltd.
 https://www.cocos.com/
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated engine source code (the "Software"), a limited,
 worldwide, royalty-free, non-assignable, revocable and non-exclusive license
 to use Cocos Creator solely to develop games on your target platforms. You shall
 not use Cocos Creator software for developing other software or tools that's
 used for developing games. You are not granted to publish, distribute,
 sublicense, and/or sell copies of Cocos Creator.
 The software or tools in this License Agreement are licensed, not sold.
 Xiamen Yaji Software Co., Ltd. reserves all rights not expressly granted to you.
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 ****************************************************************************/
const Bundle = require('./bundle');
const Cache = require('./cache');
const { assets, bundles } = require('./shared');
const _creating = new Cache();

function createTexture (id, data, options, onComplete) {
    let out = null, err = null;
    try {
        out = new cc.Texture2D();
        out._nativeUrl = id;
        out._nativeAsset = data;
    }
    catch (e) {
        err = e;
    }
    onComplete && onComplete(err, out);
}

function createAudioClip (id, data, options, onComplete) {
    let out = new cc.AudioClip();
    out._nativeUrl = id;
    out._nativeAsset = data;
    out.duration = data.duration;
    onComplete && onComplete(null, out);
}

function createVideoClip (id, data, options, onComplete) {
    let out = new cc.VideoClip();
    out._nativeUrl = id;
    out._nativeAsset = data;
    onComplete && onComplete(null, out);
}

function createJsonAsset (id, data, options, onComplete) {
    let out = new cc.JsonAsset();
    out.json = data;
    onComplete && onComplete(null, out);
}

function createTextAsset (id, data, options, onComplete) {
    let out = new cc.TextAsset();
    out.text = data;
    onComplete && onComplete(null, out);
}

function createFont (id, data, options, onComplete) {
    let out = new cc.TTFFont();
    out._nativeUrl = id;
    out._nativeAsset = data;
    onComplete && onComplete(null, out);
}

function createBufferAsset (id, data, options, onComplete) {
    let out = new cc.BufferAsset();
    out._nativeUrl = id;
    out._nativeAsset = data;
    onComplete && onComplete(null, out);
}

function createAsset (id, data, options, onComplete) {
    let out = new cc.Asset();
    out._nativeUrl = id;
    out._nativeAsset = data;
    onComplete && onComplete(null, out);
}

function createBundle (id, data, options, onComplete) {
    let bundle = bundles.get(data.name);
    if (!bundle) {
        bundle = new Bundle();
        data.base = data.base || id + '/';
        bundle.init(data);
    }
    onComplete && onComplete(null, bundle);
}

const factory = {
    register (type, handler) {
        if (typeof type === 'object') {
            cc.js.mixin(producers, type);
        }
        else {
            producers[type] = handler;
        }
    },

    create (id, data, type, options, onComplete) {
        var func = producers[type] || producers['default'];
        let asset, creating;
        if (asset = assets.get(id)) {
            onComplete(null, asset);
        }
        else if (creating = _creating.get(id)) {
            creating.push(onComplete);
        }
        else {
            _creating.add(id, [onComplete]);
            func(id, data, options, function (err, data) {
                if (!err && data instanceof cc.Asset) {
                    data._uuid = id;
                    assets.add(id, data);
                }
                let callbacks = _creating.remove(id);
                for (let i = 0, l = callbacks.length; i < l; i++) {
                    callbacks[i](err, data);
                }
            });
        }
    }
};

const producers = {
    // Images
    '.png' : createTexture,
    '.jpg' : createTexture,
    '.bmp' : createTexture,
    '.jpeg' : createTexture,
    '.gif' : createTexture,
    '.ico' : createTexture,
    '.tiff' : createTexture,
    '.webp' : createTexture,
    '.image' : createTexture,
    '.pvr': createTexture,
    '.pkm': createTexture,
    // Audio
    '.mp3' : createAudioClip,
    '.ogg' : createAudioClip,
    '.wav' : createAudioClip,
    '.m4a' : createAudioClip,
    // Video
    '.mp4' : createVideoClip,
    '.avi' : createVideoClip,
    '.mov' : createVideoClip,
    '.mpg' : createVideoClip,
    '.mpeg': createVideoClip,
    '.rm' : createVideoClip,
    '.rmvb': createVideoClip,
    // Txt
    '.txt' : createTextAsset,
    '.xml' : createTextAsset,
    '.vsh' : createTextAsset,
    '.fsh' : createTextAsset,
    '.atlas' : createTextAsset,
    '.tmx' : createTextAsset,
    '.tsx' : createTextAsset,
    '.fnt' : createTextAsset,
    '.json' : createJsonAsset,
    '.ExportJson' : createJsonAsset,
    // font
    '.font' : createFont,
    '.eot' : createFont,
    '.ttf' : createFont,
    '.woff' : createFont,
    '.svg' : createFont,
    '.ttc' : createFont,
    // Binary
    '.binary': createBufferAsset,
    '.bin': createBufferAsset,
    '.dbbin': createBufferAsset,
    '.skel': createBufferAsset,
    'bundle': createBundle,
    'default': createAsset
};
...
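Callers drive this module through factory.register and factory.create. A minimal sketch of that flow follows; the '.dds' extension, the asset id, and the rawDdsData variable are made-up placeholders for illustration, not part of the file above.

// Sketch: plug a hypothetical '.dds' handler into the producers table, then
// route a load through factory.create so the result lands in the shared cache.
factory.register('.dds', function (id, data, options, onComplete) {
    const out = new cc.Asset();
    out._nativeUrl = id;
    out._nativeAsset = data;
    onComplete && onComplete(null, out);
});

factory.create('textures/ground.dds', rawDdsData, '.dds', {}, function (err, asset) {
    if (err) { console.error(err); return; }
    console.log('created', asset._uuid); // create() sets _uuid to the id before caching
});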
Using AI Code Generation
function createTextureFromImage(gl, image) {
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  return texture;
}

function createTextureFromCanvas(gl, canvas) {
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  return texture;
}

function createTextureFromVideo(gl, video) {
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  return texture;
}
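createTextureFromImage expects a fully decoded image, so a caller would normally gate the call on the image's load event. A short usage sketch; the canvas selector and image URL are assumptions for illustration.

// Sketch: create the texture only once the image has finished loading.
const gl = document.querySelector('canvas').getContext('webgl');
const image = new Image();
image.src = 'textures/diffuse.png'; // hypothetical path
image.onload = function () {
  const texture = createTextureFromImage(gl, image);
  gl.bindTexture(gl.TEXTURE_2D, texture);
  // texture is now ready to be sampled by a shader
};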
Using AI Code Generation
var wpt = require('wpt');
var texture = wpt.createTexture(1, 1);
texture.setPixel(0, 0, 255, 255, 255, 255);
texture.save('test.png');

### wpt.createTexture(width, height)
### texture.setPixel(x, y, r, g, b, a)
### texture.save(filename)
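Building on the three calls documented above (taking the snippet's description of the wpt module at face value), a slightly larger sketch fills a whole texture before saving it; the file name is arbitrary.

// Sketch: write a 2x2 black-and-white checkerboard using the API shown above.
var wpt = require('wpt');
var checker = wpt.createTexture(2, 2);
checker.setPixel(0, 0, 0, 0, 0, 255);       // top-left: black, opaque
checker.setPixel(1, 1, 0, 0, 0, 255);       // bottom-right: black, opaque
checker.setPixel(1, 0, 255, 255, 255, 255); // top-right: white, opaque
checker.setPixel(0, 1, 255, 255, 255, 255); // bottom-left: white, opaque
checker.save('checker.png');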
Using AI Code Generation
var wptoolkit = require('./wptoolkit.js');
var fs = require('fs');
var options = {
  // fileName, width and height are used below; these values are assumed for the
  // example and match the 100x100 RGBA buffer.
  'fileName' : 'test.dds',
  'width' : 100,
  'height' : 100,
  'data' : new Buffer(100*100*4)
};
var texture = wptoolkit.createTexture(options);
fs.writeFile(options.fileName, texture, function(err) {
  if(err) {
    console.log(err);
  } else {
    console.log("The file was saved!");
  }
});

exports.createTexture = function(options) {
  var header = createHeader(options);
  var data = createData(options);
  return Buffer.concat([header, data]);
}

function createHeader(options) {
  var header = new Buffer(12);
  header.write('DDS ', 0);
  header.writeUInt32LE(124, 4);
  header.writeUInt32LE(0x00001007, 8);
  return header;
}

function createData(options) {
  var data = new Buffer(32);
  data.writeUInt32LE(options.width, 0);
  data.writeUInt32LE(options.height, 4);
  data.writeUInt32LE(options.data.length, 8);
  data.writeUInt32LE(0x00000000, 12);
  data.writeUInt32LE(0x00000000, 16);
  data.writeUInt32LE(0x00000000, 20);
  data.writeUInt32LE(0x00000000, 24);
  data.writeUInt32LE(0x00000000, 28);
  return data;
}
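The output is a 12-byte header followed by the 32-byte block written by createData, so the fields can be checked by reading them back at the same offsets. A small sketch, reusing the 'test.dds' file name assumed in the example above:

// Sketch: read back the fields written by createHeader/createData.
var fs = require('fs');
var buf = fs.readFileSync('test.dds');
console.log(buf.toString('ascii', 0, 4)); // 'DDS '
console.log(buf.readUInt32LE(4));         // 124
console.log(buf.readUInt32LE(12));        // width  (12-byte header + offset 0)
console.log(buf.readUInt32LE(16));        // height (12-byte header + offset 4)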
Using AI Code Generation
var wptool = require('wptool');
var path = require('path');
var imagePath = path.join(__dirname, 'images', 'image.jpg');
wptool.createTexture(imagePath, function(err, texture) {
  if(err) {
    console.log('Error: ' + err);
  } else {
    console.log('Texture: ' + texture);
  }
});

var wptool = require('wptool');
var path = require('path');
var imagePath = path.join(__dirname, 'images', 'image.jpg');
wptool.createTexture(imagePath, function(err, texture) {
  if(err) {
    console.log('Error: ' + err);
  } else {
    console.log('Texture: ' + texture);
    wptool.setTexture(texture, 1, function(err) {
      if(err) {
        console.log('Error: ' + err);
      } else {
        console.log('Texture set successfully');
      }
    });
  }
});
Using AI Code Generation
var texture = new WpTexture();
texture.createTexture('images/texture.png', function(texture) {
});

var texture = new WpTexture();
texture.getTexture('images/texture.png', function(texture) {
});

var texture = new WpTexture();
texture.clearTexture('images/texture.png');

var texturePool = new WpTexturePool();
texturePool.createTexture('images/texture.png', function(texture) {
});

var texturePool = new WpTexturePool();
texturePool.getTexture('images/texture.png', function(texture) {
});

var texturePool = new WpTexturePool();
texturePool.clearTexture('images/texture.png');

var textureLoader = new WpTextureLoader();
textureLoader.loadTexture('images/texture.png', function(texture) {
});
Using AI Code Generation
var texture = new WPTexture();
texture.createTexture("img/texture.jpg", "texture1", "png");

WPTexture.prototype.createTexture = function (file, name, type) {
  var _this = this;
  var texture = new THREE.Texture();
  var loader = new THREE.ImageLoader();
  loader.load(file, function (image) {
    texture.image = image;
    texture.needsUpdate = true;
  });
  this.texture[name] = texture;
}
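Because THREE.ImageLoader fills the texture in asynchronously, the stored texture can be attached to a material right away and will update once the image arrives. A short usage sketch follows; it assumes the WPTexture constructor initializes its texture map and that an existing THREE.Scene named scene is available, neither of which is shown in the snippet.

// Sketch: use the texture created by WPTexture.createTexture on a THREE.js mesh.
var wpTexture = new WPTexture();
wpTexture.createTexture("img/texture.jpg", "texture1", "png");
var material = new THREE.MeshBasicMaterial({ map: wpTexture.texture["texture1"] });
var mesh = new THREE.Mesh(new THREE.BoxGeometry(1, 1, 1), material);
scene.add(mesh); // assumes an existing THREE.Scene named `scene`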