Combining Three.js with AI to Generate a 3D Scene

With ongoing improvements in WebGL performance and hardware, 3D scene generation built on Three.js and AI will find increasingly broad application.

  • A recommended site full of free HD images: a free, royalty-free stock library with more than 2.8 million high-quality images and videos available for free use and download - Pixabay
  • Bring your images to life: LeiaPix Converter lets you convert any image you like into a stunning depth animation.
The demo below is a project built with Vue 3 + Vite. Project scaffolding won't be covered in detail here; see the Vite docs, or use the quick sketch that follows.
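For reference, a minimal scaffold might look like this (a hedged sketch: the project name three-depth-demo is just a placeholder and npm is assumed; any package manager works):

npm create vite@latest three-depth-demo -- --template vue
cd three-depth-demo
npm install three
npm run dev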
Now, straight to the code:
<script setup>
import * as THREE from "three";
// Scene
const scene = new THREE.Scene();
// Perspective camera
const camera = new THREE.PerspectiveCamera(
  90,
  window.innerWidth / window.innerHeight,
  0.1,
  1000
);
camera.position.set(0, 0, 5.5);
// Renderer
let renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
// Load the textures
const textureLoader = new THREE.TextureLoader();

const texture = textureLoader.load("/assets/kobe.jpg"); // the source image
const depthTexture = textureLoader.load("/assets/kobe_depth.jpg"); // its depth map, generated beforehand (e.g. with LeiaPix Converter)

// Plane that will display the image
const geometry = new THREE.PlaneGeometry(19.2, 12);
// const material = new THREE.MeshBasicMaterial({ map: texture }); // plain textured material, no depth effect
// Mouse position in normalized device coordinates
const mouse = new THREE.Vector2();

// Shader material: offsets the texture lookup by the depth map to create a parallax effect
const material = new THREE.ShaderMaterial({
  uniforms: {
    uTime: { value: 0 },
    uTexture: { value: texture },
    uDepthTexture: { value: depthTexture },
    uMouse: { value: mouse },
  },
  vertexShader: `
    varying vec2 vUv;
    void main() {
      vUv = uv;
      gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
    }
  `,
  fragmentShader: `
    uniform sampler2D uTexture;
    uniform sampler2D uDepthTexture;
    uniform vec2 uMouse;
    varying vec2 vUv;
    uniform float uTime;
    void main() {
      // Per-fragment depth sampled from the depth map
      float depthValue = texture2D(uDepthTexture, vUv).r;
      // Shift the texture lookup by the mouse position plus a slow time-based sway,
      // scaled by depth so that pixels with larger depth values move more -- a simple parallax illusion
      float x = vUv.x + (uMouse.x + sin(uTime)) * 0.01 * depthValue;
      float y = vUv.y + (uMouse.y + cos(uTime)) * 0.01 * depthValue;
      vec4 newColor = texture2D(uTexture, vec2(x, y));
      gl_FragColor = newColor;
    }
  `,
});

const plane = new THREE.Mesh(geometry, material);
scene.add(plane);

// Render loop
requestAnimationFrame(function animate() {
  material.uniforms.uMouse.value = mouse;
  material.uniforms.uTime.value = performance.now() / 1000;
  requestAnimationFrame(animate);
  renderer.render(scene, camera);
});
// Register the resize handler once, outside the render loop
window.addEventListener("resize", onWindowResize);
function onWindowResize() {
  camera.aspect = window.innerWidth / window.innerHeight;
  camera.updateProjectionMatrix();
  renderer.setSize(window.innerWidth, window.innerHeight);
}
window.addEventListener("mousemove", (event) => {
  mouse.x = (event.clientX / window.innerWidth) * 2 - 1;
  mouse.y = -(event.clientY / window.innerHeight) * 2 + 1;
});
</script>

<template>
  <div></div>
</template>

<style>
* {
  margin: 0;
  padding: 0;
  box-sizing: border-box;
}

canvas {
  width: 100vw;
  height: 100vh;
  display: block;
  position: fixed;
  top: 0;
  left: 0;
}
</style>
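A note on assets and running the demo, as a hedged sketch (the directory layout below is an assumption based on the texture paths in the code, not something spelled out above): Vite serves files in the public/ directory at the site root, so /assets/kobe.jpg would correspond to public/assets/kobe.jpg.

public/
  assets/
    kobe.jpg        # source image (e.g. downloaded from Pixabay)
    kobe_depth.jpg  # its depth map (e.g. generated with LeiaPix Converter)

Then start the dev server with npm run dev and the full-screen parallax image should appear, swaying with the mouse.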



Reposted from blog.csdn.net/weixin_58359043/article/details/130183411