[WebGL] Three.js - sound test example with Three.js. Three.js sound example

by AlrepondTech 2018. 1. 10. 15:42

 

 

 

=================================

=================================

=================================

 

 

 

 

Source: https://threejs.org/docs/#api/audio/Audio

 

Audio

Create a non-positional ( global ) audio object.

This uses the Web Audio API.

Example

// create an AudioListener and add it to the camera
var listener = new THREE.AudioListener();
camera.add( listener );

// create a global audio source
var sound = new THREE.Audio( listener );

// load a sound and set it as the Audio object's buffer
var audioLoader = new THREE.AudioLoader();
audioLoader.load( 'sounds/ambient.ogg', function( buffer ) {
	sound.setBuffer( buffer );
	sound.setLoop( true );
	sound.setVolume( 0.5 );
	sound.play();
});

Constructor

Audio( listener )

listener — (required) AudioListener instance.

Properties

.autoplay

Whether to start playback automatically. Default is false.

.context

The AudioContext of the listener given in the constructor.

.filters

Array of filters applied to the audio. Default is an empty array.

.gain

GainNode created using AudioContext.createGain().

.hasPlaybackControl

Whether playback can be controlled using the play(), pause() etc. methods. Default is true.

.playbackRate

Speed of playback. Default is 1.

.isPlaying

Whether the audio is currently playing.

.startTime

The time at which the sound should begin to play. Same as the when parameter of AudioBufferSourceNode.start(). Default is 0.

.offset

An offset to the time within the audio buffer that playback should begin. Same as the offset parameter of AudioBufferSourceNode.start(). Default is 0.

.source

An AudioBufferSourceNode created using AudioContext.createBufferSource().

.sourceType

Type of the audio source. Default is string 'empty'.

.type

String denoting the type, set to 'Audio'.
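
Below is a minimal sketch, not part of the original docs, showing how a few of these properties might be set before playback. It assumes a 'listener' (THREE.AudioListener) and a sound file path from your own project.

var sound = new THREE.Audio( listener );
sound.autoplay = false;      // do not start playback automatically
sound.playbackRate = 1.25;   // play 25% faster than normal speed
sound.offset = 2;            // begin 2 seconds into the buffer

var audioLoader = new THREE.AudioLoader();
audioLoader.load( 'sounds/ambient.ogg', function ( buffer ) {
	sound.setBuffer( buffer );
	sound.play();                                     // sourceType is now 'buffer'
	console.log( sound.sourceType, sound.isPlaying ); // 'buffer' true
} );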

Methods

.connect ()

Connect to the Audio.source. This is used internally on initialisation and when setting / removing filters.

.disconnect ()

Disconnect from the Audio.source. This is used internally when setting / removing filters.

.getFilter ()

Returns the first element of the filters array.

.getFilters ()

Returns the filters array.

.getLoop ()

Return the value of source.loop (whether playback should loop).

.getOutput ()

Return the gainNode.

.getPlaybackRate ()

Return the value of playbackRate.

.getVolume ()

Return the current volume.

.play ()

If hasPlaybackControl is true, starts playback.

.pause ()

If hasPlaybackControl is true, pauses playback.

.onEnded ()

Called automatically when playback has finished. Sets isPlaying to false.

.setBuffer ( audioBuffer )

Setup the source to the audioBuffer, and sets sourceType to 'buffer'.
If autoplay, also starts playback.

.setFilter ( filter )

Add the filter to the filters array.

.setFilters ( value )

value - an array of filters.
Set the filters array to value.
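
As a sketch (assuming a 'sound' created with THREE.Audio as in the example above), a Web Audio BiquadFilterNode can be created from the Audio's own context and applied with setFilter():

var lowpass = sound.context.createBiquadFilter();
lowpass.type = 'lowpass';          // attenuate frequencies above the cutoff
lowpass.frequency.value = 800;     // cutoff in Hz

sound.setFilter( lowpass );        // filters array is now [ lowpass ]
console.log( sound.getFilter() === lowpass ); // true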

.setLoop ( value )

Set source.loop to value (whether playback should loop).

.setNodeSource ( audioNode )

Sets the source to the given audioNode, and sets sourceType to 'audioNode'.
Also sets hasPlaybackControl to false.
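
For example (a minimal sketch, assuming 'listener' is the AudioListener added to the camera), any Web Audio node such as an OscillatorNode can drive the Audio instead of a buffer:

var oscillator = listener.context.createOscillator();
oscillator.type = 'sine';
oscillator.frequency.value = 440;  // A4
oscillator.start( 0 );

var beep = new THREE.Audio( listener );
beep.setNodeSource( oscillator );  // sourceType becomes 'audioNode',
                                   // hasPlaybackControl becomes false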

.setPlaybackRate ( value )

If hasPlaybackControl is enabled, set the playbackRate to value.

.setVolume ( value )

Set the volume.

.stop ()

If hasPlaybackControl is enabled, stops playback, resets startTime to 0 and sets isPlaying to false.
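
Putting the playback-control methods together, a minimal sketch (assuming 'sound' already has a buffer set via setBuffer() and hasPlaybackControl is true, the default):

sound.setLoop( true );
sound.setVolume( 0.5 );
sound.setPlaybackRate( 0.8 );

sound.play();   // starts playback, isPlaying becomes true
sound.pause();  // pauses playback
sound.play();   // starts playback again
sound.stop();   // stops playback, resets startTime to 0, isPlaying becomes false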

Source

src/audio/Audio.js

 

 

 

 

=================================

=================================

=================================

 

 

 

Source: http://free-tutorials.org/webgl-sound-test-example-with-three-js/

 

 

This is the result of my example: a WebGL sound test.

And this is the source code:

<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js misc - sound</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
background-color: #000000;
margin: 0px;
overflow: hidden;
font-family:Monospace;
font-size:13px;
text-align:center;
font-weight: bold;
text-align:center;
}
</style>
</head>
<body>
<div id="container"></div>
<script src="../js/three.min.js"></script>
<script src="../js/controls/FirstPersonControls.js"></script>
<script src="../js/Detector.js"></script>
<script>

if ( ! Detector.webgl ) Detector.addGetWebGLMessage();

var container;
var camera, controls, scene, renderer;
var light, pointLight;

var mesh;
var material_sphere1, material_sphere2;

var clock = new THREE.Clock();

init();
animate();

function init() {

container = document.getElementById( 'container' );

camera = new THREE.PerspectiveCamera( 50, window.innerWidth / window.innerHeight, 1, 10000 );
camera.position.set( 0, 25, 0 );

var listener = new THREE.AudioListener();
camera.add( listener );

controls = new THREE.FirstPersonControls( camera );

controls.movementSpeed = 70;
controls.lookSpeed = 0.05;
controls.noFly = true;
controls.lookVertical = false;

scene = new THREE.Scene();
scene.fog = new THREE.FogExp2( 0x000000, 0.0035 );

light = new THREE.DirectionalLight( 0xffffff );
light.position.set( 0, 0.5, 1 ).normalize();
scene.add( light );

var sound1 = new THREE.Audio( listener );
sound1.load( '../sounds/sound.wav' );
sound1.setRefDistance( 20 );
sound1.autoplay = true;
scene.add( sound1 );

var helper = new THREE.GridHelper( 500, 10 );
helper.color1.setHex( 0x444444 );
helper.color2.setHex( 0x444444 );
helper.position.y = 0.1;
scene.add( helper );

renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.innerHTML = "";
container.appendChild( renderer.domElement );
window.addEventListener( ‘resize’, onWindowResize, false );

}

function onWindowResize() {

camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
controls.handleResize();
}

function animate() {
requestAnimationFrame( animate );
render();
}

function render() {

var delta = clock.getDelta(),
time = clock.getElapsedTime() * 5;
controls.update( delta );
renderer.render( scene, camera );
}
</script>
</body>
</html>
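
Note that this example targets the older three.js audio API, where THREE.Audio itself exposed load() and setRefDistance(). In more recent releases loading is done through THREE.AudioLoader and setRefDistance() lives on THREE.PositionalAudio, as in the documentation section above. A minimal sketch of the same loading step against the newer API (the file path is kept from the example and is an assumption about your project layout):

var sound1 = new THREE.PositionalAudio( listener );
var audioLoader = new THREE.AudioLoader();
audioLoader.load( '../sounds/sound.wav', function ( buffer ) {
	sound1.setBuffer( buffer );
	sound1.setRefDistance( 20 );
	sound1.setLoop( true );
	sound1.play();
} );
scene.add( sound1 );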

 

 

 

=================================

=================================

=================================

 

 
