fermisurfer Git
修訂 | 6adc22a061bd7706669eb1f7e5a2c0bbbdc98f36 (tree) |
---|---|
時間 | 2020-11-10 17:32:38 |
作者 | Mitsuaki Kawamura <kawamitsuaki@gmai...> |
Committer | Mitsuaki Kawamura |
Backup. It works.
@@ -1,15 +1,19 @@ | ||
1 | 1 | <!DOCTYPE html> |
2 | 2 | <html> |
3 | - <head> | |
4 | - <title>FermiSurfer on Web</title> | |
5 | - </head> | |
3 | +<head> | |
4 | + <title>FermiSurfer on Web</title> | |
5 | + <script src="https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.8.1/gl-matrix-min.js" | |
6 | + integrity="sha512-zhHQR0/H5SEBL3Wn6yYSaTTZej12z0hVZKOv3TwCUXT1z5qeqGcXJLLrbERYRScEDDpYIJhPC1fk31gqR783iQ==" | |
7 | + crossorigin="anonymous" defer> | |
8 | + </script> | |
9 | + <script type="text/javascript" src="./fermisurfer.js" defer></script> | |
10 | +</head> | |
6 | 11 | <body> |
7 | - <script type="text/javascript" src="./fermisurfer.js"></script> | |
8 | 12 | File: <input type="file" id="inputfile" name="inputfile"> |
9 | 13 | <input type="button" value="read" onclick="read_file();"> |
10 | 14 | <table border="0"> |
11 | 15 | <tr align="left"> |
12 | - <td><canvas id="canvas" width="600" height="600" style="border:solid black 1px;"></canvas></td> | |
16 | + <td><canvas id="glcanvas" width="600" height="600" style="border:solid black 1px;"></canvas></td> | |
13 | 17 | <td> |
14 | 18 | <table border="0"> |
15 | 19 | <tr align="left"> |
@@ -157,141 +161,5 @@ | ||
157 | 161 | </table> |
158 | 162 | |
159 | 163 | <pre id="log" style="border: 1px solid #ccc; max-width: 80em; overflow: auto; max-height: 10em;"></pre> |
160 | - <script type="text/javascript"> | |
161 | - function startup() { | |
162 | - var el = document.getElementById("canvas"); | |
163 | - el.addEventListener("touchstart", handleStart, false); | |
164 | - el.addEventListener("touchend", handleEnd, false); | |
165 | - el.addEventListener("touchcancel", handleCancel, false); | |
166 | - el.addEventListener("touchmove", handleMove, false); | |
167 | - } | |
168 | - | |
169 | - document.addEventListener("DOMContentLoaded", startup); | |
170 | - var ongoingTouches = []; | |
171 | - | |
172 | - function handleStart(evt) { | |
173 | - evt.preventDefault(); | |
174 | - var el = document.getElementById("canvas"); | |
175 | - var ctx = el.getContext("2d"); | |
176 | - var touches = evt.changedTouches; | |
177 | - | |
178 | - for (var i = 0; i < touches.length; i++) { | |
179 | - ongoingTouches.push(copyTouch(touches[i])); | |
180 | - } | |
181 | - } | |
182 | - | |
183 | - function handleMove(evt) { | |
184 | - evt.preventDefault(); | |
185 | - var el = document.getElementById("canvas"); | |
186 | - var ctx = el.getContext("2d"); | |
187 | - var rect = el.getBoundingClientRect() | |
188 | - var touches = evt.changedTouches; | |
189 | - var linecolor = document.getElementById('linecolor').color.value; | |
190 | - | |
191 | - for (var i = 0; i < touches.length; i++) { | |
192 | - var idx = ongoingTouchIndexById(touches[i].identifier); | |
193 | - | |
194 | - if (idx == 0) { | |
195 | - ctx.beginPath(); | |
196 | - ctx.moveTo(ongoingTouches[idx].clientX - rect.left, ongoingTouches[idx].clientY - rect.top); | |
197 | - ctx.lineTo(touches[i].clientX - rect.left, touches[i].clientY - rect.top); | |
198 | - ctx.lineWidth = 4; | |
199 | - ctx.strokeStyle = linecolor; | |
200 | - ctx.stroke(); | |
201 | - | |
202 | - ongoingTouches.splice(idx, 1, copyTouch(touches[i])); // swap in the new touch record | |
203 | - } | |
204 | - } | |
205 | - } | |
206 | - function handleEnd(evt) { | |
207 | - evt.preventDefault(); | |
208 | - var el = document.getElementById("canvas"); | |
209 | - var ctx = el.getContext("2d"); | |
210 | - var touches = evt.changedTouches; | |
211 | - | |
212 | - //log("touchend"); | |
213 | - for (var i = 0; i < touches.length; i++) { | |
214 | - var idx = ongoingTouchIndexById(touches[i].identifier); | |
215 | - | |
216 | - if (idx >= 0) { | |
217 | - ongoingTouches.splice(idx, 1); // remove it; we're done | |
218 | - } | |
219 | - } | |
220 | - } | |
221 | - function handleCancel(evt) { | |
222 | - evt.preventDefault(); | |
223 | - var touches = evt.changedTouches; | |
224 | - | |
225 | - for (var i = 0; i < touches.length; i++) { | |
226 | - var idx = ongoingTouchIndexById(touches[i].identifier); | |
227 | - ongoingTouches.splice(idx, 1); // remove it; we're done | |
228 | - } | |
229 | - } | |
230 | - function copyTouch({ identifier, clientX, clientY }) { | |
231 | - return { identifier, clientX, clientY }; | |
232 | - } | |
233 | - | |
234 | - function ongoingTouchIndexById(idToFind) { | |
235 | - for (var i = 0; i < ongoingTouches.length; i++) { | |
236 | - var id = ongoingTouches[i].identifier; | |
237 | - | |
238 | - if (id == idToFind) { | |
239 | - return i; | |
240 | - } | |
241 | - } | |
242 | - return -1; // not found | |
243 | - } | |
244 | - function clearCanvas() { | |
245 | - var el = document.getElementById("canvas"); | |
246 | - var ctx = el.getContext("2d"); | |
247 | - | |
248 | - ctx.clearRect(0, 0, el.width, el.height); | |
249 | - } | |
250 | - function resizeCanvas() { | |
251 | - var el = document.getElementById("canvas"); | |
252 | - var width = document.getElementById('width').value; | |
253 | - var height = document.getElementById('height').value; | |
254 | - | |
255 | - el.setAttribute("width", width); | |
256 | - el.setAttribute("height", height); | |
257 | - } | |
258 | - function log(msg) { | |
259 | - var p = document.getElementById('log'); | |
260 | - p.innerHTML = msg + "\n" + p.innerHTML; | |
261 | - } | |
262 | - | |
263 | - function post() { | |
264 | - var fd = new FormData(); | |
265 | - | |
266 | - var submittype = document.getElementById('submittype').subtype.value; | |
267 | - fd.append('submittype', submittype); | |
268 | - | |
269 | - var name = document.getElementById('submitname').value; | |
270 | - fd.append('name', name); | |
271 | - | |
272 | - if (submittype == "text") { | |
273 | - var comment = document.getElementById('comment').value; | |
274 | - fd.append('comment', comment); | |
275 | - } | |
276 | - else if (submittype == "image" || submittype == "file") { | |
277 | - const file = document.getElementById("file").files[0]; | |
278 | - fd.append('avatar', file); | |
279 | - } | |
280 | - else if (submittype == "freehand") { | |
281 | - img_url = canvas.toDataURL("image/png").replace(new RegExp("data:image/png;base64,"), ""); | |
282 | - fd.append('comment', img_url); | |
283 | - } | |
284 | - | |
285 | - const param = { | |
286 | - method: "POST", | |
287 | - body: fd | |
288 | - } | |
289 | - fetch("./index.php", param).then((res) => { | |
290 | - if (res.ok) { | |
291 | - window.location.reload(); | |
292 | - } | |
293 | - }); | |
294 | - } | |
295 | - </script> | |
296 | 164 | </body> |
297 | 165 | </html> |
@@ -21,6 +21,456 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
21 | 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
22 | 22 | THE SOFTWARE. |
23 | 23 | */ |
24 | +let gl; //test | |
25 | +let programInfo; | |
26 | +let rotatex = 0.0, rotatey = 0.0; | |
27 | + | |
28 | +// | |
29 | +// Start here | |
30 | +// | |
31 | +function main() { | |
32 | + let canvas = document.querySelector('#glcanvas'); | |
33 | + | |
34 | + gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl'); | |
35 | + | |
36 | + // If we don't have a GL context, give up now | |
37 | + | |
38 | + if (!gl) { | |
39 | + alert('Unable to initialize WebGL. Your browser or machine may not support it.'); | |
40 | + return; | |
41 | + } | |
42 | + | |
43 | + // Vertex shader program | |
44 | + | |
45 | + let vsSource = ` | |
46 | + attribute vec4 aVertexPosition; | |
47 | + attribute vec3 aVertexNormal; | |
48 | + attribute vec4 aVertexColor; | |
49 | + | |
50 | + uniform mat4 uNormalMatrix; | |
51 | + uniform mat4 uModelViewMatrix; | |
52 | + uniform mat4 uProjectionMatrix; | |
53 | + | |
54 | + varying lowp vec4 vColor; | |
55 | + varying highp vec3 vLighting; | |
56 | + | |
57 | + void main(void) { | |
58 | + gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition; | |
59 | + vColor = aVertexColor; | |
60 | + | |
61 | + // Apply lighting effect | |
62 | + | |
63 | + highp vec3 ambientLight = vec3(0.6, 0.6, 0.6); | |
64 | + highp vec3 directionalLightColor = vec3(1, 1, 1); | |
65 | + highp vec3 directionalVector = normalize(vec3(0.8, 0.8, 0.8)); | |
66 | + | |
67 | + highp vec4 transformedNormal = uNormalMatrix * vec4(aVertexNormal, 1.0); | |
68 | + | |
69 | + highp float directional = max(dot(transformedNormal.xyz, directionalVector), 0.0); | |
70 | + vLighting = ambientLight + (directionalLightColor * directional); | |
71 | + } | |
72 | + `; | |
73 | + | |
74 | + // Fragment shader program | |
75 | + | |
76 | + let fsSource = ` | |
77 | + varying lowp vec4 vColor; | |
78 | + varying highp vec3 vLighting; | |
79 | + | |
80 | + void main(void) { | |
81 | + gl_FragColor = vec4(vColor.rgb * vLighting, vColor.a); | |
82 | + } | |
83 | + `; | |
84 | + | |
85 | + // Initialize a shader program; this is where all the lighting | |
86 | + // for the vertices and so forth is established. | |
87 | + let shaderProgram = initShaderProgram(gl, vsSource, fsSource); | |
88 | + | |
89 | + // Collect all the info needed to use the shader program. | |
90 | + // Look up which attributes our shader program is using | |
91 | + // for aVertexPosition, aVevrtexColor and also | |
92 | + // look up uniform locations. | |
93 | + programInfo = { | |
94 | + program: shaderProgram, | |
95 | + attribLocations: { | |
96 | + vertexPosition: gl.getAttribLocation(shaderProgram, 'aVertexPosition'), | |
97 | + vertexNormal: gl.getAttribLocation(shaderProgram, 'aVertexNormal'), | |
98 | + vertexColor: gl.getAttribLocation(shaderProgram, 'aVertexColor'), | |
99 | + }, | |
100 | + uniformLocations: { | |
101 | + projectionMatrix: gl.getUniformLocation(shaderProgram, 'uProjectionMatrix'), | |
102 | + modelViewMatrix: gl.getUniformLocation(shaderProgram, 'uModelViewMatrix'), | |
103 | + normalMatrix: gl.getUniformLocation(shaderProgram, 'uNormalMatrix'), | |
104 | + } | |
105 | + }; | |
106 | + | |
107 | + // Here's where we call the routine that builds all the | |
108 | + // objects we'll be drawing. | |
109 | + | |
110 | + drawScene(programInfo, 0.5, 1.8); | |
111 | + | |
112 | + var el = document.getElementById("glcanvas"); | |
113 | + el.addEventListener("touchstart", handleStart, false); | |
114 | + el.addEventListener("touchend", handleEnd, false); | |
115 | + el.addEventListener("touchcancel", handleCancel, false); | |
116 | + el.addEventListener("touchmove", handleMove, false); | |
117 | + el.addEventListener("mousemove", mouseMove, false); | |
118 | + | |
119 | +} | |
120 | + | |
121 | +// | |
122 | +// initBuffers | |
123 | +// | |
124 | +// Initialize the buffers we'll need. For this demo, we just | |
125 | +// have one object -- a simple three-dimensional cube. | |
126 | +// | |
127 | +function initBuffers() { | |
128 | + let itri = 0; | |
129 | + | |
130 | + // Create a buffer for the cube's vertex positions. | |
131 | + // Select the positionBuffer as the one to apply buffer | |
132 | + // operations to from here out. | |
133 | + // Now create an array of positions for the cube. | |
134 | + | |
135 | + let nkvp = ntri[0] * 3 * 3; | |
136 | + let positions = new Float32Array(nkvp); | |
137 | + icount = 0; | |
138 | + for (itri = 0; itri < ntri[0]; itri++) { | |
139 | + for (ii = 0; ii < 3; ii++) { | |
140 | + for (jj = 0; jj < 3; jj++) { | |
141 | + positions[icount] = kvp[0][itri][ii][jj]; | |
142 | + icount += 1; | |
143 | + } | |
144 | + } | |
145 | + } | |
146 | + | |
147 | + // Now pass the list of positions into WebGL to build the | |
148 | + // shape. We do this by creating a Float32Array from the | |
149 | + // JavaScript array, then use it to fill the current buffer. | |
150 | + | |
151 | + let positionBuffer = gl.createBuffer(); | |
152 | + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); | |
153 | + gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW); | |
154 | + | |
155 | + // Set up the normals for the vertices, so that we can compute lighting. | |
156 | + | |
157 | + let nnmlp = ntri[0] * 3 * 3; | |
158 | + let vertexNormals = new Float32Array(nnmlp); | |
159 | + icount = 0; | |
160 | + for (itri = 0; itri < ntri[0]; itri++) { | |
161 | + for (ii = 0; ii < 3; ii++) { | |
162 | + for (jj = 0; jj < 3; jj++) { | |
163 | + vertexNormals[icount] = nmlp[0][itri][ii][jj]; | |
164 | + icount += 1; | |
165 | + } | |
166 | + } | |
167 | + } | |
168 | + | |
169 | + let normalBuffer = gl.createBuffer(); | |
170 | + gl.bindBuffer(gl.ARRAY_BUFFER, normalBuffer); | |
171 | + gl.bufferData(gl.ARRAY_BUFFER, vertexNormals, gl.STATIC_DRAW); | |
172 | + | |
173 | + // Now set up the colors for the faces. We'll use solid colors | |
174 | + // for each face. | |
175 | + | |
176 | + let nclr = ntri[0] * 3 * 4; | |
177 | + let colors = new Float32Array(nclr); | |
178 | + icount = 0; | |
179 | + for (itri = 0; itri < ntri[0]; itri++) { | |
180 | + for (ii = 0; ii < 3; ii++) { | |
181 | + for (jj = 0; jj < 4; jj++) { | |
182 | + colors[icount] = clr[0][jj + 4 * ii + 12 * itri]; | |
183 | + icount += 1; | |
184 | + } | |
185 | + } | |
186 | + } | |
187 | + | |
188 | + let colorBuffer = gl.createBuffer(); | |
189 | + gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer); | |
190 | + gl.bufferData(gl.ARRAY_BUFFER, colors, gl.STATIC_DRAW); | |
191 | + | |
192 | + return { | |
193 | + position: positionBuffer, | |
194 | + normal: normalBuffer, | |
195 | + color: colorBuffer, | |
196 | + }; | |
197 | +} | |
198 | + | |
199 | +// | |
200 | +// Draw the scene. | |
201 | +// | |
202 | +function drawScene(programInfo, rotatex, rotatey) { | |
203 | + gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque | |
204 | + gl.clearDepth(1.0); // Clear everything | |
205 | + gl.enable(gl.DEPTH_TEST); // Enable depth testing | |
206 | + gl.depthFunc(gl.LEQUAL); // Near things obscure far things | |
207 | + | |
208 | + // Clear the canvas before we start drawing on it. | |
209 | + | |
210 | + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); | |
211 | + | |
212 | + // Create a perspective matrix, a special matrix that is | |
213 | + // used to simulate the distortion of perspective in a camera. | |
214 | + // Our field of view is 45 degrees, with a width/height | |
215 | + // ratio that matches the display size of the canvas | |
216 | + // and we only want to see objects between 0.1 units | |
217 | + // and 100 units away from the camera. | |
218 | + | |
219 | + let fieldOfView = 45 * Math.PI / 180; // in radians | |
220 | + let aspect = gl.canvas.clientWidth / gl.canvas.clientHeight; | |
221 | + let zNear = 0.1; | |
222 | + let zFar = 100.0; | |
223 | + let projectionMatrix = mat4.create(); | |
224 | + | |
225 | + // note: glmatrix.js always has the first argument | |
226 | + // as the destination to receive the result. | |
227 | + mat4.perspective(projectionMatrix, | |
228 | + fieldOfView, | |
229 | + aspect, | |
230 | + zNear, | |
231 | + zFar); | |
232 | + | |
233 | + // Set the drawing position to the "identity" point, which is | |
234 | + // the center of the scene. | |
235 | + let modelViewMatrix = mat4.create(); | |
236 | + | |
237 | + // Now move the drawing position a bit to where we want to | |
238 | + // start drawing the square. | |
239 | + | |
240 | + mat4.translate(modelViewMatrix, // destination matrix | |
241 | + modelViewMatrix, // matrix to translate | |
242 | + [-0.0, 0.0, -6.0]); // amount to translate | |
243 | + mat4.rotate(modelViewMatrix, // destination matrix | |
244 | + modelViewMatrix, // matrix to rotate | |
245 | + rotatey, // amount to rotate in radians | |
246 | + [0, 0, 1]); // axis to rotate around (Z) | |
247 | + mat4.rotate(modelViewMatrix, // destination matrix | |
248 | + modelViewMatrix, // matrix to rotate | |
249 | + rotatex * .7,// amount to rotate in radians | |
250 | + [0, 1, 0]); // axis to rotate around (X) | |
251 | + let normalMatrix = mat4.create(); | |
252 | + mat4.invert(normalMatrix, modelViewMatrix); | |
253 | + mat4.transpose(normalMatrix, normalMatrix); | |
254 | + | |
255 | + buffers = initBuffers(); | |
256 | + | |
257 | + // Tell WebGL how to pull out the positions from the position | |
258 | + // buffer into the vertexPosition attribute | |
259 | + { | |
260 | + let numComponents = 3; | |
261 | + let type = gl.FLOAT; | |
262 | + let normalize = false; | |
263 | + let stride = 0; | |
264 | + let offset = 0; | |
265 | + gl.bindBuffer(gl.ARRAY_BUFFER, buffers.position); | |
266 | + gl.vertexAttribPointer( | |
267 | + programInfo.attribLocations.vertexPosition, | |
268 | + numComponents, | |
269 | + type, | |
270 | + normalize, | |
271 | + stride, | |
272 | + offset); | |
273 | + gl.enableVertexAttribArray( | |
274 | + programInfo.attribLocations.vertexPosition); | |
275 | + } | |
276 | + | |
277 | + // Tell WebGL how to pull out the colors from the color buffer | |
278 | + // into the vertexColor attribute. | |
279 | + { | |
280 | + let numComponents = 4; | |
281 | + let type = gl.FLOAT; | |
282 | + let normalize = false; | |
283 | + let stride = 0; | |
284 | + let offset = 0; | |
285 | + gl.bindBuffer(gl.ARRAY_BUFFER, buffers.color); | |
286 | + gl.vertexAttribPointer( | |
287 | + programInfo.attribLocations.vertexColor, | |
288 | + numComponents, | |
289 | + type, | |
290 | + normalize, | |
291 | + stride, | |
292 | + offset); | |
293 | + gl.enableVertexAttribArray( | |
294 | + programInfo.attribLocations.vertexColor); | |
295 | + } | |
296 | + | |
297 | + // Tell WebGL how to pull out the normals from | |
298 | + // the normal buffer into the vertexNormal attribute. | |
299 | + { | |
300 | + let numComponents = 3; | |
301 | + let type = gl.FLOAT; | |
302 | + let normalize = false; | |
303 | + let stride = 0; | |
304 | + let offset = 0; | |
305 | + gl.bindBuffer(gl.ARRAY_BUFFER, buffers.normal); | |
306 | + gl.vertexAttribPointer( | |
307 | + programInfo.attribLocations.vertexNormal, | |
308 | + numComponents, | |
309 | + type, | |
310 | + normalize, | |
311 | + stride, | |
312 | + offset); | |
313 | + gl.enableVertexAttribArray( | |
314 | + programInfo.attribLocations.vertexNormal); | |
315 | + } | |
316 | + | |
317 | + // Tell WebGL to use our program when drawing | |
318 | + | |
319 | + gl.useProgram(programInfo.program); | |
320 | + | |
321 | + // Set the shader uniforms | |
322 | + | |
323 | + gl.uniformMatrix4fv( | |
324 | + programInfo.uniformLocations.projectionMatrix, | |
325 | + false, | |
326 | + projectionMatrix); | |
327 | + gl.uniformMatrix4fv( | |
328 | + programInfo.uniformLocations.modelViewMatrix, | |
329 | + false, | |
330 | + modelViewMatrix); | |
331 | + gl.uniformMatrix4fv( | |
332 | + programInfo.uniformLocations.normalMatrix, | |
333 | + false, | |
334 | + normalMatrix); | |
335 | + | |
336 | + { | |
337 | + let vertexCount = ntri[0]*3; | |
338 | + let offset = 0; | |
339 | + gl.drawArrays(gl.TRIANGLE_STRIP, offset, vertexCount); | |
340 | + } | |
341 | +} | |
342 | + | |
343 | +// | |
344 | +// Initialize a shader program, so WebGL knows how to draw our data | |
345 | +// | |
346 | +function initShaderProgram(gl, vsSource, fsSource) { | |
347 | + let vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource); | |
348 | + let fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource); | |
349 | + | |
350 | + // Create the shader program | |
351 | + | |
352 | + let shaderProgram = gl.createProgram(); | |
353 | + gl.attachShader(shaderProgram, vertexShader); | |
354 | + gl.attachShader(shaderProgram, fragmentShader); | |
355 | + gl.linkProgram(shaderProgram); | |
356 | + | |
357 | + // If creating the shader program failed, alert | |
358 | + | |
359 | + if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) { | |
360 | + alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram)); | |
361 | + return null; | |
362 | + } | |
363 | + | |
364 | + return shaderProgram; | |
365 | +} | |
366 | + | |
367 | +// | |
368 | +// creates a shader of the given type, uploads the source and | |
369 | +// compiles it. | |
370 | +// | |
371 | +function loadShader(gl, type, source) { | |
372 | + let shader = gl.createShader(type); | |
373 | + | |
374 | + // Send the source to the shader object | |
375 | + | |
376 | + gl.shaderSource(shader, source); | |
377 | + | |
378 | + // Compile the shader program | |
379 | + | |
380 | + gl.compileShader(shader); | |
381 | + | |
382 | + // See if it compiled successfully | |
383 | + | |
384 | + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { | |
385 | + alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader)); | |
386 | + gl.deleteShader(shader); | |
387 | + return null; | |
388 | + } | |
389 | + | |
390 | + return shader; | |
391 | +} | |
392 | + | |
393 | +var ongoingTouches = []; | |
394 | + | |
395 | +function handleStart(evt) { | |
396 | + evt.preventDefault(); | |
397 | + var touches = evt.changedTouches; | |
398 | + | |
399 | + for (var i = 0; i < touches.length; i++) { | |
400 | + ongoingTouches.push(copyTouch(touches[i])); | |
401 | + } | |
402 | +} | |
403 | + | |
404 | +function handleMove(evt) { | |
405 | + evt.preventDefault(); | |
406 | + var touches = evt.changedTouches; | |
407 | + | |
408 | + for (var i = 0; i < touches.length; i++) { | |
409 | + var idx = ongoingTouchIndexById(touches[i].identifier); | |
410 | + | |
411 | + if (idx == 0) { | |
412 | + rotatex += -0.01 * (touches[i].clientX - ongoingTouches[idx].clientX); | |
413 | + rotatey += 0.01 * (touches[i].clientY - ongoingTouches[idx].clientY); | |
414 | + drawScene(programInfo, rotatex, rotatey); | |
415 | + ongoingTouches.splice(idx, 1, copyTouch(touches[i])); // swap in the new touch record | |
416 | + } | |
417 | + } | |
418 | +} | |
419 | +function handleEnd(evt) { | |
420 | + evt.preventDefault(); | |
421 | + var touches = evt.changedTouches; | |
422 | + | |
423 | + //log("touchend"); | |
424 | + for (var i = 0; i < touches.length; i++) { | |
425 | + var idx = ongoingTouchIndexById(touches[i].identifier); | |
426 | + | |
427 | + if (idx >= 0) { | |
428 | + ongoingTouches.splice(idx, 1); // remove it; we're done | |
429 | + } | |
430 | + } | |
431 | +} | |
432 | +function handleCancel(evt) { | |
433 | + evt.preventDefault(); | |
434 | + var touches = evt.changedTouches; | |
435 | + | |
436 | + for (var i = 0; i < touches.length; i++) { | |
437 | + var idx = ongoingTouchIndexById(touches[i].identifier); | |
438 | + ongoingTouches.splice(idx, 1); // remove it; we're done | |
439 | + } | |
440 | +} | |
441 | +function copyTouch({ identifier, clientX, clientY }) { | |
442 | + return { identifier, clientX, clientY }; | |
443 | +} | |
444 | + | |
445 | +function ongoingTouchIndexById(idToFind) { | |
446 | + for (var i = 0; i < ongoingTouches.length; i++) { | |
447 | + var id = ongoingTouches[i].identifier; | |
448 | + | |
449 | + if (id == idToFind) { | |
450 | + return i; | |
451 | + } | |
452 | + } | |
453 | + return -1; // not found | |
454 | +} | |
455 | + | |
456 | +function mouseMove(evt) { | |
457 | + rotatex += -0.01 * evt.movementX; | |
458 | + rotatey += 0.01 * evt.movementY; | |
459 | + drawScene(programInfo, rotatex, rotatey); | |
460 | +} | |
461 | + | |
462 | + | |
463 | + | |
464 | + | |
465 | + | |
466 | + | |
467 | + | |
468 | + | |
469 | + | |
470 | + | |
471 | + | |
472 | + | |
473 | + | |
24 | 474 | function test() { |
25 | 475 | terminal("test"); |
26 | 476 | } |
@@ -3041,6 +3491,9 @@ function read_file() | ||
3041 | 3491 | max_and_min_bz(); |
3042 | 3492 | // |
3043 | 3493 | compute_patch_segment(); |
3494 | + | |
3495 | + main(); | |
3496 | + | |
3044 | 3497 | }; |
3045 | 3498 | reader.onerror = function () { |
3046 | 3499 | terminal("File can not be loaded."); |