MySafeCode committed on
Commit
e937288
·
verified ·
1 Parent(s): b427e53

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +314 -0
app.py ADDED
@@ -0,0 +1,314 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Single-file Gradio app: .vox + .gltf/.glb viewer using a WebGL (three.js) embedded viewer.
3
+
4
+ Run: python gradio_vox_gltf_viewer.py
5
+
6
+ Dependencies:
7
+ pip install gradio numpy pyvox
8
+
9
+ Notes:
10
+ - .vox files are parsed server-side with pyvox and serialized to JSON (positions + palette indices/colors).
11
+ - The HTML viewer (three.js) receives the JSON payload and constructs an InstancedMesh of cubes for fast rendering.
12
+ - .gltf/.glb files are passed through to the client and loaded with GLTFLoader.
13
+ - This is a minimal starter; performance and features (lighting, orbit controls options, palette editing, LOD) can be added.
14
+ """
15
+
16
import base64
import io
import json
import os
import tempfile
from pathlib import Path
from typing import Tuple

import gradio as gr
import numpy as np
from pyvox.parser import VoxParser
26
+
27
+
28
def parse_vox_to_json(file) -> dict:
    """Parse a MagicaVoxel .vox file and return a JSON-serializable dict.

    The first model in the file is flattened into two parallel flat lists:
      - ``positions``: [x0, y0, z0, x1, y1, z1, ...]
      - ``colors``:    [r0, g0, b0, a0, ...] with 0-255 components

    plus a ``bbox`` entry with the model size — compact enough to ship to
    the client, where an InstancedMesh is reconstructed.

    Accepts a filesystem path (str), an object whose ``.name`` is a valid
    path (e.g. a gradio upload), or any file-like object with ``.read()``.

    Raises whatever pyvox raises on malformed input, and IndexError if the
    file contains no models.
    """
    # VoxParser wants a real filename, so prefer an on-disk path when one
    # is available; otherwise spool the bytes to a unique temp file.  A
    # unique tempfile (instead of a fixed "_tmp_upload.vox") keeps
    # concurrent uploads from clobbering each other.
    if isinstance(file, str) and os.path.exists(file):
        vox = VoxParser(file).parse()
    elif hasattr(file, "name") and os.path.exists(file.name):
        vox = VoxParser(file.name).parse()
    else:
        data = file.read()
        with tempfile.NamedTemporaryFile(suffix=".vox", delete=False) as tmp:
            tmp.write(data)
            tmp_path = Path(tmp.name)
        try:
            vox = VoxParser(str(tmp_path)).parse()
        finally:
            # Best-effort cleanup; without the finally the temp file leaked
            # whenever parsing raised.
            try:
                tmp_path.unlink()
            except OSError:
                pass

    model = vox.models[0]
    voxels = model.voxels  # list of Voxel(x, y, z, c)

    # Palette availability differs across pyvox versions.
    try:
        palette = vox.palette
    except Exception:
        palette = None

    positions = []
    colors = []
    for v in voxels:
        positions.extend([int(v.x), int(v.y), int(v.z)])
        # MagicaVoxel color indices are 1-based.
        ci = int(v.c) - 1
        colors.extend(_palette_rgba(palette, ci))

    bbox = {
        "size_x": int(model.size.x),
        "size_y": int(model.size.y),
        "size_z": int(model.size.z),
    }

    return {"positions": positions, "colors": colors, "bbox": bbox}


def _palette_rgba(palette, ci) -> list:
    """Resolve palette entry *ci* to an ``[r, g, b, a]`` list of 0-255 ints.

    Palette entries may be packed ints or RGBA tuples depending on the
    pyvox version; with no usable palette we fall back to a deterministic
    hash color so distinct indices remain distinguishable.
    """
    if palette is not None and 0 <= ci < len(palette):
        col = palette[ci]
        if isinstance(col, int):
            # Assumes 0xAARRGGBB packing — TODO confirm against the pyvox
            # version actually in use.
            a = (col >> 24) & 0xFF
            r = (col >> 16) & 0xFF
            g = (col >> 8) & 0xFF
            b = col & 0xFF
            return [r, g, b, a]
        # Tuple/list entry; alpha defaults to opaque when absent.
        if len(col) >= 4:
            r, g, b, a = col[:4]
        else:
            r, g, b = col[:3]
            a = 255
        return [int(r), int(g), int(b), int(a)]
    # Fallback: deterministic pseudo-color from the index (Knuth
    # multiplicative hash).
    h = (ci * 2654435761) & 0xFFFFFFFF
    return [(h >> 16) & 0xFF, (h >> 8) & 0xFF, h & 0xFF, 255]
104
+
105
+
106
def file_to_dataurl(file) -> Tuple[str, str]:
    """Return ``(data_url, mime)`` for an uploaded glTF/GLB file.

    The content is base64-encoded into a ``data:`` URL for the client-side
    GLTFLoader.  Accepts a file-like object, raw ``bytes``, or a ``str``
    (treated as content, encoded UTF-8, if it is not a readable handle).
    """
    name = getattr(file, "name", None)

    # Prefer reading bytes from disk when the upload has a real path:
    # gradio upload objects can be text-mode or already-closed handles,
    # and reading binary .glb through them corrupts the payload.
    if name is not None and os.path.exists(name):
        content = Path(name).read_bytes()
    elif hasattr(file, "read"):
        content = file.read()
    else:
        content = file
    if isinstance(content, str):
        content = content.encode("utf-8")

    # Sniff the MIME type from the original filename's extension.
    ext = Path(name).suffix.lower() if name else ".bin"
    if ext in (".gltf", ".json"):
        mime = "model/gltf+json"
    elif ext == ".glb":
        mime = "model/gltf-binary"
    else:
        # Unknown extension: generic binary.
        mime = "application/octet-stream"

    b64 = base64.b64encode(content).decode("ascii")
    data_url = f"data:{mime};base64,{b64}"
    return data_url, mime
133
+
134
+
135
# HTML viewer template (three.js ES modules). The Python helpers prepend a
# <script> that sets window.PAYLOAD to either a voxel JSON payload or a
# glTF data URL; the module script below reads it and builds the scene.
HTML_TEMPLATE = r"""
<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<title>VOX / glTF Viewer</title>
<style>
html,body { height: 100%; margin: 0; }
#viewer { width: 100%; height: 640px; display: block; }
</style>
<!-- Map the bare 'three' specifier so the examples/jsm addons (which
     import from 'three') resolve when loaded straight off the CDN. -->
<script type="importmap">
{
  "imports": {
    "three": "https://unpkg.com/three@0.152.2/build/three.module.js",
    "three/addons/": "https://unpkg.com/three@0.152.2/examples/jsm/"
  }
}
</script>
</head>
<body>
<div id="viewer"></div>

<script type="module">
import * as THREE from 'three';
import { OrbitControls } from 'three/addons/controls/OrbitControls.js';
import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js';

const container = document.getElementById('viewer');
const width = container.clientWidth;
const height = container.clientHeight;

const scene = new THREE.Scene();
scene.background = new THREE.Color(0x111111);

const camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 2000);
camera.position.set(100, 100, 200);

const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setSize(width, height);
container.appendChild(renderer.domElement);

const controls = new OrbitControls(camera, renderer.domElement);
controls.target.set(0, 0, 0);
controls.update();

const ambient = new THREE.AmbientLight(0xffffff, 0.6);
scene.add(ambient);
const dir = new THREE.DirectionalLight(0xffffff, 0.8);
dir.position.set(100, 100, 100);
scene.add(dir);

// PAYLOAD is injected by Python ahead of this document:
// { type: 'vox', data: {...} } or { type: 'gltf', dataUrl: '...' }

function buildVoxScene(data) {
  const positions = data.positions; // flat [x,y,z,...]
  const colors = data.colors;       // flat [r,g,b,a,...] 0-255

  const n = positions.length / 3;

  const box = new THREE.BoxGeometry(1, 1, 1);
  const material = new THREE.MeshStandardMaterial({ vertexColors: true });

  // One InstancedMesh of unit cubes with a per-instance color attribute.
  const mesh = new THREE.InstancedMesh(box, material, n);
  mesh.instanceMatrix.setUsage(THREE.DynamicDrawUsage);

  const colorArray = new Float32Array(n * 3);
  const dummy = new THREE.Object3D();
  for (let i = 0; i < n; i++) {
    const x = positions[i*3 + 0];
    const y = positions[i*3 + 1];
    const z = positions[i*3 + 2];
    // Center the model around the origin.
    dummy.position.set(x - data.bbox.size_x/2, y - data.bbox.size_y/2, z - data.bbox.size_z/2);
    dummy.updateMatrix();
    mesh.setMatrixAt(i, dummy.matrix);

    // Colors arrive as 0..255 RGBA; alpha is dropped for instanceColor.
    colorArray[i*3 + 0] = colors[i*4 + 0] / 255.0;
    colorArray[i*3 + 1] = colors[i*4 + 1] / 255.0;
    colorArray[i*3 + 2] = colors[i*4 + 2] / 255.0;
  }

  mesh.instanceColor = new THREE.InstancedBufferAttribute(colorArray, 3);
  scene.add(mesh);

  // Pull the camera back proportionally to the model's largest dimension.
  const maxDim = Math.max(data.bbox.size_x, data.bbox.size_y, data.bbox.size_z);
  camera.position.set(maxDim * 1.2, maxDim * 1.2, maxDim * 1.2);
  controls.target.set(0, 0, 0);
  controls.update();
}

function loadGltfFromDataUrl(dataUrl) {
  const loader = new GLTFLoader();
  // loader.load() understands data: URLs and handles both JSON .gltf and
  // binary .glb; decoding with atob() and parsing as text corrupts
  // binary payloads.
  loader.load(dataUrl, function(gltf) {
    scene.add(gltf.scene);
    // Frame the loaded scene.
    const box = new THREE.Box3().setFromObject(gltf.scene);
    const size = new THREE.Vector3(); box.getSize(size);
    const maxDim = Math.max(size.x, size.y, size.z);
    camera.position.set(maxDim * 1.2, maxDim * 1.2, maxDim * 1.2);
    controls.target.copy(box.getCenter(new THREE.Vector3()));
    controls.update();
  }, undefined, function(err){ console.error(err); });
}

function animate() {
  requestAnimationFrame(animate);
  renderer.render(scene, camera);
}
animate();

// Payload inserted by Python when the HTML is generated.
if (window.PAYLOAD) {
  if (window.PAYLOAD.type === 'vox') {
    buildVoxScene(window.PAYLOAD.data);
  } else if (window.PAYLOAD.type === 'gltf') {
    loadGltfFromDataUrl(window.PAYLOAD.dataUrl);
  }
}

// Keep the viewport in sync with the container on resize.
window.addEventListener('resize', () => {
  const w = container.clientWidth; const h = container.clientHeight;
  camera.aspect = w/h; camera.updateProjectionMatrix(); renderer.setSize(w,h);
});
</script>
</body>
</html>
"""
265
+
266
+
267
def build_html_for_vox(vox_json: dict) -> str:
    """Return the viewer HTML with the vox payload injected as a JS global.

    The payload script is prepended so ``window.PAYLOAD`` exists before the
    module script in HTML_TEMPLATE runs.
    """
    payload = {"type": "vox", "data": vox_json}
    # Escape "</" so the serialized JSON can never terminate the <script>
    # element early (defensive; vox payloads are numeric anyway).
    payload_js = json.dumps(payload).replace("</", "<\\/")
    injected = f"<script>window.PAYLOAD = {payload_js};</script>"
    return injected + HTML_TEMPLATE
274
+
275
+
276
def build_html_for_gltf(data_url: str) -> str:
    """Return the viewer HTML with a glTF data-URL payload injected.

    ``json.dumps`` is used instead of raw f-string interpolation so the URL
    is always a correctly quoted/escaped JS string literal.
    """
    payload = {"type": "gltf", "dataUrl": data_url}
    payload_script = f"<script>window.PAYLOAD = {json.dumps(payload)};</script>"
    return payload_script + HTML_TEMPLATE
280
+
281
+
282
def viewer_from_upload(file) -> str:
    """Main Gradio callback: accept an uploaded file, return viewer HTML.

    Returns an error <div> for missing/unreadable/unsupported uploads.
    """
    if file is None:
        return "<div style='color:#eee'>No file uploaded yet.</div>"

    # Newer gradio versions can hand the upload over as a plain filepath
    # string; normalize to an open binary handle so the helpers (which
    # expect .name / .read) work either way.
    if isinstance(file, str):
        try:
            with open(file, "rb") as fh:
                return _render_upload(fh)
        except OSError:
            return "<div style='color:#f88'>Could not read uploaded file.</div>"
    return _render_upload(file)


def _render_upload(file) -> str:
    """Dispatch an upload (object with ``.name``) to the right HTML builder."""
    ext = Path(getattr(file, 'name', '')).suffix.lower()
    if ext == '.vox':
        return build_html_for_vox(parse_vox_to_json(file))
    if ext in ('.gltf', '.glb'):
        data_url, _mime = file_to_dataurl(file)
        return build_html_for_gltf(data_url)
    return f"<div style='color:#f88'>Unsupported extension: {ext}. Upload .vox, .gltf or .glb</div>"
296
+
297
+
298
# Build the Gradio interface: an uploader plus a raw-HTML slot that the
# callback replaces with a full embedded three.js viewer document.
with gr.Blocks(css="""
#viewer { border: 1px solid #444 }
""") as demo:
    gr.Markdown("# VOX + glTF Viewer\nUpload a `.vox` (MagicaVoxel) or `.gltf` / `.glb` file and preview it in a Three.js WebGL viewer.")

    with gr.Row():
        # File input restricted to the three supported extensions.
        uploader = gr.File(label="Upload .vox / .gltf / .glb", file_types=['.vox', '.gltf', '.glb'])
        # Placeholder HTML shown until the first upload arrives.
        html_out = gr.HTML("<div style='color:#888'>Upload a file to preview</div>")

    # Re-render the viewer whenever the uploaded file changes.
    uploader.change(viewer_from_upload, inputs=[uploader], outputs=[html_out])

    gr.Markdown("---\nNotes: Viewer uses InstancedMesh for voxel rendering; large scenes may be heavy. Consider downsampling or LOD.")
311
+
312
+
313
# Launch the Gradio dev server only when executed as a script, not on import.
if __name__ == "__main__":
    demo.launch()